id (int32, 0-252k) | repo (stringlengths 7-55) | path (stringlengths 4-127) | func_name (stringlengths 1-88) | original_string (stringlengths 75-19.8k) | language (stringclasses, 1 value) | code (stringlengths 75-19.8k) | code_tokens (sequence) | docstring (stringlengths 3-17.3k) | docstring_tokens (sequence) | sha (stringlengths 40-40) | url (stringlengths 87-242) |
---|---|---|---|---|---|---|---|---|---|---|---|
5,400 | datajoint/datajoint-python | datajoint/blob.py | BlobReader.read_string | def read_string(self, advance=True):
"""
Read a string terminated by null byte '\0'. The returned string
object is ASCII decoded, and will not include the terminating null byte.
"""
target = self._blob.find(b'\0', self.pos)
assert target >= self._pos
data = self._blob[self._pos:target]
if advance:
self._pos = target + 1
return data.decode('ascii') | python | def read_string(self, advance=True):
"""
Read a string terminated by null byte '\0'. The returned string
object is ASCII decoded, and will not include the terminating null byte.
"""
target = self._blob.find(b'\0', self.pos)
assert target >= self._pos
data = self._blob[self._pos:target]
if advance:
self._pos = target + 1
return data.decode('ascii') | [
"def",
"read_string",
"(",
"self",
",",
"advance",
"=",
"True",
")",
":",
"target",
"=",
"self",
".",
"_blob",
".",
"find",
"(",
"b'\\0'",
",",
"self",
".",
"pos",
")",
"assert",
"target",
">=",
"self",
".",
"_pos",
"data",
"=",
"self",
".",
"_blob",
"[",
"self",
".",
"_pos",
":",
"target",
"]",
"if",
"advance",
":",
"self",
".",
"_pos",
"=",
"target",
"+",
"1",
"return",
"data",
".",
"decode",
"(",
"'ascii'",
")"
] | Read a string terminated by null byte '\0'. The returned string
object is ASCII decoded, and will not include the terminating null byte. | [
"Read",
"a",
"string",
"terminated",
"by",
"null",
"byte",
"\\",
"0",
".",
"The",
"returned",
"string",
"object",
"is",
"ASCII",
"decoded",
"and",
"will",
"not",
"include",
"the",
"terminating",
"null",
"byte",
"."
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/blob.py#L180-L190 |
5,401 | datajoint/datajoint-python | datajoint/blob.py | BlobReader.read_value | def read_value(self, dtype='uint64', count=1, advance=True):
"""
Read one or more scalars of the indicated dtype. Count specifies the number of
scalars to be read in.
"""
data = np.frombuffer(self._blob, dtype=dtype, count=count, offset=self.pos)
if advance:
# probably the same thing as data.nbytes * 8
self._pos += data.dtype.itemsize * data.size
if count == 1:
data = data[0]
return data | python | def read_value(self, dtype='uint64', count=1, advance=True):
"""
Read one or more scalars of the indicated dtype. Count specifies the number of
scalars to be read in.
"""
data = np.frombuffer(self._blob, dtype=dtype, count=count, offset=self.pos)
if advance:
# probably the same thing as data.nbytes * 8
self._pos += data.dtype.itemsize * data.size
if count == 1:
data = data[0]
return data | [
"def",
"read_value",
"(",
"self",
",",
"dtype",
"=",
"'uint64'",
",",
"count",
"=",
"1",
",",
"advance",
"=",
"True",
")",
":",
"data",
"=",
"np",
".",
"frombuffer",
"(",
"self",
".",
"_blob",
",",
"dtype",
"=",
"dtype",
",",
"count",
"=",
"count",
",",
"offset",
"=",
"self",
".",
"pos",
")",
"if",
"advance",
":",
"# probably the same thing as data.nbytes * 8",
"self",
".",
"_pos",
"+=",
"data",
".",
"dtype",
".",
"itemsize",
"*",
"data",
".",
"size",
"if",
"count",
"==",
"1",
":",
"data",
"=",
"data",
"[",
"0",
"]",
"return",
"data"
] | Read one or more scalars of the indicated dtype. Count specifies the number of
scalars to be read in. | [
"Read",
"one",
"or",
"more",
"scalars",
"of",
"the",
"indicated",
"dtype",
".",
"Count",
"specifies",
"the",
"number",
"of",
"scalars",
"to",
"be",
"read",
"in",
"."
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/blob.py#L192-L203 |
5,402 | datajoint/datajoint-python | datajoint/external.py | ExternalTable.put | def put(self, store, obj):
"""
put an object in external store
"""
spec = self._get_store_spec(store)
blob = pack(obj)
blob_hash = long_hash(blob) + store[len('external-'):]
if spec['protocol'] == 'file':
folder = os.path.join(spec['location'], self.database)
full_path = os.path.join(folder, blob_hash)
if not os.path.isfile(full_path):
try:
safe_write(full_path, blob)
except FileNotFoundError:
os.makedirs(folder)
safe_write(full_path, blob)
elif spec['protocol'] == 's3':
S3Folder(database=self.database, **spec).put(blob_hash, blob)
else:
raise DataJointError('Unknown external storage protocol {protocol} for {store}'.format(
store=store, protocol=spec['protocol']))
# insert tracking info
self.connection.query(
"INSERT INTO {tab} (hash, size) VALUES ('{hash}', {size}) "
"ON DUPLICATE KEY UPDATE timestamp=CURRENT_TIMESTAMP".format(
tab=self.full_table_name,
hash=blob_hash,
size=len(blob)))
return blob_hash | python | def put(self, store, obj):
"""
put an object in external store
"""
spec = self._get_store_spec(store)
blob = pack(obj)
blob_hash = long_hash(blob) + store[len('external-'):]
if spec['protocol'] == 'file':
folder = os.path.join(spec['location'], self.database)
full_path = os.path.join(folder, blob_hash)
if not os.path.isfile(full_path):
try:
safe_write(full_path, blob)
except FileNotFoundError:
os.makedirs(folder)
safe_write(full_path, blob)
elif spec['protocol'] == 's3':
S3Folder(database=self.database, **spec).put(blob_hash, blob)
else:
raise DataJointError('Unknown external storage protocol {protocol} for {store}'.format(
store=store, protocol=spec['protocol']))
# insert tracking info
self.connection.query(
"INSERT INTO {tab} (hash, size) VALUES ('{hash}', {size}) "
"ON DUPLICATE KEY UPDATE timestamp=CURRENT_TIMESTAMP".format(
tab=self.full_table_name,
hash=blob_hash,
size=len(blob)))
return blob_hash | [
"def",
"put",
"(",
"self",
",",
"store",
",",
"obj",
")",
":",
"spec",
"=",
"self",
".",
"_get_store_spec",
"(",
"store",
")",
"blob",
"=",
"pack",
"(",
"obj",
")",
"blob_hash",
"=",
"long_hash",
"(",
"blob",
")",
"+",
"store",
"[",
"len",
"(",
"'external-'",
")",
":",
"]",
"if",
"spec",
"[",
"'protocol'",
"]",
"==",
"'file'",
":",
"folder",
"=",
"os",
".",
"path",
".",
"join",
"(",
"spec",
"[",
"'location'",
"]",
",",
"self",
".",
"database",
")",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"folder",
",",
"blob_hash",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"full_path",
")",
":",
"try",
":",
"safe_write",
"(",
"full_path",
",",
"blob",
")",
"except",
"FileNotFoundError",
":",
"os",
".",
"makedirs",
"(",
"folder",
")",
"safe_write",
"(",
"full_path",
",",
"blob",
")",
"elif",
"spec",
"[",
"'protocol'",
"]",
"==",
"'s3'",
":",
"S3Folder",
"(",
"database",
"=",
"self",
".",
"database",
",",
"*",
"*",
"spec",
")",
".",
"put",
"(",
"blob_hash",
",",
"blob",
")",
"else",
":",
"raise",
"DataJointError",
"(",
"'Unknown external storage protocol {protocol} for {store}'",
".",
"format",
"(",
"store",
"=",
"store",
",",
"protocol",
"=",
"spec",
"[",
"'protocol'",
"]",
")",
")",
"# insert tracking info",
"self",
".",
"connection",
".",
"query",
"(",
"\"INSERT INTO {tab} (hash, size) VALUES ('{hash}', {size}) \"",
"\"ON DUPLICATE KEY UPDATE timestamp=CURRENT_TIMESTAMP\"",
".",
"format",
"(",
"tab",
"=",
"self",
".",
"full_table_name",
",",
"hash",
"=",
"blob_hash",
",",
"size",
"=",
"len",
"(",
"blob",
")",
")",
")",
"return",
"blob_hash"
] | put an object in external store | [
"put",
"an",
"object",
"in",
"external",
"store"
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/external.py#L45-L74 |
5,403 | datajoint/datajoint-python | datajoint/external.py | ExternalTable.get | def get(self, blob_hash):
"""
get an object from external store.
Does not need to check whether it's in the table.
"""
if blob_hash is None:
return None
store = blob_hash[STORE_HASH_LENGTH:]
store = 'external' + ('-' if store else '') + store
cache_folder = config.get('cache', None)
blob = None
if cache_folder:
try:
with open(os.path.join(cache_folder, blob_hash), 'rb') as f:
blob = f.read()
except FileNotFoundError:
pass
if blob is None:
spec = self._get_store_spec(store)
if spec['protocol'] == 'file':
full_path = os.path.join(spec['location'], self.database, blob_hash)
try:
with open(full_path, 'rb') as f:
blob = f.read()
except FileNotFoundError:
raise DataJointError('Lost access to external blob %s.' % full_path) from None
elif spec['protocol'] == 's3':
try:
blob = S3Folder(database=self.database, **spec).get(blob_hash)
except TypeError:
raise DataJointError('External store {store} configuration is incomplete.'.format(store=store))
else:
raise DataJointError('Unknown external storage protocol "%s"' % spec['protocol'])
if cache_folder:
if not os.path.exists(cache_folder):
os.makedirs(cache_folder)
safe_write(os.path.join(cache_folder, blob_hash), blob)
return unpack(blob) | python | def get(self, blob_hash):
"""
get an object from external store.
Does not need to check whether it's in the table.
"""
if blob_hash is None:
return None
store = blob_hash[STORE_HASH_LENGTH:]
store = 'external' + ('-' if store else '') + store
cache_folder = config.get('cache', None)
blob = None
if cache_folder:
try:
with open(os.path.join(cache_folder, blob_hash), 'rb') as f:
blob = f.read()
except FileNotFoundError:
pass
if blob is None:
spec = self._get_store_spec(store)
if spec['protocol'] == 'file':
full_path = os.path.join(spec['location'], self.database, blob_hash)
try:
with open(full_path, 'rb') as f:
blob = f.read()
except FileNotFoundError:
raise DataJointError('Lost access to external blob %s.' % full_path) from None
elif spec['protocol'] == 's3':
try:
blob = S3Folder(database=self.database, **spec).get(blob_hash)
except TypeError:
raise DataJointError('External store {store} configuration is incomplete.'.format(store=store))
else:
raise DataJointError('Unknown external storage protocol "%s"' % spec['protocol'])
if cache_folder:
if not os.path.exists(cache_folder):
os.makedirs(cache_folder)
safe_write(os.path.join(cache_folder, blob_hash), blob)
return unpack(blob) | [
"def",
"get",
"(",
"self",
",",
"blob_hash",
")",
":",
"if",
"blob_hash",
"is",
"None",
":",
"return",
"None",
"store",
"=",
"blob_hash",
"[",
"STORE_HASH_LENGTH",
":",
"]",
"store",
"=",
"'external'",
"+",
"(",
"'-'",
"if",
"store",
"else",
"''",
")",
"+",
"store",
"cache_folder",
"=",
"config",
".",
"get",
"(",
"'cache'",
",",
"None",
")",
"blob",
"=",
"None",
"if",
"cache_folder",
":",
"try",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"cache_folder",
",",
"blob_hash",
")",
",",
"'rb'",
")",
"as",
"f",
":",
"blob",
"=",
"f",
".",
"read",
"(",
")",
"except",
"FileNotFoundError",
":",
"pass",
"if",
"blob",
"is",
"None",
":",
"spec",
"=",
"self",
".",
"_get_store_spec",
"(",
"store",
")",
"if",
"spec",
"[",
"'protocol'",
"]",
"==",
"'file'",
":",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"spec",
"[",
"'location'",
"]",
",",
"self",
".",
"database",
",",
"blob_hash",
")",
"try",
":",
"with",
"open",
"(",
"full_path",
",",
"'rb'",
")",
"as",
"f",
":",
"blob",
"=",
"f",
".",
"read",
"(",
")",
"except",
"FileNotFoundError",
":",
"raise",
"DataJointError",
"(",
"'Lost access to external blob %s.'",
"%",
"full_path",
")",
"from",
"None",
"elif",
"spec",
"[",
"'protocol'",
"]",
"==",
"'s3'",
":",
"try",
":",
"blob",
"=",
"S3Folder",
"(",
"database",
"=",
"self",
".",
"database",
",",
"*",
"*",
"spec",
")",
".",
"get",
"(",
"blob_hash",
")",
"except",
"TypeError",
":",
"raise",
"DataJointError",
"(",
"'External store {store} configuration is incomplete.'",
".",
"format",
"(",
"store",
"=",
"store",
")",
")",
"else",
":",
"raise",
"DataJointError",
"(",
"'Unknown external storage protocol \"%s\"'",
"%",
"spec",
"[",
"'protocol'",
"]",
")",
"if",
"cache_folder",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"cache_folder",
")",
":",
"os",
".",
"makedirs",
"(",
"cache_folder",
")",
"safe_write",
"(",
"os",
".",
"path",
".",
"join",
"(",
"cache_folder",
",",
"blob_hash",
")",
",",
"blob",
")",
"return",
"unpack",
"(",
"blob",
")"
] | get an object from external store.
Does not need to check whether it's in the table. | [
"get",
"an",
"object",
"from",
"external",
"store",
".",
"Does",
"not",
"need",
"to",
"check",
"whether",
"it",
"s",
"in",
"the",
"table",
"."
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/external.py#L76-L118 |
5,404 | datajoint/datajoint-python | datajoint/external.py | ExternalTable.delete_garbage | def delete_garbage(self):
"""
Delete items that are no longer referenced.
This operation is safe to perform at any time.
"""
self.connection.query(
"DELETE FROM `{db}`.`{tab}` WHERE ".format(tab=self.table_name, db=self.database) +
" AND ".join(
'hash NOT IN (SELECT {column_name} FROM {referencing_table})'.format(**ref)
for ref in self.references) or "TRUE")
print('Deleted %d items' % self.connection.query("SELECT ROW_COUNT()").fetchone()[0]) | python | def delete_garbage(self):
"""
Delete items that are no longer referenced.
This operation is safe to perform at any time.
"""
self.connection.query(
"DELETE FROM `{db}`.`{tab}` WHERE ".format(tab=self.table_name, db=self.database) +
" AND ".join(
'hash NOT IN (SELECT {column_name} FROM {referencing_table})'.format(**ref)
for ref in self.references) or "TRUE")
print('Deleted %d items' % self.connection.query("SELECT ROW_COUNT()").fetchone()[0]) | [
"def",
"delete_garbage",
"(",
"self",
")",
":",
"self",
".",
"connection",
".",
"query",
"(",
"\"DELETE FROM `{db}`.`{tab}` WHERE \"",
".",
"format",
"(",
"tab",
"=",
"self",
".",
"table_name",
",",
"db",
"=",
"self",
".",
"database",
")",
"+",
"\" AND \"",
".",
"join",
"(",
"'hash NOT IN (SELECT {column_name} FROM {referencing_table})'",
".",
"format",
"(",
"*",
"*",
"ref",
")",
"for",
"ref",
"in",
"self",
".",
"references",
")",
"or",
"\"TRUE\"",
")",
"print",
"(",
"'Deleted %d items'",
"%",
"self",
".",
"connection",
".",
"query",
"(",
"\"SELECT ROW_COUNT()\"",
")",
".",
"fetchone",
"(",
")",
"[",
"0",
"]",
")"
] | Delete items that are no longer referenced.
This operation is safe to perform at any time. | [
"Delete",
"items",
"that",
"are",
"no",
"longer",
"referenced",
".",
"This",
"operation",
"is",
"safe",
"to",
"perform",
"at",
"any",
"time",
"."
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/external.py#L147-L157 |
5,405 | datajoint/datajoint-python | datajoint/external.py | ExternalTable.clean_store | def clean_store(self, store, display_progress=True):
"""
Clean unused data in an external storage repository from unused blobs.
This must be performed after delete_garbage during low-usage periods to reduce risks of data loss.
"""
spec = self._get_store_spec(store)
progress = tqdm if display_progress else lambda x: x
if spec['protocol'] == 'file':
folder = os.path.join(spec['location'], self.database)
delete_list = set(os.listdir(folder)).difference(self.fetch('hash'))
print('Deleting %d unused items from %s' % (len(delete_list), folder), flush=True)
for f in progress(delete_list):
os.remove(os.path.join(folder, f))
elif spec['protocol'] == 's3':
try:
S3Folder(database=self.database, **spec).clean(self.fetch('hash'))
except TypeError:
raise DataJointError('External store {store} configuration is incomplete.'.format(store=store)) | python | def clean_store(self, store, display_progress=True):
"""
Clean unused data in an external storage repository from unused blobs.
This must be performed after delete_garbage during low-usage periods to reduce risks of data loss.
"""
spec = self._get_store_spec(store)
progress = tqdm if display_progress else lambda x: x
if spec['protocol'] == 'file':
folder = os.path.join(spec['location'], self.database)
delete_list = set(os.listdir(folder)).difference(self.fetch('hash'))
print('Deleting %d unused items from %s' % (len(delete_list), folder), flush=True)
for f in progress(delete_list):
os.remove(os.path.join(folder, f))
elif spec['protocol'] == 's3':
try:
S3Folder(database=self.database, **spec).clean(self.fetch('hash'))
except TypeError:
raise DataJointError('External store {store} configuration is incomplete.'.format(store=store)) | [
"def",
"clean_store",
"(",
"self",
",",
"store",
",",
"display_progress",
"=",
"True",
")",
":",
"spec",
"=",
"self",
".",
"_get_store_spec",
"(",
"store",
")",
"progress",
"=",
"tqdm",
"if",
"display_progress",
"else",
"lambda",
"x",
":",
"x",
"if",
"spec",
"[",
"'protocol'",
"]",
"==",
"'file'",
":",
"folder",
"=",
"os",
".",
"path",
".",
"join",
"(",
"spec",
"[",
"'location'",
"]",
",",
"self",
".",
"database",
")",
"delete_list",
"=",
"set",
"(",
"os",
".",
"listdir",
"(",
"folder",
")",
")",
".",
"difference",
"(",
"self",
".",
"fetch",
"(",
"'hash'",
")",
")",
"print",
"(",
"'Deleting %d unused items from %s'",
"%",
"(",
"len",
"(",
"delete_list",
")",
",",
"folder",
")",
",",
"flush",
"=",
"True",
")",
"for",
"f",
"in",
"progress",
"(",
"delete_list",
")",
":",
"os",
".",
"remove",
"(",
"os",
".",
"path",
".",
"join",
"(",
"folder",
",",
"f",
")",
")",
"elif",
"spec",
"[",
"'protocol'",
"]",
"==",
"'s3'",
":",
"try",
":",
"S3Folder",
"(",
"database",
"=",
"self",
".",
"database",
",",
"*",
"*",
"spec",
")",
".",
"clean",
"(",
"self",
".",
"fetch",
"(",
"'hash'",
")",
")",
"except",
"TypeError",
":",
"raise",
"DataJointError",
"(",
"'External store {store} configuration is incomplete.'",
".",
"format",
"(",
"store",
"=",
"store",
")",
")"
] | Clean unused data in an external storage repository from unused blobs.
This must be performed after delete_garbage during low-usage periods to reduce risks of data loss. | [
"Clean",
"unused",
"data",
"in",
"an",
"external",
"storage",
"repository",
"from",
"unused",
"blobs",
".",
"This",
"must",
"be",
"performed",
"after",
"delete_garbage",
"during",
"low",
"-",
"usage",
"periods",
"to",
"reduce",
"risks",
"of",
"data",
"loss",
"."
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/external.py#L159-L176 |
5,406 | datajoint/datajoint-python | datajoint/errors.py | is_connection_error | def is_connection_error(e):
"""
Checks if error e pertains to a connection issue
"""
return (isinstance(e, err.InterfaceError) and e.args[0] == "(0, '')") or\
(isinstance(e, err.OperationalError) and e.args[0] in operation_error_codes.values()) | python | def is_connection_error(e):
"""
Checks if error e pertains to a connection issue
"""
return (isinstance(e, err.InterfaceError) and e.args[0] == "(0, '')") or\
(isinstance(e, err.OperationalError) and e.args[0] in operation_error_codes.values()) | [
"def",
"is_connection_error",
"(",
"e",
")",
":",
"return",
"(",
"isinstance",
"(",
"e",
",",
"err",
".",
"InterfaceError",
")",
"and",
"e",
".",
"args",
"[",
"0",
"]",
"==",
"\"(0, '')\"",
")",
"or",
"(",
"isinstance",
"(",
"e",
",",
"err",
".",
"OperationalError",
")",
"and",
"e",
".",
"args",
"[",
"0",
"]",
"in",
"operation_error_codes",
".",
"values",
"(",
")",
")"
] | Checks if error e pertains to a connection issue | [
"Checks",
"if",
"error",
"e",
"pertains",
"to",
"a",
"connection",
"issue"
] | 4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c | https://github.com/datajoint/datajoint-python/blob/4f29bb154a7ed2b8b64b4d3a9c8be4c16b39621c/datajoint/errors.py#L18-L23 |
5,407 | python-hyper/brotlipy | src/brotli/brotli.py | decompress | def decompress(data):
"""
Decompress a complete Brotli-compressed string.
:param data: A bytestring containing Brotli-compressed data.
"""
d = Decompressor()
data = d.decompress(data)
d.finish()
return data | python | def decompress(data):
"""
Decompress a complete Brotli-compressed string.
:param data: A bytestring containing Brotli-compressed data.
"""
d = Decompressor()
data = d.decompress(data)
d.finish()
return data | [
"def",
"decompress",
"(",
"data",
")",
":",
"d",
"=",
"Decompressor",
"(",
")",
"data",
"=",
"d",
".",
"decompress",
"(",
"data",
")",
"d",
".",
"finish",
"(",
")",
"return",
"data"
] | Decompress a complete Brotli-compressed string.
:param data: A bytestring containing Brotli-compressed data. | [
"Decompress",
"a",
"complete",
"Brotli",
"-",
"compressed",
"string",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L82-L91 |
5,408 | python-hyper/brotlipy | src/brotli/brotli.py | compress | def compress(data,
mode=DEFAULT_MODE,
quality=lib.BROTLI_DEFAULT_QUALITY,
lgwin=lib.BROTLI_DEFAULT_WINDOW,
lgblock=0,
dictionary=b''):
"""
Compress a string using Brotli.
.. versionchanged:: 0.5.0
Added ``mode``, ``quality``, `lgwin``, ``lgblock``, and ``dictionary``
parameters.
:param data: A bytestring containing the data to compress.
:type data: ``bytes``
:param mode: The encoder mode.
:type mode: :class:`BrotliEncoderMode` or ``int``
:param quality: Controls the compression-speed vs compression-density
tradeoffs. The higher the quality, the slower the compression. The
range of this value is 0 to 11.
:type quality: ``int``
:param lgwin: The base-2 logarithm of the sliding window size. The range of
this value is 10 to 24.
:type lgwin: ``int``
:param lgblock: The base-2 logarithm of the maximum input block size. The
range of this value is 16 to 24. If set to 0, the value will be set
based on ``quality``.
:type lgblock: ``int``
:param dictionary: A pre-set dictionary for LZ77. Please use this with
caution: if a dictionary is used for compression, the same dictionary
**must** be used for decompression!
:type dictionary: ``bytes``
:returns: The compressed bytestring.
:rtype: ``bytes``
"""
# This method uses private variables on the Compressor object, and
# generally does a whole lot of stuff that's not supported by the public
# API. The goal here is to minimise the number of allocations and copies
# we have to do. Users should prefer this method over the Compressor if
# they know they have single-shot data.
compressor = Compressor(
mode=mode,
quality=quality,
lgwin=lgwin,
lgblock=lgblock,
dictionary=dictionary
)
compressed_data = compressor._compress(data, lib.BROTLI_OPERATION_FINISH)
assert lib.BrotliEncoderIsFinished(compressor._encoder) == lib.BROTLI_TRUE
assert (
lib.BrotliEncoderHasMoreOutput(compressor._encoder) == lib.BROTLI_FALSE
)
return compressed_data | python | def compress(data,
mode=DEFAULT_MODE,
quality=lib.BROTLI_DEFAULT_QUALITY,
lgwin=lib.BROTLI_DEFAULT_WINDOW,
lgblock=0,
dictionary=b''):
"""
Compress a string using Brotli.
.. versionchanged:: 0.5.0
Added ``mode``, ``quality``, `lgwin``, ``lgblock``, and ``dictionary``
parameters.
:param data: A bytestring containing the data to compress.
:type data: ``bytes``
:param mode: The encoder mode.
:type mode: :class:`BrotliEncoderMode` or ``int``
:param quality: Controls the compression-speed vs compression-density
tradeoffs. The higher the quality, the slower the compression. The
range of this value is 0 to 11.
:type quality: ``int``
:param lgwin: The base-2 logarithm of the sliding window size. The range of
this value is 10 to 24.
:type lgwin: ``int``
:param lgblock: The base-2 logarithm of the maximum input block size. The
range of this value is 16 to 24. If set to 0, the value will be set
based on ``quality``.
:type lgblock: ``int``
:param dictionary: A pre-set dictionary for LZ77. Please use this with
caution: if a dictionary is used for compression, the same dictionary
**must** be used for decompression!
:type dictionary: ``bytes``
:returns: The compressed bytestring.
:rtype: ``bytes``
"""
# This method uses private variables on the Compressor object, and
# generally does a whole lot of stuff that's not supported by the public
# API. The goal here is to minimise the number of allocations and copies
# we have to do. Users should prefer this method over the Compressor if
# they know they have single-shot data.
compressor = Compressor(
mode=mode,
quality=quality,
lgwin=lgwin,
lgblock=lgblock,
dictionary=dictionary
)
compressed_data = compressor._compress(data, lib.BROTLI_OPERATION_FINISH)
assert lib.BrotliEncoderIsFinished(compressor._encoder) == lib.BROTLI_TRUE
assert (
lib.BrotliEncoderHasMoreOutput(compressor._encoder) == lib.BROTLI_FALSE
)
return compressed_data | [
"def",
"compress",
"(",
"data",
",",
"mode",
"=",
"DEFAULT_MODE",
",",
"quality",
"=",
"lib",
".",
"BROTLI_DEFAULT_QUALITY",
",",
"lgwin",
"=",
"lib",
".",
"BROTLI_DEFAULT_WINDOW",
",",
"lgblock",
"=",
"0",
",",
"dictionary",
"=",
"b''",
")",
":",
"# This method uses private variables on the Compressor object, and",
"# generally does a whole lot of stuff that's not supported by the public",
"# API. The goal here is to minimise the number of allocations and copies",
"# we have to do. Users should prefer this method over the Compressor if",
"# they know they have single-shot data.",
"compressor",
"=",
"Compressor",
"(",
"mode",
"=",
"mode",
",",
"quality",
"=",
"quality",
",",
"lgwin",
"=",
"lgwin",
",",
"lgblock",
"=",
"lgblock",
",",
"dictionary",
"=",
"dictionary",
")",
"compressed_data",
"=",
"compressor",
".",
"_compress",
"(",
"data",
",",
"lib",
".",
"BROTLI_OPERATION_FINISH",
")",
"assert",
"lib",
".",
"BrotliEncoderIsFinished",
"(",
"compressor",
".",
"_encoder",
")",
"==",
"lib",
".",
"BROTLI_TRUE",
"assert",
"(",
"lib",
".",
"BrotliEncoderHasMoreOutput",
"(",
"compressor",
".",
"_encoder",
")",
"==",
"lib",
".",
"BROTLI_FALSE",
")",
"return",
"compressed_data"
] | Compress a string using Brotli.
.. versionchanged:: 0.5.0
Added ``mode``, ``quality``, `lgwin``, ``lgblock``, and ``dictionary``
parameters.
:param data: A bytestring containing the data to compress.
:type data: ``bytes``
:param mode: The encoder mode.
:type mode: :class:`BrotliEncoderMode` or ``int``
:param quality: Controls the compression-speed vs compression-density
tradeoffs. The higher the quality, the slower the compression. The
range of this value is 0 to 11.
:type quality: ``int``
:param lgwin: The base-2 logarithm of the sliding window size. The range of
this value is 10 to 24.
:type lgwin: ``int``
:param lgblock: The base-2 logarithm of the maximum input block size. The
range of this value is 16 to 24. If set to 0, the value will be set
based on ``quality``.
:type lgblock: ``int``
:param dictionary: A pre-set dictionary for LZ77. Please use this with
caution: if a dictionary is used for compression, the same dictionary
**must** be used for decompression!
:type dictionary: ``bytes``
:returns: The compressed bytestring.
:rtype: ``bytes`` | [
"Compress",
"a",
"string",
"using",
"Brotli",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L94-L152 |
5,409 | python-hyper/brotlipy | src/brotli/brotli.py | Compressor._compress | def _compress(self, data, operation):
"""
This private method compresses some data in a given mode. This is used
because almost all of the code uses the exact same setup. It wouldn't
have to, but it doesn't hurt at all.
"""
# The 'algorithm' for working out how big to make this buffer is from
# the Brotli source code, brotlimodule.cc.
original_output_size = int(
math.ceil(len(data) + (len(data) >> 2) + 10240)
)
available_out = ffi.new("size_t *")
available_out[0] = original_output_size
output_buffer = ffi.new("uint8_t []", available_out[0])
ptr_to_output_buffer = ffi.new("uint8_t **", output_buffer)
input_size = ffi.new("size_t *", len(data))
input_buffer = ffi.new("uint8_t []", data)
ptr_to_input_buffer = ffi.new("uint8_t **", input_buffer)
rc = lib.BrotliEncoderCompressStream(
self._encoder,
operation,
input_size,
ptr_to_input_buffer,
available_out,
ptr_to_output_buffer,
ffi.NULL
)
if rc != lib.BROTLI_TRUE: # pragma: no cover
raise Error("Error encountered compressing data.")
assert not input_size[0]
size_of_output = original_output_size - available_out[0]
return ffi.buffer(output_buffer, size_of_output)[:] | python | def _compress(self, data, operation):
"""
This private method compresses some data in a given mode. This is used
because almost all of the code uses the exact same setup. It wouldn't
have to, but it doesn't hurt at all.
"""
# The 'algorithm' for working out how big to make this buffer is from
# the Brotli source code, brotlimodule.cc.
original_output_size = int(
math.ceil(len(data) + (len(data) >> 2) + 10240)
)
available_out = ffi.new("size_t *")
available_out[0] = original_output_size
output_buffer = ffi.new("uint8_t []", available_out[0])
ptr_to_output_buffer = ffi.new("uint8_t **", output_buffer)
input_size = ffi.new("size_t *", len(data))
input_buffer = ffi.new("uint8_t []", data)
ptr_to_input_buffer = ffi.new("uint8_t **", input_buffer)
rc = lib.BrotliEncoderCompressStream(
self._encoder,
operation,
input_size,
ptr_to_input_buffer,
available_out,
ptr_to_output_buffer,
ffi.NULL
)
if rc != lib.BROTLI_TRUE: # pragma: no cover
raise Error("Error encountered compressing data.")
assert not input_size[0]
size_of_output = original_output_size - available_out[0]
return ffi.buffer(output_buffer, size_of_output)[:] | [
"def",
"_compress",
"(",
"self",
",",
"data",
",",
"operation",
")",
":",
"# The 'algorithm' for working out how big to make this buffer is from",
"# the Brotli source code, brotlimodule.cc.",
"original_output_size",
"=",
"int",
"(",
"math",
".",
"ceil",
"(",
"len",
"(",
"data",
")",
"+",
"(",
"len",
"(",
"data",
")",
">>",
"2",
")",
"+",
"10240",
")",
")",
"available_out",
"=",
"ffi",
".",
"new",
"(",
"\"size_t *\"",
")",
"available_out",
"[",
"0",
"]",
"=",
"original_output_size",
"output_buffer",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t []\"",
",",
"available_out",
"[",
"0",
"]",
")",
"ptr_to_output_buffer",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t **\"",
",",
"output_buffer",
")",
"input_size",
"=",
"ffi",
".",
"new",
"(",
"\"size_t *\"",
",",
"len",
"(",
"data",
")",
")",
"input_buffer",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t []\"",
",",
"data",
")",
"ptr_to_input_buffer",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t **\"",
",",
"input_buffer",
")",
"rc",
"=",
"lib",
".",
"BrotliEncoderCompressStream",
"(",
"self",
".",
"_encoder",
",",
"operation",
",",
"input_size",
",",
"ptr_to_input_buffer",
",",
"available_out",
",",
"ptr_to_output_buffer",
",",
"ffi",
".",
"NULL",
")",
"if",
"rc",
"!=",
"lib",
".",
"BROTLI_TRUE",
":",
"# pragma: no cover",
"raise",
"Error",
"(",
"\"Error encountered compressing data.\"",
")",
"assert",
"not",
"input_size",
"[",
"0",
"]",
"size_of_output",
"=",
"original_output_size",
"-",
"available_out",
"[",
"0",
"]",
"return",
"ffi",
".",
"buffer",
"(",
"output_buffer",
",",
"size_of_output",
")",
"[",
":",
"]"
] | This private method compresses some data in a given mode. This is used
because almost all of the code uses the exact same setup. It wouldn't
have to, but it doesn't hurt at all. | [
"This",
"private",
"method",
"compresses",
"some",
"data",
"in",
"a",
"given",
"mode",
".",
"This",
"is",
"used",
"because",
"almost",
"all",
"of",
"the",
"code",
"uses",
"the",
"exact",
"same",
"setup",
".",
"It",
"wouldn",
"t",
"have",
"to",
"but",
"it",
"doesn",
"t",
"hurt",
"at",
"all",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L283-L317 |
5,410 | python-hyper/brotlipy | src/brotli/brotli.py | Compressor.flush | def flush(self):
"""
Flush the compressor. This will emit the remaining output data, but
will not destroy the compressor. It can be used, for example, to ensure
that given chunks of content will decompress immediately.
"""
chunks = []
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
while lib.BrotliEncoderHasMoreOutput(self._encoder) == lib.BROTLI_TRUE:
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
return b''.join(chunks) | python | def flush(self):
"""
Flush the compressor. This will emit the remaining output data, but
will not destroy the compressor. It can be used, for example, to ensure
that given chunks of content will decompress immediately.
"""
chunks = []
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
while lib.BrotliEncoderHasMoreOutput(self._encoder) == lib.BROTLI_TRUE:
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
return b''.join(chunks) | [
"def",
"flush",
"(",
"self",
")",
":",
"chunks",
"=",
"[",
"]",
"chunks",
".",
"append",
"(",
"self",
".",
"_compress",
"(",
"b''",
",",
"lib",
".",
"BROTLI_OPERATION_FLUSH",
")",
")",
"while",
"lib",
".",
"BrotliEncoderHasMoreOutput",
"(",
"self",
".",
"_encoder",
")",
"==",
"lib",
".",
"BROTLI_TRUE",
":",
"chunks",
".",
"append",
"(",
"self",
".",
"_compress",
"(",
"b''",
",",
"lib",
".",
"BROTLI_OPERATION_FLUSH",
")",
")",
"return",
"b''",
".",
"join",
"(",
"chunks",
")"
] | Flush the compressor. This will emit the remaining output data, but
will not destroy the compressor. It can be used, for example, to ensure
that given chunks of content will decompress immediately. | [
"Flush",
"the",
"compressor",
".",
"This",
"will",
"emit",
"the",
"remaining",
"output",
"data",
"but",
"will",
"not",
"destroy",
"the",
"compressor",
".",
"It",
"can",
"be",
"used",
"for",
"example",
"to",
"ensure",
"that",
"given",
"chunks",
"of",
"content",
"will",
"decompress",
"immediately",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L330-L342 |
5,411 | python-hyper/brotlipy | src/brotli/brotli.py | Compressor.finish | def finish(self):
"""
Finish the compressor. This will emit the remaining output data and
transition the compressor to a completed state. The compressor cannot
be used again after this point, and must be replaced.
"""
chunks = []
while lib.BrotliEncoderIsFinished(self._encoder) == lib.BROTLI_FALSE:
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FINISH))
return b''.join(chunks) | python | def finish(self):
"""
Finish the compressor. This will emit the remaining output data and
transition the compressor to a completed state. The compressor cannot
be used again after this point, and must be replaced.
"""
chunks = []
while lib.BrotliEncoderIsFinished(self._encoder) == lib.BROTLI_FALSE:
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FINISH))
return b''.join(chunks) | [
"def",
"finish",
"(",
"self",
")",
":",
"chunks",
"=",
"[",
"]",
"while",
"lib",
".",
"BrotliEncoderIsFinished",
"(",
"self",
".",
"_encoder",
")",
"==",
"lib",
".",
"BROTLI_FALSE",
":",
"chunks",
".",
"append",
"(",
"self",
".",
"_compress",
"(",
"b''",
",",
"lib",
".",
"BROTLI_OPERATION_FINISH",
")",
")",
"return",
"b''",
".",
"join",
"(",
"chunks",
")"
] | Finish the compressor. This will emit the remaining output data and
transition the compressor to a completed state. The compressor cannot
be used again after this point, and must be replaced. | [
"Finish",
"the",
"compressor",
".",
"This",
"will",
"emit",
"the",
"remaining",
"output",
"data",
"and",
"transition",
"the",
"compressor",
"to",
"a",
"completed",
"state",
".",
"The",
"compressor",
"cannot",
"be",
"used",
"again",
"after",
"this",
"point",
"and",
"must",
"be",
"replaced",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L344-L354 |
5,412 | python-hyper/brotlipy | src/brotli/brotli.py | Decompressor.decompress | def decompress(self, data):
"""
Decompress part of a complete Brotli-compressed string.
:param data: A bytestring containing Brotli-compressed data.
:returns: A bytestring containing the decompressed data.
"""
chunks = []
available_in = ffi.new("size_t *", len(data))
in_buffer = ffi.new("uint8_t[]", data)
next_in = ffi.new("uint8_t **", in_buffer)
while True:
# Allocate a buffer that's hopefully overlarge, but if it's not we
# don't mind: we'll spin around again.
buffer_size = 5 * len(data)
available_out = ffi.new("size_t *", buffer_size)
out_buffer = ffi.new("uint8_t[]", buffer_size)
next_out = ffi.new("uint8_t **", out_buffer)
rc = lib.BrotliDecoderDecompressStream(self._decoder,
available_in,
next_in,
available_out,
next_out,
ffi.NULL)
# First, check for errors.
if rc == lib.BROTLI_DECODER_RESULT_ERROR:
error_code = lib.BrotliDecoderGetErrorCode(self._decoder)
error_message = lib.BrotliDecoderErrorString(error_code)
raise Error(
"Decompression error: %s" % ffi.string(error_message)
)
# Next, copy the result out.
chunk = ffi.buffer(out_buffer, buffer_size - available_out[0])[:]
chunks.append(chunk)
if rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT:
assert available_in[0] == 0
break
elif rc == lib.BROTLI_DECODER_RESULT_SUCCESS:
break
else:
# It's cool if we need more output, we just loop again.
assert rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT
return b''.join(chunks) | python | def decompress(self, data):
"""
Decompress part of a complete Brotli-compressed string.
:param data: A bytestring containing Brotli-compressed data.
:returns: A bytestring containing the decompressed data.
"""
chunks = []
available_in = ffi.new("size_t *", len(data))
in_buffer = ffi.new("uint8_t[]", data)
next_in = ffi.new("uint8_t **", in_buffer)
while True:
# Allocate a buffer that's hopefully overlarge, but if it's not we
# don't mind: we'll spin around again.
buffer_size = 5 * len(data)
available_out = ffi.new("size_t *", buffer_size)
out_buffer = ffi.new("uint8_t[]", buffer_size)
next_out = ffi.new("uint8_t **", out_buffer)
rc = lib.BrotliDecoderDecompressStream(self._decoder,
available_in,
next_in,
available_out,
next_out,
ffi.NULL)
# First, check for errors.
if rc == lib.BROTLI_DECODER_RESULT_ERROR:
error_code = lib.BrotliDecoderGetErrorCode(self._decoder)
error_message = lib.BrotliDecoderErrorString(error_code)
raise Error(
"Decompression error: %s" % ffi.string(error_message)
)
# Next, copy the result out.
chunk = ffi.buffer(out_buffer, buffer_size - available_out[0])[:]
chunks.append(chunk)
if rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT:
assert available_in[0] == 0
break
elif rc == lib.BROTLI_DECODER_RESULT_SUCCESS:
break
else:
# It's cool if we need more output, we just loop again.
assert rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT
return b''.join(chunks) | [
"def",
"decompress",
"(",
"self",
",",
"data",
")",
":",
"chunks",
"=",
"[",
"]",
"available_in",
"=",
"ffi",
".",
"new",
"(",
"\"size_t *\"",
",",
"len",
"(",
"data",
")",
")",
"in_buffer",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t[]\"",
",",
"data",
")",
"next_in",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t **\"",
",",
"in_buffer",
")",
"while",
"True",
":",
"# Allocate a buffer that's hopefully overlarge, but if it's not we",
"# don't mind: we'll spin around again.",
"buffer_size",
"=",
"5",
"*",
"len",
"(",
"data",
")",
"available_out",
"=",
"ffi",
".",
"new",
"(",
"\"size_t *\"",
",",
"buffer_size",
")",
"out_buffer",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t[]\"",
",",
"buffer_size",
")",
"next_out",
"=",
"ffi",
".",
"new",
"(",
"\"uint8_t **\"",
",",
"out_buffer",
")",
"rc",
"=",
"lib",
".",
"BrotliDecoderDecompressStream",
"(",
"self",
".",
"_decoder",
",",
"available_in",
",",
"next_in",
",",
"available_out",
",",
"next_out",
",",
"ffi",
".",
"NULL",
")",
"# First, check for errors.",
"if",
"rc",
"==",
"lib",
".",
"BROTLI_DECODER_RESULT_ERROR",
":",
"error_code",
"=",
"lib",
".",
"BrotliDecoderGetErrorCode",
"(",
"self",
".",
"_decoder",
")",
"error_message",
"=",
"lib",
".",
"BrotliDecoderErrorString",
"(",
"error_code",
")",
"raise",
"Error",
"(",
"\"Decompression error: %s\"",
"%",
"ffi",
".",
"string",
"(",
"error_message",
")",
")",
"# Next, copy the result out.",
"chunk",
"=",
"ffi",
".",
"buffer",
"(",
"out_buffer",
",",
"buffer_size",
"-",
"available_out",
"[",
"0",
"]",
")",
"[",
":",
"]",
"chunks",
".",
"append",
"(",
"chunk",
")",
"if",
"rc",
"==",
"lib",
".",
"BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT",
":",
"assert",
"available_in",
"[",
"0",
"]",
"==",
"0",
"break",
"elif",
"rc",
"==",
"lib",
".",
"BROTLI_DECODER_RESULT_SUCCESS",
":",
"break",
"else",
":",
"# It's cool if we need more output, we just loop again.",
"assert",
"rc",
"==",
"lib",
".",
"BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT",
"return",
"b''",
".",
"join",
"(",
"chunks",
")"
] | Decompress part of a complete Brotli-compressed string.
:param data: A bytestring containing Brotli-compressed data.
:returns: A bytestring containing the decompressed data. | [
"Decompress",
"part",
"of",
"a",
"complete",
"Brotli",
"-",
"compressed",
"string",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L386-L435 |
5,413 | python-hyper/brotlipy | src/brotli/brotli.py | Decompressor.finish | def finish(self):
"""
Finish the decompressor. As the decompressor decompresses eagerly, this
will never actually emit any data. However, it will potentially throw
errors if a truncated or damaged data stream has been used.
Note that, once this method is called, the decompressor is no longer
safe for further use and must be thrown away.
"""
assert (
lib.BrotliDecoderHasMoreOutput(self._decoder) == lib.BROTLI_FALSE
)
if lib.BrotliDecoderIsFinished(self._decoder) == lib.BROTLI_FALSE:
raise Error("Decompression error: incomplete compressed stream.")
return b'' | python | def finish(self):
"""
Finish the decompressor. As the decompressor decompresses eagerly, this
will never actually emit any data. However, it will potentially throw
errors if a truncated or damaged data stream has been used.
Note that, once this method is called, the decompressor is no longer
safe for further use and must be thrown away.
"""
assert (
lib.BrotliDecoderHasMoreOutput(self._decoder) == lib.BROTLI_FALSE
)
if lib.BrotliDecoderIsFinished(self._decoder) == lib.BROTLI_FALSE:
raise Error("Decompression error: incomplete compressed stream.")
return b'' | [
"def",
"finish",
"(",
"self",
")",
":",
"assert",
"(",
"lib",
".",
"BrotliDecoderHasMoreOutput",
"(",
"self",
".",
"_decoder",
")",
"==",
"lib",
".",
"BROTLI_FALSE",
")",
"if",
"lib",
".",
"BrotliDecoderIsFinished",
"(",
"self",
".",
"_decoder",
")",
"==",
"lib",
".",
"BROTLI_FALSE",
":",
"raise",
"Error",
"(",
"\"Decompression error: incomplete compressed stream.\"",
")",
"return",
"b''"
] | Finish the decompressor. As the decompressor decompresses eagerly, this
will never actually emit any data. However, it will potentially throw
errors if a truncated or damaged data stream has been used.
Note that, once this method is called, the decompressor is no longer
safe for further use and must be thrown away. | [
"Finish",
"the",
"decompressor",
".",
"As",
"the",
"decompressor",
"decompresses",
"eagerly",
"this",
"will",
"never",
"actually",
"emit",
"any",
"data",
".",
"However",
"it",
"will",
"potentially",
"throw",
"errors",
"if",
"a",
"truncated",
"or",
"damaged",
"data",
"stream",
"has",
"been",
"used",
"."
] | ffddf2ea5adc584c8c353d246bb1077b7e781b63 | https://github.com/python-hyper/brotlipy/blob/ffddf2ea5adc584c8c353d246bb1077b7e781b63/src/brotli/brotli.py#L451-L466 |
5,414 | adafruit/Adafruit_Python_CharLCD | examples/char_lcd_rgb_pwm.py | hsv_to_rgb | def hsv_to_rgb(hsv):
"""Converts a tuple of hue, saturation, value to a tuple of red, green blue.
Hue should be an angle from 0.0 to 359.0. Saturation and value should be a
value from 0.0 to 1.0, where saturation controls the intensity of the hue and
value controls the brightness.
"""
# Algorithm adapted from http://www.cs.rit.edu/~ncs/color/t_convert.html
h, s, v = hsv
if s == 0:
return (v, v, v)
h /= 60.0
i = math.floor(h)
f = h-i
p = v*(1.0-s)
q = v*(1.0-s*f)
t = v*(1.0-s*(1.0-f))
if i == 0:
return (v, t, p)
elif i == 1:
return (q, v, p)
elif i == 2:
return (p, v, t)
elif i == 3:
return (p, q, v)
elif i == 4:
return (t, p, v)
else:
return (v, p, q) | python | def hsv_to_rgb(hsv):
"""Converts a tuple of hue, saturation, value to a tuple of red, green blue.
Hue should be an angle from 0.0 to 359.0. Saturation and value should be a
value from 0.0 to 1.0, where saturation controls the intensity of the hue and
value controls the brightness.
"""
# Algorithm adapted from http://www.cs.rit.edu/~ncs/color/t_convert.html
h, s, v = hsv
if s == 0:
return (v, v, v)
h /= 60.0
i = math.floor(h)
f = h-i
p = v*(1.0-s)
q = v*(1.0-s*f)
t = v*(1.0-s*(1.0-f))
if i == 0:
return (v, t, p)
elif i == 1:
return (q, v, p)
elif i == 2:
return (p, v, t)
elif i == 3:
return (p, q, v)
elif i == 4:
return (t, p, v)
else:
return (v, p, q) | [
"def",
"hsv_to_rgb",
"(",
"hsv",
")",
":",
"# Algorithm adapted from http://www.cs.rit.edu/~ncs/color/t_convert.html",
"h",
",",
"s",
",",
"v",
"=",
"hsv",
"if",
"s",
"==",
"0",
":",
"return",
"(",
"v",
",",
"v",
",",
"v",
")",
"h",
"/=",
"60.0",
"i",
"=",
"math",
".",
"floor",
"(",
"h",
")",
"f",
"=",
"h",
"-",
"i",
"p",
"=",
"v",
"*",
"(",
"1.0",
"-",
"s",
")",
"q",
"=",
"v",
"*",
"(",
"1.0",
"-",
"s",
"*",
"f",
")",
"t",
"=",
"v",
"*",
"(",
"1.0",
"-",
"s",
"*",
"(",
"1.0",
"-",
"f",
")",
")",
"if",
"i",
"==",
"0",
":",
"return",
"(",
"v",
",",
"t",
",",
"p",
")",
"elif",
"i",
"==",
"1",
":",
"return",
"(",
"q",
",",
"v",
",",
"p",
")",
"elif",
"i",
"==",
"2",
":",
"return",
"(",
"p",
",",
"v",
",",
"t",
")",
"elif",
"i",
"==",
"3",
":",
"return",
"(",
"p",
",",
"q",
",",
"v",
")",
"elif",
"i",
"==",
"4",
":",
"return",
"(",
"t",
",",
"p",
",",
"v",
")",
"else",
":",
"return",
"(",
"v",
",",
"p",
",",
"q",
")"
] | Converts a tuple of hue, saturation, value to a tuple of red, green blue.
Hue should be an angle from 0.0 to 359.0. Saturation and value should be a
value from 0.0 to 1.0, where saturation controls the intensity of the hue and
value controls the brightness. | [
"Converts",
"a",
"tuple",
"of",
"hue",
"saturation",
"value",
"to",
"a",
"tuple",
"of",
"red",
"green",
"blue",
".",
"Hue",
"should",
"be",
"an",
"angle",
"from",
"0",
".",
"0",
"to",
"359",
".",
"0",
".",
"Saturation",
"and",
"value",
"should",
"be",
"a",
"value",
"from",
"0",
".",
"0",
"to",
"1",
".",
"0",
"where",
"saturation",
"controls",
"the",
"intensity",
"of",
"the",
"hue",
"and",
"value",
"controls",
"the",
"brightness",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/examples/char_lcd_rgb_pwm.py#L9-L36 |
5,415 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.set_cursor | def set_cursor(self, col, row):
"""Move the cursor to an explicit column and row position."""
# Clamp row to the last row of the display.
if row > self._lines:
row = self._lines - 1
# Set location.
self.write8(LCD_SETDDRAMADDR | (col + LCD_ROW_OFFSETS[row])) | python | def set_cursor(self, col, row):
"""Move the cursor to an explicit column and row position."""
# Clamp row to the last row of the display.
if row > self._lines:
row = self._lines - 1
# Set location.
self.write8(LCD_SETDDRAMADDR | (col + LCD_ROW_OFFSETS[row])) | [
"def",
"set_cursor",
"(",
"self",
",",
"col",
",",
"row",
")",
":",
"# Clamp row to the last row of the display.",
"if",
"row",
">",
"self",
".",
"_lines",
":",
"row",
"=",
"self",
".",
"_lines",
"-",
"1",
"# Set location.",
"self",
".",
"write8",
"(",
"LCD_SETDDRAMADDR",
"|",
"(",
"col",
"+",
"LCD_ROW_OFFSETS",
"[",
"row",
"]",
")",
")"
] | Move the cursor to an explicit column and row position. | [
"Move",
"the",
"cursor",
"to",
"an",
"explicit",
"column",
"and",
"row",
"position",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L183-L189 |
5,416 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.enable_display | def enable_display(self, enable):
"""Enable or disable the display. Set enable to True to enable."""
if enable:
self.displaycontrol |= LCD_DISPLAYON
else:
self.displaycontrol &= ~LCD_DISPLAYON
self.write8(LCD_DISPLAYCONTROL | self.displaycontrol) | python | def enable_display(self, enable):
"""Enable or disable the display. Set enable to True to enable."""
if enable:
self.displaycontrol |= LCD_DISPLAYON
else:
self.displaycontrol &= ~LCD_DISPLAYON
self.write8(LCD_DISPLAYCONTROL | self.displaycontrol) | [
"def",
"enable_display",
"(",
"self",
",",
"enable",
")",
":",
"if",
"enable",
":",
"self",
".",
"displaycontrol",
"|=",
"LCD_DISPLAYON",
"else",
":",
"self",
".",
"displaycontrol",
"&=",
"~",
"LCD_DISPLAYON",
"self",
".",
"write8",
"(",
"LCD_DISPLAYCONTROL",
"|",
"self",
".",
"displaycontrol",
")"
] | Enable or disable the display. Set enable to True to enable. | [
"Enable",
"or",
"disable",
"the",
"display",
".",
"Set",
"enable",
"to",
"True",
"to",
"enable",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L191-L197 |
5,417 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.show_cursor | def show_cursor(self, show):
"""Show or hide the cursor. Cursor is shown if show is True."""
if show:
self.displaycontrol |= LCD_CURSORON
else:
self.displaycontrol &= ~LCD_CURSORON
self.write8(LCD_DISPLAYCONTROL | self.displaycontrol) | python | def show_cursor(self, show):
"""Show or hide the cursor. Cursor is shown if show is True."""
if show:
self.displaycontrol |= LCD_CURSORON
else:
self.displaycontrol &= ~LCD_CURSORON
self.write8(LCD_DISPLAYCONTROL | self.displaycontrol) | [
"def",
"show_cursor",
"(",
"self",
",",
"show",
")",
":",
"if",
"show",
":",
"self",
".",
"displaycontrol",
"|=",
"LCD_CURSORON",
"else",
":",
"self",
".",
"displaycontrol",
"&=",
"~",
"LCD_CURSORON",
"self",
".",
"write8",
"(",
"LCD_DISPLAYCONTROL",
"|",
"self",
".",
"displaycontrol",
")"
] | Show or hide the cursor. Cursor is shown if show is True. | [
"Show",
"or",
"hide",
"the",
"cursor",
".",
"Cursor",
"is",
"shown",
"if",
"show",
"is",
"True",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L199-L205 |
5,418 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.blink | def blink(self, blink):
"""Turn on or off cursor blinking. Set blink to True to enable blinking."""
if blink:
self.displaycontrol |= LCD_BLINKON
else:
self.displaycontrol &= ~LCD_BLINKON
self.write8(LCD_DISPLAYCONTROL | self.displaycontrol) | python | def blink(self, blink):
"""Turn on or off cursor blinking. Set blink to True to enable blinking."""
if blink:
self.displaycontrol |= LCD_BLINKON
else:
self.displaycontrol &= ~LCD_BLINKON
self.write8(LCD_DISPLAYCONTROL | self.displaycontrol) | [
"def",
"blink",
"(",
"self",
",",
"blink",
")",
":",
"if",
"blink",
":",
"self",
".",
"displaycontrol",
"|=",
"LCD_BLINKON",
"else",
":",
"self",
".",
"displaycontrol",
"&=",
"~",
"LCD_BLINKON",
"self",
".",
"write8",
"(",
"LCD_DISPLAYCONTROL",
"|",
"self",
".",
"displaycontrol",
")"
] | Turn on or off cursor blinking. Set blink to True to enable blinking. | [
"Turn",
"on",
"or",
"off",
"cursor",
"blinking",
".",
"Set",
"blink",
"to",
"True",
"to",
"enable",
"blinking",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L207-L213 |
5,419 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.set_left_to_right | def set_left_to_right(self):
"""Set text direction left to right."""
self.displaymode |= LCD_ENTRYLEFT
self.write8(LCD_ENTRYMODESET | self.displaymode) | python | def set_left_to_right(self):
"""Set text direction left to right."""
self.displaymode |= LCD_ENTRYLEFT
self.write8(LCD_ENTRYMODESET | self.displaymode) | [
"def",
"set_left_to_right",
"(",
"self",
")",
":",
"self",
".",
"displaymode",
"|=",
"LCD_ENTRYLEFT",
"self",
".",
"write8",
"(",
"LCD_ENTRYMODESET",
"|",
"self",
".",
"displaymode",
")"
] | Set text direction left to right. | [
"Set",
"text",
"direction",
"left",
"to",
"right",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L223-L226 |
5,420 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.set_right_to_left | def set_right_to_left(self):
"""Set text direction right to left."""
self.displaymode &= ~LCD_ENTRYLEFT
self.write8(LCD_ENTRYMODESET | self.displaymode) | python | def set_right_to_left(self):
"""Set text direction right to left."""
self.displaymode &= ~LCD_ENTRYLEFT
self.write8(LCD_ENTRYMODESET | self.displaymode) | [
"def",
"set_right_to_left",
"(",
"self",
")",
":",
"self",
".",
"displaymode",
"&=",
"~",
"LCD_ENTRYLEFT",
"self",
".",
"write8",
"(",
"LCD_ENTRYMODESET",
"|",
"self",
".",
"displaymode",
")"
] | Set text direction right to left. | [
"Set",
"text",
"direction",
"right",
"to",
"left",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L228-L231 |
5,421 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.autoscroll | def autoscroll(self, autoscroll):
"""Autoscroll will 'right justify' text from the cursor if set True,
otherwise it will 'left justify' the text.
"""
if autoscroll:
self.displaymode |= LCD_ENTRYSHIFTINCREMENT
else:
self.displaymode &= ~LCD_ENTRYSHIFTINCREMENT
self.write8(LCD_ENTRYMODESET | self.displaymode) | python | def autoscroll(self, autoscroll):
"""Autoscroll will 'right justify' text from the cursor if set True,
otherwise it will 'left justify' the text.
"""
if autoscroll:
self.displaymode |= LCD_ENTRYSHIFTINCREMENT
else:
self.displaymode &= ~LCD_ENTRYSHIFTINCREMENT
self.write8(LCD_ENTRYMODESET | self.displaymode) | [
"def",
"autoscroll",
"(",
"self",
",",
"autoscroll",
")",
":",
"if",
"autoscroll",
":",
"self",
".",
"displaymode",
"|=",
"LCD_ENTRYSHIFTINCREMENT",
"else",
":",
"self",
".",
"displaymode",
"&=",
"~",
"LCD_ENTRYSHIFTINCREMENT",
"self",
".",
"write8",
"(",
"LCD_ENTRYMODESET",
"|",
"self",
".",
"displaymode",
")"
] | Autoscroll will 'right justify' text from the cursor if set True,
otherwise it will 'left justify' the text. | [
"Autoscroll",
"will",
"right",
"justify",
"text",
"from",
"the",
"cursor",
"if",
"set",
"True",
"otherwise",
"it",
"will",
"left",
"justify",
"the",
"text",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L233-L241 |
5,422 | adafruit/Adafruit_Python_CharLCD | Adafruit_CharLCD/Adafruit_CharLCD.py | Adafruit_CharLCD.message | def message(self, text):
"""Write text to display. Note that text can include newlines."""
line = 0
# Iterate through each character.
for char in text:
# Advance to next line if character is a new line.
if char == '\n':
line += 1
# Move to left or right side depending on text direction.
col = 0 if self.displaymode & LCD_ENTRYLEFT > 0 else self._cols-1
self.set_cursor(col, line)
# Write the character to the display.
else:
self.write8(ord(char), True) | python | def message(self, text):
"""Write text to display. Note that text can include newlines."""
line = 0
# Iterate through each character.
for char in text:
# Advance to next line if character is a new line.
if char == '\n':
line += 1
# Move to left or right side depending on text direction.
col = 0 if self.displaymode & LCD_ENTRYLEFT > 0 else self._cols-1
self.set_cursor(col, line)
# Write the character to the display.
else:
self.write8(ord(char), True) | [
"def",
"message",
"(",
"self",
",",
"text",
")",
":",
"line",
"=",
"0",
"# Iterate through each character.",
"for",
"char",
"in",
"text",
":",
"# Advance to next line if character is a new line.",
"if",
"char",
"==",
"'\\n'",
":",
"line",
"+=",
"1",
"# Move to left or right side depending on text direction.",
"col",
"=",
"0",
"if",
"self",
".",
"displaymode",
"&",
"LCD_ENTRYLEFT",
">",
"0",
"else",
"self",
".",
"_cols",
"-",
"1",
"self",
".",
"set_cursor",
"(",
"col",
",",
"line",
")",
"# Write the character to the display.",
"else",
":",
"self",
".",
"write8",
"(",
"ord",
"(",
"char",
")",
",",
"True",
")"
] | Write text to display. Note that text can include newlines. | [
"Write",
"text",
"to",
"display",
".",
"Note",
"that",
"text",
"can",
"include",
"newlines",
"."
] | c126e6b673074c12a03f4bd36afb2fe40272341e | https://github.com/adafruit/Adafruit_Python_CharLCD/blob/c126e6b673074c12a03f4bd36afb2fe40272341e/Adafruit_CharLCD/Adafruit_CharLCD.py#L243-L256 |
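Following on from the sketch above, message() treats '\n' as a move to the next row, so a two-line write looks like this (still using the hypothetical lcd object; clear() is assumed to be the usual helper on the same class):

lcd.clear()                          # assumed helper on the same class
lcd.message('Line one\nLine two')    # '\n' advances to the next row before writing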
5,423 | i3visio/osrframework | osrframework/utils/regexp.py | RegexpObject.findExp | def findExp(self, data):
'''
Method to look for the current regular expression in the provided string.
:param data: string containing the text where the expressions will be looked for.
:return: a list of verified regular expressions.
'''
temp = []
for r in self.reg_exp:
try:
temp += re.findall(r, data)
except:
print self.name
print r
print "CABOOOOM!"
verifiedExp = []
# verification
for t in temp:
# Remember: the regexps include two extra characters (before and after) that should be removed now.
if self.isValidExp(t):
if t not in verifiedExp:
verifiedExp.append(t)
return self.getResults(verifiedExp) | python | def findExp(self, data):
'''
Method to look for the current regular expression in the provided string.
:param data: string containing the text where the expressions will be looked for.
:return: a list of verified regular expressions.
'''
temp = []
for r in self.reg_exp:
try:
temp += re.findall(r, data)
except:
print self.name
print r
print "CABOOOOM!"
verifiedExp = []
# verification
for t in temp:
# Remember: the regexps include two extra characters (before and after) that should be removed now.
if self.isValidExp(t):
if t not in verifiedExp:
verifiedExp.append(t)
return self.getResults(verifiedExp) | [
"def",
"findExp",
"(",
"self",
",",
"data",
")",
":",
"temp",
"=",
"[",
"]",
"for",
"r",
"in",
"self",
".",
"reg_exp",
":",
"try",
":",
"temp",
"+=",
"re",
".",
"findall",
"(",
"r",
",",
"data",
")",
"except",
":",
"print",
"self",
".",
"name",
"print",
"r",
"print",
"\"CABOOOOM!\"",
"verifiedExp",
"=",
"[",
"]",
"# verification",
"for",
"t",
"in",
"temp",
":",
"# Remember: the regexps include two extra charactes (before and later) that should be removed now.",
"if",
"self",
".",
"isValidExp",
"(",
"t",
")",
":",
"if",
"t",
"not",
"in",
"verifiedExp",
":",
"verifiedExp",
".",
"append",
"(",
"t",
")",
"return",
"self",
".",
"getResults",
"(",
"verifiedExp",
")"
] | Method to look for the current regular expression in the provided string.
:param data: string containing the text where the expressions will be looked for.
:return: a list of verified regular expressions. | [
"Method",
"to",
"look",
"for",
"the",
"current",
"regular",
"expression",
"in",
"the",
"provided",
"string",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/regexp.py#L125-L150 |
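findExp above boils down to a find / deduplicate / verify loop. A standalone sketch of the same pattern; the e-mail regex and the is_valid callback are illustrative assumptions, not expressions shipped with the framework.

import re

patterns = [r"[\w.+-]+@[\w-]+\.[\w.-]+"]   # stand-in for self.reg_exp

def find_expressions(data, is_valid=lambda candidate: True):
    found = []
    for pattern in patterns:
        found += re.findall(pattern, data)          # collect raw matches
    verified = []
    for candidate in found:
        if is_valid(candidate) and candidate not in verified:
            verified.append(candidate)              # validate and deduplicate
    return verified

print(find_expressions("contact: alice@example.com, alice@example.com"))
# ['alice@example.com']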
5,424 | i3visio/osrframework | osrframework/utils/general.py | exportUsufy | def exportUsufy(data, ext, fileH):
"""
Method that exports the different structures onto different formats.
Args:
-----
data: Data to export.
ext: One of the following: csv, gml, json, ods, png, txt, xls, xlsx.
fileH: Fileheader for the output files.
Returns:
--------
Performs the export as requested by parameter.
"""
if ext == "csv":
usufyToCsvExport(data, fileH+"."+ext)
elif ext == "gml":
usufyToGmlExport(data, fileH+"."+ext)
elif ext == "json":
usufyToJsonExport(data, fileH+"."+ext)
elif ext == "ods":
usufyToOdsExport(data, fileH+"."+ext)
elif ext == "png":
usufyToPngExport(data, fileH+"."+ext)
elif ext == "txt":
usufyToTextExport(data, fileH+"."+ext)
elif ext == "xls":
usufyToXlsExport(data, fileH+"."+ext)
elif ext == "xlsx":
usufyToXlsxExport(data, fileH+"."+ext) | python | def exportUsufy(data, ext, fileH):
"""
Method that exports the different structures onto different formats.
Args:
-----
data: Data to export.
ext: One of the following: csv, gml, json, ods, png, txt, xls, xlsx.
fileH: Fileheader for the output files.
Returns:
--------
Performs the export as requested by parameter.
"""
if ext == "csv":
usufyToCsvExport(data, fileH+"."+ext)
elif ext == "gml":
usufyToGmlExport(data, fileH+"."+ext)
elif ext == "json":
usufyToJsonExport(data, fileH+"."+ext)
elif ext == "ods":
usufyToOdsExport(data, fileH+"."+ext)
elif ext == "png":
usufyToPngExport(data, fileH+"."+ext)
elif ext == "txt":
usufyToTextExport(data, fileH+"."+ext)
elif ext == "xls":
usufyToXlsExport(data, fileH+"."+ext)
elif ext == "xlsx":
usufyToXlsxExport(data, fileH+"."+ext) | [
"def",
"exportUsufy",
"(",
"data",
",",
"ext",
",",
"fileH",
")",
":",
"if",
"ext",
"==",
"\"csv\"",
":",
"usufyToCsvExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"gml\"",
":",
"usufyToGmlExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"json\"",
":",
"usufyToJsonExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"ods\"",
":",
"usufyToOdsExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"png\"",
":",
"usufyToPngExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"txt\"",
":",
"usufyToTextExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"xls\"",
":",
"usufyToXlsExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")",
"elif",
"ext",
"==",
"\"xlsx\"",
":",
"usufyToXlsxExport",
"(",
"data",
",",
"fileH",
"+",
"\".\"",
"+",
"ext",
")"
] | Method that exports the different structures onto different formats.
Args:
-----
data: Data to export.
ext: One of the following: csv, excel, json, ods.
fileH: Fileheader for the output files.
Returns:
--------
Performs the export as requested by parameter. | [
"Method",
"that",
"exports",
"the",
"different",
"structures",
"onto",
"different",
"formats",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L40-L69 |
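A hedged usage sketch for the dispatcher above. The entity structure mirrors the i3visio-style dicts the export helpers expect (value/type/attributes); the concrete values and the output basename are assumptions.

from osrframework.utils.general import exportUsufy

results = [
    {
        "value": "i3visio - Twitter",
        "type": "i3visio.profile",
        "attributes": [
            {"type": "i3visio.alias", "value": "i3visio", "attributes": []},
            {"type": "i3visio.platform", "value": "Twitter", "attributes": []},
        ],
    }
]

exportUsufy(results, "json", "profiles")   # writes (or appends to) profiles.json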
5,425 | i3visio/osrframework | osrframework/utils/general.py | _generateTabularData | def _generateTabularData(res, oldTabularData = {}, isTerminal=False, canUnicode=True):
"""
Method that recovers the values and columns from the current structure
This method is used by:
- usufyToCsvExport
- usufyToOdsExport
- usufyToXlsExport
- usufyToXlsxExport
Args:
-----
res: New data to export.
oldTabularData: The previous data stored.
{
"OSRFramework": [
[
"i3visio.alias",
"i3visio.platform",
"i3visio.uri"
],
[
"i3visio",
"Twitter",
"https://twitter.com/i3visio",
]
]
}
isTerminal: If isTerminal is activated, only information related to
relevant utils will be shown.
canUnicode: Variable that stores if the printed output can deal with
Unicode characters.
Returns:
--------
The values, as a dictionary containing all the information stored.
Values is like:
{
"OSRFramework": [
[
"i3visio.alias",
"i3visio.platform",
"i3visio.uri"
],
[
"i3visio",
"Twitter",
"https://twitter.com/i3visio",
],
[
"i3visio",
"Github",
"https://github.com/i3visio",
]
]
}
"""
def _grabbingNewHeader(h):
"""
Updates the headers to be general.
Changing the starting @ for a '_' and changing the "i3visio." for
"i3visio_". Changed in 0.9.4+.
Args:
-----
h: A header to be sanitised.
Returns:
--------
string: The modified header.
"""
if h[0] == "@":
h = h.replace("@", "_")
elif "i3visio." in h:
h = h.replace("i3visio.", "i3visio_")
return h
# Entities allowed for the output in terminal
allowedInTerminal = [
"i3visio_alias",
"i3visio_uri",
"i3visio_platform",
"i3visio_email",
"i3visio_ipv4",
"i3visio_phone",
"i3visio_dni",
"i3visio_domain",
"i3visio_platform_leaked",
#"_source"
]
# List of profiles found
values = {}
headers = ["_id"]
try:
if not isTerminal:
# Recovering the headers in the first line of the old Data
headers = oldTabularData["OSRFramework"][0]
else:
# Recovering only the printable headers if in Terminal mode
oldHeaders = oldTabularData["OSRFramework"][0]
headers = []
for h in oldHeaders:
h = _grabbingNewHeader(h)
if h in allowedInTerminal:
# Set to simplify the table shown in mailfy for leaked platforms
if h in ["i3visio_domain", "i3visio_alias"] and "_source" in old_headers:
pass
else:
headers.append(h)
# Changing the starting @ for a '_' and changing the "i3visio." for "i3visio_". Changed in 0.9.4+
for i, h in enumerate(headers):
h = _grabbingNewHeader(h)
# Replacing the header
headers[i] = h
except:
# No previous files... Easy...
headers = ["_id"]
# We are assuming that we received a list of profiles.
for p in res:
# Creating the dictionaries
values[p["value"]] = {}
attributes = p["attributes"]
# Processing all the attributes found
for a in attributes:
# Grabbing the type in the new format
h = _grabbingNewHeader(a["type"])
# Default behaviour for the output methods
if not isTerminal:
values[p["value"]][h] = a["value"]
# Appending the column if not already included
if str(h) not in headers:
headers.append(str(h))
# Specific table construction for the terminal output
else:
if h in allowedInTerminal:
values[p["value"]][h] = a["value"]
# Appending the column if not already included
if str(h) not in headers:
headers.append(str(h))
data = {}
# Note that each row should be a list!
workingSheet = []
# Appending the headers
workingSheet.append(headers)
# First, we will iterate through the previously stored values
try:
for dataRow in oldTabularData["OSRFramework"][1:]:
# Recovering the previous data
newRow = []
for cell in dataRow:
newRow.append(cell)
# Now, we will fill the rest of the cells with "N/A" values
for i in range(len(headers)-len(dataRow)):
# Printing a Not Applicable value
newRow.append("[N/A]")
# Appending the newRow to the data structure
workingSheet.append(newRow)
except Exception, e:
# No previous value found!
pass
# After having all the previous data stored and updated... We will go through the rest:
for prof in values.keys():
# Creating an empty structure
newRow = []
for i, col in enumerate(headers):
try:
if col == "_id":
newRow.append(len(workingSheet))
else:
if canUnicode:
newRow.append(unicode(values[prof][col]))
else:
newRow.append(str(values[prof][col]))
except UnicodeEncodeError as e:
# Printing that an error was found
newRow.append("[WARNING: Unicode Encode]")
except:
# Printing that this is not applicable value
newRow.append("[N/A]")
# Appending the newRow to the data structure
workingSheet.append(newRow)
# Storing the workingSheet onto the data structure to be stored
data.update({"OSRFramework": workingSheet})
return data | python | def _generateTabularData(res, oldTabularData = {}, isTerminal=False, canUnicode=True):
"""
Method that recovers the values and columns from the current structure
This method is used by:
- usufyToCsvExport
- usufyToOdsExport
- usufyToXlsExport
- usufyToXlsxExport
Args:
-----
res: New data to export.
oldTabularData: The previous data stored.
{
"OSRFramework": [
[
"i3visio.alias",
"i3visio.platform",
"i3visio.uri"
],
[
"i3visio",
"Twitter",
"https://twitter.com/i3visio",
]
]
}
isTerminal: If isTerminal is activated, only information related to
relevant utils will be shown.
canUnicode: Variable that stores if the printed output can deal with
Unicode characters.
Returns:
--------
The values, as a dictionary containing all the information stored.
Values is like:
{
"OSRFramework": [
[
"i3visio.alias",
"i3visio.platform",
"i3visio.uri"
],
[
"i3visio",
"Twitter",
"https://twitter.com/i3visio",
],
[
"i3visio",
"Github",
"https://github.com/i3visio",
]
]
}
"""
def _grabbingNewHeader(h):
"""
Updates the headers to be general.
Changing the starting @ for a '_' and changing the "i3visio." for
"i3visio_". Changed in 0.9.4+.
Args:
-----
h: A header to be sanitised.
Returns:
--------
string: The modified header.
"""
if h[0] == "@":
h = h.replace("@", "_")
elif "i3visio." in h:
h = h.replace("i3visio.", "i3visio_")
return h
# Entities allowed for the output in terminal
allowedInTerminal = [
"i3visio_alias",
"i3visio_uri",
"i3visio_platform",
"i3visio_email",
"i3visio_ipv4",
"i3visio_phone",
"i3visio_dni",
"i3visio_domain",
"i3visio_platform_leaked",
#"_source"
]
# List of profiles found
values = {}
headers = ["_id"]
try:
if not isTerminal:
# Recovering the headers in the first line of the old Data
headers = oldTabularData["OSRFramework"][0]
else:
# Recovering only the printable headers if in Terminal mode
oldHeaders = oldTabularData["OSRFramework"][0]
headers = []
for h in oldHeaders:
h = _grabbingNewHeader(h)
if h in allowedInTerminal:
# Set to simplify the table shown in mailfy for leaked platforms
if h in ["i3visio_domain", "i3visio_alias"] and "_source" in old_headers:
pass
else:
headers.append(h)
# Changing the starting @ for a '_' and changing the "i3visio." for "i3visio_". Changed in 0.9.4+
for i, h in enumerate(headers):
h = _grabbingNewHeader(h)
# Replacing the header
headers[i] = h
except:
# No previous files... Easy...
headers = ["_id"]
# We are assuming that we received a list of profiles.
for p in res:
# Creating the dictionaries
values[p["value"]] = {}
attributes = p["attributes"]
# Processing all the attributes found
for a in attributes:
# Grabbing the type in the new format
h = _grabbingNewHeader(a["type"])
# Default behaviour for the output methods
if not isTerminal:
values[p["value"]][h] = a["value"]
# Appending the column if not already included
if str(h) not in headers:
headers.append(str(h))
# Specific table construction for the terminal output
else:
if h in allowedInTerminal:
values[p["value"]][h] = a["value"]
# Appending the column if not already included
if str(h) not in headers:
headers.append(str(h))
data = {}
# Note that each row should be a list!
workingSheet = []
# Appending the headers
workingSheet.append(headers)
# First, we will iterate through the previously stored values
try:
for dataRow in oldTabularData["OSRFramework"][1:]:
# Recovering the previous data
newRow = []
for cell in dataRow:
newRow.append(cell)
# Now, we will fill the rest of the cells with "N/A" values
for i in range(len(headers)-len(dataRow)):
# Printing a Not Applicable value
newRow.append("[N/A]")
# Appending the newRow to the data structure
workingSheet.append(newRow)
except Exception, e:
# No previous value found!
pass
# After having all the previous data stored and updated... We will go through the rest:
for prof in values.keys():
# Creating an empty structure
newRow = []
for i, col in enumerate(headers):
try:
if col == "_id":
newRow.append(len(workingSheet))
else:
if canUnicode:
newRow.append(unicode(values[prof][col]))
else:
newRow.append(str(values[prof][col]))
except UnicodeEncodeError as e:
# Printing that an error was found
newRow.append("[WARNING: Unicode Encode]")
except:
# Printing that this is not applicable value
newRow.append("[N/A]")
# Appending the newRow to the data structure
workingSheet.append(newRow)
# Storing the workingSheet onto the data structure to be stored
data.update({"OSRFramework": workingSheet})
return data | [
"def",
"_generateTabularData",
"(",
"res",
",",
"oldTabularData",
"=",
"{",
"}",
",",
"isTerminal",
"=",
"False",
",",
"canUnicode",
"=",
"True",
")",
":",
"def",
"_grabbingNewHeader",
"(",
"h",
")",
":",
"\"\"\"\n Updates the headers to be general.\n\n Changing the starting @ for a '_' and changing the \"i3visio.\" for\n \"i3visio_\". Changed in 0.9.4+.\n\n Args:\n -----\n h: A header to be sanitised.\n\n Returns:\n --------\n string: The modified header.\n \"\"\"",
"if",
"h",
"[",
"0",
"]",
"==",
"\"@\"",
":",
"h",
"=",
"h",
".",
"replace",
"(",
"\"@\"",
",",
"\"_\"",
")",
"elif",
"\"i3visio.\"",
"in",
"h",
":",
"h",
"=",
"h",
".",
"replace",
"(",
"\"i3visio.\"",
",",
"\"i3visio_\"",
")",
"return",
"h",
"# Entities allowed for the output in terminal",
"allowedInTerminal",
"=",
"[",
"\"i3visio_alias\"",
",",
"\"i3visio_uri\"",
",",
"\"i3visio_platform\"",
",",
"\"i3visio_email\"",
",",
"\"i3visio_ipv4\"",
",",
"\"i3visio_phone\"",
",",
"\"i3visio_dni\"",
",",
"\"i3visio_domain\"",
",",
"\"i3visio_platform_leaked\"",
",",
"#\"_source\"",
"]",
"# List of profiles found",
"values",
"=",
"{",
"}",
"headers",
"=",
"[",
"\"_id\"",
"]",
"try",
":",
"if",
"not",
"isTerminal",
":",
"# Recovering the headers in the first line of the old Data",
"headers",
"=",
"oldTabularData",
"[",
"\"OSRFramework\"",
"]",
"[",
"0",
"]",
"else",
":",
"# Recovering only the printable headers if in Terminal mode",
"oldHeaders",
"=",
"oldTabularData",
"[",
"\"OSRFramework\"",
"]",
"[",
"0",
"]",
"headers",
"=",
"[",
"]",
"for",
"h",
"in",
"oldHeaders",
":",
"h",
"=",
"_grabbingNewHeader",
"(",
"h",
")",
"if",
"h",
"in",
"allowedInTerminal",
":",
"# Set to simplify the table shown in mailfy for leaked platforms",
"if",
"h",
"in",
"[",
"\"i3visio_domain\"",
",",
"\"i3visio_alias\"",
"]",
"and",
"\"_source\"",
"in",
"old_headers",
":",
"pass",
"else",
":",
"headers",
".",
"append",
"(",
"h",
")",
"# Changing the starting @ for a '_' and changing the \"i3visio.\" for \"i3visio_\". Changed in 0.9.4+",
"for",
"i",
",",
"h",
"in",
"enumerate",
"(",
"headers",
")",
":",
"h",
"=",
"_grabbingNewHeader",
"(",
"h",
")",
"# Replacing the header",
"headers",
"[",
"i",
"]",
"=",
"h",
"except",
":",
"# No previous files... Easy...",
"headers",
"=",
"[",
"\"_id\"",
"]",
"# We are assuming that we received a list of profiles.",
"for",
"p",
"in",
"res",
":",
"# Creating the dictionaries",
"values",
"[",
"p",
"[",
"\"value\"",
"]",
"]",
"=",
"{",
"}",
"attributes",
"=",
"p",
"[",
"\"attributes\"",
"]",
"# Processing all the attributes found",
"for",
"a",
"in",
"attributes",
":",
"# Grabbing the type in the new format",
"h",
"=",
"_grabbingNewHeader",
"(",
"a",
"[",
"\"type\"",
"]",
")",
"# Default behaviour for the output methods",
"if",
"not",
"isTerminal",
":",
"values",
"[",
"p",
"[",
"\"value\"",
"]",
"]",
"[",
"h",
"]",
"=",
"a",
"[",
"\"value\"",
"]",
"# Appending the column if not already included",
"if",
"str",
"(",
"h",
")",
"not",
"in",
"headers",
":",
"headers",
".",
"append",
"(",
"str",
"(",
"h",
")",
")",
"# Specific table construction for the terminal output",
"else",
":",
"if",
"h",
"in",
"allowedInTerminal",
":",
"values",
"[",
"p",
"[",
"\"value\"",
"]",
"]",
"[",
"h",
"]",
"=",
"a",
"[",
"\"value\"",
"]",
"# Appending the column if not already included",
"if",
"str",
"(",
"h",
")",
"not",
"in",
"headers",
":",
"headers",
".",
"append",
"(",
"str",
"(",
"h",
")",
")",
"data",
"=",
"{",
"}",
"# Note that each row should be a list!",
"workingSheet",
"=",
"[",
"]",
"# Appending the headers",
"workingSheet",
".",
"append",
"(",
"headers",
")",
"# First, we will iterate through the previously stored values",
"try",
":",
"for",
"dataRow",
"in",
"oldTabularData",
"[",
"\"OSRFramework\"",
"]",
"[",
"1",
":",
"]",
":",
"# Recovering the previous data",
"newRow",
"=",
"[",
"]",
"for",
"cell",
"in",
"dataRow",
":",
"newRow",
".",
"append",
"(",
"cell",
")",
"# Now, we will fill the rest of the cells with \"N/A\" values",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"headers",
")",
"-",
"len",
"(",
"dataRow",
")",
")",
":",
"# Printing a Not Applicable value",
"newRow",
".",
"append",
"(",
"\"[N/A]\"",
")",
"# Appending the newRow to the data structure",
"workingSheet",
".",
"append",
"(",
"newRow",
")",
"except",
"Exception",
",",
"e",
":",
"# No previous value found!",
"pass",
"# After having all the previous data stored an updated... We will go through the rest:",
"for",
"prof",
"in",
"values",
".",
"keys",
"(",
")",
":",
"# Creating an empty structure",
"newRow",
"=",
"[",
"]",
"for",
"i",
",",
"col",
"in",
"enumerate",
"(",
"headers",
")",
":",
"try",
":",
"if",
"col",
"==",
"\"_id\"",
":",
"newRow",
".",
"append",
"(",
"len",
"(",
"workingSheet",
")",
")",
"else",
":",
"if",
"canUnicode",
":",
"newRow",
".",
"append",
"(",
"unicode",
"(",
"values",
"[",
"prof",
"]",
"[",
"col",
"]",
")",
")",
"else",
":",
"newRow",
".",
"append",
"(",
"str",
"(",
"values",
"[",
"prof",
"]",
"[",
"col",
"]",
")",
")",
"except",
"UnicodeEncodeError",
"as",
"e",
":",
"# Printing that an error was found",
"newRow",
".",
"append",
"(",
"\"[WARNING: Unicode Encode]\"",
")",
"except",
":",
"# Printing that this is not applicable value",
"newRow",
".",
"append",
"(",
"\"[N/A]\"",
")",
"# Appending the newRow to the data structure",
"workingSheet",
".",
"append",
"(",
"newRow",
")",
"# Storing the workingSheet onto the data structure to be stored",
"data",
".",
"update",
"(",
"{",
"\"OSRFramework\"",
":",
"workingSheet",
"}",
")",
"return",
"data"
] | Method that recovers the values and columns from the current structure
This method is used by:
- usufyToCsvExport
- usufyToOdsExport
- usufyToXlsExport
- usufyToXlsxExport
Args:
-----
res: New data to export.
oldTabularData: The previous data stored.
{
"OSRFramework": [
[
"i3visio.alias",
"i3visio.platform",
"i3visio.uri"
],
[
"i3visio",
"Twitter",
"https://twitter.com/i3visio",
]
]
}
isTerminal: If isTerminal is activated, only information related to
relevant utils will be shown.
canUnicode: Variable that stores if the printed output can deal with
Unicode characters.
Returns:
--------
The values, as a dictionary containing all the information stored.
Values is like:
{
"OSRFramework": [
[
"i3visio.alias",
"i3visio.platform",
"i3visio.uri"
],
[
"i3visio",
"Twitter",
"https://twitter.com/i3visio",
],
[
"i3visio",
"Github",
"https://github.com/i3visio",
]
]
} | [
"Method",
"that",
"recovers",
"the",
"values",
"and",
"columns",
"from",
"the",
"current",
"structure"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L72-L266 |
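A worked sketch of the header/row merging performed above, called with the default (empty) old data. The input profile is illustrative; the expected output is traced from the code in the record.

from osrframework.utils.general import _generateTabularData

profiles = [
    {
        "value": "i3visio - Twitter",
        "type": "i3visio.profile",
        "attributes": [
            {"type": "i3visio.alias", "value": "i3visio", "attributes": []},
            {"type": "@source", "value": "twitter", "attributes": []},
        ],
    }
]

book = _generateTabularData(profiles)
# Expected shape (Python 2, hence the u'' strings):
# {"OSRFramework": [["_id", "i3visio_alias", "_source"],
#                   [1, u"i3visio", u"twitter"]]}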
5,426 | i3visio/osrframework | osrframework/utils/general.py | usufyToJsonExport | def usufyToJsonExport(d, fPath):
"""
Workaround to export to a json file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
oldData = []
try:
with open (fPath) as iF:
oldText = iF.read()
if oldText != "":
oldData = json.loads(oldText)
except:
# No file found, so we will create it...
pass
jsonText = json.dumps(oldData+d, indent=2, sort_keys=True)
with open (fPath, "w") as oF:
oF.write(jsonText) | python | def usufyToJsonExport(d, fPath):
"""
Workaround to export to a json file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
oldData = []
try:
with open (fPath) as iF:
oldText = iF.read()
if oldText != "":
oldData = json.loads(oldText)
except:
# No file found, so we will create it...
pass
jsonText = json.dumps(oldData+d, indent=2, sort_keys=True)
with open (fPath, "w") as oF:
oF.write(jsonText) | [
"def",
"usufyToJsonExport",
"(",
"d",
",",
"fPath",
")",
":",
"oldData",
"=",
"[",
"]",
"try",
":",
"with",
"open",
"(",
"fPath",
")",
"as",
"iF",
":",
"oldText",
"=",
"iF",
".",
"read",
"(",
")",
"if",
"oldText",
"!=",
"\"\"",
":",
"oldData",
"=",
"json",
".",
"loads",
"(",
"oldText",
")",
"except",
":",
"# No file found, so we will create it...",
"pass",
"jsonText",
"=",
"json",
".",
"dumps",
"(",
"oldData",
"+",
"d",
",",
"indent",
"=",
"2",
",",
"sort_keys",
"=",
"True",
")",
"with",
"open",
"(",
"fPath",
",",
"\"w\"",
")",
"as",
"oF",
":",
"oF",
".",
"write",
"(",
"jsonText",
")"
] | Workaround to export to a json file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"to",
"a",
"json",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L269-L291 |
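The JSON exporter above appends to whatever is already on disk rather than overwriting it. A minimal standalone sketch of that read-merge-write idea; the file name and sample entity are assumptions.

import json
import os

def append_json(new_items, path):
    old = []
    if os.path.isfile(path):                       # tolerate a missing file
        with open(path) as f:
            text = f.read()
        if text:
            old = json.loads(text)
    with open(path, "w") as f:
        f.write(json.dumps(old + new_items, indent=2, sort_keys=True))

append_json([{"value": "i3visio", "type": "i3visio.alias"}], "profiles.json")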
5,427 | i3visio/osrframework | osrframework/utils/general.py | usufyToTextExport | def usufyToTextExport(d, fPath=None):
"""
Workaround to export to a .txt file or to show the information.
Args:
-----
d: Data to export.
fPath: File path for the output file. If None was provided, it will
assume that it has to print it.
Returns:
--------
unicode: It sometimes returns a unicode representation of the Sheet
received.
"""
# Manual check...
if d == []:
return "+------------------+\n| No data found... |\n+------------------+"
import pyexcel as pe
import pyexcel.ext.text as text
if fPath == None:
isTerminal = True
else:
isTerminal = False
try:
oldData = get_data(fPath)
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, {"OSRFramework":[[]]}, True, canUnicode=False)
# The tabular data contains a dict representing the whole book and we need only the sheet!!
sheet = pe.Sheet(tabularData["OSRFramework"])
sheet.name = "Profiles recovered (" + getCurrentStrDatetime() +")."
# Defining the headers
sheet.name_columns_by_row(0)
text.TABLEFMT = "grid"
try:
with open(fPath, "w") as oF:
oF.write(str(sheet))
except Exception as e:
# If a fPath was not provided... We will only print the info:
return unicode(sheet) | python | def usufyToTextExport(d, fPath=None):
"""
Workaround to export to a .txt file or to show the information.
Args:
-----
d: Data to export.
fPath: File path for the output file. If None was provided, it will
assume that it has to print it.
Returns:
--------
unicode: It sometimes returns a unicode representation of the Sheet
received.
"""
# Manual check...
if d == []:
return "+------------------+\n| No data found... |\n+------------------+"
import pyexcel as pe
import pyexcel.ext.text as text
if fPath == None:
isTerminal = True
else:
isTerminal = False
try:
oldData = get_data(fPath)
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, {"OSRFramework":[[]]}, True, canUnicode=False)
# The tabular data contains a dict representing the whole book and we need only the sheet!!
sheet = pe.Sheet(tabularData["OSRFramework"])
sheet.name = "Profiles recovered (" + getCurrentStrDatetime() +")."
# Defining the headers
sheet.name_columns_by_row(0)
text.TABLEFMT = "grid"
try:
with open(fPath, "w") as oF:
oF.write(str(sheet))
except Exception as e:
# If a fPath was not provided... We will only print the info:
return unicode(sheet) | [
"def",
"usufyToTextExport",
"(",
"d",
",",
"fPath",
"=",
"None",
")",
":",
"# Manual check...",
"if",
"d",
"==",
"[",
"]",
":",
"return",
"\"+------------------+\\n| No data found... |\\n+------------------+\"",
"import",
"pyexcel",
"as",
"pe",
"import",
"pyexcel",
".",
"ext",
".",
"text",
"as",
"text",
"if",
"fPath",
"==",
"None",
":",
"isTerminal",
"=",
"True",
"else",
":",
"isTerminal",
"=",
"False",
"try",
":",
"oldData",
"=",
"get_data",
"(",
"fPath",
")",
"except",
":",
"# No information has been recovered",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"[",
"]",
"}",
"# Generating the new tabular data",
"tabularData",
"=",
"_generateTabularData",
"(",
"d",
",",
"{",
"\"OSRFramework\"",
":",
"[",
"[",
"]",
"]",
"}",
",",
"True",
",",
"canUnicode",
"=",
"False",
")",
"# The tabular data contains a dict representing the whole book and we need only the sheet!!",
"sheet",
"=",
"pe",
".",
"Sheet",
"(",
"tabularData",
"[",
"\"OSRFramework\"",
"]",
")",
"sheet",
".",
"name",
"=",
"\"Profiles recovered (\"",
"+",
"getCurrentStrDatetime",
"(",
")",
"+",
"\").\"",
"# Defining the headers",
"sheet",
".",
"name_columns_by_row",
"(",
"0",
")",
"text",
".",
"TABLEFMT",
"=",
"\"grid\"",
"try",
":",
"with",
"open",
"(",
"fPath",
",",
"\"w\"",
")",
"as",
"oF",
":",
"oF",
".",
"write",
"(",
"str",
"(",
"sheet",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"# If a fPath was not provided... We will only print the info:",
"return",
"unicode",
"(",
"sheet",
")"
] | Workaround to export to a .txt file or to show the information.
Args:
-----
d: Data to export.
fPath: File path for the output file. If None was provided, it will
assume that it has to print it.
Returns:
--------
unicode: It sometimes returns a unicode representation of the Sheet
received. | [
"Workaround",
"to",
"export",
"to",
"a",
".",
"txt",
"file",
"or",
"to",
"show",
"the",
"information",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L294-L342 |
5,428 | i3visio/osrframework | osrframework/utils/general.py | usufyToCsvExport | def usufyToCsvExport(d, fPath):
"""
Workaround to export to a CSV file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_io import get_data
try:
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data.
tabularData = _generateTabularData(d, oldData)
from pyexcel_io import save_data
# Storing the file
# NOTE: when working with CSV files it is no longer a dict because it is a one-sheet-format
save_data(fPath, tabularData["OSRFramework"]) | python | def usufyToCsvExport(d, fPath):
"""
Workaround to export to a CSV file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_io import get_data
try:
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data.
tabularData = _generateTabularData(d, oldData)
from pyexcel_io import save_data
# Storing the file
# NOTE: when working with CSV files it is no longer a dict because it is a one-sheet-format
save_data(fPath, tabularData["OSRFramework"]) | [
"def",
"usufyToCsvExport",
"(",
"d",
",",
"fPath",
")",
":",
"from",
"pyexcel_io",
"import",
"get_data",
"try",
":",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"get_data",
"(",
"fPath",
")",
"}",
"except",
":",
"# No information has been recovered",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"[",
"]",
"}",
"# Generating the new tabular data.",
"tabularData",
"=",
"_generateTabularData",
"(",
"d",
",",
"oldData",
")",
"from",
"pyexcel_io",
"import",
"save_data",
"# Storing the file",
"# NOTE: when working with CSV files it is no longer a dict because it is a one-sheet-format",
"save_data",
"(",
"fPath",
",",
"tabularData",
"[",
"\"OSRFramework\"",
"]",
")"
] | Workaround to export to a CSV file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"to",
"a",
"CSV",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L345-L368 |
5,429 | i3visio/osrframework | osrframework/utils/general.py | usufyToOdsExport | def usufyToOdsExport(d, fPath):
"""
Workaround to export to a .ods file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_ods import get_data
try:
#oldData = get_data(fPath)
# A change in the API now returns only an array of arrays if there is only one sheet.
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, oldData)
from pyexcel_ods import save_data
# Storing the file
save_data(fPath, tabularData) | python | def usufyToOdsExport(d, fPath):
"""
Workaround to export to a .ods file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_ods import get_data
try:
#oldData = get_data(fPath)
# A change in the API now returns only an array of arrays if there is only one sheet.
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, oldData)
from pyexcel_ods import save_data
# Storing the file
save_data(fPath, tabularData) | [
"def",
"usufyToOdsExport",
"(",
"d",
",",
"fPath",
")",
":",
"from",
"pyexcel_ods",
"import",
"get_data",
"try",
":",
"#oldData = get_data(fPath)",
"# A change in the API now returns only an array of arrays if there is only one sheet.",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"get_data",
"(",
"fPath",
")",
"}",
"except",
":",
"# No information has been recovered",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"[",
"]",
"}",
"# Generating the new tabular data",
"tabularData",
"=",
"_generateTabularData",
"(",
"d",
",",
"oldData",
")",
"from",
"pyexcel_ods",
"import",
"save_data",
"# Storing the file",
"save_data",
"(",
"fPath",
",",
"tabularData",
")"
] | Workaround to export to a .ods file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"to",
"a",
".",
"ods",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L371-L394 |
5,430 | i3visio/osrframework | osrframework/utils/general.py | usufyToXlsExport | def usufyToXlsExport(d, fPath):
"""
Workaround to export to a .xls file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_xls import get_data
try:
#oldData = get_data(fPath)
# A change in the API now returns only an array of arrays if there is only one sheet.
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, oldData)
from pyexcel_xls import save_data
# Storing the file
save_data(fPath, tabularData) | python | def usufyToXlsExport(d, fPath):
"""
Workaround to export to a .xls file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_xls import get_data
try:
#oldData = get_data(fPath)
# A change in the API now returns only an array of arrays if there is only one sheet.
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, oldData)
from pyexcel_xls import save_data
# Storing the file
save_data(fPath, tabularData) | [
"def",
"usufyToXlsExport",
"(",
"d",
",",
"fPath",
")",
":",
"from",
"pyexcel_xls",
"import",
"get_data",
"try",
":",
"#oldData = get_data(fPath)",
"# A change in the API now returns only an array of arrays if there is only one sheet.",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"get_data",
"(",
"fPath",
")",
"}",
"except",
":",
"# No information has been recovered",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"[",
"]",
"}",
"# Generating the new tabular data",
"tabularData",
"=",
"_generateTabularData",
"(",
"d",
",",
"oldData",
")",
"from",
"pyexcel_xls",
"import",
"save_data",
"# Storing the file",
"save_data",
"(",
"fPath",
",",
"tabularData",
")"
] | Workaround to export to a .xls file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"to",
"a",
".",
"xls",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L397-L419 |
5,431 | i3visio/osrframework | osrframework/utils/general.py | usufyToXlsxExport | def usufyToXlsxExport(d, fPath):
"""
Workaround to export to a .xlsx file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_xlsx import get_data
try:
#oldData = get_data(fPath)
# A change in the API now returns only an array of arrays if there is only one sheet.
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, oldData)
from pyexcel_xlsx import save_data
# Storing the file
save_data(fPath, tabularData) | python | def usufyToXlsxExport(d, fPath):
"""
Workaround to export to a .xlsx file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
from pyexcel_xlsx import get_data
try:
#oldData = get_data(fPath)
# A change in the API now returns only an array of arrays if there is only one sheet.
oldData = {"OSRFramework": get_data(fPath) }
except:
# No information has been recovered
oldData = {"OSRFramework":[]}
# Generating the new tabular data
tabularData = _generateTabularData(d, oldData)
from pyexcel_xlsx import save_data
# Storing the file
save_data(fPath, tabularData) | [
"def",
"usufyToXlsxExport",
"(",
"d",
",",
"fPath",
")",
":",
"from",
"pyexcel_xlsx",
"import",
"get_data",
"try",
":",
"#oldData = get_data(fPath)",
"# A change in the API now returns only an array of arrays if there is only one sheet.",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"get_data",
"(",
"fPath",
")",
"}",
"except",
":",
"# No information has been recovered",
"oldData",
"=",
"{",
"\"OSRFramework\"",
":",
"[",
"]",
"}",
"# Generating the new tabular data",
"tabularData",
"=",
"_generateTabularData",
"(",
"d",
",",
"oldData",
")",
"from",
"pyexcel_xlsx",
"import",
"save_data",
"# Storing the file",
"save_data",
"(",
"fPath",
",",
"tabularData",
")"
] | Workaround to export to a .xlsx file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"to",
"a",
".",
"xlsx",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L422-L445 |
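The csv, ods, xls and xlsx exporters above all share one pipeline: read whatever is already on disk, merge it through _generateTabularData, and save the whole book again. A compact sketch of that pipeline with the ods backend; it assumes pyexcel-ods and the osrframework helpers are importable, the file name is illustrative, and the "OSRFramework" wrapping mirrors the records (the exact get_data return shape depends on the pyexcel version installed).

from pyexcel_ods import get_data, save_data
from osrframework.utils.general import _generateTabularData

new_profiles = [
    {
        "value": "i3visio - Twitter",
        "type": "i3visio.profile",
        "attributes": [
            {"type": "i3visio.alias", "value": "i3visio", "attributes": []},
        ],
    }
]

try:
    old_book = {"OSRFramework": get_data("profiles.ods")}   # previous export, if any
except Exception:
    old_book = {"OSRFramework": []}                          # start from scratch

save_data("profiles.ods", _generateTabularData(new_profiles, old_book))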
5,432 | i3visio/osrframework | osrframework/utils/general.py | _generateGraphData | def _generateGraphData(data, oldData=nx.Graph()):
"""
Processing the data from i3visio structures to generate nodes and edges
This function uses the networkx graph library. It will create a new node
for each i3visio.<something> entity, while adding properties for
all the attributes starting with "@".
Args:
-----
d: The i3visio structures containing a list of
oldData: A graph structure representing the previous information.
Returns:
--------
A graph structure representing the updated information.
"""
def _addNewNode(ent, g):
"""
Wraps the creation of a node
Args:
-----
ent: The i3visio-like entities to be used as the identifier.
ent = {
"value":"i3visio",
"type":"i3visio.alias,
}
g: The graph in which the entity will be stored.
Returns:
-------
The label used to represent this element.
"""
try:
label = unicode(ent["value"])
except UnicodeEncodeError as e:
# Printing that an error was found
label = str(ent["value"])
g.add_node(label)
g.node[label]["type"] = ent["type"]
return label
def _processAttributes(elems, g):
"""
Function that processes a list of elements to obtain new attributes.
Args:
-----
elems: List of i3visio-like entities.
g: The graph in which the entity will be stored.
Returns:
--------
newAtts: Dict of attributes (to be stored as attributes for the
given entity).
newEntities: List of new Entities (to be stored as attributes for
the given entity).
"""
newAtts = {}
newEntities= []
for att in elems:
# If it is an attribute
if att["type"][0] == "@":
# Removing the @ and the _ of the attributes
attName = str(att["type"][1:]).replace('_', '')
try:
newAtts[attName] = int(att["value"])
except:
newAtts[attName] = att["value"]
elif att["type"][:8] == "i3visio.":
# Creating a dict to represent the pair: type, value entity.
ent = {
"value":att["value"],
"type":att["type"].replace("i3visio.", "i3visio_"),
}
# Appending the new Entity to the entity list
newEntities.append(ent)
# Appending the new node
hashLabel = _addNewNode(ent, g)
# Make this recursive to link the attributes in each and every att
newAttsInAttributes, newEntitiesInAttributes = _processAttributes(att["attributes"], g)
# Updating the attributes to the current entity
g.node[hashLabel].update(newAttsInAttributes)
# Creating the edges (the new entities have also been created in the _processAttributes
for new in newEntitiesInAttributes:
graphData.add_edge(hashLabel, json.dumps(new))
try:
# Here, we would add the properties of the edge
#graphData.edge[hashLabel][json.dumps(new)]["@times_seen"] +=1
pass
except:
# If the attribute does not exist, we would initialize it
#graphData.edge[hashLabel][json.dumps(new)]["@times_seen"] = 1
pass
else:
# An unexpected type
pass
return newAtts, newEntities
graphData = oldData
# Iterating through the results
for elem in data:
# Creating a dict to represent the pair: type, value entity.
ent = {
"value":elem["value"],
"type":elem["type"],
}
# Appending the new node
new_node = _addNewNode(ent, graphData)
# Processing the attributes to grab the attributes (starting with "@..." and entities)
newAtts, newEntities = _processAttributes(elem["attributes"], graphData)
# Updating the attributes to the current entity
graphData.node[new_node].update(newAtts)
# Creating the edges (the new entities have also been created in the _processAttributes
for other_node in newEntities:
# Serializing the second entity
serEnt = json.dumps(new_node)
try:
other_node = unicode(other_node["value"])
except UnicodeEncodeError as e:
# Printing that an error was found
other_node = str(other_node["value"])
# Adding the edge
graphData.add_edge(new_node, other_node)
try:
# Here, we would add the properties of the edge
#graphData.edge[hashLabel][hashLabelSeconds]["times_seen"] +=1
pass
except:
# If the attribute does not exist, we would initialize it
#graphData.edge[hashLabel][hashLabelSeconds]["times_seen"] = 1
pass
return graphData | python | def _generateGraphData(data, oldData=nx.Graph()):
"""
Processing the data from i3visio structures to generate nodes and edges
This function uses the networkx graph library. It will create a new node
for each i3visio.<something> entity, while adding properties for
all the attributes starting with "@".
Args:
-----
d: The i3visio structures containing a list of
oldData: A graph structure representing the previous information.
Returns:
--------
A graph structure representing the updated information.
"""
def _addNewNode(ent, g):
"""
Wraps the creation of a node
Args:
-----
ent: The i3visio-like entities to be used as the identifier.
ent = {
"value":"i3visio",
"type":"i3visio.alias,
}
g: The graph in which the entity will be stored.
Returns:
-------
The label used to represent this element.
"""
try:
label = unicode(ent["value"])
except UnicodeEncodeError as e:
# Printing that an error was found
label = str(ent["value"])
g.add_node(label)
g.node[label]["type"] = ent["type"]
return label
def _processAttributes(elems, g):
"""
Function that processes a list of elements to obtain new attributes.
Args:
-----
elems: List of i3visio-like entities.
g: The graph in which the entity will be stored.
Returns:
--------
newAtts: Dict of attributes (to be stored as attributes for the
given entity).
newEntities: List of new Entities (to be stored as attributes for
the given entity).
"""
newAtts = {}
newEntities= []
for att in elems:
# If it is an attribute
if att["type"][0] == "@":
# Removing the @ and the _ of the attributes
attName = str(att["type"][1:]).replace('_', '')
try:
newAtts[attName] = int(att["value"])
except:
newAtts[attName] = att["value"]
elif att["type"][:8] == "i3visio.":
# Creating a dict to represent the pair: type, value entity.
ent = {
"value":att["value"],
"type":att["type"].replace("i3visio.", "i3visio_"),
}
# Appending the new Entity to the entity list
newEntities.append(ent)
# Appending the new node
hashLabel = _addNewNode(ent, g)
# Make this recursive to link the attributes in each and every att
newAttsInAttributes, newEntitiesInAttributes = _processAttributes(att["attributes"], g)
# Updating the attributes to the current entity
g.node[hashLabel].update(newAttsInAttributes)
# Creating the edges (the new entities have also been created in the _processAttributes
for new in newEntitiesInAttributes:
graphData.add_edge(hashLabel, json.dumps(new))
try:
# Here, we would add the properties of the edge
#graphData.edge[hashLabel][json.dumps(new)]["@times_seen"] +=1
pass
except:
# If the attribute does not exist, we would initialize it
#graphData.edge[hashLabel][json.dumps(new)]["@times_seen"] = 1
pass
else:
# An unexpected type
pass
return newAtts, newEntities
graphData = oldData
# Iterating through the results
for elem in data:
# Creating a dict to represent the pair: type, value entity.
ent = {
"value":elem["value"],
"type":elem["type"],
}
# Appending the new node
new_node = _addNewNode(ent, graphData)
# Processing the attributes to grab the attributes (starting with "@..." and entities)
newAtts, newEntities = _processAttributes(elem["attributes"], graphData)
# Updating the attributes to the current entity
graphData.node[new_node].update(newAtts)
# Creating the edges (the new entities have also been created in the _processAttributes
for other_node in newEntities:
# Serializing the second entity
serEnt = json.dumps(new_node)
try:
other_node = unicode(other_node["value"])
except UnicodeEncodeError as e:
# Printing that an error was found
other_node = str(other_node["value"])
# Adding the edge
graphData.add_edge(new_node, other_node)
try:
# Here, we would add the properties of the edge
#graphData.edge[hashLabel][hashLabelSeconds]["times_seen"] +=1
pass
except:
# If the attribute does not exist, we would initialize it
#graphData.edge[hashLabel][hashLabelSeconds]["times_seen"] = 1
pass
return graphData | [
"def",
"_generateGraphData",
"(",
"data",
",",
"oldData",
"=",
"nx",
".",
"Graph",
"(",
")",
")",
":",
"def",
"_addNewNode",
"(",
"ent",
",",
"g",
")",
":",
"\"\"\"\n Wraps the creation of a node\n\n Args:\n -----\n ent: The hi3visio-like entities to be used as the identifier.\n ent = {\n \"value\":\"i3visio\",\n \"type\":\"i3visio.alias,\n }\n g: The graph in which the entity will be stored.\n\n Returns:\n -------\n The label used to represent this element.\n \"\"\"",
"try",
":",
"label",
"=",
"unicode",
"(",
"ent",
"[",
"\"value\"",
"]",
")",
"except",
"UnicodeEncodeError",
"as",
"e",
":",
"# Printing that an error was found",
"label",
"=",
"str",
"(",
"ent",
"[",
"\"value\"",
"]",
")",
"g",
".",
"add_node",
"(",
"label",
")",
"g",
".",
"node",
"[",
"label",
"]",
"[",
"\"type\"",
"]",
"=",
"ent",
"[",
"\"type\"",
"]",
"return",
"label",
"def",
"_processAttributes",
"(",
"elems",
",",
"g",
")",
":",
"\"\"\"\n Function that processes a list of elements to obtain new attributes.\n\n Args:\n -----\n elems: List of i3visio-like entities.\n g: The graph in which the entity will be stored.\n\n Returns:\n --------\n newAtts: Dict of attributes (to be stored as attributes for the\n given entity).\n newEntities: List of new Entities (to be stored as attributes for\n the given entity).\n \"\"\"",
"newAtts",
"=",
"{",
"}",
"newEntities",
"=",
"[",
"]",
"for",
"att",
"in",
"elems",
":",
"# If it is an attribute",
"if",
"att",
"[",
"\"type\"",
"]",
"[",
"0",
"]",
"==",
"\"@\"",
":",
"# Removing the @ and the _ of the attributes",
"attName",
"=",
"str",
"(",
"att",
"[",
"\"type\"",
"]",
"[",
"1",
":",
"]",
")",
".",
"replace",
"(",
"'_'",
",",
"''",
")",
"try",
":",
"newAtts",
"[",
"attName",
"]",
"=",
"int",
"(",
"att",
"[",
"\"value\"",
"]",
")",
"except",
":",
"newAtts",
"[",
"attName",
"]",
"=",
"att",
"[",
"\"value\"",
"]",
"elif",
"att",
"[",
"\"type\"",
"]",
"[",
":",
"8",
"]",
"==",
"\"i3visio.\"",
":",
"# Creating a dict to represent the pair: type, value entity.",
"ent",
"=",
"{",
"\"value\"",
":",
"att",
"[",
"\"value\"",
"]",
",",
"\"type\"",
":",
"att",
"[",
"\"type\"",
"]",
".",
"replace",
"(",
"\"i3visio.\"",
",",
"\"i3visio_\"",
")",
",",
"}",
"# Appending the new Entity to the entity list",
"newEntities",
".",
"append",
"(",
"ent",
")",
"# Appending the new node",
"hashLabel",
"=",
"_addNewNode",
"(",
"ent",
",",
"g",
")",
"# Make this recursive to link the attributes in each and every att",
"newAttsInAttributes",
",",
"newEntitiesInAttributes",
"=",
"_processAttributes",
"(",
"att",
"[",
"\"attributes\"",
"]",
",",
"g",
")",
"# Updating the attributes to the current entity",
"g",
".",
"node",
"[",
"hashLabel",
"]",
".",
"update",
"(",
"newAttsInAttributes",
")",
"# Creating the edges (the new entities have also been created in the _processAttributes",
"for",
"new",
"in",
"newEntitiesInAttributes",
":",
"graphData",
".",
"add_edge",
"(",
"hashLabel",
",",
"json",
".",
"dumps",
"(",
"new",
")",
")",
"try",
":",
"# Here, we would add the properties of the edge",
"#graphData.edge[hashLabel][json.dumps(new)][\"@times_seen\"] +=1",
"pass",
"except",
":",
"# If the attribute does not exist, we would initialize it",
"#graphData.edge[hashLabel][json.dumps(new)][\"@times_seen\"] = 1",
"pass",
"else",
":",
"# An unexpected type",
"pass",
"return",
"newAtts",
",",
"newEntities",
"graphData",
"=",
"oldData",
"# Iterating through the results",
"for",
"elem",
"in",
"data",
":",
"# Creating a dict to represent the pair: type, value entity.",
"ent",
"=",
"{",
"\"value\"",
":",
"elem",
"[",
"\"value\"",
"]",
",",
"\"type\"",
":",
"elem",
"[",
"\"type\"",
"]",
",",
"}",
"# Appending the new node",
"new_node",
"=",
"_addNewNode",
"(",
"ent",
",",
"graphData",
")",
"# Processing the attributes to grab the attributes (starting with \"@...\" and entities)",
"newAtts",
",",
"newEntities",
"=",
"_processAttributes",
"(",
"elem",
"[",
"\"attributes\"",
"]",
",",
"graphData",
")",
"# Updating the attributes to the current entity",
"graphData",
".",
"node",
"[",
"new_node",
"]",
".",
"update",
"(",
"newAtts",
")",
"# Creating the edges (the new entities have also been created in the _processAttributes",
"for",
"other_node",
"in",
"newEntities",
":",
"# Serializing the second entity",
"serEnt",
"=",
"json",
".",
"dumps",
"(",
"new_node",
")",
"try",
":",
"other_node",
"=",
"unicode",
"(",
"other_node",
"[",
"\"value\"",
"]",
")",
"except",
"UnicodeEncodeError",
"as",
"e",
":",
"# Printing that an error was found",
"other_node",
"=",
"str",
"(",
"other_node",
"[",
"\"value\"",
"]",
")",
"# Adding the edge",
"graphData",
".",
"add_edge",
"(",
"new_node",
",",
"other_node",
")",
"try",
":",
"# Here, we would add the properties of the edge",
"#graphData.edge[hashLabel][hashLabelSeconds][\"times_seen\"] +=1",
"pass",
"except",
":",
"# If the attribute does not exist, we would initialize it",
"#graphData.edge[hashLabel][hashLabelSeconds][\"times_seen\"] = 1",
"pass",
"return",
"graphData"
] | Processing the data from i3visio structures to generate nodes and edges
This function uses the networkx graph library. It will create a new node
for each i3visio.<something> entity, while adding properties for
all the attributes starting with "@".
Args:
-----
d: The i3visio structures containing a list of
oldData: A graph structure representing the previous information.
Returns:
--------
A graph structure representing the updated information. | [
"Processing",
"the",
"data",
"from",
"i3visio",
"structures",
"to",
"generate",
"nodes",
"and",
"edges"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L448-L594 |
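The graph builder above stores one networkx node per i3visio entity, keyed by its value, with the entity type and any "@" attributes attached as node attributes. A tiny sketch of that layout with illustrative values; it uses keyword attributes on add_node, which is equivalent to the g.node[label][...] assignments in the record and also runs on newer networkx releases.

import networkx as nx

g = nx.Graph()
g.add_node("i3visio", type="i3visio_alias")
g.add_node("https://twitter.com/i3visio", type="i3visio_uri")
g.add_edge("i3visio", "https://twitter.com/i3visio")

print(list(g.edges()))
# [('i3visio', 'https://twitter.com/i3visio')]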
5,433 | i3visio/osrframework | osrframework/utils/general.py | usufyToGmlExport | def usufyToGmlExport(d, fPath):
"""
Workaround to export data to a .gml file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
# Reading the previous gml file
try:
oldData=nx.read_gml(fPath)
except UnicodeDecodeError as e:
print("UnicodeDecodeError:\t" + str(e))
print("Something went wrong when reading the .gml file relating to the decoding of UNICODE.")
import time as time
fPath+="_" +str(time.time())
print("To avoid losing data, the output file will be renamed to use the timestamp as:\n" + fPath + "_" + str(time.time()))
print()
# No information has been recovered
oldData = nx.Graph()
except Exception as e:
# No information has been recovered
oldData = nx.Graph()
newGraph = _generateGraphData(d, oldData)
# Writing the gml file
nx.write_gml(newGraph,fPath) | python | def usufyToGmlExport(d, fPath):
"""
Workaround to export data to a .gml file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
# Reading the previous gml file
try:
oldData=nx.read_gml(fPath)
except UnicodeDecodeError as e:
print("UnicodeDecodeError:\t" + str(e))
print("Something went wrong when reading the .gml file relating to the decoding of UNICODE.")
import time as time
fPath+="_" +str(time.time())
print("To avoid losing data, the output file will be renamed to use the timestamp as:\n" + fPath + "_" + str(time.time()))
print()
# No information has been recovered
oldData = nx.Graph()
except Exception as e:
# No information has been recovered
oldData = nx.Graph()
newGraph = _generateGraphData(d, oldData)
# Writing the gml file
nx.write_gml(newGraph,fPath) | [
"def",
"usufyToGmlExport",
"(",
"d",
",",
"fPath",
")",
":",
"# Reading the previous gml file",
"try",
":",
"oldData",
"=",
"nx",
".",
"read_gml",
"(",
"fPath",
")",
"except",
"UnicodeDecodeError",
"as",
"e",
":",
"print",
"(",
"\"UnicodeDecodeError:\\t\"",
"+",
"str",
"(",
"e",
")",
")",
"print",
"(",
"\"Something went wrong when reading the .gml file relating to the decoding of UNICODE.\"",
")",
"import",
"time",
"as",
"time",
"fPath",
"+=",
"\"_\"",
"+",
"str",
"(",
"time",
".",
"time",
"(",
")",
")",
"print",
"(",
"\"To avoid losing data, the output file will be renamed to use the timestamp as:\\n\"",
"+",
"fPath",
"+",
"\"_\"",
"+",
"str",
"(",
"time",
".",
"time",
"(",
")",
")",
")",
"print",
"(",
")",
"# No information has been recovered",
"oldData",
"=",
"nx",
".",
"Graph",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"# No information has been recovered",
"oldData",
"=",
"nx",
".",
"Graph",
"(",
")",
"newGraph",
"=",
"_generateGraphData",
"(",
"d",
",",
"oldData",
")",
"# Writing the gml file",
"nx",
".",
"write_gml",
"(",
"newGraph",
",",
"fPath",
")"
] | Workaround to export data to a .gml file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"data",
"to",
"a",
".",
"gml",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L597-L625 |
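A condensed sketch of the defensive read-then-write flow above: any failure to parse the existing .gml simply starts from an empty graph (the record additionally renames the output on Unicode errors, which this sketch omits). File name and edge are illustrative.

import networkx as nx

def load_or_empty(path):
    try:
        return nx.read_gml(path)
    except Exception:
        return nx.Graph()        # unreadable or missing file: start fresh

g = load_or_empty("profiles.gml")
g.add_edge("i3visio", "Twitter")
nx.write_gml(g, "profiles.gml")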
5,434 | i3visio/osrframework | osrframework/utils/general.py | usufyToPngExport | def usufyToPngExport(d, fPath):
"""
Workaround to export to a png file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
newGraph = _generateGraphData(d)
import matplotlib.pyplot as plt
# Writing the png file
nx.draw(newGraph)
plt.savefig(fPath) | python | def usufyToPngExport(d, fPath):
"""
Workaround to export to a png file.
Args:
-----
d: Data to export.
fPath: File path for the output file.
"""
newGraph = _generateGraphData(d)
import matplotlib.pyplot as plt
# Writing the png file
nx.draw(newGraph)
plt.savefig(fPath) | [
"def",
"usufyToPngExport",
"(",
"d",
",",
"fPath",
")",
":",
"newGraph",
"=",
"_generateGraphData",
"(",
"d",
")",
"import",
"matplotlib",
".",
"pyplot",
"as",
"plt",
"# Writing the png file",
"nx",
".",
"draw",
"(",
"newGraph",
")",
"plt",
".",
"savefig",
"(",
"fPath",
")"
] | Workaround to export to a png file.
Args:
-----
d: Data to export.
fPath: File path for the output file. | [
"Workaround",
"to",
"export",
"to",
"a",
"png",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L628-L642 |
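The PNG export above relies on matplotlib to render the graph. A self-contained sketch that also forces the non-interactive Agg backend so it runs on a headless machine; the backend choice and file name are assumptions, not part of the record.

import matplotlib
matplotlib.use("Agg")            # headless rendering
import matplotlib.pyplot as plt
import networkx as nx

g = nx.Graph()
g.add_edge("i3visio", "Twitter")
nx.draw(g, with_labels=True)
plt.savefig("graph.png")
plt.close()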
5,435 | i3visio/osrframework | osrframework/utils/general.py | fileToMD5 | def fileToMD5(filename, block_size=256*128, binary=False):
"""
A function that calculates the MD5 hash of a file.
Args:
-----
filename: Path to the file.
block_size: Chunks of suitable size. Block size directly depends on
the block size of your filesystem to avoid performance issues.
Blocks of 4096 octets (Default NTFS).
binary: A boolean representing whether the returned info is in binary
format or not.
Returns:
--------
string: The MD5 hash of the file.
"""
md5 = hashlib.md5()
with open(filename,'rb') as f:
for chunk in iter(lambda: f.read(block_size), b''):
md5.update(chunk)
if not binary:
return md5.hexdigest()
return md5.digest() | python | def fileToMD5(filename, block_size=256*128, binary=False):
"""
A function that calculates the MD5 hash of a file.
Args:
-----
filename: Path to the file.
block_size: Chunks of suitable size. Block size directly depends on
the block size of your filesystem to avoid performance issues.
Blocks of 4096 octets (Default NTFS).
binary: A boolean representing whether the returned info is in binary
format or not.
Returns:
--------
string: The MD5 hash of the file.
"""
md5 = hashlib.md5()
with open(filename,'rb') as f:
for chunk in iter(lambda: f.read(block_size), b''):
md5.update(chunk)
if not binary:
return md5.hexdigest()
return md5.digest() | [
"def",
"fileToMD5",
"(",
"filename",
",",
"block_size",
"=",
"256",
"*",
"128",
",",
"binary",
"=",
"False",
")",
":",
"md5",
"=",
"hashlib",
".",
"md5",
"(",
")",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"f",
":",
"for",
"chunk",
"in",
"iter",
"(",
"lambda",
":",
"f",
".",
"read",
"(",
"block_size",
")",
",",
"b''",
")",
":",
"md5",
".",
"update",
"(",
"chunk",
")",
"if",
"not",
"binary",
":",
"return",
"md5",
".",
"hexdigest",
"(",
")",
"return",
"md5",
".",
"digest",
"(",
")"
] | A function that calculates the MD5 hash of a file.
Args:
-----
filename: Path to the file.
block_size: Chunks of suitable size. Block size directly depends on
the block size of your filesystem to avoid performance issues.
Blocks of 4096 octets (Default NTFS).
binary: A boolean representing whether the returned info is in binary
format or not.
Returns:
--------
string: The MD5 hash of the file. | [
"A",
"function",
"that",
"calculates",
"the",
"MD5",
"hash",
"of",
"a",
"file",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L645-L668 |
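An illustrative usage sketch for the fileToMD5 helper documented above. It assumes the osrframework package is installed so that osrframework.utils.general is importable; the file name is only a placeholder.

```python
# Minimal sketch: hash an existing file (path is a placeholder).
from osrframework.utils.general import fileToMD5

path = "sample.txt"

# Default call: hex-encoded MD5 digest as a string.
print(fileToMD5(path))

# binary=True returns the raw 16-byte digest instead of the hex string.
print(repr(fileToMD5(path, binary=True)))
```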
5,436 | i3visio/osrframework | osrframework/utils/general.py | getCurrentStrDatetime | def getCurrentStrDatetime():
"""
Generating the current Datetime with a given format
Returns:
--------
string: The string of a date.
"""
# Generating current time
i = datetime.datetime.now()
strTime = "%s-%s-%s_%sh%sm" % (i.year, i.month, i.day, i.hour, i.minute)
return strTime | python | def getCurrentStrDatetime():
"""
Generating the current Datetime with a given format
Returns:
--------
string: The string of a date.
"""
# Generating current time
i = datetime.datetime.now()
strTime = "%s-%s-%s_%sh%sm" % (i.year, i.month, i.day, i.hour, i.minute)
return strTime | [
"def",
"getCurrentStrDatetime",
"(",
")",
":",
"# Generating current time",
"i",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"strTime",
"=",
"\"%s-%s-%s_%sh%sm\"",
"%",
"(",
"i",
".",
"year",
",",
"i",
".",
"month",
",",
"i",
".",
"day",
",",
"i",
".",
"hour",
",",
"i",
".",
"minute",
")",
"return",
"strTime"
] | Generating the current Datetime with a given format
Returns:
--------
string: The string of a date. | [
"Generating",
"the",
"current",
"Datetime",
"with",
"a",
"given",
"format"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L671-L682 |
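To make the format string above concrete: the helper returns something like 2018-7-4_16h5m, and months, days and minutes are not zero-padded because %s is applied to plain integers. A minimal sketch, assuming the package is importable:

```python
# Minimal sketch: timestamp string typically used to name output files.
from osrframework.utils.general import getCurrentStrDatetime

print(getCurrentStrDatetime())   # e.g. "2018-7-4_16h5m"
```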
5,437 | i3visio/osrframework | osrframework/utils/general.py | getFilesFromAFolder | def getFilesFromAFolder(path):
"""
Getting all the files in a folder.
Args:
-----
path: The path in which looking for the files
Returns:
--------
list: The list of filenames found.
"""
from os import listdir
from os.path import isfile, join
#onlyfiles = [ f for f in listdir(path) if isfile(join(path,f)) ]
onlyFiles = []
for f in listdir(path):
if isfile(join(path, f)):
onlyFiles.append(f)
return onlyFiles | python | def getFilesFromAFolder(path):
"""
Getting all the files in a folder.
Args:
-----
path: The path in which looking for the files
Returns:
--------
list: The list of filenames found.
"""
from os import listdir
from os.path import isfile, join
#onlyfiles = [ f for f in listdir(path) if isfile(join(path,f)) ]
onlyFiles = []
for f in listdir(path):
if isfile(join(path, f)):
onlyFiles.append(f)
return onlyFiles | [
"def",
"getFilesFromAFolder",
"(",
"path",
")",
":",
"from",
"os",
"import",
"listdir",
"from",
"os",
".",
"path",
"import",
"isfile",
",",
"join",
"#onlyfiles = [ f for f in listdir(path) if isfile(join(path,f)) ]",
"onlyFiles",
"=",
"[",
"]",
"for",
"f",
"in",
"listdir",
"(",
"path",
")",
":",
"if",
"isfile",
"(",
"join",
"(",
"path",
",",
"f",
")",
")",
":",
"onlyFiles",
".",
"append",
"(",
"f",
")",
"return",
"onlyFiles"
] | Getting all the files in a folder.
Args:
-----
path: The path in which looking for the files
Returns:
--------
list: The list of filenames found. | [
"Getting",
"all",
"the",
"files",
"in",
"a",
"folder",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L685-L704 |
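A short usage sketch for the helper above, assuming osrframework is installed; it lists only regular files (not directories) directly under the given folder.

```python
# Minimal sketch: list the plain files in the current directory.
from osrframework.utils.general import getFilesFromAFolder

for name in getFilesFromAFolder("."):
    print(name)
```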
5,438 | i3visio/osrframework | osrframework/utils/general.py | urisToBrowser | def urisToBrowser(uris=[], autoraise=True):
"""
Method that launches the URI in the default browser of the system
This function temporarily deactivates the standard output and errors to
prevent the system to show unwanted messages. This method is based on this
question from Stackoverflow.
https://stackoverflow.com/questions/2323080/how-can-i-disable-the-webbrowser-message-in-python
Args:
-----
uri: a list of strings representing the URI to be opened in the browser.
"""
# Cloning stdout (1) and stderr (2)
savout1 = os.dup(1)
savout2 = os.dup(2)
# Closing them
os.close(1)
os.close(2)
os.open(os.devnull, os.O_RDWR)
try:
for uri in uris:
# Opening the Tor URI using onion.cab proxy
if ".onion" in uri:
wb.open(uri.replace(".onion", ".onion.city"), new=2, autoraise=autoraise)
else:
wb.open(uri, new=2, autoraise=autoraise)
finally:
# Reopening them...
os.dup2(savout1, 1)
os.dup2(savout2, 2) | python | def urisToBrowser(uris=[], autoraise=True):
"""
Method that launches the URI in the default browser of the system
This function temporarily deactivates the standard output and errors to
prevent the system to show unwanted messages. This method is based on this
question from Stackoverflow.
https://stackoverflow.com/questions/2323080/how-can-i-disable-the-webbrowser-message-in-python
Args:
-----
uri: a list of strings representing the URI to be opened in the browser.
"""
# Cloning stdout (1) and stderr (2)
savout1 = os.dup(1)
savout2 = os.dup(2)
# Closing them
os.close(1)
os.close(2)
os.open(os.devnull, os.O_RDWR)
try:
for uri in uris:
# Opening the Tor URI using onion.cab proxy
if ".onion" in uri:
wb.open(uri.replace(".onion", ".onion.city"), new=2, autoraise=autoraise)
else:
wb.open(uri, new=2, autoraise=autoraise)
finally:
# Reopening them...
os.dup2(savout1, 1)
os.dup2(savout2, 2) | [
"def",
"urisToBrowser",
"(",
"uris",
"=",
"[",
"]",
",",
"autoraise",
"=",
"True",
")",
":",
"# Cloning stdout (1) and stderr (2)",
"savout1",
"=",
"os",
".",
"dup",
"(",
"1",
")",
"savout2",
"=",
"os",
".",
"dup",
"(",
"2",
")",
"# Closing them",
"os",
".",
"close",
"(",
"1",
")",
"os",
".",
"close",
"(",
"2",
")",
"os",
".",
"open",
"(",
"os",
".",
"devnull",
",",
"os",
".",
"O_RDWR",
")",
"try",
":",
"for",
"uri",
"in",
"uris",
":",
"# Opening the Tor URI using onion.cab proxy",
"if",
"\".onion\"",
"in",
"uri",
":",
"wb",
".",
"open",
"(",
"uri",
".",
"replace",
"(",
"\".onion\"",
",",
"\".onion.city\"",
")",
",",
"new",
"=",
"2",
",",
"autoraise",
"=",
"autoraise",
")",
"else",
":",
"wb",
".",
"open",
"(",
"uri",
",",
"new",
"=",
"2",
",",
"autoraise",
"=",
"autoraise",
")",
"finally",
":",
"# Reopening them...",
"os",
".",
"dup2",
"(",
"savout1",
",",
"1",
")",
"os",
".",
"dup2",
"(",
"savout2",
",",
"2",
")"
] | Method that launches the URI in the default browser of the system
This function temporarily deactivates the standard output and errors to
prevent the system to show unwanted messages. This method is based on this
question from Stackoverflow.
https://stackoverflow.com/questions/2323080/how-can-i-disable-the-webbrowser-message-in-python
Args:
-----
uri: a list of strings representing the URI to be opened in the browser. | [
"Method",
"that",
"launches",
"the",
"URI",
"in",
"the",
"default",
"browser",
"of",
"the",
"system"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L707-L740 |
5,439 | i3visio/osrframework | osrframework/utils/general.py | openResultsInBrowser | def openResultsInBrowser(res):
"""
Method that collects the URI from a list of entities and opens them
Args:
-----
res: A list containing several i3visio entities.
"""
print(emphasis("\n\tOpening URIs in the default web browser..."))
urisToBrowser(["https://github.com/i3visio/osrframework"], autoraise=False)
# Waiting 2 seconds to confirm that the browser is opened and prevent the OS from opening several windows
time.sleep(2)
uris = []
for r in res:
for att in r["attributes"]:
if att["type"] == "i3visio.uri":
uris.append(att["value"])
urisToBrowser(uris) | python | def openResultsInBrowser(res):
"""
Method that collects the URI from a list of entities and opens them
Args:
-----
res: A list containing several i3visio entities.
"""
print(emphasis("\n\tOpening URIs in the default web browser..."))
urisToBrowser(["https://github.com/i3visio/osrframework"], autoraise=False)
# Waiting 2 seconds to confirm that the browser is opened and prevent the OS from opening several windows
time.sleep(2)
uris = []
for r in res:
for att in r["attributes"]:
if att["type"] == "i3visio.uri":
uris.append(att["value"])
urisToBrowser(uris) | [
"def",
"openResultsInBrowser",
"(",
"res",
")",
":",
"print",
"(",
"emphasis",
"(",
"\"\\n\\tOpening URIs in the default web browser...\"",
")",
")",
"urisToBrowser",
"(",
"[",
"\"https://github.com/i3visio/osrframework\"",
"]",
",",
"autoraise",
"=",
"False",
")",
"# Waiting 2 seconds to confirm that the browser is opened and prevent the OS from opening several windows",
"time",
".",
"sleep",
"(",
"2",
")",
"uris",
"=",
"[",
"]",
"for",
"r",
"in",
"res",
":",
"for",
"att",
"in",
"r",
"[",
"\"attributes\"",
"]",
":",
"if",
"att",
"[",
"\"type\"",
"]",
"==",
"\"i3visio.uri\"",
":",
"uris",
".",
"append",
"(",
"att",
"[",
"\"value\"",
"]",
")",
"urisToBrowser",
"(",
"uris",
")"
] | Method that collects the URI from a list of entities and opens them
Args:
-----
res: A list containing several i3visio entities. | [
"Method",
"that",
"collects",
"the",
"URI",
"from",
"a",
"list",
"of",
"entities",
"and",
"opens",
"them"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L743-L763 |
5,440 | i3visio/osrframework | osrframework/utils/general.py | colorize | def colorize(text, messageType=None):
"""
Function that colorizes a message.
Args:
-----
text: The string to be colorized.
messageType: Possible options include "ERROR", "WARNING", "SUCCESS",
"INFO" or "BOLD".
Returns:
--------
string: Colorized if the option is correct, including a tag at the end
to reset the formatting.
"""
formattedText = str(text)
# Set colors
if "ERROR" in messageType:
formattedText = colorama.Fore.RED + formattedText
elif "WARNING" in messageType:
formattedText = colorama.Fore.YELLOW + formattedText
elif "SUCCESS" in messageType:
formattedText = colorama.Fore.GREEN + formattedText
elif "INFO" in messageType:
formattedText = colorama.Fore.BLUE + formattedText
# Set emphasis mode
if "BOLD" in messageType:
formattedText = colorama.Style.BRIGHT + formattedText
return formattedText + colorama.Style.RESET_ALL | python | def colorize(text, messageType=None):
"""
Function that colorizes a message.
Args:
-----
text: The string to be colorized.
messageType: Possible options include "ERROR", "WARNING", "SUCCESS",
"INFO" or "BOLD".
Returns:
--------
string: Colorized if the option is correct, including a tag at the end
to reset the formatting.
"""
formattedText = str(text)
# Set colors
if "ERROR" in messageType:
formattedText = colorama.Fore.RED + formattedText
elif "WARNING" in messageType:
formattedText = colorama.Fore.YELLOW + formattedText
elif "SUCCESS" in messageType:
formattedText = colorama.Fore.GREEN + formattedText
elif "INFO" in messageType:
formattedText = colorama.Fore.BLUE + formattedText
# Set emphasis mode
if "BOLD" in messageType:
formattedText = colorama.Style.BRIGHT + formattedText
return formattedText + colorama.Style.RESET_ALL | [
"def",
"colorize",
"(",
"text",
",",
"messageType",
"=",
"None",
")",
":",
"formattedText",
"=",
"str",
"(",
"text",
")",
"# Set colors",
"if",
"\"ERROR\"",
"in",
"messageType",
":",
"formattedText",
"=",
"colorama",
".",
"Fore",
".",
"RED",
"+",
"formattedText",
"elif",
"\"WARNING\"",
"in",
"messageType",
":",
"formattedText",
"=",
"colorama",
".",
"Fore",
".",
"YELLOW",
"+",
"formattedText",
"elif",
"\"SUCCESS\"",
"in",
"messageType",
":",
"formattedText",
"=",
"colorama",
".",
"Fore",
".",
"GREEN",
"+",
"formattedText",
"elif",
"\"INFO\"",
"in",
"messageType",
":",
"formattedText",
"=",
"colorama",
".",
"Fore",
".",
"BLUE",
"+",
"formattedText",
"# Set emphashis mode",
"if",
"\"BOLD\"",
"in",
"messageType",
":",
"formattedText",
"=",
"colorama",
".",
"Style",
".",
"BRIGHT",
"+",
"formattedText",
"return",
"formattedText",
"+",
"colorama",
".",
"Style",
".",
"RESET_ALL"
] | Function that colorizes a message.
Args:
-----
text: The string to be colorized.
messageType: Possible options include "ERROR", "WARNING", "SUCCESS",
"INFO" or "BOLD".
Returns:
--------
string: Colorized if the option is correct, including a tag at the end
to reset the formatting. | [
"Function",
"that",
"colorizes",
"a",
"message",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L766-L796 |
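Because the messageType argument above is matched with substring checks, several keywords can be combined in a single string; note that although the parameter defaults to None, the implementation expects a string, since a None value would make the `in` checks raise. A minimal sketch, assuming the package is importable:

```python
# Minimal sketch: combining a colour keyword with the BOLD modifier.
from osrframework.utils.general import colorize

print(colorize("Everything is fine", "SUCCESS"))
print(colorize("Something broke", "ERROR BOLD"))
# A string without known keywords is returned with only the reset code appended.
print(colorize("Plain text", "VERBOSE"))
```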
5,441 | i3visio/osrframework | osrframework/utils/general.py | showLicense | def showLicense():
"""
Method that prints the license if requested.
It tries to find the license online and manually download it. This method
only prints its contents in plain text.
"""
print("Trying to recover the contents of the license...\n")
try:
# Grab the license online and print it.
text = urllib.urlopen(LICENSE_URL).read()
print("License retrieved from " + emphasis(LICENSE_URL) + ".")
raw_input("\n\tPress " + emphasis("<ENTER>") + " to print it.\n")
print(text)
except:
print(warning("The license could not be downloaded and printed.")) | python | def showLicense():
"""
Method that prints the license if requested.
It tries to find the license online and manually download it. This method
only prints its contents in plain text.
"""
print("Trying to recover the contents of the license...\n")
try:
# Grab the license online and print it.
text = urllib.urlopen(LICENSE_URL).read()
print("License retrieved from " + emphasis(LICENSE_URL) + ".")
raw_input("\n\tPress " + emphasis("<ENTER>") + " to print it.\n")
print(text)
except:
print(warning("The license could not be downloaded and printed.")) | [
"def",
"showLicense",
"(",
")",
":",
"print",
"(",
"\"Trying to recover the contents of the license...\\n\"",
")",
"try",
":",
"# Grab the license online and print it.",
"text",
"=",
"urllib",
".",
"urlopen",
"(",
"LICENSE_URL",
")",
".",
"read",
"(",
")",
"print",
"(",
"\"License retrieved from \"",
"+",
"emphasis",
"(",
"LICENSE_URL",
")",
"+",
"\".\"",
")",
"raw_input",
"(",
"\"\\n\\tPress \"",
"+",
"emphasis",
"(",
"\"<ENTER>\"",
")",
"+",
"\" to print it.\\n\"",
")",
"print",
"(",
"text",
")",
"except",
":",
"print",
"(",
"warning",
"(",
"\"The license could not be downloaded and printed.\"",
")",
")"
] | Method that prints the license if requested.
It tries to find the license online and manually download it. This method
only prints its contents in plain text. | [
"Method",
"that",
"prints",
"the",
"license",
"if",
"requested",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L823-L838 |
5,442 | i3visio/osrframework | osrframework/utils/general.py | expandEntitiesFromEmail | def expandEntitiesFromEmail(e):
"""
Method that receives an email and creates linked entities
Args:
-----
e: Email to verify.
Returns:
--------
Three different values: email, alias and domain in a list.
"""
# Grabbing the email
email = {}
email["type"] = "i3visio.email"
email["value"] = e
email["attributes"] = []
# Grabbing the alias
alias = {}
alias["type"] = "i3visio.alias"
alias["value"] = e.split("@")[0]
alias["attributes"] = []
# Grabbing the domain
domain= {}
domain["type"] = "i3visio.domain"
domain["value"] = e.split("@")[1]
domain["attributes"] = []
return [email, alias, domain] | python | def expandEntitiesFromEmail(e):
"""
Method that receives an email and creates linked entities
Args:
-----
e: Email to verify.
Returns:
--------
Three different values: email, alias and domain in a list.
"""
# Grabbing the email
email = {}
email["type"] = "i3visio.email"
email["value"] = e
email["attributes"] = []
# Grabbing the alias
alias = {}
alias["type"] = "i3visio.alias"
alias["value"] = e.split("@")[0]
alias["attributes"] = []
# Grabbing the domain
domain= {}
domain["type"] = "i3visio.domain"
domain["value"] = e.split("@")[1]
domain["attributes"] = []
return [email, alias, domain] | [
"def",
"expandEntitiesFromEmail",
"(",
"e",
")",
":",
"# Grabbing the email",
"email",
"=",
"{",
"}",
"email",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.email\"",
"email",
"[",
"\"value\"",
"]",
"=",
"e",
"email",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"# Grabbing the alias",
"alias",
"=",
"{",
"}",
"alias",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.alias\"",
"alias",
"[",
"\"value\"",
"]",
"=",
"e",
".",
"split",
"(",
"\"@\"",
")",
"[",
"0",
"]",
"alias",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"# Grabbing the domain",
"domain",
"=",
"{",
"}",
"domain",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.domain\"",
"domain",
"[",
"\"value\"",
"]",
"=",
"e",
".",
"split",
"(",
"\"@\"",
")",
"[",
"1",
"]",
"domain",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"return",
"[",
"email",
",",
"alias",
",",
"domain",
"]"
] | Method that receives an email and creates linked entities
Args:
-----
e: Email to verify.
Returns:
--------
Three different values: email, alias and domain in a list. | [
"Method",
"that",
"receives",
"an",
"email",
"an",
"creates",
"linked",
"entities"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/general.py#L842-L872 |
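To make the returned structure concrete, the sketch below (the address is only an example, and osrframework is assumed to be importable) splits one address into the three i3visio entities described above.

```python
# Minimal sketch: the three entities derived from a single address.
import json

from osrframework.utils.general import expandEntitiesFromEmail

entities = expandEntitiesFromEmail("[email protected]")
print(json.dumps(entities, indent=2))
# Yields an i3visio.email ("[email protected]"), an i3visio.alias ("jdoe")
# and an i3visio.domain ("example.com"), each with an empty attributes list.
```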
5,443 | i3visio/osrframework | osrframework/domainfy.py | getNumberTLD | def getNumberTLD():
"""
Counting the total number of TLD being processed.
"""
total = 0
for typeTld in TLD.keys():
total+= len(TLD[typeTld])
return total | python | def getNumberTLD():
"""
Counting the total number of TLD being processed.
"""
total = 0
for typeTld in TLD.keys():
total+= len(TLD[typeTld])
return total | [
"def",
"getNumberTLD",
"(",
")",
":",
"total",
"=",
"0",
"for",
"typeTld",
"in",
"TLD",
".",
"keys",
"(",
")",
":",
"total",
"+=",
"len",
"(",
"TLD",
"[",
"typeTld",
"]",
")",
"return",
"total"
] | Counting the total number of TLD being processed. | [
"Counting",
"the",
"total",
"number",
"of",
"TLD",
"being",
"processed",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/domainfy.py#L63-L70 |
5,444 | i3visio/osrframework | osrframework/domainfy.py | getWhoisInfo | def getWhoisInfo(domain):
"""
Method that tries to recover the whois info from a domain.
Args:
-----
domain: The domain to verify.
Returns:
--------
dict: A dictionary containing the result as an i3visio entity with its
`value`, `type` and `attributes`.
"""
new = []
# Grabbing the aliases
try:
emails = {}
emails["type"] = "i3visio.alias"
emails["value"] = str(domain.split(".")[0])
emails["attributes"] = []
new.append(emails)
except:
pass
info = whois.whois(domain)
if info.status == None:
raise Exception("UnknownDomainError: " + domain + " could not be resolved.")
# Grabbing the emails
try:
emails = {}
emails["type"] = "i3visio.email"
if type(info.emails) is not list:
aux = [info.emails]
emails["value"] = json.dumps(aux)
else:
emails["value"] = json.dumps(info.emails)
emails["attributes"] = []
new.append(emails)
except:
pass
# Grabbing the country
try:
tmp = {}
tmp["type"] = "i3visio.location.country"
tmp["value"] = str(info.country)
tmp["attributes"] = []
new.append(tmp)
except:
pass
# Grabbing the registrar
try:
tmp = {}
tmp["type"] = "i3visio.registrar"
tmp["value"] = str(info.registrar)
tmp["attributes"] = []
new.append(tmp)
except:
pass
# Grabbing the full name
try:
tmp = {}
tmp["type"] = "i3visio.fullname"
try:
tmp["value"] = str(info.name)
except:
tmp["value"] = info.name
tmp["attributes"] = []
new.append(tmp)
except:
pass
return new | python | def getWhoisInfo(domain):
"""
Method that tries to recover the whois info from a domain.
Args:
-----
domain: The domain to verify.
Returns:
--------
dict: A dictionary containing the result as an i3visio entity with its
`value`, `type` and `attributes`.
"""
new = []
# Grabbing the aliases
try:
emails = {}
emails["type"] = "i3visio.alias"
emails["value"] = str(domain.split(".")[0])
emails["attributes"] = []
new.append(emails)
except:
pass
info = whois.whois(domain)
if info.status == None:
raise Exception("UnknownDomainError: " + domain + " could not be resolved.")
# Grabbing the emails
try:
emails = {}
emails["type"] = "i3visio.email"
if type(info.emails) is not list:
aux = [info.emails]
emails["value"] = json.dumps(aux)
else:
emails["value"] = json.dumps(info.emails)
emails["attributes"] = []
new.append(emails)
except:
pass
# Grabbing the country
try:
tmp = {}
tmp["type"] = "i3visio.location.country"
tmp["value"] = str(info.country)
tmp["attributes"] = []
new.append(tmp)
except:
pass
# Grabbing the registrar
try:
tmp = {}
tmp["type"] = "i3visio.registrar"
tmp["value"] = str(info.registrar)
tmp["attributes"] = []
new.append(tmp)
except:
pass
# Grabbing the full name
try:
tmp = {}
tmp["type"] = "i3visio.fullname"
try:
tmp["value"] = str(info.name)
except:
tmp["value"] = info.name
tmp["attributes"] = []
new.append(tmp)
except:
pass
return new | [
"def",
"getWhoisInfo",
"(",
"domain",
")",
":",
"new",
"=",
"[",
"]",
"# Grabbing the aliases",
"try",
":",
"emails",
"=",
"{",
"}",
"emails",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.alias\"",
"emails",
"[",
"\"value\"",
"]",
"=",
"str",
"(",
"domain",
".",
"split",
"(",
"\".\"",
")",
"[",
"0",
"]",
")",
"emails",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"new",
".",
"append",
"(",
"emails",
")",
"except",
":",
"pass",
"info",
"=",
"whois",
".",
"whois",
"(",
"domain",
")",
"if",
"info",
".",
"status",
"==",
"None",
":",
"raise",
"Exception",
"(",
"\"UnknownDomainError: \"",
"+",
"domain",
"+",
"\" could not be resolved.\"",
")",
"# Grabbing the emails",
"try",
":",
"emails",
"=",
"{",
"}",
"emails",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.email\"",
"if",
"type",
"(",
"info",
".",
"emails",
")",
"is",
"not",
"list",
":",
"aux",
"=",
"[",
"info",
".",
"emails",
"]",
"emails",
"[",
"\"value\"",
"]",
"=",
"json",
".",
"dumps",
"(",
"aux",
")",
"else",
":",
"emails",
"[",
"\"value\"",
"]",
"=",
"json",
".",
"dumps",
"(",
"info",
".",
"emails",
")",
"emails",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"new",
".",
"append",
"(",
"emails",
")",
"except",
":",
"pass",
"# Grabbing the country",
"try",
":",
"tmp",
"=",
"{",
"}",
"tmp",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.location.country\"",
"tmp",
"[",
"\"value\"",
"]",
"=",
"str",
"(",
"info",
".",
"country",
")",
"tmp",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"new",
".",
"append",
"(",
"tmp",
")",
"except",
":",
"pass",
"# Grabbing the regitrar",
"try",
":",
"tmp",
"=",
"{",
"}",
"tmp",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.registrar\"",
"tmp",
"[",
"\"value\"",
"]",
"=",
"str",
"(",
"info",
".",
"registrar",
")",
"tmp",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"new",
".",
"append",
"(",
"tmp",
")",
"except",
":",
"pass",
"# Grabbing the regitrar",
"try",
":",
"tmp",
"=",
"{",
"}",
"tmp",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.fullname\"",
"try",
":",
"tmp",
"[",
"\"value\"",
"]",
"=",
"str",
"(",
"info",
".",
"name",
")",
"except",
":",
"tmp",
"[",
"\"value\"",
"]",
"=",
"info",
".",
"name",
"tmp",
"[",
"\"attributes\"",
"]",
"=",
"[",
"]",
"new",
".",
"append",
"(",
"tmp",
")",
"except",
":",
"pass",
"return",
"new"
] | Method that tries to recover the whois info from a domain.
Args:
-----
domain: The domain to verify.
Returns:
--------
dict: A dictionary containing the result as an i3visio entity with its
`value`, `type` and `attributes`. | [
"Method",
"that",
"trie",
"to",
"recover",
"the",
"whois",
"info",
"from",
"a",
"domain",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/domainfy.py#L73-L150 |
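A hedged usage sketch for the WHOIS lookup above. It assumes osrframework and the whois dependency it relies on are installed, that there is network connectivity, and that the queried domain is only an example; the call raises an exception when the domain cannot be resolved, so it is wrapped in a try/except.

```python
# Minimal sketch: WHOIS lookup (needs network access).
from osrframework.domainfy import getWhoisInfo

try:
    entities = getWhoisInfo("example.com")
    for ent in entities:
        print(ent["type"] + " -> " + str(ent["value"]))
except Exception as exc:
    # Raised, for instance, when the domain could not be resolved.
    print("Lookup failed: " + str(exc))
```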
5,445 | i3visio/osrframework | osrframework/domainfy.py | createDomains | def createDomains(tlds, nicks=None, nicksFile=None):
"""
Method that globally permits to generate the domains to be checked.
Args:
-----
tlds: List of tlds.
nicks: List of aliases.
nicksFile: The filepath to the aliases file.
Returns:
--------
list: list of domains to be checked.
"""
domain_candidates = []
if nicks != None:
for n in nicks:
for t in tlds:
tmp = {
"domain" : n + t["tld"],
"type" : t["type"],
"tld": t["tld"]
}
domain_candidates.append(tmp)
elif nicksFile != None:
with open(nicksFile, "r") as iF:
nicks = iF.read().splitlines()
for n in nicks:
for t in tlds:
tmp = {
"domain" : n + t["tld"],
"type" : t["type"],
"tld": t["tld"]
}
domain_candidates.append(tmp)
return domain_candidates | python | def createDomains(tlds, nicks=None, nicksFile=None):
"""
Method that globally permits to generate the domains to be checked.
Args:
-----
tlds: List of tlds.
nicks: List of aliases.
nicksFile: The filepath to the aliases file.
Returns:
--------
list: list of domains to be checked.
"""
domain_candidates = []
if nicks != None:
for n in nicks:
for t in tlds:
tmp = {
"domain" : n + t["tld"],
"type" : t["type"],
"tld": t["tld"]
}
domain_candidates.append(tmp)
elif nicksFile != None:
with open(nicksFile, "r") as iF:
nicks = iF.read().splitlines()
for n in nicks:
for t in tlds:
tmp = {
"domain" : n + t["tld"],
"type" : t["type"],
"tld": t["tld"]
}
domain_candidates.append(tmp)
return domain_candidates | [
"def",
"createDomains",
"(",
"tlds",
",",
"nicks",
"=",
"None",
",",
"nicksFile",
"=",
"None",
")",
":",
"domain_candidates",
"=",
"[",
"]",
"if",
"nicks",
"!=",
"None",
":",
"for",
"n",
"in",
"nicks",
":",
"for",
"t",
"in",
"tlds",
":",
"tmp",
"=",
"{",
"\"domain\"",
":",
"n",
"+",
"t",
"[",
"\"tld\"",
"]",
",",
"\"type\"",
":",
"t",
"[",
"\"type\"",
"]",
",",
"\"tld\"",
":",
"t",
"[",
"\"tld\"",
"]",
"}",
"domain_candidates",
".",
"append",
"(",
"tmp",
")",
"elif",
"nicksFile",
"!=",
"None",
":",
"with",
"open",
"(",
"nicksFile",
",",
"\"r\"",
")",
"as",
"iF",
":",
"nicks",
"=",
"iF",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"for",
"n",
"in",
"nicks",
":",
"for",
"t",
"in",
"tlds",
":",
"tmp",
"=",
"{",
"\"domain\"",
":",
"n",
"+",
"t",
"[",
"\"tld\"",
"]",
",",
"\"type\"",
":",
"t",
"[",
"\"type\"",
"]",
",",
"\"tld\"",
":",
"t",
"[",
"\"tld\"",
"]",
"}",
"domain_candidates",
".",
"append",
"(",
"tmp",
")",
"return",
"domain_candidates"
] | Method that globally permits to generate the domains to be checked.
Args:
-----
tlds: List of tlds.
nicks: List of aliases.
nicksFile: The filepath to the aliases file.
Returns:
--------
list: list of domains to be checked. | [
"Method",
"that",
"globally",
"permits",
"to",
"generate",
"the",
"domains",
"to",
"be",
"checked",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/domainfy.py#L153-L188 |
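The tlds argument above is a list of dictionaries carrying `tld` and `type` keys, mirroring the TLD table used elsewhere in domainfy; the values below are hand-made for illustration and osrframework is assumed to be importable.

```python
# Minimal sketch: build candidate domains for two aliases and two TLD entries.
from osrframework.domainfy import createDomains

tlds = [
    {"tld": ".com", "type": "global"},
    {"tld": ".org", "type": "global"},
]

for candidate in createDomains(tlds, nicks=["johndoe", "janedoe"]):
    print(candidate["domain"] + " (" + candidate["type"] + ")")
# -> johndoe.com, johndoe.org, janedoe.com, janedoe.org
```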
5,446 | i3visio/osrframework | osrframework/mailfy.py | weCanCheckTheseDomains | def weCanCheckTheseDomains(email):
"""
Method that verifies if a domain can be safely verified.
Args:
-----
email: the email whose domain will be verified.
Returns:
--------
bool: it represents whether the domain can be verified.
"""
# Known platform not to be working...
notWorking = [
"@aol.com",
"@bk.ru",
"@breakthru.com",
"@gmx.",
"@hotmail.co",
"@inbox.com",
"@latinmail.com",
"@libero.it",
"@mail.ru",
"@mail2tor.com",
"@outlook.com",
"@rambler.ru",
"@rocketmail.com",
"@starmedia.com",
"@ukr.net"
"@yahoo.",
"@ymail."
]
#notWorking = []
for n in notWorking:
if n in email:
print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(general.warning(email), "This domain CANNOT be verified using mailfy."))
return False
emailDomains = EMAIL_DOMAINS
safe = False
for e in EMAIL_DOMAINS:
if e in email:
safe = True
if not safe:
print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(general.warning(email), "This domain CANNOT be verified using mailfy."))
return False
return True | python | def weCanCheckTheseDomains(email):
"""
Method that verifies if a domain can be safely verified.
Args:
-----
email: the email whose domain will be verified.
Returns:
--------
bool: it represents whether the domain can be verified.
"""
# Known platform not to be working...
notWorking = [
"@aol.com",
"@bk.ru",
"@breakthru.com",
"@gmx.",
"@hotmail.co",
"@inbox.com",
"@latinmail.com",
"@libero.it",
"@mail.ru",
"@mail2tor.com",
"@outlook.com",
"@rambler.ru",
"@rocketmail.com",
"@starmedia.com",
"@ukr.net"
"@yahoo.",
"@ymail."
]
#notWorking = []
for n in notWorking:
if n in email:
print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(general.warning(email), "This domain CANNOT be verified using mailfy."))
return False
emailDomains = EMAIL_DOMAINS
safe = False
for e in EMAIL_DOMAINS:
if e in email:
safe = True
if not safe:
print("\t[*] Verification of '{}' aborted. Details:\n\t\t{}".format(general.warning(email), "This domain CANNOT be verified using mailfy."))
return False
return True | [
"def",
"weCanCheckTheseDomains",
"(",
"email",
")",
":",
"# Known platform not to be working...",
"notWorking",
"=",
"[",
"\"@aol.com\"",
",",
"\"@bk.ru\"",
",",
"\"@breakthru.com\"",
",",
"\"@gmx.\"",
",",
"\"@hotmail.co\"",
",",
"\"@inbox.com\"",
",",
"\"@latinmail.com\"",
",",
"\"@libero.it\"",
",",
"\"@mail.ru\"",
",",
"\"@mail2tor.com\"",
",",
"\"@outlook.com\"",
",",
"\"@rambler.ru\"",
",",
"\"@rocketmail.com\"",
",",
"\"@starmedia.com\"",
",",
"\"@ukr.net\"",
"\"@yahoo.\"",
",",
"\"@ymail.\"",
"]",
"#notWorking = []",
"for",
"n",
"in",
"notWorking",
":",
"if",
"n",
"in",
"email",
":",
"print",
"(",
"\"\\t[*] Verification of '{}' aborted. Details:\\n\\t\\t{}\"",
".",
"format",
"(",
"general",
".",
"warning",
"(",
"email",
")",
",",
"\"This domain CANNOT be verified using mailfy.\"",
")",
")",
"return",
"False",
"emailDomains",
"=",
"EMAIL_DOMAINS",
"safe",
"=",
"False",
"for",
"e",
"in",
"EMAIL_DOMAINS",
":",
"if",
"e",
"in",
"email",
":",
"safe",
"=",
"True",
"if",
"not",
"safe",
":",
"print",
"(",
"\"\\t[*] Verification of '{}' aborted. Details:\\n\\t\\t{}\"",
".",
"format",
"(",
"general",
".",
"warning",
"(",
"email",
")",
",",
"\"This domain CANNOT be verified using mailfy.\"",
")",
")",
"return",
"False",
"return",
"True"
] | Method that verifies if a domain can be safely verified.
Args:
-----
email: the email whose domain will be verified.
Returns:
--------
bool: it represents whether the domain can be verified. | [
"Method",
"that",
"verifies",
"if",
"a",
"domain",
"can",
"be",
"safely",
"verified",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/mailfy.py#L112-L161 |
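A quick sketch of the pre-check above: it returns False (and prints a warning) both for providers on the hard-coded blacklist and for domains missing from EMAIL_DOMAINS, so only supported providers reach the later SMTP verification. Which addresses pass therefore depends on the EMAIL_DOMAINS list shipped with mailfy; the addresses below are placeholders.

```python
# Minimal sketch: pre-filter addresses before trying to verify them.
from osrframework.mailfy import weCanCheckTheseDomains

for address in ["[email protected]", "[email protected]"]:
    if weCanCheckTheseDomains(address):
        print(address + " can be verified")
```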
5,447 | i3visio/osrframework | osrframework/mailfy.py | grabEmails | def grabEmails(emails=None, emailsFile=None, nicks=None, nicksFile=None, domains=EMAIL_DOMAINS, excludeDomains=[]):
"""
Method that generates a list of emails.
Args:
-----
emails: Any premade list of emails.
emailsFile: Filepath to the emails file (one per line).
nicks: A list of aliases.
nicksFile: Filepath to the aliases file (one per line).
domains: Domains where the aliases will be tested.
excludeDomains: Domains to be excluded from the created list.
Returns:
--------
list: the list of emails that will be verified.
"""
email_candidates = []
if emails != None:
email_candidates = emails
elif emailsFile != None:
# Reading the emails file
with open(emailsFile, "r") as iF:
email_candidates = iF.read().splitlines()
elif nicks != None:
# Iterating the list of nicks
for n in nicks:
# Iterating the list of possible domains to build the emails
for d in domains:
if d not in excludeDomains:
email_candidates.append(n+"@"+d)
elif nicksFile != None:
# Reading the list of nicks
with open(nicksFile, "r") as iF:
nicks = iF.read().splitlines()
# Iterating the list of nicks
for n in nicks:
# Iterating the list of possible domains to build the emails
for d in domains:
if d not in excludeDomains:
email_candidates.append(n+"@"+d)
return email_candidates | python | def grabEmails(emails=None, emailsFile=None, nicks=None, nicksFile=None, domains=EMAIL_DOMAINS, excludeDomains=[]):
"""
Method that generates a list of emails.
Args:
-----
emails: Any premade list of emails.
emailsFile: Filepath to the emails file (one per line).
nicks: A list of aliases.
nicksFile: Filepath to the aliases file (one per line).
domains: Domains where the aliases will be tested.
excludeDomains: Domains to be excluded from the created list.
Returns:
--------
list: the list of emails that will be verified.
"""
email_candidates = []
if emails != None:
email_candidates = emails
elif emailsFile != None:
# Reading the emails file
with open(emailsFile, "r") as iF:
email_candidates = iF.read().splitlines()
elif nicks != None:
# Iterating the list of nicks
for n in nicks:
# Iterating the list of possible domains to build the emails
for d in domains:
if d not in excludeDomains:
email_candidates.append(n+"@"+d)
elif nicksFile != None:
# Reading the list of nicks
with open(nicksFile, "r") as iF:
nicks = iF.read().splitlines()
# Iterating the list of nicks
for n in nicks:
# Iterating the list of possible domains to build the emails
for d in domains:
if d not in excludeDomains:
email_candidates.append(n+"@"+d)
return email_candidates | [
"def",
"grabEmails",
"(",
"emails",
"=",
"None",
",",
"emailsFile",
"=",
"None",
",",
"nicks",
"=",
"None",
",",
"nicksFile",
"=",
"None",
",",
"domains",
"=",
"EMAIL_DOMAINS",
",",
"excludeDomains",
"=",
"[",
"]",
")",
":",
"email_candidates",
"=",
"[",
"]",
"if",
"emails",
"!=",
"None",
":",
"email_candidates",
"=",
"emails",
"elif",
"emailsFile",
"!=",
"None",
":",
"# Reading the emails file",
"with",
"open",
"(",
"emailsFile",
",",
"\"r\"",
")",
"as",
"iF",
":",
"email_candidates",
"=",
"iF",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"elif",
"nicks",
"!=",
"None",
":",
"# Iterating the list of nicks",
"for",
"n",
"in",
"nicks",
":",
"# Iterating the list of possible domains to build the emails",
"for",
"d",
"in",
"domains",
":",
"if",
"d",
"not",
"in",
"excludeDomains",
":",
"email_candidates",
".",
"append",
"(",
"n",
"+",
"\"@\"",
"+",
"d",
")",
"elif",
"nicksFile",
"!=",
"None",
":",
"# Reading the list of nicks",
"with",
"open",
"(",
"nicksFile",
",",
"\"r\"",
")",
"as",
"iF",
":",
"nicks",
"=",
"iF",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"# Iterating the list of nicks",
"for",
"n",
"in",
"nicks",
":",
"# Iterating the list of possible domains to build the emails",
"for",
"d",
"in",
"domains",
":",
"if",
"d",
"not",
"in",
"excludeDomains",
":",
"email_candidates",
".",
"append",
"(",
"n",
"+",
"\"@\"",
"+",
"d",
")",
"return",
"email_candidates"
] | Method that generates a list of emails.
Args:
-----
emails: Any premade list of emails.
emailsFile: Filepath to the emails file (one per line).
nicks: A list of aliases.
nicksFile: Filepath to the aliases file (one per line).
domains: Domains where the aliases will be tested.
excludeDomains: Domains to be excluded from the created list.
Returns:
--------
list: the list of emails that will be verified. | [
"Method",
"that",
"generates",
"a",
"list",
"of",
"emails",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/mailfy.py#L164-L206 |
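A hedged sketch of the candidate generation above, combining two aliases with the default domain list while excluding a couple of providers; the exact output depends on the EMAIL_DOMAINS list defined in mailfy, and the aliases are placeholders.

```python
# Minimal sketch: build candidate addresses for two aliases.
from osrframework.mailfy import grabEmails

candidates = grabEmails(
    nicks=["johndoe", "janedoe"],
    excludeDomains=["hotmail.com", "yahoo.com"],
)
print(str(len(candidates)) + " candidates generated")
for email in candidates[:5]:
    print(email)
```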
5,448 | i3visio/osrframework | osrframework/mailfy.py | processMailList | def processMailList(platformNames=[], emails=[]):
"""
Method to perform the email search.
Args:
-----
platformNames: List of names of the platforms.
emails: List of numbers to be queried.
Return:
-------
A list of verified emails.
"""
# Grabbing the <Platform> objects
platforms = platform_selection.getPlatformsByName(platformNames, mode="mailfy")
results = []
for e in emails:
for pla in platforms:
# This returns a json.txt!
entities = pla.getInfo(query=e, mode="mailfy")
if entities != {}:
results += json.loads(entities)
return results | python | def processMailList(platformNames=[], emails=[]):
"""
Method to perform the email search.
Args:
-----
platformNames: List of names of the platforms.
emails: List of numbers to be queried.
Return:
-------
A list of verified emails.
"""
# Grabbing the <Platform> objects
platforms = platform_selection.getPlatformsByName(platformNames, mode="mailfy")
results = []
for e in emails:
for pla in platforms:
# This returns a json.txt!
entities = pla.getInfo(query=e, mode="mailfy")
if entities != {}:
results += json.loads(entities)
return results | [
"def",
"processMailList",
"(",
"platformNames",
"=",
"[",
"]",
",",
"emails",
"=",
"[",
"]",
")",
":",
"# Grabbing the <Platform> objects",
"platforms",
"=",
"platform_selection",
".",
"getPlatformsByName",
"(",
"platformNames",
",",
"mode",
"=",
"\"mailfy\"",
")",
"results",
"=",
"[",
"]",
"for",
"e",
"in",
"emails",
":",
"for",
"pla",
"in",
"platforms",
":",
"# This returns a json.txt!",
"entities",
"=",
"pla",
".",
"getInfo",
"(",
"query",
"=",
"e",
",",
"mode",
"=",
"\"mailfy\"",
")",
"if",
"entities",
"!=",
"{",
"}",
":",
"results",
"+=",
"json",
".",
"loads",
"(",
"entities",
")",
"return",
"results"
] | Method to perform the email search.
Args:
-----
platformNames: List of names of the platforms.
emails: List of numbers to be queried.
Return:
-------
A list of verified emails. | [
"Method",
"to",
"perform",
"the",
"email",
"search",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/mailfy.py#L209-L232 |
5,449 | i3visio/osrframework | osrframework/mailfy.py | pool_function | def pool_function(args):
"""
A wrapper for being able to launch all the threads.
We will use python-emailahoy library for the verification.
Args:
-----
args: reception of the parameters for getPageWrapper as a tuple.
Returns:
--------
A dictionary representing whether the verification was ended
successfully. The format is as follows:
```
{"platform": "str(domain["value"])", "status": "DONE", "data": aux}
```
"""
is_valid = True
try:
checker = emailahoy.VerifyEmail()
status, message = checker.verify_email_smtp(args, from_host='gmail.com', from_email='[email protected]')
if status == 250:
print("\t[*] Verification of '{}' status: {}. Details:\n\t\t{}".format(general.success(args), general.success("SUCCESS ({})".format(str(status))), message.replace('\n', '\n\t\t')))
is_valid = True
else:
print("\t[*] Verification of '{}' status: {}. Details:\n\t\t{}".format(general.error(args), general.error("FAILED ({})".format(str(status))), message.replace('\n', '\n\t\t')))
is_valid = False
except Exception, e:
print(general.warning("WARNING. An error was found when performing the search. You can omit this message.\n" + str(e)))
is_valid = False
aux = {}
aux["type"] = "i3visio.profile"
aux["value"] = "Email - " + args
aux["attributes"] = general.expandEntitiesFromEmail(args)
platform = aux["attributes"][2]["value"].title()
aux["attributes"].append({
"type": "i3visio.platform",
"value": platform,
"attributes": []
}
)
if is_valid:
return {"platform": platform, "status": "DONE", "data": aux}
else:
return {"platform": platform, "status": "DONE", "data": {}} | python | def pool_function(args):
"""
A wrapper for being able to launch all the threads.
We will use python-emailahoy library for the verification.
Args:
-----
args: reception of the parameters for getPageWrapper as a tuple.
Returns:
--------
A dictionary representing whether the verification was ended
successfully. The format is as follows:
```
{"platform": "str(domain["value"])", "status": "DONE", "data": aux}
```
"""
is_valid = True
try:
checker = emailahoy.VerifyEmail()
status, message = checker.verify_email_smtp(args, from_host='gmail.com', from_email='[email protected]')
if status == 250:
print("\t[*] Verification of '{}' status: {}. Details:\n\t\t{}".format(general.success(args), general.success("SUCCESS ({})".format(str(status))), message.replace('\n', '\n\t\t')))
is_valid = True
else:
print("\t[*] Verification of '{}' status: {}. Details:\n\t\t{}".format(general.error(args), general.error("FAILED ({})".format(str(status))), message.replace('\n', '\n\t\t')))
is_valid = False
except Exception, e:
print(general.warning("WARNING. An error was found when performing the search. You can omit this message.\n" + str(e)))
is_valid = False
aux = {}
aux["type"] = "i3visio.profile"
aux["value"] = "Email - " + args
aux["attributes"] = general.expandEntitiesFromEmail(args)
platform = aux["attributes"][2]["value"].title()
aux["attributes"].append({
"type": "i3visio.platform",
"value": platform,
"attributes": []
}
)
if is_valid:
return {"platform": platform, "status": "DONE", "data": aux}
else:
return {"platform": platform, "status": "DONE", "data": {}} | [
"def",
"pool_function",
"(",
"args",
")",
":",
"is_valid",
"=",
"True",
"try",
":",
"checker",
"=",
"emailahoy",
".",
"VerifyEmail",
"(",
")",
"status",
",",
"message",
"=",
"checker",
".",
"verify_email_smtp",
"(",
"args",
",",
"from_host",
"=",
"'gmail.com'",
",",
"from_email",
"=",
"'[email protected]'",
")",
"if",
"status",
"==",
"250",
":",
"print",
"(",
"\"\\t[*] Verification of '{}' status: {}. Details:\\n\\t\\t{}\"",
".",
"format",
"(",
"general",
".",
"success",
"(",
"args",
")",
",",
"general",
".",
"success",
"(",
"\"SUCCESS ({})\"",
".",
"format",
"(",
"str",
"(",
"status",
")",
")",
")",
",",
"message",
".",
"replace",
"(",
"'\\n'",
",",
"'\\n\\t\\t'",
")",
")",
")",
"is_valid",
"=",
"True",
"else",
":",
"print",
"(",
"\"\\t[*] Verification of '{}' status: {}. Details:\\n\\t\\t{}\"",
".",
"format",
"(",
"general",
".",
"error",
"(",
"args",
")",
",",
"general",
".",
"error",
"(",
"\"FAILED ({})\"",
".",
"format",
"(",
"str",
"(",
"status",
")",
")",
")",
",",
"message",
".",
"replace",
"(",
"'\\n'",
",",
"'\\n\\t\\t'",
")",
")",
")",
"is_valid",
"=",
"False",
"except",
"Exception",
",",
"e",
":",
"print",
"(",
"general",
".",
"warning",
"(",
"\"WARNING. An error was found when performing the search. You can omit this message.\\n\"",
"+",
"str",
"(",
"e",
")",
")",
")",
"is_valid",
"=",
"False",
"aux",
"=",
"{",
"}",
"aux",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.profile\"",
"aux",
"[",
"\"value\"",
"]",
"=",
"\"Email - \"",
"+",
"args",
"aux",
"[",
"\"attributes\"",
"]",
"=",
"general",
".",
"expandEntitiesFromEmail",
"(",
"args",
")",
"platform",
"=",
"aux",
"[",
"\"attributes\"",
"]",
"[",
"2",
"]",
"[",
"\"value\"",
"]",
".",
"title",
"(",
")",
"aux",
"[",
"\"attributes\"",
"]",
".",
"append",
"(",
"{",
"\"type\"",
":",
"\"i3visio.platform\"",
",",
"\"value\"",
":",
"platform",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
")",
"if",
"is_valid",
":",
"return",
"{",
"\"platform\"",
":",
"platform",
",",
"\"status\"",
":",
"\"DONE\"",
",",
"\"data\"",
":",
"aux",
"}",
"else",
":",
"return",
"{",
"\"platform\"",
":",
"platform",
",",
"\"status\"",
":",
"\"DONE\"",
",",
"\"data\"",
":",
"{",
"}",
"}"
] | A wrapper for being able to launch all the threads.
We will use python-emailahoy library for the verification.
Args:
-----
args: reception of the parameters for getPageWrapper as a tuple.
Returns:
--------
A dictionary representing whether the verification was ended
successfully. The format is as follows:
```
{"platform": "str(domain["value"])", "status": "DONE", "data": aux}
``` | [
"A",
"wrapper",
"for",
"being",
"able",
"to",
"launch",
"all",
"the",
"threads",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/mailfy.py#L235-L283 |
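pool_function is written as a worker that checks one address per call, so it is typically fanned out over a multiprocessing pool; the sketch below shows that pattern. It assumes the python-emailahoy dependency is installed, that outbound SMTP traffic is allowed (often blocked on consumer connections), and that the code runs under the Python 2 interpreter the module targets; the addresses are placeholders.

```python
# Minimal sketch: dispatch the verification worker over a small process pool.
from multiprocessing import Pool

from osrframework.mailfy import pool_function

emails = ["[email protected]", "[email protected]"]

pool = Pool(processes=2)
results = pool.map(pool_function, emails)
pool.close()
pool.join()

# Each result carries "platform", "status" and "data"; "data" is {} on failure.
verified = [r["data"] for r in results if r["data"] != {}]
print(str(len(verified)) + " address(es) seem to exist")
```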
5,450 | i3visio/osrframework | osrframework/utils/browser.py | Browser.recoverURL | def recoverURL(self, url):
"""
Public method to recover a resource.
Args:
-----
url: The URL to be collected.
Returns:
--------
Returns a resource that has to be read, for instance, with html = self.br.read()
"""
# Configuring user agents...
self.setUserAgent()
# Configuring proxies
if "https://" in url:
self.setProxy(protocol = "https")
else:
self.setProxy(protocol = "http")
# Giving special treatment for .onion platforms
if ".onion" in url:
try:
# TODO: configuring manually the tor bundle
pass
except:
# TODO: capturing the error and eventually trying the tor2web approach
#url = url.replace(".onion", ".tor2web.org")
pass
url = url.replace(".onion", ".onion.cab")
# Opening the resource
try:
recurso = self.br.open(url)
except:
# Something happened. Maybe the request was forbidden?
return None
html = recurso.read()
return html | python | def recoverURL(self, url):
"""
Public method to recover a resource.
Args:
-----
url: The URL to be collected.
Returns:
--------
Returns a resource that has to be read, for instance, with html = self.br.read()
"""
# Configuring user agents...
self.setUserAgent()
# Configuring proxies
if "https://" in url:
self.setProxy(protocol = "https")
else:
self.setProxy(protocol = "http")
# Giving special treatment for .onion platforms
if ".onion" in url:
try:
# TODO: configuring manually the tor bundle
pass
except:
# TODO: capturing the error and eventually trying the tor2web approach
#url = url.replace(".onion", ".tor2web.org")
pass
url = url.replace(".onion", ".onion.cab")
# Opening the resource
try:
recurso = self.br.open(url)
except:
# Something happened. Maybe the request was forbidden?
return None
html = recurso.read()
return html | [
"def",
"recoverURL",
"(",
"self",
",",
"url",
")",
":",
"# Configuring user agents...",
"self",
".",
"setUserAgent",
"(",
")",
"# Configuring proxies",
"if",
"\"https://\"",
"in",
"url",
":",
"self",
".",
"setProxy",
"(",
"protocol",
"=",
"\"https\"",
")",
"else",
":",
"self",
".",
"setProxy",
"(",
"protocol",
"=",
"\"http\"",
")",
"# Giving special treatment for .onion platforms",
"if",
"\".onion\"",
"in",
"url",
":",
"try",
":",
"# TODO: configuring manually the tor bundle",
"pass",
"except",
":",
"# TODO: capturing the error and eventually trying the tor2web approach",
"#url = url.replace(\".onion\", \".tor2web.org\")",
"pass",
"url",
"=",
"url",
".",
"replace",
"(",
"\".onion\"",
",",
"\".onion.cab\"",
")",
"# Opening the resource",
"try",
":",
"recurso",
"=",
"self",
".",
"br",
".",
"open",
"(",
"url",
")",
"except",
":",
"# Something happened. Maybe the request was forbidden?",
"return",
"None",
"html",
"=",
"recurso",
".",
"read",
"(",
")",
"return",
"html"
] | Public method to recover a resource.
Args:
-----
url: The URL to be collected.
Returns:
--------
Returns a resource that has to be read, for instance, with html = self.br.read() | [
"Public",
"method",
"to",
"recover",
"a",
"resource",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/browser.py#L141-L182 |
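A hedged usage sketch for the mechanize-based Browser wrapper above. It assumes the Browser class can be instantiated without extra configuration and that its dependencies are installed; recoverURL returns the page body as a string, or None when the request failed.

```python
# Minimal sketch: fetch a page through the wrapper (None signals a failure).
from osrframework.utils.browser import Browser

br = Browser()
html = br.recoverURL("http://example.com")
if html is None:
    print("The resource could not be retrieved")
else:
    print("Downloaded " + str(len(html)) + " bytes")
```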
5,451 | i3visio/osrframework | osrframework/utils/browser.py | Browser.setNewPassword | def setNewPassword(self, url, username, password):
"""
Public method to manually set the credentials for a url in the browser.
"""
self.br.add_password(url, username, password) | python | def setNewPassword(self, url, username, password):
"""
Public method to manually set the credentials for a url in the browser.
"""
self.br.add_password(url, username, password) | [
"def",
"setNewPassword",
"(",
"self",
",",
"url",
",",
"username",
",",
"password",
")",
":",
"self",
".",
"br",
".",
"add_password",
"(",
"url",
",",
"username",
",",
"password",
")"
] | Public method to manually set the credentials for a url in the browser. | [
"Public",
"method",
"to",
"manually",
"set",
"the",
"credentials",
"for",
"a",
"url",
"in",
"the",
"browser",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/browser.py#L184-L188 |
5,452 | i3visio/osrframework | osrframework/utils/browser.py | Browser.setProxy | def setProxy(self, protocol="http"):
"""
Public method to set a proxy for the browser.
"""
# Setting proxy
try:
new = { protocol: self.proxies[protocol]}
self.br.set_proxies( new )
except:
# No proxy defined for that protocol
pass | python | def setProxy(self, protocol="http"):
"""
Public method to set a proxy for the browser.
"""
# Setting proxy
try:
new = { protocol: self.proxies[protocol]}
self.br.set_proxies( new )
except:
# No proxy defined for that protocol
pass | [
"def",
"setProxy",
"(",
"self",
",",
"protocol",
"=",
"\"http\"",
")",
":",
"# Setting proxy",
"try",
":",
"new",
"=",
"{",
"protocol",
":",
"self",
".",
"proxies",
"[",
"protocol",
"]",
"}",
"self",
".",
"br",
".",
"set_proxies",
"(",
"new",
")",
"except",
":",
"# No proxy defined for that protocol",
"pass"
] | Public method to set a proxy for the browser. | [
"Public",
"method",
"to",
"set",
"a",
"proxy",
"for",
"the",
"browser",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/browser.py#L190-L200 |
5,453 | i3visio/osrframework | osrframework/utils/browser.py | Browser.setUserAgent | def setUserAgent(self, uA=None):
"""
This method will be called whenever a new query will be executed.
:param uA: Any User Agent that was needed to be inserted. This parameter is optional.
:return: Returns True if a User Agent was inserted and False if no User Agent could be inserted.
"""
logger = logging.getLogger("osrframework.utils")
if not uA:
# Setting the User Agents
if self.userAgents:
# User-Agent (this is cheating, ok?)
logger = logging.debug("Selecting a new random User Agent.")
uA = random.choice(self.userAgents)
else:
logger = logging.debug("No user agent was inserted.")
return False
#logger.debug("Setting the user agent:\t" + str(uA))
self.br.addheaders = [ ('User-agent', uA), ]
#self.br.addheaders = [('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'), ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'), ('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'), ('Accept-Encoding', 'none'), ('Accept-Language', 'es-es,es;q=0.8'), ('Connection', 'keep-alive')]
#self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
return True | python | def setUserAgent(self, uA=None):
"""
This method will be called whenever a new query will be executed.
:param uA: Any User Agent that was needed to be inserted. This parameter is optional.
:return: Returns True if a User Agent was inserted and False if no User Agent could be inserted.
"""
logger = logging.getLogger("osrframework.utils")
if not uA:
# Setting the User Agents
if self.userAgents:
# User-Agent (this is cheating, ok?)
logger = logging.debug("Selecting a new random User Agent.")
uA = random.choice(self.userAgents)
else:
logger = logging.debug("No user agent was inserted.")
return False
#logger.debug("Setting the user agent:\t" + str(uA))
self.br.addheaders = [ ('User-agent', uA), ]
#self.br.addheaders = [('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'), ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'), ('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'), ('Accept-Encoding', 'none'), ('Accept-Language', 'es-es,es;q=0.8'), ('Connection', 'keep-alive')]
#self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
return True | [
"def",
"setUserAgent",
"(",
"self",
",",
"uA",
"=",
"None",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"\"osrframework.utils\"",
")",
"if",
"not",
"uA",
":",
"# Setting the User Agents",
"if",
"self",
".",
"userAgents",
":",
"# User-Agent (this is cheating, ok?)",
"logger",
"=",
"logging",
".",
"debug",
"(",
"\"Selecting a new random User Agent.\"",
")",
"uA",
"=",
"random",
".",
"choice",
"(",
"self",
".",
"userAgents",
")",
"else",
":",
"logger",
"=",
"logging",
".",
"debug",
"(",
"\"No user agent was inserted.\"",
")",
"return",
"False",
"#logger.debug(\"Setting the user agent:\\t\" + str(uA))",
"self",
".",
"br",
".",
"addheaders",
"=",
"[",
"(",
"'User-agent'",
",",
"uA",
")",
",",
"]",
"#self.br.addheaders = [('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'), ('Accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'), ('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.3'), ('Accept-Encoding', 'none'), ('Accept-Language', 'es-es,es;q=0.8'), ('Connection', 'keep-alive')]",
"#self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)",
"return",
"True"
] | This method will be called whenever a new query will be executed.
:param uA: Any User Agent that was needed to be inserted. This parameter is optional.
:return: Returns True if a User Agent was inserted and False if no User Agent could be inserted. | [
"This",
"method",
"will",
"be",
"called",
"whenever",
"a",
"new",
"query",
"will",
"be",
"executed",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/browser.py#L202-L228 |
5,454 | i3visio/osrframework | osrframework/api/twitter_api.py | main | def main(args):
"""
Query manager.
"""
# Creating the instance
tAW = TwitterAPIWrapper()
# Selecting the query to be launched
if args.type == "get_all_docs":
results = tAW.get_all_docs(args.query)
elif args.type == "get_user":
results = tAW.get_user(args.query)
elif args.type == "get_followers":
results = tAW.get_followers(args.query)
print "... %s followers downloaded... " % (len(results))
#write the csv
with open('%s_followers.csv' % args.query, 'wb') as f:
writer = csv.writer(f)
for r in results:
writer.writerow([args.query,str(r)])
elif args.type == "get_friends":
results = tAW.get_friends(args.query)
print "... %s friends downloaded... " % (len(results))
#write the csv
with open('%s_friends.csv' % args.query, 'wb') as f:
writer = csv.writer(f)
for r in results:
writer.writerow([args.query,str(r)])
elif args.type == "search_users":
results = tAW.search_users(args.query)
return results | python | def main(args):
"""
Query manager.
"""
# Creating the instance
tAW = TwitterAPIWrapper()
# Selecting the query to be launched
if args.type == "get_all_docs":
results = tAW.get_all_docs(args.query)
elif args.type == "get_user":
results = tAW.get_user(args.query)
elif args.type == "get_followers":
results = tAW.get_followers(args.query)
print "... %s followers downloaded... " % (len(results))
#write the csv
with open('%s_followers.csv' % args.query, 'wb') as f:
writer = csv.writer(f)
for r in results:
writer.writerow([args.query,str(r)])
elif args.type == "get_friends":
results = tAW.get_friends(args.query)
print "... %s friends downloaded... " % (len(results))
#write the csv
with open('%s_friends.csv' % args.query, 'wb') as f:
writer = csv.writer(f)
for r in results:
writer.writerow([args.query,str(r)])
elif args.type == "search_users":
results = tAW.search_users(args.query)
return results | [
"def",
"main",
"(",
"args",
")",
":",
"# Creating the instance",
"tAW",
"=",
"TwitterAPIWrapper",
"(",
")",
"# Selecting the query to be launched",
"if",
"args",
".",
"type",
"==",
"\"get_all_docs\"",
":",
"results",
"=",
"tAW",
".",
"get_all_docs",
"(",
"args",
".",
"query",
")",
"elif",
"args",
".",
"type",
"==",
"\"get_user\"",
":",
"results",
"=",
"tAW",
".",
"get_user",
"(",
"args",
".",
"query",
")",
"elif",
"args",
".",
"type",
"==",
"\"get_followers\"",
":",
"results",
"=",
"tAW",
".",
"get_followers",
"(",
"args",
".",
"query",
")",
"print",
"\"... %s followers downloaded... \"",
"%",
"(",
"len",
"(",
"results",
")",
")",
"#write the csv",
"with",
"open",
"(",
"'%s_followers.csv'",
"%",
"args",
".",
"query",
",",
"'wb'",
")",
"as",
"f",
":",
"writer",
"=",
"csv",
".",
"writer",
"(",
"f",
")",
"for",
"r",
"in",
"results",
":",
"writer",
".",
"writerow",
"(",
"[",
"args",
".",
"query",
",",
"str",
"(",
"r",
")",
"]",
")",
"elif",
"args",
".",
"type",
"==",
"\"get_friends\"",
":",
"results",
"=",
"tAW",
".",
"get_friends",
"(",
"args",
".",
"query",
")",
"print",
"\"... %s friends downloaded... \"",
"%",
"(",
"len",
"(",
"results",
")",
")",
"#write the csv",
"with",
"open",
"(",
"'%s_friends.csv'",
"%",
"args",
".",
"query",
",",
"'wb'",
")",
"as",
"f",
":",
"writer",
"=",
"csv",
".",
"writer",
"(",
"f",
")",
"for",
"r",
"in",
"results",
":",
"writer",
".",
"writerow",
"(",
"[",
"args",
".",
"query",
",",
"str",
"(",
"r",
")",
"]",
")",
"elif",
"args",
".",
"type",
"==",
"\"search_users\"",
":",
"results",
"=",
"tAW",
".",
"search_users",
"(",
"args",
".",
"query",
")",
"return",
"results"
] | Query manager. | [
"Query",
"manager",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L775-L811 |
5,455 | i3visio/osrframework | osrframework/api/twitter_api.py | TwitterAPIWrapper._rate_limit_status | def _rate_limit_status(self, api=None, mode=None):
"""
Verifying the API limits
"""
if api == None:
api = self.connectToAPI()
if mode == None:
print json.dumps(api.rate_limit_status(), indent=2)
raw_input("<Press ENTER>")
else:
# Testing if we have enough queries
while True:
allLimits = api.rate_limit_status()
if mode == "get_user":
limit = allLimits["resources"]["users"]["/users/show/:id"]["limit"]
remaining = allLimits["resources"]["users"]["/users/show/:id"]["remaining"]
reset = allLimits["resources"]["users"]["/users/show/:id"]["reset"]
elif mode == "get_followers":
limit = allLimits["resources"]["followers"]["/followers/ids"]["limit"]
remaining = allLimits["resources"]["followers"]["/followers/ids"]["remaining"]
reset = allLimits["resources"]["followers"]["/followers/ids"]["reset"]
elif mode == "get_friends":
limit = allLimits["resources"]["friends"]["/friends/ids"]["limit"]
remaining = allLimits["resources"]["friends"]["/friends/ids"]["remaining"]
reset = allLimits["resources"]["friends"]["/friends/ids"]["reset"]
elif mode == "search_users":
limit = allLimits["resources"]["users"]["/users/search"]["limit"]
remaining = allLimits["resources"]["users"]["/users/search"]["remaining"]
reset = allLimits["resources"]["users"]["/users/search"]["reset"]
else:
remaining = 1
"""elif mode == "get_all_docs":
limit = allLimits["resources"]REPLACEME["limit"]
remaining = allLimits["resources"]REPLACEME["remaining"]
reset = allLimits["resources"]REPLACEME["reset"]"""
"""elif mode == "get_users":
limit = allLimits["resources"]REPLACEME["limit"]
remaining = allLimits["resources"]REPLACEME["remaining"]
reset = allLimits["resources"]REPLACEME["reset"] """
"""else:
remaining = 1"""
# Checking if we have enough remaining queries
if remaining > 0:
#raw_input(str(remaining) + " queries yet...")
break
else:
waitTime = 60
print "No more queries remaining, sleeping for " + str(waitTime) +" seconds..."
time.sleep(waitTime)
return 0 | python | def _rate_limit_status(self, api=None, mode=None):
"""
Verifying the API limits
"""
if api == None:
api = self.connectToAPI()
if mode == None:
print json.dumps(api.rate_limit_status(), indent=2)
raw_input("<Press ENTER>")
else:
# Testing if we have enough queries
while True:
allLimits = api.rate_limit_status()
if mode == "get_user":
limit = allLimits["resources"]["users"]["/users/show/:id"]["limit"]
remaining = allLimits["resources"]["users"]["/users/show/:id"]["remaining"]
reset = allLimits["resources"]["users"]["/users/show/:id"]["reset"]
elif mode == "get_followers":
limit = allLimits["resources"]["followers"]["/followers/ids"]["limit"]
remaining = allLimits["resources"]["followers"]["/followers/ids"]["remaining"]
reset = allLimits["resources"]["followers"]["/followers/ids"]["reset"]
elif mode == "get_friends":
limit = allLimits["resources"]["friends"]["/friends/ids"]["limit"]
remaining = allLimits["resources"]["friends"]["/friends/ids"]["remaining"]
reset = allLimits["resources"]["friends"]["/friends/ids"]["reset"]
elif mode == "search_users":
limit = allLimits["resources"]["users"]["/users/search"]["limit"]
remaining = allLimits["resources"]["users"]["/users/search"]["remaining"]
reset = allLimits["resources"]["users"]["/users/search"]["reset"]
else:
remaining = 1
"""elif mode == "get_all_docs":
limit = allLimits["resources"]REPLACEME["limit"]
remaining = allLimits["resources"]REPLACEME["remaining"]
reset = allLimits["resources"]REPLACEME["reset"]"""
"""elif mode == "get_users":
limit = allLimits["resources"]REPLACEME["limit"]
remaining = allLimits["resources"]REPLACEME["remaining"]
reset = allLimits["resources"]REPLACEME["reset"] """
"""else:
remaining = 1"""
# Checking if we have enough remaining queries
if remaining > 0:
#raw_input(str(remaining) + " queries yet...")
break
else:
waitTime = 60
print "No more queries remaining, sleeping for " + str(waitTime) +" seconds..."
time.sleep(waitTime)
return 0 | [
"def",
"_rate_limit_status",
"(",
"self",
",",
"api",
"=",
"None",
",",
"mode",
"=",
"None",
")",
":",
"if",
"api",
"==",
"None",
":",
"api",
"=",
"self",
".",
"connectToAPI",
"(",
")",
"if",
"mode",
"==",
"None",
":",
"print",
"json",
".",
"dumps",
"(",
"api",
".",
"rate_limit_status",
"(",
")",
",",
"indent",
"=",
"2",
")",
"raw_input",
"(",
"\"<Press ENTER>\"",
")",
"else",
":",
"# Testing if we have enough queries",
"while",
"True",
":",
"allLimits",
"=",
"api",
".",
"rate_limit_status",
"(",
")",
"if",
"mode",
"==",
"\"get_user\"",
":",
"limit",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"users\"",
"]",
"[",
"\"/users/show/:id\"",
"]",
"[",
"\"limit\"",
"]",
"remaining",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"users\"",
"]",
"[",
"\"/users/show/:id\"",
"]",
"[",
"\"remaining\"",
"]",
"reset",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"users\"",
"]",
"[",
"\"/users/show/:id\"",
"]",
"[",
"\"reset\"",
"]",
"elif",
"mode",
"==",
"\"get_followers\"",
":",
"limit",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"followers\"",
"]",
"[",
"\"/followers/ids\"",
"]",
"[",
"\"limit\"",
"]",
"remaining",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"followers\"",
"]",
"[",
"\"/followers/ids\"",
"]",
"[",
"\"remaining\"",
"]",
"reset",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"followers\"",
"]",
"[",
"\"/followers/ids\"",
"]",
"[",
"\"reset\"",
"]",
"elif",
"mode",
"==",
"\"get_friends\"",
":",
"limit",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"friends\"",
"]",
"[",
"\"/friends/ids\"",
"]",
"[",
"\"limit\"",
"]",
"remaining",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"friends\"",
"]",
"[",
"\"/friends/ids\"",
"]",
"[",
"\"remaining\"",
"]",
"reset",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"friends\"",
"]",
"[",
"\"/friends/ids\"",
"]",
"[",
"\"reset\"",
"]",
"elif",
"mode",
"==",
"\"search_users\"",
":",
"limit",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"users\"",
"]",
"[",
"\"/users/search\"",
"]",
"[",
"\"limit\"",
"]",
"remaining",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"users\"",
"]",
"[",
"\"/users/search\"",
"]",
"[",
"\"remaining\"",
"]",
"reset",
"=",
"allLimits",
"[",
"\"resources\"",
"]",
"[",
"\"users\"",
"]",
"[",
"\"/users/search\"",
"]",
"[",
"\"reset\"",
"]",
"else",
":",
"remaining",
"=",
"1",
"\"\"\"elif mode == \"get_all_docs\":\n limit = allLimits[\"resources\"]REPLACEME[\"limit\"]\n remaining = allLimits[\"resources\"]REPLACEME[\"remaining\"]\n reset = allLimits[\"resources\"]REPLACEME[\"reset\"]\"\"\"",
"\"\"\"elif mode == \"get_users\":\n limit = allLimits[\"resources\"]REPLACEME[\"limit\"]\n remaining = allLimits[\"resources\"]REPLACEME[\"remaining\"]\n reset = allLimits[\"resources\"]REPLACEME[\"reset\"] \"\"\"",
"\"\"\"else:\n remaining = 1\"\"\"",
"# Checking if we have enough remaining queries",
"if",
"remaining",
">",
"0",
":",
"#raw_input(str(remaining) + \" queries yet...\")",
"break",
"else",
":",
"waitTime",
"=",
"60",
"print",
"\"No more queries remaining, sleeping for \"",
"+",
"str",
"(",
"waitTime",
")",
"+",
"\" seconds...\"",
"time",
".",
"sleep",
"(",
"waitTime",
")",
"return",
"0"
] | Verifying the API limits | [
"Verifying",
"the",
"API",
"limits"
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L62-L113 |
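`_rate_limit_status` above polls `api.rate_limit_status()` and walks a nested dict keyed by resource family and endpoint path before deciding whether to sleep. The lookup itself, as a self-contained sketch over hand-written sample data (no network involved):

# Sketch with assumed sample data; the endpoint keys mirror the ones read
# for the "get_followers" mode above, the numbers are made up.
sample_limits = {
    "resources": {
        "followers": {
            "/followers/ids": {"limit": 15, "remaining": 0, "reset": 1500000000}
        }
    }
}

def has_quota(all_limits, resource="followers", endpoint="/followers/ids"):
    bucket = all_limits["resources"][resource][endpoint]
    return bucket["remaining"] > 0

if not has_quota(sample_limits):
    print("No more queries remaining, a real caller would sleep and retry")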
5,456 | i3visio/osrframework | osrframework/api/twitter_api.py | TwitterAPIWrapper.get_followers | def get_followers(self, query):
"""
Method to get the followers of a user.
:param query: Query to be performed.
:return: List of ids.
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="get_followers")
# Making the call to the API
try:
friends_ids = api.followers_ids(query)
except:
return []
"""res = []
# Extracting the information from each profile
for a in aux:
us= self.getUser(a)
res.append(self._processUser(us))"""
return friends_ids | python | def get_followers(self, query):
"""
Method to get the followers of a user.
:param query: Query to be performed.
:return: List of ids.
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="get_followers")
# Making the call to the API
try:
friends_ids = api.followers_ids(query)
except:
return []
"""res = []
# Extracting the information from each profile
for a in aux:
us= self.getUser(a)
res.append(self._processUser(us))"""
return friends_ids | [
"def",
"get_followers",
"(",
"self",
",",
"query",
")",
":",
"# Connecting to the API",
"api",
"=",
"self",
".",
"_connectToAPI",
"(",
")",
"# Verifying the limits of the API",
"self",
".",
"_rate_limit_status",
"(",
"api",
"=",
"api",
",",
"mode",
"=",
"\"get_followers\"",
")",
"# Making the call to the API",
"try",
":",
"friends_ids",
"=",
"api",
".",
"followers_ids",
"(",
"query",
")",
"except",
":",
"return",
"[",
"]",
"\"\"\"res = []\n # Extracting the information from each profile\n for a in aux:\n us= self.getUser(a)\n res.append(self._processUser(us))\"\"\"",
"return",
"friends_ids"
] | Method to get the followers of a user.
:param query: Query to be performed.
:return: List of ids. | [
"Method",
"to",
"get",
"the",
"followers",
"of",
"a",
"user",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L641-L667 |
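`get_followers` above wraps the call in a bare `except:` and returns an empty list on any failure. Shown only as a sketch, a narrower variant that catches the tweepy exception class this module already uses elsewhere (running it requires real credentials and tweepy 3.x):

# Sketch, not project code: only tweepy errors map to an empty result, so
# unrelated programming errors still surface.
import tweepy

def safe_followers_ids(api, screen_name):
    try:
        return api.followers_ids(screen_name)
    except tweepy.error.TweepError:
        return []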
5,457 | i3visio/osrframework | osrframework/api/twitter_api.py | TwitterAPIWrapper.get_friends | def get_friends(self, query):
"""
Method to get the friends of a user.
:param query: Query to be performed.
:return: List of ids.
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="get_friends")
# Making the call to the API
try:
friends_ids = api.friends_ids(query)
except:
return []
"""res = []
# Extracting the information from each profile
for a in aux:
us= self.getUser(a)
res.append(self._processUser(us))"""
return friends_ids | python | def get_friends(self, query):
"""
Method to get the friends of a user.
:param query: Query to be performed.
:return: List of ids.
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="get_friends")
# Making the call to the API
try:
friends_ids = api.friends_ids(query)
except:
return []
"""res = []
# Extracting the information from each profile
for a in aux:
us= self.getUser(a)
res.append(self._processUser(us))"""
return friends_ids | [
"def",
"get_friends",
"(",
"self",
",",
"query",
")",
":",
"# Connecting to the API",
"api",
"=",
"self",
".",
"_connectToAPI",
"(",
")",
"# Verifying the limits of the API",
"self",
".",
"_rate_limit_status",
"(",
"api",
"=",
"api",
",",
"mode",
"=",
"\"get_friends\"",
")",
"# Making the call to the API",
"try",
":",
"friends_ids",
"=",
"api",
".",
"friends_ids",
"(",
"query",
")",
"except",
":",
"return",
"[",
"]",
"\"\"\"res = []\n # Extracting the information from each profile\n for a in aux:\n us= self.getUser(a)\n res.append(self._processUser(us))\"\"\"",
"return",
"friends_ids"
] | Method to get the friends of a user.
:param query: Query to be performed.
:return: List of ids. | [
"Method",
"to",
"get",
"the",
"friends",
"of",
"a",
"user",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L669-L695 |
5,458 | i3visio/osrframework | osrframework/api/twitter_api.py | TwitterAPIWrapper.get_user | def get_user(self, screen_name):
"""
Method to perform the usufy searches.
:param screen_name: nickname to be searched.
:return: User.
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="get_user")
aux = []
try:
user = api.get_user(screen_name)
# Iterate through the results using user._json
aux.append(user._json)
except tweepy.error.TweepError as e:
pass
res = []
# Extracting the information from each profile
for a in aux:
res.append(self._processUser(a))
return res | python | def get_user(self, screen_name):
"""
Method to perform the usufy searches.
:param screen_name: nickname to be searched.
:return: User.
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="get_user")
aux = []
try:
user = api.get_user(screen_name)
# Iterate through the results using user._json
aux.append(user._json)
except tweepy.error.TweepError as e:
pass
res = []
# Extracting the information from each profile
for a in aux:
res.append(self._processUser(a))
return res | [
"def",
"get_user",
"(",
"self",
",",
"screen_name",
")",
":",
"# Connecting to the API",
"api",
"=",
"self",
".",
"_connectToAPI",
"(",
")",
"# Verifying the limits of the API",
"self",
".",
"_rate_limit_status",
"(",
"api",
"=",
"api",
",",
"mode",
"=",
"\"get_user\"",
")",
"aux",
"=",
"[",
"]",
"try",
":",
"user",
"=",
"api",
".",
"get_user",
"(",
"screen_name",
")",
"# Iterate through the results using user._json",
"aux",
".",
"append",
"(",
"user",
".",
"_json",
")",
"except",
"tweepy",
".",
"error",
".",
"TweepError",
"as",
"e",
":",
"pass",
"res",
"=",
"[",
"]",
"# Extracting the information from each profile",
"for",
"a",
"in",
"aux",
":",
"res",
".",
"append",
"(",
"self",
".",
"_processUser",
"(",
"a",
")",
")",
"return",
"res"
] | Method to perform the usufy searches.
:param screen_name: nickname to be searched.
:return: User. | [
"Method",
"to",
"perform",
"the",
"usufy",
"searches",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L698-L724 |
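`get_user` above collects the raw profile via `user._json` and hands it to `self._processUser`. Working with such a profile dict, sketched with fabricated data (real profiles carry many more keys):

# Sketch with made-up data: pulling a few common keys out of a raw
# Twitter profile dict like the ones get_user() gathers via user._json.
sample_profile = {
    "id": 12345,
    "screen_name": "example_user",
    "name": "Example User",
    "followers_count": 42,
}

def summarise(profile):
    return "%s (@%s), %d followers" % (
        profile.get("name", "?"),
        profile.get("screen_name", "?"),
        profile.get("followers_count", 0),
    )

print(summarise(sample_profile))  # Example User (@example_user), 42 followers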
5,459 | i3visio/osrframework | osrframework/api/twitter_api.py | TwitterAPIWrapper.search_users | def search_users(self, query, n=20, maxUsers=60):
"""
Method to perform the searchfy searches.
:param query: Query to be performed.
:param n: Number of results per query.
:param maxUsers: Max. number of users to be recovered.
:return: List of users
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="search_users")
aux = []
page = 0
# print "Getting page %s of new users..." % page+1
# Making the call to the API
try:
newUsers = api.search_users(query, n, page)
for n in newUsers:
aux.append(n._json)
#keep grabbing tweets until there are no tweets left to grab
while len(aux) < maxUsers & len(newUsers)>0:
page+=1
print "Getting page %s of new users..." % page
# Grabbing new Users
newUsers = api.search_users(query, n, page)
# Save the users found
aux.extend(newUsers)
except:
pass
res = []
# Extracting the information from each profile
for a in aux:
res.append(self._processUser(a))
return res | python | def search_users(self, query, n=20, maxUsers=60):
"""
Method to perform the searchfy searches.
:param query: Query to be performed.
:param n: Number of results per query.
:param maxUsers: Max. number of users to be recovered.
:return: List of users
"""
# Connecting to the API
api = self._connectToAPI()
# Verifying the limits of the API
self._rate_limit_status(api=api, mode="search_users")
aux = []
page = 0
# print "Getting page %s of new users..." % page+1
# Making the call to the API
try:
newUsers = api.search_users(query, n, page)
for n in newUsers:
aux.append(n._json)
#keep grabbing tweets until there are no tweets left to grab
while len(aux) < maxUsers & len(newUsers)>0:
page+=1
print "Getting page %s of new users..." % page
# Grabbing new Users
newUsers = api.search_users(query, n, page)
# Save the users found
aux.extend(newUsers)
except:
pass
res = []
# Extracting the information from each profile
for a in aux:
res.append(self._processUser(a))
return res | [
"def",
"search_users",
"(",
"self",
",",
"query",
",",
"n",
"=",
"20",
",",
"maxUsers",
"=",
"60",
")",
":",
"# Connecting to the API",
"api",
"=",
"self",
".",
"_connectToAPI",
"(",
")",
"# Verifying the limits of the API",
"self",
".",
"_rate_limit_status",
"(",
"api",
"=",
"api",
",",
"mode",
"=",
"\"search_users\"",
")",
"aux",
"=",
"[",
"]",
"page",
"=",
"0",
"# print \"Getting page %s of new users...\" % page+1",
"# Making the call to the API",
"try",
":",
"newUsers",
"=",
"api",
".",
"search_users",
"(",
"query",
",",
"n",
",",
"page",
")",
"for",
"n",
"in",
"newUsers",
":",
"aux",
".",
"append",
"(",
"n",
".",
"_json",
")",
"#keep grabbing tweets until there are no tweets left to grab",
"while",
"len",
"(",
"aux",
")",
"<",
"maxUsers",
"&",
"len",
"(",
"newUsers",
")",
">",
"0",
":",
"page",
"+=",
"1",
"print",
"\"Getting page %s of new users...\"",
"%",
"page",
"# Grabbing new Users",
"newUsers",
"=",
"api",
".",
"search_users",
"(",
"query",
",",
"n",
",",
"page",
")",
"# Save the users found",
"aux",
".",
"extend",
"(",
"newUsers",
")",
"except",
":",
"pass",
"res",
"=",
"[",
"]",
"# Extracting the information from each profile",
"for",
"a",
"in",
"aux",
":",
"res",
".",
"append",
"(",
"self",
".",
"_processUser",
"(",
"a",
")",
")",
"return",
"res"
] | Method to perform the searchfy searches.
:param query: Query to be performed.
:param n: Number of results per query.
:param maxUsers: Max. number of users to be recovered.
:return: List of users | [
"Method",
"to",
"perform",
"the",
"searchfy",
"searches",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/api/twitter_api.py#L726-L772 |
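One thing to note in the record above: the pagination condition `while len(aux) < maxUsers & len(newUsers)>0:` uses `&`, which binds tighter than the comparisons, so it does not express "fewer than maxUsers collected and the last page was non-empty"; the per-page count `n` is also clobbered by the `for n in newUsers` loop before the next call. A minimal sketch of the intended loop, with the API call stubbed so it runs offline:

# Sketch, not project code: the intended pagination written with `and`
# and a stubbed page fetcher instead of api.search_users().
def fetch_page(page):
    data = {0: ["a", "b"], 1: ["c"], 2: []}   # stand-in results per page
    return data.get(page, [])

collected, page, max_users = [], 0, 5
new_users = fetch_page(page)
collected.extend(new_users)
while len(collected) < max_users and len(new_users) > 0:
    page += 1
    new_users = fetch_page(page)
    collected.extend(new_users)

print(collected)  # ['a', 'b', 'c']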
5,460 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/source.py | Source.validate_categories | def validate_categories(categories):
"""Take an iterable of source categories and raise ValueError if some
of them are invalid."""
if not set(categories) <= Source.categories:
invalid = list(set(categories) - Source.categories)
raise ValueError('Invalid categories: %s' % invalid) | python | def validate_categories(categories):
"""Take an iterable of source categories and raise ValueError if some
of them are invalid."""
if not set(categories) <= Source.categories:
invalid = list(set(categories) - Source.categories)
raise ValueError('Invalid categories: %s' % invalid) | [
"def",
"validate_categories",
"(",
"categories",
")",
":",
"if",
"not",
"set",
"(",
"categories",
")",
"<=",
"Source",
".",
"categories",
":",
"invalid",
"=",
"list",
"(",
"set",
"(",
"categories",
")",
"-",
"Source",
".",
"categories",
")",
"raise",
"ValueError",
"(",
"'Invalid categories: %s'",
"%",
"invalid",
")"
] | Take an iterable of source categories and raise ValueError if some
of them are invalid. | [
"Take",
"an",
"iterable",
"of",
"source",
"categories",
"and",
"raise",
"ValueError",
"if",
"some",
"of",
"them",
"are",
"invalid",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/source.py#L50-L55 |
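`validate_categories` above is a plain set-difference check against the class-level `Source.categories`. The same pattern as a self-contained sketch (the category names below are invented; the real set is defined elsewhere in source.py):

# Sketch, not project code: set-based validation with an assumed
# category set.
VALID_CATEGORIES = {"personal_profiles", "contact_details", "web_pages"}

def validate_categories(categories):
    if not set(categories) <= VALID_CATEGORIES:
        invalid = list(set(categories) - VALID_CATEGORIES)
        raise ValueError('Invalid categories: %s' % invalid)

validate_categories(["web_pages"])              # passes silently
try:
    validate_categories(["web_pages", "bogus"])
except ValueError as e:
    print(e)                                    # Invalid categories: ['bogus']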
5,461 | i3visio/osrframework | osrframework/thirdparties/md5db_net/checkIfHashIsCracked.py | checkIfHashIsCracked | def checkIfHashIsCracked(hash=None):
"""
Method that checks if the given hash is stored in the md5db.net website.
:param hash: hash to verify.
:return: Resolved hash. If nothing was found, it will return an empty list.
"""
apiURL = "http://md5db.net/api/" + str(hash).lower()
try:
# Getting the result of the query from MD5db.net
data = urllib2.urlopen(apiURL).read()
return data
except:
# No information was found, then we return a null entity
return [] | python | def checkIfHashIsCracked(hash=None):
"""
Method that checks if the given hash is stored in the md5db.net website.
:param hash: hash to verify.
:return: Resolved hash. If nothing was found, it will return an empty list.
"""
apiURL = "http://md5db.net/api/" + str(hash).lower()
try:
# Getting the result of the query from MD5db.net
data = urllib2.urlopen(apiURL).read()
return data
except:
# No information was found, then we return a null entity
return [] | [
"def",
"checkIfHashIsCracked",
"(",
"hash",
"=",
"None",
")",
":",
"apiURL",
"=",
"\"http://md5db.net/api/\"",
"+",
"str",
"(",
"hash",
")",
".",
"lower",
"(",
")",
"try",
":",
"# Getting the result of the query from MD5db.net",
"data",
"=",
"urllib2",
".",
"urlopen",
"(",
"apiURL",
")",
".",
"read",
"(",
")",
"return",
"data",
"except",
":",
"# No information was found, then we return a null entity",
"return",
"[",
"]"
] | Method that checks if the given hash is stored in the md5db.net website.
:param hash: hash to verify.
:return: Resolved hash. If nothing was found, it will return an empty list. | [
"Method",
"that",
"checks",
"if",
"the",
"given",
"hash",
"is",
"stored",
"in",
"the",
"md5db",
".",
"net",
"website",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/md5db_net/checkIfHashIsCracked.py#L26-L46 |
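`checkIfHashIsCracked` above performs a single GET with Python 2's `urllib2` and falls back to an empty list on failure. A Python 3 sketch of the same lookup (the service may well be unreachable, hence the broad guard; the hash below is the familiar MD5 of "password"):

# Sketch, not project code: the same request via Python 3's urllib.request.
from urllib.request import urlopen

def check_hash(md5_hash):
    url = "http://md5db.net/api/" + str(md5_hash).lower()
    try:
        return urlopen(url, timeout=10).read().decode("utf-8", "replace")
    except Exception:
        return []   # mirror the original: no answer means an empty result

print(check_hash("5f4dcc3b5aa765d61d8327deb882cf99"))  # md5("password")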
5,462 | i3visio/osrframework | osrframework/usufy.py | fuzzUsufy | def fuzzUsufy(fDomains = None, fFuzzStruct = None):
"""
Method to guess the usufy path against a list of domains or subdomains.
Args:
-----
fDomains: A list of strings containing the domains and (optionally) a
nick.
fFuzzStruct: A list of strings containing the transforms to be
performed.
Returns:
--------
dict: A dictionary of the form of `{"domain": "url"}`.
"""
if fFuzzStruct == None:
# Loading these structures by default
fuzzingStructures = [
"http://<DOMAIN>/<USERNAME>",
"http://<DOMAIN>/~<USERNAME>",
"http://<DOMAIN>/?action=profile;user=<USERNAME>",
"http://<DOMAIN>/causes/author/<USERNAME>",
"http://<DOMAIN>/channel/<USERNAME>",
"http://<DOMAIN>/community/profile/<USERNAME>",
"http://<DOMAIN>/component/comprofiler/userprofiler/<USERNAME>",
"http://<DOMAIN>/details/@<USERNAME>",
"http://<DOMAIN>/foros/member.php?username=<USERNAME>",
"http://<DOMAIN>/forum/member/<USERNAME>",
"http://<DOMAIN>/forum/member.php?username=<USERNAME>",
"http://<DOMAIN>/forum/profile.php?mode=viewprofile&u=<USERNAME>",
"http://<DOMAIN>/home/<USERNAME>",
"http://<DOMAIN>/index.php?action=profile;user=<USERNAME>",
"http://<DOMAIN>/member_profile.php?u=<USERNAME>",
"http://<DOMAIN>/member.php?username=<USERNAME>",
"http://<DOMAIN>/members/?username=<USERNAME>",
"http://<DOMAIN>/members/<USERNAME>",
"http://<DOMAIN>/members/view/<USERNAME>",
"http://<DOMAIN>/mi-espacio/<USERNAME>",
"http://<DOMAIN>/u<USERNAME>",
"http://<DOMAIN>/u/<USERNAME>",
"http://<DOMAIN>/user-<USERNAME>",
"http://<DOMAIN>/user/<USERNAME>",
"http://<DOMAIN>/user/<USERNAME>.html",
"http://<DOMAIN>/users/<USERNAME>",
"http://<DOMAIN>/usr/<USERNAME>",
"http://<DOMAIN>/usuario/<USERNAME>",
"http://<DOMAIN>/usuarios/<USERNAME>",
"http://<DOMAIN>/en/users/<USERNAME>",
"http://<DOMAIN>/people/<USERNAME>",
"http://<DOMAIN>/profil/<USERNAME>",
"http://<DOMAIN>/profile/<USERNAME>",
"http://<DOMAIN>/profile/page/<USERNAME>",
"http://<DOMAIN>/rapidforum/index.php?action=profile;user=<USERNAME>",
"http://<DOMAIN>/social/usuarios/<USERNAME>",
"http://<USERNAME>.<DOMAIN>",
"http://<USERNAME>.<DOMAIN>/user/"
]
else:
try:
fuzzingStructures = fFuzzStruct.read().splitlines()
except:
print("Usufy could NOT open the following file: " + fFuzzStruct)
res = {}
lines = fDomains.read().splitlines()
# Going through all the lines
for l in lines:
domain = l.split()[0]
print("Performing tests for" + domain + "...")
# selecting the number of nicks to be tested in this domain
nick = l.split()[1]
# possibleURLs found
possibleURL = []
for struct in fuzzingStructures:
# initiating list
urlToTry = struct.replace("<DOMAIN>", domain)
test = urlToTry.replace("<USERNAME>", nick.lower())
print("Processing "+ test + "...")
i3Browser = browser.Browser()
try:
html = i3Browser.recoverURL(test)
if nick in html:
possibleURL.append(test)
print(general.success("\tPossible usufy found!!!\n"))
except:
print("The resource could not be downloaded.")
res[domain] = possibleURL
print(json.dumps(res, indent = 2))
return res | python | def fuzzUsufy(fDomains = None, fFuzzStruct = None):
"""
Method to guess the usufy path against a list of domains or subdomains.
Args:
-----
fDomains: A list of strings containing the domains and (optionally) a
nick.
fFuzzStruct: A list of strings containing the transforms to be
performed.
Returns:
--------
dict: A dictionary of the form of `{"domain": "url"}`.
"""
if fFuzzStruct == None:
# Loading these structures by default
fuzzingStructures = [
"http://<DOMAIN>/<USERNAME>",
"http://<DOMAIN>/~<USERNAME>",
"http://<DOMAIN>/?action=profile;user=<USERNAME>",
"http://<DOMAIN>/causes/author/<USERNAME>",
"http://<DOMAIN>/channel/<USERNAME>",
"http://<DOMAIN>/community/profile/<USERNAME>",
"http://<DOMAIN>/component/comprofiler/userprofiler/<USERNAME>",
"http://<DOMAIN>/details/@<USERNAME>",
"http://<DOMAIN>/foros/member.php?username=<USERNAME>",
"http://<DOMAIN>/forum/member/<USERNAME>",
"http://<DOMAIN>/forum/member.php?username=<USERNAME>",
"http://<DOMAIN>/forum/profile.php?mode=viewprofile&u=<USERNAME>",
"http://<DOMAIN>/home/<USERNAME>",
"http://<DOMAIN>/index.php?action=profile;user=<USERNAME>",
"http://<DOMAIN>/member_profile.php?u=<USERNAME>",
"http://<DOMAIN>/member.php?username=<USERNAME>",
"http://<DOMAIN>/members/?username=<USERNAME>",
"http://<DOMAIN>/members/<USERNAME>",
"http://<DOMAIN>/members/view/<USERNAME>",
"http://<DOMAIN>/mi-espacio/<USERNAME>",
"http://<DOMAIN>/u<USERNAME>",
"http://<DOMAIN>/u/<USERNAME>",
"http://<DOMAIN>/user-<USERNAME>",
"http://<DOMAIN>/user/<USERNAME>",
"http://<DOMAIN>/user/<USERNAME>.html",
"http://<DOMAIN>/users/<USERNAME>",
"http://<DOMAIN>/usr/<USERNAME>",
"http://<DOMAIN>/usuario/<USERNAME>",
"http://<DOMAIN>/usuarios/<USERNAME>",
"http://<DOMAIN>/en/users/<USERNAME>",
"http://<DOMAIN>/people/<USERNAME>",
"http://<DOMAIN>/profil/<USERNAME>",
"http://<DOMAIN>/profile/<USERNAME>",
"http://<DOMAIN>/profile/page/<USERNAME>",
"http://<DOMAIN>/rapidforum/index.php?action=profile;user=<USERNAME>",
"http://<DOMAIN>/social/usuarios/<USERNAME>",
"http://<USERNAME>.<DOMAIN>",
"http://<USERNAME>.<DOMAIN>/user/"
]
else:
try:
fuzzingStructures = fFuzzStruct.read().splitlines()
except:
print("Usufy could NOT open the following file: " + fFuzzStruct)
res = {}
lines = fDomains.read().splitlines()
# Going through all the lines
for l in lines:
domain = l.split()[0]
print("Performing tests for" + domain + "...")
# selecting the number of nicks to be tested in this domain
nick = l.split()[1]
# possibleURLs found
possibleURL = []
for struct in fuzzingStructures:
# initiating list
urlToTry = struct.replace("<DOMAIN>", domain)
test = urlToTry.replace("<USERNAME>", nick.lower())
print("Processing "+ test + "...")
i3Browser = browser.Browser()
try:
html = i3Browser.recoverURL(test)
if nick in html:
possibleURL.append(test)
print(general.success("\tPossible usufy found!!!\n"))
except:
print("The resource could not be downloaded.")
res[domain] = possibleURL
print(json.dumps(res, indent = 2))
return res | [
"def",
"fuzzUsufy",
"(",
"fDomains",
"=",
"None",
",",
"fFuzzStruct",
"=",
"None",
")",
":",
"if",
"fFuzzStruct",
"==",
"None",
":",
"# Loading these structures by default",
"fuzzingStructures",
"=",
"[",
"\"http://<DOMAIN>/<USERNAME>\"",
",",
"\"http://<DOMAIN>/~<USERNAME>\"",
",",
"\"http://<DOMAIN>/?action=profile;user=<USERNAME>\"",
",",
"\"http://<DOMAIN>/causes/author/<USERNAME>\"",
",",
"\"http://<DOMAIN>/channel/<USERNAME>\"",
",",
"\"http://<DOMAIN>/community/profile/<USERNAME>\"",
",",
"\"http://<DOMAIN>/component/comprofiler/userprofiler/<USERNAME>\"",
",",
"\"http://<DOMAIN>/details/@<USERNAME>\"",
",",
"\"http://<DOMAIN>/foros/member.php?username=<USERNAME>\"",
",",
"\"http://<DOMAIN>/forum/member/<USERNAME>\"",
",",
"\"http://<DOMAIN>/forum/member.php?username=<USERNAME>\"",
",",
"\"http://<DOMAIN>/forum/profile.php?mode=viewprofile&u=<USERNAME>\"",
",",
"\"http://<DOMAIN>/home/<USERNAME>\"",
",",
"\"http://<DOMAIN>/index.php?action=profile;user=<USERNAME>\"",
",",
"\"http://<DOMAIN>/member_profile.php?u=<USERNAME>\"",
",",
"\"http://<DOMAIN>/member.php?username=<USERNAME>\"",
",",
"\"http://<DOMAIN>/members/?username=<USERNAME>\"",
",",
"\"http://<DOMAIN>/members/<USERNAME>\"",
",",
"\"http://<DOMAIN>/members/view/<USERNAME>\"",
",",
"\"http://<DOMAIN>/mi-espacio/<USERNAME>\"",
",",
"\"http://<DOMAIN>/u<USERNAME>\"",
",",
"\"http://<DOMAIN>/u/<USERNAME>\"",
",",
"\"http://<DOMAIN>/user-<USERNAME>\"",
",",
"\"http://<DOMAIN>/user/<USERNAME>\"",
",",
"\"http://<DOMAIN>/user/<USERNAME>.html\"",
",",
"\"http://<DOMAIN>/users/<USERNAME>\"",
",",
"\"http://<DOMAIN>/usr/<USERNAME>\"",
",",
"\"http://<DOMAIN>/usuario/<USERNAME>\"",
",",
"\"http://<DOMAIN>/usuarios/<USERNAME>\"",
",",
"\"http://<DOMAIN>/en/users/<USERNAME>\"",
",",
"\"http://<DOMAIN>/people/<USERNAME>\"",
",",
"\"http://<DOMAIN>/profil/<USERNAME>\"",
",",
"\"http://<DOMAIN>/profile/<USERNAME>\"",
",",
"\"http://<DOMAIN>/profile/page/<USERNAME>\"",
",",
"\"http://<DOMAIN>/rapidforum/index.php?action=profile;user=<USERNAME>\"",
",",
"\"http://<DOMAIN>/social/usuarios/<USERNAME>\"",
",",
"\"http://<USERNAME>.<DOMAIN>\"",
",",
"\"http://<USERNAME>.<DOMAIN>/user/\"",
"]",
"else",
":",
"try",
":",
"fuzzingStructures",
"=",
"fFuzzStruct",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"except",
":",
"print",
"(",
"\"Usufy could NOT open the following file: \"",
"+",
"fFuzzStruct",
")",
"res",
"=",
"{",
"}",
"lines",
"=",
"fDomains",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"# Going through all the lines",
"for",
"l",
"in",
"lines",
":",
"domain",
"=",
"l",
".",
"split",
"(",
")",
"[",
"0",
"]",
"print",
"(",
"\"Performing tests for\"",
"+",
"domain",
"+",
"\"...\"",
")",
"# selecting the number of nicks to be tested in this domain",
"nick",
"=",
"l",
".",
"split",
"(",
")",
"[",
"1",
"]",
"# possibleURLs found",
"possibleURL",
"=",
"[",
"]",
"for",
"struct",
"in",
"fuzzingStructures",
":",
"# initiating list",
"urlToTry",
"=",
"struct",
".",
"replace",
"(",
"\"<DOMAIN>\"",
",",
"domain",
")",
"test",
"=",
"urlToTry",
".",
"replace",
"(",
"\"<USERNAME>\"",
",",
"nick",
".",
"lower",
"(",
")",
")",
"print",
"(",
"\"Processing \"",
"+",
"test",
"+",
"\"...\"",
")",
"i3Browser",
"=",
"browser",
".",
"Browser",
"(",
")",
"try",
":",
"html",
"=",
"i3Browser",
".",
"recoverURL",
"(",
"test",
")",
"if",
"nick",
"in",
"html",
":",
"possibleURL",
".",
"append",
"(",
"test",
")",
"print",
"(",
"general",
".",
"success",
"(",
"\"\\tPossible usufy found!!!\\n\"",
")",
")",
"except",
":",
"print",
"(",
"\"The resource could not be downloaded.\"",
")",
"res",
"[",
"domain",
"]",
"=",
"possibleURL",
"print",
"(",
"json",
".",
"dumps",
"(",
"res",
",",
"indent",
"=",
"2",
")",
")",
"return",
"res"
] | Method to guess the usufy path against a list of domains or subdomains.
Args:
-----
fDomains: A list of strings containing the domains and (optionally) a
nick.
fFuzzStruct: A list of strings containing the transforms to be
performed.
Returns:
--------
dict: A dictionary of the form of `{"domain": "url"}`. | [
"Method",
"to",
"guess",
"the",
"usufy",
"path",
"against",
"a",
"list",
"of",
"domains",
"or",
"subdomains",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/usufy.py#L50-L145 |
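The heart of `fuzzUsufy` above is template substitution on `<DOMAIN>` and `<USERNAME>` followed by a download-and-grep check through the project's browser helper. The substitution step alone, as a runnable sketch over a hypothetical domain and nick (no HTTP requests here):

# Sketch, not project code: expanding a few of the fuzzing templates for
# one (domain, nick) pair.
templates = [
    "http://<DOMAIN>/<USERNAME>",
    "http://<DOMAIN>/user/<USERNAME>",
    "http://<USERNAME>.<DOMAIN>",
]

domain, nick = "example.com", "SomeNick"   # hypothetical values
for t in templates:
    print(t.replace("<DOMAIN>", domain).replace("<USERNAME>", nick.lower()))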
5,463 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/search.py | SearchAPIRequest._prepare_filtering_params | def _prepare_filtering_params(domain=None, category=None,
sponsored_source=None, has_field=None,
has_fields=None, query_params_match=None,
query_person_match=None, **kwargs):
"""Transform the params to the API format, return a list of params."""
if query_params_match not in (None, True):
raise ValueError('query_params_match can only be `True`')
if query_person_match not in (None, True):
raise ValueError('query_person_match can only be `True`')
params = []
if domain is not None:
params.append('domain:%s' % domain)
if category is not None:
Source.validate_categories([category])
params.append('category:%s' % category)
if sponsored_source is not None:
params.append('sponsored_source:%s' % sponsored_source)
if query_params_match is not None:
params.append('query_params_match')
if query_person_match is not None:
params.append('query_person_match')
has_fields = has_fields or []
if has_field is not None:
has_fields.append(has_field)
for has_field in has_fields:
params.append('has_field:%s' % has_field.__name__)
return params | python | def _prepare_filtering_params(domain=None, category=None,
sponsored_source=None, has_field=None,
has_fields=None, query_params_match=None,
query_person_match=None, **kwargs):
"""Transform the params to the API format, return a list of params."""
if query_params_match not in (None, True):
raise ValueError('query_params_match can only be `True`')
if query_person_match not in (None, True):
raise ValueError('query_person_match can only be `True`')
params = []
if domain is not None:
params.append('domain:%s' % domain)
if category is not None:
Source.validate_categories([category])
params.append('category:%s' % category)
if sponsored_source is not None:
params.append('sponsored_source:%s' % sponsored_source)
if query_params_match is not None:
params.append('query_params_match')
if query_person_match is not None:
params.append('query_person_match')
has_fields = has_fields or []
if has_field is not None:
has_fields.append(has_field)
for has_field in has_fields:
params.append('has_field:%s' % has_field.__name__)
return params | [
"def",
"_prepare_filtering_params",
"(",
"domain",
"=",
"None",
",",
"category",
"=",
"None",
",",
"sponsored_source",
"=",
"None",
",",
"has_field",
"=",
"None",
",",
"has_fields",
"=",
"None",
",",
"query_params_match",
"=",
"None",
",",
"query_person_match",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"query_params_match",
"not",
"in",
"(",
"None",
",",
"True",
")",
":",
"raise",
"ValueError",
"(",
"'query_params_match can only be `True`'",
")",
"if",
"query_person_match",
"not",
"in",
"(",
"None",
",",
"True",
")",
":",
"raise",
"ValueError",
"(",
"'query_person_match can only be `True`'",
")",
"params",
"=",
"[",
"]",
"if",
"domain",
"is",
"not",
"None",
":",
"params",
".",
"append",
"(",
"'domain:%s'",
"%",
"domain",
")",
"if",
"category",
"is",
"not",
"None",
":",
"Source",
".",
"validate_categories",
"(",
"[",
"category",
"]",
")",
"params",
".",
"append",
"(",
"'category:%s'",
"%",
"category",
")",
"if",
"sponsored_source",
"is",
"not",
"None",
":",
"params",
".",
"append",
"(",
"'sponsored_source:%s'",
"%",
"sponsored_source",
")",
"if",
"query_params_match",
"is",
"not",
"None",
":",
"params",
".",
"append",
"(",
"'query_params_match'",
")",
"if",
"query_person_match",
"is",
"not",
"None",
":",
"params",
".",
"append",
"(",
"'query_person_match'",
")",
"has_fields",
"=",
"has_fields",
"or",
"[",
"]",
"if",
"has_field",
"is",
"not",
"None",
":",
"has_fields",
".",
"append",
"(",
"has_field",
")",
"for",
"has_field",
"in",
"has_fields",
":",
"params",
".",
"append",
"(",
"'has_field:%s'",
"%",
"has_field",
".",
"__name__",
")",
"return",
"params"
] | Transform the params to the API format, return a list of params. | [
"Transform",
"the",
"params",
"to",
"the",
"API",
"format",
"return",
"a",
"list",
"of",
"params",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/search.py#L180-L207 |
5,464 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/search.py | SearchAPIRequest.validate_query_params | def validate_query_params(self, strict=True):
"""Check if the request is valid and can be sent, raise ValueError if
not.
`strict` is a boolean argument that defaults to True which means an
exception is raised on every invalid query parameter, if set to False
an exception is raised only when the search request cannot be performed
because required query params are missing.
"""
if not (self.api_key or default_api_key):
raise ValueError('API key is missing')
if strict and self.query_params_mode not in (None, 'and', 'or'):
raise ValueError('query_params_match should be one of "and"/"or"')
if not self.person.is_searchable:
raise ValueError('No valid name/username/phone/email in request')
if strict and self.person.unsearchable_fields:
raise ValueError('Some fields are unsearchable: %s'
% self.person.unsearchable_fields) | python | def validate_query_params(self, strict=True):
"""Check if the request is valid and can be sent, raise ValueError if
not.
`strict` is a boolean argument that defaults to True which means an
exception is raised on every invalid query parameter, if set to False
an exception is raised only when the search request cannot be performed
because required query params are missing.
"""
if not (self.api_key or default_api_key):
raise ValueError('API key is missing')
if strict and self.query_params_mode not in (None, 'and', 'or'):
raise ValueError('query_params_match should be one of "and"/"or"')
if not self.person.is_searchable:
raise ValueError('No valid name/username/phone/email in request')
if strict and self.person.unsearchable_fields:
raise ValueError('Some fields are unsearchable: %s'
% self.person.unsearchable_fields) | [
"def",
"validate_query_params",
"(",
"self",
",",
"strict",
"=",
"True",
")",
":",
"if",
"not",
"(",
"self",
".",
"api_key",
"or",
"default_api_key",
")",
":",
"raise",
"ValueError",
"(",
"'API key is missing'",
")",
"if",
"strict",
"and",
"self",
".",
"query_params_mode",
"not",
"in",
"(",
"None",
",",
"'and'",
",",
"'or'",
")",
":",
"raise",
"ValueError",
"(",
"'query_params_match should be one of \"and\"/\"or\"'",
")",
"if",
"not",
"self",
".",
"person",
".",
"is_searchable",
":",
"raise",
"ValueError",
"(",
"'No valid name/username/phone/email in request'",
")",
"if",
"strict",
"and",
"self",
".",
"person",
".",
"unsearchable_fields",
":",
"raise",
"ValueError",
"(",
"'Some fields are unsearchable: %s'",
"%",
"self",
".",
"person",
".",
"unsearchable_fields",
")"
] | Check if the request is valid and can be sent, raise ValueError if
not.
`strict` is a boolean argument that defaults to True which means an
exception is raised on every invalid query parameter, if set to False
an exception is raised only when the search request cannot be performed
because required query params are missing. | [
"Check",
"if",
"the",
"request",
"is",
"valid",
"and",
"can",
"be",
"sent",
"raise",
"ValueError",
"if",
"not",
".",
"strict",
"is",
"a",
"boolean",
"argument",
"that",
"defaults",
"to",
"True",
"which",
"means",
"an",
"exception",
"is",
"raised",
"on",
"every",
"invalid",
"query",
"parameter",
"if",
"set",
"to",
"False",
"an",
"exception",
"is",
"raised",
"only",
"when",
"the",
"search",
"request",
"cannot",
"be",
"performed",
"because",
"required",
"query",
"params",
"are",
"missing",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/search.py#L322-L340 |
5,465 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/search.py | SearchAPIResponse.group_records_by_domain | def group_records_by_domain(self):
"""Return the records grouped by the domain they came from.
The return value is a dict, a key in this dict is a domain
and the value is a list of all the records with this domain.
"""
key_function = lambda record: record.source.domain
return self.group_records(key_function) | python | def group_records_by_domain(self):
"""Return the records grouped by the domain they came from.
The return value is a dict, a key in this dict is a domain
and the value is a list of all the records with this domain.
"""
key_function = lambda record: record.source.domain
return self.group_records(key_function) | [
"def",
"group_records_by_domain",
"(",
"self",
")",
":",
"key_function",
"=",
"lambda",
"record",
":",
"record",
".",
"source",
".",
"domain",
"return",
"self",
".",
"group_records",
"(",
"key_function",
")"
] | Return the records grouped by the domain they came from.
The return value is a dict, a key in this dict is a domain
and the value is a list of all the records with this domain. | [
"Return",
"the",
"records",
"grouped",
"by",
"the",
"domain",
"they",
"came",
"from",
".",
"The",
"return",
"value",
"is",
"a",
"dict",
"a",
"key",
"in",
"this",
"dict",
"is",
"a",
"domain",
"and",
"the",
"value",
"is",
"a",
"list",
"of",
"all",
"the",
"records",
"with",
"this",
"domain",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/search.py#L532-L540 |
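`group_records_by_domain` above just hands a key function to a generic `group_records`, whose body is not shown in this record. The grouping idiom itself, sketched with plain dicts standing in for Record objects:

# Sketch with stand-in records: grouping by an arbitrary key function,
# which is presumably what group_records() does internally.
from collections import defaultdict

records = [
    {"source": {"domain": "linkedin.com"}, "name": "A"},
    {"source": {"domain": "facebook.com"}, "name": "B"},
    {"source": {"domain": "linkedin.com"}, "name": "C"},
]

def group_records(records, key_function):
    grouped = defaultdict(list)
    for record in records:
        grouped[key_function(record)].append(record)
    return dict(grouped)

by_domain = group_records(records, lambda r: r["source"]["domain"])
print(sorted(by_domain))   # ['facebook.com', 'linkedin.com']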
5,466 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/search.py | SearchAPIResponse.group_records_by_category | def group_records_by_category(self):
"""Return the records grouped by the category of their source.
The return value is a dict, a key in this dict is a category
and the value is a list of all the records with this category.
"""
Source.validate_categories(categories)
key_function = lambda record: record.source.category
return self.group_records(key_function) | python | def group_records_by_category(self):
"""Return the records grouped by the category of their source.
The return value is a dict, a key in this dict is a category
and the value is a list of all the records with this category.
"""
Source.validate_categories(categories)
key_function = lambda record: record.source.category
return self.group_records(key_function) | [
"def",
"group_records_by_category",
"(",
"self",
")",
":",
"Source",
".",
"validate_categories",
"(",
"categories",
")",
"key_function",
"=",
"lambda",
"record",
":",
"record",
".",
"source",
".",
"category",
"return",
"self",
".",
"group_records",
"(",
"key_function",
")"
] | Return the records grouped by the category of their source.
The return value is a dict, a key in this dict is a category
and the value is a list of all the records with this category. | [
"Return",
"the",
"records",
"grouped",
"by",
"the",
"category",
"of",
"their",
"source",
".",
"The",
"return",
"value",
"is",
"a",
"dict",
"a",
"key",
"in",
"this",
"dict",
"is",
"a",
"category",
"and",
"the",
"value",
"is",
"a",
"list",
"of",
"all",
"the",
"records",
"with",
"this",
"category",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/search.py#L542-L551 |
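Worth flagging in the record above: `group_records_by_category` calls `Source.validate_categories(categories)`, but no `categories` name exists in that scope, so the method would raise `NameError` when invoked; the line looks like a leftover from a variant that accepted a `categories` argument. A corrected sketch of the method body (assuming the validation is simply dropped):

# Sketch, not project code: the same grouping without the stray
# validation call.
def group_records_by_category(self):
    key_function = lambda record: record.source.category
    return self.group_records(key_function)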
5,467 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/search.py | SearchAPIResponse.from_dict | def from_dict(d):
"""Transform the dict to a response object and return the response."""
warnings_ = d.get('warnings', [])
query = d.get('query') or None
if query:
query = Person.from_dict(query)
person = d.get('person') or None
if person:
person = Person.from_dict(person)
records = d.get('records')
if records:
records = [Record.from_dict(record) for record in records]
suggested_searches = d.get('suggested_searches')
if suggested_searches:
suggested_searches = [Record.from_dict(record)
for record in suggested_searches]
return SearchAPIResponse(query=query, person=person, records=records,
suggested_searches=suggested_searches,
warnings_=warnings_) | python | def from_dict(d):
"""Transform the dict to a response object and return the response."""
warnings_ = d.get('warnings', [])
query = d.get('query') or None
if query:
query = Person.from_dict(query)
person = d.get('person') or None
if person:
person = Person.from_dict(person)
records = d.get('records')
if records:
records = [Record.from_dict(record) for record in records]
suggested_searches = d.get('suggested_searches')
if suggested_searches:
suggested_searches = [Record.from_dict(record)
for record in suggested_searches]
return SearchAPIResponse(query=query, person=person, records=records,
suggested_searches=suggested_searches,
warnings_=warnings_) | [
"def",
"from_dict",
"(",
"d",
")",
":",
"warnings_",
"=",
"d",
".",
"get",
"(",
"'warnings'",
",",
"[",
"]",
")",
"query",
"=",
"d",
".",
"get",
"(",
"'query'",
")",
"or",
"None",
"if",
"query",
":",
"query",
"=",
"Person",
".",
"from_dict",
"(",
"query",
")",
"person",
"=",
"d",
".",
"get",
"(",
"'person'",
")",
"or",
"None",
"if",
"person",
":",
"person",
"=",
"Person",
".",
"from_dict",
"(",
"person",
")",
"records",
"=",
"d",
".",
"get",
"(",
"'records'",
")",
"if",
"records",
":",
"records",
"=",
"[",
"Record",
".",
"from_dict",
"(",
"record",
")",
"for",
"record",
"in",
"records",
"]",
"suggested_searches",
"=",
"d",
".",
"get",
"(",
"'suggested_searches'",
")",
"if",
"suggested_searches",
":",
"suggested_searches",
"=",
"[",
"Record",
".",
"from_dict",
"(",
"record",
")",
"for",
"record",
"in",
"suggested_searches",
"]",
"return",
"SearchAPIResponse",
"(",
"query",
"=",
"query",
",",
"person",
"=",
"person",
",",
"records",
"=",
"records",
",",
"suggested_searches",
"=",
"suggested_searches",
",",
"warnings_",
"=",
"warnings_",
")"
] | Transform the dict to a response object and return the response. | [
"Transform",
"the",
"dict",
"to",
"a",
"response",
"object",
"and",
"return",
"the",
"response",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/search.py#L576-L594 |
5,468 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/search.py | SearchAPIResponse.to_dict | def to_dict(self):
"""Return a dict representation of the response."""
d = {}
if self.warnings:
d['warnings'] = self.warnings
if self.query is not None:
d['query'] = self.query.to_dict()
if self.person is not None:
d['person'] = self.person.to_dict()
if self.records:
d['records'] = [record.to_dict() for record in self.records]
if self.suggested_searches:
d['suggested_searches'] = [record.to_dict()
for record in self.suggested_searches]
return d | python | def to_dict(self):
"""Return a dict representation of the response."""
d = {}
if self.warnings:
d['warnings'] = self.warnings
if self.query is not None:
d['query'] = self.query.to_dict()
if self.person is not None:
d['person'] = self.person.to_dict()
if self.records:
d['records'] = [record.to_dict() for record in self.records]
if self.suggested_searches:
d['suggested_searches'] = [record.to_dict()
for record in self.suggested_searches]
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"if",
"self",
".",
"warnings",
":",
"d",
"[",
"'warnings'",
"]",
"=",
"self",
".",
"warnings",
"if",
"self",
".",
"query",
"is",
"not",
"None",
":",
"d",
"[",
"'query'",
"]",
"=",
"self",
".",
"query",
".",
"to_dict",
"(",
")",
"if",
"self",
".",
"person",
"is",
"not",
"None",
":",
"d",
"[",
"'person'",
"]",
"=",
"self",
".",
"person",
".",
"to_dict",
"(",
")",
"if",
"self",
".",
"records",
":",
"d",
"[",
"'records'",
"]",
"=",
"[",
"record",
".",
"to_dict",
"(",
")",
"for",
"record",
"in",
"self",
".",
"records",
"]",
"if",
"self",
".",
"suggested_searches",
":",
"d",
"[",
"'suggested_searches'",
"]",
"=",
"[",
"record",
".",
"to_dict",
"(",
")",
"for",
"record",
"in",
"self",
".",
"suggested_searches",
"]",
"return",
"d"
] | Return a dict representation of the response. | [
"Return",
"a",
"dict",
"representation",
"of",
"the",
"response",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/search.py#L596-L610 |
5,469 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Field.from_dict | def from_dict(cls, d):
"""Transform the dict to a field object and return the field."""
kwargs = {}
for key, val in d.iteritems():
if key.startswith('display'): # includes phone.display_international
continue
if key.startswith('@'):
key = key[1:]
if key == 'type':
key = 'type_'
elif key == 'valid_since':
val = str_to_datetime(val)
elif key == 'date_range':
val = DateRange.from_dict(val)
kwargs[key.encode('ascii')] = val
return cls(**kwargs) | python | def from_dict(cls, d):
"""Transform the dict to a field object and return the field."""
kwargs = {}
for key, val in d.iteritems():
if key.startswith('display'): # includes phone.display_international
continue
if key.startswith('@'):
key = key[1:]
if key == 'type':
key = 'type_'
elif key == 'valid_since':
val = str_to_datetime(val)
elif key == 'date_range':
val = DateRange.from_dict(val)
kwargs[key.encode('ascii')] = val
return cls(**kwargs) | [
"def",
"from_dict",
"(",
"cls",
",",
"d",
")",
":",
"kwargs",
"=",
"{",
"}",
"for",
"key",
",",
"val",
"in",
"d",
".",
"iteritems",
"(",
")",
":",
"if",
"key",
".",
"startswith",
"(",
"'display'",
")",
":",
"# includes phone.display_international\r",
"continue",
"if",
"key",
".",
"startswith",
"(",
"'@'",
")",
":",
"key",
"=",
"key",
"[",
"1",
":",
"]",
"if",
"key",
"==",
"'type'",
":",
"key",
"=",
"'type_'",
"elif",
"key",
"==",
"'valid_since'",
":",
"val",
"=",
"str_to_datetime",
"(",
"val",
")",
"elif",
"key",
"==",
"'date_range'",
":",
"val",
"=",
"DateRange",
".",
"from_dict",
"(",
"val",
")",
"kwargs",
"[",
"key",
".",
"encode",
"(",
"'ascii'",
")",
"]",
"=",
"val",
"return",
"cls",
"(",
"*",
"*",
"kwargs",
")"
] | Transform the dict to a field object and return the field. | [
"Transform",
"the",
"dict",
"to",
"a",
"field",
"object",
"and",
"return",
"the",
"field",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L87-L102 |
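`Field.from_dict` above normalises incoming keys: 'display*' keys are skipped, a leading '@' is stripped, 'type' becomes 'type_', and dates are converted; it also leans on Python 2 idioms (`iteritems`, encoding keys to ASCII). The key-normalisation step alone, as a Python 3 sketch without the date handling:

# Sketch, not project code: the key normalisation performed by
# Field.from_dict, minus the valid_since/date_range conversions.
def normalise_keys(d):
    kwargs = {}
    for key, val in d.items():
        if key.startswith('display'):
            continue
        if key.startswith('@'):
            key = key[1:]
        if key == 'type':
            key = 'type_'
        kwargs[key] = val
    return kwargs

print(normalise_keys({'@type': 'work', 'address': 'a@b.co', 'display': 'a@b.co'}))
# -> {'type_': 'work', 'address': 'a@b.co'}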
5,470 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Field.to_dict | def to_dict(self):
"""Return a dict representation of the field."""
d = {}
if self.valid_since is not None:
d['@valid_since'] = datetime_to_str(self.valid_since)
for attr_list, prefix in [(self.attributes, '@'), (self.children, '')]:
for attr in attr_list:
value = getattr(self, attr)
if isinstance(value, Serializable):
value = value.to_dict()
if value or isinstance(value, (bool, int, long)):
d[prefix + attr] = value
if hasattr(self, 'display') and self.display:
d['display'] = self.display
return d | python | def to_dict(self):
"""Return a dict representation of the field."""
d = {}
if self.valid_since is not None:
d['@valid_since'] = datetime_to_str(self.valid_since)
for attr_list, prefix in [(self.attributes, '@'), (self.children, '')]:
for attr in attr_list:
value = getattr(self, attr)
if isinstance(value, Serializable):
value = value.to_dict()
if value or isinstance(value, (bool, int, long)):
d[prefix + attr] = value
if hasattr(self, 'display') and self.display:
d['display'] = self.display
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"if",
"self",
".",
"valid_since",
"is",
"not",
"None",
":",
"d",
"[",
"'@valid_since'",
"]",
"=",
"datetime_to_str",
"(",
"self",
".",
"valid_since",
")",
"for",
"attr_list",
",",
"prefix",
"in",
"[",
"(",
"self",
".",
"attributes",
",",
"'@'",
")",
",",
"(",
"self",
".",
"children",
",",
"''",
")",
"]",
":",
"for",
"attr",
"in",
"attr_list",
":",
"value",
"=",
"getattr",
"(",
"self",
",",
"attr",
")",
"if",
"isinstance",
"(",
"value",
",",
"Serializable",
")",
":",
"value",
"=",
"value",
".",
"to_dict",
"(",
")",
"if",
"value",
"or",
"isinstance",
"(",
"value",
",",
"(",
"bool",
",",
"int",
",",
"long",
")",
")",
":",
"d",
"[",
"prefix",
"+",
"attr",
"]",
"=",
"value",
"if",
"hasattr",
"(",
"self",
",",
"'display'",
")",
"and",
"self",
".",
"display",
":",
"d",
"[",
"'display'",
"]",
"=",
"self",
".",
"display",
"return",
"d"
] | Return a dict representation of the field. | [
"Return",
"a",
"dict",
"representation",
"of",
"the",
"field",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L104-L118 |
5,471 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Name.is_searchable | def is_searchable(self):
"""A bool value that indicates whether the name is a valid name to
search by."""
first = alpha_chars(self.first or u'')
last = alpha_chars(self.last or u'')
raw = alpha_chars(self.raw or u'')
return (len(first) >= 2 and len(last) >= 2) or len(raw) >= 4 | python | def is_searchable(self):
"""A bool value that indicates whether the name is a valid name to
search by."""
first = alpha_chars(self.first or u'')
last = alpha_chars(self.last or u'')
raw = alpha_chars(self.raw or u'')
return (len(first) >= 2 and len(last) >= 2) or len(raw) >= 4 | [
"def",
"is_searchable",
"(",
"self",
")",
":",
"first",
"=",
"alpha_chars",
"(",
"self",
".",
"first",
"or",
"u''",
")",
"last",
"=",
"alpha_chars",
"(",
"self",
".",
"last",
"or",
"u''",
")",
"raw",
"=",
"alpha_chars",
"(",
"self",
".",
"raw",
"or",
"u''",
")",
"return",
"(",
"len",
"(",
"first",
")",
">=",
"2",
"and",
"len",
"(",
"last",
")",
">=",
"2",
")",
"or",
"len",
"(",
"raw",
")",
">=",
"4"
] | A bool value that indicates whether the name is a valid name to
search by. | [
"A",
"bool",
"value",
"that",
"indicates",
"whether",
"the",
"name",
"is",
"a",
"valid",
"name",
"to",
"search",
"by",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L163-L169 |
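`Name.is_searchable` above counts alphabetic characters through an `alpha_chars` helper defined elsewhere in fields.py. The rule as a self-contained sketch (the helper below is an assumption about what `alpha_chars` does):

# Sketch, not project code: searchability rule with a stand-in alpha_chars.
def alpha_chars(text):
    # assumption: keep only alphabetic characters
    return ''.join(c for c in text if c.isalpha())

def name_is_searchable(first='', last='', raw=''):
    return (len(alpha_chars(first)) >= 2 and len(alpha_chars(last)) >= 2) \
        or len(alpha_chars(raw)) >= 4

print(name_is_searchable(first="Jo", last="Do"))   # True
print(name_is_searchable(raw="J. D."))             # False (only 2 letters)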
5,472 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Address.is_searchable | def is_searchable(self):
"""A bool value that indicates whether the address is a valid address
to search by."""
return self.raw or (self.is_valid_country and
(not self.state or self.is_valid_state)) | python | def is_searchable(self):
"""A bool value that indicates whether the address is a valid address
to search by."""
return self.raw or (self.is_valid_country and
(not self.state or self.is_valid_state)) | [
"def",
"is_searchable",
"(",
"self",
")",
":",
"return",
"self",
".",
"raw",
"or",
"(",
"self",
".",
"is_valid_country",
"and",
"(",
"not",
"self",
".",
"state",
"or",
"self",
".",
"is_valid_state",
")",
")"
] | A bool value that indicates whether the address is a valid address
to search by. | [
"A",
"bool",
"value",
"that",
"indicates",
"whether",
"the",
"address",
"is",
"a",
"valid",
"address",
"to",
"search",
"by",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L233-L237 |
5,473 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Address.is_valid_state | def is_valid_state(self):
"""A bool value that indicates whether the object's state is a valid
state code."""
return self.is_valid_country and self.country.upper() in STATES and \
self.state is not None and \
self.state.upper() in STATES[self.country.upper()] | python | def is_valid_state(self):
"""A bool value that indicates whether the object's state is a valid
state code."""
return self.is_valid_country and self.country.upper() in STATES and \
self.state is not None and \
self.state.upper() in STATES[self.country.upper()] | [
"def",
"is_valid_state",
"(",
"self",
")",
":",
"return",
"self",
".",
"is_valid_country",
"and",
"self",
".",
"country",
".",
"upper",
"(",
")",
"in",
"STATES",
"and",
"self",
".",
"state",
"is",
"not",
"None",
"and",
"self",
".",
"state",
".",
"upper",
"(",
")",
"in",
"STATES",
"[",
"self",
".",
"country",
".",
"upper",
"(",
")",
"]"
] | A bool value that indicates whether the object's state is a valid
state code. | [
"A",
"bool",
"value",
"that",
"indicates",
"whether",
"the",
"object",
"s",
"state",
"is",
"a",
"valid",
"state",
"code",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L246-L251 |
5,474 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Phone.to_dict | def to_dict(self):
"""Extend Field.to_dict, take the display_international attribute."""
d = Field.to_dict(self)
if self.display_international:
d['display_international'] = self.display_international
return d | python | def to_dict(self):
"""Extend Field.to_dict, take the display_international attribute."""
d = Field.to_dict(self)
if self.display_international:
d['display_international'] = self.display_international
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"Field",
".",
"to_dict",
"(",
"self",
")",
"if",
"self",
".",
"display_international",
":",
"d",
"[",
"'display_international'",
"]",
"=",
"self",
".",
"display_international",
"return",
"d"
] | Extend Field.to_dict, take the display_international attribute. | [
"Extend",
"Field",
".",
"to_dict",
"take",
"the",
"display_international",
"attribute",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L345-L350 |
5,475 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Email.is_valid_email | def is_valid_email(self):
"""A bool value that indicates whether the address is a valid
email address.
    Note that the check is done by matching to the regular expression
at Email.re_email which is very basic and far from covering end-cases...
"""
return bool(self.address and Email.re_email.match(self.address)) | python | def is_valid_email(self):
"""A bool value that indicates whether the address is a valid
email address.
    Note that the check is done by matching to the regular expression
at Email.re_email which is very basic and far from covering end-cases...
"""
return bool(self.address and Email.re_email.match(self.address)) | [
"def",
"is_valid_email",
"(",
"self",
")",
":",
"return",
"bool",
"(",
"self",
".",
"address",
"and",
"Email",
".",
"re_email",
".",
"match",
"(",
"self",
".",
"address",
")",
")"
] | A bool value that indicates whether the address is a valid
email address.
    Note that the check is done by matching to the regular expression
at Email.re_email which is very basic and far from covering end-cases... | [
"A",
"bool",
"value",
"that",
"indicates",
"whether",
"the",
"address",
"is",
"a",
"valid",
"email",
"address",
".",
"Note",
"that",
"the",
"check",
"is",
"done",
"be",
"matching",
"to",
"the",
"regular",
"expression",
"at",
"Email",
".",
"re_email",
"which",
"is",
"very",
"basic",
"and",
"far",
"from",
"covering",
"end",
"-",
"cases",
"..."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L383-L391 |
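As the docstring admits, "valid" here only means the address matches a simple regular expression. The Email.re_email pattern itself is not included in this record, so the sketch below substitutes an illustrative bare-bones pattern of its own; only the shape of the check (a non-empty address that matches) mirrors the property above.
```
import re

# Illustrative pattern only - the real Email.re_email is not shown in this record.
re_email = re.compile(r"^[\w.%+-]+@[\w.-]+\.[A-Za-z]{2,}$")

def is_valid_email(address):
    # Non-empty address that matches the (deliberately basic) pattern.
    return bool(address and re_email.match(address))

print(is_valid_email("[email protected]"))  # True
print(is_valid_email("not-an-email"))        # False
print(is_valid_email(None))                  # False
```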
5,476 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | DOB.age | def age(self):
"""int, the estimated age of the person.
Note that A DOB object is based on a date-range and the exact date is
    usually unknown so for age calculation the middle of the range is
assumed to be the real date-of-birth.
"""
if self.date_range is None:
return
dob = self.date_range.middle
today = datetime.date.today()
if (today.month, today.day) < (dob.month, dob.day):
return today.year - dob.year - 1
else:
return today.year - dob.year | python | def age(self):
"""int, the estimated age of the person.
Note that A DOB object is based on a date-range and the exact date is
    usually unknown so for age calculation the middle of the range is
assumed to be the real date-of-birth.
"""
if self.date_range is None:
return
dob = self.date_range.middle
today = datetime.date.today()
if (today.month, today.day) < (dob.month, dob.day):
return today.year - dob.year - 1
else:
return today.year - dob.year | [
"def",
"age",
"(",
"self",
")",
":",
"if",
"self",
".",
"date_range",
"is",
"None",
":",
"return",
"dob",
"=",
"self",
".",
"date_range",
".",
"middle",
"today",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"if",
"(",
"today",
".",
"month",
",",
"today",
".",
"day",
")",
"<",
"(",
"dob",
".",
"month",
",",
"dob",
".",
"day",
")",
":",
"return",
"today",
".",
"year",
"-",
"dob",
".",
"year",
"-",
"1",
"else",
":",
"return",
"today",
".",
"year",
"-",
"dob",
".",
"year"
] | int, the estimated age of the person.
Note that A DOB object is based on a date-range and the exact date is
    usually unknown so for age calculation the middle of the range is
assumed to be the real date-of-birth. | [
"int",
"the",
"estimated",
"age",
"of",
"the",
"person",
".",
"Note",
"that",
"A",
"DOB",
"object",
"is",
"based",
"on",
"a",
"date",
"-",
"range",
"and",
"the",
"exact",
"date",
"is",
"usually",
"unknown",
"so",
"for",
"age",
"calculation",
"the",
"the",
"middle",
"of",
"the",
"range",
"is",
"assumed",
"to",
"be",
"the",
"real",
"date",
"-",
"of",
"-",
"birth",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L619-L634 |
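The age computation above is plain calendar arithmetic on the midpoint of the date range: subtract the birth year from the current year, minus one if the birthday has not yet come around this year. A self-contained version using only datetime (with a fixed "today" so the output is deterministic):
```
import datetime

def age_from_dob(dob, today=None):
    # Mirrors DOB.age: whole years elapsed since the (midpoint) date of birth.
    today = today or datetime.date.today()
    if (today.month, today.day) < (dob.month, dob.day):
        return today.year - dob.year - 1  # birthday still ahead this year
    return today.year - dob.year

today = datetime.date(2024, 6, 1)
print(age_from_dob(datetime.date(1990, 7, 15), today))  # 33 - birthday not reached yet
print(age_from_dob(datetime.date(1990, 3, 15), today))  # 34 - birthday already passed
```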
5,477 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | DOB.age_range | def age_range(self):
"""A tuple of two ints - the minimum and maximum age of the person."""
if self.date_range is None:
return None, None
start_date = DateRange(self.date_range.start, self.date_range.start)
end_date = DateRange(self.date_range.end, self.date_range.end)
start_age = DOB(date_range=end_date).age
end_age = DOB(date_range=start_date).age
return start_age, end_age | python | def age_range(self):
"""A tuple of two ints - the minimum and maximum age of the person."""
if self.date_range is None:
return None, None
start_date = DateRange(self.date_range.start, self.date_range.start)
end_date = DateRange(self.date_range.end, self.date_range.end)
start_age = DOB(date_range=end_date).age
end_age = DOB(date_range=start_date).age
return start_age, end_age | [
"def",
"age_range",
"(",
"self",
")",
":",
"if",
"self",
".",
"date_range",
"is",
"None",
":",
"return",
"None",
",",
"None",
"start_date",
"=",
"DateRange",
"(",
"self",
".",
"date_range",
".",
"start",
",",
"self",
".",
"date_range",
".",
"start",
")",
"end_date",
"=",
"DateRange",
"(",
"self",
".",
"date_range",
".",
"end",
",",
"self",
".",
"date_range",
".",
"end",
")",
"start_age",
"=",
"DOB",
"(",
"date_range",
"=",
"end_date",
")",
".",
"age",
"end_age",
"=",
"DOB",
"(",
"date_range",
"=",
"start_date",
")",
".",
"age",
"return",
"start_age",
",",
"end_age"
] | A tuple of two ints - the minimum and maximum age of the person. | [
"A",
"tuple",
"of",
"two",
"ints",
"-",
"the",
"minimum",
"and",
"maximum",
"age",
"of",
"the",
"person",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L637-L645 |
5,478 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | DOB.from_age_range | def from_age_range(start_age, end_age):
"""Take a person's minimal and maximal age and return a new DOB object
suitable for him."""
if start_age < 0 or end_age < 0:
raise ValueError('start_age and end_age can\'t be negative')
if start_age > end_age:
start_age, end_age = end_age, start_age
today = datetime.date.today()
try:
start_date = today.replace(year=today.year - end_age - 1)
except ValueError: # February 29
start_date = today.replace(year=today.year - end_age - 1, day=28)
start_date += datetime.timedelta(days=1)
try:
end_date = today.replace(year=today.year - start_age)
except ValueError: # February 29
end_date = today.replace(year=today.year - start_age, day=28)
date_range = DateRange(start_date, end_date)
return DOB(date_range=date_range) | python | def from_age_range(start_age, end_age):
"""Take a person's minimal and maximal age and return a new DOB object
suitable for him."""
if start_age < 0 or end_age < 0:
raise ValueError('start_age and end_age can\'t be negative')
if start_age > end_age:
start_age, end_age = end_age, start_age
today = datetime.date.today()
try:
start_date = today.replace(year=today.year - end_age - 1)
except ValueError: # February 29
start_date = today.replace(year=today.year - end_age - 1, day=28)
start_date += datetime.timedelta(days=1)
try:
end_date = today.replace(year=today.year - start_age)
except ValueError: # February 29
end_date = today.replace(year=today.year - start_age, day=28)
date_range = DateRange(start_date, end_date)
return DOB(date_range=date_range) | [
"def",
"from_age_range",
"(",
"start_age",
",",
"end_age",
")",
":",
"if",
"start_age",
"<",
"0",
"or",
"end_age",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"'start_age and end_age can\\'t be negative'",
")",
"if",
"start_age",
">",
"end_age",
":",
"start_age",
",",
"end_age",
"=",
"end_age",
",",
"start_age",
"today",
"=",
"datetime",
".",
"date",
".",
"today",
"(",
")",
"try",
":",
"start_date",
"=",
"today",
".",
"replace",
"(",
"year",
"=",
"today",
".",
"year",
"-",
"end_age",
"-",
"1",
")",
"except",
"ValueError",
":",
"# February 29\r",
"start_date",
"=",
"today",
".",
"replace",
"(",
"year",
"=",
"today",
".",
"year",
"-",
"end_age",
"-",
"1",
",",
"day",
"=",
"28",
")",
"start_date",
"+=",
"datetime",
".",
"timedelta",
"(",
"days",
"=",
"1",
")",
"try",
":",
"end_date",
"=",
"today",
".",
"replace",
"(",
"year",
"=",
"today",
".",
"year",
"-",
"start_age",
")",
"except",
"ValueError",
":",
"# February 29\r",
"end_date",
"=",
"today",
".",
"replace",
"(",
"year",
"=",
"today",
".",
"year",
"-",
"start_age",
",",
"day",
"=",
"28",
")",
"date_range",
"=",
"DateRange",
"(",
"start_date",
",",
"end_date",
")",
"return",
"DOB",
"(",
"date_range",
"=",
"date_range",
")"
] | Take a person's minimal and maximal age and return a new DOB object
suitable for him. | [
"Take",
"a",
"person",
"s",
"minimal",
"and",
"maximal",
"age",
"and",
"return",
"a",
"new",
"DOB",
"object",
"suitable",
"for",
"him",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L672-L695 |
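The last three records fit together: from_age_range converts an age bracket into a birth-date range (one day past the oldest possible birth date, up to the youngest possible one, with a February-29 fallback), and age_range recovers the bracket from the stored range. A usage sketch, assuming the package is installed, that the import path follows the file path recorded above, and that age_range is exposed as a property like age:
```
# Usage sketch - assumes osrframework is installed; the import path follows the
# file path recorded above (osrframework/thirdparties/pipl_com/lib/fields.py).
from osrframework.thirdparties.pipl_com.lib.fields import DOB

dob = DOB.from_age_range(25, 30)
print(dob.date_range.start, dob.date_range.end)  # spans the matching birth dates
print(dob.age_range)                             # typically (25, 30), recovered from the range
```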
5,479 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | Relationship.from_dict | def from_dict(cls, d):
"""Extend Field.from_dict and also load the name from the dict."""
relationship = super(cls, cls).from_dict(d)
if relationship.name is not None:
relationship.name = Name.from_dict(relationship.name)
return relationship | python | def from_dict(cls, d):
"""Extend Field.from_dict and also load the name from the dict."""
relationship = super(cls, cls).from_dict(d)
if relationship.name is not None:
relationship.name = Name.from_dict(relationship.name)
return relationship | [
"def",
"from_dict",
"(",
"cls",
",",
"d",
")",
":",
"relationship",
"=",
"super",
"(",
"cls",
",",
"cls",
")",
".",
"from_dict",
"(",
"d",
")",
"if",
"relationship",
".",
"name",
"is",
"not",
"None",
":",
"relationship",
".",
"name",
"=",
"Name",
".",
"from_dict",
"(",
"relationship",
".",
"name",
")",
"return",
"relationship"
] | Extend Field.from_dict and also load the name from the dict. | [
"Extend",
"Field",
".",
"from_dict",
"and",
"also",
"load",
"the",
"name",
"from",
"the",
"dict",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L762-L767 |
5,480 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | DateRange.from_dict | def from_dict(d):
"""Transform the dict to a DateRange object."""
start = d.get('start')
end = d.get('end')
if not (start and end):
raise ValueError('DateRange must have both start and end')
start = str_to_date(start)
end = str_to_date(end)
return DateRange(start, end) | python | def from_dict(d):
"""Transform the dict to a DateRange object."""
start = d.get('start')
end = d.get('end')
if not (start and end):
raise ValueError('DateRange must have both start and end')
start = str_to_date(start)
end = str_to_date(end)
return DateRange(start, end) | [
"def",
"from_dict",
"(",
"d",
")",
":",
"start",
"=",
"d",
".",
"get",
"(",
"'start'",
")",
"end",
"=",
"d",
".",
"get",
"(",
"'end'",
")",
"if",
"not",
"(",
"start",
"and",
"end",
")",
":",
"raise",
"ValueError",
"(",
"'DateRange must have both start and end'",
")",
"start",
"=",
"str_to_date",
"(",
"start",
")",
"end",
"=",
"str_to_date",
"(",
"end",
")",
"return",
"DateRange",
"(",
"start",
",",
"end",
")"
] | Transform the dict to a DateRange object. | [
"Transform",
"the",
"dict",
"to",
"a",
"DateRange",
"object",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L854-L862 |
5,481 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/fields.py | DateRange.to_dict | def to_dict(self):
"""Transform the date-range to a dict."""
d = {}
d['start'] = date_to_str(self.start)
d['end'] = date_to_str(self.end)
return d | python | def to_dict(self):
"""Transform the date-range to a dict."""
d = {}
d['start'] = date_to_str(self.start)
d['end'] = date_to_str(self.end)
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"d",
"[",
"'start'",
"]",
"=",
"date_to_str",
"(",
"self",
".",
"start",
")",
"d",
"[",
"'end'",
"]",
"=",
"date_to_str",
"(",
"self",
".",
"end",
")",
"return",
"d"
] | Transform the date-range to a dict. | [
"Transform",
"the",
"date",
"-",
"range",
"to",
"a",
"dict",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/fields.py#L864-L869 |
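from_dict and to_dict just push the two endpoints through the module's str_to_date/date_to_str helpers, so a dict with both keys round-trips and a dict missing either raises ValueError. A usage sketch; the exact date-string format is defined by those helpers, which are not shown in these records, so the ISO-style strings below are an assumption:
```
# Usage sketch - assumes osrframework is installed and that its str_to_date /
# date_to_str helpers accept ISO-style "YYYY-MM-DD" strings (an assumption).
from osrframework.thirdparties.pipl_com.lib.fields import DateRange

date_range = DateRange.from_dict({'start': '1990-01-01', 'end': '1995-12-31'})
print(date_range.to_dict())   # {'start': ..., 'end': ...} - the same two keys back
# DateRange.from_dict({'start': '1990-01-01'}) would raise ValueError (missing 'end').
```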
5,482 | i3visio/osrframework | osrframework/enumeration.py | enumerateURL | def enumerateURL(urlDict, outputFolder, startIndex= 0, maxErrors = 100):
"""
Function that performs the enumeration itself.
"""
for i, url in enumerate(urlDict.keys()):
# Grabbing domain name:
domain = re.findall("://(.*)/", url)[0]
# Defining the starting index
index = startIndex
# The app will stop when this value reaches maxErrors
consecutiveErrors = 0
i3Browser = browser.Browser()
# Main loop that checks if the maximum number of errors has been reached
while consecutiveErrors <= maxErrors:
# creating the new URL to download
newQuery = url.replace("<INDEX>", str(index))
print(newQuery)
# Downloading the file
try:
data = i3Browser.recoverURL(newQuery)
filename = domain.replace("/", "|") + "_" + "-profile_" + str(index).rjust(10, "0") +".html"
if urlDict[url] != None:
if urlDict[url] in data:
print(general.info("Storing resource as:\t" + filename + "..."))
# The profile was found so we will store it:
with open( outputFolder + "/" + filename, "w") as oF:
oF.write(data)
else:
# The profile was found so we will store it:
print(general.info("Storing resource as:\t" + filename + "..."))
with open( outputFolder + "/" + filename, "w") as oF:
oF.write(data)
except:
pass
#logger.error("The resource could not be downloaded.")
index+=1 | python | def enumerateURL(urlDict, outputFolder, startIndex= 0, maxErrors = 100):
"""
Function that performs the enumeration itself.
"""
for i, url in enumerate(urlDict.keys()):
# Grabbing domain name:
domain = re.findall("://(.*)/", url)[0]
# Defining the starting index
index = startIndex
# The app will stop when this value reaches maxErrors
consecutiveErrors = 0
i3Browser = browser.Browser()
# Main loop that checks if the maximum number of errors has been reached
while consecutiveErrors <= maxErrors:
# creating the new URL to download
newQuery = url.replace("<INDEX>", str(index))
print(newQuery)
# Downloading the file
try:
data = i3Browser.recoverURL(newQuery)
filename = domain.replace("/", "|") + "_" + "-profile_" + str(index).rjust(10, "0") +".html"
if urlDict[url] != None:
if urlDict[url] in data:
print(general.info("Storing resource as:\t" + filename + "..."))
# The profile was found so we will store it:
with open( outputFolder + "/" + filename, "w") as oF:
oF.write(data)
else:
# The profile was found so we will store it:
print(general.info("Storing resource as:\t" + filename + "..."))
with open( outputFolder + "/" + filename, "w") as oF:
oF.write(data)
except:
pass
#logger.error("The resource could not be downloaded.")
index+=1 | [
"def",
"enumerateURL",
"(",
"urlDict",
",",
"outputFolder",
",",
"startIndex",
"=",
"0",
",",
"maxErrors",
"=",
"100",
")",
":",
"for",
"i",
",",
"url",
"in",
"enumerate",
"(",
"urlDict",
".",
"keys",
"(",
")",
")",
":",
"# Grabbing domain name:\r",
"domain",
"=",
"re",
".",
"findall",
"(",
"\"://(.*)/\"",
",",
"url",
")",
"[",
"0",
"]",
"# Defining the starting index\r",
"index",
"=",
"startIndex",
"# The app will stop when this value reaches maxErrors\r",
"consecutiveErrors",
"=",
"0",
"i3Browser",
"=",
"browser",
".",
"Browser",
"(",
")",
"# Main loop that checks if the maximum number of errors has been reached\r",
"while",
"consecutiveErrors",
"<=",
"maxErrors",
":",
"# creating the new URL to download\r",
"newQuery",
"=",
"url",
".",
"replace",
"(",
"\"<INDEX>\"",
",",
"str",
"(",
"index",
")",
")",
"print",
"(",
"newQuery",
")",
"# Downloading the file\r",
"try",
":",
"data",
"=",
"i3Browser",
".",
"recoverURL",
"(",
"newQuery",
")",
"filename",
"=",
"domain",
".",
"replace",
"(",
"\"/\"",
",",
"\"|\"",
")",
"+",
"\"_\"",
"+",
"\"-profile_\"",
"+",
"str",
"(",
"index",
")",
".",
"rjust",
"(",
"10",
",",
"\"0\"",
")",
"+",
"\".html\"",
"if",
"urlDict",
"[",
"url",
"]",
"!=",
"None",
":",
"if",
"urlDict",
"[",
"url",
"]",
"in",
"data",
":",
"print",
"(",
"general",
".",
"info",
"(",
"\"Storing resource as:\\t\"",
"+",
"filename",
"+",
"\"...\"",
")",
")",
"# The profile was found so we will store it:\r",
"with",
"open",
"(",
"outputFolder",
"+",
"\"/\"",
"+",
"filename",
",",
"\"w\"",
")",
"as",
"oF",
":",
"oF",
".",
"write",
"(",
"data",
")",
"else",
":",
"# The profile was found so we will store it:\r",
"print",
"(",
"general",
".",
"info",
"(",
"\"Storing resource as:\\t\"",
"+",
"filename",
"+",
"\"...\"",
")",
")",
"with",
"open",
"(",
"outputFolder",
"+",
"\"/\"",
"+",
"filename",
",",
"\"w\"",
")",
"as",
"oF",
":",
"oF",
".",
"write",
"(",
"data",
")",
"except",
":",
"pass",
"#logger.error(\"The resource could not be downloaded.\")\r",
"index",
"+=",
"1"
] | Function that performs the enumeration itself. | [
"Function",
"that",
"performs",
"the",
"enumeration",
"itself",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/enumeration.py#L36-L78 |
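The pattern in enumerateURL is: substitute an increasing counter into a URL template at the <INDEX> marker, download each page, and save the ones that look like profiles. Note that in the recorded code consecutiveErrors is never incremented, so the maxErrors guard cannot actually stop the loop. The sketch below shows the same pattern with the stop condition wired up, using the standard library instead of the project's browser wrapper (a simplification for the sake of a self-contained example):
```
import urllib.request

def enumerate_url(template, out_dir, start=0, max_errors=100):
    # Fetch template URLs ("<INDEX>" replaced by a counter) until too many
    # consecutive downloads fail, saving each successful page to disk.
    index, consecutive_errors = start, 0
    while consecutive_errors <= max_errors:
        url = template.replace("<INDEX>", str(index))
        try:
            data = urllib.request.urlopen(url, timeout=10).read()
            consecutive_errors = 0  # reset on success
            with open("%s/profile_%010d.html" % (out_dir, index), "wb") as handle:
                handle.write(data)
        except Exception:
            consecutive_errors += 1  # count failures so the loop can terminate
        index += 1

# enumerate_url("https://example.com/user/<INDEX>", "/tmp/profiles")
```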
5,483 | i3visio/osrframework | osrframework/thirdparties/haveibeenpwned_com/hibp.py | checkIfEmailWasHacked | def checkIfEmailWasHacked(email=None, sleepSeconds=1):
"""
Method that checks if the given email is stored in the HIBP website.
This function automatically waits a second to avoid problems with the API
rate limit. An example of the json received:
```
[{"Title":"Adobe","Name":"Adobe","Domain":"adobe.com","BreachDate":"2013-10-4","AddedDate":"2013-12-04T00:12Z","PwnCount":152445165,"Description":"The big one. In October 2013, 153 million Adobe accounts were breached with each containing an internal ID, username, email, <em>encrypted</em> password and a password hint in plain text. The password cryptography was poorly done and <a href=\"http://stricture-group.com/files/adobe-top100.txt\" target=\"_blank\">many were quickly resolved back to plain text</a>. The unencrypted hints also <a href=\"http://www.troyhunt.com/2013/11/adobe-credentials-and-serious.html\" target=\"_blank\">disclosed much about the passwords</a> adding further to the risk that hundreds of millions of Adobe customers already faced.","DataClasses":["Email addresses","Password hints","Passwords","Usernames"]}]
```
Args:
-----
email: Email to verify in HIBP.
Returns:
--------
A python structure for the json received. If nothing was found, it will
return an empty list.
"""
# Sleeping just a little bit
time.sleep(sleepSeconds)
print("\t[*] Bypassing Cloudflare Restriction...")
ua = 'osrframework 0.18'
useragent = {'User-Agent': ua}
cookies, user_agent = cfscrape.get_tokens('https://haveibeenpwned.com/api/v2/breachedaccount/[email protected]', user_agent=ua)
leaks = []
apiURL = "https://haveibeenpwned.com/api/v2/breachedaccount/{}".format(email)
# Accessing the HIBP API
time.sleep(sleepSeconds)
# Building API query
data = requests.get(
apiURL,
headers=useragent,
cookies=cookies,
verify=True
).text
# Reading the text data onto python structures
try:
jsonData = json.loads(data)
for e in jsonData:
# Building the i3visio like structure
new = {}
new["value"] = "(HIBP) " + e.get("Name") + " - " + email
new["type"] = "i3visio.profile"
new["attributes"] = [
{
"type": "i3visio.platform_leaked",
"value": e.get("Name"),
"attributes": []
},
{
"type": "@source",
"value": "haveibeenpwned.com",
"attributes": []
},
{
"type": "@source_uri",
"value": apiURL,
"attributes": []
},
{
"type": "@pwn_count",
"value": e.get("PwnCount"),
"attributes": []
},
{
"type": "@added_date",
"value": e.get("AddedDate"),
"attributes": []
},
{
"type": "@breach_date",
"value": e.get("BreachDate"),
"attributes": []
},
{
"type": "@description",
"value": e.get("Description"),
"attributes": []
}
] + general.expandEntitiesFromEmail(email)
leaks.append(new)
except ValueError:
return []
except Exception:
print("ERROR: Something happenned when using HIBP API.")
return []
return leaks | python | def checkIfEmailWasHacked(email=None, sleepSeconds=1):
"""
Method that checks if the given email is stored in the HIBP website.
This function automatically waits a second to avoid problems with the API
rate limit. An example of the json received:
```
[{"Title":"Adobe","Name":"Adobe","Domain":"adobe.com","BreachDate":"2013-10-4","AddedDate":"2013-12-04T00:12Z","PwnCount":152445165,"Description":"The big one. In October 2013, 153 million Adobe accounts were breached with each containing an internal ID, username, email, <em>encrypted</em> password and a password hint in plain text. The password cryptography was poorly done and <a href=\"http://stricture-group.com/files/adobe-top100.txt\" target=\"_blank\">many were quickly resolved back to plain text</a>. The unencrypted hints also <a href=\"http://www.troyhunt.com/2013/11/adobe-credentials-and-serious.html\" target=\"_blank\">disclosed much about the passwords</a> adding further to the risk that hundreds of millions of Adobe customers already faced.","DataClasses":["Email addresses","Password hints","Passwords","Usernames"]}]
```
Args:
-----
email: Email to verify in HIBP.
Returns:
--------
A python structure for the json received. If nothing was found, it will
return an empty list.
"""
# Sleeping just a little bit
time.sleep(sleepSeconds)
print("\t[*] Bypassing Cloudflare Restriction...")
ua = 'osrframework 0.18'
useragent = {'User-Agent': ua}
cookies, user_agent = cfscrape.get_tokens('https://haveibeenpwned.com/api/v2/breachedaccount/[email protected]', user_agent=ua)
leaks = []
apiURL = "https://haveibeenpwned.com/api/v2/breachedaccount/{}".format(email)
# Accessing the HIBP API
time.sleep(sleepSeconds)
# Building API query
data = requests.get(
apiURL,
headers=useragent,
cookies=cookies,
verify=True
).text
# Reading the text data onto python structures
try:
jsonData = json.loads(data)
for e in jsonData:
# Building the i3visio like structure
new = {}
new["value"] = "(HIBP) " + e.get("Name") + " - " + email
new["type"] = "i3visio.profile"
new["attributes"] = [
{
"type": "i3visio.platform_leaked",
"value": e.get("Name"),
"attributes": []
},
{
"type": "@source",
"value": "haveibeenpwned.com",
"attributes": []
},
{
"type": "@source_uri",
"value": apiURL,
"attributes": []
},
{
"type": "@pwn_count",
"value": e.get("PwnCount"),
"attributes": []
},
{
"type": "@added_date",
"value": e.get("AddedDate"),
"attributes": []
},
{
"type": "@breach_date",
"value": e.get("BreachDate"),
"attributes": []
},
{
"type": "@description",
"value": e.get("Description"),
"attributes": []
}
] + general.expandEntitiesFromEmail(email)
leaks.append(new)
except ValueError:
return []
except Exception:
print("ERROR: Something happenned when using HIBP API.")
return []
return leaks | [
"def",
"checkIfEmailWasHacked",
"(",
"email",
"=",
"None",
",",
"sleepSeconds",
"=",
"1",
")",
":",
"# Sleeping just a little bit",
"time",
".",
"sleep",
"(",
"sleepSeconds",
")",
"print",
"(",
"\"\\t[*] Bypassing Cloudflare Restriction...\"",
")",
"ua",
"=",
"'osrframework 0.18'",
"useragent",
"=",
"{",
"'User-Agent'",
":",
"ua",
"}",
"cookies",
",",
"user_agent",
"=",
"cfscrape",
".",
"get_tokens",
"(",
"'https://haveibeenpwned.com/api/v2/breachedaccount/[email protected]'",
",",
"user_agent",
"=",
"ua",
")",
"leaks",
"=",
"[",
"]",
"apiURL",
"=",
"\"https://haveibeenpwned.com/api/v2/breachedaccount/{}\"",
".",
"format",
"(",
"email",
")",
"# Accessing the HIBP API",
"time",
".",
"sleep",
"(",
"sleepSeconds",
")",
"# Building API query",
"data",
"=",
"requests",
".",
"get",
"(",
"apiURL",
",",
"headers",
"=",
"useragent",
",",
"cookies",
"=",
"cookies",
",",
"verify",
"=",
"True",
")",
".",
"text",
"# Reading the text data onto python structures",
"try",
":",
"jsonData",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"for",
"e",
"in",
"jsonData",
":",
"# Building the i3visio like structure",
"new",
"=",
"{",
"}",
"new",
"[",
"\"value\"",
"]",
"=",
"\"(HIBP) \"",
"+",
"e",
".",
"get",
"(",
"\"Name\"",
")",
"+",
"\" - \"",
"+",
"email",
"new",
"[",
"\"type\"",
"]",
"=",
"\"i3visio.profile\"",
"new",
"[",
"\"attributes\"",
"]",
"=",
"[",
"{",
"\"type\"",
":",
"\"i3visio.platform_leaked\"",
",",
"\"value\"",
":",
"e",
".",
"get",
"(",
"\"Name\"",
")",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
",",
"{",
"\"type\"",
":",
"\"@source\"",
",",
"\"value\"",
":",
"\"haveibeenpwned.com\"",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
",",
"{",
"\"type\"",
":",
"\"@source_uri\"",
",",
"\"value\"",
":",
"apiURL",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
",",
"{",
"\"type\"",
":",
"\"@pwn_count\"",
",",
"\"value\"",
":",
"e",
".",
"get",
"(",
"\"PwnCount\"",
")",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
",",
"{",
"\"type\"",
":",
"\"@added_date\"",
",",
"\"value\"",
":",
"e",
".",
"get",
"(",
"\"AddedDate\"",
")",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
",",
"{",
"\"type\"",
":",
"\"@breach_date\"",
",",
"\"value\"",
":",
"e",
".",
"get",
"(",
"\"BreachDate\"",
")",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
",",
"{",
"\"type\"",
":",
"\"@description\"",
",",
"\"value\"",
":",
"e",
".",
"get",
"(",
"\"Description\"",
")",
",",
"\"attributes\"",
":",
"[",
"]",
"}",
"]",
"+",
"general",
".",
"expandEntitiesFromEmail",
"(",
"email",
")",
"leaks",
".",
"append",
"(",
"new",
")",
"except",
"ValueError",
":",
"return",
"[",
"]",
"except",
"Exception",
":",
"print",
"(",
"\"ERROR: Something happenned when using HIBP API.\"",
")",
"return",
"[",
"]",
"return",
"leaks"
] | Method that checks if the given email is stored in the HIBP website.
This function automatically waits a second to avoid problems with the API
rate limit. An example of the json received:
```
[{"Title":"Adobe","Name":"Adobe","Domain":"adobe.com","BreachDate":"2013-10-4","AddedDate":"2013-12-04T00:12Z","PwnCount":152445165,"Description":"The big one. In October 2013, 153 million Adobe accounts were breached with each containing an internal ID, username, email, <em>encrypted</em> password and a password hint in plain text. The password cryptography was poorly done and <a href=\"http://stricture-group.com/files/adobe-top100.txt\" target=\"_blank\">many were quickly resolved back to plain text</a>. The unencrypted hints also <a href=\"http://www.troyhunt.com/2013/11/adobe-credentials-and-serious.html\" target=\"_blank\">disclosed much about the passwords</a> adding further to the risk that hundreds of millions of Adobe customers already faced.","DataClasses":["Email addresses","Password hints","Passwords","Usernames"]}]
```
Args:
-----
email: Email to verify in HIBP.
Returns:
--------
A python structure for the json received. If nothing was found, it will
return an empty list. | [
"Method",
"that",
"checks",
"if",
"the",
"given",
"email",
"is",
"stored",
"in",
"the",
"HIBP",
"website",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/haveibeenpwned_com/hibp.py#L30-L124 |
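A usage sketch for the function above. It needs network access plus the cfscrape dependency the function itself imports, and it depends on the hard-coded haveibeenpwned.com v2 endpoint still answering; the return value is the list of i3visio-style entity dicts built in the code (empty when nothing is found or the call fails):
```
# Usage sketch - assumes osrframework (with cfscrape) is installed and the
# queried address is one you are allowed to look up.
from osrframework.thirdparties.haveibeenpwned_com.hibp import checkIfEmailWasHacked

leaks = checkIfEmailWasHacked("[email protected]", sleepSeconds=1)
for entity in leaks:
    # Each entity is an "i3visio.profile" dict; pull out the leaked platform name.
    platform = next((a["value"] for a in entity["attributes"]
                     if a["type"] == "i3visio.platform_leaked"), "?")
    print(entity["value"], "->", platform)
```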
5,484 | i3visio/osrframework | osrframework/searchengines/google.py | get_page | def get_page(url):
"""
Request the given URL and return the response page, using the cookie jar.
@type url: str
@param url: URL to retrieve.
@rtype: str
@return: Web page retrieved for the given URL.
@raise IOError: An exception is raised on error.
@raise urllib2.URLError: An exception is raised on error.
@raise urllib2.HTTPError: An exception is raised on error.
"""
request = Request(url)
request.add_header('User-Agent',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)')
cookie_jar.add_cookie_header(request)
response = urlopen(request)
cookie_jar.extract_cookies(response, request)
html = response.read()
response.close()
cookie_jar.save()
return html | python | def get_page(url):
"""
Request the given URL and return the response page, using the cookie jar.
@type url: str
@param url: URL to retrieve.
@rtype: str
@return: Web page retrieved for the given URL.
@raise IOError: An exception is raised on error.
@raise urllib2.URLError: An exception is raised on error.
@raise urllib2.HTTPError: An exception is raised on error.
"""
request = Request(url)
request.add_header('User-Agent',
'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)')
cookie_jar.add_cookie_header(request)
response = urlopen(request)
cookie_jar.extract_cookies(response, request)
html = response.read()
response.close()
cookie_jar.save()
return html | [
"def",
"get_page",
"(",
"url",
")",
":",
"request",
"=",
"Request",
"(",
"url",
")",
"request",
".",
"add_header",
"(",
"'User-Agent'",
",",
"'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)'",
")",
"cookie_jar",
".",
"add_cookie_header",
"(",
"request",
")",
"response",
"=",
"urlopen",
"(",
"request",
")",
"cookie_jar",
".",
"extract_cookies",
"(",
"response",
",",
"request",
")",
"html",
"=",
"response",
".",
"read",
"(",
")",
"response",
".",
"close",
"(",
")",
"cookie_jar",
".",
"save",
"(",
")",
"return",
"html"
] | Request the given URL and return the response page, using the cookie jar.
@type url: str
@param url: URL to retrieve.
@rtype: str
@return: Web page retrieved for the given URL.
@raise IOError: An exception is raised on error.
@raise urllib2.URLError: An exception is raised on error.
@raise urllib2.HTTPError: An exception is raised on error. | [
"Request",
"the",
"given",
"URL",
"and",
"return",
"the",
"response",
"page",
"using",
"the",
"cookie",
"jar",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/searchengines/google.py#L62-L85 |
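get_page is the building block the search generator below leans on: a single GET with a browser-like User-Agent, with cookies carried through a persistent module-level jar. A self-contained Python 3 equivalent using only the standard library (the real module also loads and saves its jar to disk):
```
from http.cookiejar import LWPCookieJar
from urllib.request import Request, urlopen

cookie_jar = LWPCookieJar()  # the real module persists this jar between runs

def get_page(url):
    # GET the URL with a fixed User-Agent, threading cookies through the jar.
    request = Request(url, headers={
        "User-Agent": "Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)"})
    cookie_jar.add_cookie_header(request)
    response = urlopen(request)
    cookie_jar.extract_cookies(response, request)
    try:
        return response.read()
    finally:
        response.close()
```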
5,485 | i3visio/osrframework | osrframework/searchengines/google.py | search | def search(query, tld='com', lang='en', num=10, start=0, stop=None, pause=2.0,
only_standard=False):
"""
Search the given query string using Google.
@type query: str
@param query: Query string. Must NOT be url-encoded.
@type tld: str
@param tld: Top level domain.
@type lang: str
    @param lang: Language.
@type num: int
@param num: Number of results per page.
@type start: int
@param start: First result to retrieve.
@type stop: int
@param stop: Last result to retrieve.
Use C{None} to keep searching forever.
@type pause: float
@param pause: Lapse to wait between HTTP requests.
A lapse too long will make the search slow, but a lapse too short may
cause Google to block your IP. Your mileage may vary!
@type only_standard: bool
@param only_standard: If C{True}, only returns the standard results from
each page. If C{False}, it returns every possible link from each page,
except for those that point back to Google itself. Defaults to C{False}
for backwards compatibility with older versions of this module.
@rtype: generator
@return: Generator (iterator) that yields found URLs. If the C{stop}
parameter is C{None} the iterator will loop forever.
"""
# Lazy import of BeautifulSoup.
# Try to use BeautifulSoup 4 if available, fall back to 3 otherwise.
global BeautifulSoup
if BeautifulSoup is None:
try:
from bs4 import BeautifulSoup
except ImportError:
from BeautifulSoup import BeautifulSoup
# Set of hashes for the results found.
# This is used to avoid repeated results.
hashes = set()
# Prepare the search string.
query = quote_plus(query)
# Grab the cookie from the home page.
get_page(url_home % vars())
# Prepare the URL of the first request.
if start:
if num == 10:
url = url_next_page % vars()
else:
url = url_next_page_num % vars()
else:
if num == 10:
url = url_search % vars()
else:
url = url_search_num % vars()
# Loop until we reach the maximum result, if any (otherwise, loop forever).
while not stop or start < stop:
# Sleep between requests.
time.sleep(pause)
# Request the Google Search results page.
html = get_page(url)
# Parse the response and process every anchored URL.
soup = BeautifulSoup(html)
anchors = soup.find(id='search').findAll('a')
for a in anchors:
# Leave only the "standard" results if requested.
# Otherwise grab all possible links.
if only_standard and (
not a.parent or a.parent.name.lower() != "h3"):
continue
# Get the URL from the anchor tag.
try:
link = a['href']
except KeyError:
continue
# Filter invalid links and links pointing to Google itself.
link = filter_result(link)
if not link:
continue
# Discard repeated results.
h = hash(link)
if h in hashes:
continue
hashes.add(h)
# Yield the result.
yield link
# End if there are no more results.
if not soup.find(id='nav'):
break
# Prepare the URL for the next request.
start += num
if num == 10:
url = url_next_page % vars()
else:
url = url_next_page_num % vars() | python | def search(query, tld='com', lang='en', num=10, start=0, stop=None, pause=2.0,
only_standard=False):
"""
Search the given query string using Google.
@type query: str
@param query: Query string. Must NOT be url-encoded.
@type tld: str
@param tld: Top level domain.
@type lang: str
    @param lang: Language.
@type num: int
@param num: Number of results per page.
@type start: int
@param start: First result to retrieve.
@type stop: int
@param stop: Last result to retrieve.
Use C{None} to keep searching forever.
@type pause: float
@param pause: Lapse to wait between HTTP requests.
A lapse too long will make the search slow, but a lapse too short may
cause Google to block your IP. Your mileage may vary!
@type only_standard: bool
@param only_standard: If C{True}, only returns the standard results from
each page. If C{False}, it returns every possible link from each page,
except for those that point back to Google itself. Defaults to C{False}
for backwards compatibility with older versions of this module.
@rtype: generator
@return: Generator (iterator) that yields found URLs. If the C{stop}
parameter is C{None} the iterator will loop forever.
"""
# Lazy import of BeautifulSoup.
# Try to use BeautifulSoup 4 if available, fall back to 3 otherwise.
global BeautifulSoup
if BeautifulSoup is None:
try:
from bs4 import BeautifulSoup
except ImportError:
from BeautifulSoup import BeautifulSoup
# Set of hashes for the results found.
# This is used to avoid repeated results.
hashes = set()
# Prepare the search string.
query = quote_plus(query)
# Grab the cookie from the home page.
get_page(url_home % vars())
# Prepare the URL of the first request.
if start:
if num == 10:
url = url_next_page % vars()
else:
url = url_next_page_num % vars()
else:
if num == 10:
url = url_search % vars()
else:
url = url_search_num % vars()
# Loop until we reach the maximum result, if any (otherwise, loop forever).
while not stop or start < stop:
# Sleep between requests.
time.sleep(pause)
# Request the Google Search results page.
html = get_page(url)
# Parse the response and process every anchored URL.
soup = BeautifulSoup(html)
anchors = soup.find(id='search').findAll('a')
for a in anchors:
# Leave only the "standard" results if requested.
# Otherwise grab all possible links.
if only_standard and (
not a.parent or a.parent.name.lower() != "h3"):
continue
# Get the URL from the anchor tag.
try:
link = a['href']
except KeyError:
continue
# Filter invalid links and links pointing to Google itself.
link = filter_result(link)
if not link:
continue
# Discard repeated results.
h = hash(link)
if h in hashes:
continue
hashes.add(h)
# Yield the result.
yield link
# End if there are no more results.
if not soup.find(id='nav'):
break
# Prepare the URL for the next request.
start += num
if num == 10:
url = url_next_page % vars()
else:
url = url_next_page_num % vars() | [
"def",
"search",
"(",
"query",
",",
"tld",
"=",
"'com'",
",",
"lang",
"=",
"'en'",
",",
"num",
"=",
"10",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
",",
"pause",
"=",
"2.0",
",",
"only_standard",
"=",
"False",
")",
":",
"# Lazy import of BeautifulSoup.\r",
"# Try to use BeautifulSoup 4 if available, fall back to 3 otherwise.\r",
"global",
"BeautifulSoup",
"if",
"BeautifulSoup",
"is",
"None",
":",
"try",
":",
"from",
"bs4",
"import",
"BeautifulSoup",
"except",
"ImportError",
":",
"from",
"BeautifulSoup",
"import",
"BeautifulSoup",
"# Set of hashes for the results found.\r",
"# This is used to avoid repeated results.\r",
"hashes",
"=",
"set",
"(",
")",
"# Prepare the search string.\r",
"query",
"=",
"quote_plus",
"(",
"query",
")",
"# Grab the cookie from the home page.\r",
"get_page",
"(",
"url_home",
"%",
"vars",
"(",
")",
")",
"# Prepare the URL of the first request.\r",
"if",
"start",
":",
"if",
"num",
"==",
"10",
":",
"url",
"=",
"url_next_page",
"%",
"vars",
"(",
")",
"else",
":",
"url",
"=",
"url_next_page_num",
"%",
"vars",
"(",
")",
"else",
":",
"if",
"num",
"==",
"10",
":",
"url",
"=",
"url_search",
"%",
"vars",
"(",
")",
"else",
":",
"url",
"=",
"url_search_num",
"%",
"vars",
"(",
")",
"# Loop until we reach the maximum result, if any (otherwise, loop forever).\r",
"while",
"not",
"stop",
"or",
"start",
"<",
"stop",
":",
"# Sleep between requests.\r",
"time",
".",
"sleep",
"(",
"pause",
")",
"# Request the Google Search results page.\r",
"html",
"=",
"get_page",
"(",
"url",
")",
"# Parse the response and process every anchored URL.\r",
"soup",
"=",
"BeautifulSoup",
"(",
"html",
")",
"anchors",
"=",
"soup",
".",
"find",
"(",
"id",
"=",
"'search'",
")",
".",
"findAll",
"(",
"'a'",
")",
"for",
"a",
"in",
"anchors",
":",
"# Leave only the \"standard\" results if requested.\r",
"# Otherwise grab all possible links.\r",
"if",
"only_standard",
"and",
"(",
"not",
"a",
".",
"parent",
"or",
"a",
".",
"parent",
".",
"name",
".",
"lower",
"(",
")",
"!=",
"\"h3\"",
")",
":",
"continue",
"# Get the URL from the anchor tag.\r",
"try",
":",
"link",
"=",
"a",
"[",
"'href'",
"]",
"except",
"KeyError",
":",
"continue",
"# Filter invalid links and links pointing to Google itself.\r",
"link",
"=",
"filter_result",
"(",
"link",
")",
"if",
"not",
"link",
":",
"continue",
"# Discard repeated results.\r",
"h",
"=",
"hash",
"(",
"link",
")",
"if",
"h",
"in",
"hashes",
":",
"continue",
"hashes",
".",
"add",
"(",
"h",
")",
"# Yield the result.\r",
"yield",
"link",
"# End if there are no more results.\r",
"if",
"not",
"soup",
".",
"find",
"(",
"id",
"=",
"'nav'",
")",
":",
"break",
"# Prepare the URL for the next request.\r",
"start",
"+=",
"num",
"if",
"num",
"==",
"10",
":",
"url",
"=",
"url_next_page",
"%",
"vars",
"(",
")",
"else",
":",
"url",
"=",
"url_next_page_num",
"%",
"vars",
"(",
")"
] | Search the given query string using Google.
@type query: str
@param query: Query string. Must NOT be url-encoded.
@type tld: str
@param tld: Top level domain.
@type lang: str
    @param lang: Language.
@type num: int
@param num: Number of results per page.
@type start: int
@param start: First result to retrieve.
@type stop: int
@param stop: Last result to retrieve.
Use C{None} to keep searching forever.
@type pause: float
@param pause: Lapse to wait between HTTP requests.
A lapse too long will make the search slow, but a lapse too short may
cause Google to block your IP. Your mileage may vary!
@type only_standard: bool
@param only_standard: If C{True}, only returns the standard results from
each page. If C{False}, it returns every possible link from each page,
except for those that point back to Google itself. Defaults to C{False}
for backwards compatibility with older versions of this module.
@rtype: generator
@return: Generator (iterator) that yields found URLs. If the C{stop}
parameter is C{None} the iterator will loop forever. | [
"Search",
"the",
"given",
"query",
"string",
"using",
"Google",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/searchengines/google.py#L114-L234 |
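Because search is a generator, result URLs stream out as pages are fetched; stop bounds the crawl, pause throttles it, and only_standard restricts results to the main hits. A hedged usage sketch (scraping Google result pages is brittle and may be blocked, as the docstring itself warns):
```
# Usage sketch - assumes osrframework is installed and that Google still serves
# result pages this scraper can parse (page markup changes frequently).
from osrframework.searchengines.google import search

for url in search('"john doe" site:github.com', stop=20, pause=2.0):
    print(url)
```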
5,486 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | FieldsContainer.add_fields | def add_fields(self, fields):
"""Add the fields to their corresponding container.
`fields` is an iterable of field objects from osrframework.thirdparties.pipl_com.lib.fields.
"""
for field in fields:
cls = field.__class__
try:
container = FieldsContainer.class_container[cls]
except KeyError:
raise ValueError('Object of type %s is an invalid field' % cls)
getattr(self, container).append(field) | python | def add_fields(self, fields):
"""Add the fields to their corresponding container.
`fields` is an iterable of field objects from osrframework.thirdparties.pipl_com.lib.fields.
"""
for field in fields:
cls = field.__class__
try:
container = FieldsContainer.class_container[cls]
except KeyError:
raise ValueError('Object of type %s is an invalid field' % cls)
getattr(self, container).append(field) | [
"def",
"add_fields",
"(",
"self",
",",
"fields",
")",
":",
"for",
"field",
"in",
"fields",
":",
"cls",
"=",
"field",
".",
"__class__",
"try",
":",
"container",
"=",
"FieldsContainer",
".",
"class_container",
"[",
"cls",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"'Object of type %s is an invalid field'",
"%",
"cls",
")",
"getattr",
"(",
"self",
",",
"container",
")",
".",
"append",
"(",
"field",
")"
] | Add the fields to their corresponding container.
`fields` is an iterable of field objects from osrframework.thirdparties.pipl_com.lib.fields. | [
"Add",
"the",
"fields",
"to",
"their",
"corresponding",
"container",
".",
"fields",
"is",
"an",
"iterable",
"of",
"field",
"objects",
"from",
"osrframework",
".",
"thirdparties",
".",
"pipl_com",
".",
"lib",
".",
"fields",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L48-L60 |
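The dispatch in add_fields hinges on one class_container mapping from field class to the name of the list attribute that collects it; the same table is what lets all_fields, fields_from_dict and fields_to_dict in the next records stay generic. A stripped-down sketch of the pattern with toy field classes:
```
class NameField: pass
class EmailField: pass

class Container:
    # field class -> name of the list attribute that collects its instances
    class_container = {NameField: "names", EmailField: "emails"}

    def __init__(self):
        for attr in self.class_container.values():
            setattr(self, attr, [])

    def add_fields(self, fields):
        for field in fields:
            try:
                attr = self.class_container[field.__class__]
            except KeyError:
                raise ValueError("Object of type %s is an invalid field" % field.__class__)
            getattr(self, attr).append(field)

container = Container()
container.add_fields([NameField(), EmailField(), NameField()])
print(len(container.names), len(container.emails))  # 2 1
```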
5,487 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | FieldsContainer.all_fields | def all_fields(self):
"""A list with all the fields contained in this object."""
return [field
for container in FieldsContainer.class_container.values()
for field in getattr(self, container)] | python | def all_fields(self):
"""A list with all the fields contained in this object."""
return [field
for container in FieldsContainer.class_container.values()
for field in getattr(self, container)] | [
"def",
"all_fields",
"(",
"self",
")",
":",
"return",
"[",
"field",
"for",
"container",
"in",
"FieldsContainer",
".",
"class_container",
".",
"values",
"(",
")",
"for",
"field",
"in",
"getattr",
"(",
"self",
",",
"container",
")",
"]"
] | A list with all the fields contained in this object. | [
"A",
"list",
"with",
"all",
"the",
"fields",
"contained",
"in",
"this",
"object",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L63-L67 |
5,488 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | FieldsContainer.fields_from_dict | def fields_from_dict(d):
"""Load the fields from the dict, return a list with all the fields."""
class_container = FieldsContainer.class_container
fields = [field_cls.from_dict(field_dict)
for field_cls, container in class_container.iteritems()
for field_dict in d.get(container, [])]
return fields | python | def fields_from_dict(d):
"""Load the fields from the dict, return a list with all the fields."""
class_container = FieldsContainer.class_container
fields = [field_cls.from_dict(field_dict)
for field_cls, container in class_container.iteritems()
for field_dict in d.get(container, [])]
return fields | [
"def",
"fields_from_dict",
"(",
"d",
")",
":",
"class_container",
"=",
"FieldsContainer",
".",
"class_container",
"fields",
"=",
"[",
"field_cls",
".",
"from_dict",
"(",
"field_dict",
")",
"for",
"field_cls",
",",
"container",
"in",
"class_container",
".",
"iteritems",
"(",
")",
"for",
"field_dict",
"in",
"d",
".",
"get",
"(",
"container",
",",
"[",
"]",
")",
"]",
"return",
"fields"
] | Load the fields from the dict, return a list with all the fields. | [
"Load",
"the",
"fields",
"from",
"the",
"dict",
"return",
"a",
"list",
"with",
"all",
"the",
"fields",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L70-L76 |
5,489 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | FieldsContainer.fields_to_dict | def fields_to_dict(self):
"""Transform the object to a dict and return the dict."""
d = {}
for container in FieldsContainer.class_container.values():
fields = getattr(self, container)
if fields:
d[container] = [field.to_dict() for field in fields]
return d | python | def fields_to_dict(self):
"""Transform the object to a dict and return the dict."""
d = {}
for container in FieldsContainer.class_container.values():
fields = getattr(self, container)
if fields:
d[container] = [field.to_dict() for field in fields]
return d | [
"def",
"fields_to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"for",
"container",
"in",
"FieldsContainer",
".",
"class_container",
".",
"values",
"(",
")",
":",
"fields",
"=",
"getattr",
"(",
"self",
",",
"container",
")",
"if",
"fields",
":",
"d",
"[",
"container",
"]",
"=",
"[",
"field",
".",
"to_dict",
"(",
")",
"for",
"field",
"in",
"fields",
"]",
"return",
"d"
] | Transform the object to a dict and return the dict. | [
"Transform",
"the",
"object",
"to",
"a",
"dict",
"and",
"return",
"the",
"dict",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L78-L85 |
5,490 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | Record.from_dict | def from_dict(d):
"""Transform the dict to a record object and return the record."""
query_params_match = d.get('@query_params_match')
query_person_match = d.get('@query_person_match')
valid_since = d.get('@valid_since')
if valid_since:
valid_since = str_to_datetime(valid_since)
source = Source.from_dict(d.get('source', {}))
fields = Record.fields_from_dict(d)
return Record(source=source, fields=fields,
query_params_match=query_params_match,
query_person_match=query_person_match,
valid_since=valid_since) | python | def from_dict(d):
"""Transform the dict to a record object and return the record."""
query_params_match = d.get('@query_params_match')
query_person_match = d.get('@query_person_match')
valid_since = d.get('@valid_since')
if valid_since:
valid_since = str_to_datetime(valid_since)
source = Source.from_dict(d.get('source', {}))
fields = Record.fields_from_dict(d)
return Record(source=source, fields=fields,
query_params_match=query_params_match,
query_person_match=query_person_match,
valid_since=valid_since) | [
"def",
"from_dict",
"(",
"d",
")",
":",
"query_params_match",
"=",
"d",
".",
"get",
"(",
"'@query_params_match'",
")",
"query_person_match",
"=",
"d",
".",
"get",
"(",
"'@query_person_match'",
")",
"valid_since",
"=",
"d",
".",
"get",
"(",
"'@valid_since'",
")",
"if",
"valid_since",
":",
"valid_since",
"=",
"str_to_datetime",
"(",
"valid_since",
")",
"source",
"=",
"Source",
".",
"from_dict",
"(",
"d",
".",
"get",
"(",
"'source'",
",",
"{",
"}",
")",
")",
"fields",
"=",
"Record",
".",
"fields_from_dict",
"(",
"d",
")",
"return",
"Record",
"(",
"source",
"=",
"source",
",",
"fields",
"=",
"fields",
",",
"query_params_match",
"=",
"query_params_match",
",",
"query_person_match",
"=",
"query_person_match",
",",
"valid_since",
"=",
"valid_since",
")"
] | Transform the dict to a record object and return the record. | [
"Transform",
"the",
"dict",
"to",
"a",
"record",
"object",
"and",
"return",
"the",
"record",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L145-L157 |
5,491 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | Record.to_dict | def to_dict(self):
"""Return a dict representation of the record."""
d = {}
if self.query_params_match is not None:
d['@query_params_match'] = self.query_params_match
if self.query_person_match is not None:
d['@query_person_match'] = self.query_person_match
if self.valid_since is not None:
d['@valid_since'] = datetime_to_str(self.valid_since)
if self.source is not None:
d['source'] = self.source.to_dict()
d.update(self.fields_to_dict())
return d | python | def to_dict(self):
"""Return a dict representation of the record."""
d = {}
if self.query_params_match is not None:
d['@query_params_match'] = self.query_params_match
if self.query_person_match is not None:
d['@query_person_match'] = self.query_person_match
if self.valid_since is not None:
d['@valid_since'] = datetime_to_str(self.valid_since)
if self.source is not None:
d['source'] = self.source.to_dict()
d.update(self.fields_to_dict())
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"if",
"self",
".",
"query_params_match",
"is",
"not",
"None",
":",
"d",
"[",
"'@query_params_match'",
"]",
"=",
"self",
".",
"query_params_match",
"if",
"self",
".",
"query_person_match",
"is",
"not",
"None",
":",
"d",
"[",
"'@query_person_match'",
"]",
"=",
"self",
".",
"query_person_match",
"if",
"self",
".",
"valid_since",
"is",
"not",
"None",
":",
"d",
"[",
"'@valid_since'",
"]",
"=",
"datetime_to_str",
"(",
"self",
".",
"valid_since",
")",
"if",
"self",
".",
"source",
"is",
"not",
"None",
":",
"d",
"[",
"'source'",
"]",
"=",
"self",
".",
"source",
".",
"to_dict",
"(",
")",
"d",
".",
"update",
"(",
"self",
".",
"fields_to_dict",
"(",
")",
")",
"return",
"d"
] | Return a dict representation of the record. | [
"Return",
"a",
"dict",
"representation",
"of",
"the",
"record",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L159-L171 |
5,492 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | Person.is_searchable | def is_searchable(self):
"""A bool value that indicates whether the person has enough data and
can be sent as a query to the API."""
filter_func = lambda field: field.is_searchable
return bool(filter(filter_func, self.names) or
filter(filter_func, self.emails) or
filter(filter_func, self.phones) or
filter(filter_func, self.usernames)) | python | def is_searchable(self):
"""A bool value that indicates whether the person has enough data and
can be sent as a query to the API."""
filter_func = lambda field: field.is_searchable
return bool(filter(filter_func, self.names) or
filter(filter_func, self.emails) or
filter(filter_func, self.phones) or
filter(filter_func, self.usernames)) | [
"def",
"is_searchable",
"(",
"self",
")",
":",
"filter_func",
"=",
"lambda",
"field",
":",
"field",
".",
"is_searchable",
"return",
"bool",
"(",
"filter",
"(",
"filter_func",
",",
"self",
".",
"names",
")",
"or",
"filter",
"(",
"filter_func",
",",
"self",
".",
"emails",
")",
"or",
"filter",
"(",
"filter_func",
",",
"self",
".",
"phones",
")",
"or",
"filter",
"(",
"filter_func",
",",
"self",
".",
"usernames",
")",
")"
] | A bool value that indicates whether the person has enough data and
can be sent as a query to the API. | [
"A",
"bool",
"value",
"that",
"indicates",
"whether",
"the",
"person",
"has",
"enough",
"data",
"and",
"can",
"be",
"sent",
"as",
"a",
"query",
"to",
"the",
"API",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L222-L229 |
5,493 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | Person.from_dict | def from_dict(d):
"""Transform the dict to a person object and return the person."""
query_params_match = d.get('@query_params_match')
sources = [Source.from_dict(source) for source in d.get('sources', [])]
fields = Person.fields_from_dict(d)
return Person(fields=fields, sources=sources,
query_params_match=query_params_match) | python | def from_dict(d):
"""Transform the dict to a person object and return the person."""
query_params_match = d.get('@query_params_match')
sources = [Source.from_dict(source) for source in d.get('sources', [])]
fields = Person.fields_from_dict(d)
return Person(fields=fields, sources=sources,
query_params_match=query_params_match) | [
"def",
"from_dict",
"(",
"d",
")",
":",
"query_params_match",
"=",
"d",
".",
"get",
"(",
"'@query_params_match'",
")",
"sources",
"=",
"[",
"Source",
".",
"from_dict",
"(",
"source",
")",
"for",
"source",
"in",
"d",
".",
"get",
"(",
"'sources'",
",",
"[",
"]",
")",
"]",
"fields",
"=",
"Person",
".",
"fields_from_dict",
"(",
"d",
")",
"return",
"Person",
"(",
"fields",
"=",
"fields",
",",
"sources",
"=",
"sources",
",",
"query_params_match",
"=",
"query_params_match",
")"
] | Transform the dict to a person object and return the person. | [
"Transform",
"the",
"dict",
"to",
"a",
"person",
"object",
"and",
"return",
"the",
"person",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L248-L254 |
5,494 | i3visio/osrframework | osrframework/thirdparties/pipl_com/lib/containers.py | Person.to_dict | def to_dict(self):
"""Return a dict representation of the person."""
d = {}
if self.query_params_match is not None:
d['@query_params_match'] = self.query_params_match
if self.sources:
d['sources'] = [source.to_dict() for source in self.sources]
d.update(self.fields_to_dict())
return d | python | def to_dict(self):
"""Return a dict representation of the person."""
d = {}
if self.query_params_match is not None:
d['@query_params_match'] = self.query_params_match
if self.sources:
d['sources'] = [source.to_dict() for source in self.sources]
d.update(self.fields_to_dict())
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"if",
"self",
".",
"query_params_match",
"is",
"not",
"None",
":",
"d",
"[",
"'@query_params_match'",
"]",
"=",
"self",
".",
"query_params_match",
"if",
"self",
".",
"sources",
":",
"d",
"[",
"'sources'",
"]",
"=",
"[",
"source",
".",
"to_dict",
"(",
")",
"for",
"source",
"in",
"self",
".",
"sources",
"]",
"d",
".",
"update",
"(",
"self",
".",
"fields_to_dict",
"(",
")",
")",
"return",
"d"
] | Return a dict representation of the person. | [
"Return",
"a",
"dict",
"representation",
"of",
"the",
"person",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/thirdparties/pipl_com/lib/containers.py#L256-L264 |
5,495 | i3visio/osrframework | osrframework/phonefy.py | processPhoneList | def processPhoneList(platformNames=[], numbers=[], excludePlatformNames=[]):
"""
    Method to perform searches on a series of numbers.
Args:
-----
platformNames: List of names of the platforms.
numbers: List of numbers to be queried.
excludePlatformNames: A list of platforms not to be searched.
Return:
-------
A list of verified emails.
"""
# Grabbing the <Platform> objects
platforms = platform_selection.getPlatformsByName(platformNames, mode="phonefy", excludePlatformNames=excludePlatformNames)
results = []
for num in numbers:
for pla in platforms:
# This returns a json.txt!
entities = pla.getInfo(query=num, process=True, mode="phonefy")
if entities != {}:
results+=json.loads(entities)
return results | python | def processPhoneList(platformNames=[], numbers=[], excludePlatformNames=[]):
"""
    Method to perform searches on a series of numbers.
Args:
-----
platformNames: List of names of the platforms.
numbers: List of numbers to be queried.
excludePlatformNames: A list of platforms not to be searched.
Return:
-------
A list of verified emails.
"""
# Grabbing the <Platform> objects
platforms = platform_selection.getPlatformsByName(platformNames, mode="phonefy", excludePlatformNames=excludePlatformNames)
results = []
for num in numbers:
for pla in platforms:
# This returns a json.txt!
entities = pla.getInfo(query=num, process=True, mode="phonefy")
if entities != {}:
results+=json.loads(entities)
return results | [
"def",
"processPhoneList",
"(",
"platformNames",
"=",
"[",
"]",
",",
"numbers",
"=",
"[",
"]",
",",
"excludePlatformNames",
"=",
"[",
"]",
")",
":",
"# Grabbing the <Platform> objects",
"platforms",
"=",
"platform_selection",
".",
"getPlatformsByName",
"(",
"platformNames",
",",
"mode",
"=",
"\"phonefy\"",
",",
"excludePlatformNames",
"=",
"excludePlatformNames",
")",
"results",
"=",
"[",
"]",
"for",
"num",
"in",
"numbers",
":",
"for",
"pla",
"in",
"platforms",
":",
"# This returns a json.txt!",
"entities",
"=",
"pla",
".",
"getInfo",
"(",
"query",
"=",
"num",
",",
"process",
"=",
"True",
",",
"mode",
"=",
"\"phonefy\"",
")",
"if",
"entities",
"!=",
"{",
"}",
":",
"results",
"+=",
"json",
".",
"loads",
"(",
"entities",
")",
"return",
"results"
] | Method to perform searchs on a series of numbers.
Args:
-----
platformNames: List of names of the platforms.
numbers: List of numbers to be queried.
excludePlatformNames: A list of platforms not to be searched.
Return:
-------
A list of verified emails. | [
"Method",
"to",
"perform",
"searchs",
"on",
"a",
"series",
"of",
"numbers",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/phonefy.py#L37-L61 |
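A minimal usage sketch for the processPhoneList entry above, assuming osrframework is installed and importable from the module path shown in the record; the platform name and phone number below are placeholders, not values taken from the source.

# Hypothetical call; "infobel" and the number are illustrative placeholders.
import json
from osrframework import phonefy

results = phonefy.processPhoneList(
    platformNames=["infobel"],
    numbers=["+34600000000"],
)
print(json.dumps(results, indent=2))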
5,496 | i3visio/osrframework | osrframework/utils/platforms.py | Platform.createURL | def createURL(self, word, mode="phonefy"):
"""
Method to create the URL replacing the word in the appropriate URL.
Args:
-----
word: Word to be searched.
mode: Mode to be executed.
Return:
-------
The URL to be queried.
"""
try:
return self.modes[mode]["url"].format(placeholder=urllib.pathname2url(word))
except:
if mode == "base":
if word[0] == "/":
return self.baseURL+word[1:], word
else:
return self.baseURL+word
else:
try:
return self.url[mode].replace("<"+mode+">", urllib.pathname2url(word))
except:
pass
return None | python | def createURL(self, word, mode="phonefy"):
"""
Method to create the URL replacing the word in the appropriate URL.
Args:
-----
word: Word to be searched.
mode: Mode to be executed.
Return:
-------
The URL to be queried.
"""
try:
return self.modes[mode]["url"].format(placeholder=urllib.pathname2url(word))
except:
if mode == "base":
if word[0] == "/":
return self.baseURL+word[1:], word
else:
return self.baseURL+word
else:
try:
return self.url[mode].replace("<"+mode+">", urllib.pathname2url(word))
except:
pass
return None | [
"def",
"createURL",
"(",
"self",
",",
"word",
",",
"mode",
"=",
"\"phonefy\"",
")",
":",
"try",
":",
"return",
"self",
".",
"modes",
"[",
"mode",
"]",
"[",
"\"url\"",
"]",
".",
"format",
"(",
"placeholder",
"=",
"urllib",
".",
"pathname2url",
"(",
"word",
")",
")",
"except",
":",
"if",
"mode",
"==",
"\"base\"",
":",
"if",
"word",
"[",
"0",
"]",
"==",
"\"/\"",
":",
"return",
"self",
".",
"baseURL",
"+",
"word",
"[",
"1",
":",
"]",
",",
"word",
"else",
":",
"return",
"self",
".",
"baseURL",
"+",
"word",
"else",
":",
"try",
":",
"return",
"self",
".",
"url",
"[",
"mode",
"]",
".",
"replace",
"(",
"\"<\"",
"+",
"mode",
"+",
"\">\"",
",",
"urllib",
".",
"pathname2url",
"(",
"word",
")",
")",
"except",
":",
"pass",
"return",
"None"
] | Method to create the URL replacing the word in the appropriate URL.
Args:
-----
word: Word to be searched.
mode: Mode to be executed.
Return:
-------
The URL to be queried. | [
"Method",
"to",
"create",
"the",
"URL",
"replacing",
"the",
"word",
"in",
"the",
"appropriate",
"URL",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/platforms.py#L86-L112 |
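The createURL record above fills a per-mode URL template with a URL-encoded query term and relies on Python 2's urllib.pathname2url. A standalone Python 3 sketch of the same templating idea follows; the templates and mode names are assumptions for illustration only.

# Same idea in Python 3: percent-encode the word, then substitute it into
# the mode's URL template. Templates and mode names are illustrative.
from urllib.parse import quote

MODE_TEMPLATES = {
    "phonefy": "https://example.com/lookup/{placeholder}",
    "usufy": "https://example.com/users/{placeholder}",
}

def create_url(word, mode="phonefy"):
    template = MODE_TEMPLATES.get(mode)
    if template is None:
        return None
    return template.format(placeholder=quote(word, safe=""))

print(create_url("555 123 456"))  # -> https://example.com/lookup/555%20123%20456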
5,497 | i3visio/osrframework | osrframework/utils/platforms.py | Platform.launchQueryForMode | def launchQueryForMode(self, query=None, mode=None):
"""
Method that launches an i3Browser to collect data.
Args:
-----
query: The query to be performed
mode: The mode to be used to build the query.
Return:
-------
A string containing the recovered data or None.
"""
# Creating the query URL for that mode
qURL = self.createURL(word=query, mode=mode)
i3Browser = browser.Browser()
try:
# Check if it needs creds
if self.needsCredentials[mode]:
self._getAuthenticated(i3Browser, qURL)
data = i3Browser.recoverURL(qURL)
else:
# Accessing the resources
data = i3Browser.recoverURL(qURL)
return data
except KeyError:
print(general.error("[*] '{}' is not a valid mode for this wrapper ({}).".format(mode, self.__class__.__name__)))
return None | python | def launchQueryForMode(self, query=None, mode=None):
"""
Method that launches an i3Browser to collect data.
Args:
-----
query: The query to be performed
mode: The mode to be used to build the query.
Return:
-------
A string containing the recovered data or None.
"""
# Creating the query URL for that mode
qURL = self.createURL(word=query, mode=mode)
i3Browser = browser.Browser()
try:
# Check if it needs creds
if self.needsCredentials[mode]:
self._getAuthenticated(i3Browser, qURL)
data = i3Browser.recoverURL(qURL)
else:
# Accessing the resources
data = i3Browser.recoverURL(qURL)
return data
except KeyError:
print(general.error("[*] '{}' is not a valid mode for this wrapper ({}).".format(mode, self.__class__.__name__)))
return None | [
"def",
"launchQueryForMode",
"(",
"self",
",",
"query",
"=",
"None",
",",
"mode",
"=",
"None",
")",
":",
"# Creating the query URL for that mode",
"qURL",
"=",
"self",
".",
"createURL",
"(",
"word",
"=",
"query",
",",
"mode",
"=",
"mode",
")",
"i3Browser",
"=",
"browser",
".",
"Browser",
"(",
")",
"try",
":",
"# Check if it needs creds",
"if",
"self",
".",
"needsCredentials",
"[",
"mode",
"]",
":",
"self",
".",
"_getAuthenticated",
"(",
"i3Browser",
",",
"qURL",
")",
"data",
"=",
"i3Browser",
".",
"recoverURL",
"(",
"qURL",
")",
"else",
":",
"# Accessing the resources",
"data",
"=",
"i3Browser",
".",
"recoverURL",
"(",
"qURL",
")",
"return",
"data",
"except",
"KeyError",
":",
"print",
"(",
"general",
".",
"error",
"(",
"\"[*] '{}' is not a valid mode for this wrapper ({}).\"",
".",
"format",
"(",
"mode",
",",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
")",
"return",
"None"
] | Method that launches an i3Browser to collect data.
Args:
-----
query: The query to be performed
mode: The mode to be used to build the query.
Return:
-------
A string containing the recovered data or None. | [
"Method",
"that",
"launches",
"an",
"i3Browser",
"to",
"collect",
"data",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/platforms.py#L115-L144 |
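Per the record above, launchQueryForMode builds the mode-specific URL, authenticates the browser only when the mode requires credentials, and then fetches the page. A hedged standalone sketch of that control flow, with urllib standing in for the wrapper's i3Browser; the helper names and credential table are assumptions, not the library's API.

# Sketch of the fetch-with-optional-login flow; authenticate() and the
# credential table are hypothetical stand-ins for the wrapper internals.
from urllib.request import urlopen

NEEDS_CREDENTIALS = {"phonefy": False, "usufy": True}

def authenticate(url):
    # Placeholder for the wrapper's _getAuthenticated() step.
    print("logging in before querying", url)

def launch_query(url, mode):
    needs_creds = NEEDS_CREDENTIALS.get(mode)
    if needs_creds is None:
        print("'{}' is not a valid mode".format(mode))
        return None
    if needs_creds:
        authenticate(url)
    with urlopen(url) as response:  # stand-in for i3Browser.recoverURL()
        return response.read().decode("utf-8", errors="replace")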
5,498 | i3visio/osrframework | osrframework/utils/platforms.py | Platform.getInfo | def getInfo(self, query=None, process=False, mode="phonefy", qURI=None):
"""
Method that checks the presence of a given query and recovers the first list of complains.
Args:
-----
query: Query to verify.
process: Calling the processing function.
mode: Mode to be executed.
qURI: A query to be checked.
Return:
-------
Python structure for the html processed.
Raises:
-------
NoCredentialsException.
NotImplementedModeError.
BadImplementationError.
"""
results = []
data = ""
if self._modeIsValid(mode=mode) and self._isValidQuery(query, mode=mode):
if mode in ["mailfy", "phonefy", "searchfy", "usufy"]:
try:
results = getattr(self, "do_{}".format(mode))(query)
except AttributeError as e:
raise NotImplementedModeError(str(self), mode)
return json.dumps(results) | python | def getInfo(self, query=None, process=False, mode="phonefy", qURI=None):
"""
Method that checks the presence of a given query and recovers the first list of complains.
Args:
-----
query: Query to verify.
process: Calling the processing function.
mode: Mode to be executed.
qURI: A query to be checked.
Return:
-------
Python structure for the html processed.
Raises:
-------
NoCredentialsException.
NotImplementedModeError.
BadImplementationError.
"""
results = []
data = ""
if self._modeIsValid(mode=mode) and self._isValidQuery(query, mode=mode):
if mode in ["mailfy", "phonefy", "searchfy", "usufy"]:
try:
results = getattr(self, "do_{}".format(mode))(query)
except AttributeError as e:
raise NotImplementedModeError(str(self), mode)
return json.dumps(results) | [
"def",
"getInfo",
"(",
"self",
",",
"query",
"=",
"None",
",",
"process",
"=",
"False",
",",
"mode",
"=",
"\"phonefy\"",
",",
"qURI",
"=",
"None",
")",
":",
"results",
"=",
"[",
"]",
"data",
"=",
"\"\"",
"if",
"self",
".",
"_modeIsValid",
"(",
"mode",
"=",
"mode",
")",
"and",
"self",
".",
"_isValidQuery",
"(",
"query",
",",
"mode",
"=",
"mode",
")",
":",
"if",
"mode",
"in",
"[",
"\"mailfy\"",
",",
"\"phonefy\"",
",",
"\"searchfy\"",
",",
"\"usufy\"",
"]",
":",
"try",
":",
"results",
"=",
"getattr",
"(",
"self",
",",
"\"do_{}\"",
".",
"format",
"(",
"mode",
")",
")",
"(",
"query",
")",
"except",
"AttributeError",
"as",
"e",
":",
"raise",
"NotImplementedModeError",
"(",
"str",
"(",
"self",
")",
",",
"mode",
")",
"return",
"json",
".",
"dumps",
"(",
"results",
")"
] | Method that checks the presence of a given query and recovers the first list of complains.
Args:
-----
query: Query to verify.
process: Calling the processing function.
mode: Mode to be executed.
qURI: A query to be checked.
Return:
-------
Python structure for the html processed.
Raises:
-------
NoCredentialsException.
NotImplementedModeError.
BadImplementationError. | [
"Method",
"that",
"checks",
"the",
"presence",
"of",
"a",
"given",
"query",
"and",
"recovers",
"the",
"first",
"list",
"of",
"complains",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/platforms.py#L147-L178 |
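getInfo, as recorded above, validates the mode and then dispatches to a do_<mode> method via getattr, raising NotImplementedModeError when the wrapper lacks that method. A small standalone sketch of the dispatch pattern follows; the class, the returned entity shape, and the exception used are illustrative, not the library's own.

# getattr-based dispatch to do_<mode>, mirroring the record above.
import json

class Wrapper:
    modes = {"phonefy": {}, "usufy": {}}

    def do_phonefy(self, query):
        # Illustrative result shape only.
        return [{"type": "phone", "value": query}]

    def get_info(self, query, mode="phonefy"):
        if mode not in self.modes:
            raise ValueError("unsupported mode: {}".format(mode))
        try:
            handler = getattr(self, "do_{}".format(mode))
        except AttributeError:
            raise NotImplementedError("{} lacks do_{}".format(type(self).__name__, mode))
        return json.dumps(handler(query))

print(Wrapper().get_info("600000000"))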
5,499 | i3visio/osrframework | osrframework/utils/platforms.py | Platform._modeIsValid | def _modeIsValid(self, mode):
"""
Verification of whether the mode is a correct option to be used.
Args:
-----
mode: Mode to be executed.
Return:
-------
True if the mode exists in the three main folders.
"""
try:
# Suport for version 2 of wrappers
return mode in self.modes.keys()
except AttributeError as e:
# Legacy for mantaining old wrappers
if mode in self.isValidMode.keys():
if mode in self.isValidMode.keys():
return True
return False | python | def _modeIsValid(self, mode):
"""
Verification of whether the mode is a correct option to be used.
Args:
-----
mode: Mode to be executed.
Return:
-------
True if the mode exists in the three main folders.
"""
try:
# Suport for version 2 of wrappers
return mode in self.modes.keys()
except AttributeError as e:
# Legacy for mantaining old wrappers
if mode in self.isValidMode.keys():
if mode in self.isValidMode.keys():
return True
return False | [
"def",
"_modeIsValid",
"(",
"self",
",",
"mode",
")",
":",
"try",
":",
"# Suport for version 2 of wrappers",
"return",
"mode",
"in",
"self",
".",
"modes",
".",
"keys",
"(",
")",
"except",
"AttributeError",
"as",
"e",
":",
"# Legacy for mantaining old wrappers",
"if",
"mode",
"in",
"self",
".",
"isValidMode",
".",
"keys",
"(",
")",
":",
"if",
"mode",
"in",
"self",
".",
"isValidMode",
".",
"keys",
"(",
")",
":",
"return",
"True",
"return",
"False"
] | Verification of whether the mode is a correct option to be used.
Args:
-----
mode: Mode to be executed.
Return:
-------
True if the mode exists in the three main folders. | [
"Verification",
"of",
"whether",
"the",
"mode",
"is",
"a",
"correct",
"option",
"to",
"be",
"used",
"."
] | 83437f4c14c9c08cb80a896bd9834c77f6567871 | https://github.com/i3visio/osrframework/blob/83437f4c14c9c08cb80a896bd9834c77f6567871/osrframework/utils/platforms.py#L181-L201 |
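The _modeIsValid record closes this batch: newer wrappers expose a modes dict, legacy ones keep an isValidMode mapping, and the method falls back between them via AttributeError. A brief sketch of that dual-attribute check; the attribute names follow the record, while the wrapper classes are illustrative.

# Fallback check: prefer the v2 'modes' dict, else consult the legacy map.
def mode_is_valid(wrapper, mode):
    try:
        return mode in wrapper.modes                         # v2-style wrappers
    except AttributeError:
        return bool(wrapper.isValidMode.get(mode, False))    # legacy wrappers

class LegacyWrapper:
    isValidMode = {"phonefy": True}

class NewWrapper:
    modes = {"usufy": {}}

print(mode_is_valid(LegacyWrapper(), "phonefy"), mode_is_valid(NewWrapper(), "phonefy"))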