repo
stringlengths 7
55
| path
stringlengths 4
223
| url
stringlengths 87
315
| code
stringlengths 75
104k
| code_tokens
list | docstring
stringlengths 1
46.9k
| docstring_tokens
list | language
stringclasses 1
value | partition
stringclasses 3
values | avg_line_len
float64 7.91
980
|
---|---|---|---|---|---|---|---|---|---|
andrewramsay/sk8-drivers
|
pysk8/calibration/sk8_calibration_gui.py
|
https://github.com/andrewramsay/sk8-drivers/blob/67347a71762fb421f5ae65a595def5c7879e8b0c/pysk8/calibration/sk8_calibration_gui.py#L285-L288
|
def imu_changed(self, val):
"""Handle clicks on the IMU index spinner."""
self.current_imuid = '{}_IMU{}'.format(self.sk8.get_device_name(), val)
self.update_data_display(self.get_current_data())
|
[
"def",
"imu_changed",
"(",
"self",
",",
"val",
")",
":",
"self",
".",
"current_imuid",
"=",
"'{}_IMU{}'",
".",
"format",
"(",
"self",
".",
"sk8",
".",
"get_device_name",
"(",
")",
",",
"val",
")",
"self",
".",
"update_data_display",
"(",
"self",
".",
"get_current_data",
"(",
")",
")"
] |
Handle clicks on the IMU index spinner.
|
[
"Handle",
"clicks",
"on",
"the",
"IMU",
"index",
"spinner",
"."
] |
python
|
train
| 54 |
ecell/ecell4
|
ecell4/extra/ensemble.py
|
https://github.com/ecell/ecell4/blob/a4a1229661c39b2059adbbacae9090e5ba664e01/ecell4/extra/ensemble.py#L503-L523
|
def run_azure(target, jobs, n=1, nproc=None, path='.', delete=True, config=None, **kwargs):
"""
Evaluate the given function with each set of arguments, and return a list of results.
This function does in parallel with Microsoft Azure Batch.
This function is the work in progress.
The argument `nproc` doesn't work yet.
See `ecell4.extra.azure_batch.run_azure` for details.
See Also
--------
ecell4.extra.ensemble.run_serial
ecell4.extra.ensemble.run_sge
ecell4.extra.ensemble.run_slurm
ecell4.extra.ensemble.run_multiprocessing
ecell4.extra.ensemble.run_azure
ecell4.extra.azure_batch.run_azure
"""
import ecell4.extra.azure_batch as azure_batch
return azure_batch.run_azure(target, jobs, n, path, delete, config)
|
[
"def",
"run_azure",
"(",
"target",
",",
"jobs",
",",
"n",
"=",
"1",
",",
"nproc",
"=",
"None",
",",
"path",
"=",
"'.'",
",",
"delete",
"=",
"True",
",",
"config",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"import",
"ecell4",
".",
"extra",
".",
"azure_batch",
"as",
"azure_batch",
"return",
"azure_batch",
".",
"run_azure",
"(",
"target",
",",
"jobs",
",",
"n",
",",
"path",
",",
"delete",
",",
"config",
")"
] |
Evaluate the given function with each set of arguments, and return a list of results.
This function does in parallel with Microsoft Azure Batch.
This function is the work in progress.
The argument `nproc` doesn't work yet.
See `ecell4.extra.azure_batch.run_azure` for details.
See Also
--------
ecell4.extra.ensemble.run_serial
ecell4.extra.ensemble.run_sge
ecell4.extra.ensemble.run_slurm
ecell4.extra.ensemble.run_multiprocessing
ecell4.extra.ensemble.run_azure
ecell4.extra.azure_batch.run_azure
|
[
"Evaluate",
"the",
"given",
"function",
"with",
"each",
"set",
"of",
"arguments",
"and",
"return",
"a",
"list",
"of",
"results",
".",
"This",
"function",
"does",
"in",
"parallel",
"with",
"Microsoft",
"Azure",
"Batch",
"."
] |
python
|
train
| 36.380952 |
minio/minio-py
|
minio/api.py
|
https://github.com/minio/minio-py/blob/7107c84183cf5fb4deff68c0a16ab9f1c0b4c37e/minio/api.py#L1712-L1747
|
def _complete_multipart_upload(self, bucket_name, object_name,
upload_id, uploaded_parts):
"""
Complete an active multipart upload request.
:param bucket_name: Bucket name of the multipart request.
:param object_name: Object name of the multipart request.
:param upload_id: Upload id of the active multipart request.
:param uploaded_parts: Key, Value dictionary of uploaded parts.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
is_non_empty_string(upload_id)
# Order uploaded parts as required by S3 specification
ordered_parts = []
for part in sorted(uploaded_parts.keys()):
ordered_parts.append(uploaded_parts[part])
data = xml_marshal_complete_multipart_upload(ordered_parts)
sha256_hex = get_sha256_hexdigest(data)
md5_base64 = get_md5_base64digest(data)
headers = {
'Content-Length': len(data),
'Content-Type': 'application/xml',
'Content-Md5': md5_base64,
}
response = self._url_open('POST', bucket_name=bucket_name,
object_name=object_name,
query={'uploadId': upload_id},
headers=headers, body=data,
content_sha256=sha256_hex)
return parse_multipart_upload_result(response.data)
|
[
"def",
"_complete_multipart_upload",
"(",
"self",
",",
"bucket_name",
",",
"object_name",
",",
"upload_id",
",",
"uploaded_parts",
")",
":",
"is_valid_bucket_name",
"(",
"bucket_name",
")",
"is_non_empty_string",
"(",
"object_name",
")",
"is_non_empty_string",
"(",
"upload_id",
")",
"# Order uploaded parts as required by S3 specification",
"ordered_parts",
"=",
"[",
"]",
"for",
"part",
"in",
"sorted",
"(",
"uploaded_parts",
".",
"keys",
"(",
")",
")",
":",
"ordered_parts",
".",
"append",
"(",
"uploaded_parts",
"[",
"part",
"]",
")",
"data",
"=",
"xml_marshal_complete_multipart_upload",
"(",
"ordered_parts",
")",
"sha256_hex",
"=",
"get_sha256_hexdigest",
"(",
"data",
")",
"md5_base64",
"=",
"get_md5_base64digest",
"(",
"data",
")",
"headers",
"=",
"{",
"'Content-Length'",
":",
"len",
"(",
"data",
")",
",",
"'Content-Type'",
":",
"'application/xml'",
",",
"'Content-Md5'",
":",
"md5_base64",
",",
"}",
"response",
"=",
"self",
".",
"_url_open",
"(",
"'POST'",
",",
"bucket_name",
"=",
"bucket_name",
",",
"object_name",
"=",
"object_name",
",",
"query",
"=",
"{",
"'uploadId'",
":",
"upload_id",
"}",
",",
"headers",
"=",
"headers",
",",
"body",
"=",
"data",
",",
"content_sha256",
"=",
"sha256_hex",
")",
"return",
"parse_multipart_upload_result",
"(",
"response",
".",
"data",
")"
] |
Complete an active multipart upload request.
:param bucket_name: Bucket name of the multipart request.
:param object_name: Object name of the multipart request.
:param upload_id: Upload id of the active multipart request.
:param uploaded_parts: Key, Value dictionary of uploaded parts.
|
[
"Complete",
"an",
"active",
"multipart",
"upload",
"request",
"."
] |
python
|
train
| 40.527778 |
hasgeek/coaster
|
coaster/sqlalchemy/mixins.py
|
https://github.com/hasgeek/coaster/blob/07f7eb5d5f516e22fa14fdf4dc70e0ae13ee398d/coaster/sqlalchemy/mixins.py#L517-L529
|
def short_title(self):
"""
Generates an abbreviated title by subtracting the parent's title from this instance's title.
"""
if self.title and self.parent is not None and hasattr(self.parent, 'title') and self.parent.title:
if self.title.startswith(self.parent.title):
short = self.title[len(self.parent.title):].strip()
match = _punctuation_re.match(short)
if match:
short = short[match.end():].strip()
if short:
return short
return self.title
|
[
"def",
"short_title",
"(",
"self",
")",
":",
"if",
"self",
".",
"title",
"and",
"self",
".",
"parent",
"is",
"not",
"None",
"and",
"hasattr",
"(",
"self",
".",
"parent",
",",
"'title'",
")",
"and",
"self",
".",
"parent",
".",
"title",
":",
"if",
"self",
".",
"title",
".",
"startswith",
"(",
"self",
".",
"parent",
".",
"title",
")",
":",
"short",
"=",
"self",
".",
"title",
"[",
"len",
"(",
"self",
".",
"parent",
".",
"title",
")",
":",
"]",
".",
"strip",
"(",
")",
"match",
"=",
"_punctuation_re",
".",
"match",
"(",
"short",
")",
"if",
"match",
":",
"short",
"=",
"short",
"[",
"match",
".",
"end",
"(",
")",
":",
"]",
".",
"strip",
"(",
")",
"if",
"short",
":",
"return",
"short",
"return",
"self",
".",
"title"
] |
Generates an abbreviated title by subtracting the parent's title from this instance's title.
|
[
"Generates",
"an",
"abbreviated",
"title",
"by",
"subtracting",
"the",
"parent",
"s",
"title",
"from",
"this",
"instance",
"s",
"title",
"."
] |
python
|
train
| 45.153846 |
luismasuelli/django-trackmodels-ritual
|
grimoire/django/tracked/reports.py
|
https://github.com/luismasuelli/django-trackmodels-ritual/blob/ee0a6e07a5851ed477c9c1e3b9f8aafd9da35657/grimoire/django/tracked/reports.py#L300-L310
|
def dump_report_content(self, request, result):
"""
Dumps the content to a string, suitable to being written on a file.
:param result: The result being processed.
:return: string
"""
output = StringIO()
writer = csv.writer(output, **self.csv_kwargs)
writer.writerows([result.headers] + result.values)
return output.getvalue()
|
[
"def",
"dump_report_content",
"(",
"self",
",",
"request",
",",
"result",
")",
":",
"output",
"=",
"StringIO",
"(",
")",
"writer",
"=",
"csv",
".",
"writer",
"(",
"output",
",",
"*",
"*",
"self",
".",
"csv_kwargs",
")",
"writer",
".",
"writerows",
"(",
"[",
"result",
".",
"headers",
"]",
"+",
"result",
".",
"values",
")",
"return",
"output",
".",
"getvalue",
"(",
")"
] |
Dumps the content to a string, suitable to being written on a file.
:param result: The result being processed.
:return: string
|
[
"Dumps",
"the",
"content",
"to",
"a",
"string",
"suitable",
"to",
"being",
"written",
"on",
"a",
"file",
".",
":",
"param",
"result",
":",
"The",
"result",
"being",
"processed",
".",
":",
"return",
":",
"string"
] |
python
|
train
| 35.272727 |
InfoAgeTech/django-core
|
django_core/auth/views.py
|
https://github.com/InfoAgeTech/django-core/blob/9664a145473b75120bf71e1644e9c8086e7e8955/django_core/auth/views.py#L54-L60
|
def get_authorization_user(self, **kwargs):
"""Gets the user the authorization object is for."""
if self.authorization_user is not None:
return self.authorization_user
self.authorization_user = self.request.user
return self.request.user
|
[
"def",
"get_authorization_user",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"authorization_user",
"is",
"not",
"None",
":",
"return",
"self",
".",
"authorization_user",
"self",
".",
"authorization_user",
"=",
"self",
".",
"request",
".",
"user",
"return",
"self",
".",
"request",
".",
"user"
] |
Gets the user the authorization object is for.
|
[
"Gets",
"the",
"user",
"the",
"authorization",
"object",
"is",
"for",
"."
] |
python
|
train
| 39.285714 |
rackerlabs/silverberg
|
scripts/python-lint.py
|
https://github.com/rackerlabs/silverberg/blob/c6fae78923a019f1615e9516ab30fa105c72a542/scripts/python-lint.py#L24-L49
|
def lint(to_lint):
"""
Run all linters against a list of files.
:param to_lint: a list of files to lint.
"""
exit_code = 0
for linter, options in (('pyflakes', []), ('pep8', [])):
try:
output = local[linter](*(options + to_lint))
except commands.ProcessExecutionError as e:
output = e.stdout
if output:
exit_code = 1
print "{0} Errors:".format(linter)
print output
output = hacked_pep257(to_lint)
if output:
exit_code = 1
print "Docstring Errors:".format(linter.upper())
print output
sys.exit(exit_code)
|
[
"def",
"lint",
"(",
"to_lint",
")",
":",
"exit_code",
"=",
"0",
"for",
"linter",
",",
"options",
"in",
"(",
"(",
"'pyflakes'",
",",
"[",
"]",
")",
",",
"(",
"'pep8'",
",",
"[",
"]",
")",
")",
":",
"try",
":",
"output",
"=",
"local",
"[",
"linter",
"]",
"(",
"*",
"(",
"options",
"+",
"to_lint",
")",
")",
"except",
"commands",
".",
"ProcessExecutionError",
"as",
"e",
":",
"output",
"=",
"e",
".",
"stdout",
"if",
"output",
":",
"exit_code",
"=",
"1",
"print",
"\"{0} Errors:\"",
".",
"format",
"(",
"linter",
")",
"print",
"output",
"output",
"=",
"hacked_pep257",
"(",
"to_lint",
")",
"if",
"output",
":",
"exit_code",
"=",
"1",
"print",
"\"Docstring Errors:\"",
".",
"format",
"(",
"linter",
".",
"upper",
"(",
")",
")",
"print",
"output",
"sys",
".",
"exit",
"(",
"exit_code",
")"
] |
Run all linters against a list of files.
:param to_lint: a list of files to lint.
|
[
"Run",
"all",
"linters",
"against",
"a",
"list",
"of",
"files",
"."
] |
python
|
train
| 24.115385 |
gtaylor/petfinder-api
|
petfinder/exceptions.py
|
https://github.com/gtaylor/petfinder-api/blob/4f1bc76c276d537208c9b11f7c87282f6d2bb50d/petfinder/exceptions.py#L93-L113
|
def _get_exception_class_from_status_code(status_code):
"""
Utility function that accepts a status code, and spits out a reference
to the correct exception class to raise.
:param str status_code: The status code to return an exception class for.
:rtype: PetfinderAPIError or None
:returns: The appropriate PetfinderAPIError subclass. If the status code
is not an error, return ``None``.
"""
if status_code == '100':
return None
exc_class = STATUS_CODE_MAPPING.get(status_code)
if not exc_class:
# No status code match, return the "I don't know wtf this is"
# exception class.
return STATUS_CODE_MAPPING['UNKNOWN']
else:
# Match found, yay.
return exc_class
|
[
"def",
"_get_exception_class_from_status_code",
"(",
"status_code",
")",
":",
"if",
"status_code",
"==",
"'100'",
":",
"return",
"None",
"exc_class",
"=",
"STATUS_CODE_MAPPING",
".",
"get",
"(",
"status_code",
")",
"if",
"not",
"exc_class",
":",
"# No status code match, return the \"I don't know wtf this is\"",
"# exception class.",
"return",
"STATUS_CODE_MAPPING",
"[",
"'UNKNOWN'",
"]",
"else",
":",
"# Match found, yay.",
"return",
"exc_class"
] |
Utility function that accepts a status code, and spits out a reference
to the correct exception class to raise.
:param str status_code: The status code to return an exception class for.
:rtype: PetfinderAPIError or None
:returns: The appropriate PetfinderAPIError subclass. If the status code
is not an error, return ``None``.
|
[
"Utility",
"function",
"that",
"accepts",
"a",
"status",
"code",
"and",
"spits",
"out",
"a",
"reference",
"to",
"the",
"correct",
"exception",
"class",
"to",
"raise",
"."
] |
python
|
train
| 35.142857 |
DancingQuanta/pyusbiss
|
usbiss/spi.py
|
https://github.com/DancingQuanta/pyusbiss/blob/fc64e123f1c97f53ad153c474d230ad38044c3cb/usbiss/spi.py#L118-L140
|
def exchange(self, data):
"""
Perform SPI transaction.
The first received byte is either ACK or NACK.
:TODO: enforce rule that up to 63 bytes of data can be sent.
:TODO: enforce rule that there is no gaps in data bytes (what define a gap?)
:param data: List of bytes
:returns: List of bytes
:rtype: List of bytes
"""
self._usbiss.write_data([self._usbiss.SPI_CMD] + data)
response = self._usbiss.read_data(1 + len(data))
if len(response) != 0:
response = self._usbiss.decode(response)
status = response.pop(0)
if status == 0:
raise USBISSError('SPI Transmission Error')
return response
else:
raise USBISSError('SPI Transmission Error: No bytes received!')
|
[
"def",
"exchange",
"(",
"self",
",",
"data",
")",
":",
"self",
".",
"_usbiss",
".",
"write_data",
"(",
"[",
"self",
".",
"_usbiss",
".",
"SPI_CMD",
"]",
"+",
"data",
")",
"response",
"=",
"self",
".",
"_usbiss",
".",
"read_data",
"(",
"1",
"+",
"len",
"(",
"data",
")",
")",
"if",
"len",
"(",
"response",
")",
"!=",
"0",
":",
"response",
"=",
"self",
".",
"_usbiss",
".",
"decode",
"(",
"response",
")",
"status",
"=",
"response",
".",
"pop",
"(",
"0",
")",
"if",
"status",
"==",
"0",
":",
"raise",
"USBISSError",
"(",
"'SPI Transmission Error'",
")",
"return",
"response",
"else",
":",
"raise",
"USBISSError",
"(",
"'SPI Transmission Error: No bytes received!'",
")"
] |
Perform SPI transaction.
The first received byte is either ACK or NACK.
:TODO: enforce rule that up to 63 bytes of data can be sent.
:TODO: enforce rule that there is no gaps in data bytes (what define a gap?)
:param data: List of bytes
:returns: List of bytes
:rtype: List of bytes
|
[
"Perform",
"SPI",
"transaction",
"."
] |
python
|
train
| 35.478261 |
ConsenSys/mythril-classic
|
mythril/ethereum/interface/leveldb/client.py
|
https://github.com/ConsenSys/mythril-classic/blob/27af71c34b2ce94f4fae5613ec457f93df1a8f56/mythril/ethereum/interface/leveldb/client.py#L286-L293
|
def eth_getCode(self, address):
"""Get account code.
:param address:
:return:
"""
account = self.reader._get_account(address)
return _encode_hex(account.code)
|
[
"def",
"eth_getCode",
"(",
"self",
",",
"address",
")",
":",
"account",
"=",
"self",
".",
"reader",
".",
"_get_account",
"(",
"address",
")",
"return",
"_encode_hex",
"(",
"account",
".",
"code",
")"
] |
Get account code.
:param address:
:return:
|
[
"Get",
"account",
"code",
"."
] |
python
|
train
| 25 |
androguard/androguard
|
androguard/core/bytecodes/dvm.py
|
https://github.com/androguard/androguard/blob/984c0d981be2950cf0451e484f7b0d4d53bc4911/androguard/core/bytecodes/dvm.py#L8281-L8293
|
def disassemble(self, offset, size):
"""
Disassembles a given offset in the DEX file
:param offset: offset to disassemble in the file (from the beginning of the file)
:type offset: int
:param size:
:type size:
"""
for i in DCode(
self.CM, offset, size,
self.get_buff()[offset:offset + size]).get_instructions():
yield i
|
[
"def",
"disassemble",
"(",
"self",
",",
"offset",
",",
"size",
")",
":",
"for",
"i",
"in",
"DCode",
"(",
"self",
".",
"CM",
",",
"offset",
",",
"size",
",",
"self",
".",
"get_buff",
"(",
")",
"[",
"offset",
":",
"offset",
"+",
"size",
"]",
")",
".",
"get_instructions",
"(",
")",
":",
"yield",
"i"
] |
Disassembles a given offset in the DEX file
:param offset: offset to disassemble in the file (from the beginning of the file)
:type offset: int
:param size:
:type size:
|
[
"Disassembles",
"a",
"given",
"offset",
"in",
"the",
"DEX",
"file"
] |
python
|
train
| 32 |
xeroc/python-graphenelib
|
graphenestorage/__init__.py
|
https://github.com/xeroc/python-graphenelib/blob/8bb5396bc79998ee424cf3813af478304173f3a6/graphenestorage/__init__.py#L27-L35
|
def get_default_key_store(*args, config, **kwargs):
""" This method returns the default **key** store
that uses an SQLite database internally.
:params str appname: The appname that is used internally to distinguish
different SQLite files
"""
kwargs["appname"] = kwargs.get("appname", "graphene")
return SqliteEncryptedKeyStore(config=config, **kwargs)
|
[
"def",
"get_default_key_store",
"(",
"*",
"args",
",",
"config",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"\"appname\"",
"]",
"=",
"kwargs",
".",
"get",
"(",
"\"appname\"",
",",
"\"graphene\"",
")",
"return",
"SqliteEncryptedKeyStore",
"(",
"config",
"=",
"config",
",",
"*",
"*",
"kwargs",
")"
] |
This method returns the default **key** store
that uses an SQLite database internally.
:params str appname: The appname that is used internally to distinguish
different SQLite files
|
[
"This",
"method",
"returns",
"the",
"default",
"**",
"key",
"**",
"store",
"that",
"uses",
"an",
"SQLite",
"database",
"internally",
"."
] |
python
|
valid
| 43.111111 |
apacha/OMR-Datasets
|
omrdatasettools/downloaders/MuscimaPlusPlusDatasetDownloader.py
|
https://github.com/apacha/OMR-Datasets/blob/d0a22a03ae35caeef211729efa340e1ec0e01ea5/omrdatasettools/downloaders/MuscimaPlusPlusDatasetDownloader.py#L33-L54
|
def download_and_extract_dataset(self, destination_directory: str):
"""
Downloads and extracts the MUSCIMA++ dataset along with the images from the CVC-MUSCIMA dataset
that were manually annotated (140 out of 1000 images).
"""
if not os.path.exists(self.get_dataset_filename()):
print("Downloading MUSCIMA++ Dataset...")
self.download_file(self.get_dataset_download_url(), self.get_dataset_filename())
if not os.path.exists(self.get_imageset_filename()):
print("Downloading MUSCIMA++ Images...")
self.download_file(self.get_images_download_url(), self.get_imageset_filename())
print("Extracting MUSCIMA++ Dataset...")
self.extract_dataset(os.path.abspath(destination_directory))
absolute_path_to_temp_folder = os.path.abspath('MuscimaPpImages')
self.extract_dataset(absolute_path_to_temp_folder, self.get_imageset_filename())
DatasetDownloader.copytree(os.path.join(absolute_path_to_temp_folder, "fulls"),
os.path.join(os.path.abspath(destination_directory), self.dataset_version(), "data",
"images"))
self.clean_up_temp_directory(absolute_path_to_temp_folder)
|
[
"def",
"download_and_extract_dataset",
"(",
"self",
",",
"destination_directory",
":",
"str",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"get_dataset_filename",
"(",
")",
")",
":",
"print",
"(",
"\"Downloading MUSCIMA++ Dataset...\"",
")",
"self",
".",
"download_file",
"(",
"self",
".",
"get_dataset_download_url",
"(",
")",
",",
"self",
".",
"get_dataset_filename",
"(",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"get_imageset_filename",
"(",
")",
")",
":",
"print",
"(",
"\"Downloading MUSCIMA++ Images...\"",
")",
"self",
".",
"download_file",
"(",
"self",
".",
"get_images_download_url",
"(",
")",
",",
"self",
".",
"get_imageset_filename",
"(",
")",
")",
"print",
"(",
"\"Extracting MUSCIMA++ Dataset...\"",
")",
"self",
".",
"extract_dataset",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"destination_directory",
")",
")",
"absolute_path_to_temp_folder",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"'MuscimaPpImages'",
")",
"self",
".",
"extract_dataset",
"(",
"absolute_path_to_temp_folder",
",",
"self",
".",
"get_imageset_filename",
"(",
")",
")",
"DatasetDownloader",
".",
"copytree",
"(",
"os",
".",
"path",
".",
"join",
"(",
"absolute_path_to_temp_folder",
",",
"\"fulls\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"destination_directory",
")",
",",
"self",
".",
"dataset_version",
"(",
")",
",",
"\"data\"",
",",
"\"images\"",
")",
")",
"self",
".",
"clean_up_temp_directory",
"(",
"absolute_path_to_temp_folder",
")"
] |
Downloads and extracts the MUSCIMA++ dataset along with the images from the CVC-MUSCIMA dataset
that were manually annotated (140 out of 1000 images).
|
[
"Downloads",
"and",
"extracts",
"the",
"MUSCIMA",
"++",
"dataset",
"along",
"with",
"the",
"images",
"from",
"the",
"CVC",
"-",
"MUSCIMA",
"dataset",
"that",
"were",
"manually",
"annotated",
"(",
"140",
"out",
"of",
"1000",
"images",
")",
"."
] |
python
|
train
| 57.681818 |
inveniosoftware/invenio-accounts
|
invenio_accounts/forms.py
|
https://github.com/inveniosoftware/invenio-accounts/blob/b0d2f0739b00dbefea22ca15d7d374a1b4a63aec/invenio_accounts/forms.py#L43-L52
|
def register_form_factory(Form, app):
"""Return extended registration form."""
if app.config.get('RECAPTCHA_PUBLIC_KEY') and \
app.config.get('RECAPTCHA_PRIVATE_KEY'):
class RegisterForm(Form):
recaptcha = FormField(RegistrationFormRecaptcha, separator='.')
return RegisterForm
return Form
|
[
"def",
"register_form_factory",
"(",
"Form",
",",
"app",
")",
":",
"if",
"app",
".",
"config",
".",
"get",
"(",
"'RECAPTCHA_PUBLIC_KEY'",
")",
"and",
"app",
".",
"config",
".",
"get",
"(",
"'RECAPTCHA_PRIVATE_KEY'",
")",
":",
"class",
"RegisterForm",
"(",
"Form",
")",
":",
"recaptcha",
"=",
"FormField",
"(",
"RegistrationFormRecaptcha",
",",
"separator",
"=",
"'.'",
")",
"return",
"RegisterForm",
"return",
"Form"
] |
Return extended registration form.
|
[
"Return",
"extended",
"registration",
"form",
"."
] |
python
|
train
| 33.4 |
timeyyy/apptools
|
peasoup/peasoup.py
|
https://github.com/timeyyy/apptools/blob/d3c0f324b0c2689c35f5601348276f4efd6cb240/peasoup/peasoup.py#L64-L71
|
def save(self):
'''saves our config objet to file'''
if self.app.cfg_mode == 'json':
with open(self.app.cfg_file, 'w') as opened_file:
json.dump(self.app.cfg, opened_file)
else:
with open(self.app.cfg_file, 'w')as opened_file:
yaml.dump(self.app.cfg, opened_file)
|
[
"def",
"save",
"(",
"self",
")",
":",
"if",
"self",
".",
"app",
".",
"cfg_mode",
"==",
"'json'",
":",
"with",
"open",
"(",
"self",
".",
"app",
".",
"cfg_file",
",",
"'w'",
")",
"as",
"opened_file",
":",
"json",
".",
"dump",
"(",
"self",
".",
"app",
".",
"cfg",
",",
"opened_file",
")",
"else",
":",
"with",
"open",
"(",
"self",
".",
"app",
".",
"cfg_file",
",",
"'w'",
")",
"as",
"opened_file",
":",
"yaml",
".",
"dump",
"(",
"self",
".",
"app",
".",
"cfg",
",",
"opened_file",
")"
] |
saves our config objet to file
|
[
"saves",
"our",
"config",
"objet",
"to",
"file"
] |
python
|
train
| 42 |
spulec/moto
|
moto/packages/httpretty/core.py
|
https://github.com/spulec/moto/blob/4a286c4bc288933bb023396e2784a6fdbb966bc9/moto/packages/httpretty/core.py#L809-L837
|
def get_next_entry(self, method, info, request):
"""Cycle through available responses, but only once.
Any subsequent requests will receive the last response"""
if method not in self.current_entries:
self.current_entries[method] = 0
# restrict selection to entries that match the requested method
entries_for_method = [e for e in self.entries if e.method == method]
if self.current_entries[method] >= len(entries_for_method):
self.current_entries[method] = -1
if not self.entries or not entries_for_method:
raise ValueError('I have no entries for method %s: %s'
% (method, self))
entry = entries_for_method[self.current_entries[method]]
if self.current_entries[method] != -1:
self.current_entries[method] += 1
# Attach more info to the entry
# So the callback can be more clever about what to do
# This does also fix the case where the callback
# would be handed a compiled regex as uri instead of the
# real uri
entry.info = info
entry.request = request
return entry
|
[
"def",
"get_next_entry",
"(",
"self",
",",
"method",
",",
"info",
",",
"request",
")",
":",
"if",
"method",
"not",
"in",
"self",
".",
"current_entries",
":",
"self",
".",
"current_entries",
"[",
"method",
"]",
"=",
"0",
"# restrict selection to entries that match the requested method",
"entries_for_method",
"=",
"[",
"e",
"for",
"e",
"in",
"self",
".",
"entries",
"if",
"e",
".",
"method",
"==",
"method",
"]",
"if",
"self",
".",
"current_entries",
"[",
"method",
"]",
">=",
"len",
"(",
"entries_for_method",
")",
":",
"self",
".",
"current_entries",
"[",
"method",
"]",
"=",
"-",
"1",
"if",
"not",
"self",
".",
"entries",
"or",
"not",
"entries_for_method",
":",
"raise",
"ValueError",
"(",
"'I have no entries for method %s: %s'",
"%",
"(",
"method",
",",
"self",
")",
")",
"entry",
"=",
"entries_for_method",
"[",
"self",
".",
"current_entries",
"[",
"method",
"]",
"]",
"if",
"self",
".",
"current_entries",
"[",
"method",
"]",
"!=",
"-",
"1",
":",
"self",
".",
"current_entries",
"[",
"method",
"]",
"+=",
"1",
"# Attach more info to the entry",
"# So the callback can be more clever about what to do",
"# This does also fix the case where the callback",
"# would be handed a compiled regex as uri instead of the",
"# real uri",
"entry",
".",
"info",
"=",
"info",
"entry",
".",
"request",
"=",
"request",
"return",
"entry"
] |
Cycle through available responses, but only once.
Any subsequent requests will receive the last response
|
[
"Cycle",
"through",
"available",
"responses",
"but",
"only",
"once",
".",
"Any",
"subsequent",
"requests",
"will",
"receive",
"the",
"last",
"response"
] |
python
|
train
| 39.896552 |
deepmind/sonnet
|
sonnet/python/modules/util.py
|
https://github.com/deepmind/sonnet/blob/00612ca3178964d86b556e062694d808ff81fcca/sonnet/python/modules/util.py#L881-L894
|
def name_for_callable(func):
"""Returns a module name for a callable or `None` if no name can be found."""
if isinstance(func, functools.partial):
return name_for_callable(func.func)
try:
name = func.__name__
except AttributeError:
return None
if name == "<lambda>":
return None
else:
return to_snake_case(name)
|
[
"def",
"name_for_callable",
"(",
"func",
")",
":",
"if",
"isinstance",
"(",
"func",
",",
"functools",
".",
"partial",
")",
":",
"return",
"name_for_callable",
"(",
"func",
".",
"func",
")",
"try",
":",
"name",
"=",
"func",
".",
"__name__",
"except",
"AttributeError",
":",
"return",
"None",
"if",
"name",
"==",
"\"<lambda>\"",
":",
"return",
"None",
"else",
":",
"return",
"to_snake_case",
"(",
"name",
")"
] |
Returns a module name for a callable or `None` if no name can be found.
|
[
"Returns",
"a",
"module",
"name",
"for",
"a",
"callable",
"or",
"None",
"if",
"no",
"name",
"can",
"be",
"found",
"."
] |
python
|
train
| 23.714286 |
the01/python-paps
|
paps/si/app/sensor.py
|
https://github.com/the01/python-paps/blob/2dde5a71913e4c7b22901cf05c6ecedd890919c4/paps/si/app/sensor.py#L374-L386
|
def stop(self):
"""
Stop the interface
:rtype: None
"""
should_sleep = self._is_running
super(Sensor, self).stop()
if should_sleep:
# Make sure everything has enough time to exit
time.sleep(max(self._select_timeout, self._retransmit_timeout) + 1)
if self._listen_socket is not None:
self._shutdown_listen_socket()
|
[
"def",
"stop",
"(",
"self",
")",
":",
"should_sleep",
"=",
"self",
".",
"_is_running",
"super",
"(",
"Sensor",
",",
"self",
")",
".",
"stop",
"(",
")",
"if",
"should_sleep",
":",
"# Make sure everything has enough time to exit",
"time",
".",
"sleep",
"(",
"max",
"(",
"self",
".",
"_select_timeout",
",",
"self",
".",
"_retransmit_timeout",
")",
"+",
"1",
")",
"if",
"self",
".",
"_listen_socket",
"is",
"not",
"None",
":",
"self",
".",
"_shutdown_listen_socket",
"(",
")"
] |
Stop the interface
:rtype: None
|
[
"Stop",
"the",
"interface"
] |
python
|
train
| 30.923077 |
suds-community/suds
|
suds/argparser.py
|
https://github.com/suds-community/suds/blob/6fb0a829337b5037a66c20aae6f89b41acd77e40/suds/argparser.py#L124-L147
|
def __all_parameters_processed(self):
"""
Finish the argument processing.
Should be called after all the web service operation's parameters have
been successfully processed and, afterwards, no further parameter
processing is allowed.
Returns a 2-tuple containing the number of required & allowed
arguments.
See the _ArgParser class description for more detailed information.
"""
assert self.active()
sentinel_frame = self.__stack[0]
self.__pop_frames_above(sentinel_frame)
assert len(self.__stack) == 1
self.__pop_top_frame()
assert not self.active()
args_required = sentinel_frame.args_required()
args_allowed = sentinel_frame.args_allowed()
self.__check_for_extra_arguments(args_required, args_allowed)
return args_required, args_allowed
|
[
"def",
"__all_parameters_processed",
"(",
"self",
")",
":",
"assert",
"self",
".",
"active",
"(",
")",
"sentinel_frame",
"=",
"self",
".",
"__stack",
"[",
"0",
"]",
"self",
".",
"__pop_frames_above",
"(",
"sentinel_frame",
")",
"assert",
"len",
"(",
"self",
".",
"__stack",
")",
"==",
"1",
"self",
".",
"__pop_top_frame",
"(",
")",
"assert",
"not",
"self",
".",
"active",
"(",
")",
"args_required",
"=",
"sentinel_frame",
".",
"args_required",
"(",
")",
"args_allowed",
"=",
"sentinel_frame",
".",
"args_allowed",
"(",
")",
"self",
".",
"__check_for_extra_arguments",
"(",
"args_required",
",",
"args_allowed",
")",
"return",
"args_required",
",",
"args_allowed"
] |
Finish the argument processing.
Should be called after all the web service operation's parameters have
been successfully processed and, afterwards, no further parameter
processing is allowed.
Returns a 2-tuple containing the number of required & allowed
arguments.
See the _ArgParser class description for more detailed information.
|
[
"Finish",
"the",
"argument",
"processing",
"."
] |
python
|
train
| 36.333333 |
KelSolaar/Umbra
|
umbra/managers/file_system_events_manager.py
|
https://github.com/KelSolaar/Umbra/blob/66f45f08d9d723787f1191989f8b0dda84b412ce/umbra/managers/file_system_events_manager.py#L406-L421
|
def unregister_path(self, path):
"""
Unregisters given path.
:param path: Path name.
:type path: unicode
:return: Method success.
:rtype: bool
"""
if not path in self:
raise umbra.exceptions.PathExistsError("{0} | '{1}' path isn't registered!".format(
self.__class__.__name__, path))
del (self.__paths[path])
return True
|
[
"def",
"unregister_path",
"(",
"self",
",",
"path",
")",
":",
"if",
"not",
"path",
"in",
"self",
":",
"raise",
"umbra",
".",
"exceptions",
".",
"PathExistsError",
"(",
"\"{0} | '{1}' path isn't registered!\"",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
",",
"path",
")",
")",
"del",
"(",
"self",
".",
"__paths",
"[",
"path",
"]",
")",
"return",
"True"
] |
Unregisters given path.
:param path: Path name.
:type path: unicode
:return: Method success.
:rtype: bool
|
[
"Unregisters",
"given",
"path",
"."
] |
python
|
train
| 26 |
simpleai-team/simpleai
|
simpleai/machine_learning/classifiers.py
|
https://github.com/simpleai-team/simpleai/blob/2836befa7e970013f62e0ee75562652aacac6f65/simpleai/machine_learning/classifiers.py#L322-L334
|
def _max_gain_split(self, examples):
"""
Returns an OnlineInformationGain of the attribute with
max gain based on `examples`.
"""
gains = self._new_set_of_gain_counters()
for example in examples:
for gain in gains:
gain.add(example)
winner = max(gains, key=lambda gain: gain.get_gain())
if not winner.get_target_class_counts():
raise ValueError("Dataset is empty")
return winner
|
[
"def",
"_max_gain_split",
"(",
"self",
",",
"examples",
")",
":",
"gains",
"=",
"self",
".",
"_new_set_of_gain_counters",
"(",
")",
"for",
"example",
"in",
"examples",
":",
"for",
"gain",
"in",
"gains",
":",
"gain",
".",
"add",
"(",
"example",
")",
"winner",
"=",
"max",
"(",
"gains",
",",
"key",
"=",
"lambda",
"gain",
":",
"gain",
".",
"get_gain",
"(",
")",
")",
"if",
"not",
"winner",
".",
"get_target_class_counts",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"Dataset is empty\"",
")",
"return",
"winner"
] |
Returns an OnlineInformationGain of the attribute with
max gain based on `examples`.
|
[
"Returns",
"an",
"OnlineInformationGain",
"of",
"the",
"attribute",
"with",
"max",
"gain",
"based",
"on",
"examples",
"."
] |
python
|
train
| 36.769231 |
annoviko/pyclustering
|
pyclustering/container/kdtree.py
|
https://github.com/annoviko/pyclustering/blob/98aa0dd89fd36f701668fb1eb29c8fb5662bf7d0/pyclustering/container/kdtree.py#L459-L473
|
def find_node(self, point, cur_node = None):
"""!
@brief Find node with coordinates that are defined by specified point.
@details If node with specified parameters does not exist then None will be returned,
otherwise required node will be returned.
@param[in] point (list): Coordinates of the point whose node should be found.
@param[in] cur_node (node): Node from which search should be started.
@return (node) Node if it satisfies to input parameters, otherwise it return None.
"""
rule_search = lambda node, point=point: self.__point_comparator(node.data, point)
return self.__find_node_by_rule(point, rule_search, cur_node)
|
[
"def",
"find_node",
"(",
"self",
",",
"point",
",",
"cur_node",
"=",
"None",
")",
":",
"rule_search",
"=",
"lambda",
"node",
",",
"point",
"=",
"point",
":",
"self",
".",
"__point_comparator",
"(",
"node",
".",
"data",
",",
"point",
")",
"return",
"self",
".",
"__find_node_by_rule",
"(",
"point",
",",
"rule_search",
",",
"cur_node",
")"
] |
!
@brief Find node with coordinates that are defined by specified point.
@details If node with specified parameters does not exist then None will be returned,
otherwise required node will be returned.
@param[in] point (list): Coordinates of the point whose node should be found.
@param[in] cur_node (node): Node from which search should be started.
@return (node) Node if it satisfies to input parameters, otherwise it return None.
|
[
"!"
] |
python
|
valid
| 50.266667 |
etcher-be/emiz
|
emiz/miz.py
|
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/miz.py#L334-L364
|
def zip(self, destination: typing.Union[str, Path] = None, encode: bool = True) -> str:
"""
Write mission, dictionary etc. to a MIZ file
Args:
destination: target MIZ file (if none, defaults to source MIZ + "_EMIZ"
Returns: destination file
"""
if encode:
self._encode()
if destination is None:
destination_path = self.miz_path.parent.joinpath(f'{self.miz_path.stem}_EMIZ.miz')
else:
destination_path = elib.path.ensure_file(destination, must_exist=False)
LOGGER.debug('zipping mission to: %s', destination_path)
destination_path.write_bytes(dummy_miz)
with ZipFile(str(destination_path), mode='w', compression=8) as zip_file:
for root, _, items in os.walk(self.temp_dir.absolute()):
for item in items:
item_abs_path = Path(root, item).absolute()
item_rel_path = Path(item_abs_path).relative_to(self.temp_dir)
zip_file.write(item_abs_path, arcname=item_rel_path)
return str(destination_path)
|
[
"def",
"zip",
"(",
"self",
",",
"destination",
":",
"typing",
".",
"Union",
"[",
"str",
",",
"Path",
"]",
"=",
"None",
",",
"encode",
":",
"bool",
"=",
"True",
")",
"->",
"str",
":",
"if",
"encode",
":",
"self",
".",
"_encode",
"(",
")",
"if",
"destination",
"is",
"None",
":",
"destination_path",
"=",
"self",
".",
"miz_path",
".",
"parent",
".",
"joinpath",
"(",
"f'{self.miz_path.stem}_EMIZ.miz'",
")",
"else",
":",
"destination_path",
"=",
"elib",
".",
"path",
".",
"ensure_file",
"(",
"destination",
",",
"must_exist",
"=",
"False",
")",
"LOGGER",
".",
"debug",
"(",
"'zipping mission to: %s'",
",",
"destination_path",
")",
"destination_path",
".",
"write_bytes",
"(",
"dummy_miz",
")",
"with",
"ZipFile",
"(",
"str",
"(",
"destination_path",
")",
",",
"mode",
"=",
"'w'",
",",
"compression",
"=",
"8",
")",
"as",
"zip_file",
":",
"for",
"root",
",",
"_",
",",
"items",
"in",
"os",
".",
"walk",
"(",
"self",
".",
"temp_dir",
".",
"absolute",
"(",
")",
")",
":",
"for",
"item",
"in",
"items",
":",
"item_abs_path",
"=",
"Path",
"(",
"root",
",",
"item",
")",
".",
"absolute",
"(",
")",
"item_rel_path",
"=",
"Path",
"(",
"item_abs_path",
")",
".",
"relative_to",
"(",
"self",
".",
"temp_dir",
")",
"zip_file",
".",
"write",
"(",
"item_abs_path",
",",
"arcname",
"=",
"item_rel_path",
")",
"return",
"str",
"(",
"destination_path",
")"
] |
Write mission, dictionary etc. to a MIZ file
Args:
destination: target MIZ file (if none, defaults to source MIZ + "_EMIZ"
Returns: destination file
|
[
"Write",
"mission",
"dictionary",
"etc",
".",
"to",
"a",
"MIZ",
"file"
] |
python
|
train
| 35.548387 |
MartijnBraam/pyElectronics
|
electronics/devices/bmp180.py
|
https://github.com/MartijnBraam/pyElectronics/blob/a20878c9fa190135f1e478e9ea0b54ca43ff308e/electronics/devices/bmp180.py#L87-L104
|
def temperature(self):
"""Get the temperature from the sensor.
:returns: The temperature in degree celcius as a float
:example:
>>> sensor = BMP180(gw)
>>> sensor.load_calibration()
>>> sensor.temperature()
21.4
"""
ut = self.get_raw_temp()
x1 = ((ut - self.cal['AC6']) * self.cal['AC5']) >> 15
x2 = (self.cal['MC'] << 11) // (x1 + self.cal['MD'])
b5 = x1 + x2
return ((b5 + 8) >> 4) / 10
|
[
"def",
"temperature",
"(",
"self",
")",
":",
"ut",
"=",
"self",
".",
"get_raw_temp",
"(",
")",
"x1",
"=",
"(",
"(",
"ut",
"-",
"self",
".",
"cal",
"[",
"'AC6'",
"]",
")",
"*",
"self",
".",
"cal",
"[",
"'AC5'",
"]",
")",
">>",
"15",
"x2",
"=",
"(",
"self",
".",
"cal",
"[",
"'MC'",
"]",
"<<",
"11",
")",
"//",
"(",
"x1",
"+",
"self",
".",
"cal",
"[",
"'MD'",
"]",
")",
"b5",
"=",
"x1",
"+",
"x2",
"return",
"(",
"(",
"b5",
"+",
"8",
")",
">>",
"4",
")",
"/",
"10"
] |
Get the temperature from the sensor.
:returns: The temperature in degree celcius as a float
:example:
>>> sensor = BMP180(gw)
>>> sensor.load_calibration()
>>> sensor.temperature()
21.4
|
[
"Get",
"the",
"temperature",
"from",
"the",
"sensor",
"."
] |
python
|
train
| 26.611111 |
recurly/recurly-client-python
|
recurly/__init__.py
|
https://github.com/recurly/recurly-client-python/blob/682217c4e85ec5c8d4e41519ee0620d2dc4d84d7/recurly/__init__.py#L362-L367
|
def create_shipping_address(self, shipping_address):
"""Creates a shipping address on an existing account. If you are
creating an account, you can embed the shipping addresses with the
request"""
url = urljoin(self._url, '/shipping_addresses')
return shipping_address.post(url)
|
[
"def",
"create_shipping_address",
"(",
"self",
",",
"shipping_address",
")",
":",
"url",
"=",
"urljoin",
"(",
"self",
".",
"_url",
",",
"'/shipping_addresses'",
")",
"return",
"shipping_address",
".",
"post",
"(",
"url",
")"
] |
Creates a shipping address on an existing account. If you are
creating an account, you can embed the shipping addresses with the
request
|
[
"Creates",
"a",
"shipping",
"address",
"on",
"an",
"existing",
"account",
".",
"If",
"you",
"are",
"creating",
"an",
"account",
"you",
"can",
"embed",
"the",
"shipping",
"addresses",
"with",
"the",
"request"
] |
python
|
train
| 52 |
phoebe-project/phoebe2
|
phoebe/parameters/constraint.py
|
https://github.com/phoebe-project/phoebe2/blob/e64b8be683977064e2d55dd1b3ac400f64c3e379/phoebe/parameters/constraint.py#L189-L193
|
def t0_supconj_to_perpass(t0_supconj, period, ecc, per0):
"""
TODO: add documentation
"""
return ConstraintParameter(t0_supconj._bundle, "t0_supconj_to_perpass({}, {}, {}, {})".format(_get_expr(t0_supconj), _get_expr(period), _get_expr(ecc), _get_expr(per0)))
|
[
"def",
"t0_supconj_to_perpass",
"(",
"t0_supconj",
",",
"period",
",",
"ecc",
",",
"per0",
")",
":",
"return",
"ConstraintParameter",
"(",
"t0_supconj",
".",
"_bundle",
",",
"\"t0_supconj_to_perpass({}, {}, {}, {})\"",
".",
"format",
"(",
"_get_expr",
"(",
"t0_supconj",
")",
",",
"_get_expr",
"(",
"period",
")",
",",
"_get_expr",
"(",
"ecc",
")",
",",
"_get_expr",
"(",
"per0",
")",
")",
")"
] |
TODO: add documentation
|
[
"TODO",
":",
"add",
"documentation"
] |
python
|
train
| 54.2 |
AtteqCom/zsl
|
src/zsl/resource/json_server_resource.py
|
https://github.com/AtteqCom/zsl/blob/ab51a96da1780ff642912396d4b85bdcb72560c1/src/zsl/resource/json_server_resource.py#L233-L247
|
def update(self, *args, **kwargs):
"""Modifies the parameters and adds metadata for update results.
Currently it does not support `PUT` method, which works as replacing
the resource. This is somehow questionable in relation DB.
"""
if request.method == 'PUT':
logging.warning("Called not implemented resource method PUT")
resource = super(JsonServerResource, self).update(*args, **kwargs)
if resource:
return resource
else:
return NOT_FOUND
|
[
"def",
"update",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"request",
".",
"method",
"==",
"'PUT'",
":",
"logging",
".",
"warning",
"(",
"\"Called not implemented resource method PUT\"",
")",
"resource",
"=",
"super",
"(",
"JsonServerResource",
",",
"self",
")",
".",
"update",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"resource",
":",
"return",
"resource",
"else",
":",
"return",
"NOT_FOUND"
] |
Modifies the parameters and adds metadata for update results.
Currently it does not support `PUT` method, which works as replacing
the resource. This is somehow questionable in relation DB.
|
[
"Modifies",
"the",
"parameters",
"and",
"adds",
"metadata",
"for",
"update",
"results",
"."
] |
python
|
train
| 35.266667 |
juju/python-libjuju
|
juju/model.py
|
https://github.com/juju/python-libjuju/blob/58f0011f4c57cd68830258952fa952eaadca6b38/juju/model.py#L2097-L2164
|
async def deploy(self, charm, series, application, options, constraints,
storage, endpoint_bindings, *args):
"""
:param charm string:
Charm holds the URL of the charm to be used to deploy this
application.
:param series string:
Series holds the series of the application to be deployed
if the charm default is not sufficient.
:param application string:
Application holds the application name.
:param options map[string]interface{}:
Options holds application options.
:param constraints string:
Constraints holds the optional application constraints.
:param storage map[string]string:
Storage holds the optional storage constraints.
:param endpoint_bindings map[string]string:
EndpointBindings holds the optional endpoint bindings
:param devices map[string]string:
Devices holds the optional devices constraints.
(Only given on Juju 2.5+)
:param resources map[string]int:
Resources identifies the revision to use for each resource
of the application's charm.
:param num_units int:
NumUnits holds the number of units required. For IAAS models, this
will be 0 and separate AddUnitChanges will be used. For Kubernetes
models, this will be used to scale the application.
(Only given on Juju 2.5+)
"""
# resolve indirect references
charm = self.resolve(charm)
if len(args) == 1:
# Juju 2.4 and below only sends the resources
resources = args[0]
devices, num_units = None, None
else:
# Juju 2.5+ sends devices before resources, as well as num_units
# There might be placement but we need to ignore that.
devices, resources, num_units = args[:3]
if not charm.startswith('local:'):
resources = await self.model._add_store_resources(
application, charm, overrides=resources)
await self.model._deploy(
charm_url=charm,
application=application,
series=series,
config=options,
constraints=constraints,
endpoint_bindings=endpoint_bindings,
resources=resources,
storage=storage,
devices=devices,
num_units=num_units,
)
return application
|
[
"async",
"def",
"deploy",
"(",
"self",
",",
"charm",
",",
"series",
",",
"application",
",",
"options",
",",
"constraints",
",",
"storage",
",",
"endpoint_bindings",
",",
"*",
"args",
")",
":",
"# resolve indirect references",
"charm",
"=",
"self",
".",
"resolve",
"(",
"charm",
")",
"if",
"len",
"(",
"args",
")",
"==",
"1",
":",
"# Juju 2.4 and below only sends the resources",
"resources",
"=",
"args",
"[",
"0",
"]",
"devices",
",",
"num_units",
"=",
"None",
",",
"None",
"else",
":",
"# Juju 2.5+ sends devices before resources, as well as num_units",
"# There might be placement but we need to ignore that.",
"devices",
",",
"resources",
",",
"num_units",
"=",
"args",
"[",
":",
"3",
"]",
"if",
"not",
"charm",
".",
"startswith",
"(",
"'local:'",
")",
":",
"resources",
"=",
"await",
"self",
".",
"model",
".",
"_add_store_resources",
"(",
"application",
",",
"charm",
",",
"overrides",
"=",
"resources",
")",
"await",
"self",
".",
"model",
".",
"_deploy",
"(",
"charm_url",
"=",
"charm",
",",
"application",
"=",
"application",
",",
"series",
"=",
"series",
",",
"config",
"=",
"options",
",",
"constraints",
"=",
"constraints",
",",
"endpoint_bindings",
"=",
"endpoint_bindings",
",",
"resources",
"=",
"resources",
",",
"storage",
"=",
"storage",
",",
"devices",
"=",
"devices",
",",
"num_units",
"=",
"num_units",
",",
")",
"return",
"application"
] |
:param charm string:
Charm holds the URL of the charm to be used to deploy this
application.
:param series string:
Series holds the series of the application to be deployed
if the charm default is not sufficient.
:param application string:
Application holds the application name.
:param options map[string]interface{}:
Options holds application options.
:param constraints string:
Constraints holds the optional application constraints.
:param storage map[string]string:
Storage holds the optional storage constraints.
:param endpoint_bindings map[string]string:
EndpointBindings holds the optional endpoint bindings
:param devices map[string]string:
Devices holds the optional devices constraints.
(Only given on Juju 2.5+)
:param resources map[string]int:
Resources identifies the revision to use for each resource
of the application's charm.
:param num_units int:
NumUnits holds the number of units required. For IAAS models, this
will be 0 and separate AddUnitChanges will be used. For Kubernetes
models, this will be used to scale the application.
(Only given on Juju 2.5+)
|
[
":",
"param",
"charm",
"string",
":",
"Charm",
"holds",
"the",
"URL",
"of",
"the",
"charm",
"to",
"be",
"used",
"to",
"deploy",
"this",
"application",
"."
] |
python
|
train
| 36.367647 |
tonysimpson/nanomsg-python
|
_nanomsg_ctypes/__init__.py
|
https://github.com/tonysimpson/nanomsg-python/blob/3acd9160f90f91034d4a43ce603aaa19fbaf1f2e/_nanomsg_ctypes/__init__.py#L224-L244
|
def nn_poll(fds, timeout=-1):
"""
nn_pollfds
:param fds: dict (file descriptor => pollmode)
:param timeout: timeout in milliseconds
:return:
"""
polls = []
for i, entry in enumerate(fds.items()):
s = PollFds()
fd, event = entry
s.fd = fd
s.events = event
s.revents = 0
polls.append(s)
poll_array = (PollFds*len(fds))(*polls)
res = _nn_poll(poll_array, len(fds), int(timeout))
if res <= 0:
return res, {}
return res, {item.fd: item.revents for item in poll_array}
|
[
"def",
"nn_poll",
"(",
"fds",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"polls",
"=",
"[",
"]",
"for",
"i",
",",
"entry",
"in",
"enumerate",
"(",
"fds",
".",
"items",
"(",
")",
")",
":",
"s",
"=",
"PollFds",
"(",
")",
"fd",
",",
"event",
"=",
"entry",
"s",
".",
"fd",
"=",
"fd",
"s",
".",
"events",
"=",
"event",
"s",
".",
"revents",
"=",
"0",
"polls",
".",
"append",
"(",
"s",
")",
"poll_array",
"=",
"(",
"PollFds",
"*",
"len",
"(",
"fds",
")",
")",
"(",
"*",
"polls",
")",
"res",
"=",
"_nn_poll",
"(",
"poll_array",
",",
"len",
"(",
"fds",
")",
",",
"int",
"(",
"timeout",
")",
")",
"if",
"res",
"<=",
"0",
":",
"return",
"res",
",",
"{",
"}",
"return",
"res",
",",
"{",
"item",
".",
"fd",
":",
"item",
".",
"revents",
"for",
"item",
"in",
"poll_array",
"}"
] |
nn_pollfds
:param fds: dict (file descriptor => pollmode)
:param timeout: timeout in milliseconds
:return:
|
[
"nn_pollfds",
":",
"param",
"fds",
":",
"dict",
"(",
"file",
"descriptor",
"=",
">",
"pollmode",
")",
":",
"param",
"timeout",
":",
"timeout",
"in",
"milliseconds",
":",
"return",
":"
] |
python
|
train
| 26.047619 |
pantsbuild/pants
|
src/python/pants/ivy/ivy.py
|
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/ivy/ivy.py#L74-L94
|
def execute(self, jvm_options=None, args=None, executor=None,
workunit_factory=None, workunit_name=None, workunit_labels=None):
"""Executes the ivy commandline client with the given args.
Raises Ivy.Error if the command fails for any reason.
:param executor: Java executor to run ivy with.
"""
# NB(gmalmquist): It should be OK that we can't declare a subsystem_dependency in this file
# (because it's just a plain old object), because Ivy is only constructed by Bootstrapper, which
# makes an explicit call to IvySubsystem.global_instance() in its constructor, which in turn has
# a declared dependency on DistributionLocator.
executor = executor or SubprocessExecutor(DistributionLocator.cached())
runner = self.runner(jvm_options=jvm_options, args=args, executor=executor)
try:
with self.resolution_lock:
result = util.execute_runner(runner, workunit_factory, workunit_name, workunit_labels)
if result != 0:
raise self.Error('Ivy command failed with exit code {}{}'.format(
result, ': ' + ' '.join(args) if args else ''))
except executor.Error as e:
raise self.Error('Problem executing ivy: {}'.format(e))
|
[
"def",
"execute",
"(",
"self",
",",
"jvm_options",
"=",
"None",
",",
"args",
"=",
"None",
",",
"executor",
"=",
"None",
",",
"workunit_factory",
"=",
"None",
",",
"workunit_name",
"=",
"None",
",",
"workunit_labels",
"=",
"None",
")",
":",
"# NB(gmalmquist): It should be OK that we can't declare a subsystem_dependency in this file",
"# (because it's just a plain old object), because Ivy is only constructed by Bootstrapper, which",
"# makes an explicit call to IvySubsystem.global_instance() in its constructor, which in turn has",
"# a declared dependency on DistributionLocator.",
"executor",
"=",
"executor",
"or",
"SubprocessExecutor",
"(",
"DistributionLocator",
".",
"cached",
"(",
")",
")",
"runner",
"=",
"self",
".",
"runner",
"(",
"jvm_options",
"=",
"jvm_options",
",",
"args",
"=",
"args",
",",
"executor",
"=",
"executor",
")",
"try",
":",
"with",
"self",
".",
"resolution_lock",
":",
"result",
"=",
"util",
".",
"execute_runner",
"(",
"runner",
",",
"workunit_factory",
",",
"workunit_name",
",",
"workunit_labels",
")",
"if",
"result",
"!=",
"0",
":",
"raise",
"self",
".",
"Error",
"(",
"'Ivy command failed with exit code {}{}'",
".",
"format",
"(",
"result",
",",
"': '",
"+",
"' '",
".",
"join",
"(",
"args",
")",
"if",
"args",
"else",
"''",
")",
")",
"except",
"executor",
".",
"Error",
"as",
"e",
":",
"raise",
"self",
".",
"Error",
"(",
"'Problem executing ivy: {}'",
".",
"format",
"(",
"e",
")",
")"
] |
Executes the ivy commandline client with the given args.
Raises Ivy.Error if the command fails for any reason.
:param executor: Java executor to run ivy with.
|
[
"Executes",
"the",
"ivy",
"commandline",
"client",
"with",
"the",
"given",
"args",
"."
] |
python
|
train
| 57.714286 |
apache/airflow
|
airflow/utils/log/gcs_task_handler.py
|
https://github.com/apache/airflow/blob/b69c686ad8a0c89b9136bb4b31767257eb7b2597/airflow/utils/log/gcs_task_handler.py#L132-L163
|
def gcs_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
"""
if append:
try:
old_log = self.gcs_read(remote_log_location)
log = '\n'.join([old_log, log]) if old_log else log
except Exception as e:
if not hasattr(e, 'resp') or e.resp.get('status') != '404':
log = '*** Previous log discarded: {}\n\n'.format(str(e)) + log
try:
bkt, blob = self.parse_gcs_url(remote_log_location)
from tempfile import NamedTemporaryFile
with NamedTemporaryFile(mode='w+') as tmpfile:
tmpfile.write(log)
# Force the file to be flushed, since we're doing the
# upload from within the file context (it hasn't been
# closed).
tmpfile.flush()
self.hook.upload(bkt, blob, tmpfile.name)
except Exception as e:
self.log.error('Could not write logs to %s: %s', remote_log_location, e)
|
[
"def",
"gcs_write",
"(",
"self",
",",
"log",
",",
"remote_log_location",
",",
"append",
"=",
"True",
")",
":",
"if",
"append",
":",
"try",
":",
"old_log",
"=",
"self",
".",
"gcs_read",
"(",
"remote_log_location",
")",
"log",
"=",
"'\\n'",
".",
"join",
"(",
"[",
"old_log",
",",
"log",
"]",
")",
"if",
"old_log",
"else",
"log",
"except",
"Exception",
"as",
"e",
":",
"if",
"not",
"hasattr",
"(",
"e",
",",
"'resp'",
")",
"or",
"e",
".",
"resp",
".",
"get",
"(",
"'status'",
")",
"!=",
"'404'",
":",
"log",
"=",
"'*** Previous log discarded: {}\\n\\n'",
".",
"format",
"(",
"str",
"(",
"e",
")",
")",
"+",
"log",
"try",
":",
"bkt",
",",
"blob",
"=",
"self",
".",
"parse_gcs_url",
"(",
"remote_log_location",
")",
"from",
"tempfile",
"import",
"NamedTemporaryFile",
"with",
"NamedTemporaryFile",
"(",
"mode",
"=",
"'w+'",
")",
"as",
"tmpfile",
":",
"tmpfile",
".",
"write",
"(",
"log",
")",
"# Force the file to be flushed, since we're doing the",
"# upload from within the file context (it hasn't been",
"# closed).",
"tmpfile",
".",
"flush",
"(",
")",
"self",
".",
"hook",
".",
"upload",
"(",
"bkt",
",",
"blob",
",",
"tmpfile",
".",
"name",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"log",
".",
"error",
"(",
"'Could not write logs to %s: %s'",
",",
"remote_log_location",
",",
"e",
")"
] |
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: if False, any existing log file is overwritten. If True,
the new log is appended to any existing logs.
:type append: bool
|
[
"Writes",
"the",
"log",
"to",
"the",
"remote_log_location",
".",
"Fails",
"silently",
"if",
"no",
"hook",
"was",
"created",
".",
":",
"param",
"log",
":",
"the",
"log",
"to",
"write",
"to",
"the",
"remote_log_location",
":",
"type",
"log",
":",
"str",
":",
"param",
"remote_log_location",
":",
"the",
"log",
"s",
"location",
"in",
"remote",
"storage",
":",
"type",
"remote_log_location",
":",
"str",
"(",
"path",
")",
":",
"param",
"append",
":",
"if",
"False",
"any",
"existing",
"log",
"file",
"is",
"overwritten",
".",
"If",
"True",
"the",
"new",
"log",
"is",
"appended",
"to",
"any",
"existing",
"logs",
".",
":",
"type",
"append",
":",
"bool"
] |
python
|
test
| 46.1875 |
BerkeleyAutomation/autolab_core
|
autolab_core/data_stream_recorder.py
|
https://github.com/BerkeleyAutomation/autolab_core/blob/8f3813f6401972868cc5e3981ba1b4382d4418d5/autolab_core/data_stream_recorder.py#L209-L217
|
def _stop(self):
""" Stops recording. Returns all recorded data and their timestamps. Destroys recorder process."""
self._pause()
self._cmds_q.put(("stop",))
try:
self._recorder.terminate()
except Exception:
pass
self._recording = False
|
[
"def",
"_stop",
"(",
"self",
")",
":",
"self",
".",
"_pause",
"(",
")",
"self",
".",
"_cmds_q",
".",
"put",
"(",
"(",
"\"stop\"",
",",
")",
")",
"try",
":",
"self",
".",
"_recorder",
".",
"terminate",
"(",
")",
"except",
"Exception",
":",
"pass",
"self",
".",
"_recording",
"=",
"False"
] |
Stops recording. Returns all recorded data and their timestamps. Destroys recorder process.
|
[
"Stops",
"recording",
".",
"Returns",
"all",
"recorded",
"data",
"and",
"their",
"timestamps",
".",
"Destroys",
"recorder",
"process",
"."
] |
python
|
train
| 33.333333 |
walkr/nanoservice
|
benchmarks/bench_req_rep.py
|
https://github.com/walkr/nanoservice/blob/e2098986b1baa5f283167ae487d14f3c6c21961a/benchmarks/bench_req_rep.py#L8-L21
|
def start_service(addr, n):
""" Start a service """
s = Service(addr)
s.register('add', lambda x, y: x + y)
started = time.time()
for _ in range(n):
s.process()
duration = time.time() - started
time.sleep(0.1)
print('Service stats:')
util.print_stats(n, duration)
return
|
[
"def",
"start_service",
"(",
"addr",
",",
"n",
")",
":",
"s",
"=",
"Service",
"(",
"addr",
")",
"s",
".",
"register",
"(",
"'add'",
",",
"lambda",
"x",
",",
"y",
":",
"x",
"+",
"y",
")",
"started",
"=",
"time",
".",
"time",
"(",
")",
"for",
"_",
"in",
"range",
"(",
"n",
")",
":",
"s",
".",
"process",
"(",
")",
"duration",
"=",
"time",
".",
"time",
"(",
")",
"-",
"started",
"time",
".",
"sleep",
"(",
"0.1",
")",
"print",
"(",
"'Service stats:'",
")",
"util",
".",
"print_stats",
"(",
"n",
",",
"duration",
")",
"return"
] |
Start a service
|
[
"Start",
"a",
"service"
] |
python
|
train
| 21.928571 |
merll/docker-fabric
|
dockerfabric/apiclient.py
|
https://github.com/merll/docker-fabric/blob/785d84e40e17265b667d8b11a6e30d8e6b2bf8d4/dockerfabric/apiclient.py#L223-L237
|
def pull(self, repository, tag=None, stream=True, **kwargs):
"""
Identical to :meth:`dockermap.client.base.DockerClientWrapper.pull` with two enhancements:
* additional logging;
* the ``insecure_registry`` flag can be passed through ``kwargs``, or set as default using
``env.docker_registry_insecure``.
"""
c_insecure = kwargs.pop('insecure_registry', env.get('docker_registry_insecure'))
set_raise_on_error(kwargs)
try:
return super(DockerFabricClient, self).pull(repository, tag=tag, stream=stream,
insecure_registry=c_insecure, **kwargs)
except DockerStatusError as e:
error(e.message)
|
[
"def",
"pull",
"(",
"self",
",",
"repository",
",",
"tag",
"=",
"None",
",",
"stream",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"c_insecure",
"=",
"kwargs",
".",
"pop",
"(",
"'insecure_registry'",
",",
"env",
".",
"get",
"(",
"'docker_registry_insecure'",
")",
")",
"set_raise_on_error",
"(",
"kwargs",
")",
"try",
":",
"return",
"super",
"(",
"DockerFabricClient",
",",
"self",
")",
".",
"pull",
"(",
"repository",
",",
"tag",
"=",
"tag",
",",
"stream",
"=",
"stream",
",",
"insecure_registry",
"=",
"c_insecure",
",",
"*",
"*",
"kwargs",
")",
"except",
"DockerStatusError",
"as",
"e",
":",
"error",
"(",
"e",
".",
"message",
")"
] |
Identical to :meth:`dockermap.client.base.DockerClientWrapper.pull` with two enhancements:
* additional logging;
* the ``insecure_registry`` flag can be passed through ``kwargs``, or set as default using
``env.docker_registry_insecure``.
|
[
"Identical",
"to",
":",
"meth",
":",
"dockermap",
".",
"client",
".",
"base",
".",
"DockerClientWrapper",
".",
"pull",
"with",
"two",
"enhancements",
":"
] |
python
|
train
| 49.133333 |
BerkeleyAutomation/perception
|
perception/image.py
|
https://github.com/BerkeleyAutomation/perception/blob/03d9b37dd6b66896cdfe173905c9413c8c3c5df6/perception/image.py#L761-L771
|
def nonzero_pixels(self):
""" Return an array of the nonzero pixels.
Returns
-------
:obj:`numpy.ndarray`
Nx2 array of the nonzero pixels
"""
nonzero_px = np.where(np.sum(self.raw_data, axis=2) > 0)
nonzero_px = np.c_[nonzero_px[0], nonzero_px[1]]
return nonzero_px
|
[
"def",
"nonzero_pixels",
"(",
"self",
")",
":",
"nonzero_px",
"=",
"np",
".",
"where",
"(",
"np",
".",
"sum",
"(",
"self",
".",
"raw_data",
",",
"axis",
"=",
"2",
")",
">",
"0",
")",
"nonzero_px",
"=",
"np",
".",
"c_",
"[",
"nonzero_px",
"[",
"0",
"]",
",",
"nonzero_px",
"[",
"1",
"]",
"]",
"return",
"nonzero_px"
] |
Return an array of the nonzero pixels.
Returns
-------
:obj:`numpy.ndarray`
Nx2 array of the nonzero pixels
|
[
"Return",
"an",
"array",
"of",
"the",
"nonzero",
"pixels",
"."
] |
python
|
train
| 30.272727 |
joedborg/CoPing
|
CoPing/ping.py
|
https://github.com/joedborg/CoPing/blob/2239729ee4107b999c1cba696d94f7d48ab73d36/CoPing/ping.py#L194-L232
|
def send_one_ping(self, current_socket):
"""
Send one ICMP ECHO_REQUEST.
"""
# Header is type (8), code (8), checksum (16), id (16), sequence (16)
checksum = 0
# Make a dummy header with a 0 checksum.
header = struct.pack(
"!BBHHH", ICMP_ECHO, 0, checksum, self.own_id, self.seq_number
)
padBytes = []
startVal = 0x42
for i in range(startVal, startVal + (self.packet_size)):
padBytes += [(i & 0xff)] # Keep chars in the 0-255 range
data = bytes(padBytes)
# Calculate the checksum on the data and the dummy header.
checksum = calculate_checksum(header + data) # Checksum is in network order
# Now that we have the right checksum, we put that in. It's just easier
# to make up a new header than to stuff it into the dummy.
header = struct.pack(
"!BBHHH", ICMP_ECHO, 0, checksum, self.own_id, self.seq_number
)
packet = header + data
send_time = default_timer()
try:
current_socket.sendto(packet, (self.destination, 1)) # Port number is irrelevant for ICMP
except socket.error as e:
print("General failure (%s)" % (e.args[1]))
current_socket.close()
return
return send_time
|
[
"def",
"send_one_ping",
"(",
"self",
",",
"current_socket",
")",
":",
"# Header is type (8), code (8), checksum (16), id (16), sequence (16)",
"checksum",
"=",
"0",
"# Make a dummy header with a 0 checksum.",
"header",
"=",
"struct",
".",
"pack",
"(",
"\"!BBHHH\"",
",",
"ICMP_ECHO",
",",
"0",
",",
"checksum",
",",
"self",
".",
"own_id",
",",
"self",
".",
"seq_number",
")",
"padBytes",
"=",
"[",
"]",
"startVal",
"=",
"0x42",
"for",
"i",
"in",
"range",
"(",
"startVal",
",",
"startVal",
"+",
"(",
"self",
".",
"packet_size",
")",
")",
":",
"padBytes",
"+=",
"[",
"(",
"i",
"&",
"0xff",
")",
"]",
"# Keep chars in the 0-255 range",
"data",
"=",
"bytes",
"(",
"padBytes",
")",
"# Calculate the checksum on the data and the dummy header.",
"checksum",
"=",
"calculate_checksum",
"(",
"header",
"+",
"data",
")",
"# Checksum is in network order",
"# Now that we have the right checksum, we put that in. It's just easier",
"# to make up a new header than to stuff it into the dummy.",
"header",
"=",
"struct",
".",
"pack",
"(",
"\"!BBHHH\"",
",",
"ICMP_ECHO",
",",
"0",
",",
"checksum",
",",
"self",
".",
"own_id",
",",
"self",
".",
"seq_number",
")",
"packet",
"=",
"header",
"+",
"data",
"send_time",
"=",
"default_timer",
"(",
")",
"try",
":",
"current_socket",
".",
"sendto",
"(",
"packet",
",",
"(",
"self",
".",
"destination",
",",
"1",
")",
")",
"# Port number is irrelevant for ICMP",
"except",
"socket",
".",
"error",
"as",
"e",
":",
"print",
"(",
"\"General failure (%s)\"",
"%",
"(",
"e",
".",
"args",
"[",
"1",
"]",
")",
")",
"current_socket",
".",
"close",
"(",
")",
"return",
"return",
"send_time"
] |
Send one ICMP ECHO_REQUEST.
|
[
"Send",
"one",
"ICMP",
"ECHO_REQUEST",
"."
] |
python
|
train
| 33.564103 |
observermedia/django-wordpress-rest
|
wordpress/loading.py
|
https://github.com/observermedia/django-wordpress-rest/blob/f0d96891d8ac5a69c8ba90e044876e756fad1bfe/wordpress/loading.py#L807-L816
|
def get_or_create_media(self, api_media):
"""
Find or create a Media object given API data.
:param api_media: the API data for the Media
:return: a tuple of an Media instance and a boolean indicating whether the Media was created or not
"""
return Media.objects.get_or_create(site_id=self.site_id,
wp_id=api_media["ID"],
defaults=self.api_object_data("media", api_media))
|
[
"def",
"get_or_create_media",
"(",
"self",
",",
"api_media",
")",
":",
"return",
"Media",
".",
"objects",
".",
"get_or_create",
"(",
"site_id",
"=",
"self",
".",
"site_id",
",",
"wp_id",
"=",
"api_media",
"[",
"\"ID\"",
"]",
",",
"defaults",
"=",
"self",
".",
"api_object_data",
"(",
"\"media\"",
",",
"api_media",
")",
")"
] |
Find or create a Media object given API data.
:param api_media: the API data for the Media
:return: a tuple of an Media instance and a boolean indicating whether the Media was created or not
|
[
"Find",
"or",
"create",
"a",
"Media",
"object",
"given",
"API",
"data",
"."
] |
python
|
train
| 49.7 |
StackStorm/pybind
|
pybind/slxos/v17r_1_01a/brocade_mpls_rpc/__init__.py
|
https://github.com/StackStorm/pybind/blob/44c467e71b2b425be63867aba6e6fa28b2cfe7fb/pybind/slxos/v17r_1_01a/brocade_mpls_rpc/__init__.py#L1047-L1068
|
def _set_show_mpls_rsvp(self, v, load=False):
"""
Setter method for show_mpls_rsvp, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_rsvp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_rsvp() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=show_mpls_rsvp.show_mpls_rsvp, is_leaf=True, yang_name="show-mpls-rsvp", rest_name="show-mpls-rsvp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsRsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """show_mpls_rsvp must be of a type compatible with rpc""",
'defined-type': "rpc",
'generated-type': """YANGDynClass(base=show_mpls_rsvp.show_mpls_rsvp, is_leaf=True, yang_name="show-mpls-rsvp", rest_name="show-mpls-rsvp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsRsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""",
})
self.__show_mpls_rsvp = t
if hasattr(self, '_set'):
self._set()
|
[
"def",
"_set_show_mpls_rsvp",
"(",
"self",
",",
"v",
",",
"load",
"=",
"False",
")",
":",
"if",
"hasattr",
"(",
"v",
",",
"\"_utype\"",
")",
":",
"v",
"=",
"v",
".",
"_utype",
"(",
"v",
")",
"try",
":",
"t",
"=",
"YANGDynClass",
"(",
"v",
",",
"base",
"=",
"show_mpls_rsvp",
".",
"show_mpls_rsvp",
",",
"is_leaf",
"=",
"True",
",",
"yang_name",
"=",
"\"show-mpls-rsvp\"",
",",
"rest_name",
"=",
"\"show-mpls-rsvp\"",
",",
"parent",
"=",
"self",
",",
"path_helper",
"=",
"self",
".",
"_path_helper",
",",
"extmethods",
"=",
"self",
".",
"_extmethods",
",",
"register_paths",
"=",
"False",
",",
"extensions",
"=",
"{",
"u'tailf-common'",
":",
"{",
"u'hidden'",
":",
"u'full'",
",",
"u'actionpoint'",
":",
"u'showMplsRsvp'",
"}",
"}",
",",
"namespace",
"=",
"'urn:brocade.com:mgmt:brocade-mpls'",
",",
"defining_module",
"=",
"'brocade-mpls'",
",",
"yang_type",
"=",
"'rpc'",
",",
"is_config",
"=",
"True",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"ValueError",
"(",
"{",
"'error-string'",
":",
"\"\"\"show_mpls_rsvp must be of a type compatible with rpc\"\"\"",
",",
"'defined-type'",
":",
"\"rpc\"",
",",
"'generated-type'",
":",
"\"\"\"YANGDynClass(base=show_mpls_rsvp.show_mpls_rsvp, is_leaf=True, yang_name=\"show-mpls-rsvp\", rest_name=\"show-mpls-rsvp\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'full', u'actionpoint': u'showMplsRsvp'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)\"\"\"",
",",
"}",
")",
"self",
".",
"__show_mpls_rsvp",
"=",
"t",
"if",
"hasattr",
"(",
"self",
",",
"'_set'",
")",
":",
"self",
".",
"_set",
"(",
")"
] |
Setter method for show_mpls_rsvp, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_mpls_rsvp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_show_mpls_rsvp() directly.
|
[
"Setter",
"method",
"for",
"show_mpls_rsvp",
"mapped",
"from",
"YANG",
"variable",
"/",
"brocade_mpls_rpc",
"/",
"show_mpls_rsvp",
"(",
"rpc",
")",
"If",
"this",
"variable",
"is",
"read",
"-",
"only",
"(",
"config",
":",
"false",
")",
"in",
"the",
"source",
"YANG",
"file",
"then",
"_set_show_mpls_rsvp",
"is",
"considered",
"as",
"a",
"private",
"method",
".",
"Backends",
"looking",
"to",
"populate",
"this",
"variable",
"should",
"do",
"so",
"via",
"calling",
"thisObj",
".",
"_set_show_mpls_rsvp",
"()",
"directly",
"."
] |
python
|
train
| 72.590909 |
frascoweb/frasco
|
frasco/commands.py
|
https://github.com/frascoweb/frasco/blob/ea519d69dd5ca6deaf3650175692ee4a1a02518f/frasco/commands.py#L76-L99
|
def as_command(self):
"""Creates the click command wrapping the function
"""
try:
params = self.unbound_func.__click_params__
params.reverse()
del self.unbound_func.__click_params__
except AttributeError:
params = []
help = inspect.getdoc(self.real_func)
if isinstance(help, bytes):
help = help.decode('utf-8')
self.options.setdefault('help', help)
@pass_script_info_decorator
def callback(info, *args, **kwargs):
if self.with_reloader:
app = info.load_app()
if app.debug:
def inner():
return self.command_callback(info, *args, **kwargs)
run_with_reloader(inner, extra_files=get_reloader_extra_files())
return
self.command_callback(info, *args, **kwargs)
return self.cls(name=self.name, callback=callback, params=params, **self.options)
|
[
"def",
"as_command",
"(",
"self",
")",
":",
"try",
":",
"params",
"=",
"self",
".",
"unbound_func",
".",
"__click_params__",
"params",
".",
"reverse",
"(",
")",
"del",
"self",
".",
"unbound_func",
".",
"__click_params__",
"except",
"AttributeError",
":",
"params",
"=",
"[",
"]",
"help",
"=",
"inspect",
".",
"getdoc",
"(",
"self",
".",
"real_func",
")",
"if",
"isinstance",
"(",
"help",
",",
"bytes",
")",
":",
"help",
"=",
"help",
".",
"decode",
"(",
"'utf-8'",
")",
"self",
".",
"options",
".",
"setdefault",
"(",
"'help'",
",",
"help",
")",
"@",
"pass_script_info_decorator",
"def",
"callback",
"(",
"info",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"with_reloader",
":",
"app",
"=",
"info",
".",
"load_app",
"(",
")",
"if",
"app",
".",
"debug",
":",
"def",
"inner",
"(",
")",
":",
"return",
"self",
".",
"command_callback",
"(",
"info",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"run_with_reloader",
"(",
"inner",
",",
"extra_files",
"=",
"get_reloader_extra_files",
"(",
")",
")",
"return",
"self",
".",
"command_callback",
"(",
"info",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"cls",
"(",
"name",
"=",
"self",
".",
"name",
",",
"callback",
"=",
"callback",
",",
"params",
"=",
"params",
",",
"*",
"*",
"self",
".",
"options",
")"
] |
Creates the click command wrapping the function
|
[
"Creates",
"the",
"click",
"command",
"wrapping",
"the",
"function"
] |
python
|
train
| 41.375 |
PyHDI/Pyverilog
|
pyverilog/vparser/parser.py
|
https://github.com/PyHDI/Pyverilog/blob/b852cc5ed6a7a2712e33639f9d9782d0d1587a53/pyverilog/vparser/parser.py#L927-L930
|
def p_expression_ulnot(self, p):
'expression : LNOT expression %prec ULNOT'
p[0] = Ulnot(p[2], lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1))
|
[
"def",
"p_expression_ulnot",
"(",
"self",
",",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"Ulnot",
"(",
"p",
"[",
"2",
"]",
",",
"lineno",
"=",
"p",
".",
"lineno",
"(",
"1",
")",
")",
"p",
".",
"set_lineno",
"(",
"0",
",",
"p",
".",
"lineno",
"(",
"1",
")",
")"
] |
expression : LNOT expression %prec ULNOT
|
[
"expression",
":",
"LNOT",
"expression",
"%prec",
"ULNOT"
] |
python
|
train
| 41 |
Parsl/parsl
|
parsl/providers/jetstream/jetstream.py
|
https://github.com/Parsl/parsl/blob/d7afb3bc37f50dcf224ae78637944172edb35dac/parsl/providers/jetstream/jetstream.py#L119-L129
|
def scale_in(self, blocks=0, machines=0, strategy=None):
''' Scale in resources
'''
count = 0
instances = self.client.servers.list()
for instance in instances[0:machines]:
print("Deleting : ", instance)
instance.delete()
count += 1
return count
|
[
"def",
"scale_in",
"(",
"self",
",",
"blocks",
"=",
"0",
",",
"machines",
"=",
"0",
",",
"strategy",
"=",
"None",
")",
":",
"count",
"=",
"0",
"instances",
"=",
"self",
".",
"client",
".",
"servers",
".",
"list",
"(",
")",
"for",
"instance",
"in",
"instances",
"[",
"0",
":",
"machines",
"]",
":",
"print",
"(",
"\"Deleting : \"",
",",
"instance",
")",
"instance",
".",
"delete",
"(",
")",
"count",
"+=",
"1",
"return",
"count"
] |
Scale in resources
|
[
"Scale",
"in",
"resources"
] |
python
|
valid
| 29 |
seequent/properties
|
properties/base/instance.py
|
https://github.com/seequent/properties/blob/096b07012fff86b0a880c8c018320c3b512751b9/properties/base/instance.py#L125-L139
|
def serialize(self, value, **kwargs):
"""Serialize instance to JSON
If the value is a HasProperties instance, it is serialized with
the include_class argument passed along. Otherwise, to_json is
called.
"""
kwargs.update({'include_class': kwargs.get('include_class', True)})
if self.serializer is not None:
return self.serializer(value, **kwargs)
if value is None:
return None
if isinstance(value, HasProperties):
return value.serialize(**kwargs)
return self.to_json(value, **kwargs)
|
[
"def",
"serialize",
"(",
"self",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"update",
"(",
"{",
"'include_class'",
":",
"kwargs",
".",
"get",
"(",
"'include_class'",
",",
"True",
")",
"}",
")",
"if",
"self",
".",
"serializer",
"is",
"not",
"None",
":",
"return",
"self",
".",
"serializer",
"(",
"value",
",",
"*",
"*",
"kwargs",
")",
"if",
"value",
"is",
"None",
":",
"return",
"None",
"if",
"isinstance",
"(",
"value",
",",
"HasProperties",
")",
":",
"return",
"value",
".",
"serialize",
"(",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"to_json",
"(",
"value",
",",
"*",
"*",
"kwargs",
")"
] |
Serialize instance to JSON
If the value is a HasProperties instance, it is serialized with
the include_class argument passed along. Otherwise, to_json is
called.
|
[
"Serialize",
"instance",
"to",
"JSON"
] |
python
|
train
| 39.066667 |
couchbase/couchbase-python-client
|
couchbase/bucket.py
|
https://github.com/couchbase/couchbase-python-client/blob/a7bada167785bf79a29c39f820d932a433a6a535/couchbase/bucket.py#L1274-L1294
|
def endure_multi(self, keys, persist_to=-1, replicate_to=-1,
timeout=5.0, interval=0.010, check_removed=False):
"""Check durability requirements for multiple keys
:param keys: The keys to check
The type of keys may be one of the following:
* Sequence of keys
* A :class:`~couchbase.result.MultiResult` object
* A ``dict`` with CAS values as the dictionary value
* A sequence of :class:`~couchbase.result.Result` objects
:return: A :class:`~.MultiResult` object
of :class:`~.OperationResult` items.
.. seealso:: :meth:`endure`
"""
return _Base.endure_multi(self, keys, persist_to=persist_to,
replicate_to=replicate_to,
timeout=timeout, interval=interval,
check_removed=check_removed)
|
[
"def",
"endure_multi",
"(",
"self",
",",
"keys",
",",
"persist_to",
"=",
"-",
"1",
",",
"replicate_to",
"=",
"-",
"1",
",",
"timeout",
"=",
"5.0",
",",
"interval",
"=",
"0.010",
",",
"check_removed",
"=",
"False",
")",
":",
"return",
"_Base",
".",
"endure_multi",
"(",
"self",
",",
"keys",
",",
"persist_to",
"=",
"persist_to",
",",
"replicate_to",
"=",
"replicate_to",
",",
"timeout",
"=",
"timeout",
",",
"interval",
"=",
"interval",
",",
"check_removed",
"=",
"check_removed",
")"
] |
Check durability requirements for multiple keys
:param keys: The keys to check
The type of keys may be one of the following:
* Sequence of keys
* A :class:`~couchbase.result.MultiResult` object
* A ``dict`` with CAS values as the dictionary value
* A sequence of :class:`~couchbase.result.Result` objects
:return: A :class:`~.MultiResult` object
of :class:`~.OperationResult` items.
.. seealso:: :meth:`endure`
|
[
"Check",
"durability",
"requirements",
"for",
"multiple",
"keys"
] |
python
|
train
| 43.095238 |
astrocatalogs/astrocats
|
astrocats/catalog/entry.py
|
https://github.com/astrocatalogs/astrocats/blob/11abc3131c6366ecd23964369e55ff264add7805/astrocats/catalog/entry.py#L881-L902
|
def get_source_by_alias(self, alias):
"""Given an alias, find the corresponding source in this entry.
If the given alias doesn't exist (e.g. there are no sources), then a
`ValueError` is raised.
Arguments
---------
alias : str
The str-integer (e.g. '8') of the target source.
Returns
-------
source : `astrocats.catalog.source.Source` object
The source object corresponding to the passed alias.
"""
for source in self.get(self._KEYS.SOURCES, []):
if source[self._KEYS.ALIAS] == alias:
return source
raise ValueError("Source '{}': alias '{}' not found!".format(self[
self._KEYS.NAME], alias))
|
[
"def",
"get_source_by_alias",
"(",
"self",
",",
"alias",
")",
":",
"for",
"source",
"in",
"self",
".",
"get",
"(",
"self",
".",
"_KEYS",
".",
"SOURCES",
",",
"[",
"]",
")",
":",
"if",
"source",
"[",
"self",
".",
"_KEYS",
".",
"ALIAS",
"]",
"==",
"alias",
":",
"return",
"source",
"raise",
"ValueError",
"(",
"\"Source '{}': alias '{}' not found!\"",
".",
"format",
"(",
"self",
"[",
"self",
".",
"_KEYS",
".",
"NAME",
"]",
",",
"alias",
")",
")"
] |
Given an alias, find the corresponding source in this entry.
If the given alias doesn't exist (e.g. there are no sources), then a
`ValueError` is raised.
Arguments
---------
alias : str
The str-integer (e.g. '8') of the target source.
Returns
-------
source : `astrocats.catalog.source.Source` object
The source object corresponding to the passed alias.
|
[
"Given",
"an",
"alias",
"find",
"the",
"corresponding",
"source",
"in",
"this",
"entry",
"."
] |
python
|
train
| 33.363636 |
mehmetg/streak_client
|
streak_client/streak_client.py
|
https://github.com/mehmetg/streak_client/blob/46575510b4e4163a4a3cc06f7283a1ae377cdce6/streak_client/streak_client.py#L685-L703
|
def get_box_field(self, box_key, field_key = None):
'''Gets one/all field in a box
Args:
box_key key for pipeline
field_key key for field (default: None i.e. ALL)
returns status code, field dict or list thereof
'''
#does not work
self._raise_unimplemented_error()
uri = '/'.join([self.api_uri,
self.boxes_suffix,
box_key,
self.fields_suffix
])
if field_key:
uri = '/'.join([uri, field_key])
return self._req('get', uri)
|
[
"def",
"get_box_field",
"(",
"self",
",",
"box_key",
",",
"field_key",
"=",
"None",
")",
":",
"#does not work",
"self",
".",
"_raise_unimplemented_error",
"(",
")",
"uri",
"=",
"'/'",
".",
"join",
"(",
"[",
"self",
".",
"api_uri",
",",
"self",
".",
"boxes_suffix",
",",
"box_key",
",",
"self",
".",
"fields_suffix",
"]",
")",
"if",
"field_key",
":",
"uri",
"=",
"'/'",
".",
"join",
"(",
"[",
"uri",
",",
"field_key",
"]",
")",
"return",
"self",
".",
"_req",
"(",
"'get'",
",",
"uri",
")"
] |
Gets one/all field in a box
Args:
box_key key for pipeline
field_key key for field (default: None i.e. ALL)
returns status code, field dict or list thereof
|
[
"Gets",
"one",
"/",
"all",
"field",
"in",
"a",
"box",
"Args",
":",
"box_key",
"key",
"for",
"pipeline",
"field_key",
"key",
"for",
"field",
"(",
"default",
":",
"None",
"i",
".",
"e",
".",
"ALL",
")",
"returns",
"status",
"code",
"field",
"dict",
"or",
"list",
"thereof"
] |
python
|
train
| 24.473684 |
etcher-be/emiz
|
emiz/avwx/service.py
|
https://github.com/etcher-be/emiz/blob/1c3e32711921d7e600e85558ffe5d337956372de/emiz/avwx/service.py#L128-L134
|
def _extract(self, raw: str, station: str) -> str: # type: ignore
"""
Extracts the reports message using string finding
"""
report = raw[raw.find(station.upper() + ' '):]
report = report[:report.find(' =')]
return report
|
[
"def",
"_extract",
"(",
"self",
",",
"raw",
":",
"str",
",",
"station",
":",
"str",
")",
"->",
"str",
":",
"# type: ignore",
"report",
"=",
"raw",
"[",
"raw",
".",
"find",
"(",
"station",
".",
"upper",
"(",
")",
"+",
"' '",
")",
":",
"]",
"report",
"=",
"report",
"[",
":",
"report",
".",
"find",
"(",
"' ='",
")",
"]",
"return",
"report"
] |
Extracts the reports message using string finding
|
[
"Extracts",
"the",
"reports",
"message",
"using",
"string",
"finding"
] |
python
|
train
| 37.571429 |
SUSE-Enceladus/ipa
|
ipa/ipa_gce.py
|
https://github.com/SUSE-Enceladus/ipa/blob/0845eed0ea25a27dbb059ad1016105fa60002228/ipa/ipa_gce.py#L244-L263
|
def _validate_region(self):
"""Validate region was passed in and is a valid GCE zone."""
if not self.region:
raise GCECloudException(
'Zone is required for GCE cloud framework: '
'Example: us-west1-a'
)
try:
zone = self.compute_driver.ex_get_zone(self.region)
except Exception:
zone = None
if not zone:
raise GCECloudException(
'{region} is not a valid GCE zone. '
'Example: us-west1-a'.format(
region=self.region
)
)
|
[
"def",
"_validate_region",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"region",
":",
"raise",
"GCECloudException",
"(",
"'Zone is required for GCE cloud framework: '",
"'Example: us-west1-a'",
")",
"try",
":",
"zone",
"=",
"self",
".",
"compute_driver",
".",
"ex_get_zone",
"(",
"self",
".",
"region",
")",
"except",
"Exception",
":",
"zone",
"=",
"None",
"if",
"not",
"zone",
":",
"raise",
"GCECloudException",
"(",
"'{region} is not a valid GCE zone. '",
"'Example: us-west1-a'",
".",
"format",
"(",
"region",
"=",
"self",
".",
"region",
")",
")"
] |
Validate region was passed in and is a valid GCE zone.
|
[
"Validate",
"region",
"was",
"passed",
"in",
"and",
"is",
"a",
"valid",
"GCE",
"zone",
"."
] |
python
|
train
| 30.6 |
gbiggs/rtsprofile
|
rtsprofile/config_set.py
|
https://github.com/gbiggs/rtsprofile/blob/fded6eddcb0b25fe9808b1b12336a4413ea00905/rtsprofile/config_set.py#L216-L220
|
def save_xml(self, doc, element):
'''Save this configuration data into an xml.dom.Element object.'''
element.setAttributeNS(RTS_NS, RTS_NS_S + 'name', self.name)
if self.data:
element.setAttributeNS(RTS_NS, RTS_NS_S + 'data', self.data)
|
[
"def",
"save_xml",
"(",
"self",
",",
"doc",
",",
"element",
")",
":",
"element",
".",
"setAttributeNS",
"(",
"RTS_NS",
",",
"RTS_NS_S",
"+",
"'name'",
",",
"self",
".",
"name",
")",
"if",
"self",
".",
"data",
":",
"element",
".",
"setAttributeNS",
"(",
"RTS_NS",
",",
"RTS_NS_S",
"+",
"'data'",
",",
"self",
".",
"data",
")"
] |
Save this configuration data into an xml.dom.Element object.
|
[
"Save",
"this",
"configuration",
"data",
"into",
"an",
"xml",
".",
"dom",
".",
"Element",
"object",
"."
] |
python
|
train
| 53.6 |
DLR-RM/RAFCON
|
source/rafcon/gui/mygaphas/utils/gap_helper.py
|
https://github.com/DLR-RM/RAFCON/blob/24942ef1a904531f49ab8830a1dbb604441be498/source/rafcon/gui/mygaphas/utils/gap_helper.py#L200-L265
|
def add_transition_to_state(from_port, to_port):
"""Interface method between Gaphas and RAFCON core for adding transitions
The method checks the types of the given ports (IncomeView or OutcomeView) and from this determines the necessary
parameters for the add_transition method of the RAFCON core. Also the parent state is derived from the ports.
:param from_port: Port from which the transition starts
:param to_port: Port to which the transition goes to
:return: True if a transition was added, False if an error occurred
"""
from rafcon.gui.mygaphas.items.ports import IncomeView, OutcomeView
from_state_v = from_port.parent
to_state_v = to_port.parent
from_state_m = from_state_v.model
to_state_m = to_state_v.model
# Gather necessary information to create transition
from_state_id = from_state_m.state.state_id
to_state_id = to_state_m.state.state_id
responsible_parent_m = None
# Start transition
if isinstance(from_port, IncomeView):
from_state_id = None
from_outcome_id = None
responsible_parent_m = from_state_m
# Transition from parent income to child income
if isinstance(to_port, IncomeView):
to_outcome_id = None
# Transition from parent income to parent outcome
elif isinstance(to_port, OutcomeView):
to_outcome_id = to_port.outcome_id
elif isinstance(from_port, OutcomeView):
from_outcome_id = from_port.outcome_id
# Transition from child outcome to child income
if isinstance(to_port, IncomeView):
responsible_parent_m = from_state_m.parent
to_outcome_id = None
# Transition from child outcome to parent outcome
elif isinstance(to_port, OutcomeView):
responsible_parent_m = to_state_m
to_outcome_id = to_port.outcome_id
else:
raise ValueError("Invalid port type")
from rafcon.gui.models.container_state import ContainerStateModel
if not responsible_parent_m:
logger.error("Transitions only exist between incomes and outcomes. Given: {0} and {1}".format(type(
from_port), type(to_port)))
return False
elif not isinstance(responsible_parent_m, ContainerStateModel):
logger.error("Transitions only exist in container states (e.g. hierarchy states)")
return False
try:
t_id = responsible_parent_m.state.add_transition(from_state_id, from_outcome_id, to_state_id, to_outcome_id)
if from_state_id == to_state_id:
gui_helper_meta_data.insert_self_transition_meta_data(responsible_parent_m.states[from_state_id], t_id,
combined_action=True)
return True
except (ValueError, AttributeError, TypeError) as e:
logger.error("Transition couldn't be added: {0}".format(e))
return False
|
[
"def",
"add_transition_to_state",
"(",
"from_port",
",",
"to_port",
")",
":",
"from",
"rafcon",
".",
"gui",
".",
"mygaphas",
".",
"items",
".",
"ports",
"import",
"IncomeView",
",",
"OutcomeView",
"from_state_v",
"=",
"from_port",
".",
"parent",
"to_state_v",
"=",
"to_port",
".",
"parent",
"from_state_m",
"=",
"from_state_v",
".",
"model",
"to_state_m",
"=",
"to_state_v",
".",
"model",
"# Gather necessary information to create transition",
"from_state_id",
"=",
"from_state_m",
".",
"state",
".",
"state_id",
"to_state_id",
"=",
"to_state_m",
".",
"state",
".",
"state_id",
"responsible_parent_m",
"=",
"None",
"# Start transition",
"if",
"isinstance",
"(",
"from_port",
",",
"IncomeView",
")",
":",
"from_state_id",
"=",
"None",
"from_outcome_id",
"=",
"None",
"responsible_parent_m",
"=",
"from_state_m",
"# Transition from parent income to child income",
"if",
"isinstance",
"(",
"to_port",
",",
"IncomeView",
")",
":",
"to_outcome_id",
"=",
"None",
"# Transition from parent income to parent outcome",
"elif",
"isinstance",
"(",
"to_port",
",",
"OutcomeView",
")",
":",
"to_outcome_id",
"=",
"to_port",
".",
"outcome_id",
"elif",
"isinstance",
"(",
"from_port",
",",
"OutcomeView",
")",
":",
"from_outcome_id",
"=",
"from_port",
".",
"outcome_id",
"# Transition from child outcome to child income",
"if",
"isinstance",
"(",
"to_port",
",",
"IncomeView",
")",
":",
"responsible_parent_m",
"=",
"from_state_m",
".",
"parent",
"to_outcome_id",
"=",
"None",
"# Transition from child outcome to parent outcome",
"elif",
"isinstance",
"(",
"to_port",
",",
"OutcomeView",
")",
":",
"responsible_parent_m",
"=",
"to_state_m",
"to_outcome_id",
"=",
"to_port",
".",
"outcome_id",
"else",
":",
"raise",
"ValueError",
"(",
"\"Invalid port type\"",
")",
"from",
"rafcon",
".",
"gui",
".",
"models",
".",
"container_state",
"import",
"ContainerStateModel",
"if",
"not",
"responsible_parent_m",
":",
"logger",
".",
"error",
"(",
"\"Transitions only exist between incomes and outcomes. Given: {0} and {1}\"",
".",
"format",
"(",
"type",
"(",
"from_port",
")",
",",
"type",
"(",
"to_port",
")",
")",
")",
"return",
"False",
"elif",
"not",
"isinstance",
"(",
"responsible_parent_m",
",",
"ContainerStateModel",
")",
":",
"logger",
".",
"error",
"(",
"\"Transitions only exist in container states (e.g. hierarchy states)\"",
")",
"return",
"False",
"try",
":",
"t_id",
"=",
"responsible_parent_m",
".",
"state",
".",
"add_transition",
"(",
"from_state_id",
",",
"from_outcome_id",
",",
"to_state_id",
",",
"to_outcome_id",
")",
"if",
"from_state_id",
"==",
"to_state_id",
":",
"gui_helper_meta_data",
".",
"insert_self_transition_meta_data",
"(",
"responsible_parent_m",
".",
"states",
"[",
"from_state_id",
"]",
",",
"t_id",
",",
"combined_action",
"=",
"True",
")",
"return",
"True",
"except",
"(",
"ValueError",
",",
"AttributeError",
",",
"TypeError",
")",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"\"Transition couldn't be added: {0}\"",
".",
"format",
"(",
"e",
")",
")",
"return",
"False"
] |
Interface method between Gaphas and RAFCON core for adding transitions
The method checks the types of the given ports (IncomeView or OutcomeView) and from this determines the necessary
parameters for the add_transition method of the RAFCON core. Also the parent state is derived from the ports.
:param from_port: Port from which the transition starts
:param to_port: Port to which the transition goes to
:return: True if a transition was added, False if an error occurred
|
[
"Interface",
"method",
"between",
"Gaphas",
"and",
"RAFCON",
"core",
"for",
"adding",
"transitions"
] |
python
|
train
| 43.454545 |
JasonKessler/scattertext
|
scattertext/TermDocMatrix.py
|
https://github.com/JasonKessler/scattertext/blob/cacf1f687d218ee8cae3fc05cc901db824bb1b81/scattertext/TermDocMatrix.py#L298-L310
|
def remove_terms_by_indices(self, idx_to_delete_list):
'''
Parameters
----------
idx_to_delete_list, list
Returns
-------
TermDocMatrix
'''
new_X, new_term_idx_store = self._get_X_after_delete_terms(idx_to_delete_list)
return self._make_new_term_doc_matrix(new_X, self._mX, self._y, new_term_idx_store, self._category_idx_store,
self._metadata_idx_store, self._y == self._y)
|
[
"def",
"remove_terms_by_indices",
"(",
"self",
",",
"idx_to_delete_list",
")",
":",
"new_X",
",",
"new_term_idx_store",
"=",
"self",
".",
"_get_X_after_delete_terms",
"(",
"idx_to_delete_list",
")",
"return",
"self",
".",
"_make_new_term_doc_matrix",
"(",
"new_X",
",",
"self",
".",
"_mX",
",",
"self",
".",
"_y",
",",
"new_term_idx_store",
",",
"self",
".",
"_category_idx_store",
",",
"self",
".",
"_metadata_idx_store",
",",
"self",
".",
"_y",
"==",
"self",
".",
"_y",
")"
] |
Parameters
----------
idx_to_delete_list, list
Returns
-------
TermDocMatrix
|
[
"Parameters",
"----------",
"idx_to_delete_list",
"list"
] |
python
|
train
| 37.615385 |
Microsoft/azure-devops-python-api
|
azure-devops/azure/devops/v5_0/build/build_client.py
|
https://github.com/Microsoft/azure-devops-python-api/blob/4777ffda2f5052fabbaddb2abe9cb434e0cf1aa8/azure-devops/azure/devops/v5_0/build/build_client.py#L550-L574
|
def get_changes_between_builds(self, project, from_build_id=None, to_build_id=None, top=None):
"""GetChangesBetweenBuilds.
[Preview API] Gets the changes made to the repository between two given builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of changes to return.
:rtype: [Change]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str')
query_parameters = {}
if from_build_id is not None:
query_parameters['fromBuildId'] = self._serialize.query('from_build_id', from_build_id, 'int')
if to_build_id is not None:
query_parameters['toBuildId'] = self._serialize.query('to_build_id', to_build_id, 'int')
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int')
response = self._send(http_method='GET',
location_id='f10f0ea5-18a1-43ec-a8fb-2042c7be9b43',
version='5.0-preview.2',
route_values=route_values,
query_parameters=query_parameters)
return self._deserialize('[Change]', self._unwrap_collection(response))
|
[
"def",
"get_changes_between_builds",
"(",
"self",
",",
"project",
",",
"from_build_id",
"=",
"None",
",",
"to_build_id",
"=",
"None",
",",
"top",
"=",
"None",
")",
":",
"route_values",
"=",
"{",
"}",
"if",
"project",
"is",
"not",
"None",
":",
"route_values",
"[",
"'project'",
"]",
"=",
"self",
".",
"_serialize",
".",
"url",
"(",
"'project'",
",",
"project",
",",
"'str'",
")",
"query_parameters",
"=",
"{",
"}",
"if",
"from_build_id",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'fromBuildId'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'from_build_id'",
",",
"from_build_id",
",",
"'int'",
")",
"if",
"to_build_id",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'toBuildId'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'to_build_id'",
",",
"to_build_id",
",",
"'int'",
")",
"if",
"top",
"is",
"not",
"None",
":",
"query_parameters",
"[",
"'$top'",
"]",
"=",
"self",
".",
"_serialize",
".",
"query",
"(",
"'top'",
",",
"top",
",",
"'int'",
")",
"response",
"=",
"self",
".",
"_send",
"(",
"http_method",
"=",
"'GET'",
",",
"location_id",
"=",
"'f10f0ea5-18a1-43ec-a8fb-2042c7be9b43'",
",",
"version",
"=",
"'5.0-preview.2'",
",",
"route_values",
"=",
"route_values",
",",
"query_parameters",
"=",
"query_parameters",
")",
"return",
"self",
".",
"_deserialize",
"(",
"'[Change]'",
",",
"self",
".",
"_unwrap_collection",
"(",
"response",
")",
")"
] |
GetChangesBetweenBuilds.
[Preview API] Gets the changes made to the repository between two given builds.
:param str project: Project ID or project name
:param int from_build_id: The ID of the first build.
:param int to_build_id: The ID of the last build.
:param int top: The maximum number of changes to return.
:rtype: [Change]
|
[
"GetChangesBetweenBuilds",
".",
"[",
"Preview",
"API",
"]",
"Gets",
"the",
"changes",
"made",
"to",
"the",
"repository",
"between",
"two",
"given",
"builds",
".",
":",
"param",
"str",
"project",
":",
"Project",
"ID",
"or",
"project",
"name",
":",
"param",
"int",
"from_build_id",
":",
"The",
"ID",
"of",
"the",
"first",
"build",
".",
":",
"param",
"int",
"to_build_id",
":",
"The",
"ID",
"of",
"the",
"last",
"build",
".",
":",
"param",
"int",
"top",
":",
"The",
"maximum",
"number",
"of",
"changes",
"to",
"return",
".",
":",
"rtype",
":",
"[",
"Change",
"]"
] |
python
|
train
| 56.84 |
reorx/torext
|
torext/script.py
|
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/script.py#L241-L258
|
def command(self, profile=False):
"""This is a Flask-Script like decorator, provide functionality like
@manager.command
def foo():
pass
@manager.command
def foo(first_arg, second_arg, first_option=True, second_option=3):
pass
"""
def wraped(func):
assert inspect.isfunction(func)
self._commands[func.__name__] = Command(func, profile)
self._commands_list.append(func.__name__)
return func
return wraped
|
[
"def",
"command",
"(",
"self",
",",
"profile",
"=",
"False",
")",
":",
"def",
"wraped",
"(",
"func",
")",
":",
"assert",
"inspect",
".",
"isfunction",
"(",
"func",
")",
"self",
".",
"_commands",
"[",
"func",
".",
"__name__",
"]",
"=",
"Command",
"(",
"func",
",",
"profile",
")",
"self",
".",
"_commands_list",
".",
"append",
"(",
"func",
".",
"__name__",
")",
"return",
"func",
"return",
"wraped"
] |
This is a Flask-Script like decorator, provide functionality like
@manager.command
def foo():
pass
@manager.command
def foo(first_arg, second_arg, first_option=True, second_option=3):
pass
|
[
"This",
"is",
"a",
"Flask",
"-",
"Script",
"like",
"decorator",
"provide",
"functionality",
"like",
"@manager",
".",
"command",
"def",
"foo",
"()",
":",
"pass"
] |
python
|
train
| 29.111111 |
kwikteam/phy
|
phy/plot/panzoom.py
|
https://github.com/kwikteam/phy/blob/7e9313dc364304b7d2bd03b92938347343703003/phy/plot/panzoom.py#L464-L483
|
def attach(self, canvas):
"""Attach this interact to a canvas."""
super(PanZoom, self).attach(canvas)
canvas.panzoom = self
canvas.transforms.add_on_gpu([self._translate, self._scale])
# Add the variable declarations.
vs = ('uniform vec2 {};\n'.format(self.pan_var_name) +
'uniform vec2 {};\n'.format(self.zoom_var_name))
canvas.inserter.insert_vert(vs, 'header')
canvas.connect(self.on_resize)
canvas.connect(self.on_mouse_move)
canvas.connect(self.on_touch)
canvas.connect(self.on_key_press)
if self.enable_mouse_wheel:
canvas.connect(self.on_mouse_wheel)
self._set_canvas_aspect()
|
[
"def",
"attach",
"(",
"self",
",",
"canvas",
")",
":",
"super",
"(",
"PanZoom",
",",
"self",
")",
".",
"attach",
"(",
"canvas",
")",
"canvas",
".",
"panzoom",
"=",
"self",
"canvas",
".",
"transforms",
".",
"add_on_gpu",
"(",
"[",
"self",
".",
"_translate",
",",
"self",
".",
"_scale",
"]",
")",
"# Add the variable declarations.",
"vs",
"=",
"(",
"'uniform vec2 {};\\n'",
".",
"format",
"(",
"self",
".",
"pan_var_name",
")",
"+",
"'uniform vec2 {};\\n'",
".",
"format",
"(",
"self",
".",
"zoom_var_name",
")",
")",
"canvas",
".",
"inserter",
".",
"insert_vert",
"(",
"vs",
",",
"'header'",
")",
"canvas",
".",
"connect",
"(",
"self",
".",
"on_resize",
")",
"canvas",
".",
"connect",
"(",
"self",
".",
"on_mouse_move",
")",
"canvas",
".",
"connect",
"(",
"self",
".",
"on_touch",
")",
"canvas",
".",
"connect",
"(",
"self",
".",
"on_key_press",
")",
"if",
"self",
".",
"enable_mouse_wheel",
":",
"canvas",
".",
"connect",
"(",
"self",
".",
"on_mouse_wheel",
")",
"self",
".",
"_set_canvas_aspect",
"(",
")"
] |
Attach this interact to a canvas.
|
[
"Attach",
"this",
"interact",
"to",
"a",
"canvas",
"."
] |
python
|
train
| 34.9 |
disqus/disqus-python
|
disqusapi/utils.py
|
https://github.com/disqus/disqus-python/blob/605f33c7b735fcb85e16041c27658fbba49d7a7b/disqusapi/utils.py#L90-L98
|
def get_body_hash(params):
"""
Returns BASE64 ( HASH (text) ) as described in OAuth2 MAC spec.
http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-00#section-3.2
"""
norm_params = get_normalized_params(params)
return binascii.b2a_base64(hashlib.sha1(norm_params).digest())[:-1]
|
[
"def",
"get_body_hash",
"(",
"params",
")",
":",
"norm_params",
"=",
"get_normalized_params",
"(",
"params",
")",
"return",
"binascii",
".",
"b2a_base64",
"(",
"hashlib",
".",
"sha1",
"(",
"norm_params",
")",
".",
"digest",
"(",
")",
")",
"[",
":",
"-",
"1",
"]"
] |
Returns BASE64 ( HASH (text) ) as described in OAuth2 MAC spec.
http://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-00#section-3.2
|
[
"Returns",
"BASE64",
"(",
"HASH",
"(",
"text",
")",
")",
"as",
"described",
"in",
"OAuth2",
"MAC",
"spec",
"."
] |
python
|
train
| 33.222222 |
EmbodiedCognition/pagoda
|
pagoda/parser.py
|
https://github.com/EmbodiedCognition/pagoda/blob/8892f847026d98aba8646ecbc4589397e6dec7bd/pagoda/parser.py#L263-L285
|
def parse_asf(source, world, jointgroup=None, density=1000, color=None):
'''Load and parse a source file.
Parameters
----------
source : file
A file-like object that contains text information describing bodies and
joints to add to the world.
world : :class:`pagoda.physics.World`
The world to add objects and joints to.
jointgroup : ode.JointGroup, optional
If provided, add all joints from this parse to the given group. The
default behavior adds joints to the world without an explicit group.
density : float, optional
Default density for bodies. This is overridden if the source provides a
density or mass value for a given body.
color : tuple of floats, optional
Default color for bodies from this source. Defaults to None, which does
not assign a color to parsed bodies.
'''
visitor = AsfVisitor(world, jointgroup, density, color)
visitor.parse(re.sub(r'#.*', ' ', source.read()))
return visitor
|
[
"def",
"parse_asf",
"(",
"source",
",",
"world",
",",
"jointgroup",
"=",
"None",
",",
"density",
"=",
"1000",
",",
"color",
"=",
"None",
")",
":",
"visitor",
"=",
"AsfVisitor",
"(",
"world",
",",
"jointgroup",
",",
"density",
",",
"color",
")",
"visitor",
".",
"parse",
"(",
"re",
".",
"sub",
"(",
"r'#.*'",
",",
"' '",
",",
"source",
".",
"read",
"(",
")",
")",
")",
"return",
"visitor"
] |
Load and parse a source file.
Parameters
----------
source : file
A file-like object that contains text information describing bodies and
joints to add to the world.
world : :class:`pagoda.physics.World`
The world to add objects and joints to.
jointgroup : ode.JointGroup, optional
If provided, add all joints from this parse to the given group. The
default behavior adds joints to the world without an explicit group.
density : float, optional
Default density for bodies. This is overridden if the source provides a
density or mass value for a given body.
color : tuple of floats, optional
Default color for bodies from this source. Defaults to None, which does
not assign a color to parsed bodies.
|
[
"Load",
"and",
"parse",
"a",
"source",
"file",
"."
] |
python
|
valid
| 43.434783 |
angr/claripy
|
claripy/vsa/strided_interval.py
|
https://github.com/angr/claripy/blob/4ed61924880af1ea8fb778047d896ec0156412a6/claripy/vsa/strided_interval.py#L2064-L2113
|
def bitwise_or(self, t):
"""
Binary operation: logical or
:param b: The other operand
:return: self | b
"""
"""
This implementation combines the approaches used by 'WYSINWYX: what you see is not what you execute'
paper and 'Signedness-Agnostic Program Analysis: Precise Integer Bounds for Low-Level Code'. The
first paper provides an sound way to approximate the stride, whereas the second provides a way
to calculate the or operation using wrapping intervals.
Note that, even though according Warren's work 'Hacker's delight', one should follow different
approaches to calculate the minimun/maximum values of an or operations according on the type
of the operands (signed/unsigned). On the other other hand, by splitting the wrapping-intervals
at the south pole, we can safely and soundly only use the Warren's functions for unsigned
integers.
"""
s = self
result_interval = list()
for u in s._ssplit():
for v in t._ssplit():
w = u.bits
# u |w v
if u.is_integer:
s_t = StridedInterval._ntz(v.stride)
elif v.is_integer:
s_t = StridedInterval._ntz(u.stride)
else:
s_t = min(StridedInterval._ntz(u.stride), StridedInterval._ntz(v.stride))
if u.is_integer and u.lower_bound == 0:
new_stride = v.stride
elif v.is_integer and v.lower_bound == 0:
new_stride = u.stride
else:
new_stride = 2 ** s_t
mask = (1 << s_t) - 1
r = (u.lower_bound & mask) | (v.lower_bound & mask)
m = (2 ** w) - 1
low_bound = WarrenMethods.min_or(u.lower_bound & (~mask & m), u.upper_bound & (~mask & m), v.lower_bound & (~mask & m), v.upper_bound & (~mask & m), w)
upper_bound = WarrenMethods.max_or(u.lower_bound & (~mask & m), u.upper_bound & (~mask & m), v.lower_bound & (~mask & m), v.upper_bound & (~mask & m), w)
if low_bound == upper_bound:
new_stride = 0
new_interval = StridedInterval(lower_bound=((low_bound & (~mask & m)) | r), upper_bound=((upper_bound & (~mask & m)) | r), bits=w, stride=new_stride)
result_interval.append(new_interval)
return StridedInterval.least_upper_bound(*result_interval).normalize()
|
[
"def",
"bitwise_or",
"(",
"self",
",",
"t",
")",
":",
"\"\"\"\n This implementation combines the approaches used by 'WYSINWYX: what you see is not what you execute'\n paper and 'Signedness-Agnostic Program Analysis: Precise Integer Bounds for Low-Level Code'. The\n first paper provides an sound way to approximate the stride, whereas the second provides a way\n to calculate the or operation using wrapping intervals.\n Note that, even though according Warren's work 'Hacker's delight', one should follow different\n approaches to calculate the minimun/maximum values of an or operations according on the type\n of the operands (signed/unsigned). On the other other hand, by splitting the wrapping-intervals\n at the south pole, we can safely and soundly only use the Warren's functions for unsigned\n integers.\n \"\"\"",
"s",
"=",
"self",
"result_interval",
"=",
"list",
"(",
")",
"for",
"u",
"in",
"s",
".",
"_ssplit",
"(",
")",
":",
"for",
"v",
"in",
"t",
".",
"_ssplit",
"(",
")",
":",
"w",
"=",
"u",
".",
"bits",
"# u |w v",
"if",
"u",
".",
"is_integer",
":",
"s_t",
"=",
"StridedInterval",
".",
"_ntz",
"(",
"v",
".",
"stride",
")",
"elif",
"v",
".",
"is_integer",
":",
"s_t",
"=",
"StridedInterval",
".",
"_ntz",
"(",
"u",
".",
"stride",
")",
"else",
":",
"s_t",
"=",
"min",
"(",
"StridedInterval",
".",
"_ntz",
"(",
"u",
".",
"stride",
")",
",",
"StridedInterval",
".",
"_ntz",
"(",
"v",
".",
"stride",
")",
")",
"if",
"u",
".",
"is_integer",
"and",
"u",
".",
"lower_bound",
"==",
"0",
":",
"new_stride",
"=",
"v",
".",
"stride",
"elif",
"v",
".",
"is_integer",
"and",
"v",
".",
"lower_bound",
"==",
"0",
":",
"new_stride",
"=",
"u",
".",
"stride",
"else",
":",
"new_stride",
"=",
"2",
"**",
"s_t",
"mask",
"=",
"(",
"1",
"<<",
"s_t",
")",
"-",
"1",
"r",
"=",
"(",
"u",
".",
"lower_bound",
"&",
"mask",
")",
"|",
"(",
"v",
".",
"lower_bound",
"&",
"mask",
")",
"m",
"=",
"(",
"2",
"**",
"w",
")",
"-",
"1",
"low_bound",
"=",
"WarrenMethods",
".",
"min_or",
"(",
"u",
".",
"lower_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"u",
".",
"upper_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"v",
".",
"lower_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"v",
".",
"upper_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"w",
")",
"upper_bound",
"=",
"WarrenMethods",
".",
"max_or",
"(",
"u",
".",
"lower_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"u",
".",
"upper_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"v",
".",
"lower_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"v",
".",
"upper_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
",",
"w",
")",
"if",
"low_bound",
"==",
"upper_bound",
":",
"new_stride",
"=",
"0",
"new_interval",
"=",
"StridedInterval",
"(",
"lower_bound",
"=",
"(",
"(",
"low_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
")",
"|",
"r",
")",
",",
"upper_bound",
"=",
"(",
"(",
"upper_bound",
"&",
"(",
"~",
"mask",
"&",
"m",
")",
")",
"|",
"r",
")",
",",
"bits",
"=",
"w",
",",
"stride",
"=",
"new_stride",
")",
"result_interval",
".",
"append",
"(",
"new_interval",
")",
"return",
"StridedInterval",
".",
"least_upper_bound",
"(",
"*",
"result_interval",
")",
".",
"normalize",
"(",
")"
] |
Binary operation: logical or
:param b: The other operand
:return: self | b
|
[
"Binary",
"operation",
":",
"logical",
"or"
] |
python
|
train
| 50.26 |
sdcooke/django_bundles
|
django_bundles/utils/__init__.py
|
https://github.com/sdcooke/django_bundles/blob/2810fc455ec7391283792c1f108f4e8340f5d12f/django_bundles/utils/__init__.py#L1-L21
|
def get_class(class_string):
"""
Get a class from a dotted string
"""
split_string = class_string.encode('ascii').split('.')
import_path = '.'.join(split_string[:-1])
class_name = split_string[-1]
if class_name:
try:
if import_path:
mod = __import__(import_path, globals(), {}, [class_name])
cls = getattr(mod, class_name)
else:
cls = __import__(class_name, globals(), {})
if cls:
return cls
except (ImportError, AttributeError):
pass
return None
|
[
"def",
"get_class",
"(",
"class_string",
")",
":",
"split_string",
"=",
"class_string",
".",
"encode",
"(",
"'ascii'",
")",
".",
"split",
"(",
"'.'",
")",
"import_path",
"=",
"'.'",
".",
"join",
"(",
"split_string",
"[",
":",
"-",
"1",
"]",
")",
"class_name",
"=",
"split_string",
"[",
"-",
"1",
"]",
"if",
"class_name",
":",
"try",
":",
"if",
"import_path",
":",
"mod",
"=",
"__import__",
"(",
"import_path",
",",
"globals",
"(",
")",
",",
"{",
"}",
",",
"[",
"class_name",
"]",
")",
"cls",
"=",
"getattr",
"(",
"mod",
",",
"class_name",
")",
"else",
":",
"cls",
"=",
"__import__",
"(",
"class_name",
",",
"globals",
"(",
")",
",",
"{",
"}",
")",
"if",
"cls",
":",
"return",
"cls",
"except",
"(",
"ImportError",
",",
"AttributeError",
")",
":",
"pass",
"return",
"None"
] |
Get a class from a dotted string
|
[
"Get",
"a",
"class",
"from",
"a",
"dotted",
"string"
] |
python
|
train
| 28 |
fracpete/python-weka-wrapper3
|
python/weka/classifiers.py
|
https://github.com/fracpete/python-weka-wrapper3/blob/d850ab1bdb25fbd5a8d86e99f34a397975425838/python/weka/classifiers.py#L573-L585
|
def classifiers(self):
"""
Returns the list of base classifiers.
:return: the classifier list
:rtype: list
"""
objects = javabridge.get_env().get_object_array_elements(
javabridge.call(self.jobject, "getClassifiers", "()[Lweka/classifiers/Classifier;"))
result = []
for obj in objects:
result.append(Classifier(jobject=obj))
return result
|
[
"def",
"classifiers",
"(",
"self",
")",
":",
"objects",
"=",
"javabridge",
".",
"get_env",
"(",
")",
".",
"get_object_array_elements",
"(",
"javabridge",
".",
"call",
"(",
"self",
".",
"jobject",
",",
"\"getClassifiers\"",
",",
"\"()[Lweka/classifiers/Classifier;\"",
")",
")",
"result",
"=",
"[",
"]",
"for",
"obj",
"in",
"objects",
":",
"result",
".",
"append",
"(",
"Classifier",
"(",
"jobject",
"=",
"obj",
")",
")",
"return",
"result"
] |
Returns the list of base classifiers.
:return: the classifier list
:rtype: list
|
[
"Returns",
"the",
"list",
"of",
"base",
"classifiers",
"."
] |
python
|
train
| 32.538462 |
ninuxorg/nodeshot
|
nodeshot/community/mailing/models/outward.py
|
https://github.com/ninuxorg/nodeshot/blob/2466f0a55f522b2696026f196436ce7ba3f1e5c6/nodeshot/community/mailing/models/outward.py#L236-L245
|
def save(self, *args, **kwargs):
"""
Custom save method
"""
# change status to scheduled if necessary
if self.is_scheduled and self.status is not OUTWARD_STATUS.get('scheduled'):
self.status = OUTWARD_STATUS.get('scheduled')
# call super.save()
super(Outward, self).save(*args, **kwargs)
|
[
"def",
"save",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# change status to scheduled if necessary",
"if",
"self",
".",
"is_scheduled",
"and",
"self",
".",
"status",
"is",
"not",
"OUTWARD_STATUS",
".",
"get",
"(",
"'scheduled'",
")",
":",
"self",
".",
"status",
"=",
"OUTWARD_STATUS",
".",
"get",
"(",
"'scheduled'",
")",
"# call super.save()",
"super",
"(",
"Outward",
",",
"self",
")",
".",
"save",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
Custom save method
|
[
"Custom",
"save",
"method"
] |
python
|
train
| 34.7 |
mryellow/maze_explorer
|
mazeexp/engine/world_items.py
|
https://github.com/mryellow/maze_explorer/blob/ab8a25ccd05105d2fe57e0213d690cfc07e45827/mazeexp/engine/world_items.py#L116-L151
|
def update_collisions(self):
"""
Test player for collisions with items
"""
if not self.mode['items'] or len(self.mode['items']) == 0: return
# update collman
# FIXME: Why update each frame?
self.collman.clear()
for z, node in self.children:
if hasattr(node, 'cshape') and type(node.cshape) == cm.CircleShape:
self.collman.add(node)
# interactions player - others
for other in self.collman.iter_colliding(self.player):
typeball = other.btype
self.logger.debug('collision', typeball)
# TODO: Limit player position on non-removable items
#if not other.removable:
# pass
if other.removable:
self.to_remove.append(other)
self.reward_item(typeball)
#
# elif (typeball == 'wall' or
# typeball == 'gate' and self.cnt_food > 0):
# self.level_losed()
#
# elif typeball == 'gate':
# self.level_conquered()
self.remove_items()
|
[
"def",
"update_collisions",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"mode",
"[",
"'items'",
"]",
"or",
"len",
"(",
"self",
".",
"mode",
"[",
"'items'",
"]",
")",
"==",
"0",
":",
"return",
"# update collman",
"# FIXME: Why update each frame?",
"self",
".",
"collman",
".",
"clear",
"(",
")",
"for",
"z",
",",
"node",
"in",
"self",
".",
"children",
":",
"if",
"hasattr",
"(",
"node",
",",
"'cshape'",
")",
"and",
"type",
"(",
"node",
".",
"cshape",
")",
"==",
"cm",
".",
"CircleShape",
":",
"self",
".",
"collman",
".",
"add",
"(",
"node",
")",
"# interactions player - others",
"for",
"other",
"in",
"self",
".",
"collman",
".",
"iter_colliding",
"(",
"self",
".",
"player",
")",
":",
"typeball",
"=",
"other",
".",
"btype",
"self",
".",
"logger",
".",
"debug",
"(",
"'collision'",
",",
"typeball",
")",
"# TODO: Limit player position on non-removable items",
"#if not other.removable:",
"# pass",
"if",
"other",
".",
"removable",
":",
"self",
".",
"to_remove",
".",
"append",
"(",
"other",
")",
"self",
".",
"reward_item",
"(",
"typeball",
")",
"#",
"# elif (typeball == 'wall' or",
"# typeball == 'gate' and self.cnt_food > 0):",
"# self.level_losed()",
"#",
"# elif typeball == 'gate':",
"# self.level_conquered()",
"self",
".",
"remove_items",
"(",
")"
] |
Test player for collisions with items
|
[
"Test",
"player",
"for",
"collisions",
"with",
"items"
] |
python
|
train
| 30.277778 |
lumpywizard/check_email_status
|
check_email_status/resolvers.py
|
https://github.com/lumpywizard/check_email_status/blob/3a4c5dc42ada61325d5d9baad9e2b1b78084ee2f/check_email_status/resolvers.py#L61-L76
|
def get_mx_records(domain):
"""
Gets an array of MXRecords associated to the domain specified.
:param domain:
:return: [MXRecord]
"""
import dns.resolver
response = dns.resolver.query(domain, 'MX')
mx_records = []
for answer in response.answers:
mx_records.append(MXRecord(priority=answer.preference, exchange=answer.exchange, domain=domain))
return sorted(mx_records, key=lambda record: record.priority)
|
[
"def",
"get_mx_records",
"(",
"domain",
")",
":",
"import",
"dns",
".",
"resolver",
"response",
"=",
"dns",
".",
"resolver",
".",
"query",
"(",
"domain",
",",
"'MX'",
")",
"mx_records",
"=",
"[",
"]",
"for",
"answer",
"in",
"response",
".",
"answers",
":",
"mx_records",
".",
"append",
"(",
"MXRecord",
"(",
"priority",
"=",
"answer",
".",
"preference",
",",
"exchange",
"=",
"answer",
".",
"exchange",
",",
"domain",
"=",
"domain",
")",
")",
"return",
"sorted",
"(",
"mx_records",
",",
"key",
"=",
"lambda",
"record",
":",
"record",
".",
"priority",
")"
] |
Gets an array of MXRecords associated to the domain specified.
:param domain:
:return: [MXRecord]
|
[
"Gets",
"an",
"array",
"of",
"MXRecords",
"associated",
"to",
"the",
"domain",
"specified",
"."
] |
python
|
train
| 30.3125 |
ericmjl/nxviz
|
nxviz/plots.py
|
https://github.com/ericmjl/nxviz/blob/6ea5823a8030a686f165fbe37d7a04d0f037ecc9/nxviz/plots.py#L1018-L1027
|
def draw(self):
"""
Draws the plot to screen.
Note to self: Do NOT call super(MatrixPlot, self).draw(); the
underlying logic for drawing here is completely different from other
plots, and as such necessitates a different implementation.
"""
matrix = nx.to_numpy_matrix(self.graph, nodelist=self.nodes)
self.ax.matshow(matrix, cmap=self.cmap)
|
[
"def",
"draw",
"(",
"self",
")",
":",
"matrix",
"=",
"nx",
".",
"to_numpy_matrix",
"(",
"self",
".",
"graph",
",",
"nodelist",
"=",
"self",
".",
"nodes",
")",
"self",
".",
"ax",
".",
"matshow",
"(",
"matrix",
",",
"cmap",
"=",
"self",
".",
"cmap",
")"
] |
Draws the plot to screen.
Note to self: Do NOT call super(MatrixPlot, self).draw(); the
underlying logic for drawing here is completely different from other
plots, and as such necessitates a different implementation.
|
[
"Draws",
"the",
"plot",
"to",
"screen",
"."
] |
python
|
train
| 39.7 |
NASA-AMMOS/AIT-Core
|
ait/core/dtype.py
|
https://github.com/NASA-AMMOS/AIT-Core/blob/9d85bd9c738e7a6a6fbdff672bea708238b02a3a/ait/core/dtype.py#L613-L628
|
def decode(self, bytes, raw=False):
"""decode(bytearray, raw=False) -> value
Decodes the given bytearray and returns the number of
(fractional) seconds.
If the optional parameter ``raw`` is ``True``, the byte (U8)
itself will be returned.
"""
result = super(Time8Type, self).decode(bytes)
if not raw:
result /= 256.0
return result
|
[
"def",
"decode",
"(",
"self",
",",
"bytes",
",",
"raw",
"=",
"False",
")",
":",
"result",
"=",
"super",
"(",
"Time8Type",
",",
"self",
")",
".",
"decode",
"(",
"bytes",
")",
"if",
"not",
"raw",
":",
"result",
"/=",
"256.0",
"return",
"result"
] |
decode(bytearray, raw=False) -> value
Decodes the given bytearray and returns the number of
(fractional) seconds.
If the optional parameter ``raw`` is ``True``, the byte (U8)
itself will be returned.
|
[
"decode",
"(",
"bytearray",
"raw",
"=",
"False",
")",
"-",
">",
"value"
] |
python
|
train
| 25.25 |
pantsbuild/pants
|
src/python/pants/goal/goal.py
|
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/goal.py#L54-L79
|
def register(cls, name, description, options_registrar_cls=None):
"""Register a goal description.
Otherwise the description must be set when registering some task on the goal,
which is clunky, and dependent on things like registration order of tasks in the goal.
A goal that isn't explicitly registered with a description will fall back to the description
of the task in that goal with the same name (if any). So singleton goals (e.g., 'clean-all')
need not be registered explicitly. This method is primarily useful for setting a
description on a generic goal like 'compile' or 'test', that multiple backends will
register tasks on.
:API: public
:param string name: The name of the goal; ie: the way to specify it on the command line.
:param string description: A description of the tasks in the goal do.
:param :class:pants.option.Optionable options_registrar_cls: A class for registering options
at the goal scope. Useful for registering recursive options on all tasks in a goal.
:return: The freshly registered goal.
:rtype: :class:`_Goal`
"""
goal = cls.by_name(name)
goal._description = description
goal._options_registrar_cls = (options_registrar_cls.registrar_for_scope(name)
if options_registrar_cls else None)
return goal
|
[
"def",
"register",
"(",
"cls",
",",
"name",
",",
"description",
",",
"options_registrar_cls",
"=",
"None",
")",
":",
"goal",
"=",
"cls",
".",
"by_name",
"(",
"name",
")",
"goal",
".",
"_description",
"=",
"description",
"goal",
".",
"_options_registrar_cls",
"=",
"(",
"options_registrar_cls",
".",
"registrar_for_scope",
"(",
"name",
")",
"if",
"options_registrar_cls",
"else",
"None",
")",
"return",
"goal"
] |
Register a goal description.
Otherwise the description must be set when registering some task on the goal,
which is clunky, and dependent on things like registration order of tasks in the goal.
A goal that isn't explicitly registered with a description will fall back to the description
of the task in that goal with the same name (if any). So singleton goals (e.g., 'clean-all')
need not be registered explicitly. This method is primarily useful for setting a
description on a generic goal like 'compile' or 'test', that multiple backends will
register tasks on.
:API: public
:param string name: The name of the goal; ie: the way to specify it on the command line.
:param string description: A description of the tasks in the goal do.
:param :class:pants.option.Optionable options_registrar_cls: A class for registering options
at the goal scope. Useful for registering recursive options on all tasks in a goal.
:return: The freshly registered goal.
:rtype: :class:`_Goal`
|
[
"Register",
"a",
"goal",
"description",
"."
] |
python
|
train
| 51.269231 |
ns1/ns1-python
|
ns1/zones.py
|
https://github.com/ns1/ns1-python/blob/f3e1d90a3b76a1bd18f855f2c622a8a49d4b585e/ns1/zones.py#L228-L238
|
def loadRecord(self, domain, rtype, callback=None, errback=None):
"""
Load a high level Record object from a domain within this Zone.
:param str domain: The name of the record to load
:param str rtype: The DNS record type
:rtype: ns1.records.Record
:return: new Record
"""
rec = Record(self, domain, rtype)
return rec.load(callback=callback, errback=errback)
|
[
"def",
"loadRecord",
"(",
"self",
",",
"domain",
",",
"rtype",
",",
"callback",
"=",
"None",
",",
"errback",
"=",
"None",
")",
":",
"rec",
"=",
"Record",
"(",
"self",
",",
"domain",
",",
"rtype",
")",
"return",
"rec",
".",
"load",
"(",
"callback",
"=",
"callback",
",",
"errback",
"=",
"errback",
")"
] |
Load a high level Record object from a domain within this Zone.
:param str domain: The name of the record to load
:param str rtype: The DNS record type
:rtype: ns1.records.Record
:return: new Record
|
[
"Load",
"a",
"high",
"level",
"Record",
"object",
"from",
"a",
"domain",
"within",
"this",
"Zone",
"."
] |
python
|
train
| 38.272727 |
PyHDI/Pyverilog
|
pyverilog/vparser/parser.py
|
https://github.com/PyHDI/Pyverilog/blob/b852cc5ed6a7a2712e33639f9d9782d0d1587a53/pyverilog/vparser/parser.py#L1277-L1279
|
def p_always_comb(self, p):
'always_comb : ALWAYS_COMB senslist always_statement'
p[0] = AlwaysComb(p[2], p[3], lineno=p.lineno(1))
|
[
"def",
"p_always_comb",
"(",
"self",
",",
"p",
")",
":",
"p",
"[",
"0",
"]",
"=",
"AlwaysComb",
"(",
"p",
"[",
"2",
"]",
",",
"p",
"[",
"3",
"]",
",",
"lineno",
"=",
"p",
".",
"lineno",
"(",
"1",
")",
")"
] |
always_comb : ALWAYS_COMB senslist always_statement
|
[
"always_comb",
":",
"ALWAYS_COMB",
"senslist",
"always_statement"
] |
python
|
train
| 48.333333 |
thisfred/val
|
val/_val.py
|
https://github.com/thisfred/val/blob/ba022e0c6c47acb3b8a45e7c44c84cc0f495c41c/val/_val.py#L141-L153
|
def _validate_type_key(key, value, types, validated):
"""Validate a key's value by type."""
for key_schema, value_schema in types.items():
if not isinstance(key, key_schema):
continue
try:
validated[key] = value_schema(value)
except NotValid:
continue
else:
return []
return ['%r: %r not matched' % (key, value)]
|
[
"def",
"_validate_type_key",
"(",
"key",
",",
"value",
",",
"types",
",",
"validated",
")",
":",
"for",
"key_schema",
",",
"value_schema",
"in",
"types",
".",
"items",
"(",
")",
":",
"if",
"not",
"isinstance",
"(",
"key",
",",
"key_schema",
")",
":",
"continue",
"try",
":",
"validated",
"[",
"key",
"]",
"=",
"value_schema",
"(",
"value",
")",
"except",
"NotValid",
":",
"continue",
"else",
":",
"return",
"[",
"]",
"return",
"[",
"'%r: %r not matched'",
"%",
"(",
"key",
",",
"value",
")",
"]"
] |
Validate a key's value by type.
|
[
"Validate",
"a",
"key",
"s",
"value",
"by",
"type",
"."
] |
python
|
train
| 30.230769 |
ParthKolekar/parthsql
|
parthsql/parthsql.py
|
https://github.com/ParthKolekar/parthsql/blob/98b69448aeaca1331c9db29bc85e731702a6b0d9/parthsql/parthsql.py#L28-L55
|
def load_contents(self):
"""
Loads contents of the tables into database.
"""
with open(METADATA_FILE) as f:
lines = f.readlines()
lines = map(lambda x: x.strip(), lines)
exclude_strings = ['<begin_table>', '<end_table>']
list_of_databases_and_columns = filter(
lambda x: not x[0] in exclude_strings, [
list(value) for key, value in itertools.groupby(
lines,
lambda x: x in exclude_strings
)
]
)
for iterator in list_of_databases_and_columns:
self.create_table_raw(
tablename=iterator[0],
columns=iterator[1:][:],
)
for i in self.tables:
i.load_contents()
|
[
"def",
"load_contents",
"(",
"self",
")",
":",
"with",
"open",
"(",
"METADATA_FILE",
")",
"as",
"f",
":",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"lines",
"=",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"strip",
"(",
")",
",",
"lines",
")",
"exclude_strings",
"=",
"[",
"'<begin_table>'",
",",
"'<end_table>'",
"]",
"list_of_databases_and_columns",
"=",
"filter",
"(",
"lambda",
"x",
":",
"not",
"x",
"[",
"0",
"]",
"in",
"exclude_strings",
",",
"[",
"list",
"(",
"value",
")",
"for",
"key",
",",
"value",
"in",
"itertools",
".",
"groupby",
"(",
"lines",
",",
"lambda",
"x",
":",
"x",
"in",
"exclude_strings",
")",
"]",
")",
"for",
"iterator",
"in",
"list_of_databases_and_columns",
":",
"self",
".",
"create_table_raw",
"(",
"tablename",
"=",
"iterator",
"[",
"0",
"]",
",",
"columns",
"=",
"iterator",
"[",
"1",
":",
"]",
"[",
":",
"]",
",",
")",
"for",
"i",
"in",
"self",
".",
"tables",
":",
"i",
".",
"load_contents",
"(",
")"
] |
Loads contents of the tables into database.
|
[
"Loads",
"contents",
"of",
"the",
"tables",
"into",
"database",
"."
] |
python
|
train
| 28.285714 |
jantman/pypi-download-stats
|
pypi_download_stats/dataquery.py
|
https://github.com/jantman/pypi-download-stats/blob/44a7a6bbcd61a9e7f02bd02c52584a98183f80c5/pypi_download_stats/dataquery.py#L582-L597
|
def run_queries(self, backfill_num_days=7):
"""
Run the data queries for the specified projects.
:param backfill_num_days: number of days of historical data to backfill,
if missing
:type backfill_num_days: int
"""
available_tables = self._get_download_table_ids()
logger.debug('Found %d available download tables: %s',
len(available_tables), available_tables)
today_table = available_tables[-1]
yesterday_table = available_tables[-2]
self.query_one_table(today_table)
self.query_one_table(yesterday_table)
self.backfill_history(backfill_num_days, available_tables)
|
[
"def",
"run_queries",
"(",
"self",
",",
"backfill_num_days",
"=",
"7",
")",
":",
"available_tables",
"=",
"self",
".",
"_get_download_table_ids",
"(",
")",
"logger",
".",
"debug",
"(",
"'Found %d available download tables: %s'",
",",
"len",
"(",
"available_tables",
")",
",",
"available_tables",
")",
"today_table",
"=",
"available_tables",
"[",
"-",
"1",
"]",
"yesterday_table",
"=",
"available_tables",
"[",
"-",
"2",
"]",
"self",
".",
"query_one_table",
"(",
"today_table",
")",
"self",
".",
"query_one_table",
"(",
"yesterday_table",
")",
"self",
".",
"backfill_history",
"(",
"backfill_num_days",
",",
"available_tables",
")"
] |
Run the data queries for the specified projects.
:param backfill_num_days: number of days of historical data to backfill,
if missing
:type backfill_num_days: int
|
[
"Run",
"the",
"data",
"queries",
"for",
"the",
"specified",
"projects",
"."
] |
python
|
train
| 42.3125 |
douban/brownant
|
brownant/app.py
|
https://github.com/douban/brownant/blob/3c7e6d30f67b8f0f8ca1f823ea3daed74e8725cd/brownant/app.py#L53-L81
|
def validate_url(self, url):
"""Validate the :class:`~urllib.parse.ParseResult` object.
This method will make sure the :meth:`~brownant.app.BrownAnt.parse_url`
could work as expected even meet a unexpected URL string.
:param url: the parsed url.
:type url: :class:`~urllib.parse.ParseResult`
"""
# fix up the non-ascii path
url_path = to_bytes_safe(url.path)
url_path = urllib.parse.quote(url_path, safe=b"/%")
# fix up the non-ascii query
url_query = to_bytes_safe(url.query)
url_query = urllib.parse.quote(url_query, safe=b"?=&")
url = urllib.parse.ParseResult(url.scheme, url.netloc, url_path,
url.params, url_query, url.fragment)
# validate the components of URL
has_hostname = url.hostname is not None and len(url.hostname) > 0
has_http_scheme = url.scheme in ("http", "https")
has_path = not len(url.path) or url.path.startswith("/")
if not (has_hostname and has_http_scheme and has_path):
raise NotSupported("invalid url: %s" % repr(url))
return url
|
[
"def",
"validate_url",
"(",
"self",
",",
"url",
")",
":",
"# fix up the non-ascii path",
"url_path",
"=",
"to_bytes_safe",
"(",
"url",
".",
"path",
")",
"url_path",
"=",
"urllib",
".",
"parse",
".",
"quote",
"(",
"url_path",
",",
"safe",
"=",
"b\"/%\"",
")",
"# fix up the non-ascii query",
"url_query",
"=",
"to_bytes_safe",
"(",
"url",
".",
"query",
")",
"url_query",
"=",
"urllib",
".",
"parse",
".",
"quote",
"(",
"url_query",
",",
"safe",
"=",
"b\"?=&\"",
")",
"url",
"=",
"urllib",
".",
"parse",
".",
"ParseResult",
"(",
"url",
".",
"scheme",
",",
"url",
".",
"netloc",
",",
"url_path",
",",
"url",
".",
"params",
",",
"url_query",
",",
"url",
".",
"fragment",
")",
"# validate the components of URL",
"has_hostname",
"=",
"url",
".",
"hostname",
"is",
"not",
"None",
"and",
"len",
"(",
"url",
".",
"hostname",
")",
">",
"0",
"has_http_scheme",
"=",
"url",
".",
"scheme",
"in",
"(",
"\"http\"",
",",
"\"https\"",
")",
"has_path",
"=",
"not",
"len",
"(",
"url",
".",
"path",
")",
"or",
"url",
".",
"path",
".",
"startswith",
"(",
"\"/\"",
")",
"if",
"not",
"(",
"has_hostname",
"and",
"has_http_scheme",
"and",
"has_path",
")",
":",
"raise",
"NotSupported",
"(",
"\"invalid url: %s\"",
"%",
"repr",
"(",
"url",
")",
")",
"return",
"url"
] |
Validate the :class:`~urllib.parse.ParseResult` object.
This method will make sure the :meth:`~brownant.app.BrownAnt.parse_url`
could work as expected even meet a unexpected URL string.
:param url: the parsed url.
:type url: :class:`~urllib.parse.ParseResult`
|
[
"Validate",
"the",
":",
"class",
":",
"~urllib",
".",
"parse",
".",
"ParseResult",
"object",
"."
] |
python
|
train
| 39.241379 |
DocNow/twarc
|
twarc/client.py
|
https://github.com/DocNow/twarc/blob/47dd87d0c00592a4d583412c9d660ba574fc6f26/twarc/client.py#L438-L447
|
def dehydrate(self, iterator):
"""
Pass in an iterator of tweets' JSON and get back an iterator of the
IDs of each tweet.
"""
for line in iterator:
try:
yield json.loads(line)['id_str']
except Exception as e:
log.error("uhoh: %s\n" % e)
|
[
"def",
"dehydrate",
"(",
"self",
",",
"iterator",
")",
":",
"for",
"line",
"in",
"iterator",
":",
"try",
":",
"yield",
"json",
".",
"loads",
"(",
"line",
")",
"[",
"'id_str'",
"]",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"error",
"(",
"\"uhoh: %s\\n\"",
"%",
"e",
")"
] |
Pass in an iterator of tweets' JSON and get back an iterator of the
IDs of each tweet.
|
[
"Pass",
"in",
"an",
"iterator",
"of",
"tweets",
"JSON",
"and",
"get",
"back",
"an",
"iterator",
"of",
"the",
"IDs",
"of",
"each",
"tweet",
"."
] |
python
|
train
| 32.3 |
Othernet-Project/conz
|
conz/console.py
|
https://github.com/Othernet-Project/conz/blob/051214fa95a837c21595b03426a2c54c522d07a0/conz/console.py#L87-L89
|
def pwa(self, val, wa='WARN'):
""" Print val: WARN in yellow on STDOUT """
self.pstd(self.color.yellow('{}: {}'.format(val, wa)))
|
[
"def",
"pwa",
"(",
"self",
",",
"val",
",",
"wa",
"=",
"'WARN'",
")",
":",
"self",
".",
"pstd",
"(",
"self",
".",
"color",
".",
"yellow",
"(",
"'{}: {}'",
".",
"format",
"(",
"val",
",",
"wa",
")",
")",
")"
] |
Print val: WARN in yellow on STDOUT
|
[
"Print",
"val",
":",
"WARN",
"in",
"yellow",
"on",
"STDOUT"
] |
python
|
train
| 47.666667 |
ewels/MultiQC
|
multiqc/modules/fastp/fastp.py
|
https://github.com/ewels/MultiQC/blob/2037d6322b2554146a74efbf869156ad20d4c4ec/multiqc/modules/fastp/fastp.py#L368-L383
|
def fastp_read_gc_plot(self):
    """ Make the read GC plot for Fastp.

    Builds a MultiQC line graph of per-position GC content, one subplot
    per read / filtering-stage combination found in the parsed data.
    """
    # Collect the per-sample GC series plus matching subplot labels.
    data_labels, pdata = self.filter_pconfig_pdata_subplots(self.fastp_gc_content_data, 'Base Content Percent')
    # Plot configuration for the MultiQC linegraph helper.
    pconfig = {
        'id': 'fastp-seq-content-gc-plot',
        'title': 'Fastp: Read GC Content',
        'xlab': 'Read Position',
        'ylab': 'R1 Before filtering: Base Content Percent',
        'ymax': 100,
        'ymin': 0,
        'xDecimals': False,
        'yLabelFormat': '{value}%',
        'tt_label': '{point.x}: {point.y:.2f}%',
        'data_labels': data_labels
    }
    return linegraph.plot(pdata, pconfig)
|
[
"def",
"fastp_read_gc_plot",
"(",
"self",
")",
":",
"data_labels",
",",
"pdata",
"=",
"self",
".",
"filter_pconfig_pdata_subplots",
"(",
"self",
".",
"fastp_gc_content_data",
",",
"'Base Content Percent'",
")",
"pconfig",
"=",
"{",
"'id'",
":",
"'fastp-seq-content-gc-plot'",
",",
"'title'",
":",
"'Fastp: Read GC Content'",
",",
"'xlab'",
":",
"'Read Position'",
",",
"'ylab'",
":",
"'R1 Before filtering: Base Content Percent'",
",",
"'ymax'",
":",
"100",
",",
"'ymin'",
":",
"0",
",",
"'xDecimals'",
":",
"False",
",",
"'yLabelFormat'",
":",
"'{value}%'",
",",
"'tt_label'",
":",
"'{point.x}: {point.y:.2f}%'",
",",
"'data_labels'",
":",
"data_labels",
"}",
"return",
"linegraph",
".",
"plot",
"(",
"pdata",
",",
"pconfig",
")"
] |
Make the read GC plot for Fastp
|
[
"Make",
"the",
"read",
"GC",
"plot",
"for",
"Fastp"
] |
python
|
train
| 41.375 |
dopefishh/pympi
|
pympi/Elan.py
|
https://github.com/dopefishh/pympi/blob/79c747cde45b5ba203ed93154d8c123ac9c3ef56/pympi/Elan.py#L1254-L1263
|
def remove_tier(self, id_tier, clean=True):
    """Remove a tier.

    :param str id_tier: Name of the tier.
    :param bool clean: Flag to also clean the timeslots.
    :raises KeyError: If tier is non existent.
    """
    del self.tiers[id_tier]
    if clean:
        self.clean_time_slots()
|
[
"def",
"remove_tier",
"(",
"self",
",",
"id_tier",
",",
"clean",
"=",
"True",
")",
":",
"del",
"(",
"self",
".",
"tiers",
"[",
"id_tier",
"]",
")",
"if",
"clean",
":",
"self",
".",
"clean_time_slots",
"(",
")"
] |
Remove a tier.
:param str id_tier: Name of the tier.
:param bool clean: Flag to also clean the timeslots.
:raises KeyError: If tier is non existent.
|
[
"Remove",
"a",
"tier",
"."
] |
python
|
test
| 31.8 |
LordGaav/python-chaos
|
chaos/threading/scheduler.py
|
https://github.com/LordGaav/python-chaos/blob/52cd29a6fd15693ee1e53786b93bcb23fbf84ddd/chaos/threading/scheduler.py#L72-L88
|
def setStartAction(self, action, *args, **kwargs):
    """
    Register a callable to invoke when run() is called, before the main
    action executes.

    Parameters
    ----------
    action: function pointer
        The function to call.
    *args
        Positional arguments to pass to action.
    **kwargs:
        Keyword arguments to pass to action.
    """
    self.init_action, self.init_args, self.init_kwargs = action, args, kwargs
|
[
"def",
"setStartAction",
"(",
"self",
",",
"action",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"init_action",
"=",
"action",
"self",
".",
"init_args",
"=",
"args",
"self",
".",
"init_kwargs",
"=",
"kwargs"
] |
Set a function to call when run() is called, before the main action is called.
Parameters
----------
action: function pointer
The function to call.
*args
Positional arguments to pass to action.
**kwargs:
Keyword arguments to pass to action.
|
[
"Set",
"a",
"function",
"to",
"call",
"when",
"run",
"()",
"is",
"called",
"before",
"the",
"main",
"action",
"is",
"called",
"."
] |
python
|
train
| 22.941176 |
lsst-sqre/documenteer
|
documenteer/sphinxext/lssttasks/topiclists.py
|
https://github.com/lsst-sqre/documenteer/blob/75f02901a80042b28d074df1cc1dca32eb8e38c8/documenteer/sphinxext/lssttasks/topiclists.py#L74-L104
|
def _build_toctree(self):
    """Create a hidden toctree node with the contents of a directory
    prefixed by the directory name specified by the `toctree` directive
    option.

    Returns:
        A ``sphinx.addnodes.toctree`` node. It is hidden, so it shapes
        the document hierarchy without rendering a visible list.
    """
    # Resolve the toctree root relative to the current document.
    dirname = posixpath.dirname(self._env.docname)
    tree_prefix = self.options['toctree'].strip()
    root = posixpath.normpath(posixpath.join(dirname, tree_prefix))
    docnames = [docname for docname in self._env.found_docs
                if docname.startswith(root)]
    # Sort docnames alphabetically based on **class** name.
    # The standard we assume is that task doc pages are named after
    # their Python namespace.
    # NOTE: this ordering only applies to the toctree; the visual ordering
    # is set by `process_task_topic_list`.
    # NOTE: docnames are **always** POSIX-like paths
    class_names = [docname.split('/')[-1].split('.')[-1]
                   for docname in docnames]
    docnames = [docname for docname, _ in
                sorted(zip(docnames, class_names),
                       key=lambda pair: pair[1])]
    tocnode = sphinx.addnodes.toctree()
    tocnode['includefiles'] = docnames
    tocnode['entries'] = [(None, docname) for docname in docnames]
    tocnode['maxdepth'] = -1
    tocnode['glob'] = None
    tocnode['hidden'] = True
    return tocnode
|
[
"def",
"_build_toctree",
"(",
"self",
")",
":",
"dirname",
"=",
"posixpath",
".",
"dirname",
"(",
"self",
".",
"_env",
".",
"docname",
")",
"tree_prefix",
"=",
"self",
".",
"options",
"[",
"'toctree'",
"]",
".",
"strip",
"(",
")",
"root",
"=",
"posixpath",
".",
"normpath",
"(",
"posixpath",
".",
"join",
"(",
"dirname",
",",
"tree_prefix",
")",
")",
"docnames",
"=",
"[",
"docname",
"for",
"docname",
"in",
"self",
".",
"_env",
".",
"found_docs",
"if",
"docname",
".",
"startswith",
"(",
"root",
")",
"]",
"# Sort docnames alphabetically based on **class** name.",
"# The standard we assume is that task doc pages are named after",
"# their Python namespace.",
"# NOTE: this ordering only applies to the toctree; the visual ordering",
"# is set by `process_task_topic_list`.",
"# NOTE: docnames are **always** POSIX-like paths",
"class_names",
"=",
"[",
"docname",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
"for",
"docname",
"in",
"docnames",
"]",
"docnames",
"=",
"[",
"docname",
"for",
"docname",
",",
"_",
"in",
"sorted",
"(",
"zip",
"(",
"docnames",
",",
"class_names",
")",
",",
"key",
"=",
"lambda",
"pair",
":",
"pair",
"[",
"1",
"]",
")",
"]",
"tocnode",
"=",
"sphinx",
".",
"addnodes",
".",
"toctree",
"(",
")",
"tocnode",
"[",
"'includefiles'",
"]",
"=",
"docnames",
"tocnode",
"[",
"'entries'",
"]",
"=",
"[",
"(",
"None",
",",
"docname",
")",
"for",
"docname",
"in",
"docnames",
"]",
"tocnode",
"[",
"'maxdepth'",
"]",
"=",
"-",
"1",
"tocnode",
"[",
"'glob'",
"]",
"=",
"None",
"tocnode",
"[",
"'hidden'",
"]",
"=",
"True",
"return",
"tocnode"
] |
Create a hidden toctree node with the contents of a directory
prefixed by the directory name specified by the `toctree` directive
option.
|
[
"Create",
"a",
"hidden",
"toctree",
"node",
"with",
"the",
"contents",
"of",
"a",
"directory",
"prefixed",
"by",
"the",
"directory",
"name",
"specified",
"by",
"the",
"toctree",
"directive",
"option",
"."
] |
python
|
train
| 44 |
bwohlberg/sporco
|
sporco/cnvrep.py
|
https://github.com/bwohlberg/sporco/blob/8946a04331106f4e39904fbdf2dc7351900baa04/sporco/cnvrep.py#L456-L477
|
def stdformD(D, Cd, M, dimN=2):
    """Reshape dictionary array (`D` in :mod:`.admm.cbpdn` module, `X` in
    :mod:`.admm.ccmod` module) to internal standard form, appending the
    channel axis, a singleton axis, and the filter axis after the spatial
    axes.

    Parameters
    ----------
    D : array_like
      Dictionary array
    Cd : int
      Size of dictionary channel index
    M : int
      Number of filters in dictionary
    dimN : int, optional (default 2)
      Number of problem spatial indices

    Returns
    -------
    Dr : ndarray
      Reshaped dictionary array
    """
    spatial_shape = D.shape[0:dimN]
    return D.reshape(spatial_shape + (Cd, 1, M))
|
[
"def",
"stdformD",
"(",
"D",
",",
"Cd",
",",
"M",
",",
"dimN",
"=",
"2",
")",
":",
"return",
"D",
".",
"reshape",
"(",
"D",
".",
"shape",
"[",
"0",
":",
"dimN",
"]",
"+",
"(",
"Cd",
",",
")",
"+",
"(",
"1",
",",
")",
"+",
"(",
"M",
",",
")",
")"
] |
Reshape dictionary array (`D` in :mod:`.admm.cbpdn` module, `X` in
:mod:`.admm.ccmod` module) to internal standard form.
Parameters
----------
D : array_like
Dictionary array
Cd : int
Size of dictionary channel index
M : int
Number of filters in dictionary
dimN : int, optional (default 2)
Number of problem spatial indices
Returns
-------
Dr : ndarray
Reshaped dictionary array
|
[
"Reshape",
"dictionary",
"array",
"(",
"D",
"in",
":",
"mod",
":",
".",
"admm",
".",
"cbpdn",
"module",
"X",
"in",
":",
"mod",
":",
".",
"admm",
".",
"ccmod",
"module",
")",
"to",
"internal",
"standard",
"form",
"."
] |
python
|
train
| 24.409091 |
mitsei/dlkit
|
dlkit/primordium/locale/types/language.py
|
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/primordium/locale/types/language.py#L1234-L1261
|
def get_type_data(name):
    """Return dictionary representation of type.

    Can be used to initialize primordium.type.primitives.Type
    """
    name = name.upper()
    # Translate a 2-letter ISO code to its canonical 3-letter key first.
    if name in ISO_LANGUAGE_CODES:
        name = ISO_LANGUAGE_CODES[name]
    if name in ISO_MAJOR_LANGUAGE_TYPES:
        namespace, lang_name = '639-2', ISO_MAJOR_LANGUAGE_TYPES[name]
    elif name in ISO_OTHER_LANGUAGE_TYPES:
        namespace, lang_name = '639-3', ISO_OTHER_LANGUAGE_TYPES[name]
    else:
        raise NotFound('Language Type: ' + name)
    description = ('The display text language type for the ' +
                   lang_name + ' language.')
    return {
        'authority': 'ISO',
        'namespace': namespace,
        'identifier': name,
        'domain': 'DisplayText Languages',
        'display_name': lang_name + ' Language Type',
        'display_label': lang_name,
        'description': description
    }
|
[
"def",
"get_type_data",
"(",
"name",
")",
":",
"name",
"=",
"name",
".",
"upper",
"(",
")",
"if",
"name",
"in",
"ISO_LANGUAGE_CODES",
":",
"name",
"=",
"ISO_LANGUAGE_CODES",
"[",
"name",
"]",
"if",
"name",
"in",
"ISO_MAJOR_LANGUAGE_TYPES",
":",
"namespace",
"=",
"'639-2'",
"lang_name",
"=",
"ISO_MAJOR_LANGUAGE_TYPES",
"[",
"name",
"]",
"elif",
"name",
"in",
"ISO_OTHER_LANGUAGE_TYPES",
":",
"namespace",
"=",
"'639-3'",
"lang_name",
"=",
"ISO_OTHER_LANGUAGE_TYPES",
"[",
"name",
"]",
"else",
":",
"raise",
"NotFound",
"(",
"'Language Type: '",
"+",
"name",
")",
"return",
"{",
"'authority'",
":",
"'ISO'",
",",
"'namespace'",
":",
"namespace",
",",
"'identifier'",
":",
"name",
",",
"'domain'",
":",
"'DisplayText Languages'",
",",
"'display_name'",
":",
"lang_name",
"+",
"' Language Type'",
",",
"'display_label'",
":",
"lang_name",
",",
"'description'",
":",
"(",
"'The display text language type for the '",
"+",
"lang_name",
"+",
"' language.'",
")",
"}"
] |
Return dictionary representation of type.
Can be used to initialize primordium.type.primitives.Type
|
[
"Return",
"dictionary",
"representation",
"of",
"type",
"."
] |
python
|
train
| 31.321429 |
Cornices/cornice.ext.swagger
|
cornice_swagger/swagger.py
|
https://github.com/Cornices/cornice.ext.swagger/blob/c31a5cc8d5dd112b11dc41ccb6d09b423b537abc/cornice_swagger/swagger.py#L687-L705
|
def _extract_transform_colander_schema(self, args):
    """
    Extract schema from view args and transform it using
    the pipeline of schema transformers

    :param args:
        Arguments from the view decorator.

    :rtype: colander.MappingSchema()
    :returns: View schema cloned and transformed
    """
    # Default to an empty mapping when the view declares no schema.
    schema = args.get('schema', colander.MappingSchema())
    # The schema may be given as a class; instantiate it if so.
    if not isinstance(schema, colander.Schema):
        schema = schema()
    # Clone so transformers never mutate the schema attached to the view.
    schema = schema.clone()
    for transformer in self.schema_transformers:
        schema = transformer(schema, args)
    return schema
|
[
"def",
"_extract_transform_colander_schema",
"(",
"self",
",",
"args",
")",
":",
"schema",
"=",
"args",
".",
"get",
"(",
"'schema'",
",",
"colander",
".",
"MappingSchema",
"(",
")",
")",
"if",
"not",
"isinstance",
"(",
"schema",
",",
"colander",
".",
"Schema",
")",
":",
"schema",
"=",
"schema",
"(",
")",
"schema",
"=",
"schema",
".",
"clone",
"(",
")",
"for",
"transformer",
"in",
"self",
".",
"schema_transformers",
":",
"schema",
"=",
"transformer",
"(",
"schema",
",",
"args",
")",
"return",
"schema"
] |
Extract schema from view args and transform it using
the pipeline of schema transformers
:param args:
Arguments from the view decorator.
:rtype: colander.MappingSchema()
:returns: View schema cloned and transformed
|
[
"Extract",
"schema",
"from",
"view",
"args",
"and",
"transform",
"it",
"using",
"the",
"pipeline",
"of",
"schema",
"transformers"
] |
python
|
valid
| 32.894737 |
qubole/qds-sdk-py
|
qds_sdk/util.py
|
https://github.com/qubole/qds-sdk-py/blob/77210fb64e5a7d567aedeea3b742a1d872fd0e5e/qds_sdk/util.py#L146-L160
|
def _make_minimal(dictionary):
"""
This function removes all the keys whose value is either None or an empty
dictionary.
"""
new_dict = {}
for key, value in dictionary.items():
if value is not None:
if isinstance(value, dict):
new_value = _make_minimal(value)
if new_value:
new_dict[key] = new_value
else:
new_dict[key] = value
return new_dict
|
[
"def",
"_make_minimal",
"(",
"dictionary",
")",
":",
"new_dict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"dictionary",
".",
"items",
"(",
")",
":",
"if",
"value",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"value",
",",
"dict",
")",
":",
"new_value",
"=",
"_make_minimal",
"(",
"value",
")",
"if",
"new_value",
":",
"new_dict",
"[",
"key",
"]",
"=",
"new_value",
"else",
":",
"new_dict",
"[",
"key",
"]",
"=",
"value",
"return",
"new_dict"
] |
This function removes all the keys whose value is either None or an empty
dictionary.
|
[
"This",
"function",
"removes",
"all",
"the",
"keys",
"whose",
"value",
"is",
"either",
"None",
"or",
"an",
"empty",
"dictionary",
"."
] |
python
|
train
| 30.466667 |
ethpm/py-ethpm
|
ethpm/backends/ipfs.py
|
https://github.com/ethpm/py-ethpm/blob/81ed58d7c636fe00c6770edeb0401812b1a5e8fc/ethpm/backends/ipfs.py#L158-L170
|
def pin_assets(self, file_or_dir_path: Path) -> List[Dict[str, str]]:
    """
    Return a list of dicts, each containing the IPFS hash, file name, and
    size of a pinned file. A directory pins every file directly inside it.

    Raises FileNotFoundError for paths that are neither file nor directory.
    """
    if file_or_dir_path.is_dir():
        return [dummy_ipfs_pin(asset) for asset in file_or_dir_path.glob("*")]
    if file_or_dir_path.is_file():
        return [dummy_ipfs_pin(file_or_dir_path)]
    raise FileNotFoundError(
        f"{file_or_dir_path} is not a valid file or directory path."
    )
|
[
"def",
"pin_assets",
"(",
"self",
",",
"file_or_dir_path",
":",
"Path",
")",
"->",
"List",
"[",
"Dict",
"[",
"str",
",",
"str",
"]",
"]",
":",
"if",
"file_or_dir_path",
".",
"is_dir",
"(",
")",
":",
"asset_data",
"=",
"[",
"dummy_ipfs_pin",
"(",
"path",
")",
"for",
"path",
"in",
"file_or_dir_path",
".",
"glob",
"(",
"\"*\"",
")",
"]",
"elif",
"file_or_dir_path",
".",
"is_file",
"(",
")",
":",
"asset_data",
"=",
"[",
"dummy_ipfs_pin",
"(",
"file_or_dir_path",
")",
"]",
"else",
":",
"raise",
"FileNotFoundError",
"(",
"f\"{file_or_dir_path} is not a valid file or directory path.\"",
")",
"return",
"asset_data"
] |
Return a dict containing the IPFS hash, file name, and size of a file.
|
[
"Return",
"a",
"dict",
"containing",
"the",
"IPFS",
"hash",
"file",
"name",
"and",
"size",
"of",
"a",
"file",
"."
] |
python
|
train
| 42.615385 |
tino/pyFirmata
|
pyfirmata/pyfirmata.py
|
https://github.com/tino/pyFirmata/blob/05881909c4d7c4e808e9ed457144670b2136706e/pyfirmata/pyfirmata.py#L247-L257
|
def send_sysex(self, sysex_cmd, data):
    """
    Sends a SysEx msg over the serial port, framed by the START_SYSEX and
    END_SYSEX marker bytes.

    :arg sysex_cmd: A sysex command byte
    :arg data: a bytearray of 7-bit bytes of arbitrary data
    """
    packet = bytearray([START_SYSEX, sysex_cmd])
    packet.extend(data)
    packet.append(END_SYSEX)
    self.sp.write(packet)
|
[
"def",
"send_sysex",
"(",
"self",
",",
"sysex_cmd",
",",
"data",
")",
":",
"msg",
"=",
"bytearray",
"(",
"[",
"START_SYSEX",
",",
"sysex_cmd",
"]",
")",
"msg",
".",
"extend",
"(",
"data",
")",
"msg",
".",
"append",
"(",
"END_SYSEX",
")",
"self",
".",
"sp",
".",
"write",
"(",
"msg",
")"
] |
Sends a SysEx msg.
:arg sysex_cmd: A sysex command byte
: arg data: a bytearray of 7-bit bytes of arbitrary data
|
[
"Sends",
"a",
"SysEx",
"msg",
"."
] |
python
|
train
| 29.272727 |
sdss/tree
|
python/tree/misc/docutree.py
|
https://github.com/sdss/tree/blob/f61fe0876c138ccb61874912d4b8590dadfa835c/python/tree/misc/docutree.py#L105-L145
|
def _generate_section(self, name, config, cfg_section='default', remove_sasbase=False):
    """Generate the relevant Sphinx nodes.

    Generates a section for the Tree datamodel.  Formats a tree section
    as a list-table directive.

    Parameters:
        name (str):
            The name of the config to be documented, e.g. 'sdsswork'
        config (dict):
            The tree dictionary of the loaded config environ
        cfg_section (str):
            The section of the config to load
        remove_sasbase (bool):
            If True, removes the SAS_BASE_DIR from the beginning of each path

    Returns:
        A section docutil node
    """
    # the source name
    source_name = name
    # Title
    section = nodes.section(
        '',
        nodes.title(text=cfg_section),
        ids=[nodes.make_id(cfg_section)],
        names=[nodes.fully_normalize_name(cfg_section)])
    # Summarize: format the section as reST lines and let the docutils
    # state machine parse them into the new section node.
    result = statemachine.ViewList()
    base = config['default']['filesystem'] if remove_sasbase else None
    lines = _format_command(cfg_section, config[cfg_section], base=base)
    for line in lines:
        result.append(line, source_name)
    self.state.nested_parse(result, 0, section)
    return [section]
|
[
"def",
"_generate_section",
"(",
"self",
",",
"name",
",",
"config",
",",
"cfg_section",
"=",
"'default'",
",",
"remove_sasbase",
"=",
"False",
")",
":",
"# the source name",
"source_name",
"=",
"name",
"# Title",
"section",
"=",
"nodes",
".",
"section",
"(",
"''",
",",
"nodes",
".",
"title",
"(",
"text",
"=",
"cfg_section",
")",
",",
"ids",
"=",
"[",
"nodes",
".",
"make_id",
"(",
"cfg_section",
")",
"]",
",",
"names",
"=",
"[",
"nodes",
".",
"fully_normalize_name",
"(",
"cfg_section",
")",
"]",
")",
"# Summarize",
"result",
"=",
"statemachine",
".",
"ViewList",
"(",
")",
"base",
"=",
"config",
"[",
"'default'",
"]",
"[",
"'filesystem'",
"]",
"if",
"remove_sasbase",
"else",
"None",
"lines",
"=",
"_format_command",
"(",
"cfg_section",
",",
"config",
"[",
"cfg_section",
"]",
",",
"base",
"=",
"base",
")",
"for",
"line",
"in",
"lines",
":",
"result",
".",
"append",
"(",
"line",
",",
"source_name",
")",
"self",
".",
"state",
".",
"nested_parse",
"(",
"result",
",",
"0",
",",
"section",
")",
"return",
"[",
"section",
"]"
] |
Generate the relevant Sphinx nodes.
Generates a section for the Tree datamodel. Formats a tree section
as a list-table directive.
Parameters:
name (str):
The name of the config to be documented, e.g. 'sdsswork'
config (dict):
The tree dictionary of the loaded config environ
cfg_section (str):
The section of the config to load
remove_sasbase (bool):
If True, removes the SAS_BASE_DIR from the beginning of each path
Returns:
A section docutil node
|
[
"Generate",
"the",
"relevant",
"Sphinx",
"nodes",
"."
] |
python
|
train
| 32.073171 |
fabioz/PyDev.Debugger
|
third_party/pep8/pycodestyle.py
|
https://github.com/fabioz/PyDev.Debugger/blob/ed9c4307662a5593b8a7f1f3389ecd0e79b8c503/third_party/pep8/pycodestyle.py#L1201-L1214
|
def bare_except(logical_line, noqa):
    r"""When catching exceptions, mention specific exceptions whenever possible.

    Okay: except Exception:
    Okay: except BaseException:
    E722: except:
    """
    if noqa:
        return
    # re.match anchors at the start of the logical line, same as the
    # compiled-pattern .match() it replaces.
    match = re.match(r"except\s*:", logical_line)
    if match is not None:
        yield match.start(), "E722 do not use bare except'"
|
[
"def",
"bare_except",
"(",
"logical_line",
",",
"noqa",
")",
":",
"if",
"noqa",
":",
"return",
"regex",
"=",
"re",
".",
"compile",
"(",
"r\"except\\s*:\"",
")",
"match",
"=",
"regex",
".",
"match",
"(",
"logical_line",
")",
"if",
"match",
":",
"yield",
"match",
".",
"start",
"(",
")",
",",
"\"E722 do not use bare except'\""
] |
r"""When catching exceptions, mention specific exceptions whenever possible.
Okay: except Exception:
Okay: except BaseException:
E722: except:
|
[
"r",
"When",
"catching",
"exceptions",
"mention",
"specific",
"exceptions",
"whenever",
"possible",
"."
] |
python
|
train
| 26.428571 |
mitsei/dlkit
|
dlkit/json_/cataloging/sessions.py
|
https://github.com/mitsei/dlkit/blob/445f968a175d61c8d92c0f617a3c17dc1dc7c584/dlkit/json_/cataloging/sessions.py#L1368-L1386
|
def remove_child_catalog(self, catalog_id, child_id):
    """Removes a child from a catalog.

    arg:    catalog_id (osid.id.Id): the ``Id`` of a catalog
    arg:    child_id (osid.id.Id): the ``Id`` of the new child
    raise:  NotFound - ``catalog_id`` is not a parent of
            ``child_id``
    raise:  NullArgument - ``catalog_id`` or ``child_id`` is
            ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure occurred
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchyDesignSession.remove_child_bin_template
    if self._catalog_session is None:
        # No catalog session configured: fall back to the hierarchy session.
        return self._hierarchy_session.remove_child(id_=catalog_id, child_id=child_id)
    return self._catalog_session.remove_child_catalog(catalog_id=catalog_id, child_id=child_id)
|
[
"def",
"remove_child_catalog",
"(",
"self",
",",
"catalog_id",
",",
"child_id",
")",
":",
"# Implemented from template for",
"# osid.resource.BinHierarchyDesignSession.remove_child_bin_template",
"if",
"self",
".",
"_catalog_session",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_catalog_session",
".",
"remove_child_catalog",
"(",
"catalog_id",
"=",
"catalog_id",
",",
"child_id",
"=",
"child_id",
")",
"return",
"self",
".",
"_hierarchy_session",
".",
"remove_child",
"(",
"id_",
"=",
"catalog_id",
",",
"child_id",
"=",
"child_id",
")"
] |
Removes a child from a catalog.
arg: catalog_id (osid.id.Id): the ``Id`` of a catalog
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: NotFound - ``catalog_id`` is not a parent of
``child_id``
raise: NullArgument - ``catalog_id`` or ``child_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
*compliance: mandatory -- This method must be implemented.*
|
[
"Removes",
"a",
"child",
"from",
"a",
"catalog",
"."
] |
python
|
train
| 50.105263 |
saltstack/salt
|
salt/modules/boto_s3_bucket.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/modules/boto_s3_bucket.py#L646-L671
|
def put_policy(Bucket, Policy,
           region=None, key=None, keyid=None, profile=None):
    '''
    Given a valid config, update the policy for a bucket.

    Returns {updated: true} if policy was updated and returns
    {updated: False} if policy was not updated.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_s3_bucket.put_policy my_bucket {...}

    '''
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        # boto3 expects the policy as a JSON string; accept None (empty
        # policy), a ready-made string, or a JSON-serialisable object.
        if Policy is None:
            Policy = '{}'
        elif not isinstance(Policy, six.string_types):
            Policy = salt.utils.json.dumps(Policy)
        conn.put_bucket_policy(Bucket=Bucket, Policy=Policy)
        return {'updated': True, 'name': Bucket}
    except ClientError as e:
        # Surface the AWS error in salt's standard error format instead
        # of raising.
        return {'updated': False, 'error': __utils__['boto3.get_error'](e)}
|
[
"def",
"put_policy",
"(",
"Bucket",
",",
"Policy",
",",
"region",
"=",
"None",
",",
"key",
"=",
"None",
",",
"keyid",
"=",
"None",
",",
"profile",
"=",
"None",
")",
":",
"try",
":",
"conn",
"=",
"_get_conn",
"(",
"region",
"=",
"region",
",",
"key",
"=",
"key",
",",
"keyid",
"=",
"keyid",
",",
"profile",
"=",
"profile",
")",
"if",
"Policy",
"is",
"None",
":",
"Policy",
"=",
"'{}'",
"elif",
"not",
"isinstance",
"(",
"Policy",
",",
"six",
".",
"string_types",
")",
":",
"Policy",
"=",
"salt",
".",
"utils",
".",
"json",
".",
"dumps",
"(",
"Policy",
")",
"conn",
".",
"put_bucket_policy",
"(",
"Bucket",
"=",
"Bucket",
",",
"Policy",
"=",
"Policy",
")",
"return",
"{",
"'updated'",
":",
"True",
",",
"'name'",
":",
"Bucket",
"}",
"except",
"ClientError",
"as",
"e",
":",
"return",
"{",
"'updated'",
":",
"False",
",",
"'error'",
":",
"__utils__",
"[",
"'boto3.get_error'",
"]",
"(",
"e",
")",
"}"
] |
Given a valid config, update the policy for a bucket.
Returns {updated: true} if policy was updated and returns
{updated: False} if policy was not updated.
CLI Example:
.. code-block:: bash
salt myminion boto_s3_bucket.put_policy my_bucket {...}
|
[
"Given",
"a",
"valid",
"config",
"update",
"the",
"policy",
"for",
"a",
"bucket",
"."
] |
python
|
train
| 31.692308 |
doconix/django-mako-plus
|
django_mako_plus/router/discover.py
|
https://github.com/doconix/django-mako-plus/blob/a90f9b4af19e5fa9f83452989cdcaed21569a181/django_mako_plus/router/discover.py#L50-L99
|
def find_view_function(module_name, function_name, fallback_app=None, fallback_template=None, verify_decorator=True):
    '''
    Finds a view function, class-based view, or template view.
    Raises ViewDoesNotExist if not found.

    When the view module does not exist, falls back to rendering
    `fallback_template` in `fallback_app` directly. A class-based view is
    converted with as_view(); a plain function must carry the
    @view_function decorator unless `verify_decorator` is False.
    '''
    dmp = apps.get_app_config('django_mako_plus')
    # I'm calling find_spec first here because I don't want import_module in
    # a try/except -- there are lots of reasons that importing can fail, and I just want to
    # know whether the file actually exists.  find_spec raises AttributeError if not found.
    try:
        spec = find_spec(module_name)
    except ValueError:
        spec = None
    if spec is None:
        # no view module, so create a view function that directly renders the template
        try:
            return create_view_for_template(fallback_app, fallback_template)
        except TemplateDoesNotExist as e:
            raise ViewDoesNotExist('view module {} not found, and fallback template {} could not be loaded ({})'.format(module_name, fallback_template, e))
    # load the module and function
    try:
        module = import_module(module_name)
        func = getattr(module, function_name)
        func.view_type = 'function'
    except ImportError as e:
        raise ViewDoesNotExist('module "{}" could not be imported: {}'.format(module_name, e))
    except AttributeError as e:
        raise ViewDoesNotExist('module "{}" found successfully, but "{}" was not found: {}'.format(module_name, function_name, e))
    # if class-based view, call as_view() to get a view function to it
    if inspect.isclass(func) and issubclass(func, View):
        func = func.as_view()
        func.view_type = 'class'
    # if regular view function, check the decorator
    elif verify_decorator and not view_function.is_decorated(func):
        raise ViewDoesNotExist("view {}.{} was found successfully, but it must be decorated with @view_function or be a subclass of django.views.generic.View.".format(module_name, function_name))
    # attach a converter to the view function (used for URL parameter conversion)
    if dmp.options['PARAMETER_CONVERTER'] is not None:
        try:
            converter = import_qualified(dmp.options['PARAMETER_CONVERTER'])(func)
            setattr(func, CONVERTER_ATTRIBUTE_NAME, converter)
        except ImportError as e:
            raise ImproperlyConfigured('Cannot find PARAMETER_CONVERTER: {}'.format(str(e)))
    # return the function/class
    return func
|
[
"def",
"find_view_function",
"(",
"module_name",
",",
"function_name",
",",
"fallback_app",
"=",
"None",
",",
"fallback_template",
"=",
"None",
",",
"verify_decorator",
"=",
"True",
")",
":",
"dmp",
"=",
"apps",
".",
"get_app_config",
"(",
"'django_mako_plus'",
")",
"# I'm first calling find_spec first here beacuse I don't want import_module in",
"# a try/except -- there are lots of reasons that importing can fail, and I just want to",
"# know whether the file actually exists. find_spec raises AttributeError if not found.",
"try",
":",
"spec",
"=",
"find_spec",
"(",
"module_name",
")",
"except",
"ValueError",
":",
"spec",
"=",
"None",
"if",
"spec",
"is",
"None",
":",
"# no view module, so create a view function that directly renders the template",
"try",
":",
"return",
"create_view_for_template",
"(",
"fallback_app",
",",
"fallback_template",
")",
"except",
"TemplateDoesNotExist",
"as",
"e",
":",
"raise",
"ViewDoesNotExist",
"(",
"'view module {} not found, and fallback template {} could not be loaded ({})'",
".",
"format",
"(",
"module_name",
",",
"fallback_template",
",",
"e",
")",
")",
"# load the module and function",
"try",
":",
"module",
"=",
"import_module",
"(",
"module_name",
")",
"func",
"=",
"getattr",
"(",
"module",
",",
"function_name",
")",
"func",
".",
"view_type",
"=",
"'function'",
"except",
"ImportError",
"as",
"e",
":",
"raise",
"ViewDoesNotExist",
"(",
"'module \"{}\" could not be imported: {}'",
".",
"format",
"(",
"module_name",
",",
"e",
")",
")",
"except",
"AttributeError",
"as",
"e",
":",
"raise",
"ViewDoesNotExist",
"(",
"'module \"{}\" found successfully, but \"{}\" was not found: {}'",
".",
"format",
"(",
"module_name",
",",
"function_name",
",",
"e",
")",
")",
"# if class-based view, call as_view() to get a view function to it",
"if",
"inspect",
".",
"isclass",
"(",
"func",
")",
"and",
"issubclass",
"(",
"func",
",",
"View",
")",
":",
"func",
"=",
"func",
".",
"as_view",
"(",
")",
"func",
".",
"view_type",
"=",
"'class'",
"# if regular view function, check the decorator",
"elif",
"verify_decorator",
"and",
"not",
"view_function",
".",
"is_decorated",
"(",
"func",
")",
":",
"raise",
"ViewDoesNotExist",
"(",
"\"view {}.{} was found successfully, but it must be decorated with @view_function or be a subclass of django.views.generic.View.\"",
".",
"format",
"(",
"module_name",
",",
"function_name",
")",
")",
"# attach a converter to the view function",
"if",
"dmp",
".",
"options",
"[",
"'PARAMETER_CONVERTER'",
"]",
"is",
"not",
"None",
":",
"try",
":",
"converter",
"=",
"import_qualified",
"(",
"dmp",
".",
"options",
"[",
"'PARAMETER_CONVERTER'",
"]",
")",
"(",
"func",
")",
"setattr",
"(",
"func",
",",
"CONVERTER_ATTRIBUTE_NAME",
",",
"converter",
")",
"except",
"ImportError",
"as",
"e",
":",
"raise",
"ImproperlyConfigured",
"(",
"'Cannot find PARAMETER_CONVERTER: {}'",
".",
"format",
"(",
"str",
"(",
"e",
")",
")",
")",
"# return the function/class",
"return",
"func"
] |
Finds a view function, class-based view, or template view.
Raises ViewDoesNotExist if not found.
|
[
"Finds",
"a",
"view",
"function",
"class",
"-",
"based",
"view",
"or",
"template",
"view",
".",
"Raises",
"ViewDoesNotExist",
"if",
"not",
"found",
"."
] |
python
|
train
| 47.92 |
cloud9ers/gurumate
|
environment/lib/python2.7/site-packages/psutil/_pslinux.py
|
https://github.com/cloud9ers/gurumate/blob/075dc74d1ee62a8c6b7a8bf2b271364f01629d1e/environment/lib/python2.7/site-packages/psutil/_pslinux.py#L551-L609
|
def get_memory_maps(self):
"""Return process's mapped memory regions as a list of nameduples.
Fields are explained in 'man proc'; here is an updated (Apr 2012)
version: http://goo.gl/fmebo
"""
f = None
try:
f = open("/proc/%s/smaps" % self.pid)
first_line = f.readline()
current_block = [first_line]
def get_blocks():
data = {}
for line in f:
fields = line.split(None, 5)
if len(fields) >= 5:
yield (current_block.pop(), data)
current_block.append(line)
else:
data[fields[0]] = int(fields[1]) * 1024
yield (current_block.pop(), data)
if first_line: # smaps file can be empty
for header, data in get_blocks():
hfields = header.split(None, 5)
try:
addr, perms, offset, dev, inode, path = hfields
except ValueError:
addr, perms, offset, dev, inode, path = hfields + ['']
if not path:
path = '[anon]'
else:
path = path.strip()
yield (addr, perms, path,
data['Rss:'],
data['Size:'],
data.get('Pss:', 0),
data['Shared_Clean:'], data['Shared_Clean:'],
data['Private_Clean:'], data['Private_Dirty:'],
data['Referenced:'],
data['Anonymous:'],
data['Swap:'])
f.close()
except EnvironmentError:
# XXX - Can't use wrap_exceptions decorator as we're
# returning a generator; this probably needs some
# refactoring in order to avoid this code duplication.
if f is not None:
f.close()
err = sys.exc_info()[1]
if err.errno in (errno.ENOENT, errno.ESRCH):
raise NoSuchProcess(self.pid, self._process_name)
if err.errno in (errno.EPERM, errno.EACCES):
raise AccessDenied(self.pid, self._process_name)
raise
except:
if f is not None:
f.close()
raise
|
[
"def",
"get_memory_maps",
"(",
"self",
")",
":",
"f",
"=",
"None",
"try",
":",
"f",
"=",
"open",
"(",
"\"/proc/%s/smaps\"",
"%",
"self",
".",
"pid",
")",
"first_line",
"=",
"f",
".",
"readline",
"(",
")",
"current_block",
"=",
"[",
"first_line",
"]",
"def",
"get_blocks",
"(",
")",
":",
"data",
"=",
"{",
"}",
"for",
"line",
"in",
"f",
":",
"fields",
"=",
"line",
".",
"split",
"(",
"None",
",",
"5",
")",
"if",
"len",
"(",
"fields",
")",
">=",
"5",
":",
"yield",
"(",
"current_block",
".",
"pop",
"(",
")",
",",
"data",
")",
"current_block",
".",
"append",
"(",
"line",
")",
"else",
":",
"data",
"[",
"fields",
"[",
"0",
"]",
"]",
"=",
"int",
"(",
"fields",
"[",
"1",
"]",
")",
"*",
"1024",
"yield",
"(",
"current_block",
".",
"pop",
"(",
")",
",",
"data",
")",
"if",
"first_line",
":",
"# smaps file can be empty",
"for",
"header",
",",
"data",
"in",
"get_blocks",
"(",
")",
":",
"hfields",
"=",
"header",
".",
"split",
"(",
"None",
",",
"5",
")",
"try",
":",
"addr",
",",
"perms",
",",
"offset",
",",
"dev",
",",
"inode",
",",
"path",
"=",
"hfields",
"except",
"ValueError",
":",
"addr",
",",
"perms",
",",
"offset",
",",
"dev",
",",
"inode",
",",
"path",
"=",
"hfields",
"+",
"[",
"''",
"]",
"if",
"not",
"path",
":",
"path",
"=",
"'[anon]'",
"else",
":",
"path",
"=",
"path",
".",
"strip",
"(",
")",
"yield",
"(",
"addr",
",",
"perms",
",",
"path",
",",
"data",
"[",
"'Rss:'",
"]",
",",
"data",
"[",
"'Size:'",
"]",
",",
"data",
".",
"get",
"(",
"'Pss:'",
",",
"0",
")",
",",
"data",
"[",
"'Shared_Clean:'",
"]",
",",
"data",
"[",
"'Shared_Clean:'",
"]",
",",
"data",
"[",
"'Private_Clean:'",
"]",
",",
"data",
"[",
"'Private_Dirty:'",
"]",
",",
"data",
"[",
"'Referenced:'",
"]",
",",
"data",
"[",
"'Anonymous:'",
"]",
",",
"data",
"[",
"'Swap:'",
"]",
")",
"f",
".",
"close",
"(",
")",
"except",
"EnvironmentError",
":",
"# XXX - Can't use wrap_exceptions decorator as we're",
"# returning a generator; this probably needs some",
"# refactoring in order to avoid this code duplication.",
"if",
"f",
"is",
"not",
"None",
":",
"f",
".",
"close",
"(",
")",
"err",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"if",
"err",
".",
"errno",
"in",
"(",
"errno",
".",
"ENOENT",
",",
"errno",
".",
"ESRCH",
")",
":",
"raise",
"NoSuchProcess",
"(",
"self",
".",
"pid",
",",
"self",
".",
"_process_name",
")",
"if",
"err",
".",
"errno",
"in",
"(",
"errno",
".",
"EPERM",
",",
"errno",
".",
"EACCES",
")",
":",
"raise",
"AccessDenied",
"(",
"self",
".",
"pid",
",",
"self",
".",
"_process_name",
")",
"raise",
"except",
":",
"if",
"f",
"is",
"not",
"None",
":",
"f",
".",
"close",
"(",
")",
"raise"
] |
Return process's mapped memory regions as a list of nameduples.
Fields are explained in 'man proc'; here is an updated (Apr 2012)
version: http://goo.gl/fmebo
|
[
"Return",
"process",
"s",
"mapped",
"memory",
"regions",
"as",
"a",
"list",
"of",
"nameduples",
".",
"Fields",
"are",
"explained",
"in",
"man",
"proc",
";",
"here",
"is",
"an",
"updated",
"(",
"Apr",
"2012",
")",
"version",
":",
"http",
":",
"//",
"goo",
".",
"gl",
"/",
"fmebo"
] |
python
|
test
| 41.084746 |
saltstack/salt
|
salt/cloud/clouds/oneandone.py
|
https://github.com/saltstack/salt/blob/e8541fd6e744ab0df786c0f76102e41631f45d46/salt/cloud/clouds/oneandone.py#L1020-L1039
|
def load_public_key(vm_):
'''
Load the public key file if exists.
'''
public_key_filename = config.get_cloud_config_value(
'ssh_public_key', vm_, __opts__, search_global=False, default=None
)
if public_key_filename is not None:
public_key_filename = os.path.expanduser(public_key_filename)
if not os.path.isfile(public_key_filename):
raise SaltCloudConfigError(
'The defined ssh_public_key \'{0}\' does not exist'.format(
public_key_filename
)
)
with salt.utils.files.fopen(public_key_filename, 'r') as public_key:
key = salt.utils.stringutils.to_unicode(public_key.read().replace('\n', ''))
return key
|
[
"def",
"load_public_key",
"(",
"vm_",
")",
":",
"public_key_filename",
"=",
"config",
".",
"get_cloud_config_value",
"(",
"'ssh_public_key'",
",",
"vm_",
",",
"__opts__",
",",
"search_global",
"=",
"False",
",",
"default",
"=",
"None",
")",
"if",
"public_key_filename",
"is",
"not",
"None",
":",
"public_key_filename",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"public_key_filename",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"public_key_filename",
")",
":",
"raise",
"SaltCloudConfigError",
"(",
"'The defined ssh_public_key \\'{0}\\' does not exist'",
".",
"format",
"(",
"public_key_filename",
")",
")",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"public_key_filename",
",",
"'r'",
")",
"as",
"public_key",
":",
"key",
"=",
"salt",
".",
"utils",
".",
"stringutils",
".",
"to_unicode",
"(",
"public_key",
".",
"read",
"(",
")",
".",
"replace",
"(",
"'\\n'",
",",
"''",
")",
")",
"return",
"key"
] |
Load the public key file if exists.
|
[
"Load",
"the",
"public",
"key",
"file",
"if",
"exists",
"."
] |
python
|
train
| 37.05 |
openvax/varcode
|
varcode/reference.py
|
https://github.com/openvax/varcode/blob/981633db45ca2b31f76c06894a7360ea5d70a9b8/varcode/reference.py#L59-L99
|
def infer_reference_name(reference_name_or_path):
"""
Given a string containing a reference name (such as a path to
that reference's FASTA file), return its canonical name
as used by Ensembl.
"""
# identify all cases where reference name or path matches candidate aliases
reference_file_name = os.path.basename(reference_name_or_path)
matches = {'file_name': list(), 'full_path': list()}
for assembly_name in reference_alias_dict.keys():
candidate_list = [assembly_name] + reference_alias_dict[assembly_name]
for candidate in candidate_list:
if candidate.lower() in reference_file_name.lower():
matches['file_name'].append(assembly_name)
elif candidate.lower() in reference_name_or_path.lower():
matches['full_path'].append(assembly_name)
# remove duplicate matches (happens due to overlapping aliases)
matches['file_name'] = list(set(matches['file_name']))
matches['full_path'] = list(set(matches['full_path']))
# given set of existing matches, choose one to return
# (first select based on file_name, then full path. If multiples, use most recent)
if len(matches['file_name']) == 1:
match = matches['file_name'][0]
elif len(matches['file_name']) > 1:
# separate logic for >1 vs 1 to give informative warning
match = _most_recent_assembly(matches['file_name'])
warn(
('More than one reference ({}) matches path in header ({}); '
'the most recent one ({}) was used.').format(
','.join(matches['file_name']), reference_file_name, match))
elif len(matches['full_path']) >= 1:
# combine full-path logic since warning is the same
match = _most_recent_assembly(matches['full_path'])
warn((
'Reference could not be matched against filename ({}); '
'using best match against full path ({}).').format(
reference_name_or_path, match))
else:
raise ValueError(
"Failed to infer genome assembly name for %s" % reference_name_or_path)
return match
|
[
"def",
"infer_reference_name",
"(",
"reference_name_or_path",
")",
":",
"# identify all cases where reference name or path matches candidate aliases",
"reference_file_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"reference_name_or_path",
")",
"matches",
"=",
"{",
"'file_name'",
":",
"list",
"(",
")",
",",
"'full_path'",
":",
"list",
"(",
")",
"}",
"for",
"assembly_name",
"in",
"reference_alias_dict",
".",
"keys",
"(",
")",
":",
"candidate_list",
"=",
"[",
"assembly_name",
"]",
"+",
"reference_alias_dict",
"[",
"assembly_name",
"]",
"for",
"candidate",
"in",
"candidate_list",
":",
"if",
"candidate",
".",
"lower",
"(",
")",
"in",
"reference_file_name",
".",
"lower",
"(",
")",
":",
"matches",
"[",
"'file_name'",
"]",
".",
"append",
"(",
"assembly_name",
")",
"elif",
"candidate",
".",
"lower",
"(",
")",
"in",
"reference_name_or_path",
".",
"lower",
"(",
")",
":",
"matches",
"[",
"'full_path'",
"]",
".",
"append",
"(",
"assembly_name",
")",
"# remove duplicate matches (happens due to overlapping aliases)",
"matches",
"[",
"'file_name'",
"]",
"=",
"list",
"(",
"set",
"(",
"matches",
"[",
"'file_name'",
"]",
")",
")",
"matches",
"[",
"'full_path'",
"]",
"=",
"list",
"(",
"set",
"(",
"matches",
"[",
"'full_path'",
"]",
")",
")",
"# given set of existing matches, choose one to return",
"# (first select based on file_name, then full path. If multiples, use most recent)",
"if",
"len",
"(",
"matches",
"[",
"'file_name'",
"]",
")",
"==",
"1",
":",
"match",
"=",
"matches",
"[",
"'file_name'",
"]",
"[",
"0",
"]",
"elif",
"len",
"(",
"matches",
"[",
"'file_name'",
"]",
")",
">",
"1",
":",
"# separate logic for >1 vs 1 to give informative warning",
"match",
"=",
"_most_recent_assembly",
"(",
"matches",
"[",
"'file_name'",
"]",
")",
"warn",
"(",
"(",
"'More than one reference ({}) matches path in header ({}); '",
"'the most recent one ({}) was used.'",
")",
".",
"format",
"(",
"','",
".",
"join",
"(",
"matches",
"[",
"'file_name'",
"]",
")",
",",
"reference_file_name",
",",
"match",
")",
")",
"elif",
"len",
"(",
"matches",
"[",
"'full_path'",
"]",
")",
">=",
"1",
":",
"# combine full-path logic since warning is the same",
"match",
"=",
"_most_recent_assembly",
"(",
"matches",
"[",
"'full_path'",
"]",
")",
"warn",
"(",
"(",
"'Reference could not be matched against filename ({}); '",
"'using best match against full path ({}).'",
")",
".",
"format",
"(",
"reference_name_or_path",
",",
"match",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Failed to infer genome assembly name for %s\"",
"%",
"reference_name_or_path",
")",
"return",
"match"
] |
Given a string containing a reference name (such as a path to
that reference's FASTA file), return its canonical name
as used by Ensembl.
|
[
"Given",
"a",
"string",
"containing",
"a",
"reference",
"name",
"(",
"such",
"as",
"a",
"path",
"to",
"that",
"reference",
"s",
"FASTA",
"file",
")",
"return",
"its",
"canonical",
"name",
"as",
"used",
"by",
"Ensembl",
"."
] |
python
|
train
| 51.195122 |
tradenity/python-sdk
|
tradenity/resources/country.py
|
https://github.com/tradenity/python-sdk/blob/d13fbe23f4d6ff22554c6d8d2deaf209371adaf1/tradenity/resources/country.py#L401-L421
|
def create_country(cls, country, **kwargs):
"""Create Country
Create a new Country
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_country(country, async=True)
>>> result = thread.get()
:param async bool
:param Country country: Attributes of country to create (required)
:return: Country
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._create_country_with_http_info(country, **kwargs)
else:
(data) = cls._create_country_with_http_info(country, **kwargs)
return data
|
[
"def",
"create_country",
"(",
"cls",
",",
"country",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_create_country_with_http_info",
"(",
"country",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_create_country_with_http_info",
"(",
"country",
",",
"*",
"*",
"kwargs",
")",
"return",
"data"
] |
Create Country
Create a new Country
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.create_country(country, async=True)
>>> result = thread.get()
:param async bool
:param Country country: Attributes of country to create (required)
:return: Country
If the method is called asynchronously,
returns the request thread.
|
[
"Create",
"Country"
] |
python
|
train
| 38.809524 |
stepank/pyws
|
src/pyws/server.py
|
https://github.com/stepank/pyws/blob/ff39133aabeb56bbb08d66286ac0cc8731eda7dd/src/pyws/server.py#L117-L125
|
def add_function(self, function):
"""
Registers the function to the server's default fixed function manager.
"""
#noinspection PyTypeChecker
if not len(self.settings.FUNCTION_MANAGERS):
raise ConfigurationError(
'Where have the default function manager gone?!')
self.settings.FUNCTION_MANAGERS[0].add_function(function)
|
[
"def",
"add_function",
"(",
"self",
",",
"function",
")",
":",
"#noinspection PyTypeChecker",
"if",
"not",
"len",
"(",
"self",
".",
"settings",
".",
"FUNCTION_MANAGERS",
")",
":",
"raise",
"ConfigurationError",
"(",
"'Where have the default function manager gone?!'",
")",
"self",
".",
"settings",
".",
"FUNCTION_MANAGERS",
"[",
"0",
"]",
".",
"add_function",
"(",
"function",
")"
] |
Registers the function to the server's default fixed function manager.
|
[
"Registers",
"the",
"function",
"to",
"the",
"server",
"s",
"default",
"fixed",
"function",
"manager",
"."
] |
python
|
train
| 43 |
KeplerGO/K2fov
|
K2fov/K2findCampaigns.py
|
https://github.com/KeplerGO/K2fov/blob/fb122b35687340e0357cba9e0dd47b3be0760693/K2fov/K2findCampaigns.py#L133-L169
|
def K2findCampaigns_byname_main(args=None):
"""Exposes K2findCampaigns to the command line."""
parser = argparse.ArgumentParser(
description="Check if a target is "
"(or was) observable by any past or future "
"observing campaign of NASA's K2 mission.")
parser.add_argument('name', nargs=1, type=str,
help="Name of the object. This will be passed on "
"to the CDS name resolver "
"to retrieve coordinate information.")
parser.add_argument('-p', '--plot', action='store_true',
help="Produce a plot showing the target position "
"with respect to all K2 campaigns.")
args = parser.parse_args(args)
targetname = args.name[0]
try:
campaigns, ra, dec = findCampaignsByName(targetname)
except ValueError:
print("Error: could not retrieve coordinates for {0}.".format(targetname))
print("The target may be unknown or there may be a problem "
"connecting to the coordinate server.")
sys.exit(1)
# Print the result
if len(campaigns):
print(Highlight.GREEN +
"Success! {0} is on silicon ".format(targetname) +
"during K2 campaigns {0}.".format(campaigns) +
Highlight.END)
else:
print(Highlight.RED + "Sorry, {} is not on silicon "
"during any K2 campaign.".format(targetname) + Highlight.END)
# Print the pixel positions
for c in campaigns:
printChannelColRow(c, ra, dec)
# Make a context plot if the user requested so
if args.plot:
save_context_plots(ra, dec, targetname=targetname)
|
[
"def",
"K2findCampaigns_byname_main",
"(",
"args",
"=",
"None",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"\"Check if a target is \"",
"\"(or was) observable by any past or future \"",
"\"observing campaign of NASA's K2 mission.\"",
")",
"parser",
".",
"add_argument",
"(",
"'name'",
",",
"nargs",
"=",
"1",
",",
"type",
"=",
"str",
",",
"help",
"=",
"\"Name of the object. This will be passed on \"",
"\"to the CDS name resolver \"",
"\"to retrieve coordinate information.\"",
")",
"parser",
".",
"add_argument",
"(",
"'-p'",
",",
"'--plot'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"\"Produce a plot showing the target position \"",
"\"with respect to all K2 campaigns.\"",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"args",
")",
"targetname",
"=",
"args",
".",
"name",
"[",
"0",
"]",
"try",
":",
"campaigns",
",",
"ra",
",",
"dec",
"=",
"findCampaignsByName",
"(",
"targetname",
")",
"except",
"ValueError",
":",
"print",
"(",
"\"Error: could not retrieve coordinates for {0}.\"",
".",
"format",
"(",
"targetname",
")",
")",
"print",
"(",
"\"The target may be unknown or there may be a problem \"",
"\"connecting to the coordinate server.\"",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"# Print the result",
"if",
"len",
"(",
"campaigns",
")",
":",
"print",
"(",
"Highlight",
".",
"GREEN",
"+",
"\"Success! {0} is on silicon \"",
".",
"format",
"(",
"targetname",
")",
"+",
"\"during K2 campaigns {0}.\"",
".",
"format",
"(",
"campaigns",
")",
"+",
"Highlight",
".",
"END",
")",
"else",
":",
"print",
"(",
"Highlight",
".",
"RED",
"+",
"\"Sorry, {} is not on silicon \"",
"\"during any K2 campaign.\"",
".",
"format",
"(",
"targetname",
")",
"+",
"Highlight",
".",
"END",
")",
"# Print the pixel positions",
"for",
"c",
"in",
"campaigns",
":",
"printChannelColRow",
"(",
"c",
",",
"ra",
",",
"dec",
")",
"# Make a context plot if the user requested so",
"if",
"args",
".",
"plot",
":",
"save_context_plots",
"(",
"ra",
",",
"dec",
",",
"targetname",
"=",
"targetname",
")"
] |
Exposes K2findCampaigns to the command line.
|
[
"Exposes",
"K2findCampaigns",
"to",
"the",
"command",
"line",
"."
] |
python
|
train
| 47.297297 |
log2timeline/plaso
|
plaso/parsers/winreg_plugins/sam_users.py
|
https://github.com/log2timeline/plaso/blob/9c564698d2da3ffbe23607a3c54c0582ea18a6cc/plaso/parsers/winreg_plugins/sam_users.py#L60-L85
|
def _ParseFValue(self, registry_key):
"""Parses an F value.
Args:
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
Returns:
f_value: F value stored in the Windows Registry key.
Raises:
ParseError: if the Windows Registry key does not contain an F value or
F value cannot be parsed.
"""
registry_value = registry_key.GetValueByName('F')
if not registry_value:
raise errors.ParseError(
'missing value: "F" in Windows Registry key: {0:s}.'.format(
registry_key.name))
f_value_map = self._GetDataTypeMap('f_value')
try:
return self._ReadStructureFromByteStream(
registry_value.data, 0, f_value_map)
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError(exception)
|
[
"def",
"_ParseFValue",
"(",
"self",
",",
"registry_key",
")",
":",
"registry_value",
"=",
"registry_key",
".",
"GetValueByName",
"(",
"'F'",
")",
"if",
"not",
"registry_value",
":",
"raise",
"errors",
".",
"ParseError",
"(",
"'missing value: \"F\" in Windows Registry key: {0:s}.'",
".",
"format",
"(",
"registry_key",
".",
"name",
")",
")",
"f_value_map",
"=",
"self",
".",
"_GetDataTypeMap",
"(",
"'f_value'",
")",
"try",
":",
"return",
"self",
".",
"_ReadStructureFromByteStream",
"(",
"registry_value",
".",
"data",
",",
"0",
",",
"f_value_map",
")",
"except",
"(",
"ValueError",
",",
"errors",
".",
"ParseError",
")",
"as",
"exception",
":",
"raise",
"errors",
".",
"ParseError",
"(",
"exception",
")"
] |
Parses an F value.
Args:
registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
Returns:
f_value: F value stored in the Windows Registry key.
Raises:
ParseError: if the Windows Registry key does not contain an F value or
F value cannot be parsed.
|
[
"Parses",
"an",
"F",
"value",
"."
] |
python
|
train
| 30.576923 |
cloudant/python-cloudant
|
src/cloudant/_client_session.py
|
https://github.com/cloudant/python-cloudant/blob/e0ba190f6ba07fe3522a668747128214ad573c7e/src/cloudant/_client_session.py#L44-L58
|
def base64_user_pass(self):
"""
Composes a basic http auth string, suitable for use with the
_replicator database, and other places that need it.
:returns: Basic http authentication string
"""
if self._username is None or self._password is None:
return None
hash_ = base64.urlsafe_b64encode(bytes_("{username}:{password}".format(
username=self._username,
password=self._password
)))
return "Basic {0}".format(unicode_(hash_))
|
[
"def",
"base64_user_pass",
"(",
"self",
")",
":",
"if",
"self",
".",
"_username",
"is",
"None",
"or",
"self",
".",
"_password",
"is",
"None",
":",
"return",
"None",
"hash_",
"=",
"base64",
".",
"urlsafe_b64encode",
"(",
"bytes_",
"(",
"\"{username}:{password}\"",
".",
"format",
"(",
"username",
"=",
"self",
".",
"_username",
",",
"password",
"=",
"self",
".",
"_password",
")",
")",
")",
"return",
"\"Basic {0}\"",
".",
"format",
"(",
"unicode_",
"(",
"hash_",
")",
")"
] |
Composes a basic http auth string, suitable for use with the
_replicator database, and other places that need it.
:returns: Basic http authentication string
|
[
"Composes",
"a",
"basic",
"http",
"auth",
"string",
"suitable",
"for",
"use",
"with",
"the",
"_replicator",
"database",
"and",
"other",
"places",
"that",
"need",
"it",
"."
] |
python
|
train
| 34.733333 |
hobson/aima
|
aima/probability.py
|
https://github.com/hobson/aima/blob/3572b2fb92039b4a1abe384be8545560fbd3d470/aima/probability.py#L265-L275
|
def enumeration_ask(X, e, bn):
"""Return the conditional probability distribution of variable X
given evidence e, from BayesNet bn. [Fig. 14.9]
>>> enumeration_ask('Burglary', dict(JohnCalls=T, MaryCalls=T), burglary
... ).show_approx()
'False: 0.716, True: 0.284'"""
assert X not in e, "Query variable must be distinct from evidence"
Q = ProbDist(X)
for xi in bn.variable_values(X):
Q[xi] = enumerate_all(bn.vars, extend(e, X, xi), bn)
return Q.normalize()
|
[
"def",
"enumeration_ask",
"(",
"X",
",",
"e",
",",
"bn",
")",
":",
"assert",
"X",
"not",
"in",
"e",
",",
"\"Query variable must be distinct from evidence\"",
"Q",
"=",
"ProbDist",
"(",
"X",
")",
"for",
"xi",
"in",
"bn",
".",
"variable_values",
"(",
"X",
")",
":",
"Q",
"[",
"xi",
"]",
"=",
"enumerate_all",
"(",
"bn",
".",
"vars",
",",
"extend",
"(",
"e",
",",
"X",
",",
"xi",
")",
",",
"bn",
")",
"return",
"Q",
".",
"normalize",
"(",
")"
] |
Return the conditional probability distribution of variable X
given evidence e, from BayesNet bn. [Fig. 14.9]
>>> enumeration_ask('Burglary', dict(JohnCalls=T, MaryCalls=T), burglary
... ).show_approx()
'False: 0.716, True: 0.284
|
[
"Return",
"the",
"conditional",
"probability",
"distribution",
"of",
"variable",
"X",
"given",
"evidence",
"e",
"from",
"BayesNet",
"bn",
".",
"[",
"Fig",
".",
"14",
".",
"9",
"]",
">>>",
"enumeration_ask",
"(",
"Burglary",
"dict",
"(",
"JohnCalls",
"=",
"T",
"MaryCalls",
"=",
"T",
")",
"burglary",
"...",
")",
".",
"show_approx",
"()",
"False",
":",
"0",
".",
"716",
"True",
":",
"0",
".",
"284"
] |
python
|
valid
| 44.727273 |
CalebBell/fpi
|
fpi/drag.py
|
https://github.com/CalebBell/fpi/blob/6e6da3b9d0c17e10cc0886c97bc1bb8aeba2cca5/fpi/drag.py#L278-L340
|
def Morsi_Alexander(Re):
r'''Calculates drag coefficient of a smooth sphere using the method in
[1]_ as described in [2]_.
.. math::
C_D = \left\{ \begin{array}{ll}
\frac{24}{Re} & \mbox{if $Re < 0.1$}\\
\frac{22.73}{Re}+\frac{0.0903}{Re^2} + 3.69 & \mbox{if $0.1 < Re < 1$}\\
\frac{29.1667}{Re}-\frac{3.8889}{Re^2} + 1.2220 & \mbox{if $1 < Re < 10$}\\
\frac{46.5}{Re}-\frac{116.67}{Re^2} + 0.6167 & \mbox{if $10 < Re < 100$}\\
\frac{98.33}{Re}-\frac{2778}{Re^2} + 0.3644 & \mbox{if $100 < Re < 1000$}\\
\frac{148.62}{Re}-\frac{4.75\times10^4}{Re^2} + 0.3570 & \mbox{if $1000 < Re < 5000$}\\
\frac{-490.5460}{Re}+\frac{57.87\times10^4}{Re^2} + 0.46 & \mbox{if $5000 < Re < 10000$}\\
\frac{-1662.5}{Re}+\frac{5.4167\times10^6}{Re^2} + 0.5191 & \mbox{if $10000 < Re < 50000$}\end{array} \right.
Parameters
----------
Re : float
Reynolds number of the sphere, [-]
Returns
-------
Cd : float
Drag coefficient [-]
Notes
-----
Range is Re <= 2E5.
Original was reviewed, and confirmed to contain the cited equations.
Examples
--------
>>> Morsi_Alexander(200)
0.7866
References
----------
.. [1] Morsi, S. A., and A. J. Alexander. "An Investigation of Particle
Trajectories in Two-Phase Flow Systems." Journal of Fluid Mechanics
55, no. 02 (September 1972): 193-208. doi:10.1017/S0022112072001806.
.. [2] Barati, Reza, Seyed Ali Akbar Salehi Neyshabouri, and Goodarz
Ahmadi. "Development of Empirical Models with High Accuracy for
Estimation of Drag Coefficient of Flow around a Smooth Sphere: An
Evolutionary Approach." Powder Technology 257 (May 2014): 11-19.
doi:10.1016/j.powtec.2014.02.045.
'''
if Re < 0.1:
Cd = 24./Re
elif Re < 1:
Cd = 22.73/Re + 0.0903/Re**2 + 3.69
elif Re < 10:
Cd = 29.1667/Re - 3.8889/Re**2 + 1.222
elif Re < 100:
Cd = 46.5/Re - 116.67/Re**2 + 0.6167
elif Re < 1000:
Cd = 98.33/Re - 2778./Re**2 + 0.3644
elif Re < 5000:
Cd = 148.62/Re - 4.75E4/Re**2 + 0.357
elif Re < 10000:
Cd = -490.546/Re + 57.87E4/Re**2 + 0.46
else:
Cd = -1662.5/Re + 5.4167E6/Re**2 + 0.5191
return Cd
|
[
"def",
"Morsi_Alexander",
"(",
"Re",
")",
":",
"if",
"Re",
"<",
"0.1",
":",
"Cd",
"=",
"24.",
"/",
"Re",
"elif",
"Re",
"<",
"1",
":",
"Cd",
"=",
"22.73",
"/",
"Re",
"+",
"0.0903",
"/",
"Re",
"**",
"2",
"+",
"3.69",
"elif",
"Re",
"<",
"10",
":",
"Cd",
"=",
"29.1667",
"/",
"Re",
"-",
"3.8889",
"/",
"Re",
"**",
"2",
"+",
"1.222",
"elif",
"Re",
"<",
"100",
":",
"Cd",
"=",
"46.5",
"/",
"Re",
"-",
"116.67",
"/",
"Re",
"**",
"2",
"+",
"0.6167",
"elif",
"Re",
"<",
"1000",
":",
"Cd",
"=",
"98.33",
"/",
"Re",
"-",
"2778.",
"/",
"Re",
"**",
"2",
"+",
"0.3644",
"elif",
"Re",
"<",
"5000",
":",
"Cd",
"=",
"148.62",
"/",
"Re",
"-",
"4.75E4",
"/",
"Re",
"**",
"2",
"+",
"0.357",
"elif",
"Re",
"<",
"10000",
":",
"Cd",
"=",
"-",
"490.546",
"/",
"Re",
"+",
"57.87E4",
"/",
"Re",
"**",
"2",
"+",
"0.46",
"else",
":",
"Cd",
"=",
"-",
"1662.5",
"/",
"Re",
"+",
"5.4167E6",
"/",
"Re",
"**",
"2",
"+",
"0.5191",
"return",
"Cd"
] |
r'''Calculates drag coefficient of a smooth sphere using the method in
[1]_ as described in [2]_.
.. math::
C_D = \left\{ \begin{array}{ll}
\frac{24}{Re} & \mbox{if $Re < 0.1$}\\
\frac{22.73}{Re}+\frac{0.0903}{Re^2} + 3.69 & \mbox{if $0.1 < Re < 1$}\\
\frac{29.1667}{Re}-\frac{3.8889}{Re^2} + 1.2220 & \mbox{if $1 < Re < 10$}\\
\frac{46.5}{Re}-\frac{116.67}{Re^2} + 0.6167 & \mbox{if $10 < Re < 100$}\\
\frac{98.33}{Re}-\frac{2778}{Re^2} + 0.3644 & \mbox{if $100 < Re < 1000$}\\
\frac{148.62}{Re}-\frac{4.75\times10^4}{Re^2} + 0.3570 & \mbox{if $1000 < Re < 5000$}\\
\frac{-490.5460}{Re}+\frac{57.87\times10^4}{Re^2} + 0.46 & \mbox{if $5000 < Re < 10000$}\\
\frac{-1662.5}{Re}+\frac{5.4167\times10^6}{Re^2} + 0.5191 & \mbox{if $10000 < Re < 50000$}\end{array} \right.
Parameters
----------
Re : float
Reynolds number of the sphere, [-]
Returns
-------
Cd : float
Drag coefficient [-]
Notes
-----
Range is Re <= 2E5.
Original was reviewed, and confirmed to contain the cited equations.
Examples
--------
>>> Morsi_Alexander(200)
0.7866
References
----------
.. [1] Morsi, S. A., and A. J. Alexander. "An Investigation of Particle
Trajectories in Two-Phase Flow Systems." Journal of Fluid Mechanics
55, no. 02 (September 1972): 193-208. doi:10.1017/S0022112072001806.
.. [2] Barati, Reza, Seyed Ali Akbar Salehi Neyshabouri, and Goodarz
Ahmadi. "Development of Empirical Models with High Accuracy for
Estimation of Drag Coefficient of Flow around a Smooth Sphere: An
Evolutionary Approach." Powder Technology 257 (May 2014): 11-19.
doi:10.1016/j.powtec.2014.02.045.
|
[
"r",
"Calculates",
"drag",
"coefficient",
"of",
"a",
"smooth",
"sphere",
"using",
"the",
"method",
"in",
"[",
"1",
"]",
"_",
"as",
"described",
"in",
"[",
"2",
"]",
"_",
"."
] |
python
|
train
| 35.793651 |
StagPython/StagPy
|
stagpy/processing.py
|
https://github.com/StagPython/StagPy/blob/18c4416cc4a1011db2fd736ee8b0ec29aa6e4fd4/stagpy/processing.py#L180-L193
|
def diffs_prof(step):
"""Scaled diffusion.
This computation takes sphericity into account if necessary.
Args:
step (:class:`~stagpy.stagyydata._Step`): a step of a StagyyData
instance.
Returns:
tuple of :class:`numpy.array`: the diffusion and the radial position
at which it is evaluated.
"""
diff, rad = diff_prof(step)
return _scale_prof(step, diff, rad), rad
|
[
"def",
"diffs_prof",
"(",
"step",
")",
":",
"diff",
",",
"rad",
"=",
"diff_prof",
"(",
"step",
")",
"return",
"_scale_prof",
"(",
"step",
",",
"diff",
",",
"rad",
")",
",",
"rad"
] |
Scaled diffusion.
This computation takes sphericity into account if necessary.
Args:
step (:class:`~stagpy.stagyydata._Step`): a step of a StagyyData
instance.
Returns:
tuple of :class:`numpy.array`: the diffusion and the radial position
at which it is evaluated.
|
[
"Scaled",
"diffusion",
"."
] |
python
|
train
| 29.571429 |
SatelliteQE/nailgun
|
nailgun/entity_mixins.py
|
https://github.com/SatelliteQE/nailgun/blob/c36d8c20862e87bf6975bd48ac1ca40a9e634eaa/nailgun/entity_mixins.py#L657-L693
|
def delete(self, synchronous=True):
"""Delete the current entity.
Call :meth:`delete_raw` and check for an HTTP 4XX or 5XX response.
Return either the JSON-decoded response or information about a
completed foreman task.
:param synchronous: A boolean. What should happen if the server returns
an HTTP 202 (accepted) status code? Wait for the task to complete
if ``True``. Immediately return a response otherwise.
:returns: A dict. Either the JSON-decoded response or information about
a foreman task.
:raises: ``requests.exceptions.HTTPError`` if the response has an HTTP
4XX or 5XX status code.
:raises: ``ValueError`` If an HTTP 202 response is received and the
response JSON can not be decoded.
:raises nailgun.entity_mixins.TaskTimedOutError: If an HTTP 202
response is received, ``synchronous is True`` and the task times
out.
"""
response = self.delete_raw()
response.raise_for_status()
if (synchronous is True and
response.status_code == http_client.ACCEPTED):
return _poll_task(response.json()['id'], self._server_config)
elif (response.status_code == http_client.NO_CONTENT or
(response.status_code == http_client.OK and
hasattr(response, 'content') and
not response.content.strip())):
# "The server successfully processed the request, but is not
# returning any content. Usually used as a response to a successful
# delete request."
return
return response.json()
|
[
"def",
"delete",
"(",
"self",
",",
"synchronous",
"=",
"True",
")",
":",
"response",
"=",
"self",
".",
"delete_raw",
"(",
")",
"response",
".",
"raise_for_status",
"(",
")",
"if",
"(",
"synchronous",
"is",
"True",
"and",
"response",
".",
"status_code",
"==",
"http_client",
".",
"ACCEPTED",
")",
":",
"return",
"_poll_task",
"(",
"response",
".",
"json",
"(",
")",
"[",
"'id'",
"]",
",",
"self",
".",
"_server_config",
")",
"elif",
"(",
"response",
".",
"status_code",
"==",
"http_client",
".",
"NO_CONTENT",
"or",
"(",
"response",
".",
"status_code",
"==",
"http_client",
".",
"OK",
"and",
"hasattr",
"(",
"response",
",",
"'content'",
")",
"and",
"not",
"response",
".",
"content",
".",
"strip",
"(",
")",
")",
")",
":",
"# \"The server successfully processed the request, but is not",
"# returning any content. Usually used as a response to a successful",
"# delete request.\"",
"return",
"return",
"response",
".",
"json",
"(",
")"
] |
Delete the current entity.
Call :meth:`delete_raw` and check for an HTTP 4XX or 5XX response.
Return either the JSON-decoded response or information about a
completed foreman task.
:param synchronous: A boolean. What should happen if the server returns
an HTTP 202 (accepted) status code? Wait for the task to complete
if ``True``. Immediately return a response otherwise.
:returns: A dict. Either the JSON-decoded response or information about
a foreman task.
:raises: ``requests.exceptions.HTTPError`` if the response has an HTTP
4XX or 5XX status code.
:raises: ``ValueError`` If an HTTP 202 response is received and the
response JSON can not be decoded.
:raises nailgun.entity_mixins.TaskTimedOutError: If an HTTP 202
response is received, ``synchronous is True`` and the task times
out.
|
[
"Delete",
"the",
"current",
"entity",
"."
] |
python
|
train
| 44.972973 |
theonion/django-bulbs
|
bulbs/special_coverage/management/commands/migrate_active_to_published.py
|
https://github.com/theonion/django-bulbs/blob/0c0e6e3127a7dc487b96677fab95cacd2b3806da/bulbs/special_coverage/management/commands/migrate_active_to_published.py#L10-L13
|
def get_month_start_date(self):
"""Returns the first day of the current month"""
now = timezone.now()
return timezone.datetime(day=1, month=now.month, year=now.year, tzinfo=now.tzinfo)
|
[
"def",
"get_month_start_date",
"(",
"self",
")",
":",
"now",
"=",
"timezone",
".",
"now",
"(",
")",
"return",
"timezone",
".",
"datetime",
"(",
"day",
"=",
"1",
",",
"month",
"=",
"now",
".",
"month",
",",
"year",
"=",
"now",
".",
"year",
",",
"tzinfo",
"=",
"now",
".",
"tzinfo",
")"
] |
Returns the first day of the current month
|
[
"Returns",
"the",
"first",
"day",
"of",
"the",
"current",
"month"
] |
python
|
train
| 51.25 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.