body | body_hash | docstring | path | name | repository_name | lang | body_without_docstring
---|---|---|---|---|---|---|---
@utils.retry(exception.SnapshotIsBusy)
def wait_for_busy_snapshot(self, flexvol, snapshot_name):
'Checks for and handles a busy snapshot.\n\n If a snapshot is busy, for reasons other than cloning, an exception is\n raised immediately. Otherwise, wait for a period of time for the clone\n dependency to finish before giving up. If the snapshot is not busy then\n no action is taken and the method exits.\n '
snapshot = self.get_snapshot(flexvol, snapshot_name)
if (not snapshot['busy']):
LOG.debug('Backing consistency group snapshot %s available for deletion.', snapshot_name)
return
else:
LOG.debug('Snapshot %(snap)s for vol %(vol)s is busy, waiting for volume clone dependency to clear.', {'snap': snapshot_name, 'vol': flexvol})
raise exception.SnapshotIsBusy(snapshot_name=snapshot_name) | -4,402,261,725,681,839,000 | Checks for and handles a busy snapshot.
If a snapshot is busy, for reasons other than cloning, an exception is
raised immediately. Otherwise, wait for a period of time for the clone
dependency to finish before giving up. If the snapshot is not busy then
no action is taken and the method exits. | cinder/volume/drivers/netapp/dataontap/client/client_base.py | wait_for_busy_snapshot | sapcc/cinder | python | @utils.retry(exception.SnapshotIsBusy)
def wait_for_busy_snapshot(self, flexvol, snapshot_name):
'Checks for and handles a busy snapshot.\n\n If a snapshot is busy, for reasons other than cloning, an exception is\n raised immediately. Otherwise, wait for a period of time for the clone\n dependency to finish before giving up. If the snapshot is not busy then\n no action is taken and the method exits.\n '
snapshot = self.get_snapshot(flexvol, snapshot_name)
if (not snapshot['busy']):
LOG.debug('Backing consistency group snapshot %s available for deletion.', snapshot_name)
return
else:
LOG.debug('Snapshot %(snap)s for vol %(vol)s is busy, waiting for volume clone dependency to clear.', {'snap': snapshot_name, 'vol': flexvol})
raise exception.SnapshotIsBusy(snapshot_name=snapshot_name) |
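The `@utils.retry(exception.SnapshotIsBusy)` decorator above re-invokes the method whenever it raises that exception. A minimal, self-contained sketch of the same retry-on-exception pattern using only the standard library — the retry count and interval here are illustrative assumptions, not cinder's actual defaults:

```python
# Sketch of the retry-on-exception pattern implied by @utils.retry;
# retries/interval are illustrative assumptions.
import functools
import time


def retry_on(exc_type, retries=3, interval=1.0):
    """Re-invoke the wrapped function while it raises exc_type."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(retries):
                try:
                    return func(*args, **kwargs)
                except exc_type:
                    if attempt == retries - 1:
                        raise  # out of attempts: propagate the error
                    time.sleep(interval)
        return wrapper
    return decorator


@retry_on(ValueError, retries=5, interval=0.1)
def flaky():
    flaky.calls = getattr(flaky, 'calls', 0) + 1
    if flaky.calls < 3:
        raise ValueError('still busy')
    return 'done'


print(flaky())  # -> 'done' after two retried failures
```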
def mark_snapshot_for_deletion(self, volume, snapshot_name):
'Mark snapshot for deletion by renaming snapshot.'
return self.rename_snapshot(volume, snapshot_name, (DELETED_PREFIX + snapshot_name)) | 7,240,036,683,890,597,000 | Mark snapshot for deletion by renaming snapshot. | cinder/volume/drivers/netapp/dataontap/client/client_base.py | mark_snapshot_for_deletion | sapcc/cinder | python | def mark_snapshot_for_deletion(self, volume, snapshot_name):
return self.rename_snapshot(volume, snapshot_name, (DELETED_PREFIX + snapshot_name)) |
def rename_snapshot(self, volume, current_name, new_name):
'Renames a snapshot.'
api_args = {'volume': volume, 'current-name': current_name, 'new-name': new_name}
return self.connection.send_request('snapshot-rename', api_args) | -129,262,650,287,414,140 | Renames a snapshot. | cinder/volume/drivers/netapp/dataontap/client/client_base.py | rename_snapshot | sapcc/cinder | python | def rename_snapshot(self, volume, current_name, new_name):
api_args = {'volume': volume, 'current-name': current_name, 'new-name': new_name}
return self.connection.send_request('snapshot-rename', api_args) |
def test_missing_cwl_version():
'No cwlVersion in the workflow.'
assert (main([get_data('tests/wf/missing_cwlVersion.cwl')]) == 1) | -6,386,936,996,461,566,000 | No cwlVersion in the workflow. | tests/test_cwl_version.py | test_missing_cwl_version | jayvdb/cwltool | python | def test_missing_cwl_version():
assert (main([get_data('tests/wf/missing_cwlVersion.cwl')]) == 1) |
def test_incorrect_cwl_version():
'Using cwlVersion: v0.1 in the workflow.'
assert (main([get_data('tests/wf/wrong_cwlVersion.cwl')]) == 1) | 3,053,709,548,438,482,000 | Using cwlVersion: v0.1 in the workflow. | tests/test_cwl_version.py | test_incorrect_cwl_version | jayvdb/cwltool | python | def test_incorrect_cwl_version():
assert (main([get_data('tests/wf/wrong_cwlVersion.cwl')]) == 1) |
def should_retry_start_pod(exception: Exception):
'Check if an Exception indicates a transient error and warrants retrying'
if isinstance(exception, ApiException):
return (exception.status == 409)
return False | 5,044,609,249,089,285,000 | Check if an Exception indicates a transient error and warrants retrying | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | should_retry_start_pod | kevin0120/airflow | python | def should_retry_start_pod(exception: Exception):
if isinstance(exception, ApiException):
return (exception.status == 409)
return False |
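This predicate is exactly what `tenacity.retry_if_exception` consumes in `start_pod` further down: retry only when the predicate returns True for the raised exception. A self-contained sketch (requires `tenacity`; `FakeApiException` is a stand-in for the kubernetes `ApiException`, and the 409 status mirrors the conflict check above):

```python
import tenacity


class FakeApiException(Exception):
    """Stand-in for kubernetes.client.rest.ApiException."""
    def __init__(self, status):
        super().__init__(f'status={status}')
        self.status = status


def retryable(exc: Exception) -> bool:
    # Same shape as should_retry_start_pod: only 409 conflicts retry.
    return isinstance(exc, FakeApiException) and exc.status == 409


attempts = {'n': 0}


@tenacity.retry(stop=tenacity.stop_after_attempt(3), reraise=True,
                retry=tenacity.retry_if_exception(retryable))
def create():
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise FakeApiException(409)  # transient conflict: retried
    return 'created'


print(create(), 'after', attempts['n'], 'attempts')
```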
def __init__(self, kube_client: client.CoreV1Api=None, in_cluster: bool=True, cluster_context: Optional[str]=None, extract_xcom: bool=False):
'\n Creates the launcher.\n\n :param kube_client: kubernetes client\n :param in_cluster: whether we are in cluster\n :param cluster_context: context of the cluster\n :param extract_xcom: whether we should extract xcom\n '
super().__init__()
self._client = (kube_client or get_kube_client(in_cluster=in_cluster, cluster_context=cluster_context))
self._watch = watch.Watch()
self.extract_xcom = extract_xcom | 4,478,250,144,300,716,000 | Creates the launcher.
:param kube_client: kubernetes client
:param in_cluster: whether we are in cluster
:param cluster_context: context of the cluster
:param extract_xcom: whether we should extract xcom | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | __init__ | kevin0120/airflow | python | def __init__(self, kube_client: client.CoreV1Api=None, in_cluster: bool=True, cluster_context: Optional[str]=None, extract_xcom: bool=False):
'\n Creates the launcher.\n\n :param kube_client: kubernetes client\n :param in_cluster: whether we are in cluster\n :param cluster_context: context of the cluster\n :param extract_xcom: whether we should extract xcom\n '
super().__init__()
self._client = (kube_client or get_kube_client(in_cluster=in_cluster, cluster_context=cluster_context))
self._watch = watch.Watch()
self.extract_xcom = extract_xcom |
def run_pod_async(self, pod: V1Pod, **kwargs):
'Runs POD asynchronously'
pod_mutation_hook(pod)
sanitized_pod = self._client.api_client.sanitize_for_serialization(pod)
json_pod = json.dumps(sanitized_pod, indent=2)
self.log.debug('Pod Creation Request: \n%s', json_pod)
try:
resp = self._client.create_namespaced_pod(body=sanitized_pod, namespace=pod.metadata.namespace, **kwargs)
self.log.debug('Pod Creation Response: %s', resp)
except Exception as e:
self.log.exception('Exception when attempting to create Namespaced Pod: %s', json_pod)
raise e
return resp | -3,677,110,547,369,796,600 | Runs POD asynchronously | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | run_pod_async | kevin0120/airflow | python | def run_pod_async(self, pod: V1Pod, **kwargs):
pod_mutation_hook(pod)
sanitized_pod = self._client.api_client.sanitize_for_serialization(pod)
json_pod = json.dumps(sanitized_pod, indent=2)
self.log.debug('Pod Creation Request: \n%s', json_pod)
try:
resp = self._client.create_namespaced_pod(body=sanitized_pod, namespace=pod.metadata.namespace, **kwargs)
self.log.debug('Pod Creation Response: %s', resp)
except Exception as e:
self.log.exception('Exception when attempting to create Namespaced Pod: %s', json_pod)
raise e
return resp |
def delete_pod(self, pod: V1Pod):
'Deletes POD'
try:
self._client.delete_namespaced_pod(pod.metadata.name, pod.metadata.namespace, body=client.V1DeleteOptions())
except ApiException as e:
if (e.status != 404):
raise | -814,575,188,438,192,100 | Deletes POD | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | delete_pod | kevin0120/airflow | python | def delete_pod(self, pod: V1Pod):
try:
self._client.delete_namespaced_pod(pod.metadata.name, pod.metadata.namespace, body=client.V1DeleteOptions())
except ApiException as e:
if (e.status != 404):
raise |
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_random_exponential(), reraise=True, retry=tenacity.retry_if_exception(should_retry_start_pod))
def start_pod(self, pod: V1Pod, startup_timeout: int=120):
'\n Launches the pod synchronously and waits for completion.\n\n :param pod:\n :param startup_timeout: Timeout for startup of the pod (if pod is pending for too long, fails task)\n :return:\n '
resp = self.run_pod_async(pod)
curr_time = dt.now()
if (resp.status.start_time is None):
while self.pod_not_started(pod):
self.log.warning('Pod not yet started: %s', pod.metadata.name)
delta = (dt.now() - curr_time)
if (delta.total_seconds() >= startup_timeout):
raise AirflowException('Pod took too long to start')
time.sleep(1) | -4,370,534,234,727,821,000 | Launches the pod synchronously and waits for completion.
:param pod:
:param startup_timeout: Timeout for startup of the pod (if pod is pending for too long, fails task)
:return: | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | start_pod | kevin0120/airflow | python | @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_random_exponential(), reraise=True, retry=tenacity.retry_if_exception(should_retry_start_pod))
def start_pod(self, pod: V1Pod, startup_timeout: int=120):
'\n Launches the pod synchronously and waits for completion.\n\n :param pod:\n :param startup_timeout: Timeout for startup of the pod (if pod is pending for too long, fails task)\n :return:\n '
resp = self.run_pod_async(pod)
curr_time = dt.now()
if (resp.status.start_time is None):
while self.pod_not_started(pod):
self.log.warning('Pod not yet started: %s', pod.metadata.name)
delta = (dt.now() - curr_time)
if (delta.total_seconds() >= startup_timeout):
raise AirflowException('Pod took too long to start')
time.sleep(1) |
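Stripped of kubernetes specifics, the startup wait in `start_pod` is a poll-until-ready loop with a deadline. A runnable sketch of the same shape — `wait_until` and `TimeoutError` are illustrative stand-ins for `pod_not_started` and `AirflowException`:

```python
import time
from datetime import datetime as dt


def wait_until(ready, timeout_sec, poll_sec=1.0):
    """Poll ready() until it returns True or the deadline passes."""
    start = dt.now()
    while not ready():
        if (dt.now() - start).total_seconds() >= timeout_sec:
            raise TimeoutError('resource took too long to start')
        time.sleep(poll_sec)


# Demo: a condition that becomes true after ~0.3 seconds.
deadline = dt.now().timestamp() + 0.3
wait_until(lambda: dt.now().timestamp() >= deadline,
           timeout_sec=5, poll_sec=0.1)
print('ready')
```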
def monitor_pod(self, pod: V1Pod, get_logs: bool) -> Tuple[(State, V1Pod, Optional[str])]:
'\n Monitors a pod and returns the final state, pod and xcom result\n\n :param pod: pod spec that will be monitored\n :param get_logs: whether to read the logs locally\n :return: Tuple[State, Optional[str]]\n '
if get_logs:
read_logs_since_sec = None
last_log_time = None
while True:
logs = self.read_pod_logs(pod, timestamps=True, since_seconds=read_logs_since_sec)
for line in logs:
(timestamp, message) = self.parse_log_line(line.decode('utf-8'))
last_log_time = pendulum.parse(timestamp)
self.log.info(message)
time.sleep(1)
if (not self.base_container_is_running(pod)):
break
self.log.warning('Pod %s log read interrupted', pod.metadata.name)
if last_log_time:
delta = (pendulum.now() - last_log_time)
read_logs_since_sec = math.ceil(delta.total_seconds())
result = None
if self.extract_xcom:
while self.base_container_is_running(pod):
self.log.info('Container %s has state %s', pod.metadata.name, State.RUNNING)
time.sleep(2)
result = self._extract_xcom(pod)
self.log.info(result)
result = json.loads(result)
while self.pod_is_running(pod):
self.log.info('Pod %s has state %s', pod.metadata.name, State.RUNNING)
time.sleep(2)
remote_pod = self.read_pod(pod)
return (self._task_status(remote_pod), remote_pod, result) | -4,386,082,111,302,640,000 | Monitors a pod and returns the final state, pod and xcom result
:param pod: pod spec that will be monitored
:param get_logs: whether to read the logs locally
:return: Tuple[State, Optional[str]] | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | monitor_pod | kevin0120/airflow | python | def monitor_pod(self, pod: V1Pod, get_logs: bool) -> Tuple[(State, V1Pod, Optional[str])]:
'\n Monitors a pod and returns the final state, pod and xcom result\n\n :param pod: pod spec that will be monitored\n :param get_logs: whether to read the logs locally\n :return: Tuple[State, Optional[str]]\n '
if get_logs:
read_logs_since_sec = None
last_log_time = None
while True:
logs = self.read_pod_logs(pod, timestamps=True, since_seconds=read_logs_since_sec)
for line in logs:
(timestamp, message) = self.parse_log_line(line.decode('utf-8'))
last_log_time = pendulum.parse(timestamp)
self.log.info(message)
time.sleep(1)
if (not self.base_container_is_running(pod)):
break
self.log.warning('Pod %s log read interrupted', pod.metadata.name)
if last_log_time:
delta = (pendulum.now() - last_log_time)
read_logs_since_sec = math.ceil(delta.total_seconds())
result = None
if self.extract_xcom:
while self.base_container_is_running(pod):
self.log.info('Container %s has state %s', pod.metadata.name, State.RUNNING)
time.sleep(2)
result = self._extract_xcom(pod)
self.log.info(result)
result = json.loads(result)
while self.pod_is_running(pod):
self.log.info('Pod %s has state %s', pod.metadata.name, State.RUNNING)
time.sleep(2)
remote_pod = self.read_pod(pod)
return (self._task_status(remote_pod), remote_pod, result) |
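When the log stream is interrupted, `monitor_pod` resumes from the last timestamp it saw by converting the gap into a `since_seconds` offset, rounded up so no line is skipped. The same arithmetic, with the stdlib `datetime` module standing in for `pendulum`:

```python
import math
from datetime import datetime, timedelta, timezone

# Pretend the last log line we parsed was ~12.4 seconds ago.
last_log_time = datetime.now(timezone.utc) - timedelta(seconds=12.4)
delta = datetime.now(timezone.utc) - last_log_time
read_logs_since_sec = math.ceil(delta.total_seconds())
print(read_logs_since_sec)  # ~13: rounded up so no line is lost
```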
def parse_log_line(self, line: str) -> Tuple[(str, str)]:
'\n Parse K8s log line and returns the final state\n\n :param line: k8s log line\n :type line: str\n :return: timestamp and log message\n :rtype: Tuple[str, str]\n '
split_at = line.find(' ')
if (split_at == (- 1)):
raise Exception(f'Log not in "{{timestamp}} {{log}}" format. Got: {line}')
timestamp = line[:split_at]
message = line[(split_at + 1):].rstrip()
return (timestamp, message) | -177,840,822,609,086,880 | Parse K8s log line and returns the final state
:param line: k8s log line
:type line: str
:return: timestamp and log message
:rtype: Tuple[str, str] | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | parse_log_line | kevin0120/airflow | python | def parse_log_line(self, line: str) -> Tuple[(str, str)]:
'\n Parse K8s log line and returns the final state\n\n :param line: k8s log line\n :type line: str\n :return: timestamp and log message\n :rtype: Tuple[str, str]\n '
split_at = line.find(' ')
if (split_at == (- 1)):
raise Exception(f'Log not in "{{timestamp}} {{log}}" format. Got: {line}')
timestamp = line[:split_at]
message = line[(split_at + 1):].rstrip()
return (timestamp, message) |
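A standalone replica of the split performed by `parse_log_line`: with `timestamps=True`, kubernetes prefixes each log line with an RFC 3339 timestamp and a single space, so everything before the first space is the timestamp and the rest is the message.

```python
def parse_log_line(line: str):
    """Split a 'timestamp message' kubernetes log line."""
    split_at = line.find(' ')
    if split_at == -1:
        raise ValueError(f'Log not in "timestamp log" format: {line!r}')
    return line[:split_at], line[split_at + 1:].rstrip()


ts, msg = parse_log_line('2021-05-03T09:15:04.123Z container started\n')
print(ts)   # 2021-05-03T09:15:04.123Z
print(msg)  # container started
```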
def pod_not_started(self, pod: V1Pod):
'Tests if pod has not started'
state = self._task_status(self.read_pod(pod))
return (state == State.QUEUED) | 3,744,247,621,719,285,000 | Tests if pod has not started | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | pod_not_started | kevin0120/airflow | python | def pod_not_started(self, pod: V1Pod):
state = self._task_status(self.read_pod(pod))
return (state == State.QUEUED) |
def pod_is_running(self, pod: V1Pod):
'Tests if pod is running'
state = self._task_status(self.read_pod(pod))
return (state not in (State.SUCCESS, State.FAILED)) | 714,326,589,583,116,200 | Tests if pod is running | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | pod_is_running | kevin0120/airflow | python | def pod_is_running(self, pod: V1Pod):
state = self._task_status(self.read_pod(pod))
return (state not in (State.SUCCESS, State.FAILED)) |
def base_container_is_running(self, pod: V1Pod):
'Tests if base container is running'
event = self.read_pod(pod)
status = next(iter(filter((lambda s: (s.name == 'base')), event.status.container_statuses)), None)
if (not status):
return False
return (status.state.running is not None) | -6,574,634,565,868,021,000 | Tests if base container is running | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | base_container_is_running | kevin0120/airflow | python | def base_container_is_running(self, pod: V1Pod):
event = self.read_pod(pod)
status = next(iter(filter((lambda s: (s.name == 'base')), event.status.container_statuses)), None)
if (not status):
return False
return (status.state.running is not None) |
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod_logs(self, pod: V1Pod, tail_lines: Optional[int]=None, timestamps: bool=False, since_seconds: Optional[int]=None):
'Reads log from the POD'
additional_kwargs = {}
if since_seconds:
additional_kwargs['since_seconds'] = since_seconds
if tail_lines:
additional_kwargs['tail_lines'] = tail_lines
try:
return self._client.read_namespaced_pod_log(name=pod.metadata.name, namespace=pod.metadata.namespace, container='base', follow=True, timestamps=timestamps, _preload_content=False, **additional_kwargs)
except BaseHTTPError as e:
raise AirflowException(f'There was an error reading the kubernetes API: {e}') | -2,652,322,289,505,761,300 | Reads log from the POD | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | read_pod_logs | kevin0120/airflow | python | @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod_logs(self, pod: V1Pod, tail_lines: Optional[int]=None, timestamps: bool=False, since_seconds: Optional[int]=None):
additional_kwargs = {}
if since_seconds:
additional_kwargs['since_seconds'] = since_seconds
if tail_lines:
additional_kwargs['tail_lines'] = tail_lines
try:
return self._client.read_namespaced_pod_log(name=pod.metadata.name, namespace=pod.metadata.namespace, container='base', follow=True, timestamps=timestamps, _preload_content=False, **additional_kwargs)
except BaseHTTPError as e:
raise AirflowException(f'There was an error reading the kubernetes API: {e}') |
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod_events(self, pod):
'Reads events from the POD'
try:
return self._client.list_namespaced_event(namespace=pod.metadata.namespace, field_selector=f'involvedObject.name={pod.metadata.name}')
except BaseHTTPError as e:
raise AirflowException(f'There was an error reading the kubernetes API: {e}') | 6,136,619,614,378,336,000 | Reads events from the POD | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | read_pod_events | kevin0120/airflow | python | @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod_events(self, pod):
try:
return self._client.list_namespaced_event(namespace=pod.metadata.namespace, field_selector=f'involvedObject.name={pod.metadata.name}')
except BaseHTTPError as e:
raise AirflowException(f'There was an error reading the kubernetes API: {e}') |
@tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod(self, pod: V1Pod):
'Read POD information'
try:
return self._client.read_namespaced_pod(pod.metadata.name, pod.metadata.namespace)
except BaseHTTPError as e:
raise AirflowException(f'There was an error reading the kubernetes API: {e}') | -1,437,380,743,902,670,600 | Read POD information | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | read_pod | kevin0120/airflow | python | @tenacity.retry(stop=tenacity.stop_after_attempt(3), wait=tenacity.wait_exponential(), reraise=True)
def read_pod(self, pod: V1Pod):
try:
return self._client.read_namespaced_pod(pod.metadata.name, pod.metadata.namespace)
except BaseHTTPError as e:
raise AirflowException(f'There was an error reading the kubernetes API: {e}') |
def process_status(self, job_id, status):
'Process status information for the JOB'
status = status.lower()
if (status == PodStatus.PENDING):
return State.QUEUED
elif (status == PodStatus.FAILED):
self.log.error('Event with job id %s Failed', job_id)
return State.FAILED
elif (status == PodStatus.SUCCEEDED):
self.log.info('Event with job id %s Succeeded', job_id)
return State.SUCCESS
elif (status == PodStatus.RUNNING):
return State.RUNNING
else:
self.log.error('Event: Invalid state %s on job %s', status, job_id)
return State.FAILED | 8,111,311,642,689,768,000 | Process status information for the JOB | airflow/providers/cncf/kubernetes/utils/pod_launcher.py | process_status | kevin0120/airflow | python | def process_status(self, job_id, status):
status = status.lower()
if (status == PodStatus.PENDING):
return State.QUEUED
elif (status == PodStatus.FAILED):
self.log.error('Event with job id %s Failed', job_id)
return State.FAILED
elif (status == PodStatus.SUCCEEDED):
self.log.info('Event with job id %s Succeeded', job_id)
return State.SUCCESS
elif (status == PodStatus.RUNNING):
return State.RUNNING
else:
self.log.error('Event: Invalid state %s on job %s', status, job_id)
return State.FAILED |
def get_host_error_message():
'Return host error message.'
buf = create_string_buffer(PM_HOST_ERROR_MSG_LEN)
lib.Pm_GetHostErrorText(buf, PM_HOST_ERROR_MSG_LEN)
return buf.raw.decode().rstrip('\x00') | 2,564,961,688,497,364,500 | Return host error message. | mido/backends/portmidi_init.py | get_host_error_message | EnjoyLifeFund/macHighSierra-py36-pkgs | python | def get_host_error_message():
buf = create_string_buffer(PM_HOST_ERROR_MSG_LEN)
lib.Pm_GetHostErrorText(buf, PM_HOST_ERROR_MSG_LEN)
return buf.raw.decode().rstrip('\x00') |
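The pattern here is a C-string round trip: allocate a fixed-size buffer, let the C library write a NUL-terminated string into it, then strip the zero padding on decode. The same steps without portmidi, using `ctypes.memmove` as a stand-in for the library call:

```python
from ctypes import create_string_buffer, memmove

buf = create_string_buffer(64)               # 64 zero bytes
memmove(buf, b'device unavailable\x00', 19)  # stand-in for the C call
print(buf.raw.decode().rstrip('\x00'))       # 'device unavailable'
```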
def layer_op(self, inputs, is_training=True):
'\n Consists of::\n\n (inputs)--conv_0-o-conv_1--conv_2-+-(conv_res)--down_sample--\n | |\n o----------------o\n\n conv_0, conv_res is also returned for feature forwarding purpose\n '
conv_0 = Conv(n_output_chns=self.n_output_chns, kernel_size=self.kernel_size, acti_func=self.acti_func, with_bias=False, feature_normalization='batch', **self.conv_param)(inputs, is_training)
conv_res = ResUnit(n_output_chns=self.n_output_chns, kernel_size=self.kernel_size, acti_func=self.acti_func, type_string=self.type_string, **self.conv_param)(conv_0, is_training)
conv_down = Down('Max', kernel_size=self.downsample_kernel_size, stride=self.downsample_stride)(conv_res)
return (conv_down, conv_0, conv_res) | 5,549,436,719,628,099,000 | Consists of::
(inputs)--conv_0-o-conv_1--conv_2-+-(conv_res)--down_sample--
| |
o----------------o
conv_0, conv_res is also returned for feature forwarding purpose | niftynet/layer/downsample_res_block.py | layer_op | BRAINSia/NiftyNet | python | def layer_op(self, inputs, is_training=True):
'\n Consists of::\n\n (inputs)--conv_0-o-conv_1--conv_2-+-(conv_res)--down_sample--\n | |\n o----------------o\n\n conv_0, conv_res is also returned for feature forwarding purpose\n '
conv_0 = Conv(n_output_chns=self.n_output_chns, kernel_size=self.kernel_size, acti_func=self.acti_func, with_bias=False, feature_normalization='batch', **self.conv_param)(inputs, is_training)
conv_res = ResUnit(n_output_chns=self.n_output_chns, kernel_size=self.kernel_size, acti_func=self.acti_func, type_string=self.type_string, **self.conv_param)(conv_0, is_training)
conv_down = Down('Max', kernel_size=self.downsample_kernel_size, stride=self.downsample_stride)(conv_res)
return (conv_down, conv_0, conv_res) |
def _align_32(f):
'Align to the next 32-bit position in a file'
pos = f.tell()
if ((pos % 4) != 0):
f.seek(((pos + 4) - (pos % 4)))
return | 6,305,285,988,810,641,000 | Align to the next 32-bit position in a file | scipy/io/idl.py | _align_32 | ikamensh/scipy | python | def _align_32(f):
pos = f.tell()
if ((pos % 4) != 0):
f.seek(((pos + 4) - (pos % 4)))
return |
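The alignment rule on an in-memory stream: after consuming a byte count that is not a multiple of four, the cursor jumps forward to the next 4-byte boundary.

```python
import io


def align_32(f):
    """Advance the cursor to the next multiple of 4, as _align_32 does."""
    pos = f.tell()
    if pos % 4 != 0:
        f.seek(pos + 4 - pos % 4)


f = io.BytesIO(b'abcdefgh')
f.read(3)        # cursor at 3
align_32(f)
print(f.tell())  # 4
```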
def _skip_bytes(f, n):
'Skip `n` bytes'
f.read(n)
return | -7,825,523,354,475,326,000 | Skip `n` bytes | scipy/io/idl.py | _skip_bytes | ikamensh/scipy | python | def _skip_bytes(f, n):
f.read(n)
return |
def _read_bytes(f, n):
'Read the next `n` bytes'
return f.read(n) | 4,321,045,088,081,049,000 | Read the next `n` bytes | scipy/io/idl.py | _read_bytes | ikamensh/scipy | python | def _read_bytes(f, n):
return f.read(n) |
def _read_byte(f):
'Read a single byte'
return np.uint8(struct.unpack('>B', f.read(4)[:1])[0]) | 9,107,050,557,744,537,000 | Read a single byte | scipy/io/idl.py | _read_byte | ikamensh/scipy | python | def _read_byte(f):
return np.uint8(struct.unpack('>B', f.read(4)[:1])[0]) |
def _read_long(f):
'Read a signed 32-bit integer'
return np.int32(struct.unpack('>l', f.read(4))[0]) | -8,751,783,939,665,687,000 | Read a signed 32-bit integer | scipy/io/idl.py | _read_long | ikamensh/scipy | python | def _read_long(f):
return np.int32(struct.unpack('>l', f.read(4))[0]) |
def _read_int16(f):
'Read a signed 16-bit integer'
return np.int16(struct.unpack('>h', f.read(4)[2:4])[0]) | 6,401,136,554,309,534,000 | Read a signed 16-bit integer | scipy/io/idl.py | _read_int16 | ikamensh/scipy | python | def _read_int16(f):
return np.int16(struct.unpack('>h', f.read(4)[2:4])[0]) |
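The slicing in `_read_int16` reflects how IDL SAVE files store scalars: every value is padded to a 4-byte big-endian slot, so a 16-bit integer occupies the last two bytes of its slot. A runnable illustration:

```python
import io
import struct

f = io.BytesIO(b'\x00\x00\x01\x02')            # one padded int16 slot
value = struct.unpack('>h', f.read(4)[2:4])[0]  # skip the 2 pad bytes
print(value)  # 0x0102 == 258
```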
def _read_int32(f):
'Read a signed 32-bit integer'
return np.int32(struct.unpack('>i', f.read(4))[0]) | -1,097,645,181,429,358,300 | Read a signed 32-bit integer | scipy/io/idl.py | _read_int32 | ikamensh/scipy | python | def _read_int32(f):
return np.int32(struct.unpack('>i', f.read(4))[0]) |
def _read_int64(f):
'Read a signed 64-bit integer'
return np.int64(struct.unpack('>q', f.read(8))[0]) | 6,429,411,224,549,086,000 | Read a signed 64-bit integer | scipy/io/idl.py | _read_int64 | ikamensh/scipy | python | def _read_int64(f):
return np.int64(struct.unpack('>q', f.read(8))[0]) |
def _read_uint16(f):
'Read an unsigned 16-bit integer'
return np.uint16(struct.unpack('>H', f.read(4)[2:4])[0]) | -4,292,512,204,628,479,000 | Read an unsigned 16-bit integer | scipy/io/idl.py | _read_uint16 | ikamensh/scipy | python | def _read_uint16(f):
return np.uint16(struct.unpack('>H', f.read(4)[2:4])[0]) |
def _read_uint32(f):
'Read an unsigned 32-bit integer'
return np.uint32(struct.unpack('>I', f.read(4))[0]) | -897,737,826,672,022,300 | Read an unsigned 32-bit integer | scipy/io/idl.py | _read_uint32 | ikamensh/scipy | python | def _read_uint32(f):
return np.uint32(struct.unpack('>I', f.read(4))[0]) |
def _read_uint64(f):
'Read an unsigned 64-bit integer'
return np.uint64(struct.unpack('>Q', f.read(8))[0]) | -8,666,359,356,249,165,000 | Read an unsigned 64-bit integer | scipy/io/idl.py | _read_uint64 | ikamensh/scipy | python | def _read_uint64(f):
return np.uint64(struct.unpack('>Q', f.read(8))[0]) |
def _read_float32(f):
'Read a 32-bit float'
return np.float32(struct.unpack('>f', f.read(4))[0]) | 3,231,013,858,213,479,000 | Read a 32-bit float | scipy/io/idl.py | _read_float32 | ikamensh/scipy | python | def _read_float32(f):
return np.float32(struct.unpack('>f', f.read(4))[0]) |
def _read_float64(f):
'Read a 64-bit float'
return np.float64(struct.unpack('>d', f.read(8))[0]) | -1,608,278,296,860,134,100 | Read a 64-bit float | scipy/io/idl.py | _read_float64 | ikamensh/scipy | python | def _read_float64(f):
return np.float64(struct.unpack('>d', f.read(8))[0]) |
def _read_string(f):
'Read a string'
length = _read_long(f)
if (length > 0):
chars = _read_bytes(f, length)
_align_32(f)
chars = asstr(chars)
else:
chars = ''
return chars | -243,961,948,650,962,400 | Read a string | scipy/io/idl.py | _read_string | ikamensh/scipy | python | def _read_string(f):
length = _read_long(f)
if (length > 0):
chars = _read_bytes(f, length)
_align_32(f)
chars = asstr(chars)
else:
        chars = ''
return chars |
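How a length-prefixed, 32-bit-aligned string is laid out on disk, and how `_read_string`'s steps consume it — the toy payload is built by hand:

```python
import io
import struct

# Big-endian length word, 5 characters, then 3 pad bytes to reach
# the next 4-byte boundary.
payload = struct.pack('>l', 5) + b'hello' + b'\x00\x00\x00'
f = io.BytesIO(payload)

length = struct.unpack('>l', f.read(4))[0]
chars = f.read(length).decode()
f.seek((f.tell() + 3) // 4 * 4)   # same effect as _align_32
print(chars, f.tell())            # hello 12
```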
def _read_string_data(f):
'Read a data string (length is specified twice)'
length = _read_long(f)
if (length > 0):
length = _read_long(f)
string_data = _read_bytes(f, length)
_align_32(f)
else:
string_data = ''
return string_data | -6,102,874,539,855,738,000 | Read a data string (length is specified twice) | scipy/io/idl.py | _read_string_data | ikamensh/scipy | python | def _read_string_data(f):
length = _read_long(f)
if (length > 0):
length = _read_long(f)
string_data = _read_bytes(f, length)
_align_32(f)
else:
        string_data = ''
return string_data |
def _read_data(f, dtype):
'Read a variable with a specified data type'
if (dtype == 1):
if (_read_int32(f) != 1):
raise Exception('Error occurred while reading byte variable')
return _read_byte(f)
elif (dtype == 2):
return _read_int16(f)
elif (dtype == 3):
return _read_int32(f)
elif (dtype == 4):
return _read_float32(f)
elif (dtype == 5):
return _read_float64(f)
elif (dtype == 6):
real = _read_float32(f)
imag = _read_float32(f)
return np.complex64((real + (imag * 1j)))
elif (dtype == 7):
return _read_string_data(f)
elif (dtype == 8):
raise Exception('Should not be here - please report this')
elif (dtype == 9):
real = _read_float64(f)
imag = _read_float64(f)
return np.complex128((real + (imag * 1j)))
elif (dtype == 10):
return Pointer(_read_int32(f))
elif (dtype == 11):
return ObjectPointer(_read_int32(f))
elif (dtype == 12):
return _read_uint16(f)
elif (dtype == 13):
return _read_uint32(f)
elif (dtype == 14):
return _read_int64(f)
elif (dtype == 15):
return _read_uint64(f)
else:
raise Exception(('Unknown IDL type: %i - please report this' % dtype)) | 9,175,905,220,718,660,000 | Read a variable with a specified data type | scipy/io/idl.py | _read_data | ikamensh/scipy | python | def _read_data(f, dtype):
if (dtype == 1):
if (_read_int32(f) != 1):
raise Exception('Error occurred while reading byte variable')
return _read_byte(f)
elif (dtype == 2):
return _read_int16(f)
elif (dtype == 3):
return _read_int32(f)
elif (dtype == 4):
return _read_float32(f)
elif (dtype == 5):
return _read_float64(f)
elif (dtype == 6):
real = _read_float32(f)
imag = _read_float32(f)
return np.complex64((real + (imag * 1j)))
elif (dtype == 7):
return _read_string_data(f)
elif (dtype == 8):
raise Exception('Should not be here - please report this')
elif (dtype == 9):
real = _read_float64(f)
imag = _read_float64(f)
return np.complex128((real + (imag * 1j)))
elif (dtype == 10):
return Pointer(_read_int32(f))
elif (dtype == 11):
return ObjectPointer(_read_int32(f))
elif (dtype == 12):
return _read_uint16(f)
elif (dtype == 13):
return _read_uint32(f)
elif (dtype == 14):
return _read_int64(f)
elif (dtype == 15):
return _read_uint64(f)
else:
raise Exception(('Unknown IDL type: %i - please report this' % dtype)) |
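The typecode ladder in `_read_data` could equally be written as a dispatch table, the same idea behind `DTYPE_DICT` elsewhere in this module. A sketch with stand-in readers (the real readers take the open file object):

```python
# Stand-in readers keyed by IDL typecode; the real ones call
# struct.unpack on the file object.
readers = {
    2: lambda f: ('int16', f),
    3: lambda f: ('int32', f),
    4: lambda f: ('float32', f),
}


def read_data(f, typecode):
    try:
        return readers[typecode](f)
    except KeyError:
        raise ValueError(f'Unknown IDL type: {typecode}')


print(read_data('<file>', 3))  # ('int32', '<file>')
```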
def _read_structure(f, array_desc, struct_desc):
'\n Read a structure, with the array and structure descriptors given as\n `array_desc` and `structure_desc` respectively.\n '
nrows = array_desc['nelements']
columns = struct_desc['tagtable']
dtype = []
for col in columns:
if (col['structure'] or col['array']):
dtype.append(((col['name'].lower(), col['name']), np.object_))
elif (col['typecode'] in DTYPE_DICT):
dtype.append(((col['name'].lower(), col['name']), DTYPE_DICT[col['typecode']]))
else:
raise Exception(('Variable type %i not implemented' % col['typecode']))
structure = np.recarray((nrows,), dtype=dtype)
for i in range(nrows):
for col in columns:
dtype = col['typecode']
if col['structure']:
structure[col['name']][i] = _read_structure(f, struct_desc['arrtable'][col['name']], struct_desc['structtable'][col['name']])
elif col['array']:
structure[col['name']][i] = _read_array(f, dtype, struct_desc['arrtable'][col['name']])
else:
structure[col['name']][i] = _read_data(f, dtype)
if (array_desc['ndims'] > 1):
dims = array_desc['dims'][:int(array_desc['ndims'])]
dims.reverse()
structure = structure.reshape(dims)
return structure | 6,718,153,048,390,739,000 | Read a structure, with the array and structure descriptors given as
`array_desc` and `structure_desc` respectively. | scipy/io/idl.py | _read_structure | ikamensh/scipy | python | def _read_structure(f, array_desc, struct_desc):
'\n Read a structure, with the array and structure descriptors given as\n `array_desc` and `structure_desc` respectively.\n '
nrows = array_desc['nelements']
columns = struct_desc['tagtable']
dtype = []
for col in columns:
if (col['structure'] or col['array']):
dtype.append(((col['name'].lower(), col['name']), np.object_))
elif (col['typecode'] in DTYPE_DICT):
dtype.append(((col['name'].lower(), col['name']), DTYPE_DICT[col['typecode']]))
else:
raise Exception(('Variable type %i not implemented' % col['typecode']))
structure = np.recarray((nrows,), dtype=dtype)
for i in range(nrows):
for col in columns:
dtype = col['typecode']
if col['structure']:
structure[col['name']][i] = _read_structure(f, struct_desc['arrtable'][col['name']], struct_desc['structtable'][col['name']])
elif col['array']:
structure[col['name']][i] = _read_array(f, dtype, struct_desc['arrtable'][col['name']])
else:
structure[col['name']][i] = _read_data(f, dtype)
if (array_desc['ndims'] > 1):
dims = array_desc['dims'][:int(array_desc['ndims'])]
dims.reverse()
structure = structure.reshape(dims)
return structure |
def _read_array(f, typecode, array_desc):
'\n Read an array of type `typecode`, with the array descriptor given as\n `array_desc`.\n '
if (typecode in [1, 3, 4, 5, 6, 9, 13, 14, 15]):
if (typecode == 1):
nbytes = _read_int32(f)
if (nbytes != array_desc['nbytes']):
warnings.warn('Not able to verify number of bytes from header')
array = np.frombuffer(f.read(array_desc['nbytes']), dtype=DTYPE_DICT[typecode])
elif (typecode in [2, 12]):
array = np.frombuffer(f.read((array_desc['nbytes'] * 2)), dtype=DTYPE_DICT[typecode])[1::2]
else:
array = []
for i in range(array_desc['nelements']):
dtype = typecode
data = _read_data(f, dtype)
array.append(data)
array = np.array(array, dtype=np.object_)
if (array_desc['ndims'] > 1):
dims = array_desc['dims'][:int(array_desc['ndims'])]
dims.reverse()
array = array.reshape(dims)
_align_32(f)
return array | 8,033,665,999,947,200,000 | Read an array of type `typecode`, with the array descriptor given as
`array_desc`. | scipy/io/idl.py | _read_array | ikamensh/scipy | python | def _read_array(f, typecode, array_desc):
'\n Read an array of type `typecode`, with the array descriptor given as\n `array_desc`.\n '
if (typecode in [1, 3, 4, 5, 6, 9, 13, 14, 15]):
if (typecode == 1):
nbytes = _read_int32(f)
if (nbytes != array_desc['nbytes']):
warnings.warn('Not able to verify number of bytes from header')
array = np.frombuffer(f.read(array_desc['nbytes']), dtype=DTYPE_DICT[typecode])
elif (typecode in [2, 12]):
array = np.frombuffer(f.read((array_desc['nbytes'] * 2)), dtype=DTYPE_DICT[typecode])[1::2]
else:
array = []
for i in range(array_desc['nelements']):
dtype = typecode
data = _read_data(f, dtype)
array.append(data)
array = np.array(array, dtype=np.object_)
if (array_desc['ndims'] > 1):
dims = array_desc['dims'][:int(array_desc['ndims'])]
dims.reverse()
array = array.reshape(dims)
_align_32(f)
return array |
def _read_record(f):
'Function to read in a full record'
record = {'rectype': _read_long(f)}
nextrec = _read_uint32(f)
nextrec += (_read_uint32(f) * (2 ** 32))
_skip_bytes(f, 4)
if (not (record['rectype'] in RECTYPE_DICT)):
raise Exception(('Unknown RECTYPE: %i' % record['rectype']))
record['rectype'] = RECTYPE_DICT[record['rectype']]
if (record['rectype'] in ['VARIABLE', 'HEAP_DATA']):
if (record['rectype'] == 'VARIABLE'):
record['varname'] = _read_string(f)
else:
record['heap_index'] = _read_long(f)
_skip_bytes(f, 4)
rectypedesc = _read_typedesc(f)
if (rectypedesc['typecode'] == 0):
if (nextrec == f.tell()):
record['data'] = None
else:
raise ValueError('Unexpected type code: 0')
else:
varstart = _read_long(f)
if (varstart != 7):
raise Exception('VARSTART is not 7')
if rectypedesc['structure']:
record['data'] = _read_structure(f, rectypedesc['array_desc'], rectypedesc['struct_desc'])
elif rectypedesc['array']:
record['data'] = _read_array(f, rectypedesc['typecode'], rectypedesc['array_desc'])
else:
dtype = rectypedesc['typecode']
record['data'] = _read_data(f, dtype)
elif (record['rectype'] == 'TIMESTAMP'):
_skip_bytes(f, (4 * 256))
record['date'] = _read_string(f)
record['user'] = _read_string(f)
record['host'] = _read_string(f)
elif (record['rectype'] == 'VERSION'):
record['format'] = _read_long(f)
record['arch'] = _read_string(f)
record['os'] = _read_string(f)
record['release'] = _read_string(f)
elif (record['rectype'] == 'IDENTIFICATON'):
record['author'] = _read_string(f)
record['title'] = _read_string(f)
record['idcode'] = _read_string(f)
elif (record['rectype'] == 'NOTICE'):
record['notice'] = _read_string(f)
elif (record['rectype'] == 'DESCRIPTION'):
record['description'] = _read_string_data(f)
elif (record['rectype'] == 'HEAP_HEADER'):
record['nvalues'] = _read_long(f)
record['indices'] = [_read_long(f) for _ in range(record['nvalues'])]
elif (record['rectype'] == 'COMMONBLOCK'):
record['nvars'] = _read_long(f)
record['name'] = _read_string(f)
record['varnames'] = [_read_string(f) for _ in range(record['nvars'])]
elif (record['rectype'] == 'END_MARKER'):
record['end'] = True
elif (record['rectype'] == 'UNKNOWN'):
warnings.warn('Skipping UNKNOWN record')
elif (record['rectype'] == 'SYSTEM_VARIABLE'):
warnings.warn('Skipping SYSTEM_VARIABLE record')
else:
raise Exception(("record['rectype']=%s not implemented" % record['rectype']))
f.seek(nextrec)
return record | -6,800,225,076,854,987,000 | Function to read in a full record | scipy/io/idl.py | _read_record | ikamensh/scipy | python | def _read_record(f):
record = {'rectype': _read_long(f)}
nextrec = _read_uint32(f)
nextrec += (_read_uint32(f) * (2 ** 32))
_skip_bytes(f, 4)
if (not (record['rectype'] in RECTYPE_DICT)):
raise Exception(('Unknown RECTYPE: %i' % record['rectype']))
record['rectype'] = RECTYPE_DICT[record['rectype']]
if (record['rectype'] in ['VARIABLE', 'HEAP_DATA']):
if (record['rectype'] == 'VARIABLE'):
record['varname'] = _read_string(f)
else:
record['heap_index'] = _read_long(f)
_skip_bytes(f, 4)
rectypedesc = _read_typedesc(f)
if (rectypedesc['typecode'] == 0):
if (nextrec == f.tell()):
record['data'] = None
else:
raise ValueError('Unexpected type code: 0')
else:
varstart = _read_long(f)
if (varstart != 7):
raise Exception('VARSTART is not 7')
if rectypedesc['structure']:
record['data'] = _read_structure(f, rectypedesc['array_desc'], rectypedesc['struct_desc'])
elif rectypedesc['array']:
record['data'] = _read_array(f, rectypedesc['typecode'], rectypedesc['array_desc'])
else:
dtype = rectypedesc['typecode']
record['data'] = _read_data(f, dtype)
elif (record['rectype'] == 'TIMESTAMP'):
_skip_bytes(f, (4 * 256))
record['date'] = _read_string(f)
record['user'] = _read_string(f)
record['host'] = _read_string(f)
elif (record['rectype'] == 'VERSION'):
record['format'] = _read_long(f)
record['arch'] = _read_string(f)
record['os'] = _read_string(f)
record['release'] = _read_string(f)
elif (record['rectype'] == 'IDENTIFICATON'):
record['author'] = _read_string(f)
record['title'] = _read_string(f)
record['idcode'] = _read_string(f)
elif (record['rectype'] == 'NOTICE'):
record['notice'] = _read_string(f)
elif (record['rectype'] == 'DESCRIPTION'):
record['description'] = _read_string_data(f)
elif (record['rectype'] == 'HEAP_HEADER'):
record['nvalues'] = _read_long(f)
record['indices'] = [_read_long(f) for _ in range(record['nvalues'])]
elif (record['rectype'] == 'COMMONBLOCK'):
record['nvars'] = _read_long(f)
record['name'] = _read_string(f)
record['varnames'] = [_read_string(f) for _ in range(record['nvars'])]
elif (record['rectype'] == 'END_MARKER'):
record['end'] = True
elif (record['rectype'] == 'UNKNOWN'):
warnings.warn('Skipping UNKNOWN record')
elif (record['rectype'] == 'SYSTEM_VARIABLE'):
warnings.warn('Skipping SYSTEM_VARIABLE record')
else:
raise Exception(("record['rectype']=%s not implemented" % record['rectype']))
f.seek(nextrec)
return record |
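What makes `_read_record` robust is the framing: every record header carries the absolute offset of the next record, so the reader can `f.seek(nextrec)` past anything it did not parse. A toy stream with the same framing, using an empty payload as the end marker:

```python
import io
import struct


def write_record(out, payload: bytes):
    # Header is one big-endian uint32: absolute offset of the next record.
    header_end = out.tell() + 4
    out.write(struct.pack('>I', header_end + len(payload)))
    out.write(payload)


buf = io.BytesIO()
for p in (b'first', b'second-record', b''):   # empty payload = end
    write_record(buf, p)
buf.seek(0)

while True:
    nextrec = struct.unpack('>I', buf.read(4))[0]
    payload = buf.read(nextrec - buf.tell())
    if not payload:
        break
    print(payload.decode())
    buf.seek(nextrec)   # skip any bytes we chose not to parse
```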
def _read_typedesc(f):
'Function to read in a type descriptor'
typedesc = {'typecode': _read_long(f), 'varflags': _read_long(f)}
if ((typedesc['varflags'] & 2) == 2):
raise Exception('System variables not implemented')
typedesc['array'] = ((typedesc['varflags'] & 4) == 4)
typedesc['structure'] = ((typedesc['varflags'] & 32) == 32)
if typedesc['structure']:
typedesc['array_desc'] = _read_arraydesc(f)
typedesc['struct_desc'] = _read_structdesc(f)
elif typedesc['array']:
typedesc['array_desc'] = _read_arraydesc(f)
return typedesc | -3,107,632,973,865,778,000 | Function to read in a type descriptor | scipy/io/idl.py | _read_typedesc | ikamensh/scipy | python | def _read_typedesc(f):
typedesc = {'typecode': _read_long(f), 'varflags': _read_long(f)}
if ((typedesc['varflags'] & 2) == 2):
raise Exception('System variables not implemented')
typedesc['array'] = ((typedesc['varflags'] & 4) == 4)
typedesc['structure'] = ((typedesc['varflags'] & 32) == 32)
if typedesc['structure']:
typedesc['array_desc'] = _read_arraydesc(f)
typedesc['struct_desc'] = _read_structdesc(f)
elif typedesc['array']:
typedesc['array_desc'] = _read_arraydesc(f)
return typedesc |
def _read_arraydesc(f):
'Function to read in an array descriptor'
arraydesc = {'arrstart': _read_long(f)}
if (arraydesc['arrstart'] == 8):
_skip_bytes(f, 4)
arraydesc['nbytes'] = _read_long(f)
arraydesc['nelements'] = _read_long(f)
arraydesc['ndims'] = _read_long(f)
_skip_bytes(f, 8)
arraydesc['nmax'] = _read_long(f)
arraydesc['dims'] = [_read_long(f) for _ in range(arraydesc['nmax'])]
elif (arraydesc['arrstart'] == 18):
warnings.warn('Using experimental 64-bit array read')
_skip_bytes(f, 8)
arraydesc['nbytes'] = _read_uint64(f)
arraydesc['nelements'] = _read_uint64(f)
arraydesc['ndims'] = _read_long(f)
_skip_bytes(f, 8)
arraydesc['nmax'] = 8
arraydesc['dims'] = []
for d in range(arraydesc['nmax']):
v = _read_long(f)
if (v != 0):
raise Exception('Expected a zero in ARRAY_DESC')
arraydesc['dims'].append(_read_long(f))
else:
raise Exception(('Unknown ARRSTART: %i' % arraydesc['arrstart']))
return arraydesc | 6,493,596,777,023,376,000 | Function to read in an array descriptor | scipy/io/idl.py | _read_arraydesc | ikamensh/scipy | python | def _read_arraydesc(f):
arraydesc = {'arrstart': _read_long(f)}
if (arraydesc['arrstart'] == 8):
_skip_bytes(f, 4)
arraydesc['nbytes'] = _read_long(f)
arraydesc['nelements'] = _read_long(f)
arraydesc['ndims'] = _read_long(f)
_skip_bytes(f, 8)
arraydesc['nmax'] = _read_long(f)
arraydesc['dims'] = [_read_long(f) for _ in range(arraydesc['nmax'])]
elif (arraydesc['arrstart'] == 18):
warnings.warn('Using experimental 64-bit array read')
_skip_bytes(f, 8)
arraydesc['nbytes'] = _read_uint64(f)
arraydesc['nelements'] = _read_uint64(f)
arraydesc['ndims'] = _read_long(f)
_skip_bytes(f, 8)
arraydesc['nmax'] = 8
arraydesc['dims'] = []
for d in range(arraydesc['nmax']):
v = _read_long(f)
if (v != 0):
raise Exception('Expected a zero in ARRAY_DESC')
arraydesc['dims'].append(_read_long(f))
else:
raise Exception(('Unknown ARRSTART: %i' % arraydesc['arrstart']))
return arraydesc |
def _read_structdesc(f):
'Function to read in a structure descriptor'
structdesc = {}
structstart = _read_long(f)
if (structstart != 9):
raise Exception('STRUCTSTART should be 9')
structdesc['name'] = _read_string(f)
predef = _read_long(f)
structdesc['ntags'] = _read_long(f)
structdesc['nbytes'] = _read_long(f)
structdesc['predef'] = (predef & 1)
structdesc['inherits'] = (predef & 2)
structdesc['is_super'] = (predef & 4)
if (not structdesc['predef']):
structdesc['tagtable'] = [_read_tagdesc(f) for _ in range(structdesc['ntags'])]
for tag in structdesc['tagtable']:
tag['name'] = _read_string(f)
structdesc['arrtable'] = {tag['name']: _read_arraydesc(f) for tag in structdesc['tagtable'] if tag['array']}
structdesc['structtable'] = {tag['name']: _read_structdesc(f) for tag in structdesc['tagtable'] if tag['structure']}
if (structdesc['inherits'] or structdesc['is_super']):
structdesc['classname'] = _read_string(f)
structdesc['nsupclasses'] = _read_long(f)
structdesc['supclassnames'] = [_read_string(f) for _ in range(structdesc['nsupclasses'])]
structdesc['supclasstable'] = [_read_structdesc(f) for _ in range(structdesc['nsupclasses'])]
STRUCT_DICT[structdesc['name']] = structdesc
else:
if (not (structdesc['name'] in STRUCT_DICT)):
raise Exception("PREDEF=1 but can't find definition")
structdesc = STRUCT_DICT[structdesc['name']]
return structdesc | 1,344,744,848,076,249,000 | Function to read in a structure descriptor | scipy/io/idl.py | _read_structdesc | ikamensh/scipy | python | def _read_structdesc(f):
structdesc = {}
structstart = _read_long(f)
if (structstart != 9):
raise Exception('STRUCTSTART should be 9')
structdesc['name'] = _read_string(f)
predef = _read_long(f)
structdesc['ntags'] = _read_long(f)
structdesc['nbytes'] = _read_long(f)
structdesc['predef'] = (predef & 1)
structdesc['inherits'] = (predef & 2)
structdesc['is_super'] = (predef & 4)
if (not structdesc['predef']):
structdesc['tagtable'] = [_read_tagdesc(f) for _ in range(structdesc['ntags'])]
for tag in structdesc['tagtable']:
tag['name'] = _read_string(f)
structdesc['arrtable'] = {tag['name']: _read_arraydesc(f) for tag in structdesc['tagtable'] if tag['array']}
structdesc['structtable'] = {tag['name']: _read_structdesc(f) for tag in structdesc['tagtable'] if tag['structure']}
if (structdesc['inherits'] or structdesc['is_super']):
structdesc['classname'] = _read_string(f)
structdesc['nsupclasses'] = _read_long(f)
structdesc['supclassnames'] = [_read_string(f) for _ in range(structdesc['nsupclasses'])]
structdesc['supclasstable'] = [_read_structdesc(f) for _ in range(structdesc['nsupclasses'])]
STRUCT_DICT[structdesc['name']] = structdesc
else:
if (not (structdesc['name'] in STRUCT_DICT)):
raise Exception("PREDEF=1 but can't find definition")
structdesc = STRUCT_DICT[structdesc['name']]
return structdesc |
def _read_tagdesc(f):
'Function to read in a tag descriptor'
tagdesc = {'offset': _read_long(f)}
if (tagdesc['offset'] == (- 1)):
tagdesc['offset'] = _read_uint64(f)
tagdesc['typecode'] = _read_long(f)
tagflags = _read_long(f)
tagdesc['array'] = ((tagflags & 4) == 4)
tagdesc['structure'] = ((tagflags & 32) == 32)
tagdesc['scalar'] = (tagdesc['typecode'] in DTYPE_DICT)
return tagdesc | -756,983,100,868,209,700 | Function to read in a tag descriptor | scipy/io/idl.py | _read_tagdesc | ikamensh/scipy | python | def _read_tagdesc(f):
tagdesc = {'offset': _read_long(f)}
if (tagdesc['offset'] == (- 1)):
tagdesc['offset'] = _read_uint64(f)
tagdesc['typecode'] = _read_long(f)
tagflags = _read_long(f)
tagdesc['array'] = ((tagflags & 4) == 4)
tagdesc['structure'] = ((tagflags & 32) == 32)
tagdesc['scalar'] = (tagdesc['typecode'] in DTYPE_DICT)
return tagdesc |
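The descriptor flags here and in `_read_typedesc` are single-bit masks: bit value 4 marks arrays, 32 marks structures, 2 marks system variables, and one integer can carry several at once.

```python
tagflags = 4 | 32             # an array of structures
print((tagflags & 4) == 4)    # True  -> array
print((tagflags & 32) == 32)  # True  -> structure
print((tagflags & 2) == 2)    # False -> not a system variable
```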
def readsav(file_name, idict=None, python_dict=False, uncompressed_file_name=None, verbose=False):
"\n Read an IDL .sav file.\n\n Parameters\n ----------\n file_name : str\n Name of the IDL save file.\n idict : dict, optional\n Dictionary in which to insert .sav file variables.\n python_dict : bool, optional\n By default, the object return is not a Python dictionary, but a\n case-insensitive dictionary with item, attribute, and call access\n to variables. To get a standard Python dictionary, set this option\n to True.\n uncompressed_file_name : str, optional\n This option only has an effect for .sav files written with the\n /compress option. If a file name is specified, compressed .sav\n files are uncompressed to this file. Otherwise, readsav will use\n the `tempfile` module to determine a temporary filename\n automatically, and will remove the temporary file upon successfully\n reading it in.\n verbose : bool, optional\n Whether to print out information about the save file, including\n the records read, and available variables.\n\n Returns\n -------\n idl_dict : AttrDict or dict\n If `python_dict` is set to False (default), this function returns a\n case-insensitive dictionary with item, attribute, and call access\n to variables. If `python_dict` is set to True, this function\n returns a Python dictionary with all variable names in lowercase.\n If `idict` was specified, then variables are written to the\n dictionary specified, and the updated dictionary is returned.\n\n Examples\n --------\n >>> from os.path import dirname, join as pjoin\n >>> import scipy.io as sio\n >>> from scipy.io import readsav\n\n Get the filename for an example .sav file from the tests/data directory.\n\n >>> data_dir = pjoin(dirname(sio.__file__), 'tests', 'data')\n >>> sav_fname = pjoin(data_dir, 'array_float32_1d.sav')\n\n Load the .sav file contents.\n\n >>> sav_data = readsav(sav_fname)\n\n Get keys of the .sav file contents.\n\n >>> print(sav_data.keys())\n dict_keys(['array1d'])\n\n Access a content with a key.\n\n >>> print(sav_data['array1d'])\n [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0.]\n\n "
records = []
if (python_dict or idict):
variables = {}
else:
variables = AttrDict()
f = open(file_name, 'rb')
signature = _read_bytes(f, 2)
if (signature != b'SR'):
raise Exception(f'Invalid SIGNATURE: {signature}')
recfmt = _read_bytes(f, 2)
if (recfmt == b'\x00\x04'):
pass
elif (recfmt == b'\x00\x06'):
if verbose:
print('IDL Save file is compressed')
if uncompressed_file_name:
fout = open(uncompressed_file_name, 'w+b')
else:
fout = tempfile.NamedTemporaryFile(suffix='.sav')
if verbose:
print(f' -> expanding to {fout.name}')
fout.write(b'SR\x00\x04')
while True:
rectype = _read_long(f)
fout.write(struct.pack('>l', int(rectype)))
nextrec = _read_uint32(f)
nextrec += (_read_uint32(f) * (2 ** 32))
unknown = f.read(4)
if (RECTYPE_DICT[rectype] == 'END_MARKER'):
fout.write(struct.pack('>I', (int(nextrec) % (2 ** 32))))
fout.write(struct.pack('>I', int(((nextrec - (nextrec % (2 ** 32))) / (2 ** 32)))))
fout.write(unknown)
break
pos = f.tell()
rec_string = zlib.decompress(f.read((nextrec - pos)))
nextrec = ((fout.tell() + len(rec_string)) + 12)
fout.write(struct.pack('>I', int((nextrec % (2 ** 32)))))
fout.write(struct.pack('>I', int(((nextrec - (nextrec % (2 ** 32))) / (2 ** 32)))))
fout.write(unknown)
fout.write(rec_string)
f.close()
f = fout
f.seek(4)
else:
raise Exception(f'Invalid RECFMT: {recfmt}')
while True:
r = _read_record(f)
records.append(r)
if ('end' in r):
if r['end']:
break
f.close()
heap = {}
for r in records:
if (r['rectype'] == 'HEAP_DATA'):
heap[r['heap_index']] = r['data']
for r in records:
if (r['rectype'] == 'VARIABLE'):
(replace, new) = _replace_heap(r['data'], heap)
if replace:
r['data'] = new
variables[r['varname'].lower()] = r['data']
if verbose:
for record in records:
if (record['rectype'] == 'TIMESTAMP'):
print(('-' * 50))
print(f"Date: {record['date']}")
print(f"User: {record['user']}")
print(f"Host: {record['host']}")
break
for record in records:
if (record['rectype'] == 'VERSION'):
print(('-' * 50))
print(f"Format: {record['format']}")
print(f"Architecture: {record['arch']}")
print(f"Operating System: {record['os']}")
print(f"IDL Version: {record['release']}")
break
for record in records:
if (record['rectype'] == 'IDENTIFICATON'):
print(('-' * 50))
print(f"Author: {record['author']}")
print(f"Title: {record['title']}")
print(f"ID Code: {record['idcode']}")
break
for record in records:
if (record['rectype'] == 'DESCRIPTION'):
print(('-' * 50))
print(f"Description: {record['description']}")
break
print(('-' * 50))
print(('Successfully read %i records of which:' % len(records)))
rectypes = [r['rectype'] for r in records]
for rt in set(rectypes):
if (rt != 'END_MARKER'):
print((' - %i are of type %s' % (rectypes.count(rt), rt)))
print(('-' * 50))
if ('VARIABLE' in rectypes):
print('Available variables:')
for var in variables:
print(f' - {var} [{type(variables[var])}]')
print(('-' * 50))
if idict:
for var in variables:
idict[var] = variables[var]
return idict
else:
return variables | -8,938,747,479,362,647,000 | Read an IDL .sav file.
Parameters
----------
file_name : str
Name of the IDL save file.
idict : dict, optional
Dictionary in which to insert .sav file variables.
python_dict : bool, optional
By default, the object return is not a Python dictionary, but a
case-insensitive dictionary with item, attribute, and call access
to variables. To get a standard Python dictionary, set this option
to True.
uncompressed_file_name : str, optional
This option only has an effect for .sav files written with the
/compress option. If a file name is specified, compressed .sav
files are uncompressed to this file. Otherwise, readsav will use
the `tempfile` module to determine a temporary filename
automatically, and will remove the temporary file upon successfully
reading it in.
verbose : bool, optional
Whether to print out information about the save file, including
the records read, and available variables.
Returns
-------
idl_dict : AttrDict or dict
If `python_dict` is set to False (default), this function returns a
case-insensitive dictionary with item, attribute, and call access
to variables. If `python_dict` is set to True, this function
returns a Python dictionary with all variable names in lowercase.
If `idict` was specified, then variables are written to the
dictionary specified, and the updated dictionary is returned.
Examples
--------
>>> from os.path import dirname, join as pjoin
>>> import scipy.io as sio
>>> from scipy.io import readsav
Get the filename for an example .sav file from the tests/data directory.
>>> data_dir = pjoin(dirname(sio.__file__), 'tests', 'data')
>>> sav_fname = pjoin(data_dir, 'array_float32_1d.sav')
Load the .sav file contents.
>>> sav_data = readsav(sav_fname)
Get keys of the .sav file contents.
>>> print(sav_data.keys())
dict_keys(['array1d'])
Access a content with a key.
>>> print(sav_data['array1d'])
[0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.
0. 0. 0.] | scipy/io/idl.py | readsav | ikamensh/scipy | python | def readsav(file_name, idict=None, python_dict=False, uncompressed_file_name=None, verbose=False):
"\n Read an IDL .sav file.\n\n Parameters\n ----------\n file_name : str\n Name of the IDL save file.\n idict : dict, optional\n Dictionary in which to insert .sav file variables.\n python_dict : bool, optional\n By default, the object return is not a Python dictionary, but a\n case-insensitive dictionary with item, attribute, and call access\n to variables. To get a standard Python dictionary, set this option\n to True.\n uncompressed_file_name : str, optional\n This option only has an effect for .sav files written with the\n /compress option. If a file name is specified, compressed .sav\n files are uncompressed to this file. Otherwise, readsav will use\n the `tempfile` module to determine a temporary filename\n automatically, and will remove the temporary file upon successfully\n reading it in.\n verbose : bool, optional\n Whether to print out information about the save file, including\n the records read, and available variables.\n\n Returns\n -------\n idl_dict : AttrDict or dict\n If `python_dict` is set to False (default), this function returns a\n case-insensitive dictionary with item, attribute, and call access\n to variables. If `python_dict` is set to True, this function\n returns a Python dictionary with all variable names in lowercase.\n If `idict` was specified, then variables are written to the\n dictionary specified, and the updated dictionary is returned.\n\n Examples\n --------\n >>> from os.path import dirname, join as pjoin\n >>> import scipy.io as sio\n >>> from scipy.io import readsav\n\n Get the filename for an example .sav file from the tests/data directory.\n\n >>> data_dir = pjoin(dirname(sio.__file__), 'tests', 'data')\n >>> sav_fname = pjoin(data_dir, 'array_float32_1d.sav')\n\n Load the .sav file contents.\n\n >>> sav_data = readsav(sav_fname)\n\n Get keys of the .sav file contents.\n\n >>> print(sav_data.keys())\n dict_keys(['array1d'])\n\n Access a content with a key.\n\n >>> print(sav_data['array1d'])\n [0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.\n 0. 0. 0.]\n\n "
records = []
if (python_dict or idict):
variables = {}
else:
variables = AttrDict()
f = open(file_name, 'rb')
signature = _read_bytes(f, 2)
if (signature != b'SR'):
raise Exception(f'Invalid SIGNATURE: {signature}')
recfmt = _read_bytes(f, 2)
if (recfmt == b'\x00\x04'):
pass
elif (recfmt == b'\x00\x06'):
if verbose:
print('IDL Save file is compressed')
if uncompressed_file_name:
fout = open(uncompressed_file_name, 'w+b')
else:
fout = tempfile.NamedTemporaryFile(suffix='.sav')
if verbose:
print(f' -> expanding to {fout.name}')
fout.write(b'SR\x00\x04')
while True:
rectype = _read_long(f)
fout.write(struct.pack('>l', int(rectype)))
nextrec = _read_uint32(f)
nextrec += (_read_uint32(f) * (2 ** 32))
unknown = f.read(4)
if (RECTYPE_DICT[rectype] == 'END_MARKER'):
fout.write(struct.pack('>I', (int(nextrec) % (2 ** 32))))
fout.write(struct.pack('>I', int(((nextrec - (nextrec % (2 ** 32))) / (2 ** 32)))))
fout.write(unknown)
break
pos = f.tell()
rec_string = zlib.decompress(f.read((nextrec - pos)))
nextrec = ((fout.tell() + len(rec_string)) + 12)
fout.write(struct.pack('>I', int((nextrec % (2 ** 32)))))
fout.write(struct.pack('>I', int(((nextrec - (nextrec % (2 ** 32))) / (2 ** 32)))))
fout.write(unknown)
fout.write(rec_string)
f.close()
f = fout
f.seek(4)
else:
raise Exception(f'Invalid RECFMT: {recfmt}')
while True:
r = _read_record(f)
records.append(r)
if ('end' in r):
if r['end']:
break
f.close()
heap = {}
for r in records:
if (r['rectype'] == 'HEAP_DATA'):
heap[r['heap_index']] = r['data']
for r in records:
if (r['rectype'] == 'VARIABLE'):
(replace, new) = _replace_heap(r['data'], heap)
if replace:
r['data'] = new
variables[r['varname'].lower()] = r['data']
if verbose:
for record in records:
if (record['rectype'] == 'TIMESTAMP'):
print(('-' * 50))
print(f"Date: {record['date']}")
print(f"User: {record['user']}")
print(f"Host: {record['host']}")
break
for record in records:
if (record['rectype'] == 'VERSION'):
print(('-' * 50))
print(f"Format: {record['format']}")
print(f"Architecture: {record['arch']}")
print(f"Operating System: {record['os']}")
print(f"IDL Version: {record['release']}")
break
for record in records:
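# NB: 'IDENTIFICATON' (sic) matches the record-type spelling used in scipy's RECTYPE_DICT, so the misspelling below is intentional.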
if (record['rectype'] == 'IDENTIFICATON'):
print(('-' * 50))
print(f"Author: {record['author']}")
print(f"Title: {record['title']}")
print(f"ID Code: {record['idcode']}")
break
for record in records:
if (record['rectype'] == 'DESCRIPTION'):
print(('-' * 50))
print(f"Description: {record['description']}")
break
print(('-' * 50))
print(('Successfully read %i records of which:' % len(records)))
rectypes = [r['rectype'] for r in records]
for rt in set(rectypes):
if (rt != 'END_MARKER'):
print((' - %i are of type %s' % (rectypes.count(rt), rt)))
print(('-' * 50))
if ('VARIABLE' in rectypes):
print('Available variables:')
for var in variables:
print(f' - {var} [{type(variables[var])}]')
print(('-' * 50))
if idict:
for var in variables:
idict[var] = variables[var]
return idict
else:
return variables |
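
A minimal usage sketch for the `python_dict` and `idict` options documented above; the .sav filename is a placeholder.

from scipy.io import readsav

plain = readsav('example.sav', python_dict=True)  # plain dict with lowercase keys
existing = {'note': 'kept'}
merged = readsav('example.sav', idict=existing)  # .sav variables written into `existing`
assert merged is existing  # per the docstring, the updated dictionary is returned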
def test_pdis_plot(self):
' Test combined spectral plots. '
os.chdir((REAL_PATH + '/data/dispersion'))
expected_file = 'K3P-OQMD_4786-CollCode25550_spectral.png'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'K3P-OQMD_4786-CollCode25550', '--png', '-scale', '10', '-interp', '2', '-pw', '-5', '5', '--gap', '--preserve_kspace_distance', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) | 7,943,555,130,315,218,000 | Test combined spectral plots. | tests/test_plotting.py | test_pdis_plot | AJMGroup/matador | python | def test_pdis_plot(self):
' '
os.chdir((REAL_PATH + '/data/dispersion'))
expected_file = 'K3P-OQMD_4786-CollCode25550_spectral.png'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'K3P-OQMD_4786-CollCode25550', '--png', '-scale', '10', '-interp', '2', '-pw', '-5', '5', '--gap', '--preserve_kspace_distance', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) |
def test_dos_only(self):
' Test DOS-only spectral plot. '
os.chdir((REAL_PATH + '/data/dispersion'))
expected_file = 'K3P-OQMD_4786-CollCode25550_spectral.png'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'K3P-OQMD_4786-CollCode25550', '--png', '--dos_only', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) | -7,719,930,184,175,832,000 | Test DOS-only spectral plot. | tests/test_plotting.py | test_dos_only | AJMGroup/matador | python | def test_dos_only(self):
' '
os.chdir((REAL_PATH + '/data/dispersion'))
expected_file = 'K3P-OQMD_4786-CollCode25550_spectral.png'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'K3P-OQMD_4786-CollCode25550', '--png', '--dos_only', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) |
def test_multiseed(self):
' Test plotting two seed bandstructures on top of each other. '
os.chdir((REAL_PATH + '/data/bands_files'))
expected_file = 'KPSn_spectral.png'
sys.argv = ['dispersion', 'KPSn', 'KPSn_2', '--dos_only', '--cmap', 'viridis', '--png', '--band_reorder', '--labels', 'PBE, LDA', '--figsize', '10', '10', '--colours', 'green', 'red']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) | -3,365,651,731,351,200,000 | Test plotting two seed bandstructures on top of each other. | tests/test_plotting.py | test_multiseed | AJMGroup/matador | python | def test_multiseed(self):
' '
os.chdir((REAL_PATH + '/data/bands_files'))
expected_file = 'KPSn_spectral.png'
sys.argv = ['dispersion', 'KPSn', 'KPSn_2', '--dos_only', '--cmap', 'viridis', '--png', '--band_reorder', '--labels', 'PBE, LDA', '--figsize', '10', '10', '--colours', 'green', 'red']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) |
def test_x11_no_fail(self):
' Test that plotting to the screen (no file output) does not fail. '
os.chdir((REAL_PATH + '/data/dispersion'))
sys.argv = ['dispersion', 'K3P-OQMD_4786-CollCode25550', '--dos_only', '--cmap', 'viridis', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
os.chdir(ROOT_DIR)
if errored:
raise error | -2,491,664,025,944,966,000 | Test that plotting to the screen (no file output) does not fail. | tests/test_plotting.py | test_x11_no_fail | AJMGroup/matador | python | def test_x11_no_fail(self):
' '
os.chdir((REAL_PATH + '/data/dispersion'))
sys.argv = ['dispersion', 'K3P-OQMD_4786-CollCode25550', '--dos_only', '--cmap', 'viridis', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
os.chdir(ROOT_DIR)
if errored:
raise error |
def test_phonon_dispersion(self):
' Test phonon dispersion plot. '
os.chdir((REAL_PATH + '/data/phonon_dispersion'))
expected_file = 'K3P_spectral.png'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'K3P', '--png', '-ph', '--colours', 'grey', 'green', 'blue', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) | 5,399,417,869,385,618,000 | Test phonon dispersion plot. | tests/test_plotting.py | test_phonon_dispersion | AJMGroup/matador | python | def test_phonon_dispersion(self):
' '
os.chdir((REAL_PATH + '/data/phonon_dispersion'))
expected_file = 'K3P_spectral.png'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'K3P', '--png', '-ph', '--colours', 'grey', 'green', 'blue', '--figsize', '10', '10']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) |
def test_phonon_ir(self):
' Test phonon IR/Raman plot. '
os.chdir((REAL_PATH + '/data/phonon_ir'))
expected_file = 'h-BN_IRR_ir.svg'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'h-BN_IRR', '--svg', '-ir', '--figsize', '5', '5']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) | -5,958,142,489,704,375,000 | Test phonon IR/Raman plot. | tests/test_plotting.py | test_phonon_ir | AJMGroup/matador | python | def test_phonon_ir(self):
' '
os.chdir((REAL_PATH + '/data/phonon_ir'))
expected_file = 'h-BN_IRR_ir.svg'
if os.path.isfile(expected_file):
os.remove(expected_file)
sys.argv = ['dispersion', 'h-BN_IRR', '--svg', '-ir', '--figsize', '5', '5']
errored = False
try:
matador.cli.dispersion.main()
except Exception as exc:
errored = True
error = exc
file_exists = os.path.isfile(expected_file)
if file_exists:
os.remove(expected_file)
os.chdir(ROOT_DIR)
if errored:
raise error
self.assertTrue(file_exists) |
def test_binary_hull_plot(self):
' Test plotting binary hull. '
expected_files = ['KP_hull_simple.svg']
cursor = res2dict((REAL_PATH + 'data/hull-KP-KSnP_pub/*.res'))[0]
QueryConvexHull(cursor=cursor, elements=['K', 'P'], svg=True, hull_cutoff=0.0, plot_kwargs={'plot_fname': 'KP_hull_simple', 'svg': True})
for expected_file in expected_files:
self.assertTrue(os.path.isfile(expected_file)) | 499,941,889,050,099,460 | Test plotting binary hull. | tests/test_plotting.py | test_binary_hull_plot | AJMGroup/matador | python | def test_binary_hull_plot(self):
' '
expected_files = ['KP_hull_simple.svg']
cursor = res2dict((REAL_PATH + 'data/hull-KP-KSnP_pub/*.res'))[0]
QueryConvexHull(cursor=cursor, elements=['K', 'P'], svg=True, hull_cutoff=0.0, plot_kwargs={'plot_fname': 'KP_hull_simple', 'svg': True})
for expected_file in expected_files:
self.assertTrue(os.path.isfile(expected_file)) |
def test_binary_battery_plots(self):
' Test plotting binary hull. '
expected_files = ['KP_hull.png', 'KP_voltage.png', 'KP_volume.png']
cursor = res2dict((REAL_PATH + 'data/hull-KP-KSnP_pub/*.res'))[0]
QueryConvexHull(cursor=cursor, elements=['K', 'P'], no_plot=False, png=True, quiet=False, voltage=True, labels=True, label_cutoff=0.05, hull_cutoff=0.1, volume=True, plot_kwargs={'colour_by_source': True})
for expected_file in expected_files:
self.assertTrue(os.path.isfile(expected_file)) | -5,268,088,620,267,915,000 | Test plotting binary hull. | tests/test_plotting.py | test_binary_battery_plots | AJMGroup/matador | python | def test_binary_battery_plots(self):
' '
expected_files = ['KP_hull.png', 'KP_voltage.png', 'KP_volume.png']
cursor = res2dict((REAL_PATH + 'data/hull-KP-KSnP_pub/*.res'))[0]
QueryConvexHull(cursor=cursor, elements=['K', 'P'], no_plot=False, png=True, quiet=False, voltage=True, labels=True, label_cutoff=0.05, hull_cutoff=0.1, volume=True, plot_kwargs={'colour_by_source': True})
for expected_file in expected_files:
self.assertTrue(os.path.isfile(expected_file)) |
@unittest.skipIf((not TERNARY_PRESENT), 'Skipping as python-ternary not found')
def test_ternary_hull_plot(self):
' Test plotting ternary hull. '
expected_files = ['KSnP_hull.png', 'KSnP_voltage.png']
for expected_file in expected_files:
if os.path.isfile(expected_file):
os.remove(expected_file)
res_list = glob((REAL_PATH + 'data/hull-KPSn-KP/*.res'))
self.assertEqual(len(res_list), 87, 'Could not find test res files, please check installation...')
cursor = [res2dict(res)[0] for res in res_list]
QueryConvexHull(cursor=cursor, elements=['K', 'Sn', 'P'], no_plot=False, png=True, quiet=False, voltage=True, labels=True, label_cutoff=0.05, hull_cutoff=0.1, capmap=True)
for expected_file in expected_files:
self.assertTrue(os.path.isfile(expected_file))
os.remove(expected_file)
def test_ternary_hull_plot(self):
' '
expected_files = ['KSnP_hull.png', 'KSnP_voltage.png']
for expected_file in expected_files:
if os.path.isfile(expected_file):
os.remove(expected_file)
res_list = glob((REAL_PATH + 'data/hull-KPSn-KP/*.res'))
self.assertEqual(len(res_list), 87, 'Could not find test res files, please check installation...')
cursor = [res2dict(res)[0] for res in res_list]
QueryConvexHull(cursor=cursor, elements=['K', 'Sn', 'P'], no_plot=False, png=True, quiet=False, voltage=True, labels=True, label_cutoff=0.05, hull_cutoff=0.1, capmap=True)
for expected_file in expected_files:
self.assertTrue(os.path.isfile(expected_file))
os.remove(expected_file)
def test_beef_hull_plot(self):
' Test plotting BEEF hull. '
from matador.hull import EnsembleHull
from matador.scrapers import castep2dict
expected_file = 'KP_beef_hull.svg'
(cursor, s) = castep2dict((REAL_PATH + 'data/beef_files/*.castep'), db=False)
self.assertEqual(len(s), 0)
beef_hull = EnsembleHull(cursor, '_beef', elements=['K', 'P'], num_samples=10, energy_key='total_energy_per_atom', parameter_key='thetas')
beef_hull.plot_hull(svg=True)
self.assertTrue(os.path.isfile(expected_file)) | -8,681,327,293,378,057,000 | Test plotting BEEF hull. | tests/test_plotting.py | test_beef_hull_plot | AJMGroup/matador | python | def test_beef_hull_plot(self):
' '
from matador.hull import EnsembleHull
from matador.scrapers import castep2dict
expected_file = 'KP_beef_hull.svg'
(cursor, s) = castep2dict((REAL_PATH + 'data/beef_files/*.castep'), db=False)
self.assertEqual(len(s), 0)
beef_hull = EnsembleHull(cursor, '_beef', elements=['K', 'P'], num_samples=10, energy_key='total_energy_per_atom', parameter_key='thetas')
beef_hull.plot_hull(svg=True)
self.assertTrue(os.path.isfile(expected_file)) |
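
The matador plotting tests above all repeat the same chdir/sys.argv/cleanup dance; a hypothetical helper (not part of matador) that captures the pattern might look like:

import os
import sys

def run_dispersion_cli(workdir, argv, expected_file=None):
    # Run matador.cli.dispersion.main() inside workdir, restoring cwd afterwards.
    import matador.cli.dispersion
    old_cwd = os.getcwd()
    os.chdir(workdir)
    try:
        sys.argv = argv
        matador.cli.dispersion.main()
        return expected_file is None or os.path.isfile(expected_file)
    finally:
        if expected_file and os.path.isfile(expected_file):
            os.remove(expected_file)
        os.chdir(old_cwd)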
def __init__(self, audit_reason_id=None, comment=None):
'GetEdocWithAuditReasonRequest - a model defined in Swagger'
self._audit_reason_id = None
self._comment = None
self.discriminator = None
if (audit_reason_id is not None):
self.audit_reason_id = audit_reason_id
if (comment is not None):
self.comment = comment | 1,768,202,398,402,747,600 | GetEdocWithAuditReasonRequest - a model defined in Swagger | laserfiche_api/models/get_edoc_with_audit_reason_request.py | __init__ | Layer8Err/laserfiche-api | python | def __init__(self, audit_reason_id=None, comment=None):
self._audit_reason_id = None
self._comment = None
self.discriminator = None
if (audit_reason_id is not None):
self.audit_reason_id = audit_reason_id
if (comment is not None):
self.comment = comment |
@property
def audit_reason_id(self):
'Gets the audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501\n\n The reason id for this audit event. # noqa: E501\n\n :return: The audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501\n :rtype: int\n '
return self._audit_reason_id | -1,139,160,547,206,664,100 | Gets the audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501
The reason id for this audit event. # noqa: E501
:return: The audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501
:rtype: int | laserfiche_api/models/get_edoc_with_audit_reason_request.py | audit_reason_id | Layer8Err/laserfiche-api | python | @property
def audit_reason_id(self):
'Gets the audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501\n\n The reason id for this audit event. # noqa: E501\n\n :return: The audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501\n :rtype: int\n '
return self._audit_reason_id |
@audit_reason_id.setter
def audit_reason_id(self, audit_reason_id):
'Sets the audit_reason_id of this GetEdocWithAuditReasonRequest.\n\n The reason id for this audit event. # noqa: E501\n\n :param audit_reason_id: The audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501\n :type: int\n '
self._audit_reason_id = audit_reason_id | -7,435,327,145,679,349,000 | Sets the audit_reason_id of this GetEdocWithAuditReasonRequest.
The reason id for this audit event. # noqa: E501
:param audit_reason_id: The audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501
:type: int | laserfiche_api/models/get_edoc_with_audit_reason_request.py | audit_reason_id | Layer8Err/laserfiche-api | python | @audit_reason_id.setter
def audit_reason_id(self, audit_reason_id):
'Sets the audit_reason_id of this GetEdocWithAuditReasonRequest.\n\n The reason id for this audit event. # noqa: E501\n\n :param audit_reason_id: The audit_reason_id of this GetEdocWithAuditReasonRequest. # noqa: E501\n :type: int\n '
self._audit_reason_id = audit_reason_id |
@property
def comment(self):
'Gets the comment of this GetEdocWithAuditReasonRequest. # noqa: E501\n\n The comment for this audit event. # noqa: E501\n\n :return: The comment of this GetEdocWithAuditReasonRequest. # noqa: E501\n :rtype: str\n '
return self._comment | -6,609,239,928,674,097,000 | Gets the comment of this GetEdocWithAuditReasonRequest. # noqa: E501
The comment for this audit event. # noqa: E501
:return: The comment of this GetEdocWithAuditReasonRequest. # noqa: E501
:rtype: str | laserfiche_api/models/get_edoc_with_audit_reason_request.py | comment | Layer8Err/laserfiche-api | python | @property
def comment(self):
'Gets the comment of this GetEdocWithAuditReasonRequest. # noqa: E501\n\n The comment for this audit event. # noqa: E501\n\n :return: The comment of this GetEdocWithAuditReasonRequest. # noqa: E501\n :rtype: str\n '
return self._comment |
@comment.setter
def comment(self, comment):
'Sets the comment of this GetEdocWithAuditReasonRequest.\n\n The comment for this audit event. # noqa: E501\n\n :param comment: The comment of this GetEdocWithAuditReasonRequest. # noqa: E501\n :type: str\n '
self._comment = comment | 1,478,110,770,522,016,800 | Sets the comment of this GetEdocWithAuditReasonRequest.
The comment for this audit event. # noqa: E501
:param comment: The comment of this GetEdocWithAuditReasonRequest. # noqa: E501
:type: str | laserfiche_api/models/get_edoc_with_audit_reason_request.py | comment | Layer8Err/laserfiche-api | python | @comment.setter
def comment(self, comment):
'Sets the comment of this GetEdocWithAuditReasonRequest.\n\n The comment for this audit event. # noqa: E501\n\n :param comment: The comment of this GetEdocWithAuditReasonRequest. # noqa: E501\n :type: str\n '
self._comment = comment |
def to_dict(self):
'Returns the model properties as a dict'
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(GetEdocWithAuditReasonRequest, dict):
for (key, value) in self.items():
result[key] = value
return result | -3,092,012,831,975,072,300 | Returns the model properties as a dict | laserfiche_api/models/get_edoc_with_audit_reason_request.py | to_dict | Layer8Err/laserfiche-api | python | def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(GetEdocWithAuditReasonRequest, dict):
for (key, value) in self.items():
result[key] = value
return result |
def to_str(self):
'Returns the string representation of the model'
return pprint.pformat(self.to_dict()) | 5,849,158,643,760,736,000 | Returns the string representation of the model | laserfiche_api/models/get_edoc_with_audit_reason_request.py | to_str | Layer8Err/laserfiche-api | python | def to_str(self):
return pprint.pformat(self.to_dict()) |
def __repr__(self):
'For `print` and `pprint`'
return self.to_str() | -8,960,031,694,814,905,000 | For `print` and `pprint` | laserfiche_api/models/get_edoc_with_audit_reason_request.py | __repr__ | Layer8Err/laserfiche-api | python | def __repr__(self):
return self.to_str() |
def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, GetEdocWithAuditReasonRequest)):
return False
return (self.__dict__ == other.__dict__) | 8,442,840,344,376,126,000 | Returns true if both objects are equal | laserfiche_api/models/get_edoc_with_audit_reason_request.py | __eq__ | Layer8Err/laserfiche-api | python | def __eq__(self, other):
if (not isinstance(other, GetEdocWithAuditReasonRequest)):
return False
return (self.__dict__ == other.__dict__) |
def __ne__(self, other):
'Returns true if both objects are not equal'
return (not (self == other)) | 7,764,124,047,908,058,000 | Returns true if both objects are not equal | laserfiche_api/models/get_edoc_with_audit_reason_request.py | __ne__ | Layer8Err/laserfiche-api | python | def __ne__(self, other):
return (not (self == other)) |
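
A short usage sketch of the Swagger model above; the field values are made up.

req = GetEdocWithAuditReasonRequest(audit_reason_id=7, comment='exported for review')
print(req.to_dict())  # expected: {'audit_reason_id': 7, 'comment': 'exported for review'}
print(req)  # __repr__ delegates to to_str(), which pretty-prints the dict
assert req == GetEdocWithAuditReasonRequest(audit_reason_id=7, comment='exported for review')
assert req != GetEdocWithAuditReasonRequest(audit_reason_id=8)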
def complete_login(self, request, app):
'\n Returns a SocialLogin instance\n '
raise NotImplementedError | 4,991,094,973,540,804,000 | Returns a SocialLogin instance | allauth/socialaccount/providers/oauth/views.py | complete_login | rawjam/django-allauth | python | def complete_login(self, request, app):
'\n \n '
raise NotImplementedError |
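
A hedged sketch of a concrete adapter implementing the hook above: the provider id is invented, the signature follows the call made in the dispatch view below, and `sociallogin_from_response` follows later django-allauth provider APIs, so it may differ in this fork.

class ExampleOAuthAdapter(OAuthAdapter):
    provider_id = 'example'

    def complete_login(self, request, app, token):
        # Normally the profile is fetched from the provider's API using `token`.
        extra_data = {'id': '123', 'screen_name': 'jane'}  # placeholder profile data
        return self.get_provider().sociallogin_from_response(request, extra_data)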
def dispatch(self, request):
'\n View to handle final steps of OAuth based authentication where the user\n gets redirected back from the service provider\n '
login_done_url = reverse((self.adapter.provider_id + '_callback'))
client = self._get_client(request, login_done_url)
if (not client.is_valid()):
if ('denied' in request.GET):
return HttpResponseRedirect(reverse('socialaccount_login_cancelled'))
extra_context = dict(oauth_client=client)
return render_authentication_error(request, extra_context)
app = self.adapter.get_provider().get_app(request)
try:
access_token = client.get_access_token()
token = SocialToken(app=app, token=access_token['oauth_token'], token_secret=access_token['oauth_token_secret'])
login = self.adapter.complete_login(request, app, token)
token.account = login.account
login.token = token
login.state = SocialLogin.unmarshall_state(request.session.pop('oauth_login_state', None))
login.redirect_account_url = request.session.pop('redirect_account_url', None)
return complete_social_login(request, login)
except OAuthError:
return render_authentication_error(request) | 7,326,730,518,377,786,000 | View to handle final steps of OAuth based authentication where the user
gets redirected back from the service provider | allauth/socialaccount/providers/oauth/views.py | dispatch | rawjam/django-allauth | python | def dispatch(self, request):
'\n View to handle final steps of OAuth based authentication where the user\n gets redirected back from the service provider\n '
login_done_url = reverse((self.adapter.provider_id + '_callback'))
client = self._get_client(request, login_done_url)
if (not client.is_valid()):
if ('denied' in request.GET):
return HttpResponseRedirect(reverse('socialaccount_login_cancelled'))
extra_context = dict(oauth_client=client)
return render_authentication_error(request, extra_context)
app = self.adapter.get_provider().get_app(request)
try:
access_token = client.get_access_token()
token = SocialToken(app=app, token=access_token['oauth_token'], token_secret=access_token['oauth_token_secret'])
login = self.adapter.complete_login(request, app, token)
token.account = login.account
login.token = token
login.state = SocialLogin.unmarshall_state(request.session.pop('oauth_login_state', None))
login.redirect_account_url = request.session.pop('redirect_account_url', None)
return complete_social_login(request, login)
except OAuthError:
return render_authentication_error(request) |
def get_machine_learning_compute(compute_name: Optional[str]=None, resource_group_name: Optional[str]=None, workspace_name: Optional[str]=None, opts: Optional[pulumi.InvokeOptions]=None) -> AwaitableGetMachineLearningComputeResult:
'\n Use this data source to access information about an existing resource.\n\n :param str compute_name: Name of the Azure Machine Learning compute.\n :param str resource_group_name: Name of the resource group in which workspace is located.\n :param str workspace_name: Name of Azure Machine Learning workspace.\n '
__args__ = dict()
__args__['computeName'] = compute_name
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if (opts is None):
opts = pulumi.InvokeOptions()
if (opts.version is None):
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:machinelearningservices/v20200901preview:getMachineLearningCompute', __args__, opts=opts, typ=GetMachineLearningComputeResult).value
return AwaitableGetMachineLearningComputeResult(identity=__ret__.identity, location=__ret__.location, name=__ret__.name, properties=__ret__.properties, sku=__ret__.sku, tags=__ret__.tags, type=__ret__.type) | 396,372,869,333,231,360 | Use this data source to access information about an existing resource.
:param str compute_name: Name of the Azure Machine Learning compute.
:param str resource_group_name: Name of the resource group in which workspace is located.
:param str workspace_name: Name of Azure Machine Learning workspace. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | get_machine_learning_compute | test-wiz-sec/pulumi-azure-nextgen | python | def get_machine_learning_compute(compute_name: Optional[str]=None, resource_group_name: Optional[str]=None, workspace_name: Optional[str]=None, opts: Optional[pulumi.InvokeOptions]=None) -> AwaitableGetMachineLearningComputeResult:
'\n Use this data source to access information about an existing resource.\n\n :param str compute_name: Name of the Azure Machine Learning compute.\n :param str resource_group_name: Name of the resource group in which workspace is located.\n :param str workspace_name: Name of Azure Machine Learning workspace.\n '
__args__ = dict()
__args__['computeName'] = compute_name
__args__['resourceGroupName'] = resource_group_name
__args__['workspaceName'] = workspace_name
if (opts is None):
opts = pulumi.InvokeOptions()
if (opts.version is None):
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:machinelearningservices/v20200901preview:getMachineLearningCompute', __args__, opts=opts, typ=GetMachineLearningComputeResult).value
return AwaitableGetMachineLearningComputeResult(identity=__ret__.identity, location=__ret__.location, name=__ret__.name, properties=__ret__.properties, sku=__ret__.sku, tags=__ret__.tags, type=__ret__.type) |
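
A usage sketch for the data source above; the resource names are placeholders.

import pulumi
import pulumi_azure_nextgen.machinelearningservices.v20200901preview as mls

compute = mls.get_machine_learning_compute(
    compute_name='cpu-cluster',
    resource_group_name='ml-rg',
    workspace_name='ml-workspace')
pulumi.export('computeType', compute.type)
pulumi.export('computeLocation', compute.location)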
@property
@pulumi.getter
def identity(self) -> Optional['outputs.IdentityResponse']:
'\n The identity of the resource.\n '
return pulumi.get(self, 'identity') | -3,116,828,005,937,086,000 | The identity of the resource. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | identity | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def identity(self) -> Optional['outputs.IdentityResponse']:
'\n \n '
return pulumi.get(self, 'identity') |
@property
@pulumi.getter
def location(self) -> Optional[str]:
'\n Specifies the location of the resource.\n '
return pulumi.get(self, 'location') | 7,379,677,595,793,272,000 | Specifies the location of the resource. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | location | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def location(self) -> Optional[str]:
'\n \n '
return pulumi.get(self, 'location') |
@property
@pulumi.getter
def name(self) -> str:
'\n Specifies the name of the resource.\n '
return pulumi.get(self, 'name') | -4,591,409,987,009,933,000 | Specifies the name of the resource. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | name | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def name(self) -> str:
'\n \n '
return pulumi.get(self, 'name') |
@property
@pulumi.getter
def properties(self) -> Any:
'\n Compute properties\n '
return pulumi.get(self, 'properties') | 1,575,765,894,159,976,400 | Compute properties | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | properties | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def properties(self) -> Any:
'\n \n '
return pulumi.get(self, 'properties') |
@property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
'\n The sku of the workspace.\n '
return pulumi.get(self, 'sku') | -6,725,223,236,896,668,000 | The sku of the workspace. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | sku | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def sku(self) -> Optional['outputs.SkuResponse']:
'\n \n '
return pulumi.get(self, 'sku') |
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[(str, str)]]:
'\n Contains resource tags defined as key/value pairs.\n '
return pulumi.get(self, 'tags') | -3,898,261,538,620,266,000 | Contains resource tags defined as key/value pairs. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | tags | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def tags(self) -> Optional[Mapping[(str, str)]]:
'\n \n '
return pulumi.get(self, 'tags') |
@property
@pulumi.getter
def type(self) -> str:
'\n Specifies the type of the resource.\n '
return pulumi.get(self, 'type') | 3,039,650,124,608,506,000 | Specifies the type of the resource. | sdk/python/pulumi_azure_nextgen/machinelearningservices/v20200901preview/get_machine_learning_compute.py | type | test-wiz-sec/pulumi-azure-nextgen | python | @property
@pulumi.getter
def type(self) -> str:
'\n \n '
return pulumi.get(self, 'type') |
def get_full_page_url(self, page_number, scheme=None):
'Get the full, external URL for this page, optionally with the passed in URL scheme'
args = dict(request.view_args, _external=True)
if (scheme is not None):
args['_scheme'] = scheme
if (page_number != 1):
args['page'] = page_number
return url_for(request.endpoint, **args) | -4,384,661,751,867,713,000 | Get the full, external URL for this page, optionally with the passed in URL scheme | build/lib/littlefish/pager.py | get_full_page_url | michaelwalkerfl/littlefish | python | def get_full_page_url(self, page_number, scheme=None):
args = dict(request.view_args, _external=True)
if (scheme is not None):
args['_scheme'] = scheme
if (page_number != 1):
args['page'] = page_number
return url_for(request.endpoint, **args) |
def get_canonical_url(self, scheme=None):
'Get the canonical page URL'
return self.get_full_page_url(self.page_number, scheme=scheme) | 7,753,665,875,289,943,000 | Get the canonical page URL | build/lib/littlefish/pager.py | get_canonical_url | michaelwalkerfl/littlefish | python | def get_canonical_url(self, scheme=None):
return self.get_full_page_url(self.page_number, scheme=scheme) |
def render_prev_next_links(self, scheme=None):
'Render the rel=prev and rel=next links to a Markup object for injection into a template'
output = ''
if self.has_prev:
output += '<link rel="prev" href="{}" />\n'.format(self.get_full_page_url(self.prev, scheme=scheme))
if self.has_next:
output += '<link rel="next" href="{}" />\n'.format(self.get_full_page_url(self.next, scheme=scheme))
return Markup(output) | 7,284,570,635,124,984,000 | Render the rel=prev and rel=next links to a Markup object for injection into a template | build/lib/littlefish/pager.py | render_prev_next_links | michaelwalkerfl/littlefish | python | def render_prev_next_links(self, scheme=None):
output = ''
if self.has_prev:
output += '<link rel="prev" href="{}" />\n'.format(self.get_full_page_url(self.prev, scheme=scheme))
if self.has_next:
output += '<link rel="next" href="{}" />\n'.format(self.get_full_page_url(self.next, scheme=scheme))
return Markup(output) |
def render_canonical_link(self, scheme=None):
'Render the rel=canonical link to a Markup object for injection into a template'
return Markup('<link rel="canonical" href="{}" />'.format(self.get_canonical_url(scheme=scheme))) | -7,152,500,946,951,668,000 | Render the rel=canonical link to a Markup object for injection into a template | build/lib/littlefish/pager.py | render_canonical_link | michaelwalkerfl/littlefish | python | def render_canonical_link(self, scheme=None):
return Markup('<link rel="canonical" href="{}" />'.format(self.get_canonical_url(scheme=scheme))) |
def render_seo_links(self, scheme=None):
'Render the rel=canonical, rel=prev and rel=next links to a Markup object for injection into a template'
out = self.render_prev_next_links(scheme=scheme)
if (self.total_pages == 1):
out += self.render_canonical_link(scheme=scheme)
return out | 85,314,694,721,360,780 | Render the rel=canonical, rel=prev and rel=next links to a Markup object for injection into a template | build/lib/littlefish/pager.py | render_seo_links | michaelwalkerfl/littlefish | python | def render_seo_links(self, scheme=None):
out = self.render_prev_next_links(scheme=scheme)
if (self.total_pages == 1):
out += self.render_canonical_link(scheme=scheme)
return out |
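
A sketch of wiring the pager's SEO helpers above into a Flask view; the `Pager` class name, route, model, and template are assumptions.

from flask import render_template

@app.route('/articles/', defaults={'page': 1})
@app.route('/articles/page/<int:page>')
def article_list(page):
    pager = Pager(page_size=20, page_number=page, query=Article.query)
    return render_template('article_list.html', pager=pager)

# in article_list.html, inside <head>: {{ pager.render_seo_links() }}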
@property
def first_item_number(self):
'\n :return: The first "item number", used when displaying messages to the user\n like "Displaying items 1 to 10 of 123" - in this example 1 would be returned\n '
return (self.offset + 1) | 3,773,746,073,670,640,000 | :return: The first "item number", used when displaying messages to the user
like "Displaying items 1 to 10 of 123" - in this example 1 would be returned | build/lib/littlefish/pager.py | first_item_number | michaelwalkerfl/littlefish | python | @property
def first_item_number(self):
'\n :return: The first "item number", used when displaying messages to the user\n like "Displaying items 1 to 10 of 123" - in this example 1 would be returned\n '
return (self.offset + 1) |
@property
def last_item_number(self):
'\n :return: The last "item number", used when displaying messages to the user\n like "Displaying items 1 to 10 of 123" - in this example 10 would be returned\n '
n = ((self.first_item_number + self.page_size) - 1)
if (n > self.total_items):
return self.total_items
return n | 2,972,782,625,575,436,300 | :return: The last "item number", used when displaying messages to the user
like "Displaying items 1 to 10 of 123" - in this example 10 would be returned | build/lib/littlefish/pager.py | last_item_number | michaelwalkerfl/littlefish | python | @property
def last_item_number(self):
'\n :return: The last "item number", used when displaying messages to the user\n like "Displaying items 1 to 10 of 123" - in this example 10 would be returned\n '
n = ((self.first_item_number + self.page_size) - 1)
if (n > self.total_items):
return self.total_items
return n |
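
The two properties above exist to support user-facing range messages; continuing the sketch, with `pager` constructed as in the Flask example above:

print('Displaying items {} to {} of {}'.format(
    pager.first_item_number, pager.last_item_number, pager.total_items))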
def __init__(self, page_size, page_number, query, max_pages=12):
'\n :param max_pages: The maximum number of page links to display\n '
super().__init__(page_size, page_number, query)
self.max_pages = max_pages | -3,194,777,528,965,526,000 | :param max_pages: The maximum number of page links to display | build/lib/littlefish/pager.py | __init__ | michaelwalkerfl/littlefish | python | def __init__(self, page_size, page_number, query, max_pages=12):
'\n \n '
super().__init__(page_size, page_number, query)
self.max_pages = max_pages |
async def async_setup_platform(hass, config, add_entities, discovery_info=None):
'Set up the platform.\n\n Deprecated.\n '
_LOGGER.warning('Loading as a platform is no longer supported, convert to use the tplink component.') | 494,254,728,963,673,150 | Set up the platform.
Deprecated. | homeassistant/components/tplink/switch.py | async_setup_platform | ABOTlegacy/home-assistant | python | async def async_setup_platform(hass, config, add_entities, discovery_info=None):
'Set up the platform.\n\n Deprecated.\n '
_LOGGER.warning('Loading as a platform is no longer supported, convert to use the tplink component.') |
def add_entity(device: SmartPlug, async_add_entities):
'Check if device is online and add the entity.'
device.get_sysinfo()
async_add_entities([SmartPlugSwitch(device)], update_before_add=True) | 5,229,561,941,067,430,000 | Check if device is online and add the entity. | homeassistant/components/tplink/switch.py | add_entity | ABOTlegacy/home-assistant | python | def add_entity(device: SmartPlug, async_add_entities):
device.get_sysinfo()
async_add_entities([SmartPlugSwitch(device)], update_before_add=True) |
async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
'Set up switches.'
(await async_add_entities_retry(hass, async_add_entities, hass.data[TPLINK_DOMAIN][CONF_SWITCH], add_entity))
return True | 5,713,788,469,790,789,000 | Set up switches. | homeassistant/components/tplink/switch.py | async_setup_entry | ABOTlegacy/home-assistant | python | async def async_setup_entry(hass: HomeAssistantType, config_entry, async_add_entities):
(await async_add_entities_retry(hass, async_add_entities, hass.data[TPLINK_DOMAIN][CONF_SWITCH], add_entity))
return True |
def __init__(self, smartplug: SmartPlug):
'Initialize the switch.'
self.smartplug = smartplug
self._sysinfo = None
self._state = None
self._available = False
self._emeter_params = {}
self._mac = None
self._alias = None
self._model = None
self._device_id = None | 2,103,936,367,212,845,600 | Initialize the switch. | homeassistant/components/tplink/switch.py | __init__ | ABOTlegacy/home-assistant | python | def __init__(self, smartplug: SmartPlug):
self.smartplug = smartplug
self._sysinfo = None
self._state = None
self._available = False
self._emeter_params = {}
self._mac = None
self._alias = None
self._model = None
self._device_id = None |
@property
def unique_id(self):
'Return a unique ID.'
return self._device_id | -8,516,317,523,930,494,000 | Return a unique ID. | homeassistant/components/tplink/switch.py | unique_id | ABOTlegacy/home-assistant | python | @property
def unique_id(self):
return self._device_id |
@property
def name(self):
'Return the name of the Smart Plug.'
return self._alias | 5,040,068,852,810,455,000 | Return the name of the Smart Plug. | homeassistant/components/tplink/switch.py | name | ABOTlegacy/home-assistant | python | @property
def name(self):
return self._alias |
@property
def device_info(self):
'Return information about the device.'
return {'name': self._alias, 'model': self._model, 'manufacturer': 'TP-Link', 'connections': {(dr.CONNECTION_NETWORK_MAC, self._mac)}, 'sw_version': self._sysinfo['sw_ver']} | -1,675,202,575,605,957,600 | Return information about the device. | homeassistant/components/tplink/switch.py | device_info | ABOTlegacy/home-assistant | python | @property
def device_info(self):
return {'name': self._alias, 'model': self._model, 'manufacturer': 'TP-Link', 'connections': {(dr.CONNECTION_NETWORK_MAC, self._mac)}, 'sw_version': self._sysinfo['sw_ver']} |
@property
def available(self) -> bool:
'Return if switch is available.'
return self._available | 1,696,820,960,810,640,100 | Return if switch is available. | homeassistant/components/tplink/switch.py | available | ABOTlegacy/home-assistant | python | @property
def available(self) -> bool:
return self._available |
@property
def is_on(self):
'Return true if switch is on.'
return self._state | 1,076,728,360,152,047,200 | Return true if switch is on. | homeassistant/components/tplink/switch.py | is_on | ABOTlegacy/home-assistant | python | @property
def is_on(self):
return self._state |
def turn_on(self, **kwargs):
'Turn the switch on.'
self.smartplug.turn_on() | 3,127,283,808,176,620,000 | Turn the switch on. | homeassistant/components/tplink/switch.py | turn_on | ABOTlegacy/home-assistant | python | def turn_on(self, **kwargs):
self.smartplug.turn_on() |
def turn_off(self, **kwargs):
'Turn the switch off.'
self.smartplug.turn_off() | -3,362,160,142,785,076,000 | Turn the switch off. | homeassistant/components/tplink/switch.py | turn_off | ABOTlegacy/home-assistant | python | def turn_off(self, **kwargs):
self.smartplug.turn_off() |
@property
def device_state_attributes(self):
'Return the state attributes of the device.'
return self._emeter_params | 4,651,969,869,013,440,000 | Return the state attributes of the device. | homeassistant/components/tplink/switch.py | device_state_attributes | ABOTlegacy/home-assistant | python | @property
def device_state_attributes(self):
return self._emeter_params |
def update(self):
"Update the TP-Link switch's state."
try:
if (not self._sysinfo):
self._sysinfo = self.smartplug.sys_info
self._mac = self.smartplug.mac
self._model = self.smartplug.model
if (self.smartplug.context is None):
self._alias = self.smartplug.alias
self._device_id = self._mac
else:
self._alias = [child for child in self.smartplug.sys_info['children'] if (child['id'] == self.smartplug.context)][0]['alias']
self._device_id = self.smartplug.context
if (self.smartplug.context is None):
self._state = (self.smartplug.state == self.smartplug.SWITCH_STATE_ON)
else:
self._state = ([child for child in self.smartplug.sys_info['children'] if (child['id'] == self.smartplug.context)][0]['state'] == 1)
if self.smartplug.has_emeter:
emeter_readings = self.smartplug.get_emeter_realtime()
self._emeter_params[ATTR_CURRENT_POWER_W] = '{:.2f}'.format(emeter_readings['power'])
self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = '{:.3f}'.format(emeter_readings['total'])
self._emeter_params[ATTR_VOLTAGE] = '{:.1f}'.format(emeter_readings['voltage'])
self._emeter_params[ATTR_CURRENT_A] = '{:.2f}'.format(emeter_readings['current'])
emeter_statics = self.smartplug.get_emeter_daily()
try:
self._emeter_params[ATTR_TODAY_ENERGY_KWH] = '{:.3f}'.format(emeter_statics[int(time.strftime('%e'))])
except KeyError:
pass
self._available = True
except (SmartDeviceException, OSError) as ex:
if self._available:
_LOGGER.warning('Could not read state for %s: %s', self.smartplug.host, ex)
self._available = False | 8,787,886,636,677,659,000 | Update the TP-Link switch's state. | homeassistant/components/tplink/switch.py | update | ABOTlegacy/home-assistant | python | def update(self):
try:
if (not self._sysinfo):
self._sysinfo = self.smartplug.sys_info
self._mac = self.smartplug.mac
self._model = self.smartplug.model
if (self.smartplug.context is None):
self._alias = self.smartplug.alias
self._device_id = self._mac
else:
self._alias = [child for child in self.smartplug.sys_info['children'] if (child['id'] == self.smartplug.context)][0]['alias']
self._device_id = self.smartplug.context
if (self.smartplug.context is None):
self._state = (self.smartplug.state == self.smartplug.SWITCH_STATE_ON)
else:
self._state = ([child for child in self.smartplug.sys_info['children'] if (child['id'] == self.smartplug.context)][0]['state'] == 1)
if self.smartplug.has_emeter:
emeter_readings = self.smartplug.get_emeter_realtime()
self._emeter_params[ATTR_CURRENT_POWER_W] = '{:.2f}'.format(emeter_readings['power'])
self._emeter_params[ATTR_TOTAL_ENERGY_KWH] = '{:.3f}'.format(emeter_readings['total'])
self._emeter_params[ATTR_VOLTAGE] = '{:.1f}'.format(emeter_readings['voltage'])
self._emeter_params[ATTR_CURRENT_A] = '{:.2f}'.format(emeter_readings['current'])
emeter_statics = self.smartplug.get_emeter_daily()
try:
self._emeter_params[ATTR_TODAY_ENERGY_KWH] = '{:.3f}'.format(emeter_statics[int(time.strftime('%e'))])
except KeyError:
pass
self._available = True
except (SmartDeviceException, OSError) as ex:
if self._available:
_LOGGER.warning('Could not read state for %s: %s', self.smartplug.host, ex)
self._available = False |
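
A hedged sketch of exercising the entity above outside Home Assistant; the host address is a placeholder and a reachable TP-Link device (plus the pyHS100 library) is required.

from pyHS100 import SmartPlug

plug = SmartPlug('192.168.0.10')
entity = SmartPlugSwitch(plug)
entity.update()  # queries sys_info and, when supported, emeter readings
print(entity.name, entity.is_on, entity.device_state_attributes)
entity.turn_on()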
def get_constant_vars() -> Dict:
'\n Get a dictionary of constant environment variables.\n '
result = {}
for (name, members) in CONSTANT_ENVIRONMENT_VARS.items():
members = {k: v(is_immutable=True) for (k, v) in members.items()}
result[name] = StructDefinition(name, members, is_immutable=True)
return result | -2,572,485,749,946,484,000 | Get a dictionary of constant environment variables. | vyper/semantics/environment.py | get_constant_vars | GDGSNF/vyper | python | def get_constant_vars() -> Dict:
'\n \n '
result = {}
for (name, members) in CONSTANT_ENVIRONMENT_VARS.items():
members = {k: v(is_immutable=True) for (k, v) in members.items()}
result[name] = StructDefinition(name, members, is_immutable=True)
return result |
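
A quick sketch of inspecting the mapping returned above; the exact names come from vyper's CONSTANT_ENVIRONMENT_VARS (e.g. 'block', 'msg', 'tx').

consts = get_constant_vars()
for name, struct in consts.items():
    print(name, type(struct).__name__)  # each value is an immutable StructDefinition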
def get_mutable_vars() -> Dict:
'\n Get a dictionary of mutable environment variables (those that are\n modified during the course of contract execution, such as `self`).\n '
return {name: type_(is_immutable=True) for (name, type_) in MUTABLE_ENVIRONMENT_VARS.items()} | 8,367,429,888,535,597,000 | Get a dictionary of mutable environment variables (those that are
modified during the course of contract execution, such as `self`). | vyper/semantics/environment.py | get_mutable_vars | GDGSNF/vyper | python | def get_mutable_vars() -> Dict:
'\n Get a dictionary of mutable environment variables (those that are\n modified during the course of contract execution, such as `self`).\n '
return {name: type_(is_immutable=True) for (name, type_) in MUTABLE_ENVIRONMENT_VARS.items()} |
@pytest.mark.skipif((not gdal_version.at_least((1, 11))), reason='Requires GDAL >= 1.11')
@pytest.mark.skipif((not has_driver), reason='Requires {} driver'.format(driver))
def test_read_topojson(data_dir):
"Test reading a TopoJSON file\n\n The TopoJSON support in GDAL is a little unpredictable. In some versions\n the geometries or properties aren't parsed correctly. Here we just check\n that we can open the file, get the right number of features out, and\n that they have a geometry and some properties. See GH#722.\n "
with fiona.open(os.path.join(data_dir, 'example.topojson'), 'r') as collection:
features = list(collection)
assert (len(features) == 3), 'unexpected number of features'
for feature in features:
assert isinstance(feature['properties'], OrderedDict)
assert (len(feature['properties']) > 0)
assert (feature['geometry']['type'] in {'Point', 'LineString', 'Polygon'}) | -3,518,232,574,399,921,000 | Test reading a TopoJSON file
The TopoJSON support in GDAL is a little unpredictable. In some versions
the geometries or properties aren't parsed correctly. Here we just check
that we can open the file, get the right number of features out, and
that they have a geometry and some properties. See GH#722. | tests/test_topojson.py | test_read_topojson | HirniMeshram1/Fiona | python | @pytest.mark.skipif((not gdal_version.at_least((1, 11))), reason='Requires GDAL >= 1.11')
@pytest.mark.skipif((not has_driver), reason='Requires {} driver'.format(driver))
def test_read_topojson(data_dir):
"Test reading a TopoJSON file\n\n The TopoJSON support in GDAL is a little unpredictable. In some versions\n the geometries or properties aren't parsed correctly. Here we just check\n that we can open the file, get the right number of features out, and\n that they have a geometry and some properties. See GH#722.\n "
with fiona.open(os.path.join(data_dir, 'example.topojson'), 'r') as collection:
features = list(collection)
assert (len(features) == 3), 'unexpected number of features'
for feature in features:
assert isinstance(feature['properties'], OrderedDict)
assert (len(feature['properties']) > 0)
assert (feature['geometry']['type'] in {'Point', 'LineString', 'Polygon'}) |
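
A minimal sketch of the read pattern the test above exercises; the path is a placeholder and the GDAL TopoJSON driver must be available.

import fiona

with fiona.open('example.topojson') as src:
    for feat in src:
        print(feat['geometry']['type'], dict(feat['properties']))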
@deprecated
def get_log_file_path() -> Optional[str]:
'DEPRECATED: Use get_latest_log_file instead.'
return log_file_path | -8,588,523,497,299,603,000 | DEPRECATED: Use get_latest_log_file instead. | aw_core/log.py | get_log_file_path | minhlt9196/activeseconds-aw-core | python | @deprecated
def get_log_file_path() -> Optional[str]:
return log_file_path |