| Column | Dtype | Min | Max |
|---|---|---|---|
| body_hash | string (length) | 64 | 64 |
| body | string (length) | 23 | 109k |
| docstring | string (length) | 1 | 57k |
| path | string (length) | 4 | 198 |
| name | string (length) | 1 | 115 |
| repository_name | string (length) | 7 | 111 |
| repository_stars | float64 (value) | 0 | 191k |
| lang | string (1 class) | python | python |
| body_without_docstring | string (length) | 14 | 108k |
| unified | string (length) | 45 | 133k |
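The records below repeat these ten fields in order, one field per `|`-separated block, so they never render as a single table row per record. Purely as an illustration of the schema (this snippet is not part of the dataset or its tooling, and the parsing into dicts is assumed to have happened elsewhere), here is how a record with these column names might be represented and filtered in Python:

```python
from typing import Dict, List

# One parsed record; only the column names come from the schema above.
Record = Dict[str, object]

def python_records_with_stars(records: List[Record], min_stars: float = 1.0) -> List[Record]:
    """Keep records whose lang is 'python' and whose repository has at least min_stars stars."""
    return [
        r for r in records
        if r.get("lang") == "python"
        and float(r.get("repository_stars", 0.0)) >= min_stars
    ]

# Stub record shaped like the first row below (strings truncated for brevity):
sample = {
    "body_hash": "f3c29b476c19...",
    "name": "read_data_list",
    "path": "FBEM/postproc.py",
    "repository_name": "icemtel/stokes",
    "repository_stars": 0.0,
    "lang": "python",
}
print(python_records_with_stars([sample]))  # -> [] because 0.0 < 1.0
```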
f3c29b476c194fce7eeaabc23a4bf9825bd9ad78a2696f7b8b3a805ba5fb6a1e
|
def read_data_list(self, names):
'\n :return: List of ResultsData objects, corresponding to each object name\n '
if self.is_hdf5:
with h5py.File(self.path, 'r') as file:
return extract_data_by_names_hdf5(names, file, self.group)
else:
return extract_data_by_names(names, self.path)
|
:return: List of ResultsData objects, corresponding to each object name
|
FBEM/postproc.py
|
read_data_list
|
icemtel/stokes
| 0 |
python
|
def read_data_list(self, names):
'\n \n '
if self.is_hdf5:
with h5py.File(self.path, 'r') as file:
return extract_data_by_names_hdf5(names, file, self.group)
else:
return extract_data_by_names(names, self.path)
|
def read_data_list(self, names):
'\n \n '
if self.is_hdf5:
with h5py.File(self.path, 'r') as file:
return extract_data_by_names_hdf5(names, file, self.group)
else:
return extract_data_by_names(names, self.path)<|docstring|>:return: List of ResultsData objects, corresponding to each object name<|endoftext|>
|
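Judging from the rows in this dump, the `unified` field appears to be the `body_without_docstring` text followed by the `docstring` text wrapped in `<|docstring|>` and `<|endoftext|>` markers. A minimal sketch of that composition, stated as an observation from the records rather than a documented specification:

```python
def build_unified(body_without_docstring: str, docstring: str) -> str:
    # Mirrors the layout visible in the rows: the code, then the docstring
    # wrapped between <|docstring|> and <|endoftext|> markers.
    return f"{body_without_docstring}<|docstring|>{docstring}<|endoftext|>"
```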
3b6fc6349f3bc6e799ee4f7acf81344e8ec3b684c746bc0ca361b8e90ff1f115
|
def url_resolver(url):
'Resolve url for both documentation and Github online.\n\n If the url is an IPython notebook links to the correct path.\n\n Args:\n url: the path to the link (not always a full url)\n\n Returns:\n a local url to either the documentation or the Github\n\n '
if (url[(- 6):] == '.ipynb'):
return (url[4:(- 6)] + '.html')
else:
return url
|
Resolve url for both documentation and Github online.
If the url is an IPython notebook links to the correct path.
Args:
url: the path to the link (not always a full url)
Returns:
a local url to either the documentation or the Github
|
doc/conf.py
|
url_resolver
|
faical-yannick-congo/sso-backend
| 0 |
python
|
def url_resolver(url):
'Resolve url for both documentation and Github online.\n\n If the url is an IPython notebook links to the correct path.\n\n Args:\n url: the path to the link (not always a full url)\n\n Returns:\n a local url to either the documentation or the Github\n\n '
if (url[(- 6):] == '.ipynb'):
return (url[4:(- 6)] + '.html')
else:
return url
|
def url_resolver(url):
'Resolve url for both documentation and Github online.\n\n If the url is an IPython notebook links to the correct path.\n\n Args:\n url: the path to the link (not always a full url)\n\n Returns:\n a local url to either the documentation or the Github\n\n '
if (url[(- 6):] == '.ipynb'):
return (url[4:(- 6)] + '.html')
else:
return url<|docstring|>Resolve url for both documentation and Github online.
If the url is an IPython notebook links to the correct path.
Args:
url: the path to the link (not always a full url)
Returns:
a local url to either the documentation or the Github<|endoftext|>
|
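To make the slicing in `url_resolver` above concrete: for a path ending in `.ipynb`, the first four characters and the `.ipynb` suffix are dropped and `.html` is appended; anything else is returned unchanged. A quick check with a made-up input path (the path is illustrative only):

```python
def url_resolver(url):
    # Same logic as in the record above, reproduced here for the example.
    if url[-6:] == '.ipynb':
        return url[4:-6] + '.html'
    return url

assert url_resolver('doc/notebooks/demo.ipynb') == 'notebooks/demo.html'
assert url_resolver('https://example.org/page') == 'https://example.org/page'
```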
9205e86947f052be46eed60b59d4fdd8b37f8d42c62f8dd04ea070b44e6cf880
|
def transform(self, data_block):
'\n Compare the job IDs between the jobs acquired from both sides. Output the\n comparison results, as # jobs in both, # jobs in factory/Nersc only, # jobs\n running on Nersc, and # jobs in factory without an ID.\n '
self.logger.debug('in CompareNerscFactoryJobs transform')
nersc_df = data_block['Nersc_Job_Info']
factory_df = data_block['job_manifests']
factory_df = factory_df[factory_df.GridResource.str.startswith('batch slurm')]
results = {'both.count': 0, 'nersc_only.count': 0, 'factory_only.count': 0, 'factory_no_ID.count': 0, 'nersc.running.count': 0}
if factory_df.empty:
if nersc_df.empty:
pass
else:
results['nersc_only.count'] = len(nersc_df)
for (_index, row) in nersc_df.iterrows():
result_key = ((((((('nersc' + '.') + row['hostname']) + '.') + row['queue']) + '.') + row['user']) + '.count')
if (result_key in results):
results[result_key] += 1
else:
results[result_key] = 1
else:
num_no_id = 0
num_with_id = 0
factory_id_list = []
for (index, row) in factory_df.iterrows():
if (('GridJobID' not in row.axes[0].tolist()) or pd.isnull(row.GridJobID)):
num_no_id += 1
else:
line = row['GridJobID'].split(' ')
if (len(line) == 3):
num_no_id += 1
factory_df.loc[(index, 'GridJobID')] = None
elif (len(line) == 4):
num_with_id += 1
factory_df.loc[(index, 'GridJobID')] = line[(- 1)].split('/')[(- 1)]
factory_id_list.append(line[(- 1)].split('/')[(- 1)])
results['factory_no_ID.count'] = num_no_id
if nersc_df.empty:
if (num_with_id == 0):
pass
else:
results['factory_only.count'] = num_with_id
else:
for (_index, row) in nersc_df.iterrows():
result_key = ((((((('nersc' + '.') + row['hostname']) + '.') + row['queue']) + '.') + row['user']) + '.count')
if (result_key in results):
results[result_key] += 1
else:
results[result_key] = 1
if (num_with_id == 0):
results['factory_only.count'] = len(nersc_df)
else:
nersc_id_list = nersc_df.jobid.tolist()
factory_id_set = set(factory_id_list)
nersc_id_set = set(nersc_id_list)
in_both_set = (factory_id_set & nersc_id_set)
factory_only_set = (factory_id_set - nersc_id_set)
nersc_only_set = (nersc_id_set - factory_id_set)
num_in_both = len(in_both_set)
num_in_factory = len(factory_only_set)
num_in_nersc = len(nersc_only_set)
results['both.count'] = num_in_both
results['nersc_only.count'] = num_in_nersc
results['factory_only.count'] = num_in_factory
for (_index, row) in nersc_df.iterrows():
if (row['status'] == 'R'):
results['nersc.running.count'] += 1
return {'nersc_factory_jobs_comparison': results}
|
Compare the job IDs between the jobs acquired from both sides. Output the
comparison results, as # jobs in both, # jobs in factory/Nersc only, # jobs
running on Nersc, and # jobs in factory without an ID.
|
src/decisionengine_modules/NERSC/transforms/CompareNerscFactoryJobs.py
|
transform
|
goodenou/decisionengine_modules
| 2 |
python
|
def transform(self, data_block):
'\n Compare the job IDs between the jobs acquired from both sides. Output the\n comparison results, as # jobs in both, # jobs in factory/Nersc only, # jobs\n running on Nersc, and # jobs in factory without an ID.\n '
self.logger.debug('in CompareNerscFactoryJobs transform')
nersc_df = data_block['Nersc_Job_Info']
factory_df = data_block['job_manifests']
factory_df = factory_df[factory_df.GridResource.str.startswith('batch slurm')]
results = {'both.count': 0, 'nersc_only.count': 0, 'factory_only.count': 0, 'factory_no_ID.count': 0, 'nersc.running.count': 0}
if factory_df.empty:
if nersc_df.empty:
pass
else:
results['nersc_only.count'] = len(nersc_df)
for (_index, row) in nersc_df.iterrows():
result_key = ((((((('nersc' + '.') + row['hostname']) + '.') + row['queue']) + '.') + row['user']) + '.count')
if (result_key in results):
results[result_key] += 1
else:
results[result_key] = 1
else:
num_no_id = 0
num_with_id = 0
factory_id_list = []
for (index, row) in factory_df.iterrows():
if (('GridJobID' not in row.axes[0].tolist()) or pd.isnull(row.GridJobID)):
num_no_id += 1
else:
line = row['GridJobID'].split(' ')
if (len(line) == 3):
num_no_id += 1
factory_df.loc[(index, 'GridJobID')] = None
elif (len(line) == 4):
num_with_id += 1
factory_df.loc[(index, 'GridJobID')] = line[(- 1)].split('/')[(- 1)]
factory_id_list.append(line[(- 1)].split('/')[(- 1)])
results['factory_no_ID.count'] = num_no_id
if nersc_df.empty:
if (num_with_id == 0):
pass
else:
results['factory_only.count'] = num_with_id
else:
for (_index, row) in nersc_df.iterrows():
result_key = ((((((('nersc' + '.') + row['hostname']) + '.') + row['queue']) + '.') + row['user']) + '.count')
if (result_key in results):
results[result_key] += 1
else:
results[result_key] = 1
if (num_with_id == 0):
results['factory_only.count'] = len(nersc_df)
else:
nersc_id_list = nersc_df.jobid.tolist()
factory_id_set = set(factory_id_list)
nersc_id_set = set(nersc_id_list)
in_both_set = (factory_id_set & nersc_id_set)
factory_only_set = (factory_id_set - nersc_id_set)
nersc_only_set = (nersc_id_set - factory_id_set)
num_in_both = len(in_both_set)
num_in_factory = len(factory_only_set)
num_in_nersc = len(nersc_only_set)
results['both.count'] = num_in_both
results['nersc_only.count'] = num_in_nersc
results['factory_only.count'] = num_in_factory
for (_index, row) in nersc_df.iterrows():
if (row['status'] == 'R'):
results['nersc.running.count'] += 1
return {'nersc_factory_jobs_comparison': results}
|
def transform(self, data_block):
'\n Compare the job IDs between the jobs acquired from both sides. Output the\n comparison results, as # jobs in both, # jobs in factory/Nersc only, # jobs\n running on Nersc, and # jobs in factory without an ID.\n '
self.logger.debug('in CompareNerscFactoryJobs transform')
nersc_df = data_block['Nersc_Job_Info']
factory_df = data_block['job_manifests']
factory_df = factory_df[factory_df.GridResource.str.startswith('batch slurm')]
results = {'both.count': 0, 'nersc_only.count': 0, 'factory_only.count': 0, 'factory_no_ID.count': 0, 'nersc.running.count': 0}
if factory_df.empty:
if nersc_df.empty:
pass
else:
results['nersc_only.count'] = len(nersc_df)
for (_index, row) in nersc_df.iterrows():
result_key = ((((((('nersc' + '.') + row['hostname']) + '.') + row['queue']) + '.') + row['user']) + '.count')
if (result_key in results):
results[result_key] += 1
else:
results[result_key] = 1
else:
num_no_id = 0
num_with_id = 0
factory_id_list = []
for (index, row) in factory_df.iterrows():
if (('GridJobID' not in row.axes[0].tolist()) or pd.isnull(row.GridJobID)):
num_no_id += 1
else:
line = row['GridJobID'].split(' ')
if (len(line) == 3):
num_no_id += 1
factory_df.loc[(index, 'GridJobID')] = None
elif (len(line) == 4):
num_with_id += 1
factory_df.loc[(index, 'GridJobID')] = line[(- 1)].split('/')[(- 1)]
factory_id_list.append(line[(- 1)].split('/')[(- 1)])
results['factory_no_ID.count'] = num_no_id
if nersc_df.empty:
if (num_with_id == 0):
pass
else:
results['factory_only.count'] = num_with_id
else:
for (_index, row) in nersc_df.iterrows():
result_key = ((((((('nersc' + '.') + row['hostname']) + '.') + row['queue']) + '.') + row['user']) + '.count')
if (result_key in results):
results[result_key] += 1
else:
results[result_key] = 1
if (num_with_id == 0):
results['factory_only.count'] = len(nersc_df)
else:
nersc_id_list = nersc_df.jobid.tolist()
factory_id_set = set(factory_id_list)
nersc_id_set = set(nersc_id_list)
in_both_set = (factory_id_set & nersc_id_set)
factory_only_set = (factory_id_set - nersc_id_set)
nersc_only_set = (nersc_id_set - factory_id_set)
num_in_both = len(in_both_set)
num_in_factory = len(factory_only_set)
num_in_nersc = len(nersc_only_set)
results['both.count'] = num_in_both
results['nersc_only.count'] = num_in_nersc
results['factory_only.count'] = num_in_factory
for (_index, row) in nersc_df.iterrows():
if (row['status'] == 'R'):
results['nersc.running.count'] += 1
return {'nersc_factory_jobs_comparison': results}<|docstring|>Compare the job IDs between the jobs acquired from both sides. Output the
comparison results, as # jobs in both, # jobs in factory/Nersc only, # jobs
running on Nersc, and # jobs in factory without an ID.<|endoftext|>
|
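A small readability aside on the record above (an equivalent rewrite for illustration, not a change to the dataset): the heavily parenthesised string concatenation that builds `result_key` amounts to a single f-string:

```python
def make_result_key(row: dict) -> str:
    # Equivalent to the chained concatenation in CompareNerscFactoryJobs.transform.
    return f"nersc.{row['hostname']}.{row['queue']}.{row['user']}.count"

# Illustrative values only:
print(make_result_key({"hostname": "host1", "queue": "q1", "user": "user1"}))
# -> nersc.host1.q1.user1.count
```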
e0e898b409fdb0ad560ecb796fb34aa9d879a1076588f65a7c87dace0540b9d5
|
def race_summary(request):
'Race summary statistics'
northEastLat = request.GET.get('neLat')
northEastLon = request.GET.get('neLon', [])
southWestLat = request.GET.get('swLat', [])
southWestLon = request.GET.get('swLon', [])
geoids = get_censustract_geoids(request, northEastLat, northEastLon, southWestLat, southWestLon)
if geoids:
query = Census2010RaceStats.objects.filter(geoid_id__in=geoids)
return query
else:
return HttpResponseBadRequest('Missing geoid or county')
|
Race summary statistics
|
institutions/censusdata/views.py
|
race_summary
|
sephcoster/mapusaurus
| 0 |
python
|
def race_summary(request):
northEastLat = request.GET.get('neLat')
northEastLon = request.GET.get('neLon', [])
southWestLat = request.GET.get('swLat', [])
southWestLon = request.GET.get('swLon', [])
geoids = get_censustract_geoids(request, northEastLat, northEastLon, southWestLat, southWestLon)
if geoids:
query = Census2010RaceStats.objects.filter(geoid_id__in=geoids)
return query
else:
return HttpResponseBadRequest('Missing geoid or county')
|
def race_summary(request):
northEastLat = request.GET.get('neLat')
northEastLon = request.GET.get('neLon', [])
southWestLat = request.GET.get('swLat', [])
southWestLon = request.GET.get('swLon', [])
geoids = get_censustract_geoids(request, northEastLat, northEastLon, southWestLat, southWestLon)
if geoids:
query = Census2010RaceStats.objects.filter(geoid_id__in=geoids)
return query
else:
return HttpResponseBadRequest('Missing geoid or county')<|docstring|>Race summary statistics<|endoftext|>
|
d0f57efbb84b0df36d3cb95197545304ee705e4a8ce28ca67f83a4a1a8136c5c
|
@pytest.fixture(scope='function', autouse=True)
def watchdog_not_running(self, platform_api_conn):
' Fixture that automatically runs on each test case and\n verifies that watchdog is not running before the test begins\n and disables it after the test ends'
assert (not watchdog.is_armed(platform_api_conn))
try:
(yield)
finally:
watchdog.disarm(platform_api_conn)
|
Fixture that automatically runs on each test case and
verifies that watchdog is not running before the test begins
and disables it after the test ends
|
tests/platform_tests/api/test_watchdog.py
|
watchdog_not_running
|
yozhao101/sonic-mgmt
| 1 |
python
|
@pytest.fixture(scope='function', autouse=True)
def watchdog_not_running(self, platform_api_conn):
' Fixture that automatically runs on each test case and\n verifies that watchdog is not running before the test begins\n and disables it after the test ends'
assert (not watchdog.is_armed(platform_api_conn))
try:
(yield)
finally:
watchdog.disarm(platform_api_conn)
|
@pytest.fixture(scope='function', autouse=True)
def watchdog_not_running(self, platform_api_conn):
' Fixture that automatically runs on each test case and\n verifies that watchdog is not running before the test begins\n and disables it after the test ends'
assert (not watchdog.is_armed(platform_api_conn))
try:
(yield)
finally:
watchdog.disarm(platform_api_conn)<|docstring|>Fixture that automatically runs on each test case and
verifies that watchdog is not running before the test begins
and disables it after the test ends<|endoftext|>
|
7bfde2b2bbdba1d4c6d7a4fba5b6a5142fe1a92f1b507381be9b851a8a2fff41
|
@pytest.fixture(scope='module')
def conf(self, request, duthost):
' Reads the watchdog test configuration file @TEST_CONFIG_FILE and\n results in a dictionary which holds parameters for test '
test_config = None
with open(TEST_CONFIG_FILE) as stream:
test_config = yaml.safe_load(stream)
config = test_config['default']
platform = duthost.facts['platform']
hwsku = duthost.facts['hwsku']
if ((platform in test_config) and ('default' in test_config[platform])):
config.update(test_config[platform]['default'])
if ((platform in test_config) and (hwsku in test_config[platform])):
config.update(test_config[platform][hwsku])
assert ('valid_timeout' in config)
assert (config['valid_timeout'] > (TEST_WAIT_TIME_SECONDS * 2))
return config
|
Reads the watchdog test configuration file @TEST_CONFIG_FILE and
results in a dictionary which holds parameters for test
|
tests/platform_tests/api/test_watchdog.py
|
conf
|
yozhao101/sonic-mgmt
| 1 |
python
|
@pytest.fixture(scope='module')
def conf(self, request, duthost):
' Reads the watchdog test configuration file @TEST_CONFIG_FILE and\n results in a dictionary which holds parameters for test '
test_config = None
with open(TEST_CONFIG_FILE) as stream:
test_config = yaml.safe_load(stream)
config = test_config['default']
platform = duthost.facts['platform']
hwsku = duthost.facts['hwsku']
if ((platform in test_config) and ('default' in test_config[platform])):
config.update(test_config[platform]['default'])
if ((platform in test_config) and (hwsku in test_config[platform])):
config.update(test_config[platform][hwsku])
assert ('valid_timeout' in config)
assert (config['valid_timeout'] > (TEST_WAIT_TIME_SECONDS * 2))
return config
|
@pytest.fixture(scope='module')
def conf(self, request, duthost):
' Reads the watchdog test configuration file @TEST_CONFIG_FILE and\n results in a dictionary which holds parameters for test '
test_config = None
with open(TEST_CONFIG_FILE) as stream:
test_config = yaml.safe_load(stream)
config = test_config['default']
platform = duthost.facts['platform']
hwsku = duthost.facts['hwsku']
if ((platform in test_config) and ('default' in test_config[platform])):
config.update(test_config[platform]['default'])
if ((platform in test_config) and (hwsku in test_config[platform])):
config.update(test_config[platform][hwsku])
assert ('valid_timeout' in config)
assert (config['valid_timeout'] > (TEST_WAIT_TIME_SECONDS * 2))
return config<|docstring|>Reads the watchdog test configuration file @TEST_CONFIG_FILE and
results in a dictionary which holds parameters for test<|endoftext|>
|
1b1e6bc8919bc395dad091f51dc360a8ba0d12ef24b50fa394a35f0fd2f845e5
|
def test_arm_disarm_states(self, duthost, localhost, platform_api_conn, conf):
' arm watchdog with a valid timeout value, verify it is in armed state,\n disarm watchdog and verify it is in disarmed state\n '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout != (- 1))
assert (actual_timeout >= watchdog_timeout)
assert watchdog.is_armed(platform_api_conn)
assert watchdog.disarm(platform_api_conn)
assert (not watchdog.is_armed(platform_api_conn))
res = localhost.wait_for(host=duthost.hostname, port=22, state='stopped', delay=5, timeout=(watchdog_timeout + TIMEOUT_DEVIATION), module_ignore_errors=True)
assert ('exception' in res)
|
arm watchdog with a valid timeout value, verify it is in armed state,
disarm watchdog and verify it is in disarmed state
|
tests/platform_tests/api/test_watchdog.py
|
test_arm_disarm_states
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_arm_disarm_states(self, duthost, localhost, platform_api_conn, conf):
' arm watchdog with a valid timeout value, verify it is in armed state,\n disarm watchdog and verify it is in disarmed state\n '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout != (- 1))
assert (actual_timeout >= watchdog_timeout)
assert watchdog.is_armed(platform_api_conn)
assert watchdog.disarm(platform_api_conn)
assert (not watchdog.is_armed(platform_api_conn))
res = localhost.wait_for(host=duthost.hostname, port=22, state='stopped', delay=5, timeout=(watchdog_timeout + TIMEOUT_DEVIATION), module_ignore_errors=True)
assert ('exception' in res)
|
def test_arm_disarm_states(self, duthost, localhost, platform_api_conn, conf):
' arm watchdog with a valid timeout value, verify it is in armed state,\n disarm watchdog and verify it is in disarmed state\n '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout != (- 1))
assert (actual_timeout >= watchdog_timeout)
assert watchdog.is_armed(platform_api_conn)
assert watchdog.disarm(platform_api_conn)
assert (not watchdog.is_armed(platform_api_conn))
res = localhost.wait_for(host=duthost.hostname, port=22, state='stopped', delay=5, timeout=(watchdog_timeout + TIMEOUT_DEVIATION), module_ignore_errors=True)
assert ('exception' in res)<|docstring|>arm watchdog with a valid timeout value, verify it is in armed state,
disarm watchdog and verify it is in disarmed state<|endoftext|>
|
570b2c65bb48628d63e4b80a5b151f00d68acb6a7b995fb6274945ec61d5734b
|
def test_remaining_time(self, duthost, platform_api_conn, conf):
' arm watchdog with a valid timeout and verify that remaining time API works correctly '
watchdog_timeout = conf['valid_timeout']
assert (watchdog.get_remaining_time(platform_api_conn) == (- 1))
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
assert (remaining_time > 0)
assert (remaining_time <= actual_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
time.sleep(TEST_WAIT_TIME_SECONDS)
assert (watchdog.get_remaining_time(platform_api_conn) < remaining_time)
|
arm watchdog with a valid timeout and verify that remaining time API works correctly
|
tests/platform_tests/api/test_watchdog.py
|
test_remaining_time
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_remaining_time(self, duthost, platform_api_conn, conf):
' '
watchdog_timeout = conf['valid_timeout']
assert (watchdog.get_remaining_time(platform_api_conn) == (- 1))
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
assert (remaining_time > 0)
assert (remaining_time <= actual_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
time.sleep(TEST_WAIT_TIME_SECONDS)
assert (watchdog.get_remaining_time(platform_api_conn) < remaining_time)
|
def test_remaining_time(self, duthost, platform_api_conn, conf):
' '
watchdog_timeout = conf['valid_timeout']
assert (watchdog.get_remaining_time(platform_api_conn) == (- 1))
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
assert (remaining_time > 0)
assert (remaining_time <= actual_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
time.sleep(TEST_WAIT_TIME_SECONDS)
assert (watchdog.get_remaining_time(platform_api_conn) < remaining_time)<|docstring|>arm watchdog with a valid timeout and verify that remaining time API works correctly<|endoftext|>
|
ac6c4eab426be8d4701098571688372967ed942aa2f59a2fbb5be16e6daa3f49
|
def test_periodic_arm(self, duthost, platform_api_conn, conf):
' arm watchdog several times as watchdog deamon would and verify API behaves correctly '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
time.sleep(TEST_WAIT_TIME_SECONDS)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_new = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == actual_timeout_new)
assert (watchdog.get_remaining_time(platform_api_conn) > remaining_time)
|
arm watchdog several times as watchdog deamon would and verify API behaves correctly
|
tests/platform_tests/api/test_watchdog.py
|
test_periodic_arm
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_periodic_arm(self, duthost, platform_api_conn, conf):
' '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
time.sleep(TEST_WAIT_TIME_SECONDS)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_new = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == actual_timeout_new)
assert (watchdog.get_remaining_time(platform_api_conn) > remaining_time)
|
def test_periodic_arm(self, duthost, platform_api_conn, conf):
' '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
time.sleep(TEST_WAIT_TIME_SECONDS)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_new = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == actual_timeout_new)
assert (watchdog.get_remaining_time(platform_api_conn) > remaining_time)<|docstring|>arm watchdog several times as watchdog deamon would and verify API behaves correctly<|endoftext|>
|
387243c8e7a08e9c6c88f067712773f519279327c3b7ca34d73755b87b352a8f
|
def test_arm_different_timeout_greater(self, duthost, platform_api_conn, conf):
' arm the watchdog with greater timeout value and verify new timeout was accepted;\n If platform accepts only single valid timeout value, @greater_timeout should be None.\n '
watchdog_timeout = conf['valid_timeout']
watchdog_timeout_greater = conf['greater_timeout']
if (watchdog_timeout_greater is None):
pytest.skip('"greater_timeout" parameter is required for this test case')
actual_timeout_second = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_second_second = watchdog.arm(platform_api_conn, watchdog_timeout_greater)
assert (actual_timeout_second < actual_timeout_second_second)
assert (watchdog.get_remaining_time(platform_api_conn) > remaining_time)
|
arm the watchdog with greater timeout value and verify new timeout was accepted;
If platform accepts only single valid timeout value, @greater_timeout should be None.
|
tests/platform_tests/api/test_watchdog.py
|
test_arm_different_timeout_greater
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_arm_different_timeout_greater(self, duthost, platform_api_conn, conf):
' arm the watchdog with greater timeout value and verify new timeout was accepted;\n If platform accepts only single valid timeout value, @greater_timeout should be None.\n '
watchdog_timeout = conf['valid_timeout']
watchdog_timeout_greater = conf['greater_timeout']
if (watchdog_timeout_greater is None):
pytest.skip('"greater_timeout" parameter is required for this test case')
actual_timeout_second = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_second_second = watchdog.arm(platform_api_conn, watchdog_timeout_greater)
assert (actual_timeout_second < actual_timeout_second_second)
assert (watchdog.get_remaining_time(platform_api_conn) > remaining_time)
|
def test_arm_different_timeout_greater(self, duthost, platform_api_conn, conf):
' arm the watchdog with greater timeout value and verify new timeout was accepted;\n If platform accepts only single valid timeout value, @greater_timeout should be None.\n '
watchdog_timeout = conf['valid_timeout']
watchdog_timeout_greater = conf['greater_timeout']
if (watchdog_timeout_greater is None):
pytest.skip('"greater_timeout" parameter is required for this test case')
actual_timeout_second = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_second_second = watchdog.arm(platform_api_conn, watchdog_timeout_greater)
assert (actual_timeout_second < actual_timeout_second_second)
assert (watchdog.get_remaining_time(platform_api_conn) > remaining_time)<|docstring|>arm the watchdog with greater timeout value and verify new timeout was accepted;
If platform accepts only single valid timeout value, @greater_timeout should be None.<|endoftext|>
|
6d736be2b86d8877d70e798129963e00d8868047b48ed1bd163526d5ab1933e9
|
def test_arm_different_timeout_smaller(self, duthost, platform_api_conn, conf):
' arm the watchdog with smaller timeout value and verify new timeout was accepted;\n If platform accepts only single valid timeout value, @greater_timeout should be None.\n '
watchdog_timeout = conf['greater_timeout']
if (watchdog_timeout is None):
pytest.skip('"greater_timeout" parameter is required for this test case')
watchdog_timeout_smaller = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_smaller = watchdog.arm(platform_api_conn, watchdog_timeout_smaller)
assert (actual_timeout > actual_timeout_smaller)
assert (watchdog.get_remaining_time(platform_api_conn) < remaining_time)
|
arm the watchdog with smaller timeout value and verify new timeout was accepted;
If platform accepts only single valid timeout value, @greater_timeout should be None.
|
tests/platform_tests/api/test_watchdog.py
|
test_arm_different_timeout_smaller
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_arm_different_timeout_smaller(self, duthost, platform_api_conn, conf):
' arm the watchdog with smaller timeout value and verify new timeout was accepted;\n If platform accepts only single valid timeout value, @greater_timeout should be None.\n '
watchdog_timeout = conf['greater_timeout']
if (watchdog_timeout is None):
pytest.skip('"greater_timeout" parameter is required for this test case')
watchdog_timeout_smaller = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_smaller = watchdog.arm(platform_api_conn, watchdog_timeout_smaller)
assert (actual_timeout > actual_timeout_smaller)
assert (watchdog.get_remaining_time(platform_api_conn) < remaining_time)
|
def test_arm_different_timeout_smaller(self, duthost, platform_api_conn, conf):
' arm the watchdog with smaller timeout value and verify new timeout was accepted;\n If platform accepts only single valid timeout value, @greater_timeout should be None.\n '
watchdog_timeout = conf['greater_timeout']
if (watchdog_timeout is None):
pytest.skip('"greater_timeout" parameter is required for this test case')
watchdog_timeout_smaller = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
remaining_time = watchdog.get_remaining_time(platform_api_conn)
actual_timeout_smaller = watchdog.arm(platform_api_conn, watchdog_timeout_smaller)
assert (actual_timeout > actual_timeout_smaller)
assert (watchdog.get_remaining_time(platform_api_conn) < remaining_time)<|docstring|>arm the watchdog with smaller timeout value and verify new timeout was accepted;
If platform accepts only single valid timeout value, @greater_timeout should be None.<|endoftext|>
|
5f02b710e2f542763516f48c495bb76488da750f98917d2b81b247966fe1d7bd
|
def test_arm_too_big_timeout(self, duthost, platform_api_conn, conf):
' try to arm the watchdog with timeout that is too big for hardware watchdog;\n If no such limitation exist, @too_big_timeout should be None for such platform.\n '
watchdog_timeout = conf['too_big_timeout']
if (watchdog_timeout is None):
pytest.skip('"too_big_timeout" parameter is required for this test case')
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == (- 1))
|
try to arm the watchdog with timeout that is too big for hardware watchdog;
If no such limitation exist, @too_big_timeout should be None for such platform.
|
tests/platform_tests/api/test_watchdog.py
|
test_arm_too_big_timeout
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_arm_too_big_timeout(self, duthost, platform_api_conn, conf):
' try to arm the watchdog with timeout that is too big for hardware watchdog;\n If no such limitation exist, @too_big_timeout should be None for such platform.\n '
watchdog_timeout = conf['too_big_timeout']
if (watchdog_timeout is None):
pytest.skip('"too_big_timeout" parameter is required for this test case')
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == (- 1))
|
def test_arm_too_big_timeout(self, duthost, platform_api_conn, conf):
' try to arm the watchdog with timeout that is too big for hardware watchdog;\n If no such limitation exist, @too_big_timeout should be None for such platform.\n '
watchdog_timeout = conf['too_big_timeout']
if (watchdog_timeout is None):
pytest.skip('"too_big_timeout" parameter is required for this test case')
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == (- 1))<|docstring|>try to arm the watchdog with timeout that is too big for hardware watchdog;
If no such limitation exist, @too_big_timeout should be None for such platform.<|endoftext|>
|
506383a8b331ebbe8fd613eff286a6876aa28f3f6980e18ad274cd31c32ba8fa
|
def test_arm_negative_timeout(self, duthost, platform_api_conn):
' try to arm the watchdog with negative value '
watchdog_timeout = (- 1)
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == (- 1))
|
try to arm the watchdog with negative value
|
tests/platform_tests/api/test_watchdog.py
|
test_arm_negative_timeout
|
yozhao101/sonic-mgmt
| 1 |
python
|
def test_arm_negative_timeout(self, duthost, platform_api_conn):
' '
watchdog_timeout = (- 1)
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == (- 1))
|
def test_arm_negative_timeout(self, duthost, platform_api_conn):
' '
watchdog_timeout = (- 1)
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout == (- 1))<|docstring|>try to arm the watchdog with negative value<|endoftext|>
|
05adddc38df03e3c5183c7c93e2a1a4a7504e502808426ce38585835871351c2
|
@pytest.mark.disable_loganalyzer
def test_reboot(self, duthost, localhost, platform_api_conn, conf):
' arm the watchdog and verify it did its job after timeout expiration '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout != (- 1))
res = localhost.wait_for(host=duthost.hostname, port=22, state='stopped', delay=2, timeout=(actual_timeout + TIMEOUT_DEVIATION), module_ignore_errors=True)
assert ('exception' not in res)
res = localhost.wait_for(host=duthost.hostname, port=22, state='started', delay=10, timeout=120, module_ignore_errors=True)
assert ('exception' not in res)
time.sleep(120)
|
arm the watchdog and verify it did its job after timeout expiration
|
tests/platform_tests/api/test_watchdog.py
|
test_reboot
|
yozhao101/sonic-mgmt
| 1 |
python
|
@pytest.mark.disable_loganalyzer
def test_reboot(self, duthost, localhost, platform_api_conn, conf):
' '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout != (- 1))
res = localhost.wait_for(host=duthost.hostname, port=22, state='stopped', delay=2, timeout=(actual_timeout + TIMEOUT_DEVIATION), module_ignore_errors=True)
assert ('exception' not in res)
res = localhost.wait_for(host=duthost.hostname, port=22, state='started', delay=10, timeout=120, module_ignore_errors=True)
assert ('exception' not in res)
time.sleep(120)
|
@pytest.mark.disable_loganalyzer
def test_reboot(self, duthost, localhost, platform_api_conn, conf):
' '
watchdog_timeout = conf['valid_timeout']
actual_timeout = watchdog.arm(platform_api_conn, watchdog_timeout)
assert (actual_timeout != (- 1))
res = localhost.wait_for(host=duthost.hostname, port=22, state='stopped', delay=2, timeout=(actual_timeout + TIMEOUT_DEVIATION), module_ignore_errors=True)
assert ('exception' not in res)
res = localhost.wait_for(host=duthost.hostname, port=22, state='started', delay=10, timeout=120, module_ignore_errors=True)
assert ('exception' not in res)
time.sleep(120)<|docstring|>arm the watchdog and verify it did its job after timeout expiration<|endoftext|>
|
fd9a686fa9fabc0d5348f75514725ad4133cb62e681e2bd8f118f7434550935d
|
def __init__(self, dataload_obj=None, bkgrd_subtraction_dict=None, data_dict_idx=None, overview=True, sputter_time=None, offval=0, plotflag=True, plotspan=True, plot_legend=None, normalize_subtraction=False, name=None, spectra_colors=None, **kws):
'Class for holding the spectra objects taken on a sample\n\n All of the elemental spectra will be stored in this sample object. There is \n the option of loading data from multiple different sources. Additionally, this \n can serve as a general purpose spectra holder\n\n Parameters\n ----------\n dataload_obj : object holding the data\n Can be loaded from a vamas file, excel export data, or "derk object"\n bkgrd_subtraction_dict : dict, optional\n dictionary holding parameters for background subtraction.\n Sample object can be instantiated without this, but it helps to have it when doing\n backgroudn subtraction on spectra. Although it can be passed directly to bg_sub_all\n (Default is None)\n data_dict_idx: int, optional\n For Excel load, incase multiple sample sheets are found\n (default is None).\n overview : bool\n Whether or now to automatically generate plots of spectra\n offval: int, optional\n Offset value to stackscans\n plotflag: bool\n Whether or not to plot the data\n (default is True)\n plotspan: bool\n Whether or not to plot the background subtraction limits\n (default is False)\n plot_legend:\n normalize_subtraction\n name: str, optional\n option to name the sample\n spectra_colors: dict, optional\n option to specify spectra colors \n\n\n\n **kws : dict, optional\n Additional keyword arguments \n\n Notes\n -----\n\n\n Examples\n --------\n\n\n '
self.sample_name = name
self.rsf = cfg.avantage_sensitivity_factors()
self.offval = offval
self.plotflag = plotflag
self.plotspan = plotspan
self.normalize_subtraction = normalize_subtraction
self.plot_legend = plot_legend
self.sputter_time = sputter_time
if (bkgrd_subtraction_dict is None):
self.bg_info = cfg.bkgrd_subtraction
else:
self.bg_info = bkgrd_subtraction_dict
if (spectra_colors is None):
self.spectra_colors = cfg.spectra_colors()
else:
self.spectra_colors = spectra_colors
if (type(dataload_obj) == XPyS.VAMAS.VAMASExperiment):
self.load_experiment_sample_from_vamas(dataload_obj)
elif ((type(dataload_obj) == str) or (type(dataload_obj) == dict)):
self.load_experiment_sample_from_excel(dataload_obj, name)
if (dataload_obj is None):
overview = False
if overview:
self.xps_overview(bkgrd_subtraction_dict=bkgrd_subtraction_dict, plotflag=plotflag, plotspan=plotspan)
|
Class for holding the spectra objects taken on a sample
All of the elemental spectra will be stored in this sample object. There is
the option of loading data from multiple different sources. Additionally, this
can serve as a general purpose spectra holder
Parameters
----------
dataload_obj : object holding the data
Can be loaded from a vamas file, excel export data, or "derk object"
bkgrd_subtraction_dict : dict, optional
dictionary holding parameters for background subtraction.
Sample object can be instantiated without this, but it helps to have it when doing
backgroudn subtraction on spectra. Although it can be passed directly to bg_sub_all
(Default is None)
data_dict_idx: int, optional
For Excel load, incase multiple sample sheets are found
(default is None).
overview : bool
Whether or now to automatically generate plots of spectra
offval: int, optional
Offset value to stackscans
plotflag: bool
Whether or not to plot the data
(default is True)
plotspan: bool
Whether or not to plot the background subtraction limits
(default is False)
plot_legend:
normalize_subtraction
name: str, optional
option to name the sample
spectra_colors: dict, optional
option to specify spectra colors
**kws : dict, optional
Additional keyword arguments
Notes
-----
Examples
--------
|
sample.py
|
__init__
|
cassberk/xps_peakfit
| 1 |
python
|
def __init__(self, dataload_obj=None, bkgrd_subtraction_dict=None, data_dict_idx=None, overview=True, sputter_time=None, offval=0, plotflag=True, plotspan=True, plot_legend=None, normalize_subtraction=False, name=None, spectra_colors=None, **kws):
'Class for holding the spectra objects taken on a sample\n\n All of the elemental spectra will be stored in this sample object. There is \n the option of loading data from multiple different sources. Additionally, this \n can serve as a general purpose spectra holder\n\n Parameters\n ----------\n dataload_obj : object holding the data\n Can be loaded from a vamas file, excel export data, or "derk object"\n bkgrd_subtraction_dict : dict, optional\n dictionary holding parameters for background subtraction.\n Sample object can be instantiated without this, but it helps to have it when doing\n backgroudn subtraction on spectra. Although it can be passed directly to bg_sub_all\n (Default is None)\n data_dict_idx: int, optional\n For Excel load, incase multiple sample sheets are found\n (default is None).\n overview : bool\n Whether or now to automatically generate plots of spectra\n offval: int, optional\n Offset value to stackscans\n plotflag: bool\n Whether or not to plot the data\n (default is True)\n plotspan: bool\n Whether or not to plot the background subtraction limits\n (default is False)\n plot_legend:\n normalize_subtraction\n name: str, optional\n option to name the sample\n spectra_colors: dict, optional\n option to specify spectra colors \n\n\n\n **kws : dict, optional\n Additional keyword arguments \n\n Notes\n -----\n\n\n Examples\n --------\n\n\n '
self.sample_name = name
self.rsf = cfg.avantage_sensitivity_factors()
self.offval = offval
self.plotflag = plotflag
self.plotspan = plotspan
self.normalize_subtraction = normalize_subtraction
self.plot_legend = plot_legend
self.sputter_time = sputter_time
if (bkgrd_subtraction_dict is None):
self.bg_info = cfg.bkgrd_subtraction
else:
self.bg_info = bkgrd_subtraction_dict
if (spectra_colors is None):
self.spectra_colors = cfg.spectra_colors()
else:
self.spectra_colors = spectra_colors
if (type(dataload_obj) == XPyS.VAMAS.VAMASExperiment):
self.load_experiment_sample_from_vamas(dataload_obj)
elif ((type(dataload_obj) == str) or (type(dataload_obj) == dict)):
self.load_experiment_sample_from_excel(dataload_obj, name)
if (dataload_obj is None):
overview = False
if overview:
self.xps_overview(bkgrd_subtraction_dict=bkgrd_subtraction_dict, plotflag=plotflag, plotspan=plotspan)
|
def __init__(self, dataload_obj=None, bkgrd_subtraction_dict=None, data_dict_idx=None, overview=True, sputter_time=None, offval=0, plotflag=True, plotspan=True, plot_legend=None, normalize_subtraction=False, name=None, spectra_colors=None, **kws):
'Class for holding the spectra objects taken on a sample\n\n All of the elemental spectra will be stored in this sample object. There is \n the option of loading data from multiple different sources. Additionally, this \n can serve as a general purpose spectra holder\n\n Parameters\n ----------\n dataload_obj : object holding the data\n Can be loaded from a vamas file, excel export data, or "derk object"\n bkgrd_subtraction_dict : dict, optional\n dictionary holding parameters for background subtraction.\n Sample object can be instantiated without this, but it helps to have it when doing\n backgroudn subtraction on spectra. Although it can be passed directly to bg_sub_all\n (Default is None)\n data_dict_idx: int, optional\n For Excel load, incase multiple sample sheets are found\n (default is None).\n overview : bool\n Whether or now to automatically generate plots of spectra\n offval: int, optional\n Offset value to stackscans\n plotflag: bool\n Whether or not to plot the data\n (default is True)\n plotspan: bool\n Whether or not to plot the background subtraction limits\n (default is False)\n plot_legend:\n normalize_subtraction\n name: str, optional\n option to name the sample\n spectra_colors: dict, optional\n option to specify spectra colors \n\n\n\n **kws : dict, optional\n Additional keyword arguments \n\n Notes\n -----\n\n\n Examples\n --------\n\n\n '
self.sample_name = name
self.rsf = cfg.avantage_sensitivity_factors()
self.offval = offval
self.plotflag = plotflag
self.plotspan = plotspan
self.normalize_subtraction = normalize_subtraction
self.plot_legend = plot_legend
self.sputter_time = sputter_time
if (bkgrd_subtraction_dict is None):
self.bg_info = cfg.bkgrd_subtraction
else:
self.bg_info = bkgrd_subtraction_dict
if (spectra_colors is None):
self.spectra_colors = cfg.spectra_colors()
else:
self.spectra_colors = spectra_colors
if (type(dataload_obj) == XPyS.VAMAS.VAMASExperiment):
self.load_experiment_sample_from_vamas(dataload_obj)
elif ((type(dataload_obj) == str) or (type(dataload_obj) == dict)):
self.load_experiment_sample_from_excel(dataload_obj, name)
if (dataload_obj is None):
overview = False
if overview:
self.xps_overview(bkgrd_subtraction_dict=bkgrd_subtraction_dict, plotflag=plotflag, plotspan=plotspan)<|docstring|>Class for holding the spectra objects taken on a sample
All of the elemental spectra will be stored in this sample object. There is
the option of loading data from multiple different sources. Additionally, this
can serve as a general purpose spectra holder
Parameters
----------
dataload_obj : object holding the data
Can be loaded from a vamas file, excel export data, or "derk object"
bkgrd_subtraction_dict : dict, optional
dictionary holding parameters for background subtraction.
Sample object can be instantiated without this, but it helps to have it when doing
backgroudn subtraction on spectra. Although it can be passed directly to bg_sub_all
(Default is None)
data_dict_idx: int, optional
For Excel load, incase multiple sample sheets are found
(default is None).
overview : bool
Whether or now to automatically generate plots of spectra
offval: int, optional
Offset value to stackscans
plotflag: bool
Whether or not to plot the data
(default is True)
plotspan: bool
Whether or not to plot the background subtraction limits
(default is False)
plot_legend:
normalize_subtraction
name: str, optional
option to name the sample
spectra_colors: dict, optional
option to specify spectra colors
**kws : dict, optional
Additional keyword arguments
Notes
-----
Examples
--------<|endoftext|>
|
329cdcb4ab61479c19016233777f9ddc93336ea5b44d166bacf96b07805e680e
|
def xps_overview(self, subpars=None, plotflag=True, plotspan=True, analyze=True):
'Returns an overview of the spectra that are held in sample.\n\n The overview plots all of the raw signals, the background subtracted\n signals and the calculated atomic percent\n\n Parameters\n ----------\n subpars : dict\n Optional: ability to enter custom background subtraction parameters. If\n None it will use the default in the config file.\n plotflag : bool\n option to plot or not\n plotspan : bool\n If True it will highlight on the plot the range that is backgroudn subtracted\n analyze : bool\n Choose whether or not to calculate the background subtraction and atomic percent\n or to just plot what was loaded\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n\n '
if (subpars != None):
for orb in subpars.keys():
self.bg_info[orb] = subpars[orb]
if (analyze == True):
self.bg_sub_all()
self.calc_atomic_percent()
self.plotflag = plotflag
if (self.plotflag == True):
fig_dict = {}
ax_dict = {}
(self.fig, self.ax) = self.plot_all_spectra(offval=self.offval, plotspan=self.plotspan, saveflag=0, filepath='', figdim=(15, 10))
(self.fig_sub, self.ax_sub) = self.plot_all_sub(offval=self.offval)
(self.fig_atp, self.ax_atp) = self.plot_atomic_percent()
|
Returns an overview of the spectra that are held in sample.
The overview plots all of the raw signals, the background subtracted
signals and the calculated atomic percent
Parameters
----------
subpars : dict
Optional: ability to enter custom background subtraction parameters. If
None it will use the default in the config file.
plotflag : bool
option to plot or not
plotspan : bool
If True it will highlight on the plot the range that is backgroudn subtracted
analyze : bool
Choose whether or not to calculate the background subtraction and atomic percent
or to just plot what was loaded
Returns
-------
matplotlib figures : fig
Multi-plot plots of the data.
See Also
--------
:func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()
|
sample.py
|
xps_overview
|
cassberk/xps_peakfit
| 1 |
python
|
def xps_overview(self, subpars=None, plotflag=True, plotspan=True, analyze=True):
'Returns an overview of the spectra that are held in sample.\n\n The overview plots all of the raw signals, the background subtracted\n signals and the calculated atomic percent\n\n Parameters\n ----------\n subpars : dict\n Optional: ability to enter custom background subtraction parameters. If\n None it will use the default in the config file.\n plotflag : bool\n option to plot or not\n plotspan : bool\n If True it will highlight on the plot the range that is backgroudn subtracted\n analyze : bool\n Choose whether or not to calculate the background subtraction and atomic percent\n or to just plot what was loaded\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n\n '
if (subpars != None):
for orb in subpars.keys():
self.bg_info[orb] = subpars[orb]
if (analyze == True):
self.bg_sub_all()
self.calc_atomic_percent()
self.plotflag = plotflag
if (self.plotflag == True):
fig_dict = {}
ax_dict = {}
(self.fig, self.ax) = self.plot_all_spectra(offval=self.offval, plotspan=self.plotspan, saveflag=0, filepath='', figdim=(15, 10))
(self.fig_sub, self.ax_sub) = self.plot_all_sub(offval=self.offval)
(self.fig_atp, self.ax_atp) = self.plot_atomic_percent()
|
def xps_overview(self, subpars=None, plotflag=True, plotspan=True, analyze=True):
'Returns an overview of the spectra that are held in sample.\n\n The overview plots all of the raw signals, the background subtracted\n signals and the calculated atomic percent\n\n Parameters\n ----------\n subpars : dict\n Optional: ability to enter custom background subtraction parameters. If\n None it will use the default in the config file.\n plotflag : bool\n option to plot or not\n plotspan : bool\n If True it will highlight on the plot the range that is backgroudn subtracted\n analyze : bool\n Choose whether or not to calculate the background subtraction and atomic percent\n or to just plot what was loaded\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n\n '
if (subpars != None):
for orb in subpars.keys():
self.bg_info[orb] = subpars[orb]
if (analyze == True):
self.bg_sub_all()
self.calc_atomic_percent()
self.plotflag = plotflag
if (self.plotflag == True):
fig_dict = {}
ax_dict = {}
(self.fig, self.ax) = self.plot_all_spectra(offval=self.offval, plotspan=self.plotspan, saveflag=0, filepath='', figdim=(15, 10))
(self.fig_sub, self.ax_sub) = self.plot_all_sub(offval=self.offval)
(self.fig_atp, self.ax_atp) = self.plot_atomic_percent()<|docstring|>Returns an overview of the spectra that are held in sample.
The overview plots all of the raw signals, the background subtracted
signals and the calculated atomic percent
Parameters
----------
subpars : dict
Optional: ability to enter custom background subtraction parameters. If
None it will use the default in the config file.
plotflag : bool
option to plot or not
plotspan : bool
If True it will highlight on the plot the range that is backgroudn subtracted
analyze : bool
Choose whether or not to calculate the background subtraction and atomic percent
or to just plot what was loaded
Returns
-------
matplotlib figures : fig
Multi-plot plots of the data.
See Also
--------
:func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()<|endoftext|>
|
376b3eb541d8ee0fd1b77c078152486e9352e6e80418f42cc27b5357abb816bc
|
def bg_sub_all(self):
'Backgrounds subtraction on all the scans in all the spectra objects.\n\n Calls spectra.bg_sub() on all spectra \n\n Parameters\n ----------\n\n Returns\n -------\n\n See Also\n --------\n :func: spectra.bg_sub()\n '
for spectra in self.element_scans:
if (not (spectra in ['XPS', 'Valence', 'vb', 'Survey'])):
print(self.bg_info[spectra])
self.__dict__[spectra].bg_sub(subpars=self.bg_info[spectra])
self.bg_info[spectra] = self.__dict__[spectra].bg_info
|
Backgrounds subtraction on all the scans in all the spectra objects.
Calls spectra.bg_sub() on all spectra
Parameters
----------
Returns
-------
See Also
--------
:func: spectra.bg_sub()
|
sample.py
|
bg_sub_all
|
cassberk/xps_peakfit
| 1 |
python
|
def bg_sub_all(self):
'Backgrounds subtraction on all the scans in all the spectra objects.\n\n Calls spectra.bg_sub() on all spectra \n\n Parameters\n ----------\n\n Returns\n -------\n\n See Also\n --------\n :func: spectra.bg_sub()\n '
for spectra in self.element_scans:
if (not (spectra in ['XPS', 'Valence', 'vb', 'Survey'])):
print(self.bg_info[spectra])
self.__dict__[spectra].bg_sub(subpars=self.bg_info[spectra])
self.bg_info[spectra] = self.__dict__[spectra].bg_info
|
def bg_sub_all(self):
'Backgrounds subtraction on all the scans in all the spectra objects.\n\n Calls spectra.bg_sub() on all spectra \n\n Parameters\n ----------\n\n Returns\n -------\n\n See Also\n --------\n :func: spectra.bg_sub()\n '
for spectra in self.element_scans:
if (not (spectra in ['XPS', 'Valence', 'vb', 'Survey'])):
print(self.bg_info[spectra])
self.__dict__[spectra].bg_sub(subpars=self.bg_info[spectra])
self.bg_info[spectra] = self.__dict__[spectra].bg_info<|docstring|>Backgrounds subtraction on all the scans in all the spectra objects.
Calls spectra.bg_sub() on all spectra
Parameters
----------
Returns
-------
See Also
--------
:func: spectra.bg_sub()<|endoftext|>
|
177c6a423091f610e3974d0763696c7490b7a231eb198192f6e5d6ebfe8b9395
|
def plot_all_spectra(self, offval=0, plotspan=False, saveflag=0, filepath='', fig=None, ax=None, figdim=None, done_it=False):
'Plots all of the raw spectra data in the sample object. It will plot the spectra from highest to lowest binding energy\n \n\n Parameters\n ----------\n\n Returns\n -------\n matplotlib figure : fig\n dict : matplotlib axes \n Subplots of all the Raw spectra data. Keys are the orbital names. values are the matplotlib axes object\n\n '
if ((fig is None) and (ax is None)):
if (figdim is None):
figdim = (15, (int(np.ceil(((len(self.element_scans) + 2) / 3))) * 4))
(fig, ax) = plt.subplots(int(np.ceil(((len(self.element_scans) + 2) / 3))), 3, figsize=figdim)
ax = ax.ravel()
orderlist = [(orbital, np.max(self.__dict__[orbital].E)) for orbital in self.element_scans]
orderlist.sort(key=(lambda x: x[1]))
orderlist = (['Survey', 'Valence'] + [spec[0] for spec in orderlist][::(- 1)])
ax = {orb[1]: ax[orb[0]] for orb in enumerate(orderlist)}
for spectra in self.all_scans:
for i in range(len(self.__dict__[spectra].I)):
ax[spectra].plot(self.__dict__[spectra].E, (self.__dict__[spectra].I[i] + (i * offval)), label=spectra, color=self.spectra_colors[spectra])
ax[spectra].set_title(spectra, fontsize=24)
ax[spectra].set_xlim(max(self.__dict__[spectra].E), min(self.__dict__[spectra].E))
ax[spectra].set_xlabel('Binding Energy (eV)', fontsize=20)
ax[spectra].set_ylabel('Counts/s', fontsize=20)
ax[spectra].tick_params(labelsize=16)
ax[spectra].tick_params(labelsize=16)
if ((plotspan == True) and (spectra in self.element_scans)):
if (self.bg_info[spectra][1] == 'shirley'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='orange')
elif (self.bg_info[spectra][1] == 'linear'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='green')
elif (self.bg_info[spectra][1] == 'UT2'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='blue')
fig.tight_layout(pad=2)
if (saveflag == True):
plt.savefig(filepath, bbox_inches='tight')
return (fig, ax)
|
Plots all of the raw spectra data in the sample object. It will plot the spectra from highest to lowest binding energy
Parameters
----------
Returns
-------
matplotlib figure : fig
dict : matplotlib axes
Subplots of all the Raw spectra data. Keys are the orbital names. values are the matplotlib axes object
|
sample.py
|
plot_all_spectra
|
cassberk/xps_peakfit
| 1 |
python
|
def plot_all_spectra(self, offval=0, plotspan=False, saveflag=0, filepath='', fig=None, ax=None, figdim=None, done_it=False):
'Plots all of the raw spectra data in the sample object. It will plot the spectra from highest to lowest binding energy\n \n\n Parameters\n ----------\n\n Returns\n -------\n matplotlib figure : fig\n dict : matplotlib axes \n Subplots of all the Raw spectra data. Keys are the orbital names. values are the matplotlib axes object\n\n '
if ((fig is None) and (ax is None)):
if (figdim is None):
figdim = (15, (int(np.ceil(((len(self.element_scans) + 2) / 3))) * 4))
(fig, ax) = plt.subplots(int(np.ceil(((len(self.element_scans) + 2) / 3))), 3, figsize=figdim)
ax = ax.ravel()
orderlist = [(orbital, np.max(self.__dict__[orbital].E)) for orbital in self.element_scans]
orderlist.sort(key=(lambda x: x[1]))
orderlist = (['Survey', 'Valence'] + [spec[0] for spec in orderlist][::(- 1)])
ax = {orb[1]: ax[orb[0]] for orb in enumerate(orderlist)}
for spectra in self.all_scans:
for i in range(len(self.__dict__[spectra].I)):
ax[spectra].plot(self.__dict__[spectra].E, (self.__dict__[spectra].I[i] + (i * offval)), label=spectra, color=self.spectra_colors[spectra])
ax[spectra].set_title(spectra, fontsize=24)
ax[spectra].set_xlim(max(self.__dict__[spectra].E), min(self.__dict__[spectra].E))
ax[spectra].set_xlabel('Binding Energy (eV)', fontsize=20)
ax[spectra].set_ylabel('Counts/s', fontsize=20)
ax[spectra].tick_params(labelsize=16)
ax[spectra].tick_params(labelsize=16)
if ((plotspan == True) and (spectra in self.element_scans)):
if (self.bg_info[spectra][1] == 'shirley'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='orange')
elif (self.bg_info[spectra][1] == 'linear'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='green')
elif (self.bg_info[spectra][1] == 'UT2'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='blue')
fig.tight_layout(pad=2)
if (saveflag == True):
plt.savefig(filepath, bbox_inches='tight')
return (fig, ax)
|
def plot_all_spectra(self, offval=0, plotspan=False, saveflag=0, filepath='', fig=None, ax=None, figdim=None, done_it=False):
'Plots all of the raw spectra data in the sample object. It will plot the spectra from highest to lowest binding energy\n \n\n Parameters\n ----------\n\n Returns\n -------\n matplotlib figure : fig\n dict : matplotlib axes \n Subplots of all the Raw spectra data. Keys are the orbital names. values are the matplotlib axes object\n\n '
if ((fig is None) and (ax is None)):
if (figdim is None):
figdim = (15, (int(np.ceil(((len(self.element_scans) + 2) / 3))) * 4))
(fig, ax) = plt.subplots(int(np.ceil(((len(self.element_scans) + 2) / 3))), 3, figsize=figdim)
ax = ax.ravel()
orderlist = [(orbital, np.max(self.__dict__[orbital].E)) for orbital in self.element_scans]
orderlist.sort(key=(lambda x: x[1]))
orderlist = (['Survey', 'Valence'] + [spec[0] for spec in orderlist][::(- 1)])
ax = {orb[1]: ax[orb[0]] for orb in enumerate(orderlist)}
for spectra in self.all_scans:
for i in range(len(self.__dict__[spectra].I)):
ax[spectra].plot(self.__dict__[spectra].E, (self.__dict__[spectra].I[i] + (i * offval)), label=spectra, color=self.spectra_colors[spectra])
ax[spectra].set_title(spectra, fontsize=24)
ax[spectra].set_xlim(max(self.__dict__[spectra].E), min(self.__dict__[spectra].E))
ax[spectra].set_xlabel('Binding Energy (eV)', fontsize=20)
ax[spectra].set_ylabel('Counts/s', fontsize=20)
ax[spectra].tick_params(labelsize=16)
ax[spectra].tick_params(labelsize=16)
if ((plotspan == True) and (spectra in self.element_scans)):
if (self.bg_info[spectra][1] == 'shirley'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='orange')
elif (self.bg_info[spectra][1] == 'linear'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='green')
elif (self.bg_info[spectra][1] == 'UT2'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='blue')
fig.tight_layout(pad=2)
if (saveflag == True):
plt.savefig(filepath, bbox_inches='tight')
return (fig, ax)<|docstring|>Plots all of the raw spectra data in the sample object. It will plot the spectra from highest to lowest binding energy
Parameters
----------
Returns
-------
matplotlib figure : fig
dict : matplotlib axes
Subplots of all the Raw spectra data. Keys are the orbital names. values are the matplotlib axes object<|endoftext|>
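A hedged usage sketch for the plot_all_spectra record above; it assumes `sample` is an already-loaded object of the class this method belongs to, with its scans, background windows and colour map populated, and the output path is purely illustrative.
# Stack repeated scans by 500 counts/s, shade the background-fit windows,
# and save the grid of subplots.
fig, ax = sample.plot_all_spectra(offval=500, plotspan=True,
                                  saveflag=True, filepath='all_spectra.png')
ax['Survey'].set_ylim(bottom=0)  # the returned axes dict is keyed by orbital name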
|
3036209b2b0f70136d55bbc5b300c824af22074c5d3d0a0db5f355a94e9bc298
|
def plot_all_sub(self, offval=0):
'Plots all of the background subtracted spectra\n\n Parameters\n ----------\n offval : int,float\n Will stack each spectra by offval value\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n '
(fig, ax) = plt.subplots(int(np.ceil((len(self.element_scans) / 2))), 2, figsize=(15, 15))
ax = ax.ravel()
orderlist = [(orbital, np.max(self.__dict__[orbital].E)) for orbital in self.element_scans]
orderlist.sort(key=(lambda x: x[1]))
orderlist = [spec[0] for spec in orderlist][::(- 1)]
ax = {orb[1]: ax[orb[0]] for orb in enumerate(orderlist)}
for spectra in self.element_scans:
for i in range(len(self.__dict__[spectra].I)):
ax[spectra].plot(self.__dict__[spectra].esub, (self.__dict__[spectra].isub[i] + (offval * i)), label=spectra, color=self.spectra_colors[spectra])
ax[spectra].set_title(spectra, fontsize=24)
ax[spectra].set_xlim(max(self.__dict__[spectra].esub), min(self.__dict__[spectra].esub))
ax[spectra].set_xlabel('Binding Energy (eV)', fontsize=20)
ax[spectra].set_ylabel('Counts/s', fontsize=20)
ax[spectra].tick_params(labelsize=16)
ax[spectra].tick_params(labelsize=16)
if (self.bg_info[spectra][1] == 'shirley'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='orange')
elif (self.bg_info[spectra][1] == 'linear'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='green')
elif (self.bg_info[spectra][1] == 'UT2'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='blue')
fig.tight_layout()
return (fig, ax)
|
Plots all of the background subtracted spectra
Parameters
----------
offval : int,float
Will stack each spectra by offval value
Returns
-------
matplotlib figures : fig
Multi-plot plots of the data.
See Also
--------
:func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()
|
sample.py
|
plot_all_sub
|
cassberk/xps_peakfit
| 1 |
python
|
def plot_all_sub(self, offval=0):
'Plots all of the background subtracted spectra\n\n Parameters\n ----------\n offval : int,float\n Will stack each spectra by offval value\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n '
(fig, ax) = plt.subplots(int(np.ceil((len(self.element_scans) / 2))), 2, figsize=(15, 15))
ax = ax.ravel()
orderlist = [(orbital, np.max(self.__dict__[orbital].E)) for orbital in self.element_scans]
orderlist.sort(key=(lambda x: x[1]))
orderlist = [spec[0] for spec in orderlist][::(- 1)]
ax = {orb[1]: ax[orb[0]] for orb in enumerate(orderlist)}
for spectra in self.element_scans:
for i in range(len(self.__dict__[spectra].I)):
ax[spectra].plot(self.__dict__[spectra].esub, (self.__dict__[spectra].isub[i] + (offval * i)), label=spectra, color=self.spectra_colors[spectra])
ax[spectra].set_title(spectra, fontsize=24)
ax[spectra].set_xlim(max(self.__dict__[spectra].esub), min(self.__dict__[spectra].esub))
ax[spectra].set_xlabel('Binding Energy (eV)', fontsize=20)
ax[spectra].set_ylabel('Counts/s', fontsize=20)
ax[spectra].tick_params(labelsize=16)
ax[spectra].tick_params(labelsize=16)
if (self.bg_info[spectra][1] == 'shirley'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='orange')
elif (self.bg_info[spectra][1] == 'linear'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='green')
elif (self.bg_info[spectra][1] == 'UT2'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='blue')
fig.tight_layout()
return (fig, ax)
|
def plot_all_sub(self, offval=0):
'Plots all of the background subtracted spectra\n\n Parameters\n ----------\n offval : int,float\n Will stack each spectra by offval value\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n '
(fig, ax) = plt.subplots(int(np.ceil((len(self.element_scans) / 2))), 2, figsize=(15, 15))
ax = ax.ravel()
orderlist = [(orbital, np.max(self.__dict__[orbital].E)) for orbital in self.element_scans]
orderlist.sort(key=(lambda x: x[1]))
orderlist = [spec[0] for spec in orderlist][::(- 1)]
ax = {orb[1]: ax[orb[0]] for orb in enumerate(orderlist)}
for spectra in self.element_scans:
for i in range(len(self.__dict__[spectra].I)):
ax[spectra].plot(self.__dict__[spectra].esub, (self.__dict__[spectra].isub[i] + (offval * i)), label=spectra, color=self.spectra_colors[spectra])
ax[spectra].set_title(spectra, fontsize=24)
ax[spectra].set_xlim(max(self.__dict__[spectra].esub), min(self.__dict__[spectra].esub))
ax[spectra].set_xlabel('Binding Energy (eV)', fontsize=20)
ax[spectra].set_ylabel('Counts/s', fontsize=20)
ax[spectra].tick_params(labelsize=16)
ax[spectra].tick_params(labelsize=16)
if (self.bg_info[spectra][1] == 'shirley'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='orange')
elif (self.bg_info[spectra][1] == 'linear'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='green')
elif (self.bg_info[spectra][1] == 'UT2'):
ax[spectra].axvspan(np.min(self.bg_info[spectra][0]), np.max(self.bg_info[spectra][0]), alpha=0.1, color='blue')
fig.tight_layout()
return (fig, ax)<|docstring|>Plots all of the background subtracted spectra
Parameters
----------
offval : int,float
Will stack each spectra by offval value
Returns
-------
matplotlib figures : fig
Multi-plot plots of the data.
See Also
--------
:func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()<|endoftext|>
|
c1583374a937007f159dad27288b58e55218be80983bb917979a75acdb30431c
|
def plot_atomic_percent(self, infig=None, inax=None, **pltkwargs):
'Plots the atomic percent\n\n Parameters\n ----------\n infig: matplotlib.fig\n Pass in a figure to plot multiple samples on same graph\n inax: matplotlib.axes\n Pass in axes to plot multiple samples on same graph\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n '
if ((infig == None) and (inax == None)):
(fig, ax) = plt.subplots(figsize=(18, 8))
else:
fig = infig
ax = inax
x = np.arange(len(self.__dict__['C1s'].atomic_percent))
if (self.sputter_time is not None):
x = (x * self.sputter_time)
for spectra in self.atomic_percent.keys():
ax.plot(x, (self.atomic_percent[spectra] * 100), color=self.spectra_colors[spectra], linewidth=3, **pltkwargs)
if (self.sputter_time is not None):
ax.set_xlabel('Sputter Time', fontsize=30)
else:
ax.set_xlabel('Position', fontsize=30)
ax.set_ylabel('Atomic Percent', fontsize=30)
ax.tick_params(labelsize=20)
ax.set_xticks(x)
ax.set_ylim(ymin=0)
if hasattr(self.__dict__['C1s'], 'positions'):
ax.set_xticklabels(self.__dict__['C1s'].positions, rotation=80)
ax.legend(list(self.atomic_percent.keys()), bbox_to_anchor=(0.85, 0.4, 0.5, 0.5), loc='lower center', fontsize=20)
return (fig, ax)
|
Plots the atomic percent
Parameters
----------
infig: matplotlib.fig
Pass in a figure to plot multiple samples on same graph
inax: matplotlib.axes
Pass in axes to plot multiple samples on same graph
Returns
-------
matplotlib figures : fig
Multi-plot plots of the data.
See Also
--------
:func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()
|
sample.py
|
plot_atomic_percent
|
cassberk/xps_peakfit
| 1 |
python
|
def plot_atomic_percent(self, infig=None, inax=None, **pltkwargs):
'Plots the atomic percent\n\n Parameters\n ----------\n infig: matplotlib.fig\n Pass in a figure to plot multiple samples on same graph\n inax: matplotlib.axes\n Pass in axes to plot multiple samples on same graph\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n '
if ((infig == None) and (inax == None)):
(fig, ax) = plt.subplots(figsize=(18, 8))
else:
fig = infig
ax = inax
x = np.arange(len(self.__dict__['C1s'].atomic_percent))
if (self.sputter_time is not None):
x = (x * self.sputter_time)
for spectra in self.atomic_percent.keys():
ax.plot(x, (self.atomic_percent[spectra] * 100), color=self.spectra_colors[spectra], linewidth=3, **pltkwargs)
if (self.sputter_time is not None):
ax.set_xlabel('Sputter Time', fontsize=30)
else:
ax.set_xlabel('Position', fontsize=30)
ax.set_ylabel('Atomic Percent', fontsize=30)
ax.tick_params(labelsize=20)
ax.set_xticks(x)
ax.set_ylim(ymin=0)
if hasattr(self.__dict__['C1s'], 'positions'):
ax.set_xticklabels(self.__dict__['C1s'].positions, rotation=80)
ax.legend(list(self.atomic_percent.keys()), bbox_to_anchor=(0.85, 0.4, 0.5, 0.5), loc='lower center', fontsize=20)
return (fig, ax)
|
def plot_atomic_percent(self, infig=None, inax=None, **pltkwargs):
'Plots the atomic percent\n\n Parameters\n ----------\n infig: matplotlib.fig\n Pass in a figure to plot multiple samples on same graph\n inax: matplotlib.axes\n Pass in axes to plot multiple samples on same graph\n\n Returns\n -------\n matplotlib figures : fig\n Multi-plot plots of the data.\n\n See Also\n --------\n :func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()\n '
if ((infig == None) and (inax == None)):
(fig, ax) = plt.subplots(figsize=(18, 8))
else:
fig = infig
ax = inax
x = np.arange(len(self.__dict__['C1s'].atomic_percent))
if (self.sputter_time is not None):
x = (x * self.sputter_time)
for spectra in self.atomic_percent.keys():
ax.plot(x, (self.atomic_percent[spectra] * 100), color=self.spectra_colors[spectra], linewidth=3, **pltkwargs)
if (self.sputter_time is not None):
ax.set_xlabel('Sputter Time', fontsize=30)
else:
ax.set_xlabel('Position', fontsize=30)
ax.set_ylabel('Atomic Percent', fontsize=30)
ax.tick_params(labelsize=20)
ax.set_xticks(x)
ax.set_ylim(ymin=0)
if hasattr(self.__dict__['C1s'], 'positions'):
ax.set_xticklabels(self.__dict__['C1s'].positions, rotation=80)
ax.legend(list(self.atomic_percent.keys()), bbox_to_anchor=(0.85, 0.4, 0.5, 0.5), loc='lower center', fontsize=20)
return (fig, ax)<|docstring|>Plots the atomic percent
Parameters
----------
infig: matplotlib.fig
Pass in a figure to plot multiple samples on same graph
inax: matplotlib.axes
Pass in axes to plot multiple samples on same graph
Returns
-------
matplotlib figures : fig
Multi-plot plots of the data.
See Also
--------
:func: plot_all_spectra(), plot_all_sub(), plot_atomic_percent()<|endoftext|>
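The infig/inax hooks documented above let atomic-percent profiles from two samples share one set of axes; a minimal sketch, assuming `sample_a` and `sample_b` are two loaded sample objects, and noting that the extra keyword arguments are forwarded to ax.plot().
fig, ax = sample_a.plot_atomic_percent(linestyle='-')
sample_b.plot_atomic_percent(infig=fig, inax=ax, linestyle='--')
fig.savefig('atomic_percent_overlay.png', bbox_inches='tight')  # illustrative path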
|
d027195083731a9ab830b16993c03acea3ca68a6b513493f6dabc7a85624544c
|
def get_root(root):
'\n    Check whether root is in the current directory path; if so, return it\n    '
path = os.getcwd()
while (root != os.path.basename(path)):
path = os.path.dirname(path)
return path
|
Check whether root is in the current directory path; if so, return it
|
test/meituan/NewTxt.py
|
get_root
|
loetcn/UNT
| 2 |
python
|
def get_root(root):
'\n \n '
path = os.getcwd()
while (root != os.path.basename(path)):
path = os.path.dirname(path)
return path
|
def get_root(root):
'\n \n '
path = os.getcwd()
while (root != os.path.basename(path)):
path = os.path.dirname(path)
    return path<|docstring|>Check whether root is in the current directory path; if so, return it<|endoftext|>
|
6c3d8696f2b1f19d7535e78120a05108c82f82d6f82e2109e0d91f01b5ca7a5a
|
def newTxt(Root, FILE, newFile):
'\n    Create a new folder FILE; newFile is the name of the new txt file.\n    Also returns txtPath opened for reading and writing.\n    '
ROOT = get_root(Root)
try:
os.mkdir(os.path.join(ROOT, FILE))
config.LOG.debug((('新建文件夹' + FILE) + '成功!'))
txtPath = open((((FILE + '\\') + newFile) + '.txt'), 'a+')
config.LOG.debug((('新建' + newFile) + 'TXT文件成功!'))
return txtPath
except OSError as err:
txtPath = open((((((ROOT + '\\') + FILE) + '\\') + newFile) + '.txt'), 'a+', encoding='utf-8')
config.LOG.error(err)
config.LOG.debug((('打开' + newFile) + 'TXT文件成功!'))
return txtPath
except:
txtPath = open((((((ROOT + '\\') + FILE) + '\\') + newFile) + '.txt'), 'a+', encoding='utf-8')
config.LOG.error('不知道名错误')
config.LOG.debug((('打开' + newFile) + 'TXT文件成功!'))
return txtPath
|
Create a new folder FILE; newFile is the name of the new txt file.
Also returns txtPath opened for reading and writing.
|
test/meituan/NewTxt.py
|
newTxt
|
loetcn/UNT
| 2 |
python
|
def newTxt(Root, FILE, newFile):
'\n    Create a new folder FILE; newFile is the name of the new txt file.\n    Also returns txtPath opened for reading and writing.\n    '
ROOT = get_root(Root)
try:
os.mkdir(os.path.join(ROOT, FILE))
config.LOG.debug((('新建文件夹' + FILE) + '成功!'))
txtPath = open((((FILE + '\\') + newFile) + '.txt'), 'a+')
config.LOG.debug((('新建' + newFile) + 'TXT文件成功!'))
return txtPath
except OSError as err:
txtPath = open((((((ROOT + '\\') + FILE) + '\\') + newFile) + '.txt'), 'a+', encoding='utf-8')
config.LOG.error(err)
config.LOG.debug((('打开' + newFile) + 'TXT文件成功!'))
return txtPath
except:
txtPath = open((((((ROOT + '\\') + FILE) + '\\') + newFile) + '.txt'), 'a+', encoding='utf-8')
config.LOG.error('不知道名错误')
config.LOG.debug((('打开' + newFile) + 'TXT文件成功!'))
return txtPath
|
def newTxt(Root, FILE, newFile):
'\n    Create a new folder FILE; newFile is the name of the new txt file.\n    Also returns txtPath opened for reading and writing.\n    '
ROOT = get_root(Root)
try:
os.mkdir(os.path.join(ROOT, FILE))
config.LOG.debug((('新建文件夹' + FILE) + '成功!'))
txtPath = open((((FILE + '\\') + newFile) + '.txt'), 'a+')
config.LOG.debug((('新建' + newFile) + 'TXT文件成功!'))
return txtPath
except OSError as err:
txtPath = open((((((ROOT + '\\') + FILE) + '\\') + newFile) + '.txt'), 'a+', encoding='utf-8')
config.LOG.error(err)
config.LOG.debug((('打开' + newFile) + 'TXT文件成功!'))
return txtPath
except:
txtPath = open((((((ROOT + '\\') + FILE) + '\\') + newFile) + '.txt'), 'a+', encoding='utf-8')
config.LOG.error('不知道名错误')
config.LOG.debug((('打开' + newFile) + 'TXT文件成功!'))
        return txtPath<|docstring|>Create a new folder FILE; newFile is the name of the new txt file.
Also returns txtPath opened for reading and writing.<|endoftext|>
|
04d6e38bb5f6fe028d2727f910dc7f731d55b757d168386f59f1d66d464574e1
|
def split_txt_list(FILE):
'\n    Get all files in .txt format from the FILE folder and return filenamelist.\n    '
import glob
cwd = os.getcwd()
FILE = ((str(get_root('pachong2')) + '\\') + FILE)
if FILE:
os.chdir(FILE)
filenamelist = []
for filename in glob.glob('*.txt'):
filenamelist.append(str(filename))
config.LOG.debug('成功获取FILE文件夹里面的txt文件!')
os.chdir(cwd)
return filenamelist
|
Get all files in .txt format from the FILE folder and return filenamelist.
|
test/meituan/NewTxt.py
|
split_txt_list
|
loetcn/UNT
| 2 |
python
|
def split_txt_list(FILE):
'\n \n '
import glob
cwd = os.getcwd()
FILE = ((str(get_root('pachong2')) + '\\') + FILE)
if FILE:
os.chdir(FILE)
filenamelist = []
for filename in glob.glob('*.txt'):
filenamelist.append(str(filename))
config.LOG.debug('成功获取FILE文件夹里面的txt文件!')
os.chdir(cwd)
return filenamelist
|
def split_txt_list(FILE):
'\n \n '
import glob
cwd = os.getcwd()
FILE = ((str(get_root('pachong2')) + '\\') + FILE)
if FILE:
os.chdir(FILE)
filenamelist = []
for filename in glob.glob('*.txt'):
filenamelist.append(str(filename))
config.LOG.debug('成功获取FILE文件夹里面的txt文件!')
os.chdir(cwd)
    return filenamelist<|docstring|>Get all files in .txt format from the FILE folder and return filenamelist.<|endoftext|>
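A hedged sketch tying the three helpers above together (get_root, newTxt, split_txt_list). It assumes the code runs somewhere inside a project folder literally named 'pachong2', which split_txt_list hard-codes, and that config.LOG is already configured; the folder and file names are illustrative.
# Create (or append to) pachong2/results/output.txt, write one line,
# then list every .txt file now sitting in the results folder.
handle = newTxt('pachong2', 'results', 'output')   # returns an open 'a+' file object
handle.write('first record\n')
handle.close()
print(split_txt_list('results'))                   # e.g. ['output.txt']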
|
85be2ceab8a4a34bd69887f025f67588151e7736d081019644bb9dceb933fd3e
|
def __init__(self, camera, light, name, x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz):
'\n Arguments:\n *light*\n Light instance: if None then Light.instance() will be used.\n *name*\n Name string for identification.\n *x, y, z*\n Location of the origin of the shape, stored in a uniform array.\n *rx, ry, rz*\n Rotation of shape in degrees about each axis.\n *sx, sy, sz*\n Scale in each direction.\n *cx, cy, cz*\n Offset vertices from origin in each direction.\n '
super(Shape, self).__init__()
self.name = name
light = (light or Light.instance())
self.unif = (ctypes.c_float * 60)(x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz, 0.5, 0.5, 0.5, 5000.0, 0.8, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, light.lightpos[0], light.lightpos[1], light.lightpos[2], light.lightcol[0], light.lightcol[1], light.lightcol[2], light.lightamb[0], light.lightamb[1], light.lightamb[2])
' pass to shader array of vec3 uniform variables:\n\n ===== ========================================== ==== ==\n vec3 description python\n ----- ------------------------------------------ -------\n index from to\n ===== ========================================== ==== ==\n 0 location 0 2\n 1 rotation 3 5\n 2 scale 6 8\n 3 offset 9 11\n 4 fog shade 12 14\n 5 fog distance and alph (only 2 used) 15 16\n 6 camera position 18 20\n 7 unused: custom data space 21 23\n 8 light0 position, direction vector 24 26\n 9 light0 strength per shade 27 29\n 10 light0 ambient values 30 32\n 11 light1 position, direction vector 33 35\n 12 light1 strength per shade 36 38\n 13 light1 ambient values 39 41\n 14 defocus dist, amount (only 2 used) 42 43\n 15 defocus frame width, height (only 2 used) 45 46\n 16 custom data space 48 50\n 17 custom data space 51 53\n 18 custom data space 54 56\n 19 custom data space 57 59\n ===== ========================================== ==== ==\n\n Shape holds matrices that are updated each time it is moved or rotated\n this saves time recalculating them each frame as the Shape is drawn\n '
self.tr1 = array([[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [(self.unif[0] - self.unif[9]), (self.unif[1] - self.unif[10]), (self.unif[2] - self.unif[11]), 1.0]])
'translate to position - offset'
(s, c) = (sin(radians(self.unif[3])), cos(radians(self.unif[3])))
self.rox = array([[1.0, 0.0, 0.0, 0.0], [0.0, c, s, 0.0], [0.0, (- s), c, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about x axis'
(s, c) = (sin(radians(self.unif[4])), cos(radians(self.unif[4])))
self.roy = array([[c, 0.0, (- s), 0.0], [0.0, 1.0, 0.0, 0.0], [s, 0.0, c, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about y axis'
(s, c) = (sin(radians(self.unif[5])), cos(radians(self.unif[5])))
self.roz = array([[c, s, 0.0, 0.0], [(- s), c, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about z axis'
self.scl = array([[self.unif[6], 0.0, 0.0, 0.0], [0.0, self.unif[7], 0.0, 0.0], [0.0, 0.0, self.unif[8], 0.0], [0.0, 0.0, 0.0, 1.0]])
'scale'
self.tr2 = array([[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [self.unif[9], self.unif[10], self.unif[11], 1.0]])
'translate to offset'
self.MFlg = True
self.M = (ctypes.c_float * 32)(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
self._camera = camera
self.shader = None
self.textures = []
self.buf = []
'self.buf contains a buffer for each part of this shape that needs\n rendering with a different Shader/Texture. self.draw() relies on objects\n inheriting from this filling buf with at least one element.\n '
|
Arguments:
*light*
Light instance: if None then Light.instance() will be used.
*name*
Name string for identification.
*x, y, z*
Location of the origin of the shape, stored in a uniform array.
*rx, ry, rz*
Rotation of shape in degrees about each axis.
*sx, sy, sz*
Scale in each direction.
*cx, cy, cz*
Offset vertices from origin in each direction.
|
code/python/external/pi3d/Shape.py
|
__init__
|
rec/echomesh
| 30 |
python
|
def __init__(self, camera, light, name, x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz):
'\n Arguments:\n *light*\n Light instance: if None then Light.instance() will be used.\n *name*\n Name string for identification.\n *x, y, z*\n Location of the origin of the shape, stored in a uniform array.\n *rx, ry, rz*\n Rotation of shape in degrees about each axis.\n *sx, sy, sz*\n Scale in each direction.\n *cx, cy, cz*\n Offset vertices from origin in each direction.\n '
super(Shape, self).__init__()
self.name = name
light = (light or Light.instance())
self.unif = (ctypes.c_float * 60)(x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz, 0.5, 0.5, 0.5, 5000.0, 0.8, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, light.lightpos[0], light.lightpos[1], light.lightpos[2], light.lightcol[0], light.lightcol[1], light.lightcol[2], light.lightamb[0], light.lightamb[1], light.lightamb[2])
' pass to shader array of vec3 uniform variables:\n\n ===== ========================================== ==== ==\n vec3 description python\n ----- ------------------------------------------ -------\n index from to\n ===== ========================================== ==== ==\n 0 location 0 2\n 1 rotation 3 5\n 2 scale 6 8\n 3 offset 9 11\n 4 fog shade 12 14\n 5 fog distance and alph (only 2 used) 15 16\n 6 camera position 18 20\n 7 unused: custom data space 21 23\n 8 light0 position, direction vector 24 26\n 9 light0 strength per shade 27 29\n 10 light0 ambient values 30 32\n 11 light1 position, direction vector 33 35\n 12 light1 strength per shade 36 38\n 13 light1 ambient values 39 41\n 14 defocus dist, amount (only 2 used) 42 43\n 15 defocus frame width, height (only 2 used) 45 46\n 16 custom data space 48 50\n 17 custom data space 51 53\n 18 custom data space 54 56\n 19 custom data space 57 59\n ===== ========================================== ==== ==\n\n Shape holds matrices that are updated each time it is moved or rotated\n this saves time recalculating them each frame as the Shape is drawn\n '
self.tr1 = array([[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [(self.unif[0] - self.unif[9]), (self.unif[1] - self.unif[10]), (self.unif[2] - self.unif[11]), 1.0]])
'translate to position - offset'
(s, c) = (sin(radians(self.unif[3])), cos(radians(self.unif[3])))
self.rox = array([[1.0, 0.0, 0.0, 0.0], [0.0, c, s, 0.0], [0.0, (- s), c, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about x axis'
(s, c) = (sin(radians(self.unif[4])), cos(radians(self.unif[4])))
self.roy = array([[c, 0.0, (- s), 0.0], [0.0, 1.0, 0.0, 0.0], [s, 0.0, c, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about y axis'
(s, c) = (sin(radians(self.unif[5])), cos(radians(self.unif[5])))
self.roz = array([[c, s, 0.0, 0.0], [(- s), c, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about z axis'
self.scl = array([[self.unif[6], 0.0, 0.0, 0.0], [0.0, self.unif[7], 0.0, 0.0], [0.0, 0.0, self.unif[8], 0.0], [0.0, 0.0, 0.0, 1.0]])
'scale'
self.tr2 = array([[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [self.unif[9], self.unif[10], self.unif[11], 1.0]])
'translate to offset'
self.MFlg = True
self.M = (ctypes.c_float * 32)(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
self._camera = camera
self.shader = None
self.textures = []
self.buf = []
'self.buf contains a buffer for each part of this shape that needs\n rendering with a different Shader/Texture. self.draw() relies on objects\n inheriting from this filling buf with at least one element.\n '
|
def __init__(self, camera, light, name, x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz):
'\n Arguments:\n *light*\n Light instance: if None then Light.instance() will be used.\n *name*\n Name string for identification.\n *x, y, z*\n Location of the origin of the shape, stored in a uniform array.\n *rx, ry, rz*\n Rotation of shape in degrees about each axis.\n *sx, sy, sz*\n Scale in each direction.\n *cx, cy, cz*\n Offset vertices from origin in each direction.\n '
super(Shape, self).__init__()
self.name = name
light = (light or Light.instance())
self.unif = (ctypes.c_float * 60)(x, y, z, rx, ry, rz, sx, sy, sz, cx, cy, cz, 0.5, 0.5, 0.5, 5000.0, 0.8, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, light.lightpos[0], light.lightpos[1], light.lightpos[2], light.lightcol[0], light.lightcol[1], light.lightcol[2], light.lightamb[0], light.lightamb[1], light.lightamb[2])
' pass to shader array of vec3 uniform variables:\n\n ===== ========================================== ==== ==\n vec3 description python\n ----- ------------------------------------------ -------\n index from to\n ===== ========================================== ==== ==\n 0 location 0 2\n 1 rotation 3 5\n 2 scale 6 8\n 3 offset 9 11\n 4 fog shade 12 14\n 5 fog distance and alph (only 2 used) 15 16\n 6 camera position 18 20\n 7 unused: custom data space 21 23\n 8 light0 position, direction vector 24 26\n 9 light0 strength per shade 27 29\n 10 light0 ambient values 30 32\n 11 light1 position, direction vector 33 35\n 12 light1 strength per shade 36 38\n 13 light1 ambient values 39 41\n 14 defocus dist, amount (only 2 used) 42 43\n 15 defocus frame width, height (only 2 used) 45 46\n 16 custom data space 48 50\n 17 custom data space 51 53\n 18 custom data space 54 56\n 19 custom data space 57 59\n ===== ========================================== ==== ==\n\n Shape holds matrices that are updated each time it is moved or rotated\n this saves time recalculating them each frame as the Shape is drawn\n '
self.tr1 = array([[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [(self.unif[0] - self.unif[9]), (self.unif[1] - self.unif[10]), (self.unif[2] - self.unif[11]), 1.0]])
'translate to position - offset'
(s, c) = (sin(radians(self.unif[3])), cos(radians(self.unif[3])))
self.rox = array([[1.0, 0.0, 0.0, 0.0], [0.0, c, s, 0.0], [0.0, (- s), c, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about x axis'
(s, c) = (sin(radians(self.unif[4])), cos(radians(self.unif[4])))
self.roy = array([[c, 0.0, (- s), 0.0], [0.0, 1.0, 0.0, 0.0], [s, 0.0, c, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about y axis'
(s, c) = (sin(radians(self.unif[5])), cos(radians(self.unif[5])))
self.roz = array([[c, s, 0.0, 0.0], [(- s), c, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]])
'rotate about z axis'
self.scl = array([[self.unif[6], 0.0, 0.0, 0.0], [0.0, self.unif[7], 0.0, 0.0], [0.0, 0.0, self.unif[8], 0.0], [0.0, 0.0, 0.0, 1.0]])
'scale'
self.tr2 = array([[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [self.unif[9], self.unif[10], self.unif[11], 1.0]])
'translate to offset'
self.MFlg = True
self.M = (ctypes.c_float * 32)(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
self._camera = camera
self.shader = None
self.textures = []
self.buf = []
'self.buf contains a buffer for each part of this shape that needs\n rendering with a different Shader/Texture. self.draw() relies on objects\n inheriting from this filling buf with at least one element.\n '<|docstring|>Arguments:
*light*
Light instance: if None then Light.instance() will be used.
*name*
Name string for identification.
*x, y, z*
Location of the origin of the shape, stored in a uniform array.
*rx, ry, rz*
Rotation of shape in degrees about each axis.
*sx, sy, sz*
Scale in each direction.
*cx, cy, cz*
Offset vertices from origin in each direction.<|endoftext|>
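The unif index table in the Shape record above is easy to misread once flattened; the following standalone restatement of it as Python slices (no pi3d import required) is offered as a reading aid, with the slot names chosen here rather than taken from the library.
# Named views over the 60-float unif array documented above.
UNIF_SLOTS = {
    'location':       slice(0, 3),
    'rotation':       slice(3, 6),
    'scale':          slice(6, 9),
    'offset':         slice(9, 12),
    'fog_shade':      slice(12, 15),
    'fog_dist_alpha': slice(15, 17),
    'camera_pos':     slice(18, 21),
    'light0_pos':     slice(24, 27),
    'light0_col':     slice(27, 30),
    'light0_amb':     slice(30, 33),
    'light1_pos':     slice(33, 36),
    'light1_col':     slice(36, 39),
    'light1_amb':     slice(39, 42),
    'defocus':        slice(42, 44),
    'defocus_frame':  slice(45, 47),
    'custom':         slice(48, 60),
}
# e.g. shape.unif[UNIF_SLOTS['fog_shade']] = (0.3, 0.3, 0.35)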
|
e8ba4af4b45a982189242e9f74841b5566f17849a261116748dbc82e6750ed3a
|
def draw(self, shader=None, txtrs=None, ntl=None, shny=None, camera=None):
'If called without parameters, there has to have been a previous call to\n set_draw_details() for each Buffer in buf[].\n NB there is no facility for setting umult and vmult with draw: they must be\n set using set_draw_details or Buffer.set_draw_details.\n '
from pi3d.Camera import Camera
camera = (camera or self._camera or Camera.instance())
shader = (shader or self.shader)
shader.use()
if (self.MFlg == True):
self.MRaw = dot(self.tr2, dot(self.scl, dot(self.roy, dot(self.rox, dot(self.roz, self.tr1)))))
self.M[0:16] = self.MRaw.ravel()
self.M[16:32] = dot(self.MRaw, camera.mtrx).ravel()
self.MFlg = False
elif camera.was_moved:
self.M[16:32] = dot(self.MRaw, camera.mtrx).ravel()
if camera.was_moved:
self.unif[18:21] = camera.eye[0:3]
opengles.glUniformMatrix4fv(shader.unif_modelviewmatrix, 2, ctypes.c_int(0), ctypes.byref(self.M))
opengles.glUniform3fv(shader.unif_unif, 20, ctypes.byref(self.unif))
for b in self.buf:
b.draw(shader, txtrs, ntl, shny)
|
If called without parameters, there has to have been a previous call to
set_draw_details() for each Buffer in buf[].
NB there is no facility for setting umult and vmult with draw: they must be
set using set_draw_details or Buffer.set_draw_details.
|
code/python/external/pi3d/Shape.py
|
draw
|
rec/echomesh
| 30 |
python
|
def draw(self, shader=None, txtrs=None, ntl=None, shny=None, camera=None):
'If called without parameters, there has to have been a previous call to\n set_draw_details() for each Buffer in buf[].\n NB there is no facility for setting umult and vmult with draw: they must be\n set using set_draw_details or Buffer.set_draw_details.\n '
from pi3d.Camera import Camera
camera = (camera or self._camera or Camera.instance())
shader = (shader or self.shader)
shader.use()
if (self.MFlg == True):
self.MRaw = dot(self.tr2, dot(self.scl, dot(self.roy, dot(self.rox, dot(self.roz, self.tr1)))))
self.M[0:16] = self.MRaw.ravel()
self.M[16:32] = dot(self.MRaw, camera.mtrx).ravel()
self.MFlg = False
elif camera.was_moved:
self.M[16:32] = dot(self.MRaw, camera.mtrx).ravel()
if camera.was_moved:
self.unif[18:21] = camera.eye[0:3]
opengles.glUniformMatrix4fv(shader.unif_modelviewmatrix, 2, ctypes.c_int(0), ctypes.byref(self.M))
opengles.glUniform3fv(shader.unif_unif, 20, ctypes.byref(self.unif))
for b in self.buf:
b.draw(shader, txtrs, ntl, shny)
|
def draw(self, shader=None, txtrs=None, ntl=None, shny=None, camera=None):
'If called without parameters, there has to have been a previous call to\n set_draw_details() for each Buffer in buf[].\n NB there is no facility for setting umult and vmult with draw: they must be\n set using set_draw_details or Buffer.set_draw_details.\n '
from pi3d.Camera import Camera
camera = (camera or self._camera or Camera.instance())
shader = (shader or self.shader)
shader.use()
if (self.MFlg == True):
self.MRaw = dot(self.tr2, dot(self.scl, dot(self.roy, dot(self.rox, dot(self.roz, self.tr1)))))
self.M[0:16] = self.MRaw.ravel()
self.M[16:32] = dot(self.MRaw, camera.mtrx).ravel()
self.MFlg = False
elif camera.was_moved:
self.M[16:32] = dot(self.MRaw, camera.mtrx).ravel()
if camera.was_moved:
self.unif[18:21] = camera.eye[0:3]
opengles.glUniformMatrix4fv(shader.unif_modelviewmatrix, 2, ctypes.c_int(0), ctypes.byref(self.M))
opengles.glUniform3fv(shader.unif_unif, 20, ctypes.byref(self.unif))
for b in self.buf:
b.draw(shader, txtrs, ntl, shny)<|docstring|>If called without parameters, there has to have been a previous call to
set_draw_details() for each Buffer in buf[].
NB there is no facility for setting umult and vmult with draw: they must be
set using set_draw_details or Buffer.set_draw_details.<|endoftext|>
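For orientation only, a rough sketch of the frame loop that normally ends up calling draw() above. The module layout, the 'uv_flat' shader name, Cuboid and rotateIncY follow mainline pi3d and are assumptions here; this embedded copy may expose them differently.
import pi3d

display = pi3d.Display.create(w=640, h=480)      # assumed mainline-style entry point
shader = pi3d.Shader('uv_flat')
texture = pi3d.Texture('textures/pattern.png')   # hypothetical texture path
box = pi3d.Cuboid(x=0.0, y=0.0, z=5.0)

box.set_draw_details(shader, [texture])          # wrapper documented in the records above
while display.loop_running():
    box.rotateIncY(0.5)                          # any per-frame update works here
    box.draw()                                   # reuses the cached matrices from Shape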
|
f8acdd85ce21ec6fe5f4d23f3f5601323f448c6e7be8b1c5123620406590c903
|
def set_shader(self, shader):
"Wrapper method to set just the Shader for all the Buffer objects of\n this Shape. Used, for instance, in a Model where the Textures have been\n defined in the obj & mtl files, so you can't use set_draw_details.\n\n Arguments:\n\n *shader*\n Shader to use\n\n "
self.shader = shader
for b in self.buf:
b.shader = shader
|
Wrapper method to set just the Shader for all the Buffer objects of
this Shape. Used, for instance, in a Model where the Textures have been
defined in the obj & mtl files, so you can't use set_draw_details.
Arguments:
*shader*
Shader to use
|
code/python/external/pi3d/Shape.py
|
set_shader
|
rec/echomesh
| 30 |
python
|
def set_shader(self, shader):
"Wrapper method to set just the Shader for all the Buffer objects of\n this Shape. Used, for instance, in a Model where the Textures have been\n defined in the obj & mtl files, so you can't use set_draw_details.\n\n Arguments:\n\n *shader*\n Shader to use\n\n "
self.shader = shader
for b in self.buf:
b.shader = shader
|
def set_shader(self, shader):
"Wrapper method to set just the Shader for all the Buffer objects of\n this Shape. Used, for instance, in a Model where the Textures have been\n defined in the obj & mtl files, so you can't use set_draw_details.\n\n Arguments:\n\n *shader*\n Shader to use\n\n "
self.shader = shader
for b in self.buf:
b.shader = shader<|docstring|>Wrapper method to set just the Shader for all the Buffer objects of
this Shape. Used, for instance, in a Model where the Textures have been
defined in the obj & mtl files, so you can't use set_draw_details.
Arguments:
*shader*
Shader to use<|endoftext|>
|
b2ff6876f87a00bf06d5c881b1127a7435a27501e893fb49f4a17462b88303e9
|
def set_normal_shine(self, normtex, ntiles=1.0, shinetex=None, shiny=0.0, is_uv=True):
"Used to set some of the draw details for all Buffers in Shape.\n This is useful where a Model object has been loaded from an obj file and\n the textures assigned automatically.\n\n Arguments:\n *normtex*\n Normal map Texture to use.\n\n Keyword arguments:\n *ntiles*\n Multiplier for the tiling of the normal map.\n *shinetex*\n Reflection Texture to use.\n *shiny*\n Strength of reflection (ranging from 0.0 to 1.0).\n *is_uv*\n If True then the normtex will be textures[1] and shinetex will be\n textures[2] i.e. if using a 'uv' type Shader. However, for 'mat' type\n Shaders they are moved down one, as the basic shade is defined by\n material rgb rather than from a Texture.\n "
ofst = (0 if is_uv else (- 1))
for b in self.buf:
b.textures = (b.textures or [])
if (is_uv and (not b.textures)):
b.textures = [normtex]
while (len(b.textures) < (2 + ofst)):
b.textures.append(None)
b.textures[(1 + ofst)] = normtex
b.unib[0] = ntiles
if shinetex:
while (len(b.textures) < (3 + ofst)):
b.textures.append(None)
b.textures[(2 + ofst)] = shinetex
b.unib[1] = shiny
|
Used to set some of the draw details for all Buffers in Shape.
This is useful where a Model object has been loaded from an obj file and
the textures assigned automatically.
Arguments:
*normtex*
Normal map Texture to use.
Keyword arguments:
*ntiles*
Multiplier for the tiling of the normal map.
*shinetex*
Reflection Texture to use.
*shiny*
Strength of reflection (ranging from 0.0 to 1.0).
*is_uv*
If True then the normtex will be textures[1] and shinetex will be
textures[2] i.e. if using a 'uv' type Shader. However, for 'mat' type
Shaders they are moved down one, as the basic shade is defined by
material rgb rather than from a Texture.
|
code/python/external/pi3d/Shape.py
|
set_normal_shine
|
rec/echomesh
| 30 |
python
|
def set_normal_shine(self, normtex, ntiles=1.0, shinetex=None, shiny=0.0, is_uv=True):
"Used to set some of the draw details for all Buffers in Shape.\n This is useful where a Model object has been loaded from an obj file and\n the textures assigned automatically.\n\n Arguments:\n *normtex*\n Normal map Texture to use.\n\n Keyword arguments:\n *ntiles*\n Multiplier for the tiling of the normal map.\n *shinetex*\n Reflection Texture to use.\n *shiny*\n Strength of reflection (ranging from 0.0 to 1.0).\n *is_uv*\n If True then the normtex will be textures[1] and shinetex will be\n textures[2] i.e. if using a 'uv' type Shader. However, for 'mat' type\n Shaders they are moved down one, as the basic shade is defined by\n material rgb rather than from a Texture.\n "
ofst = (0 if is_uv else (- 1))
for b in self.buf:
b.textures = (b.textures or [])
if (is_uv and (not b.textures)):
b.textures = [normtex]
while (len(b.textures) < (2 + ofst)):
b.textures.append(None)
b.textures[(1 + ofst)] = normtex
b.unib[0] = ntiles
if shinetex:
while (len(b.textures) < (3 + ofst)):
b.textures.append(None)
b.textures[(2 + ofst)] = shinetex
b.unib[1] = shiny
|
def set_normal_shine(self, normtex, ntiles=1.0, shinetex=None, shiny=0.0, is_uv=True):
"Used to set some of the draw details for all Buffers in Shape.\n This is useful where a Model object has been loaded from an obj file and\n the textures assigned automatically.\n\n Arguments:\n *normtex*\n Normal map Texture to use.\n\n Keyword arguments:\n *ntiles*\n Multiplier for the tiling of the normal map.\n *shinetex*\n Reflection Texture to use.\n *shiny*\n Strength of reflection (ranging from 0.0 to 1.0).\n *is_uv*\n If True then the normtex will be textures[1] and shinetex will be\n textures[2] i.e. if using a 'uv' type Shader. However, for 'mat' type\n Shaders they are moved down one, as the basic shade is defined by\n material rgb rather than from a Texture.\n "
ofst = (0 if is_uv else (- 1))
for b in self.buf:
b.textures = (b.textures or [])
if (is_uv and (not b.textures)):
b.textures = [normtex]
while (len(b.textures) < (2 + ofst)):
b.textures.append(None)
b.textures[(1 + ofst)] = normtex
b.unib[0] = ntiles
if shinetex:
while (len(b.textures) < (3 + ofst)):
b.textures.append(None)
b.textures[(2 + ofst)] = shinetex
b.unib[1] = shiny<|docstring|>Used to set some of the draw details for all Buffers in Shape.
This is useful where a Model object has been loaded from an obj file and
the textures assigned automatically.
Arguments:
*normtex*
Normal map Texture to use.
Keyword arguments:
*ntiles*
Multiplier for the tiling of the normal map.
*shinetex*
Reflection Texture to use.
*shiny*
Strength of reflection (ranging from 0.0 to 1.0).
*is_uv*
If True then the normtex will be textures[1] and shinetex will be
textures[2] i.e. if using a 'uv' type Shader. However, for 'mat' type
Shaders they are moved down one, as the basic shade is defined by
material rgb rather than from a Texture.<|endoftext|>
|
c0f06d20d158877243153697e48167964ef529b109f949ff461b13b1e9892e2c
|
def set_draw_details(self, shader, textures, ntiles=0.0, shiny=0.0, umult=1.0, vmult=1.0):
'Wrapper to call set_draw_details() for each Buffer object.\n\n Arguments:\n *shader*\n Shader object\n *textures*\n array of Texture objects\n '
self.shader = shader
for b in self.buf:
b.set_draw_details(shader, textures, ntiles, shiny, umult, vmult)
|
Wrapper to call set_draw_details() for each Buffer object.
Arguments:
*shader*
Shader object
*textures*
array of Texture objects
|
code/python/external/pi3d/Shape.py
|
set_draw_details
|
rec/echomesh
| 30 |
python
|
def set_draw_details(self, shader, textures, ntiles=0.0, shiny=0.0, umult=1.0, vmult=1.0):
'Wrapper to call set_draw_details() for each Buffer object.\n\n Arguments:\n *shader*\n Shader object\n *textures*\n array of Texture objects\n '
self.shader = shader
for b in self.buf:
b.set_draw_details(shader, textures, ntiles, shiny, umult, vmult)
|
def set_draw_details(self, shader, textures, ntiles=0.0, shiny=0.0, umult=1.0, vmult=1.0):
'Wrapper to call set_draw_details() for each Buffer object.\n\n Arguments:\n *shader*\n Shader object\n *textures*\n array of Texture objects\n '
self.shader = shader
for b in self.buf:
b.set_draw_details(shader, textures, ntiles, shiny, umult, vmult)<|docstring|>Wrapper to call set_draw_details() for each Buffer object.
Arguments:
*shader*
Shader object
*textures*
array of Texture objects<|endoftext|>
|
c6fb05095191598fd7b39bc4e2d04a9295abe50c4e929ab4a24c1cb077ee5ee6
|
def set_material(self, material):
'Wrapper for setting material shade in each Buffer object.\n\n Arguments:\n *material*\n tuple (rgb)\n '
for b in self.buf:
b.set_material(material)
|
Wrapper for setting material shade in each Buffer object.
Arguments:
*material*
tuple (rgb)
|
code/python/external/pi3d/Shape.py
|
set_material
|
rec/echomesh
| 30 |
python
|
def set_material(self, material):
'Wrapper for setting material shade in each Buffer object.\n\n Arguments:\n *material*\n tuple (rgb)\n '
for b in self.buf:
b.set_material(material)
|
def set_material(self, material):
'Wrapper for setting material shade in each Buffer object.\n\n Arguments:\n *material*\n tuple (rgb)\n '
for b in self.buf:
b.set_material(material)<|docstring|>Wrapper for setting material shade in each Buffer object.
Arguments:
*material*
tuple (rgb)<|endoftext|>
|
f3e4453183ffc8863d67ab44d34b752df3378ef51cc80f886b78726a3b8625de
|
def set_fog(self, fogshade, fogdist):
'Set fog for this Shape only, it uses the shader smoothblend function from\n 1/3 fogdist to fogdist.\n\n Arguments:\n *fogshade*\n tuple (rgba)\n *fogdist*\n distance from Camera at which Shape is 100% fogshade\n '
self.unif[12:15] = fogshade[0:3]
self.unif[15] = fogdist
self.unif[16] = fogshade[3]
|
Set fog for this Shape only, it uses the shader smoothblend function from
1/3 fogdist to fogdist.
Arguments:
*fogshade*
tuple (rgba)
*fogdist*
distance from Camera at which Shape is 100% fogshade
|
code/python/external/pi3d/Shape.py
|
set_fog
|
rec/echomesh
| 30 |
python
|
def set_fog(self, fogshade, fogdist):
'Set fog for this Shape only, it uses the shader smoothblend function from\n 1/3 fogdist to fogdist.\n\n Arguments:\n *fogshade*\n tuple (rgba)\n *fogdist*\n distance from Camera at which Shape is 100% fogshade\n '
self.unif[12:15] = fogshade[0:3]
self.unif[15] = fogdist
self.unif[16] = fogshade[3]
|
def set_fog(self, fogshade, fogdist):
'Set fog for this Shape only, it uses the shader smoothblend function from\n 1/3 fogdist to fogdist.\n\n Arguments:\n *fogshade*\n tuple (rgba)\n *fogdist*\n distance from Camera at which Shape is 100% fogshade\n '
self.unif[12:15] = fogshade[0:3]
self.unif[15] = fogdist
self.unif[16] = fogshade[3]<|docstring|>Set fog for this Shape only, it uses the shader smoothblend function from
1/3 fogdist to fogdist.
Arguments:
*fogshade*
tuple (rgba)
*fogdist*
distance from Camera at which Shape is 100% fogshade<|endoftext|>
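A one-line illustration of the set_fog record above, assuming `shape` is any constructed Shape; the colour and distance are arbitrary.
# Blend toward a grey haze from ~67 units (fogdist/3) out to 200 units,
# capping at 90 % of the fog colour's alpha.
shape.set_fog((0.6, 0.6, 0.65, 0.9), 200.0)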
|
fa17d03c31fab8aabe601dc5b35fbb3b56e1bf5fe64928c494b11a567f6e45ed
|
def set_light(self, light, num=0):
'Set the values of the lights.\n\n Arguments:\n *light*\n Light object to use\n *num*\n number of the light to set\n '
if ((num > 1) or (num < 0)):
num = 0
stn = (24 + (num * 9))
self.unif[stn:(stn + 3)] = light.lightpos[0:3]
self.unif[(stn + 3):(stn + 6)] = light.lightcol[0:3]
self.unif[(stn + 6):(stn + 9)] = light.lightamb[0:3]
|
Set the values of the lights.
Arguments:
*light*
Light object to use
*num*
number of the light to set
|
code/python/external/pi3d/Shape.py
|
set_light
|
rec/echomesh
| 30 |
python
|
def set_light(self, light, num=0):
'Set the values of the lights.\n\n Arguments:\n *light*\n Light object to use\n *num*\n number of the light to set\n '
if ((num > 1) or (num < 0)):
num = 0
stn = (24 + (num * 9))
self.unif[stn:(stn + 3)] = light.lightpos[0:3]
self.unif[(stn + 3):(stn + 6)] = light.lightcol[0:3]
self.unif[(stn + 6):(stn + 9)] = light.lightamb[0:3]
|
def set_light(self, light, num=0):
'Set the values of the lights.\n\n Arguments:\n *light*\n Light object to use\n *num*\n number of the light to set\n '
if ((num > 1) or (num < 0)):
num = 0
stn = (24 + (num * 9))
self.unif[stn:(stn + 3)] = light.lightpos[0:3]
self.unif[(stn + 3):(stn + 6)] = light.lightcol[0:3]
self.unif[(stn + 6):(stn + 9)] = light.lightamb[0:3]<|docstring|>Set the values of the lights.
Arguments:
*light*
Light object to use
*num*
number of the light to set<|endoftext|>
|
93d03af069a5d9845e7cd04b27bd977990cdcabb2af4f0a2c979db825eab518f
|
def set_2d_size(self, w=None, h=None, x=0, y=0):
'saves size to be drawn and location in pixels for use by 2d shader\n\n Keyword arguments:\n\n *w*\n Width, pixels.\n *h*\n Height, pixels.\n *x*\n Left edge of image from left edge of display, pixels.\n *y*\n Top of image from top of display, pixels\n\n '
from pi3d.Display import Display
if (w == None):
w = Display.INSTANCE.width
if (h == None):
h = Display.INSTANCE.height
self.unif[42:44] = [x, y]
self.unif[45:48] = [w, h, Display.INSTANCE.height]
|
saves size to be drawn and location in pixels for use by 2d shader
Keyword arguments:
*w*
Width, pixels.
*h*
Height, pixels.
*x*
Left edge of image from left edge of display, pixels.
*y*
Top of image from top of display, pixels
|
code/python/external/pi3d/Shape.py
|
set_2d_size
|
rec/echomesh
| 30 |
python
|
def set_2d_size(self, w=None, h=None, x=0, y=0):
'saves size to be drawn and location in pixels for use by 2d shader\n\n Keyword arguments:\n\n *w*\n Width, pixels.\n *h*\n Height, pixels.\n *x*\n Left edge of image from left edge of display, pixels.\n *y*\n Top of image from top of display, pixels\n\n '
from pi3d.Display import Display
if (w == None):
w = Display.INSTANCE.width
if (h == None):
h = Display.INSTANCE.height
self.unif[42:44] = [x, y]
self.unif[45:48] = [w, h, Display.INSTANCE.height]
|
def set_2d_size(self, w=None, h=None, x=0, y=0):
'saves size to be drawn and location in pixels for use by 2d shader\n\n Keyword arguments:\n\n *w*\n Width, pixels.\n *h*\n Height, pixels.\n *x*\n Left edge of image from left edge of display, pixels.\n *y*\n Top of image from top of display, pixels\n\n '
from pi3d.Display import Display
if (w == None):
w = Display.INSTANCE.width
if (h == None):
h = Display.INSTANCE.height
self.unif[42:44] = [x, y]
self.unif[45:48] = [w, h, Display.INSTANCE.height]<|docstring|>saves size to be drawn and location in pixels for use by 2d shader
Keyword arguments:
*w*
Width, pixels.
*h*
Height, pixels.
*x*
Left edge of image from left edge of display, pixels.
*y*
Top of image from top of display, pixels<|endoftext|>
|
a96de2565c36251ae4faaa7cc3b1a9c59304d32f900c53553f42576b343abde9
|
def set_2d_location(self, x, y):
'saves location in pixels for use by 2d shader\n\n Arguments:\n\n *x*\n Left edge of image from left edge of display, pixels.\n *y*\n Top of image from top of display, pixels\n\n '
self.unif[42:44] = [x, y]
|
saves location in pixels for use by 2d shader
Arguments:
*x*
Left edge of image from left edge of display, pixels.
*y*
Top of image from top of display, pixels
|
code/python/external/pi3d/Shape.py
|
set_2d_location
|
rec/echomesh
| 30 |
python
|
def set_2d_location(self, x, y):
'saves location in pixels for use by 2d shader\n\n Arguments:\n\n *x*\n Left edge of image from left edge of display, pixels.\n *y*\n Top of image from top of display, pixels\n\n '
self.unif[42:44] = [x, y]
|
def set_2d_location(self, x, y):
'saves location in pixels for use by 2d shader\n\n Arguments:\n\n *x*\n Left edge of image from left edge of display, pixels.\n *y*\n Top of image from top of display, pixels\n\n '
self.unif[42:44] = [x, y]<|docstring|>saves location in pixels for use by 2d shader
Arguments:
*x*
Left edge of image from left edge of display, pixels.
*y*
Top of image from top of display, pixels<|endoftext|>
|
b4362a4b2ea235981b9ae6b23ff336cca432320da3d81e5d82c3f180a3294c5f
|
def set_custom_data(self, index_from, data):
'save general purpose custom data for use by any shader **NB it is up\n to the user to provide data in the form of a suitable array of values\n that will fit into the space available in the unif array**\n\n Arguments:\n\n *index_from*\n start index in unif array for filling data should be 48 to 59 though\n 42 to 47 could be used if they do not conflict with existing shaders\n i.e. 2d_flat, defocus etc\n *data*\n array of values to put in\n '
self.unif[index_from:(index_from + len(data))] = data
|
save general purpose custom data for use by any shader **NB it is up
to the user to provide data in the form of a suitable array of values
that will fit into the space available in the unif array**
Arguments:
*index_from*
start index in unif array for filling data should be 48 to 59 though
42 to 47 could be used if they do not conflict with existing shaders
i.e. 2d_flat, defocus etc
*data*
array of values to put in
|
code/python/external/pi3d/Shape.py
|
set_custom_data
|
rec/echomesh
| 30 |
python
|
def set_custom_data(self, index_from, data):
'save general purpose custom data for use by any shader **NB it is up\n to the user to provide data in the form of a suitable array of values\n that will fit into the space available in the unif array**\n\n Arguments:\n\n *index_from*\n start index in unif array for filling data should be 48 to 59 though\n 42 to 47 could be used if they do not conflict with existing shaders\n i.e. 2d_flat, defocus etc\n *data*\n array of values to put in\n '
self.unif[index_from:(index_from + len(data))] = data
|
def set_custom_data(self, index_from, data):
'save general purpose custom data for use by any shader **NB it is up\n to the user to provide data in the form of a suitable array of values\n that will fit into the space available in the unif array**\n\n Arguments:\n\n *index_from*\n start index in unif array for filling data should be 48 to 59 though\n 42 to 47 could be used if they do not conflict with existing shaders\n i.e. 2d_flat, defocus etc\n *data*\n array of values to put in\n '
self.unif[index_from:(index_from + len(data))] = data<|docstring|>save general purpose custom data for use by any shader **NB it is up
to the user to provide data in the form of a suitable array of values
that will fit into the space available in the unif array**
Arguments:
*index_from*
start index in unif array for filling data should be 48 to 59 though
42 to 47 could be used if they do not conflict with existing shaders
i.e. 2d_flat, defocus etc
*data*
array of values to put in<|endoftext|>
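A short illustration of the set_custom_data record above; the values, and the custom shader that would consume them, are assumptions.
# Fill the first free custom vec3 (unif indices 48-50); per the index table in
# the Shape record above this arrives shader-side as vec3 number 16.
shape.set_custom_data(48, [1.0, 0.25, 0.0])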
|
e7ea3ff6f776b252a1ca2810ef2de49940dea43afee35492af39f8ac3a1bb97b
|
def x(self):
'get value of x'
return self.unif[0]
|
get value of x
|
code/python/external/pi3d/Shape.py
|
x
|
rec/echomesh
| 30 |
python
|
def x(self):
return self.unif[0]
|
def x(self):
return self.unif[0]<|docstring|>get value of x<|endoftext|>
|
846557d6828205fb5e0ce4070f76b329e4f706285f4c532c600640727acbcb8b
|
def y(self):
'get value of y'
return self.unif[1]
|
get value of y
|
code/python/external/pi3d/Shape.py
|
y
|
rec/echomesh
| 30 |
python
|
def y(self):
return self.unif[1]
|
def y(self):
return self.unif[1]<|docstring|>get value of y<|endoftext|>
|
648bc0c555c3dfebf3eaf30323925b66371f7f61e827b5f4a2c06b7f530245ec
|
def z(self):
'get value of z'
return self.unif[2]
|
get value of z
|
code/python/external/pi3d/Shape.py
|
z
|
rec/echomesh
| 30 |
python
|
def z(self):
return self.unif[2]
|
def z(self):
return self.unif[2]<|docstring|>get value of z<|endoftext|>
|
d6873067938c1e8ce20430b87857231cc801ecbf07161d6eaa9da2324acb8bd3
|
def scale(self, sx, sy, sz):
'Arguments:\n\n *sx*\n x scale\n *sy*\n y scale\n *sz*\n z scale\n '
self.scl[(0, 0)] = sx
self.scl[(1, 1)] = sy
self.scl[(2, 2)] = sz
self.unif[6:9] = (sx, sy, sz)
self.MFlg = True
|
Arguments:
*sx*
x scale
*sy*
y scale
*sz*
z scale
|
code/python/external/pi3d/Shape.py
|
scale
|
rec/echomesh
| 30 |
python
|
def scale(self, sx, sy, sz):
'Arguments:\n\n *sx*\n x scale\n *sy*\n y scale\n *sz*\n z scale\n '
self.scl[(0, 0)] = sx
self.scl[(1, 1)] = sy
self.scl[(2, 2)] = sz
self.unif[6:9] = (sx, sy, sz)
self.MFlg = True
|
def scale(self, sx, sy, sz):
'Arguments:\n\n *sx*\n x scale\n *sy*\n y scale\n *sz*\n z scale\n '
self.scl[(0, 0)] = sx
self.scl[(1, 1)] = sy
self.scl[(2, 2)] = sz
self.unif[6:9] = (sx, sy, sz)
self.MFlg = True<|docstring|>Arguments:
*sx*
x scale
*sy*
y scale
*sz*
z scale<|endoftext|>
|
82b52670a3528c08c5c6418ec253dbeb4cbf64748916a1dc88f6dc27b3f56815
|
def position(self, x, y, z):
'Arguments:\n\n *x*\n x position\n *y*\n y position\n *z*\n z position\n '
self.tr1[(3, 0)] = (x - self.unif[9])
self.tr1[(3, 1)] = (y - self.unif[10])
self.tr1[(3, 2)] = (z - self.unif[11])
self.unif[0:3] = (x, y, z)
self.MFlg = True
|
Arguments:
*x*
x position
*y*
y position
*z*
z position
|
code/python/external/pi3d/Shape.py
|
position
|
rec/echomesh
| 30 |
python
|
def position(self, x, y, z):
'Arguments:\n\n *x*\n x position\n *y*\n y position\n *z*\n z position\n '
self.tr1[(3, 0)] = (x - self.unif[9])
self.tr1[(3, 1)] = (y - self.unif[10])
self.tr1[(3, 2)] = (z - self.unif[11])
self.unif[0:3] = (x, y, z)
self.MFlg = True
|
def position(self, x, y, z):
'Arguments:\n\n *x*\n x position\n *y*\n y position\n *z*\n z position\n '
self.tr1[(3, 0)] = (x - self.unif[9])
self.tr1[(3, 1)] = (y - self.unif[10])
self.tr1[(3, 2)] = (z - self.unif[11])
self.unif[0:3] = (x, y, z)
self.MFlg = True<|docstring|>Arguments:
*x*
x position
*y*
y position
*z*
z position<|endoftext|>
|
a2365d1beef40ed65b4c81eee734b74a03fc486ec999caa31e98c2f67b16c561
|
def positionX(self, v):
'Arguments:\n\n *v*\n x position\n '
self.tr1[(3, 0)] = (v - self.unif[9])
self.unif[0] = v
self.MFlg = True
|
Arguments:
*v*
x position
|
code/python/external/pi3d/Shape.py
|
positionX
|
rec/echomesh
| 30 |
python
|
def positionX(self, v):
'Arguments:\n\n *v*\n x position\n '
self.tr1[(3, 0)] = (v - self.unif[9])
self.unif[0] = v
self.MFlg = True
|
def positionX(self, v):
'Arguments:\n\n *v*\n x position\n '
self.tr1[(3, 0)] = (v - self.unif[9])
self.unif[0] = v
self.MFlg = True<|docstring|>Arguments:
*v*
x position<|endoftext|>
|
bc77a1f0b0e2d33458a45071c47adb3cd335db48896c59716afed7f07636c5d1
|
def positionY(self, v):
'Arguments:\n\n *v*\n y position\n '
self.tr1[(3, 1)] = (v - self.unif[10])
self.unif[1] = v
self.MFlg = True
|
Arguments:
*v*
y position
|
code/python/external/pi3d/Shape.py
|
positionY
|
rec/echomesh
| 30 |
python
|
def positionY(self, v):
'Arguments:\n\n *v*\n y position\n '
self.tr1[(3, 1)] = (v - self.unif[10])
self.unif[1] = v
self.MFlg = True
|
def positionY(self, v):
'Arguments:\n\n *v*\n y position\n '
self.tr1[(3, 1)] = (v - self.unif[10])
self.unif[1] = v
self.MFlg = True<|docstring|>Arguments:
*v*
y position<|endoftext|>
|
709d434041f0a6f4763248318c9b74735e7216df8f8b8120dc536a3174c4fc63
|
def positionZ(self, v):
'Arguments:\n\n *v*\n z position\n '
self.tr1[(3, 2)] = (v - self.unif[11])
self.unif[2] = v
self.MFlg = True
|
Arguments:
*v*
z position
|
code/python/external/pi3d/Shape.py
|
positionZ
|
rec/echomesh
| 30 |
python
|
def positionZ(self, v):
'Arguments:\n\n *v*\n z position\n '
self.tr1[(3, 2)] = (v - self.unif[11])
self.unif[2] = v
self.MFlg = True
|
def positionZ(self, v):
'Arguments:\n\n *v*\n z position\n '
self.tr1[(3, 2)] = (v - self.unif[11])
self.unif[2] = v
self.MFlg = True<|docstring|>Arguments:
*v*
z position<|endoftext|>
|
481bf83f79d33ff9ef6f3e08f49a73b8bbaa819ceaad97f67a27a239422b04e6
|
def translate(self, dx, dy, dz):
'Arguments:\n\n *dx*\n x translation\n *dy*\n y translation\n *dz*\n z translation\n '
self.tr1[(3, 0)] += dx
self.tr1[(3, 1)] += dy
self.tr1[(3, 2)] += dz
self.MFlg = True
self.unif[0] += dx
self.unif[1] += dy
self.unif[2] += dz
|
Arguments:
*dx*
x translation
*dy*
y translation
*dz*
z translation
|
code/python/external/pi3d/Shape.py
|
translate
|
rec/echomesh
| 30 |
python
|
def translate(self, dx, dy, dz):
'Arguments:\n\n *dx*\n x translation\n *dy*\n y translation\n *dz*\n z translation\n '
self.tr1[(3, 0)] += dx
self.tr1[(3, 1)] += dy
self.tr1[(3, 2)] += dz
self.MFlg = True
self.unif[0] += dx
self.unif[1] += dy
self.unif[2] += dz
|
def translate(self, dx, dy, dz):
'Arguments:\n\n *dx*\n x translation\n *dy*\n y translation\n *dz*\n z translation\n '
self.tr1[(3, 0)] += dx
self.tr1[(3, 1)] += dy
self.tr1[(3, 2)] += dz
self.MFlg = True
self.unif[0] += dx
self.unif[1] += dy
self.unif[2] += dz<|docstring|>Arguments:
*dx*
x translation
*dy*
y translation
*dz*
z translation<|endoftext|>
|
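Unlike position(), translate() adds increments to the current placement, which suits per-frame motion. A sketch under the same assumption that `shape` is an existing Shape:

# Sketch: relative motion applied once per rendered frame.
dx, dy, dz = 0.01, 0.0, -0.05
shape.translate(dx, dy, dz)   # the deltas are added to the stored position
|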
60680ea710d2e75d92fab4b668ed2ce96173a68a77c1240e05c0bb8c77d072ad
|
def translateX(self, v):
'Arguments:\n\n *v*\n x translation\n '
self.tr1[(3, 0)] += v
self.unif[0] += v
self.MFlg = True
|
Arguments:
*v*
x translation
|
code/python/external/pi3d/Shape.py
|
translateX
|
rec/echomesh
| 30 |
python
|
def translateX(self, v):
'Arguments:\n\n *v*\n x translation\n '
self.tr1[(3, 0)] += v
self.unif[0] += v
self.MFlg = True
|
def translateX(self, v):
'Arguments:\n\n *v*\n x translation\n '
self.tr1[(3, 0)] += v
self.unif[0] += v
self.MFlg = True<|docstring|>Arguments:
*v*
x translation<|endoftext|>
|
058b6a6d32acb7a5d6adb9f04daa04ea47c1d15777227f31c183b17b9e9517b4
|
def translateY(self, v):
'Arguments:\n\n *v*\n y translation\n '
self.tr1[(3, 1)] += v
self.unif[1] += v
self.MFlg = True
|
Arguments:
*v*
y translation
|
code/python/external/pi3d/Shape.py
|
translateY
|
rec/echomesh
| 30 |
python
|
def translateY(self, v):
'Arguments:\n\n *v*\n y translation\n '
self.tr1[(3, 1)] += v
self.unif[1] += v
self.MFlg = True
|
def translateY(self, v):
'Arguments:\n\n *v*\n y translation\n '
self.tr1[(3, 1)] += v
self.unif[1] += v
self.MFlg = True<|docstring|>Arguments:
*v*
y translation<|endoftext|>
|
419b608fc4cec36a6bf0db9831a3f0c20f17ac0a65b31c0e2dcd057925f275ab
|
def translateZ(self, v):
'Arguments:\n\n *v*\n z translation\n '
self.tr1[(3, 2)] += v
self.unif[2] += v
self.MFlg = True
|
Arguments:
*v*
z translation
|
code/python/external/pi3d/Shape.py
|
translateZ
|
rec/echomesh
| 30 |
python
|
def translateZ(self, v):
'Arguments:\n\n *v*\n z translation\n '
self.tr1[(3, 2)] += v
self.unif[2] += v
self.MFlg = True
|
def translateZ(self, v):
'Arguments:\n\n *v*\n z translation\n '
self.tr1[(3, 2)] += v
self.unif[2] += v
self.MFlg = True<|docstring|>Arguments:
*v*
z translation<|endoftext|>
|
5d5ce6059d043cc71b3d1bb9f9b378ff1a888f6dd0635093a64f309cd3a6c8ba
|
def rotateToX(self, v):
'Arguments:\n\n *v*\n x rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.rox[(1, 1)] = self.rox[(2, 2)] = c
self.rox[(1, 2)] = s
self.rox[(2, 1)] = (- s)
self.unif[3] = v
self.MFlg = True
|
Arguments:
*v*
x rotation
|
code/python/external/pi3d/Shape.py
|
rotateToX
|
rec/echomesh
| 30 |
python
|
def rotateToX(self, v):
'Arguments:\n\n *v*\n x rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.rox[(1, 1)] = self.rox[(2, 2)] = c
self.rox[(1, 2)] = s
self.rox[(2, 1)] = (- s)
self.unif[3] = v
self.MFlg = True
|
def rotateToX(self, v):
'Arguments:\n\n *v*\n x rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.rox[(1, 1)] = self.rox[(2, 2)] = c
self.rox[(1, 2)] = s
self.rox[(2, 1)] = (- s)
self.unif[3] = v
self.MFlg = True<|docstring|>Arguments:
*v*
x rotation<|endoftext|>
|
df736c51ca25e51eab77883594d58a8eabaa91fd41b7e8eafc3481feb53dc601
|
def rotateToY(self, v):
'Arguments:\n\n *v*\n y rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.roy[(0, 0)] = self.roy[(2, 2)] = c
self.roy[(0, 2)] = (- s)
self.roy[(2, 0)] = s
self.unif[4] = v
self.MFlg = True
|
Arguments:
*v*
y rotation
|
code/python/external/pi3d/Shape.py
|
rotateToY
|
rec/echomesh
| 30 |
python
|
def rotateToY(self, v):
'Arguments:\n\n *v*\n y rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.roy[(0, 0)] = self.roy[(2, 2)] = c
self.roy[(0, 2)] = (- s)
self.roy[(2, 0)] = s
self.unif[4] = v
self.MFlg = True
|
def rotateToY(self, v):
'Arguments:\n\n *v*\n y rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.roy[(0, 0)] = self.roy[(2, 2)] = c
self.roy[(0, 2)] = (- s)
self.roy[(2, 0)] = s
self.unif[4] = v
self.MFlg = True<|docstring|>Arguments:
*v*
y rotation<|endoftext|>
|
6648762588692af63070ae4d0233db74e91578c205aa19d0eaba04f378bc5894
|
def rotateToZ(self, v):
'Arguments:\n\n *v*\n z rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.roz[(0, 0)] = self.roz[(1, 1)] = c
self.roz[(0, 1)] = s
self.roz[(1, 0)] = (- s)
self.unif[5] = v
self.MFlg = True
|
Arguments:
*v*
z rotation
|
code/python/external/pi3d/Shape.py
|
rotateToZ
|
rec/echomesh
| 30 |
python
|
def rotateToZ(self, v):
'Arguments:\n\n *v*\n z rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.roz[(0, 0)] = self.roz[(1, 1)] = c
self.roz[(0, 1)] = s
self.roz[(1, 0)] = (- s)
self.unif[5] = v
self.MFlg = True
|
def rotateToZ(self, v):
'Arguments:\n\n *v*\n z rotation\n '
(s, c) = (sin(radians(v)), cos(radians(v)))
self.roz[(0, 0)] = self.roz[(1, 1)] = c
self.roz[(0, 1)] = s
self.roz[(1, 0)] = (- s)
self.unif[5] = v
self.MFlg = True<|docstring|>Arguments:
*v*
z rotation<|endoftext|>
|
f8019f7dc045b398562bd6a762dd6f5ac31c83e169b847a4963a7617d7292b9d
|
def rotateIncX(self, v):
'Arguments:\n\n *v*\n x rotational increment\n '
self.unif[3] += v
(s, c) = (sin(radians(self.unif[3])), cos(radians(self.unif[3])))
self.rox[(1, 1)] = self.rox[(2, 2)] = c
self.rox[(1, 2)] = s
self.rox[(2, 1)] = (- s)
self.MFlg = True
|
Arguments:
*v*
x rotational increment
|
code/python/external/pi3d/Shape.py
|
rotateIncX
|
rec/echomesh
| 30 |
python
|
def rotateIncX(self, v):
'Arguments:\n\n *v*\n x rotational increment\n '
self.unif[3] += v
(s, c) = (sin(radians(self.unif[3])), cos(radians(self.unif[3])))
self.rox[(1, 1)] = self.rox[(2, 2)] = c
self.rox[(1, 2)] = s
self.rox[(2, 1)] = (- s)
self.MFlg = True
|
def rotateIncX(self, v):
'Arguments:\n\n *v*\n x rotational increment\n '
self.unif[3] += v
(s, c) = (sin(radians(self.unif[3])), cos(radians(self.unif[3])))
self.rox[(1, 1)] = self.rox[(2, 2)] = c
self.rox[(1, 2)] = s
self.rox[(2, 1)] = (- s)
self.MFlg = True<|docstring|>Arguments:
*v*
x rotational increment<|endoftext|>
|
aec50bbe7698e695f94b879dea2b1468938bfc940cd9abb15724da7a8c0e67b2
|
def rotateIncY(self, v):
'Arguments:\n\n *v*\n y rotational increment\n '
self.unif[4] += v
(s, c) = (sin(radians(self.unif[4])), cos(radians(self.unif[4])))
self.roy[(0, 0)] = self.roy[(2, 2)] = c
self.roy[(0, 2)] = (- s)
self.roy[(2, 0)] = s
self.MFlg = True
|
Arguments:
*v*
y rotational increment
|
code/python/external/pi3d/Shape.py
|
rotateIncY
|
rec/echomesh
| 30 |
python
|
def rotateIncY(self, v):
'Arguments:\n\n *v*\n y rotational increment\n '
self.unif[4] += v
(s, c) = (sin(radians(self.unif[4])), cos(radians(self.unif[4])))
self.roy[(0, 0)] = self.roy[(2, 2)] = c
self.roy[(0, 2)] = (- s)
self.roy[(2, 0)] = s
self.MFlg = True
|
def rotateIncY(self, v):
'Arguments:\n\n *v*\n y rotational increment\n '
self.unif[4] += v
(s, c) = (sin(radians(self.unif[4])), cos(radians(self.unif[4])))
self.roy[(0, 0)] = self.roy[(2, 2)] = c
self.roy[(0, 2)] = (- s)
self.roy[(2, 0)] = s
self.MFlg = True<|docstring|>Arguments:
*v*
y rotational increment<|endoftext|>
|
74f4ce910eceee4676f98358b240cced01c40d69bd756de1f09f2de55c005561
|
def rotateIncZ(self, v):
'Arguments:\n\n *v*\n z rotational increment\n '
self.unif[5] += v
(s, c) = (sin(radians(self.unif[5])), cos(radians(self.unif[5])))
self.roz[(0, 0)] = self.roz[(1, 1)] = c
self.roz[(0, 1)] = s
self.roz[(1, 0)] = (- s)
self.MFlg = True
|
Arguments:
*v*
z rotational increment
|
code/python/external/pi3d/Shape.py
|
rotateIncZ
|
rec/echomesh
| 30 |
python
|
def rotateIncZ(self, v):
'Arguments:\n\n *v*\n z rotational increment\n '
self.unif[5] += v
(s, c) = (sin(radians(self.unif[5])), cos(radians(self.unif[5])))
self.roz[(0, 0)] = self.roz[(1, 1)] = c
self.roz[(0, 1)] = s
self.roz[(1, 0)] = (- s)
self.MFlg = True
|
def rotateIncZ(self, v):
'Arguments:\n\n *v*\n z rotational increment\n '
self.unif[5] += v
(s, c) = (sin(radians(self.unif[5])), cos(radians(self.unif[5])))
self.roz[(0, 0)] = self.roz[(1, 1)] = c
self.roz[(0, 1)] = s
self.roz[(1, 0)] = (- s)
self.MFlg = True<|docstring|>Arguments:
*v*
z rotational increment<|endoftext|>
|
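The rotateTo*/rotateInc* records set absolute Euler angles or add angular increments, in degrees, rebuilding the per-axis rotation matrices and flagging the combined matrix for recomputation. A sketch, again assuming an existing `shape`:

# Sketch: absolute orientation once, then a small per-frame spin.
shape.rotateToY(90.0)    # absolute y rotation, in degrees
shape.rotateIncZ(0.5)    # called every frame for a slow roll about z
|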
e83ea36f7cd114c0dff76eeae1b68b2243ec5d187e087aae36b5c220f2003b1f
|
def _add_vertex(self, vert, norm, texc):
'add vertex,normal and tex_coords ...'
self.verts.append(vert)
self.norms.append(norm)
self.texcoords.append(texc)
|
add vertex,normal and tex_coords ...
|
code/python/external/pi3d/Shape.py
|
_add_vertex
|
rec/echomesh
| 30 |
python
|
def _add_vertex(self, vert, norm, texc):
self.verts.append(vert)
self.norms.append(norm)
self.texcoords.append(texc)
|
def _add_vertex(self, vert, norm, texc):
self.verts.append(vert)
self.norms.append(norm)
self.texcoords.append(texc)<|docstring|>add vertex,normal and tex_coords ...<|endoftext|>
|
3b8703b47742502fc99613b563cd0925feb6f08274d9fb5f98e2095f1a321261
|
def _add_tri(self, indx):
'add triangle refs.'
self.inds.append(indx)
|
add triangle refs.
|
code/python/external/pi3d/Shape.py
|
_add_tri
|
rec/echomesh
| 30 |
python
|
def _add_tri(self, indx):
self.inds.append(indx)
|
def _add_tri(self, indx):
self.inds.append(indx)<|docstring|>add triangle refs.<|endoftext|>
|
cdadf462c55fe9e3e827f4b4d1e5315b68407ae4f450e6d2849edf9b91770ec0
|
def _lathe(self, path, sides=12, rise=0.0, loops=1.0):
'Returns a Buffer object by rotating the points defined in path.\n\n Arguments:\n *path*\n An array of points [(x0, y0), (x1, y1) ...] to rotate around\n the y axis.\n\n Keyword arguments:\n *sides*\n Number of sides to divide each rotation into.\n *rise*\n Amount to increment the path y values for each rotation (ie helix)\n *loops*\n Number of times to rotate the path by 360 (ie helix).\n\n '
self.sides = sides
s = len(path)
rl = int((self.sides * loops))
ssize = ((rl * 6) * (s - 1))
pn = 0
pp = 0
tcx = (1.0 / self.sides)
pr = ((pi / self.sides) * 2.0)
rdiv = (rise / rl)
ss = 0
miny = path[0][1]
maxy = path[(s - 1)][1]
for p in range(s):
if (path[p][1] < miny):
miny = path[p][1]
if (path[p][1] > maxy):
maxy = path[p][1]
verts = []
norms = []
idx = []
tex_coords = []
opx = path[0][0]
opy = path[0][1]
for p in range(s):
px = (path[p][0] * 1.0)
py = (path[p][1] * 1.0)
tcy = (1.0 - ((py - miny) / (maxy - miny)))
(dx, dy) = Utility.vec_normal(Utility.vec_sub((px, py), (opx, opy)))
for r in range(0, rl):
sinr = sin((pr * r))
cosr = cos((pr * r))
verts.append(((px * sinr), py, (px * cosr)))
norms.append((((- sinr) * dy), dx, ((- cosr) * dy)))
tex_coords.append(((1.0 - (tcx * r)), tcy))
py += rdiv
verts.append((0, py, px))
norms.append((0, dx, (- dy)))
tex_coords.append((0, tcy))
if (p < (s - 1)):
pn += (rl + 1)
for r in range(rl):
idx.append((((pp + r) + 1), (pp + r), (pn + r)))
idx.append(((pn + r), ((pn + r) + 1), ((pp + r) + 1)))
pp += (rl + 1)
opx = px
opy = py
return Buffer(self, verts, tex_coords, idx, norms)
|
Returns a Buffer object by rotating the points defined in path.
Arguments:
*path*
An array of points [(x0, y0), (x1, y1) ...] to rotate around
the y axis.
Keyword arguments:
*sides*
Number of sides to divide each rotation into.
*rise*
Amount to increment the path y values for each rotation (ie helix)
*loops*
Number of times to rotate the path by 360 (ie helix).
|
code/python/external/pi3d/Shape.py
|
_lathe
|
rec/echomesh
| 30 |
python
|
def _lathe(self, path, sides=12, rise=0.0, loops=1.0):
'Returns a Buffer object by rotating the points defined in path.\n\n Arguments:\n *path*\n An array of points [(x0, y0), (x1, y1) ...] to rotate around\n the y axis.\n\n Keyword arguments:\n *sides*\n Number of sides to divide each rotation into.\n *rise*\n Amount to increment the path y values for each rotation (ie helix)\n *loops*\n Number of times to rotate the path by 360 (ie helix).\n\n '
self.sides = sides
s = len(path)
rl = int((self.sides * loops))
ssize = ((rl * 6) * (s - 1))
pn = 0
pp = 0
tcx = (1.0 / self.sides)
pr = ((pi / self.sides) * 2.0)
rdiv = (rise / rl)
ss = 0
miny = path[0][1]
maxy = path[(s - 1)][1]
for p in range(s):
if (path[p][1] < miny):
miny = path[p][1]
if (path[p][1] > maxy):
maxy = path[p][1]
verts = []
norms = []
idx = []
tex_coords = []
opx = path[0][0]
opy = path[0][1]
for p in range(s):
px = (path[p][0] * 1.0)
py = (path[p][1] * 1.0)
tcy = (1.0 - ((py - miny) / (maxy - miny)))
(dx, dy) = Utility.vec_normal(Utility.vec_sub((px, py), (opx, opy)))
for r in range(0, rl):
sinr = sin((pr * r))
cosr = cos((pr * r))
verts.append(((px * sinr), py, (px * cosr)))
norms.append((((- sinr) * dy), dx, ((- cosr) * dy)))
tex_coords.append(((1.0 - (tcx * r)), tcy))
py += rdiv
verts.append((0, py, px))
norms.append((0, dx, (- dy)))
tex_coords.append((0, tcy))
if (p < (s - 1)):
pn += (rl + 1)
for r in range(rl):
idx.append((((pp + r) + 1), (pp + r), (pn + r)))
idx.append(((pn + r), ((pn + r) + 1), ((pp + r) + 1)))
pp += (rl + 1)
opx = px
opy = py
return Buffer(self, verts, tex_coords, idx, norms)
|
def _lathe(self, path, sides=12, rise=0.0, loops=1.0):
'Returns a Buffer object by rotating the points defined in path.\n\n Arguments:\n *path*\n An array of points [(x0, y0), (x1, y1) ...] to rotate around\n the y axis.\n\n Keyword arguments:\n *sides*\n Number of sides to divide each rotation into.\n *rise*\n Amount to increment the path y values for each rotation (ie helix)\n *loops*\n Number of times to rotate the path by 360 (ie helix).\n\n '
self.sides = sides
s = len(path)
rl = int((self.sides * loops))
ssize = ((rl * 6) * (s - 1))
pn = 0
pp = 0
tcx = (1.0 / self.sides)
pr = ((pi / self.sides) * 2.0)
rdiv = (rise / rl)
ss = 0
miny = path[0][1]
maxy = path[(s - 1)][1]
for p in range(s):
if (path[p][1] < miny):
miny = path[p][1]
if (path[p][1] > maxy):
maxy = path[p][1]
verts = []
norms = []
idx = []
tex_coords = []
opx = path[0][0]
opy = path[0][1]
for p in range(s):
px = (path[p][0] * 1.0)
py = (path[p][1] * 1.0)
tcy = (1.0 - ((py - miny) / (maxy - miny)))
(dx, dy) = Utility.vec_normal(Utility.vec_sub((px, py), (opx, opy)))
for r in range(0, rl):
sinr = sin((pr * r))
cosr = cos((pr * r))
verts.append(((px * sinr), py, (px * cosr)))
norms.append((((- sinr) * dy), dx, ((- cosr) * dy)))
tex_coords.append(((1.0 - (tcx * r)), tcy))
py += rdiv
verts.append((0, py, px))
norms.append((0, dx, (- dy)))
tex_coords.append((0, tcy))
if (p < (s - 1)):
pn += (rl + 1)
for r in range(rl):
idx.append((((pp + r) + 1), (pp + r), (pn + r)))
idx.append(((pn + r), ((pn + r) + 1), ((pp + r) + 1)))
pp += (rl + 1)
opx = px
opy = py
return Buffer(self, verts, tex_coords, idx, norms)<|docstring|>Returns a Buffer object by rotating the points defined in path.
Arguments:
*path*
An array of points [(x0, y0), (x1, y1) ...] to rotate around
the y axis.
Keyword arguments:
*sides*
Number of sides to divide each rotation into.
*rise*
Amount to increment the path y values for each rotation (ie helix)
*loops*
Number of times to rotate the path by 360 (ie helix).<|endoftext|>
|
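_lathe() is an internal helper that sweeps a 2D profile around the y axis to build a Buffer; it is normally called from Shape subclasses rather than user code. The snippet below only illustrates the expected path format, with made-up coordinates:

# Illustration of the (x, y) profile _lathe() rotates about the y axis,
# listed bottom to top; sweeping it produces a goblet-like surface.
path = [(0.0, 0.0), (1.5, 0.2), (0.4, 1.0), (0.6, 2.0)]
# Inside a Shape subclass the geometry might then be built as (sketch only):
#     self.buf = [self._lathe(path, sides=24)]
|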
366f05decd43dd4cfd31a4ad5d2bc45f91871ab8d2ad1eb5c6fe878007ffce41
|
def request(self, method, request, credentials=None, extra_headers=None, json_payload=None):
'\n Executes a request.\n\n :param request: the GET request to execute.\n :return: response JSON object; False if the HTTP status code distinct\n to 200.\n '
authentication = ()
headers = {}
json_data = {}
if (extra_headers is not None):
headers = extra_headers
if (json_payload is not None):
json_data = json_payload
self.logger.debug('Payload: {0}'.format(json_data))
if ((credentials is not None) and ('username' in credentials) and ('password' in credentials)):
authentication = (credentials['username'], credentials['password'])
try:
response = requests.request(method, request, auth=authentication, headers=headers, json=json_data, timeout=self._TIMEOUT)
except RequestException:
raise
if (not response.ok):
self.logger.debug('ERROR: HTTP {0}: {1}'.format(response.status_code, response.text))
raise UnsuccessfulHttpRequestException(response.status_code, response.headers)
response_object = json.loads(response.text)
response.close()
return response_object
|
Executes a request.
:param request: the GET request to execute.
:return: response JSON object; False if the HTTP status code distinct
to 200.
|
gitssue/request/requests.py
|
request
|
julenpardo/Gitssue
| 0 |
python
|
def request(self, method, request, credentials=None, extra_headers=None, json_payload=None):
'\n Executes a request.\n\n :param request: the GET request to execute.\n :return: response JSON object; False if the HTTP status code distinct\n to 200.\n '
authentication = ()
headers = {}
json_data = {}
if (extra_headers is not None):
headers = extra_headers
if (json_payload is not None):
json_data = json_payload
self.logger.debug('Payload: {0}'.format(json_data))
if ((credentials is not None) and ('username' in credentials) and ('password' in credentials)):
authentication = (credentials['username'], credentials['password'])
try:
response = requests.request(method, request, auth=authentication, headers=headers, json=json_data, timeout=self._TIMEOUT)
except RequestException:
raise
if (not response.ok):
self.logger.debug('ERROR: HTTP {0}: {1}'.format(response.status_code, response.text))
raise UnsuccessfulHttpRequestException(response.status_code, response.headers)
response_object = json.loads(response.text)
response.close()
return response_object
|
def request(self, method, request, credentials=None, extra_headers=None, json_payload=None):
'\n Executes a request.\n\n :param request: the GET request to execute.\n :return: response JSON object; False if the HTTP status code distinct\n to 200.\n '
authentication = ()
headers = {}
json_data = {}
if (extra_headers is not None):
headers = extra_headers
if (json_payload is not None):
json_data = json_payload
self.logger.debug('Payload: {0}'.format(json_data))
if ((credentials is not None) and ('username' in credentials) and ('password' in credentials)):
authentication = (credentials['username'], credentials['password'])
try:
response = requests.request(method, request, auth=authentication, headers=headers, json=json_data, timeout=self._TIMEOUT)
except RequestException:
raise
if (not response.ok):
self.logger.debug('ERROR: HTTP {0}: {1}'.format(response.status_code, response.text))
raise UnsuccessfulHttpRequestException(response.status_code, response.headers)
response_object = json.loads(response.text)
response.close()
return response_object<|docstring|>Executes a request.
:param request: the GET request to execute.
:return: response JSON object; False if the HTTP status code distinct
to 200.<|endoftext|>
|
a215d6e6e15de0b3c3a6ab8d683f9d99452f375376ccfcde8cb91fbec1cad2d7
|
def assertRegexp(self, text, regex, msg=None):
'Wrapper around the different names for assertRegexp....'
for name in ['assertRegex', 'assertRegexpMatches']:
if hasattr(self, name):
return getattr(self, name)(text, regex, msg)
self.assertTrue(False, 'No method to check assertRegexp')
|
Wrapper around the different names for assertRegexp....
|
didyoumean/didyoumean_re_tests.py
|
assertRegexp
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def assertRegexp(self, text, regex, msg=None):
for name in ['assertRegex', 'assertRegexpMatches']:
if hasattr(self, name):
return getattr(self, name)(text, regex, msg)
self.assertTrue(False, 'No method to check assertRegexp')
|
def assertRegexp(self, text, regex, msg=None):
for name in ['assertRegex', 'assertRegexpMatches']:
if hasattr(self, name):
return getattr(self, name)(text, regex, msg)
self.assertTrue(False, 'No method to check assertRegexp')<|docstring|>Wrapper around the different names for assertRegexp....<|endoftext|>
|
9079eb71a679d6a0475fbc96fcb7dd21ab28fc26de665f587a88fa6966263e48
|
def assertNotRegexp(self, text, regex, msg=None):
'Wrapper around the different names for assertRegexpNot....'
for name in ['assertNotRegex', 'assertNotRegexpMatches']:
if hasattr(self, name):
return getattr(self, name)(text, regex, msg)
self.assertTrue(False, 'No method to check assertNotRegexp')
|
Wrapper around the different names for assertRegexpNot....
|
didyoumean/didyoumean_re_tests.py
|
assertNotRegexp
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def assertNotRegexp(self, text, regex, msg=None):
for name in ['assertNotRegex', 'assertNotRegexpMatches']:
if hasattr(self, name):
return getattr(self, name)(text, regex, msg)
self.assertTrue(False, 'No method to check assertNotRegexp')
|
def assertNotRegexp(self, text, regex, msg=None):
for name in ['assertNotRegex', 'assertNotRegexpMatches']:
if hasattr(self, name):
return getattr(self, name)(text, regex, msg)
self.assertTrue(False, 'No method to check assertNotRegexp')<|docstring|>Wrapper around the different names for assertRegexpNot....<|endoftext|>
|
88a3f9274776f4edbd06346cd8cc53268d5355db94688cfa5351eba3ad30e1ae
|
def re_matches(self, text, regexp, results):
'Check that text matches regexp and gives the right match groups.\n\n result is a tuple containing the expected return values for groups()\n and groupdict().\n '
(groups, named_groups) = results
self.assertRegexp(text, regexp)
match = re.match(regexp, text)
self.assertTrue(match)
self.assertEqual(groups, match.groups())
self.assertEqual(named_groups, match.groupdict())
self.check_more_about_re(text, regexp)
|
Check that text matches regexp and gives the right match groups.
result is a tuple containing the expected return values for groups()
and groupdict().
|
didyoumean/didyoumean_re_tests.py
|
re_matches
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def re_matches(self, text, regexp, results):
'Check that text matches regexp and gives the right match groups.\n\n result is a tuple containing the expected return values for groups()\n and groupdict().\n '
(groups, named_groups) = results
self.assertRegexp(text, regexp)
match = re.match(regexp, text)
self.assertTrue(match)
self.assertEqual(groups, match.groups())
self.assertEqual(named_groups, match.groupdict())
self.check_more_about_re(text, regexp)
|
def re_matches(self, text, regexp, results):
'Check that text matches regexp and gives the right match groups.\n\n result is a tuple containing the expected return values for groups()\n and groupdict().\n '
(groups, named_groups) = results
self.assertRegexp(text, regexp)
match = re.match(regexp, text)
self.assertTrue(match)
self.assertEqual(groups, match.groups())
self.assertEqual(named_groups, match.groupdict())
self.check_more_about_re(text, regexp)<|docstring|>Check that text matches regexp and gives the right match groups.
result is a tuple containing the expected return values for groups()
and groupdict().<|endoftext|>
|
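re_matches() asserts that a message matches a regexp and that groups() and groupdict() come back exactly as expected. The same check written against the standard library directly, outside the test class (the module imported as `re` in these tests appears to be the project's own regexp catalogue, so the standard module is aliased here to stay unambiguous; the pattern is illustrative only):

import re as stdlib_re   # plain standard-library re, aliased to avoid the name clash

msg = "name 'some_name' is not defined"
pattern = r"^name '(?P<name>\w+)' is not defined$"   # illustrative pattern only
match = stdlib_re.match(pattern, msg)
assert match is not None
assert match.groups() == ('some_name',)
assert match.groupdict() == {'name': 'some_name'}
|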
805e19115496931d5fb2efa9a2a20ffd7ad9c76f3be52357928091d9d0db8634
|
def check_more_about_re(self, text, regexp):
'Check various properties about the regexp.\n\n Properties checked are configurable via global constants. These\n properties are not strictly speaking required but they help to\n detect potential issues much more quickly.\n '
if CHECK_RE_VALUE:
self.assertTrue(regexp.startswith('^'))
self.assertTrue(regexp.endswith('$'))
found = False
for (other_name, other_re) in re.ALL_REGEXPS.items():
if (other_re == regexp):
found = True
if CHECK_RE_NAME:
self.assertTrue(other_name.endswith('_RE'))
elif CHECK_OTHERS_DONT_MATCH:
details = ("text '%s' matches %s (on top of %s)" % (text, other_name, regexp))
self.assertNotRegexp(text, other_re, details)
no_match = re.match(other_re, text)
self.assertEqual(no_match, None, details)
if CHECK_RE_LISTED:
self.assertTrue(found)
|
Check various properties about the regexp.
Properties checked are configurable via global constants. These
properties are not strictly speaking required but they help to
detect potential issues much more quickly.
|
didyoumean/didyoumean_re_tests.py
|
check_more_about_re
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def check_more_about_re(self, text, regexp):
'Check various properties about the regexp.\n\n Properties checked are configurable via global constants. These\n properties are not strictly speaking required but they help to\n detect potential issues much more quickly.\n '
if CHECK_RE_VALUE:
self.assertTrue(regexp.startswith('^'))
self.assertTrue(regexp.endswith('$'))
found = False
for (other_name, other_re) in re.ALL_REGEXPS.items():
if (other_re == regexp):
found = True
if CHECK_RE_NAME:
self.assertTrue(other_name.endswith('_RE'))
elif CHECK_OTHERS_DONT_MATCH:
details = ("text '%s' matches %s (on top of %s)" % (text, other_name, regexp))
self.assertNotRegexp(text, other_re, details)
no_match = re.match(other_re, text)
self.assertEqual(no_match, None, details)
if CHECK_RE_LISTED:
self.assertTrue(found)
|
def check_more_about_re(self, text, regexp):
'Check various properties about the regexp.\n\n Properties checked are configurable via global constants. These\n properties are not strictly speaking required but they help to\n detect potential issues much more quickly.\n '
if CHECK_RE_VALUE:
self.assertTrue(regexp.startswith('^'))
self.assertTrue(regexp.endswith('$'))
found = False
for (other_name, other_re) in re.ALL_REGEXPS.items():
if (other_re == regexp):
found = True
if CHECK_RE_NAME:
self.assertTrue(other_name.endswith('_RE'))
elif CHECK_OTHERS_DONT_MATCH:
details = ("text '%s' matches %s (on top of %s)" % (text, other_name, regexp))
self.assertNotRegexp(text, other_re, details)
no_match = re.match(other_re, text)
self.assertEqual(no_match, None, details)
if CHECK_RE_LISTED:
self.assertTrue(found)<|docstring|>Check various properties about the regexp.
Properties checked are configurable via global constants. These
properties are not strictly speaking required but they help to
detect potential issues much more quickly.<|endoftext|>
|
19e7b39abae56099b099f10c7089f1facbcc568add1c57e7b1e8436d2bdd7d56
|
def test_var_name(self):
'Test VAR_NAME.'
regex = (('^' + re.VAR_NAME) + '$')
real_names = (set(locals().keys()) | set(globals().keys()))
names = (['a', 'a1', '_a1', 'aa_bb'] + list(real_names))
for name in names:
self.assertRegexp(name, regex)
for name in ['1a']:
self.assertNotRegexp(name, regex)
|
Test VAR_NAME.
|
didyoumean/didyoumean_re_tests.py
|
test_var_name
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_var_name(self):
regex = (('^' + re.VAR_NAME) + '$')
real_names = (set(locals().keys()) | set(globals().keys()))
names = (['a', 'a1', '_a1', 'aa_bb'] + list(real_names))
for name in names:
self.assertRegexp(name, regex)
for name in ['1a']:
self.assertNotRegexp(name, regex)
|
def test_var_name(self):
regex = (('^' + re.VAR_NAME) + '$')
real_names = (set(locals().keys()) | set(globals().keys()))
names = (['a', 'a1', '_a1', 'aa_bb'] + list(real_names))
for name in names:
self.assertRegexp(name, regex)
for name in ['1a']:
self.assertNotRegexp(name, regex)<|docstring|>Test VAR_NAME.<|endoftext|>
|
24eed1fdfb3b3be514a991359a1bf831d4b3cea3059bece59fa387fe6edbcaca
|
def test_attr_name(self):
'Test ATTR_NAME.'
regex = (('^' + re.ATTR_NAME) + '$')
real_attrs = set()
for o in get_subclasses(object):
try:
real_attrs.update(dir(o))
except AttributeError:
pass
attrs = (['do_stuff', '__magic__'] + list(real_attrs))
for attr in attrs:
self.assertRegexp(attr, regex)
for attr in ['1a']:
self.assertNotRegexp(attr, regex)
|
Test ATTR_NAME.
|
didyoumean/didyoumean_re_tests.py
|
test_attr_name
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_attr_name(self):
regex = (('^' + re.ATTR_NAME) + '$')
real_attrs = set()
for o in get_subclasses(object):
try:
real_attrs.update(dir(o))
except AttributeError:
pass
attrs = (['do_stuff', '__magic__'] + list(real_attrs))
for attr in attrs:
self.assertRegexp(attr, regex)
for attr in ['1a']:
self.assertNotRegexp(attr, regex)
|
def test_attr_name(self):
regex = (('^' + re.ATTR_NAME) + '$')
real_attrs = set()
for o in get_subclasses(object):
try:
real_attrs.update(dir(o))
except AttributeError:
pass
attrs = (['do_stuff', '__magic__'] + list(real_attrs))
for attr in attrs:
self.assertRegexp(attr, regex)
for attr in ['1a']:
self.assertNotRegexp(attr, regex)<|docstring|>Test ATTR_NAME.<|endoftext|>
|
3463ef1973450296bdcf4dfdef0655e1b0370160fba586a2bcc42d0ca39d2a4e
|
def test_type_name(self):
'Test TYPE_NAME.'
regex = (('^' + re.TYPE_NAME) + '$')
real_types = set((c.__name__ for c in get_subclasses(object)))
types = (['str', 'int', 'method-wrapper', 'builtin_function', 'builtin_function_or_method'] + list(real_types))
for type_ in types:
if (type_ != 'symtable entry'):
self.assertRegexp(type_, regex)
|
Test TYPE_NAME.
|
didyoumean/didyoumean_re_tests.py
|
test_type_name
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_type_name(self):
regex = (('^' + re.TYPE_NAME) + '$')
real_types = set((c.__name__ for c in get_subclasses(object)))
types = (['str', 'int', 'method-wrapper', 'builtin_function', 'builtin_function_or_method'] + list(real_types))
for type_ in types:
if (type_ != 'symtable entry'):
self.assertRegexp(type_, regex)
|
def test_type_name(self):
regex = (('^' + re.TYPE_NAME) + '$')
real_types = set((c.__name__ for c in get_subclasses(object)))
types = (['str', 'int', 'method-wrapper', 'builtin_function', 'builtin_function_or_method'] + list(real_types))
for type_ in types:
if (type_ != 'symtable entry'):
self.assertRegexp(type_, regex)<|docstring|>Test TYPE_NAME.<|endoftext|>
|
60242d400ea491b1346568a25d62f851501ab2e88afdb68d4d632d8f08c5741d
|
def test_func_name(self):
'Test FUNC_NAME.'
regex = (('^' + re.FUNC_NAME) + '$')
real_funcs = [(lambda x: x), range, dir, dict.get, list.index, classmethod]
real_func_names = [f.__name__ for f in real_funcs]
more_func_names = ['get', 'range', '<lambda>', 'print']
for func in (real_func_names + more_func_names):
self.assertRegexp(func, regex)
|
Test FUNC_NAME.
|
didyoumean/didyoumean_re_tests.py
|
test_func_name
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_func_name(self):
regex = (('^' + re.FUNC_NAME) + '$')
real_funcs = [(lambda x: x), range, dir, dict.get, list.index, classmethod]
real_func_names = [f.__name__ for f in real_funcs]
more_func_names = ['get', 'range', '<lambda>', 'print']
for func in (real_func_names + more_func_names):
self.assertRegexp(func, regex)
|
def test_func_name(self):
regex = (('^' + re.FUNC_NAME) + '$')
real_funcs = [(lambda x: x), range, dir, dict.get, list.index, classmethod]
real_func_names = [f.__name__ for f in real_funcs]
more_func_names = ['get', 'range', '<lambda>', 'print']
for func in (real_func_names + more_func_names):
self.assertRegexp(func, regex)<|docstring|>Test FUNC_NAME.<|endoftext|>
|
e0713e261957e4eef0a44f72f9aa27f869bd3a914026045703f0e842de9717fe
|
def test_qual_func_name(self):
'Test QUAL_FUNC_NAME.'
regex = (('^' + re.QUAL_FUNC_NAME) + '$')
real_funcs = [(lambda x: x), range, dir, dict.get, list.index, classmethod]
real_func_names = [f.__qualname__ for f in real_funcs if hasattr(f, '__qualname__')]
more_func_names = ['struct.pack', 'deque.index', 'Struct.pack']
for func in (real_func_names + more_func_names):
self.assertRegexp(func, regex)
|
Test QUAL_FUNC_NAME.
|
didyoumean/didyoumean_re_tests.py
|
test_qual_func_name
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_qual_func_name(self):
regex = (('^' + re.QUAL_FUNC_NAME) + '$')
real_funcs = [(lambda x: x), range, dir, dict.get, list.index, classmethod]
real_func_names = [f.__qualname__ for f in real_funcs if hasattr(f, '__qualname__')]
more_func_names = ['struct.pack', 'deque.index', 'Struct.pack']
for func in (real_func_names + more_func_names):
self.assertRegexp(func, regex)
|
def test_qual_func_name(self):
regex = (('^' + re.QUAL_FUNC_NAME) + '$')
real_funcs = [(lambda x: x), range, dir, dict.get, list.index, classmethod]
real_func_names = [f.__qualname__ for f in real_funcs if hasattr(f, '__qualname__')]
more_func_names = ['struct.pack', 'deque.index', 'Struct.pack']
for func in (real_func_names + more_func_names):
self.assertRegexp(func, regex)<|docstring|>Test QUAL_FUNC_NAME.<|endoftext|>
|
b5355ee44b76d01eb258292bdb260be9968e882780b67f5aef867c4dd7a9be05
|
def test_module_name(self):
'Test MODULE_NAME.'
regex = (('^' + re.MODULE_NAME) + '$')
real_modules = set(sys.modules.keys())
modules = (['sys', 'unittest.runner'] + list(real_modules))
for mod in modules:
if (not mod.startswith('$coverage')):
self.assertRegexp(mod, regex)
|
Test MODULE_NAME.
|
didyoumean/didyoumean_re_tests.py
|
test_module_name
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_module_name(self):
regex = (('^' + re.MODULE_NAME) + '$')
real_modules = set(sys.modules.keys())
modules = (['sys', 'unittest.runner'] + list(real_modules))
for mod in modules:
if (not mod.startswith('$coverage')):
self.assertRegexp(mod, regex)
|
def test_module_name(self):
regex = (('^' + re.MODULE_NAME) + '$')
real_modules = set(sys.modules.keys())
modules = (['sys', 'unittest.runner'] + list(real_modules))
for mod in modules:
if (not mod.startswith('$coverage')):
self.assertRegexp(mod, regex)<|docstring|>Test MODULE_NAME.<|endoftext|>
|
0cac77e3999cf9cb2c26ae600b661e92f70e403ebc8160b4efd7ea4f1499115b
|
def test_unbound_assignment(self):
'Test VARREFBEFOREASSIGN_RE.'
msgs = ["local variable 'some_var' referenced before assignment", "free variable 'some_var' referenced before assignment in enclosing scope"]
groups = ('some_var',)
named_groups = {'name': 'some_var'}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.VARREFBEFOREASSIGN_RE, results)
|
Test VARREFBEFOREASSIGN_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unbound_assignment
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unbound_assignment(self):
msgs = ["local variable 'some_var' referenced before assignment", "free variable 'some_var' referenced before assignment in enclosing scope"]
groups = ('some_var',)
named_groups = {'name': 'some_var'}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.VARREFBEFOREASSIGN_RE, results)
|
def test_unbound_assignment(self):
msgs = ["local variable 'some_var' referenced before assignment", "free variable 'some_var' referenced before assignment in enclosing scope"]
groups = ('some_var',)
named_groups = {'name': 'some_var'}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.VARREFBEFOREASSIGN_RE, results)<|docstring|>Test VARREFBEFOREASSIGN_RE.<|endoftext|>
|
4b745b4eb11c6e4f2b3db62657ee0411b5bd1ce42c570986fe42b16957af4558
|
def test_name_not_defined(self):
'Test NAMENOTDEFINED_RE.'
msgs = ["name 'some_name' is not defined", "global name 'some_name' is not defined"]
groups = ('some_name',)
named_groups = {'name': 'some_name'}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.NAMENOTDEFINED_RE, results)
|
Test NAMENOTDEFINED_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_name_not_defined
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_name_not_defined(self):
msgs = ["name 'some_name' is not defined", "global name 'some_name' is not defined"]
groups = ('some_name',)
named_groups = {'name': 'some_name'}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.NAMENOTDEFINED_RE, results)
|
def test_name_not_defined(self):
msgs = ["name 'some_name' is not defined", "global name 'some_name' is not defined"]
groups = ('some_name',)
named_groups = {'name': 'some_name'}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.NAMENOTDEFINED_RE, results)<|docstring|>Test NAMENOTDEFINED_RE.<|endoftext|>
|
0db954bd5018448e31a901b557fa04c1e92f45760d99a1daf15da3bc23bcf687
|
def test_attribute_error(self):
'Test ATTRIBUTEERROR_RE.'
group_msg = {('some.class', 'attri'): ["'some.class' object has no attribute 'attri'"], ('SomeClass', 'attri'): ["SomeClass instance has no attribute 'attri'", "class SomeClass has no attribute 'attri'", "type object 'SomeClass' has no attribute 'attri'"]}
for (groups, msgs) in group_msg.items():
(_, attr) = groups
named_groups = {'attr': attr}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.ATTRIBUTEERROR_RE, results)
|
Test ATTRIBUTEERROR_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_attribute_error
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_attribute_error(self):
group_msg = {('some.class', 'attri'): ["'some.class' object has no attribute 'attri'"], ('SomeClass', 'attri'): ["SomeClass instance has no attribute 'attri'", "class SomeClass has no attribute 'attri'", "type object 'SomeClass' has no attribute 'attri'"]}
for (groups, msgs) in group_msg.items():
(_, attr) = groups
named_groups = {'attr': attr}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.ATTRIBUTEERROR_RE, results)
|
def test_attribute_error(self):
group_msg = {('some.class', 'attri'): ["'some.class' object has no attribute 'attri'"], ('SomeClass', 'attri'): ["SomeClass instance has no attribute 'attri'", "class SomeClass has no attribute 'attri'", "type object 'SomeClass' has no attribute 'attri'"]}
for (groups, msgs) in group_msg.items():
(_, attr) = groups
named_groups = {'attr': attr}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.ATTRIBUTEERROR_RE, results)<|docstring|>Test ATTRIBUTEERROR_RE.<|endoftext|>
|
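test_attribute_error feeds real AttributeError wordings from several CPython versions into ATTRIBUTEERROR_RE. A sketch of producing one such message at runtime (the exact wording varies by interpreter version):

# Sketch: generating a message of the kind the record above parses.
try:
    [].attri                  # list objects have no attribute 'attri'
except AttributeError as exc:
    message = str(exc)        # e.g. "'list' object has no attribute 'attri'"
    print(message)
|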
dcd8a13528cb896b7226d5b8ec6c0196485bf8351469c1c7462e947044101cc1
|
def test_module_attribute_error(self):
'Test MODULEHASNOATTRIBUTE_RE.'
msg = "module 'some_module' has no attribute 'attri'"
groups = ('some_module', 'attri')
(_, attr) = groups
named_groups = {'attr': attr}
results = (groups, named_groups)
self.re_matches(msg, re.MODULEHASNOATTRIBUTE_RE, results)
|
Test MODULEHASNOATTRIBUTE_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_module_attribute_error
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_module_attribute_error(self):
msg = "module 'some_module' has no attribute 'attri'"
groups = ('some_module', 'attri')
(_, attr) = groups
named_groups = {'attr': attr}
results = (groups, named_groups)
self.re_matches(msg, re.MODULEHASNOATTRIBUTE_RE, results)
|
def test_module_attribute_error(self):
msg = "module 'some_module' has no attribute 'attri'"
groups = ('some_module', 'attri')
(_, attr) = groups
named_groups = {'attr': attr}
results = (groups, named_groups)
self.re_matches(msg, re.MODULEHASNOATTRIBUTE_RE, results)<|docstring|>Test MODULEHASNOATTRIBUTE_RE.<|endoftext|>
|
8255a0f88e65eb7ae601e7017fb53bfa5102e4ca6d3df2c96568ec393ba4e659
|
def test_cannot_import(self):
'Test CANNOTIMPORT_RE.'
msgs = ['cannot import name pie', "cannot import name 'pie'", "cannot import name 'pie' from 'math' (/some/path)", "cannot import name 'pie' from 'math' (unknown location)"]
name = 'pie'
groups = (name,)
named_groups = {'name': name}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.CANNOTIMPORT_RE, results)
|
Test CANNOTIMPORT_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_cannot_import
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_cannot_import(self):
msgs = ['cannot import name pie', "cannot import name 'pie'", "cannot import name 'pie' from 'math' (/some/path)", "cannot import name 'pie' from 'math' (unknown location)"]
name = 'pie'
groups = (name,)
named_groups = {'name': name}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.CANNOTIMPORT_RE, results)
|
def test_cannot_import(self):
msgs = ['cannot import name pie', "cannot import name 'pie'", "cannot import name 'pie' from 'math' (/some/path)", "cannot import name 'pie' from 'math' (unknown location)"]
name = 'pie'
groups = (name,)
named_groups = {'name': name}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.CANNOTIMPORT_RE, results)<|docstring|>Test CANNOTIMPORT_RE.<|endoftext|>
|
f0cbfeaf2dbaa49c0a1675c14f521f6710c57484529a8a41fe3b926d1a7d7094
|
def test_no_module_named(self):
'Test NOMODULE_RE.'
msgs = ['No module named fake_module', "No module named 'fake_module'"]
groups = ('fake_module',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.NOMODULE_RE, results)
|
Test NOMODULE_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_no_module_named
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_no_module_named(self):
msgs = ['No module named fake_module', "No module named 'fake_module'"]
groups = ('fake_module',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.NOMODULE_RE, results)
|
def test_no_module_named(self):
msgs = ['No module named fake_module', "No module named 'fake_module'"]
groups = ('fake_module',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.NOMODULE_RE, results)<|docstring|>Test NOMODULE_RE.<|endoftext|>
|
0399f3408ba69d62ea9e44b217cb61092a4e5b896fd443bb4061ffd40af8fb64
|
def test_index_out_of_range(self):
'Test INDEXOUTOFRANGE_RE.'
msg = 'list index out of range'
self.re_matches(msg, re.INDEXOUTOFRANGE_RE, NO_GROUP)
|
Test INDEXOUTOFRANGE_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_index_out_of_range
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_index_out_of_range(self):
msg = 'list index out of range'
self.re_matches(msg, re.INDEXOUTOFRANGE_RE, NO_GROUP)
|
def test_index_out_of_range(self):
msg = 'list index out of range'
self.re_matches(msg, re.INDEXOUTOFRANGE_RE, NO_GROUP)<|docstring|>Test INDEXOUTOFRANGE_RE.<|endoftext|>
|
82a71f98c5be69faf987b4f591975c12506355cd7ccfcd707b0a1e233093d521
|
def test_unsubscriptable(self):
'Test UNSUBSCRIPTABLE_RE.'
msgs = ["'function' object is unsubscriptable", "'function' object is not subscriptable", "'function' object is not subscriptable (key 0)"]
groups = ('function',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.UNSUBSCRIPTABLE_RE, results)
|
Test UNSUBSCRIPTABLE_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unsubscriptable
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unsubscriptable(self):
msgs = ["'function' object is unsubscriptable", "'function' object is not subscriptable", "'function' object is not subscriptable (key 0)"]
groups = ('function',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.UNSUBSCRIPTABLE_RE, results)
|
def test_unsubscriptable(self):
msgs = ["'function' object is unsubscriptable", "'function' object is not subscriptable", "'function' object is not subscriptable (key 0)"]
groups = ('function',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.UNSUBSCRIPTABLE_RE, results)<|docstring|>Test UNSUBSCRIPTABLE_RE.<|endoftext|>
|
489f50ba9d080ede7437f0024a83a5e99d66c83a7b1e4c262ff13c84d0fcd77d
|
def test_unexpected_kw_arg(self):
'Test UNEXPECTED_KEYWORDARG_RE.'
msgs = [("some_func() got an unexpected keyword argument 'a'", ('some_func', 'a')), ("<lambda>() got an unexpected keyword argument 'a'", ('<lambda>', 'a')), ("MyClass.func() got an unexpected keyword argument 'a'", ('MyClass.func', 'a'))]
for (msg, groups) in msgs:
(func, kw_arg) = groups
named_groups = {'arg': kw_arg, 'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG_RE, results)
|
Test UNEXPECTED_KEYWORDARG_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unexpected_kw_arg
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unexpected_kw_arg(self):
msgs = [("some_func() got an unexpected keyword argument 'a'", ('some_func', 'a')), ("<lambda>() got an unexpected keyword argument 'a'", ('<lambda>', 'a')), ("MyClass.func() got an unexpected keyword argument 'a'", ('MyClass.func', 'a'))]
for (msg, groups) in msgs:
(func, kw_arg) = groups
named_groups = {'arg': kw_arg, 'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG_RE, results)
|
def test_unexpected_kw_arg(self):
msgs = [("some_func() got an unexpected keyword argument 'a'", ('some_func', 'a')), ("<lambda>() got an unexpected keyword argument 'a'", ('<lambda>', 'a')), ("MyClass.func() got an unexpected keyword argument 'a'", ('MyClass.func', 'a'))]
for (msg, groups) in msgs:
(func, kw_arg) = groups
named_groups = {'arg': kw_arg, 'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG_RE, results)<|docstring|>Test UNEXPECTED_KEYWORDARG_RE.<|endoftext|>
|
67267e3d6d5a4e9ae683246bb0cc9d23f11ebafb2d909333c1eb9d31b5a99b94
|
def test_unexpected_kw_arg2(self):
'Test UNEXPECTED_KEYWORDARG2_RE.'
msg = "'this_doesnt_exist' is an invalid keyword argument for this function"
groups = ('this_doesnt_exist',)
(kw_arg,) = groups
named_groups = {'arg': kw_arg}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG2_RE, results)
|
Test UNEXPECTED_KEYWORDARG2_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unexpected_kw_arg2
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unexpected_kw_arg2(self):
msg = "'this_doesnt_exist' is an invalid keyword argument for this function"
groups = ('this_doesnt_exist',)
(kw_arg,) = groups
named_groups = {'arg': kw_arg}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG2_RE, results)
|
def test_unexpected_kw_arg2(self):
msg = "'this_doesnt_exist' is an invalid keyword argument for this function"
groups = ('this_doesnt_exist',)
(kw_arg,) = groups
named_groups = {'arg': kw_arg}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG2_RE, results)<|docstring|>Test UNEXPECTED_KEYWORDARG2_RE.<|endoftext|>
|
3b7498225f8e8c8d4a2910f28678da7c31ae4173bbb3458d3fd3ab84e42ea262
|
def test_unexpected_kw_arg3(self):
'Test UNEXPECTED_KEYWORDARG3_RE.'
msg = 'invalid keyword arguments to print()'
func = 'print'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG3_RE, results)
|
Test UNEXPECTED_KEYWORDARG3_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unexpected_kw_arg3
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unexpected_kw_arg3(self):
msg = 'invalid keyword arguments to print()'
func = 'print'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG3_RE, results)
|
def test_unexpected_kw_arg3(self):
msg = 'invalid keyword arguments to print()'
func = 'print'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG3_RE, results)<|docstring|>Test UNEXPECTED_KEYWORDARG3_RE.<|endoftext|>
|
6bc772ea3fb32a5720e9561c1c25c323162fee53b5cd5f58376f3f02b4ff8667
|
def test_unexpected_kw_arg4(self):
'Test UNEXPECTED_KEYWORDARG4_RE.'
msgs = [("'this_doesnt_exist' is an invalid keyword argument for int()", ('this_doesnt_exist', 'int')), ("'end_' is an invalid keyword argument for print()", ('end_', 'print')), ("'cmp' is an invalid keyword argument for sort()", ('cmp', 'sort'))]
for (msg, groups) in msgs:
(kw_arg, func) = groups
named_groups = {'arg': kw_arg, 'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG4_RE, results)
|
Test UNEXPECTED_KEYWORDARG4_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unexpected_kw_arg4
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unexpected_kw_arg4(self):
msgs = [("'this_doesnt_exist' is an invalid keyword argument for int()", ('this_doesnt_exist', 'int')), ("'end_' is an invalid keyword argument for print()", ('end_', 'print')), ("'cmp' is an invalid keyword argument for sort()", ('cmp', 'sort'))]
for (msg, groups) in msgs:
(kw_arg, func) = groups
named_groups = {'arg': kw_arg, 'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG4_RE, results)
|
def test_unexpected_kw_arg4(self):
msgs = [("'this_doesnt_exist' is an invalid keyword argument for int()", ('this_doesnt_exist', 'int')), ("'end_' is an invalid keyword argument for print()", ('end_', 'print')), ("'cmp' is an invalid keyword argument for sort()", ('cmp', 'sort'))]
for (msg, groups) in msgs:
(kw_arg, func) = groups
named_groups = {'arg': kw_arg, 'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.UNEXPECTED_KEYWORDARG4_RE, results)<|docstring|>Test UNEXPECTED_KEYWORDARG4_RE.<|endoftext|>
|
5499d432875ff7c2e22646afab031842eba2faaa1edce8791381ab00bc947d80
|
def test_func_takes_no_kwarg(self):
'Test FUNC_TAKES_NO_KEYWORDARG_RE.'
msgs = [('get', 'get() takes no keyword arguments'), ('get', 'get does not take keyword arguments'), ('get', 'get() does not take keyword arguments'), ('dict.get', 'dict.get() takes no keyword arguments')]
for (func, msg) in msgs:
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.FUNC_TAKES_NO_KEYWORDARG_RE, results)
|
Test FUNC_TAKES_NO_KEYWORDARG_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_func_takes_no_kwarg
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_func_takes_no_kwarg(self):
msgs = [('get', 'get() takes no keyword arguments'), ('get', 'get does not take keyword arguments'), ('get', 'get() does not take keyword arguments'), ('dict.get', 'dict.get() takes no keyword arguments')]
for (func, msg) in msgs:
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.FUNC_TAKES_NO_KEYWORDARG_RE, results)
|
def test_func_takes_no_kwarg(self):
msgs = [('get', 'get() takes no keyword arguments'), ('get', 'get does not take keyword arguments'), ('get', 'get() does not take keyword arguments'), ('dict.get', 'dict.get() takes no keyword arguments')]
for (func, msg) in msgs:
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.FUNC_TAKES_NO_KEYWORDARG_RE, results)<|docstring|>Test FUNC_TAKES_NO_KEYWORDARG_RE.<|endoftext|>
|
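test_func_takes_no_kwarg covers the messages CPython emits when a C-implemented callable such as dict.get receives keyword arguments; the wording differs between versions, which is why several variants are listed above. A sketch of triggering one of them:

# Sketch: dict.get rejects keyword arguments on the CPython versions
# these tests target; the exact message text is version-dependent.
try:
    {}.get('k', default=None)
except TypeError as exc:
    print(exc)    # e.g. "get() takes no keyword arguments"
|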
54f01d59b6529ab32d7b00cec28279f6a0a2ec912be7f31c55b038934ce8afa9
|
def test_zero_length_field(self):
'Test ZERO_LEN_FIELD_RE.'
msg = 'zero length field name in format'
self.re_matches(msg, re.ZERO_LEN_FIELD_RE, NO_GROUP)
|
Test ZERO_LEN_FIELD_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_zero_length_field
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_zero_length_field(self):
msg = 'zero length field name in format'
self.re_matches(msg, re.ZERO_LEN_FIELD_RE, NO_GROUP)
|
def test_zero_length_field(self):
msg = 'zero length field name in format'
self.re_matches(msg, re.ZERO_LEN_FIELD_RE, NO_GROUP)<|docstring|>Test ZERO_LEN_FIELD_RE.<|endoftext|>
|
9bffc9ad38dfec275eec8c3f99599308da956c95fcb86c82285680798b6a1cd9
|
def test_math_domain_error(self):
'Test MATH_DOMAIN_ERROR_RE.'
msg = 'math domain error'
self.re_matches(msg, re.MATH_DOMAIN_ERROR_RE, NO_GROUP)
|
Test MATH_DOMAIN_ERROR_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_math_domain_error
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_math_domain_error(self):
msg = 'math domain error'
self.re_matches(msg, re.MATH_DOMAIN_ERROR_RE, NO_GROUP)
|
def test_math_domain_error(self):
msg = 'math domain error'
self.re_matches(msg, re.MATH_DOMAIN_ERROR_RE, NO_GROUP)<|docstring|>Test MATH_DOMAIN_ERROR_RE.<|endoftext|>
|
9edd413d291a78ddd96c5100034fcad5f553872ed1b6a1201d32e5122e525460
|
def test_too_many_values(self):
'Test TOO_MANY_VALUES_UNPACK_RE.'
msgs = ['too many values to unpack', 'too many values to unpack (expected 3)']
for msg in msgs:
self.re_matches(msg, re.TOO_MANY_VALUES_UNPACK_RE, NO_GROUP)
|
Test TOO_MANY_VALUES_UNPACK_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_too_many_values
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_too_many_values(self):
msgs = ['too many values to unpack', 'too many values to unpack (expected 3)']
for msg in msgs:
self.re_matches(msg, re.TOO_MANY_VALUES_UNPACK_RE, NO_GROUP)
|
def test_too_many_values(self):
msgs = ['too many values to unpack', 'too many values to unpack (expected 3)']
for msg in msgs:
self.re_matches(msg, re.TOO_MANY_VALUES_UNPACK_RE, NO_GROUP)<|docstring|>Test TOO_MANY_VALUES_UNPACK_RE.<|endoftext|>
|
6bbdce8b5b91fad05a445f4c6518de295f491d019e57a82c3b0316d98eea1f8b
|
def test_unhashable_type(self):
'Test UNHASHABLE_RE.'
msgs = ["unhashable type: 'list'", "'list' objects are unhashable"]
groups = ('list',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.UNHASHABLE_RE, results)
|
Test UNHASHABLE_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unhashable_type
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unhashable_type(self):
msgs = ["unhashable type: 'list'", "'list' objects are unhashable"]
groups = ('list',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.UNHASHABLE_RE, results)
|
def test_unhashable_type(self):
msgs = ["unhashable type: 'list'", "'list' objects are unhashable"]
groups = ('list',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.UNHASHABLE_RE, results)<|docstring|>Test UNHASHABLE_RE.<|endoftext|>
|
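test_unhashable_type accepts both historical spellings of the message. A sketch of producing it:

# Sketch: a list used as a dictionary key is unhashable.
try:
    {['a', 'b']: 1}
except TypeError as exc:
    print(exc)    # e.g. "unhashable type: 'list'"
|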
9b87b4c56894b58be59f0f009a01e811ed75fb85265a0da4216c9134075b1326
|
def test_cannot_be_interpreted_as_integer(self):
'Test CANNOT_BE_INTERPRETED_INT_RE.'
msgs = {"'str' object cannot be interpreted as an integer": 'str', "'list' object cannot be interpreted as an integer": 'list'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.CANNOT_BE_INTERPRETED_INT_RE, results)
|
Test CANNOT_BE_INTERPRETED_INT_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_cannot_be_interpreted_as_integer
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_cannot_be_interpreted_as_integer(self):
msgs = {"'str' object cannot be interpreted as an integer": 'str', "'list' object cannot be interpreted as an integer": 'list'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.CANNOT_BE_INTERPRETED_INT_RE, results)
|
def test_cannot_be_interpreted_as_integer(self):
msgs = {"'str' object cannot be interpreted as an integer": 'str', "'list' object cannot be interpreted as an integer": 'list'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.CANNOT_BE_INTERPRETED_INT_RE, results)<|docstring|>Test CANNOT_BE_INTERPRETED_INT_RE.<|endoftext|>
|
3347e4db78da39093615eb099f0e46f7f183719564f0e115c96a44ed63d810f4
|
def test_int_expected_got(self):
'Test INTEGER_EXPECTED_GOT_RE.'
msgs = {'expected integer, got str object': 'str', 'range() integer end argument expected, got list.': 'list', 'range() integer start argument expected, got list.': 'list'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.INTEGER_EXPECTED_GOT_RE, results)
|
Test INTEGER_EXPECTED_GOT_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_int_expected_got
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_int_expected_got(self):
msgs = {'expected integer, got str object': 'str', 'range() integer end argument expected, got list.': 'list', 'range() integer start argument expected, got list.': 'list'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.INTEGER_EXPECTED_GOT_RE, results)
|
def test_int_expected_got(self):
msgs = {'expected integer, got str object': 'str', 'range() integer end argument expected, got list.': 'list', 'range() integer start argument expected, got list.': 'list'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.INTEGER_EXPECTED_GOT_RE, results)<|docstring|>Test INTEGER_EXPECTED_GOT_RE.<|endoftext|>
|
5540ec9530acf8fccddbbb69e9779115a116fa826a3f63c060ab1bde12b9224a
|
def test_indices_must_be_int(self):
'Test INDICES_MUST_BE_INT_RE.'
msgs = {'list indices must be integers, not str': 'str', 'list indices must be integers or slices, not str': 'str', 'tuple indices must be integers or slices, not str': 'str', 'list index must be an integer, not str': 'str'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.INDICES_MUST_BE_INT_RE, results)
|
Test INDICES_MUST_BE_INT_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_indices_must_be_int
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_indices_must_be_int(self):
msgs = {'list indices must be integers, not str': 'str', 'list indices must be integers or slices, not str': 'str', 'tuple indices must be integers or slices, not str': 'str', 'list index must be an integer, not str': 'str'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.INDICES_MUST_BE_INT_RE, results)
|
def test_indices_must_be_int(self):
msgs = {'list indices must be integers, not str': 'str', 'list indices must be integers or slices, not str': 'str', 'tuple indices must be integers or slices, not str': 'str', 'list index must be an integer, not str': 'str'}
for (msg, typ) in msgs.items():
results = ((typ,), dict())
self.re_matches(msg, re.INDICES_MUST_BE_INT_RE, results)<|docstring|>Test INDICES_MUST_BE_INT_RE.<|endoftext|>
|
8cb444118091c87e233580462b58e4bcddb1309c0eacdfbb762f9d2da23252cf
|
def test_outside_function(self):
'Test OUTSIDE_FUNCTION_RE.'
msgs = ["'return' outside function", 'return outside function']
groups = ('return',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.OUTSIDE_FUNCTION_RE, results)
|
Test OUTSIDE_FUNCTION_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_outside_function
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_outside_function(self):
msgs = ["'return' outside function", 'return outside function']
groups = ('return',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.OUTSIDE_FUNCTION_RE, results)
|
def test_outside_function(self):
msgs = ["'return' outside function", 'return outside function']
groups = ('return',)
results = (groups, dict())
for msg in msgs:
self.re_matches(msg, re.OUTSIDE_FUNCTION_RE, results)<|docstring|>Test OUTSIDE_FUNCTION_RE.<|endoftext|>
|
add7ddd3b7a5c9b4bc2462de8b4264d69448375817bb0df1739f3375e7704c93
|
def test_nb_positional_argument(self):
'Test NB_ARG_RE.'
msgs = [('some_func() takes exactly 1 argument (2 given)', 'some_func', '1', '2'), ('some_func() takes exactly 3 arguments (1 given)', 'some_func', '3', '1'), ('some_func() takes no arguments (1 given)', 'some_func', 'no', '1'), ('some_func() takes at least 2 non-keyword arguments (0 given)', 'some_func', '2', '0'), ('some_func() takes exactly 1 positional argument (2 given)', 'some_func', '1', '2'), ('some_func() takes 1 positional argument but 2 were given', 'some_func', '1', '2'), ('some_func() takes 0 positional arguments but 1 was given', 'some_func', '0', '1'), ('MyClass.method() takes 0 positional arguments but 1 was given', 'MyClass.method', '0', '1'), ('get() takes from 2 to 3 positional arguments but 4 were given', 'get', '2 to 3', '4'), ("some_func() takes no arguments (1 given). Did you forget 'self' in the function definition?", 'some_func', 'no', '1'), ('get expected at least 1 arguments, got 0', 'get', '1', '0'), ('get expected at most 2 arguments, got 3', 'get', '2', '3')]
for (msg, func, exp, nb) in msgs:
groups = (func, exp, nb)
named_groups = {'func': func, 'expected': exp, 'actual': nb}
results = (groups, named_groups)
self.re_matches(msg, re.NB_ARG_RE, results)
|
Test NB_ARG_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_nb_positional_argument
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_nb_positional_argument(self):
msgs = [('some_func() takes exactly 1 argument (2 given)', 'some_func', '1', '2'), ('some_func() takes exactly 3 arguments (1 given)', 'some_func', '3', '1'), ('some_func() takes no arguments (1 given)', 'some_func', 'no', '1'), ('some_func() takes at least 2 non-keyword arguments (0 given)', 'some_func', '2', '0'), ('some_func() takes exactly 1 positional argument (2 given)', 'some_func', '1', '2'), ('some_func() takes 1 positional argument but 2 were given', 'some_func', '1', '2'), ('some_func() takes 0 positional arguments but 1 was given', 'some_func', '0', '1'), ('MyClass.method() takes 0 positional arguments but 1 was given', 'MyClass.method', '0', '1'), ('get() takes from 2 to 3 positional arguments but 4 were given', 'get', '2 to 3', '4'), ("some_func() takes no arguments (1 given). Did you forget 'self' in the function definition?", 'some_func', 'no', '1'), ('get expected at least 1 arguments, got 0', 'get', '1', '0'), ('get expected at most 2 arguments, got 3', 'get', '2', '3')]
for (msg, func, exp, nb) in msgs:
groups = (func, exp, nb)
named_groups = {'func': func, 'expected': exp, 'actual': nb}
results = (groups, named_groups)
self.re_matches(msg, re.NB_ARG_RE, results)
|
def test_nb_positional_argument(self):
msgs = [('some_func() takes exactly 1 argument (2 given)', 'some_func', '1', '2'), ('some_func() takes exactly 3 arguments (1 given)', 'some_func', '3', '1'), ('some_func() takes no arguments (1 given)', 'some_func', 'no', '1'), ('some_func() takes at least 2 non-keyword arguments (0 given)', 'some_func', '2', '0'), ('some_func() takes exactly 1 positional argument (2 given)', 'some_func', '1', '2'), ('some_func() takes 1 positional argument but 2 were given', 'some_func', '1', '2'), ('some_func() takes 0 positional arguments but 1 was given', 'some_func', '0', '1'), ('MyClass.method() takes 0 positional arguments but 1 was given', 'MyClass.method', '0', '1'), ('get() takes from 2 to 3 positional arguments but 4 were given', 'get', '2 to 3', '4'), ("some_func() takes no arguments (1 given). Did you forget 'self' in the function definition?", 'some_func', 'no', '1'), ('get expected at least 1 arguments, got 0', 'get', '1', '0'), ('get expected at most 2 arguments, got 3', 'get', '2', '3')]
for (msg, func, exp, nb) in msgs:
groups = (func, exp, nb)
named_groups = {'func': func, 'expected': exp, 'actual': nb}
results = (groups, named_groups)
self.re_matches(msg, re.NB_ARG_RE, results)<|docstring|>Test NB_ARG_RE.<|endoftext|>
|
dc3e0abd7568f79d6166d4db0752756fe35c527c93dd4f547307b772c51d3e81
|
def test_missing_positional_arg(self):
'Test MISSING_POS_ARG_RE.'
msgs = [("some_func() missing 2 required positional arguments: 'much' and 'args'", 'some_func'), ("some_func() missing 1 required positional argument: 'much'", 'some_func'), ("MyClass.some_method() missing 2 required positional arguments: 'much' and 'args'", 'MyClass.some_method')]
for (msg, func) in msgs:
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.MISSING_POS_ARG_RE, results)
|
Test MISSING_POS_ARG_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_missing_positional_arg
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_missing_positional_arg(self):
msgs = [("some_func() missing 2 required positional arguments: 'much' and 'args'", 'some_func'), ("some_func() missing 1 required positional argument: 'much'", 'some_func'), ("MyClass.some_method() missing 2 required positional arguments: 'much' and 'args'", 'MyClass.some_method')]
for (msg, func) in msgs:
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.MISSING_POS_ARG_RE, results)
|
def test_missing_positional_arg(self):
msgs = [("some_func() missing 2 required positional arguments: 'much' and 'args'", 'some_func'), ("some_func() missing 1 required positional argument: 'much'", 'some_func'), ("MyClass.some_method() missing 2 required positional arguments: 'much' and 'args'", 'MyClass.some_method')]
for (msg, func) in msgs:
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.MISSING_POS_ARG_RE, results)<|docstring|>Test MISSING_POS_ARG_RE.<|endoftext|>
|
a493f6f3ff422069d87d8d8e3d60de5272f504a8f1ef7196519f90cfa91fb9e9
|
def test_need_more_values_to_unpack(self):
'Test NEED_MORE_VALUES_RE.'
msgs = ['need more than 2 values to unpack', 'not enough values to unpack (expected 3, got 2)']
for msg in msgs:
self.re_matches(msg, re.NEED_MORE_VALUES_RE, NO_GROUP)
|
Test NEED_MORE_VALUES_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_need_more_values_to_unpack
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_need_more_values_to_unpack(self):
msgs = ['need more than 2 values to unpack', 'not enough values to unpack (expected 3, got 2)']
for msg in msgs:
self.re_matches(msg, re.NEED_MORE_VALUES_RE, NO_GROUP)
|
def test_need_more_values_to_unpack(self):
msgs = ['need more than 2 values to unpack', 'not enough values to unpack (expected 3, got 2)']
for msg in msgs:
self.re_matches(msg, re.NEED_MORE_VALUES_RE, NO_GROUP)<|docstring|>Test NEED_MORE_VALUES_RE.<|endoftext|>
|
b620eca9107095b1a239ab3e92c019a15f267d63de98d442c87d0212936719ee
|
def test_missing_parentheses(self):
'Test MISSING_PARENT_RE.'
msgs = ["Missing parentheses in call to 'exec'", "Missing parentheses in call to 'exec'. Did you mean print(...)?"]
func = 'exec'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.MISSING_PARENT_RE, results)
|
Test MISSING_PARENT_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_missing_parentheses
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_missing_parentheses(self):
msgs = ["Missing parentheses in call to 'exec'", "Missing parentheses in call to 'exec'. Did you mean print(...)?"]
func = 'exec'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.MISSING_PARENT_RE, results)
|
def test_missing_parentheses(self):
msgs = ["Missing parentheses in call to 'exec'", "Missing parentheses in call to 'exec'. Did you mean print(...)?"]
func = 'exec'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
for msg in msgs:
self.re_matches(msg, re.MISSING_PARENT_RE, results)<|docstring|>Test MISSING_PARENT_RE.<|endoftext|>
|
2d1c5a6c05c33bc28cc3104e429ef0eee32f20ecb2691746238a982907c8be66
|
def test_invalid_literal(self):
'Test INVALID_LITERAL_RE.'
msg = "invalid literal for int() with base 10: 'toto'"
groups = ('int', 'toto')
results = (groups, dict())
self.re_matches(msg, re.INVALID_LITERAL_RE, results)
|
Test INVALID_LITERAL_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_invalid_literal
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_invalid_literal(self):
msg = "invalid literal for int() with base 10: 'toto'"
groups = ('int', 'toto')
results = (groups, dict())
self.re_matches(msg, re.INVALID_LITERAL_RE, results)
|
def test_invalid_literal(self):
msg = "invalid literal for int() with base 10: 'toto'"
groups = ('int', 'toto')
results = (groups, dict())
self.re_matches(msg, re.INVALID_LITERAL_RE, results)<|docstring|>Test INVALID_LITERAL_RE.<|endoftext|>
|
c6d37c0722f0ce0da0f82072dee0e0af96340e0de93c313ebb9cb539757e2df9
|
def test_invalid_syntax(self):
'Test INVALID_SYNTAX_RE.'
msgs = ['invalid syntax', "invalid syntax (expected ':')", "expected ':'", "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"]
for msg in msgs:
self.re_matches(msg, re.INVALID_SYNTAX_RE, NO_GROUP)
|
Test INVALID_SYNTAX_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_invalid_syntax
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_invalid_syntax(self):
msgs = ['invalid syntax', "invalid syntax (expected ':')", "expected ':'", "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"]
for msg in msgs:
self.re_matches(msg, re.INVALID_SYNTAX_RE, NO_GROUP)
|
def test_invalid_syntax(self):
msgs = ['invalid syntax', "invalid syntax (expected ':')", "expected ':'", "invalid syntax. Maybe you meant '==' or ':=' instead of '='?"]
for msg in msgs:
self.re_matches(msg, re.INVALID_SYNTAX_RE, NO_GROUP)<|docstring|>Test INVALID_SYNTAX_RE.<|endoftext|>
|
f15068918764a2cbd6953f7c9cb4c00f15527b22c8d6c119642f7be69b2fb15e
|
def test_invalid_comp(self):
'Test INVALID_COMP_RE.'
msg = 'invalid comparison'
self.re_matches(msg, re.INVALID_COMP_RE, NO_GROUP)
|
Test INVALID_COMP_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_invalid_comp
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_invalid_comp(self):
msg = 'invalid comparison'
self.re_matches(msg, re.INVALID_COMP_RE, NO_GROUP)
|
def test_invalid_comp(self):
msg = 'invalid comparison'
self.re_matches(msg, re.INVALID_COMP_RE, NO_GROUP)<|docstring|>Test INVALID_COMP_RE.<|endoftext|>
|
4e425fe57502250d152f7f913ec74dfeb5b9116729b8e85e0122aa2dc7017521
|
def test_expected_length(self):
'Test EXPECTED_LENGTH_RE.'
msg = 'expected length 3, got 2'
groups = ('3', '2')
results = (groups, dict())
self.re_matches(msg, re.EXPECTED_LENGTH_RE, results)
|
Test EXPECTED_LENGTH_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_expected_length
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_expected_length(self):
msg = 'expected length 3, got 2'
groups = ('3', '2')
results = (groups, dict())
self.re_matches(msg, re.EXPECTED_LENGTH_RE, results)
|
def test_expected_length(self):
msg = 'expected length 3, got 2'
groups = ('3', '2')
results = (groups, dict())
self.re_matches(msg, re.EXPECTED_LENGTH_RE, results)<|docstring|>Test EXPECTED_LENGTH_RE.<|endoftext|>
|
db9a7e9c5008993441f88d30953e51100701081ebfe1f30301cd823ee4fa858e
|
def test_future_first(self):
'Test FUTURE_FIRST_RE.'
msgs = ['from __future__ imports must occur at the beginning of the file', '__future__ statements must appear at beginning of file']
for msg in msgs:
self.re_matches(msg, re.FUTURE_FIRST_RE, NO_GROUP)
|
Test FUTURE_FIRST_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_future_first
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_future_first(self):
msgs = ['from __future__ imports must occur at the beginning of the file', '__future__ statements must appear at beginning of file']
for msg in msgs:
self.re_matches(msg, re.FUTURE_FIRST_RE, NO_GROUP)
|
def test_future_first(self):
msgs = ['from __future__ imports must occur at the beginning of the file', '__future__ statements must appear at beginning of file']
for msg in msgs:
self.re_matches(msg, re.FUTURE_FIRST_RE, NO_GROUP)<|docstring|>Test FUTURE_FIRST_RE.<|endoftext|>
|
005a7c1ffa53288dc48c7e25f3e132bf19d61623576b6d1bd8fee649f6565a0f
|
def test_future_feature_not_def(self):
'Test FUTURE_FEATURE_NOT_DEF_RE.'
msg = 'future feature divisio is not defined'
groups = ('divisio',)
results = (groups, dict())
self.re_matches(msg, re.FUTURE_FEATURE_NOT_DEF_RE, results)
|
Test FUTURE_FEATURE_NOT_DEF_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_future_feature_not_def
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_future_feature_not_def(self):
msg = 'future feature divisio is not defined'
groups = ('divisio',)
results = (groups, dict())
self.re_matches(msg, re.FUTURE_FEATURE_NOT_DEF_RE, results)
|
def test_future_feature_not_def(self):
msg = 'future feature divisio is not defined'
groups = ('divisio',)
results = (groups, dict())
self.re_matches(msg, re.FUTURE_FEATURE_NOT_DEF_RE, results)<|docstring|>Test FUTURE_FEATURE_NOT_DEF_RE.<|endoftext|>
|
de09d62af3b309669c34fca37f520ae1c3ca482d5987dc1ee6f8dcc6ff90f611
|
def test_result_has_too_many_items(self):
'Test RESULT_TOO_MANY_ITEMS_RE.'
msg = 'range() result has too many items'
func = 'range'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.RESULT_TOO_MANY_ITEMS_RE, results)
|
Test RESULT_TOO_MANY_ITEMS_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_result_has_too_many_items
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_result_has_too_many_items(self):
msg = 'range() result has too many items'
func = 'range'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.RESULT_TOO_MANY_ITEMS_RE, results)
|
def test_result_has_too_many_items(self):
msg = 'range() result has too many items'
func = 'range'
groups = (func,)
named_groups = {'func': func}
results = (groups, named_groups)
self.re_matches(msg, re.RESULT_TOO_MANY_ITEMS_RE, results)<|docstring|>Test RESULT_TOO_MANY_ITEMS_RE.<|endoftext|>
|
b847bfb62ac1bc302cdbe52ac17601f4be4e9dd360bd663c341dfed479afac93
|
def test_unqualified_exec(self):
'Test UNQUALIFIED_EXEC_RE.'
msgs = ["unqualified exec is not allowed in function 'func_name' it is a nested function", "unqualified exec is not allowed in function 'func_name' because it is a nested function", "unqualified exec is not allowed in function 'func_name' it contains a nested function with free variables", "unqualified exec is not allowed in function 'func_name' because it contains a nested function with free variables"]
for msg in msgs:
self.re_matches(msg, re.UNQUALIFIED_EXEC_RE, NO_GROUP)
|
Test UNQUALIFIED_EXEC_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_unqualified_exec
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_unqualified_exec(self):
msgs = ["unqualified exec is not allowed in function 'func_name' it is a nested function", "unqualified exec is not allowed in function 'func_name' because it is a nested function", "unqualified exec is not allowed in function 'func_name' it contains a nested function with free variables", "unqualified exec is not allowed in function 'func_name' because it contains a nested function with free variables"]
for msg in msgs:
self.re_matches(msg, re.UNQUALIFIED_EXEC_RE, NO_GROUP)
|
def test_unqualified_exec(self):
msgs = ["unqualified exec is not allowed in function 'func_name' it is a nested function", "unqualified exec is not allowed in function 'func_name' because it is a nested function", "unqualified exec is not allowed in function 'func_name' it contains a nested function with free variables", "unqualified exec is not allowed in function 'func_name' because it contains a nested function with free variables"]
for msg in msgs:
self.re_matches(msg, re.UNQUALIFIED_EXEC_RE, NO_GROUP)<|docstring|>Test UNQUALIFIED_EXEC_RE.<|endoftext|>
|
8890c7559fa4c4ba18eec80de74d723e0c0f22519925a35f1bce9b8857904645
|
def test_import_star(self):
'Test IMPORTSTAR_RE.'
msgs = ["import * is not allowed in function 'func_name' because it is contains a nested function with free variables", "import * is not allowed in function 'func_name' because it contains a nested function with free variables", "import * is not allowed in function 'func_name' because it is is a nested function", "import * is not allowed in function 'func_name' because it is a nested function", 'import * only allowed at module level']
for msg in msgs:
self.re_matches(msg, re.IMPORTSTAR_RE, NO_GROUP)
|
Test IMPORTSTAR_RE.
|
didyoumean/didyoumean_re_tests.py
|
test_import_star
|
SylvainDe/DidYouMean-Python
| 76 |
python
|
def test_import_star(self):
msgs = ["import * is not allowed in function 'func_name' because it is contains a nested function with free variables", "import * is not allowed in function 'func_name' because it contains a nested function with free variables", "import * is not allowed in function 'func_name' because it is is a nested function", "import * is not allowed in function 'func_name' because it is a nested function", 'import * only allowed at module level']
for msg in msgs:
self.re_matches(msg, re.IMPORTSTAR_RE, NO_GROUP)
|
def test_import_star(self):
msgs = ["import * is not allowed in function 'func_name' because it is contains a nested function with free variables", "import * is not allowed in function 'func_name' because it contains a nested function with free variables", "import * is not allowed in function 'func_name' because it is is a nested function", "import * is not allowed in function 'func_name' because it is a nested function", 'import * only allowed at module level']
for msg in msgs:
self.re_matches(msg, re.IMPORTSTAR_RE, NO_GROUP)<|docstring|>Test IMPORTSTAR_RE.<|endoftext|>
|