function (string, lengths 11–56k) | repo_name (string, lengths 5–60) | features (sequence)
---|---|---|
def __init__(self, client, config, serializer, deserializer):
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, client, config, serializer, deserializer) -> None:
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    if cls:
        return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    deserialized = self._deserialize('Subnet', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    if cls:
        return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    if cls:
        return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
    self,
    resource_group_name: str,
    virtual_network_name: str,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def main():
    out_dct = {
        "_comment_s10_countries": (
            "The country code is part of the S10 standard for international"
            " mail. The official reference for this is here:"
            " http://www.upu.int/uploads/tx_sbdownloader/"
            "S10TechnicalStandard.pdf"),
        "_comment": "Auto-generated file. Do not modify."
    }
    reqs = grequests.imap(
        fetch_country_data(), size=15, exception_handler=exception_handler)
    # (x for n, x in enumerate(fetch_country_data())
    #  if n < 200 and n > 190),
    # size=3, exception_handler=exception_handler)
    out_dct['s10_countries'] = sorted(
        [gen_country_json_dct(req) for req in reqs], key=lambda x: x['country'])
    with open(OUT_FILE, 'w') as fout:
        json.dump(out_dct, fout, indent=2, ensure_ascii=False) | jkeen/tracking_number_data | [
77,
29,
77,
11,
1497493117
] |
def fetch_country_data():
    r = requests.get(URL)
    soup = bs4.BeautifulSoup(r.content, 'html.parser')
    for tag in soup.find_all("strong", attrs={"itemprop": "member"}):
        href = tag.nextSibling()[0]
        url2 = href.attrs['href']
        yield grequests.get(
            join(BASE_URL, url2.lstrip('/')),
            session=retry(3)
        ) | jkeen/tracking_number_data | [
77,
29,
77,
11,
1497493117
] |
def _get_value(attr, soup):
    _child = soup.find("div", class_=attr)
    if _child is None:
        return None, None
    child = _child.findChild(class_="field")
    if child.next_element.name == "a":
        # special handling for links
        value = child.next_element.attrs['href']
    elif child.next_element.name == "span" and \
            child.next_element.attrs['class'] == ["noLink"]:
        # special handling for cases where we expect a link but UPU
        # doesn't have one for us
        value = None
    else:
        value = child.text.encode().decode('utf8')
    if value == 'None':
        value = None
    return value, child | jkeen/tracking_number_data | [
77,
29,
77,
11,
1497493117
] |
def send(self, **k):
    mail_managers(self.subject(**k), self.message(**k)) | joshsimmons/animportantdate | [
6,
4,
6,
1,
1498342230
] |
def message(self, **k):
    return self.__TEMPLATE__.format_map(k) | joshsimmons/animportantdate | [
6,
4,
6,
1,
1498342230
] |
def __init__(self, client, config, serializer, deserializer):
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self.config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def delete(
        self, person_group_id, custom_headers=None, raw=False, **operation_config):
    """Delete an existing person group. Persisted face features of all people
    in the person group will also be deleted.
    :param person_group_id: Id referencing a particular person group.
    :type person_group_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>`
    """
    # Construct URL
    url = self.delete.metadata['url']
    path_format_arguments = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'personGroupId': self._serialize.url("person_group_id", person_group_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    # Construct headers
    header_parameters = {}
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct and send request
    request = self._client.delete(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.APIErrorException(self._deserialize, response)
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get(
        self, person_group_id, return_recognition_model=False, custom_headers=None, raw=False, **operation_config):
    """Retrieve person group name, userData and recognitionModel. To get
    person information under this personGroup, use [PersonGroup Person -
    List](https://docs.microsoft.com/rest/api/faceapi/persongroupperson/list).
    :param person_group_id: Id referencing a particular person group.
    :type person_group_id: str
    :param return_recognition_model: A value indicating whether the
     operation should return 'recognitionModel' in response.
    :type return_recognition_model: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: PersonGroup or ClientRawResponse if raw=true
    :rtype: ~azure.cognitiveservices.vision.face.models.PersonGroup or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>`
    """
    # Construct URL
    url = self.get.metadata['url']
    path_format_arguments = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'personGroupId': self._serialize.url("person_group_id", person_group_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    if return_recognition_model is not None:
        query_parameters['returnRecognitionModel'] = self._serialize.query("return_recognition_model", return_recognition_model, 'bool')
    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.APIErrorException(self._deserialize, response)
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('PersonGroup', response)
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def update(
        self, person_group_id, name=None, user_data=None, custom_headers=None, raw=False, **operation_config):
    """Update an existing person group's display name and userData. Properties
    that do not appear in the request body will not be updated.
    :param person_group_id: Id referencing a particular person group.
    :type person_group_id: str
    :param name: User defined name, maximum length is 128.
    :type name: str
    :param user_data: User specified data. Length should not exceed 16KB.
    :type user_data: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>`
    """
    body = models.NameAndUserDataContract(name=name, user_data=user_data)
    # Construct URL
    url = self.update.metadata['url']
    path_format_arguments = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'personGroupId': self._serialize.url("person_group_id", person_group_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    # Construct headers
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct body
    body_content = self._serialize.body(body, 'NameAndUserDataContract')
    # Construct and send request
    request = self._client.patch(url, query_parameters, header_parameters, body_content)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.APIErrorException(self._deserialize, response)
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_training_status(
        self, person_group_id, custom_headers=None, raw=False, **operation_config):
    """Retrieve the training status of a person group (completed or ongoing).
    :param person_group_id: Id referencing a particular person group.
    :type person_group_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: TrainingStatus or ClientRawResponse if raw=true
    :rtype: ~azure.cognitiveservices.vision.face.models.TrainingStatus or
     ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>`
    """
    # Construct URL
    url = self.get_training_status.metadata['url']
    path_format_arguments = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'personGroupId': self._serialize.url("person_group_id", person_group_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.APIErrorException(self._deserialize, response)
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('TrainingStatus', response)
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
        self, start=None, top=1000, return_recognition_model=False, custom_headers=None, raw=False, **operation_config):
    """List person groups' personGroupId, name, userData and
    recognitionModel.<br />
    * Person groups are stored in alphabetical order of personGroupId.
    * "start" parameter (string, optional) is a user-provided personGroupId
    value; returned entries have larger ids by string comparison. Set
    "start" to empty to return from the first item.
    * "top" parameter (int, optional) specifies the number of entries to
    return. A maximum of 1000 entries can be returned in one call. To fetch
    more, you can specify "start" with the last returned entry's Id of the
    current call.
    <br />
    For example, total 5 person groups: "group1", ..., "group5".
    <br /> "start=&top=" will return all 5 groups.
    <br /> "start=&top=2" will return "group1", "group2".
    <br /> "start=group2&top=3" will return "group3", "group4", "group5".
    :param start: List person groups from the least personGroupId greater
     than the "start".
    :type start: str
    :param top: The number of person groups to list.
    :type top: int
    :param return_recognition_model: A value indicating whether the
     operation should return 'recognitionModel' in response.
    :type return_recognition_model: bool
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: list or ClientRawResponse if raw=true
    :rtype: list[~azure.cognitiveservices.vision.face.models.PersonGroup]
     or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>`
    """
    # Construct URL
    url = self.list.metadata['url']
    path_format_arguments = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True)
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    if start is not None:
        query_parameters['start'] = self._serialize.query("start", start, 'str', max_length=64)
    if top is not None:
        query_parameters['top'] = self._serialize.query("top", top, 'int', maximum=1000, minimum=1)
    if return_recognition_model is not None:
        query_parameters['returnRecognitionModel'] = self._serialize.query("return_recognition_model", return_recognition_model, 'bool')
    # Construct headers
    header_parameters = {}
    header_parameters['Accept'] = 'application/json'
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct and send request
    request = self._client.get(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.APIErrorException(self._deserialize, response)
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('[PersonGroup]', response)
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def train(
        self, person_group_id, custom_headers=None, raw=False, **operation_config):
    """Queue a person group training task; the training task may not be
    started immediately.
    :param person_group_id: Id referencing a particular person group.
    :type person_group_id: str
    :param dict custom_headers: headers that will be added to the request
    :param bool raw: returns the direct response alongside the
     deserialized response
    :param operation_config: :ref:`Operation configuration
     overrides<msrest:optionsforoperations>`.
    :return: None or ClientRawResponse if raw=true
    :rtype: None or ~msrest.pipeline.ClientRawResponse
    :raises:
     :class:`APIErrorException<azure.cognitiveservices.vision.face.models.APIErrorException>`
    """
    # Construct URL
    url = self.train.metadata['url']
    path_format_arguments = {
        'Endpoint': self._serialize.url("self.config.endpoint", self.config.endpoint, 'str', skip_quote=True),
        'personGroupId': self._serialize.url("person_group_id", person_group_id, 'str', max_length=64, pattern=r'^[a-z0-9-_]+$')
    }
    url = self._client.format_url(url, **path_format_arguments)
    # Construct parameters
    query_parameters = {}
    # Construct headers
    header_parameters = {}
    if custom_headers:
        header_parameters.update(custom_headers)
    # Construct and send request
    request = self._client.post(url, query_parameters, header_parameters)
    response = self._client.send(request, stream=False, **operation_config)
    if response.status_code not in [202]:
        raise models.APIErrorException(self._deserialize, response)
    if raw:
        client_raw_response = ClientRawResponse(None, response)
        return client_raw_response | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def mock_tmy3_weather_source():
    tmp_dir = tempfile.mkdtemp()
    ws = TMY3WeatherSource("724838", tmp_dir, preload=False)
    ws.client = MockWeatherClient()
    ws._load_data()
    return ws | impactlab/eemeter | [
25,
13,
25,
17,
1422563081
] |
def setUp(self):
    X, _ = datasets.load_iris(return_X_y=True)
    columns = ['Sepal length', 'Sepal width', 'Petal length', 'Petal width']
    self.X = pd.DataFrame(X, columns=columns) | MaxHalford/Prince | [
951,
166,
951,
2,
1477139766
] |
def test_transform_pandas_dataframe(self):
    pca = prince.PCA(n_components=2)
    self.assertTrue(isinstance(pca.fit(self.X).transform(self.X), pd.DataFrame)) | MaxHalford/Prince | [
951,
166,
951,
2,
1477139766
] |
def test_transform_numpy_array(self):
    pca = prince.PCA(n_components=2)
    self.assertTrue(isinstance(pca.fit(self.X.values).transform(self.X.values), pd.DataFrame)) | MaxHalford/Prince | [
951,
166,
951,
2,
1477139766
] |
def test_fit_transform(self):
    # Without rescaling
    prince_pca = prince.PCA(n_components=3, rescale_with_mean=False, rescale_with_std=False)
    pd.testing.assert_frame_equal(
        prince_pca.fit_transform(self.X),
        prince_pca.fit(self.X).transform(self.X)
    )
    # With rescaling
    prince_pca = prince.PCA(n_components=3, rescale_with_mean=True, rescale_with_std=True)
    pd.testing.assert_frame_equal(
        prince_pca.fit_transform(self.X),
        prince_pca.fit(self.X).transform(self.X)
    ) | MaxHalford/Prince | [
951,
166,
951,
2,
1477139766
] |
def test_explained_inertia_(self):
    pca = prince.PCA(n_components=4)
    pca.fit(self.X)
    self.assertTrue(np.isclose(sum(pca.explained_inertia_), 1)) | MaxHalford/Prince | [
951,
166,
951,
2,
1477139766
] |
def validate_metadata_prefix(value, **kwargs):
    """Check metadataPrefix.
    :param value: One of the metadata identifiers configured in
        ``OAISERVER_METADATA_FORMATS``.
    """
    metadataFormats = current_app.config['OAISERVER_METADATA_FORMATS']
    if value not in metadataFormats:
        raise ValidationError('metadataPrefix does not exist',
                              field_names=['metadataPrefix']) | inveniosoftware/invenio-oaiserver | [
3,
41,
3,
12,
1439547138
] |
def from_iso_permissive(datestring, use_dateutil=True):
    """Parse an ISO8601-formatted datetime and return a datetime object.
    Inspired by the marshmallow.utils.from_iso function, but also accepts
    datestrings that don't contain the time.
    """
    dateutil_available = False
    try:
        from dateutil import parser
        dateutil_available = True
    except ImportError:
        dateutil_available = False
    import datetime
    # Use dateutil's parser if possible
    if dateutil_available and use_dateutil:
        return parser.parse(datestring)
    else:
        # Strip off timezone info.
        return datetime.datetime.strptime(datestring[:19],
                                          '%Y-%m-%dT%H:%M:%S') | inveniosoftware/invenio-oaiserver | [
3,
41,
3,
12,
1439547138
] |
def validate(self, data, **kwargs):
    """Check range between dates under keys ``from_`` and ``until``."""
    if 'verb' in data and data['verb'] != self.__class__.__name__:
        raise ValidationError(
            # FIXME encode data
            'This is not a valid OAI-PMH verb:{0}'.format(data['verb']),
            field_names=['verb'],
        )
    if 'from_' in data and 'until' in data and \
            data['from_'] > data['until']:
        raise ValidationError('Date "from" must be before "until".') | inveniosoftware/invenio-oaiserver | [
3,
41,
3,
12,
1439547138
] |
def check_extra_params_in_request(verb):
    """Check for extra arguments in incoming request."""
    extra = set(request.values.keys()) - set([
        getattr(f, 'load_from', None) or getattr(
            f, 'data_key', None) or f.name for f in verb.fields.values()
    ])
    if extra:
        raise ValidationError({'_schema': ['You have passed too many arguments.']}) | inveniosoftware/invenio-oaiserver | [
3,
41,
3,
12,
1439547138
] |
def __init__(self, client, config, serializer, deserializer) -> None:
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
    self,
    detailed: Optional[bool] = None,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_by_resource_group(
    self,
    resource_group_name: str,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    response = pipeline_response.http_response
    deserialized = self._deserialize('AppServicePlan', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_web_apps_by_hybrid_connection(
    self,
    resource_group_name: str,
    name: str,
    namespace_name: str,
    relay_name: str,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_hybrid_connections(
    self,
    resource_group_name: str,
    name: str,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_web_apps(
    self,
    resource_group_name: str,
    name: str,
    skip_token: Optional[str] = None,
    filter: Optional[str] = None,
    top: Optional[str] = None,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list_usages(
    self,
    resource_group_name: str,
    name: str,
    filter: Optional[str] = None,
    **kwargs: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    if not next_link: | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def unicode_escape(text):
    """
    Escapes unicode characters in the given string into a format compatible
    with Python, C#, C++11, and many other programming languages. BMP characters
    are escaped as ``\\u####`` and non-BMP characters are escaped as
    ``\\U########``. The former syntax is compatible with Java and other
    languages, but the latter syntax is not.
    Example::
        .unicodeescape \U0001f50a \u266b Yeah, you take these dreams and throw them out the window \u266b
    Response::
        \\U0001f50a \\u266b Yeah, you take these dreams and throw them out the window \\u266b
    """
    return text.encode("unicode_escape").decode("utf-8") | sk89q/Plumeria | [
35,
2,
35,
2,
1471646188
] |
def unicode_name(text):
    """
    Finds the Unicode names for the given characters, up to 10 characters.
    """
    return "\n".join(s + " **" + unicodedata.name(s, "?") + "**" for s in text[:10]) | sk89q/Plumeria | [
35,
2,
35,
2,
1471646188
] |
def unicode_code(text):
    """
    Finds the Unicode code points for the given characters, up to 10 characters.
    """
    return "\n".join("{} **{}**".format(s, ord(s)) for s in text[:10]) | sk89q/Plumeria | [
35,
2,
35,
2,
1471646188
] |
def register(self):
    pass | marteinn/Skeppa | [
4,
1,
4,
1,
1455475132
] |
def test_validates_and_saves_input(self):
    self.user = UserFactory()
    data = {
        'title': 'Foo',
        'venue': 'Bar',
        'start': timezone.now(),
        'end': timezone.now() + timezone.timedelta(days=11),
    }
    form = EventForm(data=data, created_by=self.user)
    self.assertTrue(form.is_valid())
    instance = form.save()
    self.assertEqual(Event.objects.all().count(), 1)
    # Test update
    data.update({'street': 'Foostreet'})
    form = EventForm(data=data, instance=instance, created_by=self.user)
    instance = form.save()
    self.assertEqual(instance.street, 'Foostreet')
    # Test creating an event from a template
    form = EventForm(data=data, instance=instance, created_by=self.user,
                     create_from_template=True)
    self.assertTrue(form.is_valid())
    instance = form.save()
    self.assertEqual(Event.objects.all().count(), 2)
    # Test saving a template
    data.update({'template_name': 'Foo'})
    form = EventForm(data=data, created_by=self.user)
    self.assertTrue(form.is_valid())
    form.save()
    self.assertEqual(
        Event.objects.exclude(template_name__exact='').count(), 1)
    # Test updating a template
    data.update({'street': 'Barstreet'})
    instance = Event.objects.get(template_name='Foo')
    form = EventForm(data=data, instance=instance, created_by=self.user)
    self.assertTrue(form.is_valid())
    instance = form.save()
    self.assertEqual(instance.street, 'Barstreet') | bitmazk/django-event-rsvp | [
19,
10,
19,
3,
1357893081
] |
def setup_function(_):
    np.random.seed(0)
    np.set_printoptions(threshold=np.inf,
                        formatter={'float_kind': lambda x: "%.3f" % x}) | welch/rasl | [
46,
11,
46,
4,
1458753226
] |
def image_noise(likeimg, p=0.1):
    """sparse noise as described in RASL and RPCA papers"""
    sgn = np.random.choice((-1.0, 1.0), size=likeimg.shape)
    return sgn * np.random.binomial(1, p, size=likeimg.shape) | welch/rasl | [
46,
11,
46,
4,
1458753226
] |
def test_inner_aligned_similarity():
    inner_aligned(SimilarityTransform) | welch/rasl | [
46,
11,
46,
4,
1458753226
] |
def test_inner_aligned_affine():
    inner_aligned(AffineTransform) | welch/rasl | [
46,
11,
46,
4,
1458753226
] |
def inner_jittered(T, inset=10, rtol=1e-3, atol=0):
    """move a stack of jittered noisy images in the direction of aligned"""
    image0 = gauss_image()
    Image = [image0 + image_noise(image0, p=.05) for _ in T]
    T = [tform.inset(image0.shape, inset) for tform in T]
    TImage, J = zip(*[warp_image_gradient(tform, image, normalize=True)
                      for tform, image in zip(T, Image)])
    _, _, dParamv = inner_ialm(TImage, J, tol=1e-4)
    # does dParamv move towards alignment? check if stdev of
    # parameters decreased.
    before = np.array([t.paramv for t in T])
    beforeStd = np.std(before, 0)
    after = np.array([t.paramv + dparamv
                      for t, dparamv in zip(T, dParamv)])
    afterStd = np.std(after, 0)
    assert np.all(np.logical_or(afterStd < beforeStd,
                                np.isclose(after, before, rtol=rtol, atol=atol))) | welch/rasl | [
46,
11,
46,
4,
1458753226
] |
def test_inner_jittered_similarity():
    N = 40
    ds, dtheta, dx, dy = .05, .05, 1, 1
    Jitters = [[(np.random.random() * 2 - 1) * ds + 1,
                (np.random.random() * 2 - 1) * dtheta,
                (np.random.random() * 2 - 1) * dx,
                (np.random.random() * 2 - 1) * dy]
               for _ in range(N)]
    inner_jittered([SimilarityTransform(paramv=jitter) for jitter in Jitters]) | welch/rasl | [
46,
11,
46,
4,
1458753226
] |
def protoc_emitter(target, source, env):
    """Return list of targets generated by Protoc builder for source."""
    for src in source:
        proto = os.path.splitext(str(src))[0]
        if env['PROTOCPPOUT']:
            target.append('%s.pb.cc' % (proto))
            target.append('%s.pb.h' % (proto))
        if env['PROTOPYOUT']:
            target.append('%s_pb2.py' % (proto))
    return target, source | TheOstrichIO/sconseries | [
6,
2,
6,
1,
1411017394
] |
def generate(env):
    """Add Builders, Scanners and construction variables
    for protoc to the build Environment."""
    try:
        bldr = env['BUILDERS']['Protoc']
    except KeyError:
        action = SCons.Action.Action('$PROTOCOM', '$PROTOCOMSTR')
        bldr = SCons.Builder.Builder(action=action,
                                     emitter=protoc_emitter,
                                     src_suffix='$PROTOCSRCSUFFIX')
        env['BUILDERS']['Protoc'] = bldr
    # pylint: disable=bad-whitespace
    env['PROTOC'] = env.Detect(_PROTOCS) or 'protoc'
    env['PROTOCFLAGS'] = SCons.Util.CLVar('')
    env['PROTOCSRCSUFFIX'] = _PROTOSUFFIX
    # Default proto search path is same dir
    env['PROTOPATH'] = ['.']
    # Default CPP output in same dir
    env['PROTOCPPOUT'] = '.'
    # No default Python output
    env['PROTOPYOUT'] = ''
    proto_cmd = ['$PROTOC']
    proto_cmd.append('${["--proto_path=%s"%(x) for x in PROTOPATH]}')
    proto_cmd.append('$PROTOCFLAGS')
    proto_cmd.append('${PROTOCPPOUT and "--cpp_out=%s"%(PROTOCPPOUT) or ""}')
    proto_cmd.append('${PROTOPYOUT and "--python_out=%s"%(PROTOPYOUT) or ""}')
    proto_cmd.append('${SOURCES}')
    env['PROTOCOM'] = ' '.join(proto_cmd)
    # Add the proto scanner (if it wasn't added already)
    env.AppendUnique(SCANNERS=SCons.Scanner.Scanner(function=protoc_scanner,
                                                    skeys=[_PROTOSUFFIX])) | TheOstrichIO/sconseries | [
6,
2,
6,
1,
1411017394
] |
def __init__(self, client, config, serializer, deserializer):
    self._client = client
    self._serialize = serializer
    self._deserialize = deserializer
    self._config = config | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list_by_replication_fabrics.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'),
            'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'fabricName': self._serialize.url("fabric_name", fabric_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_next(next_link=None):
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _create_initial(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    add_provider_input,  # type: "_models.AddRecoveryServicesProviderInput"
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_create(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    add_provider_input,  # type: "_models.AddRecoveryServicesProviderInput"
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    deserialized = self._deserialize('RecoveryServicesProvider', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _purge_initial(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_purge(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    if cls:
        return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _refresh_provider_initial(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_refresh_provider(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    deserialized = self._deserialize('RecoveryServicesProvider', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def _delete_initial(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def begin_delete(
    self,
    fabric_name,  # type: str
    provider_name,  # type: str
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_long_running_output(pipeline_response):
    if cls:
        return cls(pipeline_response, None, {}) | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def list(
    self,
    **kwargs  # type: Any | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def prepare_request(next_link=None):
    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
    if not next_link:
        # Construct URL
        url = self.list.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceName': self._serialize.url("self._config.resource_name", self._config.resource_name, 'str'),
            'resourceGroupName': self._serialize.url("self._config.resource_group_name", self._config.resource_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
    else:
        url = next_link
        query_parameters = {}  # type: Dict[str, Any]
        request = self._client.get(url, query_parameters, header_parameters)
    return request | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def get_next(next_link=None):
    request = prepare_request(next_link)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response
    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)
    return pipeline_response | Azure/azure-sdk-for-python | [
3526,
2256,
3526,
986,
1335285972
] |
def __init__(self, user, connection,
             password=None, privateKey=None, publicKey=None):
    SSHUserAuthClient.__init__(self, user, connection)
    self.password = password
    self.privateKey = privateKey
    self.publicKey = publicKey | sporsh/carnifex | [
7,
1,
7,
2,
1349465719
] |
def getPassword(self, prompt=None):
    if not self.password:
        return None  # Return None to indicate we do not want to retry
    return defer.succeed(self.password) | sporsh/carnifex | [
7,
1,
7,
2,
1349465719
] |
def __init__(self, response, shipwire_instance):
    r = response
    self.response = r
    j = r.json()
    self.json = j
    self.status = j.get('status')
    self.message = j.get('message')
    self.location = j.get('resourceLocation')
    self.resource_location = self.location
    self.resource = j.get('resource')
    self.warnings = j.get('warnings')
    self.errors = j.get('errors')
    self.shipwire = shipwire_instance | soylentme/shipwire-python | [
15,
9,
15,
1,
1413840690
] |
def __init__(self, response, shipwire_instance):
    super(ListResponse, self).__init__(response, shipwire_instance)
    # check to make sure that you have a valid response
    if self.status != HTTP_SUCCESS:
        return
    r = self.resource
    self.total = r.get('total')
    self.previous = r.get('previous')
    self.next = r.get('next')
    self.__next__ = r.get('next')
    self.offset = r.get('offset')
    self.items = r.get('items')
    self.limit = len(self.items)
    self.all_serial = self._get_all_serial
    self.all = self.all_serial | soylentme/shipwire-python | [
15,
9,
15,
1,
1413840690
] |
def __init__(self, filename, callback):
    super(PluginEventHandler, self).__init__(ignore_directories=True,
                                             regexes=['.*' + filename])
    self.callback = callback | mike820324/microProxy | [
17,
3,
17,
15,
1460816457
] |
def __init__(self, plugin_path):
    self.plugin_path = os.path.abspath(plugin_path)
    self.plugin_name = os.path.basename(self.plugin_path)
    self.plugin_dir = os.path.dirname(self.plugin_path)
    self.namespace = None
    self._load_plugin()
    self._register_watcher() | mike820324/microProxy | [
17,
3,
17,
15,
1460816457
] |
def _load_plugin(self):
    sys.path.append(os.path.dirname(self.plugin_path))
    try:
        with open(self.plugin_path) as fp:
            self.namespace = {"__file__": self.plugin_path}
            code = compile(fp.read(), self.plugin_path, "exec")
            exec(code, self.namespace, self.namespace)
    except Exception as e:
        logger.exception(e)
    sys.path.pop()
    logger.info("Load Plugin : {0}".format(self.plugin_name)) | mike820324/microProxy | [
17,
3,
17,
15,
1460816457
] |
def __getattr__(self, attr):
    if attr not in self.PLUGIN_METHODS:
        raise AttributeError
    try:
        return self.namespace[attr]
    except KeyError:
        raise AttributeError | mike820324/microProxy | [
17,
3,
17,
15,
1460816457
] |
def __init__(self, config):
    self.plugins = []
    self.load_plugins(config["plugins"]) | mike820324/microProxy | [
17,
3,
17,
15,
1460816457
] |
def exec_request(self, plugin_context):
    if len(self.plugins) == 0:
        return plugin_context
    current_context = copy(plugin_context)
    for plugin in self.plugins:
        try:
            new_context = plugin.on_request(current_context)
            current_context = copy(new_context)
        except AttributeError:
            logger.debug(
                "Plugin {0} does not have on_request".format(
                    plugin.namespace["__file__"].split("/")[-1]))
    return current_context | mike820324/microProxy | [
17,
3,
17,
15,
1460816457
] |
def parse_arguments():
    """Reads the arguments passed from command line.
    Command line Args:
        --json-config (str): Compulsory argument. The path to the JSON
            configuration file.
        --serial-requests (bool): Optional argument. Defines whether the requests
            will be sent from the master to the workers in parallel, simultaneously
            to all workers, or serially, one worker at a time.
    Returns:
        collection: An object containing the values of all arguments.
    """
    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--json-config',
                        required=True,
                        type=str,
                        dest='json_config',
                        action='store',
                        help='Configuration file (JSON)')
    parser.add_argument('--serial-requests',
                        required=False,
                        dest='is_serial',
                        action='store_true',
                        default=False,
                        help='Is request in serial execution mode')
    args = parser.parse_args()
    return args | intracom-telecom-sdn/multinet | [
14,
3,
14,
2,
1444836376
] |
def dpid_offset_range(num_vms):
    """Generate a range of dpid offsets.
    Every VM is allocated 1000 unique dpid offsets.
    Args:
        num_vms (int): The number of virtual machines
    Returns:
        list: The dpid offset range
    """
    return [i for i in xrange(0, num_vms)] | intracom-telecom-sdn/multinet | [
14,
3,
14,
2,
1444836376
] |
def make_post_request_runner(host_ip, host_port, route, data, queue):
    """Wrapper function to create a new job for each POST request.
    Make a POST request and put the response in a queue.
    Used for multiprocessing.
    Args:
        host_ip (str): The IP address of the REST server
        host_port (int): The port of the REST server
        route (str): The REST API endpoint
        data (str): Any additional JSON data
        queue (multiprocessing.Queue): The queue where all the responses are stored
    """
    queue.put(make_post_request(host_ip, host_port, route, data))
    return 0 | intracom-telecom-sdn/multinet | [
14,
3,
14,
2,
1444836376
] |
def broadcast_cmd(worker_ip_list, worker_port_list, opcode, data=None):
    """Broadcast a POST request to all the workers.
    Use multiple processes to send POST requests to a specified
    endpoint of all the workers simultaneously.
    Args:
        worker_ip_list (list): A list of IP addresses to broadcast the POST request
        worker_port_list (list): The ports of the workers
        opcode (str): The REST API endpoint
        data (dict): JSON data to go with the request
    Returns:
        list: A list of responses for all the POST requests performed
    """
    if data is not None and 'is_serial' in data:
        is_serial = data['is_serial']
    else:
        logging.info('[{0}] POST data is None. Setting is_serial to False'.
                     format(opcode))
        is_serial = False
    if opcode == 'init':
        dpid_offset_list = dpid_offset_range(len(worker_ip_list))
        offset_idx = 0
    processes = []
    result_queue = multiprocessing.Queue()
    for worker_ip, worker_port in zip(worker_ip_list, worker_port_list):
        if opcode == 'init':
            data['dpid_offset'] = dpid_offset_list[offset_idx]
            offset_idx += 1
        if is_serial:
            # Serial send REST requests to workers
            logging.info('[{0}] is running in serial mode'.format(opcode))
            processes.append(make_post_request(worker_ip, worker_port, opcode,
                                               data))
        else:
            # Parallel send REST requests to workers
            logging.info('[{0}] is running in parallel mode'.format(opcode))
            process = multiprocessing.Process(target=make_post_request_runner,
                                              args=(worker_ip,
                                                    worker_port,
                                                    opcode,
                                                    data,
                                                    result_queue,))
            processes.append(process)
            process.start()
    if is_serial:
        return processes
    else:
        for process in processes:
            process.join()
        return [result_queue.get() for _ in processes] | intracom-telecom-sdn/multinet | [
14,
3,
14,
2,
1444836376
] |
def test_user_creation(self):
    """
    Create user and check his attributes
    """
    user = UserFactory(username='johnsmith', name='John Smith')
    self.assertEqual('johnsmith', user.__unicode__())
    self.assertEqual('John Smith', user.name)
    self.assertEqual('John Smith', user.get_short_name())
    self.assertEqual(False, user.is_vip)
    self.assertEqual('John Smith (johnsmith)', user.get_full_name())
    self.assertEqual('John Smith (johnsmith)', user.full_name)
    user.calculate_reputation()
    user.save()
    self.assertEqual(False, user.is_superuser)
    self.assertEqual({
        'user_id': 1,
        'total_cash': formatted(0),
        'portfolio_value': formatted(0),
        'reputation': '100%',
    }, user.statistics_dict)
    self.assertEqual(0, user.current_portfolio_value) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_user_urls(self):
    """
    Check if urls are valid
    """
    user = UserFactory(
        twitter_user='jsmith',
        facebook_user='facesmith'
    )
    # TODO: FIXME
    # url = user.get_absolute_url()
    # self.assertEqual('/accounts/1/', url)
    #
    # url = user.get_avatar_url()
    # self.assertEqual('/static/img/blank-avatar.jpg', url)
    #
    # url = user.get_twitter_url()
    # self.assertEqual('https://twitter.com/jsmith', url)
    #
    # url = user.get_facebook_url()
    # self.assertEqual('https://www.facebook.com/facesmith', url) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_current_portfolio_value(self):
    """
    Current portfolio value
    """
    user = UserFactory()
    self.assertEqual(0, user.current_portfolio_value)
    event = EventFactory()
    bet = BetFactory(user=user, event=event)
    self.assertEqual(50, user.current_portfolio_value)
    bet.outcome = Bet.NO
    bet.has = 2
    bet.save()
    self.assertEqual(100, user.current_portfolio_value) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_reset_account_without_bonus(self):
    """
    Test reset account
    """
    user = UserFactory()
    user.reset_account()
    self.assertEqual({
        'user_id': 1,
        'total_cash': formatted(1000),
        'portfolio_value': formatted(0),
        'reputation': "100%",
    }, user.statistics_dict) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_get_newest_results(self):
    """
    Get newest results
    """
    users = UserFactory.create_batch(2)
    events = EventFactory.create_batch(5)
    BetFactory(user=users[0], event=events[0])
    bet2 = BetFactory(user=users[0], event=events[1])
    bet3 = BetFactory(user=users[0], event=events[2])
    bet4 = BetFactory(user=users[0], event=events[3])
    bet5 = BetFactory(user=users[1], event=events[4])
    events[1].outcome = Event.CANCELLED
    events[1].save()
    events[2].outcome = Event.FINISHED_YES
    events[2].save()
    events[3].outcome = Event.FINISHED_NO
    events[3].save()
    events[4].outcome = Event.FINISHED_YES
    events[4].save()
    bet2.is_new_resolved = True
    bet2.save()
    bet3.is_new_resolved = True
    bet3.save()
    bet4.is_new_resolved = True
    bet4.save()
    bet5.is_new_resolved = True
    bet5.save()
    self.assertEqual([bet2, bet3, bet4], list(users[0].get_newest_results()))
    self.assertEqual([bet5], list(users[1].get_newest_results())) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_return_new_user_object(self):
    """
    Return new user object
    """
    user = UserProfile.objects.return_new_user_object(
        username='j_smith',
        password='password9',
    )
    self.assertIsInstance(user, UserProfile)
    self.assertEqual('j_smith', user.username)
    self.assertTrue(user.check_password('password9'))
    with self.assertRaises(ValueError):
        UserProfile.objects.return_new_user_object(
            username=None,
        ) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_create_superuser(self):
    """
    Create superuser
    """
    user = UserProfile.objects.create_superuser(
        username='j_smith',
        email='[email protected]',
        password='password9',
    )
    self.assertIsInstance(user, UserProfile)
    self.assertEqual('j_smith', user.username)
    self.assertTrue(user.check_password('password9'))
    self.assertTrue(user.is_staff)
    self.assertTrue(user.is_admin)
    self.assertTrue(user.is_active)
    self.assertEqual({
        'user_id': 1,
        'total_cash': formatted(0),
        'portfolio_value': formatted(0),
        'reputation': '100%',
    }, user.statistics_dict)
    user2 = UserProfile.objects.create_superuser(
        username='j_smith',
        email='[email protected]',
    )
    self.assertIsInstance(user2, HttpResponseForbidden) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_get_users(self):
    """
    Get users
    """
    user1 = UserFactory()
    UserFactory(is_deleted=True)
    UserFactory(is_active=False)
    users = UserProfile.objects.get_users()
    self.assertIsInstance(users[0], UserProfile)
    self.assertEqual(1, len(users))
    self.assertEqual([user1], list(users)) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_get_admins(self):
    """
    Get admins
    """
    UserFactory()
    UserFactory(is_admin=True)
    UserFactory(is_staff=True)
    user4 = AdminFactory()
    admins = UserProfile.objects.get_admins()
    self.assertIsInstance(admins[0], UserProfile)
    self.assertEqual(1, len(admins))
    self.assertEqual([user4], list(admins)) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |
def test_get_best_monthly(self):
    """
    Get best monthly
    """
    UserFactory()
    user2 = UserFactory(monthly_result=300)
    AdminFactory()
    user4 = UserFactory(monthly_result=100)
    users = UserProfile.objects.get_best_monthly()
    self.assertEqual(0, len(users))
    self.assertEqual([], list(users))
    # TODO mock transaction
    # self.assertIsInstance(users[0], UserProfile)
    # self.assertEqual(2, len(users))
    # self.assertEqual([user2, user4], list(users)) | KlubJagiellonski/Politikon | [
20,
21,
20,
29,
1433923333
] |