# repo: candrews/portage | path: pym/portage/tests/dep/test_get_required_use_flags.py | license: gpl-2.0
# Copyright 2010-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
from portage.tests import TestCase
from portage.dep import get_required_use_flags
from portage.exception import InvalidDependString
class TestCheckRequiredUse(TestCase):
def testCheckRequiredUse(self):
test_cases = (
("a b c", ["a", "b", "c"]),
("|| ( a b c )", ["a", "b", "c"]),
("^^ ( a b c )", ["a", "b", "c"]),
("?? ( a b c )", ["a", "b", "c"]),
("?? ( )", []),
("|| ( a b ^^ ( d e f ) )", ["a", "b", "d", "e", "f"]),
("^^ ( a b || ( d e f ) )", ["a", "b", "d", "e", "f"]),
("( ^^ ( a ( b ) ( || ( ( d e ) ( f ) ) ) ) )", ["a", "b", "d", "e", "f"]),
("a? ( ^^ ( b c ) )", ["a", "b", "c"]),
("a? ( ^^ ( !b !d? ( c ) ) )", ["a", "b", "c", "d"]),
)
test_cases_xfail = (
("^^ ( || ( a b ) ^^ ( b c )"),
("^^( || ( a b ) ^^ ( b c ) )"),
("^^ || ( a b ) ^^ ( b c )"),
("^^ ( ( || ) ( a b ) ^^ ( b c ) )"),
("^^ ( || ( a b ) ) ^^ ( b c ) )"),
)
for required_use, expected in test_cases:
result = get_required_use_flags(required_use)
expected = set(expected)
self.assertEqual(result, expected, \
"REQUIRED_USE: '%s', expected: '%s', got: '%s'" % (required_use, expected, result))
for required_use in test_cases_xfail:
self.assertRaisesMsg("REQUIRED_USE: '%s'" % (required_use,), \
InvalidDependString, get_required_use_flags, required_use)
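
# --- Added illustration, not part of the original test module ---
# A minimal sketch of calling get_required_use_flags directly; the expected
# result (a set of flag names) is inferred from the assertions above.
if __name__ == "__main__":
	flags = get_required_use_flags("a? ( ^^ ( !b !d? ( c ) ) )")
	print(sorted(flags))  # ['a', 'b', 'c', 'd'], per the test expectations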

# repo: jtakayama/makahiki-draft | path: makahiki/apps/widgets/bonus_points/admin.py | license: mit
"""Admin definition for Bonus Points widget."""
from django.shortcuts import render_to_response
from django.template import RequestContext
from apps.admin.admin import challenge_designer_site, challenge_manager_site, developer_site
# Created on Aug 5, 2012
# @author: Cam Moore
from django.contrib import admin
from django import forms
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from apps.widgets.bonus_points.models import BonusPoint
from apps.managers.challenge_mgr import challenge_mgr
class BonusPointAdminForm(forms.ModelForm):
"""Bonus Points Admin Form."""
point_value = forms.IntegerField(initial=5,
label="Number of bonus points to award.",
help_text="The number of bonus points the player earns.")
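    # save() below reads "num_codes" from cleaned_data; a minimal field
    # definition is assumed here since none survived extraction.
    num_codes = forms.IntegerField(initial=0,
        label="Number of bonus point codes to generate.")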
class Meta:
"""Meta"""
model = BonusPoint
def save(self, *args, **kwargs):
"""Generates the number of bonus point codes."""
_ = args
_ = kwargs
num = self.cleaned_data.get("num_codes")
p = self.cleaned_data.get("point_value")
# Generate
if num > 0:
BonusPoint.generate_bonus_points(p, num)
class BonusPointAdmin(admin.ModelAdmin):
"""admin for Bonus Points."""
actions = ["delete_selected", "deactivate_selected", "view_selected",
"print_selected"]
list_display = ["pk", "code", "point_value", "create_date", "is_active",
"printed_or_distributed", "user"]
ordering = ["-create_date", "is_active"]
list_filter = ["point_value", "is_active", "printed_or_distributed"]
date_hierarchy = "create_date"
form = BonusPointAdminForm
def delete_selected(self, request, queryset):
"""override the delete selected method."""
_ = request
for obj in queryset:
obj.delete()
delete_selected.short_description = "Delete the selected Bonus Points."
def deactivate_selected(self, request, queryset):
"""Changes the is_active flag to false for the selected Bonus Points."""
_ = request
queryset.update(is_active=False)
deactivate_selected.short_description = "Deactivate the selected Bonus Points."
def print_selected(self, request, queryset):
"""Changes the printed_or_distributed flag to True for the selected
Bonus Points."""
_ = request
queryset.update(printed_or_distributed=True)
print_selected.short_description = "Set the printed or distributed flag."
def view_selected(self, request, queryset):
"""Views the Bonus Points Codes for printing."""
_ = request
_ = queryset
return render_to_response("view_bonus_points.html", {
"codes": queryset,
"per_page": 10,
}, context_instance=RequestContext(request))
view_selected.short_description = "View the selected Bonus Points."
def view_codes(self, request, queryset):
"""Views the Bonus Points Codes for printing."""
_ = request
_ = queryset
response = HttpResponseRedirect(reverse("bonus_view_codes", args=()))
return response
admin.site.register(BonusPoint, BonusPointAdmin)
challenge_designer_site.register(BonusPoint, BonusPointAdmin)
challenge_manager_site.register(BonusPoint, BonusPointAdmin)
developer_site.register(BonusPoint, BonusPointAdmin)
challenge_mgr.register_designer_game_info_model("Smart Grid Game", BonusPoint)
challenge_mgr.register_admin_game_info_model("Smart Grid Game", BonusPoint)
challenge_mgr.register_developer_game_info_model("Smart Grid Game", BonusPoint)
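
# --- Added illustration, not part of the original module ---
# A minimal sketch of generating codes outside the admin, assuming a
# configured Django environment for this project; the (point_value,
# num_codes) argument order mirrors BonusPointAdminForm.save() above.
def _demo_generate_codes():
    """Generate ten 5-point codes, as the admin form's save() does."""
    BonusPoint.generate_bonus_points(5, 10)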

# repo: matthijsvk/multimodalSR | path: code/Experiments/neon-master/neon/backends/cuda_batchnorm.py | license: mit
from pycuda.tools import context_dependent_memoize
# from neon.backends.cuda_templates import (_ew_template,
# _stage_template,
# _fin_template,
# _init_rand_func,
# _init_rand_round_func,
# _finish_rand_func,
# _common_urand_gen,
# _common_frand,
# _common_round,
# _common_fp16_to_fp32,
# _ew_types,
# _ew_strings,
# _is_finite,
# _float_ops,
# _reduction_ops)
from neon.backends.cuda_templates import (_common_round,
_common_kepler,
_ew_types,
_common_fp16_to_fp32,
_ew_strings)
from neon.backends.kernels.cuda.binary import shift_element
from neon.backends.util.source_module import SourceModule
@context_dependent_memoize
def _get_bn_fprop_kernel(dtype, threads, compute_capability):
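    """Generate, compile, and memoize the batchnorm fprop CUDA kernel for
    the given dtype, thread count, and compute capability."""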
if threads > 32:
shr_code = "__shared__ float sPartials[THREADS];"
red_code = r"""
sPartials[tid] = xvar;
__syncthreads();
#pragma unroll
for (int a = THREADS >> 1; a > 32; a >>= 1)
{
if ( tid < a )
sPartials[tid] += sPartials[tid + a];
__syncthreads();
}
if ( tid < 32 )
{
xvar = sPartials[tid] + sPartials[tid + 32];
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
xvar += __shfl_xor(xvar, i);
sPartials[tid] = xvar * rcpN;
}
__syncthreads();
xvar = sPartials[0];
"""
else:
shr_code = ""
red_code = r"""
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
xvar += __shfl_xor(xvar, i);
xvar *= rcpN;
"""
code = r"""
#define THREADS %(threads)s
%(common)s
%(binary)s
__global__ void batchnorm_fprop (
%(type)s* y_out, float* xvar_out, float* gmean_out, float* gvar_out,
const %(type)s* x_in, const float* xsum_in, const float* gmean_in,
const float* gvar_in, const float* gamma_in, const float* beta_in,
const float eps, const float rho, const float accumbeta, const int N,
const int relu, bool binary)
{
%(share)s
const int tid = threadIdx.x;
const int bid = blockIdx.x;
int offset = bid * N;
const %(type)s* x_in0 = x_in + offset + tid;
const float rcpN = 1.0f/(float)N;
float xmean = __ldg(xsum_in + bid) * rcpN;
float xvar = 0.0f;
for (int i = tid; i < N; i += THREADS)
{
float x = %(cvt)s(__ldg(x_in0));
x_in0 += THREADS;
x -= xmean;
if (binary) {
xvar += shift_element(x, x, true);
} else {
xvar += x * x;
}
}
%(red)s
float gamma = __ldg(gamma_in + bid);
float beta = __ldg(beta_in + bid);
if ( tid == 0 )
{
float gmean = __ldg(gmean_in + bid);
float gvar = __ldg(gvar_in + bid);
*(xvar_out + bid) = xvar;
*(gmean_out + bid) = gmean * rho + (1.0f - rho) * xmean;
*(gvar_out + bid) = gvar * rho + (1.0f - rho) * xvar;
}
float xvar_rcp_sqrt = 1.0f / sqrtf(xvar + eps);
int start = N - (THREADS*4 - tid);
offset += start;
x_in += offset;
y_out += offset;
for (int i = start; i >= -THREADS*3; i -= THREADS*4)
{
float x0 = i >= -THREADS*0 ? %(cvt)s(__ldg(x_in + THREADS*0)) : 0.0f;
float x1 = i >= -THREADS*1 ? %(cvt)s(__ldg(x_in + THREADS*1)) : 0.0f;
float x2 = i >= -THREADS*2 ? %(cvt)s(__ldg(x_in + THREADS*2)) : 0.0f;
float x3 = %(cvt)s(__ldg(x_in + THREADS*3));
x_in -= THREADS*4;
float xhat0 = 0.0f;
float xhat1 = 0.0f;
float xhat2 = 0.0f;
float xhat3 = 0.0f;
float y0 = 0.0f;
float y1 = 0.0f;
float y2 = 0.0f;
float y3 = 0.0f;
if (binary) {
xhat0 = shift_element(x0 - xmean, xvar_rcp_sqrt, true);
xhat1 = shift_element(x1 - xmean, xvar_rcp_sqrt, true);
xhat2 = shift_element(x2 - xmean, xvar_rcp_sqrt, true);
xhat3 = shift_element(x3 - xmean, xvar_rcp_sqrt, true);
y0 = shift_element(xhat0, gamma, true) + beta;
y1 = shift_element(xhat1, gamma, true) + beta;
y2 = shift_element(xhat2, gamma, true) + beta;
y3 = shift_element(xhat3, gamma, true) + beta;
} else {
xhat0 = (x0 - xmean) * xvar_rcp_sqrt;
xhat1 = (x1 - xmean) * xvar_rcp_sqrt;
xhat2 = (x2 - xmean) * xvar_rcp_sqrt;
xhat3 = (x3 - xmean) * xvar_rcp_sqrt;
y0 = xhat0 * gamma + beta;
y1 = xhat1 * gamma + beta;
y2 = xhat2 * gamma + beta;
y3 = xhat3 * gamma + beta;
}
if (relu)
{
y0 = fmaxf(y0, 0.0f);
y1 = fmaxf(y1, 0.0f);
y2 = fmaxf(y2, 0.0f);
y3 = fmaxf(y3, 0.0f);
}
%(y0_out)s
%(y1_out)s
%(y2_out)s
%(y3_out)s
if (accumbeta == 0.0)
{
if (i >= -THREADS*0) *(y_out + THREADS*0) = y0_val;
if (i >= -THREADS*1) *(y_out + THREADS*1) = y1_val;
if (i >= -THREADS*2) *(y_out + THREADS*2) = y2_val;
*(y_out + THREADS*3) = y3_val;
}
else
{
if (i >= -THREADS*0) *(y_out + THREADS*0) = y_out[THREADS*0] * accumbeta + y0_val;
if (i >= -THREADS*1) *(y_out + THREADS*1) = y_out[THREADS*1] * accumbeta + y1_val;
if (i >= -THREADS*2) *(y_out + THREADS*2) = y_out[THREADS*2] * accumbeta + y2_val;
*(y_out + THREADS*3) = y_out[THREADS*3] * accumbeta + y3_val;
}
y_out -= THREADS*4;
}
}
"""
out_code = _ew_strings["round"]["nearest"].get(dtype, "float {0} = {1};")
common_code = _common_round["nearest"].get(dtype, "")
if dtype == "f2":
common_code += _common_fp16_to_fp32
if (compute_capability[0] == 3 and compute_capability[1] < 5) or compute_capability[0] < 3:
common_code += _common_kepler
code = code % {
"common" : common_code,
"binary" : shift_element(),
"share" : shr_code,
"red" : red_code,
"threads" : threads,
"type" : _ew_types[dtype]["type"],
"cvt" : _ew_types[dtype]["cvt"],
"y0_out" : out_code.format("y0_val", "y0"),
"y1_out" : out_code.format("y1_val", "y1"),
"y2_out" : out_code.format("y2_val", "y2"),
"y3_out" : out_code.format("y3_val", "y3"),
}
module = SourceModule(code, options=["--use_fast_math"])
kernel = module.get_function("batchnorm_fprop")
kernel.prepare("PPPPPPPPPPfffIII")
kernel.name = "batchnorm_fprop"
return kernel
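
# --- Added illustration, not part of the original module ---
# A minimal launch sketch for the prepared fprop kernel. The grid/block
# shapes and the device buffers are assumptions; the argument order mirrors
# the kernel signature above (ten pointers, then eps/rho/accumbeta and
# N/relu/binary, matching the "PPPPPPPPPPfffIII" prepare string).
def _demo_launch_fprop(bufs, nrows, N, compute_capability=(5, 0)):
    threads = 128  # must match the THREADS value the kernel was built with
    kernel = _get_bn_fprop_kernel('f4', threads, compute_capability)
    kernel.prepared_call(
        (nrows, 1), (threads, 1, 1),
        bufs['y'], bufs['xvar'], bufs['gmean_out'], bufs['gvar_out'],
        bufs['x'], bufs['xsum'], bufs['gmean'], bufs['gvar'],
        bufs['gamma'], bufs['beta'],
        1e-3, 0.9, 0.0, N, 0, 0)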
@context_dependent_memoize
def _get_bn_bprop_kernel(dtype, threads, compute_capability):
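    """Generate, compile, and memoize the batchnorm bprop CUDA kernel for
    the given dtype, thread count, and compute capability."""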
if threads > 32:
shr_code = "__shared__ float sPartials[THREADS * 2];"
red_code = r"""
sPartials[tid + THREADS*0] = grad_gamma;
sPartials[tid + THREADS*1] = grad_beta;
__syncthreads();
#pragma unroll
for (int a = THREADS >> 1; a > 32; a >>= 1)
{
if ( tid < a )
{
sPartials[tid + THREADS*0] += sPartials[tid + a + THREADS*0];
sPartials[tid + THREADS*1] += sPartials[tid + a + THREADS*1];
}
__syncthreads();
}
if ( tid < 32 )
{
grad_gamma = sPartials[tid + THREADS*0] + sPartials[tid + 32 + THREADS*0];
grad_beta = sPartials[tid + THREADS*1] + sPartials[tid + 32 + THREADS*1];
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
{
grad_gamma += __shfl_xor(grad_gamma, i);
grad_beta += __shfl_xor(grad_beta, i);
}
sPartials[tid + THREADS*0] = grad_gamma;
sPartials[tid + THREADS*1] = grad_beta;
}
__syncthreads();
grad_gamma = sPartials[THREADS*0];
grad_beta = sPartials[THREADS*1];
"""
else:
shr_code = ""
red_code = r"""
#pragma unroll
for (int i = 16; i > 0; i >>= 1)
{
grad_gamma += __shfl_xor(grad_gamma, i);
grad_beta += __shfl_xor(grad_beta, i);
}
"""
code = r"""
#define THREADS %(threads)s
%(common)s
%(binary)s
__global__ void batchnorm_bprop (
%(type)s* delta_out, float* grad_gamma_out, float* grad_beta_out,
const %(type)s* delta_in, const %(type)s* x_in, const float* xsum_in,
const float* xvar_in, const float* gamma_in,
const float eps, const int N, bool binary)
{
%(share)s
const int tid = threadIdx.x;
const int bid = blockIdx.x;
const float rcpN = 1.0f/(float)N;
int offset = bid * N;
const %(type)s* x_in0 = x_in + offset + tid;
const %(type)s* d_in0 = delta_in + offset + tid;
float xmean = __ldg(xsum_in + bid) * rcpN;
float xvar = __ldg(xvar_in + bid);
float gamma = __ldg(gamma_in + bid);
float xvar_rcp_sqrt = 1.0f / sqrtf(xvar + eps);
float grad_gamma = 0.0f;
float grad_beta = 0.0f;
for (int i = tid; i < N; i += THREADS)
{
float x = %(cvt)s(__ldg(x_in0));
x_in0 += THREADS;
float d = %(cvt)s(__ldg(d_in0));
d_in0 += THREADS;
float xhat = 0.0f;
if (binary) {
xhat = shift_element(x - xmean, xvar_rcp_sqrt, true);
} else {
xhat = (x - xmean) * xvar_rcp_sqrt;
}
grad_gamma += xhat * d;
grad_beta += d;
}
%(red)s
if ( tid == 0 )
{
*(grad_gamma_out + bid) = grad_gamma;
*(grad_beta_out + bid) = grad_beta;
}
int start = N - (THREADS*4 - tid);
offset += start;
const %(type)s* x_in1 = x_in + offset;
const %(type)s* d_in1 = delta_in + offset;
delta_out += offset;
for (int i = start; i >= -THREADS*3; i -= THREADS*4)
{
float x0 = i >= -THREADS*0 ? %(cvt)s(__ldg(x_in1 + THREADS*0)) : 0.0f;
float x1 = i >= -THREADS*1 ? %(cvt)s(__ldg(x_in1 + THREADS*1)) : 0.0f;
float x2 = i >= -THREADS*2 ? %(cvt)s(__ldg(x_in1 + THREADS*2)) : 0.0f;
float x3 = %(cvt)s(__ldg(x_in1 + THREADS*3));
float d0 = i >= -THREADS*0 ? %(cvt)s(__ldg(d_in1 + THREADS*0)) : 0.0f;
float d1 = i >= -THREADS*1 ? %(cvt)s(__ldg(d_in1 + THREADS*1)) : 0.0f;
float d2 = i >= -THREADS*2 ? %(cvt)s(__ldg(d_in1 + THREADS*2)) : 0.0f;
float d3 = %(cvt)s(__ldg(d_in1 + THREADS*3));
x_in1 -= THREADS*4;
d_in1 -= THREADS*4;
float xhat0 = 0.0f;
float xhat1 = 0.0f;
float xhat2 = 0.0f;
float xhat3 = 0.0f;
float xtmp0 = 0.0f;
float xtmp1 = 0.0f;
float xtmp2 = 0.0f;
float xtmp3 = 0.0f;
float delta0 = 0.0f;
float delta1 = 0.0f;
float delta2 = 0.0f;
float delta3 = 0.0f;
if (binary) {
xhat0 = shift_element(x0 - xmean, xvar_rcp_sqrt, true);
xhat1 = shift_element(x1 - xmean, xvar_rcp_sqrt, true);
xhat2 = shift_element(x2 - xmean, xvar_rcp_sqrt, true);
xhat3 = shift_element(x3 - xmean, xvar_rcp_sqrt, true);
xtmp0 = (shift_element(xhat0, grad_gamma, true) + grad_beta) * rcpN;
xtmp1 = (shift_element(xhat1, grad_gamma, true) + grad_beta) * rcpN;
xtmp2 = (shift_element(xhat2, grad_gamma, true) + grad_beta) * rcpN;
xtmp3 = (shift_element(xhat3, grad_gamma, true) + grad_beta) * rcpN;
delta0 = shift_element(shift_element(d0 - xtmp0, gamma, true), xvar_rcp_sqrt, true);
delta1 = shift_element(shift_element(d1 - xtmp1, gamma, true), xvar_rcp_sqrt, true);
delta2 = shift_element(shift_element(d2 - xtmp2, gamma, true), xvar_rcp_sqrt, true);
delta3 = shift_element(shift_element(d3 - xtmp3, gamma, true), xvar_rcp_sqrt, true);
} else {
xhat0 = (x0 - xmean) * xvar_rcp_sqrt;
xhat1 = (x1 - xmean) * xvar_rcp_sqrt;
xhat2 = (x2 - xmean) * xvar_rcp_sqrt;
xhat3 = (x3 - xmean) * xvar_rcp_sqrt;
xtmp0 = (xhat0 * grad_gamma + grad_beta) * rcpN;
xtmp1 = (xhat1 * grad_gamma + grad_beta) * rcpN;
xtmp2 = (xhat2 * grad_gamma + grad_beta) * rcpN;
xtmp3 = (xhat3 * grad_gamma + grad_beta) * rcpN;
delta0 = gamma * (d0 - xtmp0) * xvar_rcp_sqrt;
delta1 = gamma * (d1 - xtmp1) * xvar_rcp_sqrt;
delta2 = gamma * (d2 - xtmp2) * xvar_rcp_sqrt;
delta3 = gamma * (d3 - xtmp3) * xvar_rcp_sqrt;
}
%(delta0_out)s
%(delta1_out)s
%(delta2_out)s
%(delta3_out)s
if (i >= -THREADS*0) *(delta_out + THREADS*0) = delta0_val;
if (i >= -THREADS*1) *(delta_out + THREADS*1) = delta1_val;
if (i >= -THREADS*2) *(delta_out + THREADS*2) = delta2_val;
*(delta_out + THREADS*3) = delta3_val;
delta_out -= THREADS*4;
}
}
"""
out_code = _ew_strings["round"]["nearest"].get(dtype, "float {0} = {1};")
common_code = _common_round["nearest"].get(dtype, "")
if dtype == "f2":
common_code += _common_fp16_to_fp32
if (compute_capability[0] == 3 and compute_capability[1] < 5) or compute_capability[0] < 3:
common_code += _common_kepler
code = code % {
"common" : common_code,
"binary" : shift_element(),
"share" : shr_code,
"red" : red_code,
"threads" : threads,
"type" : _ew_types[dtype]["type"],
"cvt" : _ew_types[dtype]["cvt"],
"delta0_out" : out_code.format("delta0_val", "delta0"),
"delta1_out" : out_code.format("delta1_val", "delta1"),
"delta2_out" : out_code.format("delta2_val", "delta2"),
"delta3_out" : out_code.format("delta3_val", "delta3"),
}
module = SourceModule(code, options=["--use_fast_math"])
kernel = module.get_function("batchnorm_bprop")
kernel.prepare("PPPPPPPPfII")
kernel.name = "batchnorm_bprop"
return kernel

# repo: Dklotz-Circle/security_monkey | path: security_monkey/views/user_settings.py | license: apache-2.0
# Copyright 2014 Netflix, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from security_monkey.views import AuthenticatedService
from security_monkey.views import __check_auth__
from security_monkey.views import USER_SETTINGS_FIELDS
from security_monkey.datastore import Account
from security_monkey.datastore import User
from security_monkey import db
from security_monkey import api
from flask.ext.restful import marshal, reqparse
from flask.ext.login import current_user
class UserSettings(AuthenticatedService):
def __init__(self):
super(UserSettings, self).__init__()
def get(self):
"""
.. http:get:: /api/1/settings
Get the settings for the given user.
**Example Request**:
.. sourcecode:: http
GET /api/1/settings HTTP/1.1
Host: example.com
Accept: application/json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"auth": {
"authenticated": true,
"user": "[email protected]"
},
"settings": [
{
"accounts": [
1,
2,
3,
6,
17,
21,
22
],
"change_reports": "ISSUES",
"daily_audit_email": true
}
]
}
:statuscode 200: no error
:statuscode 401: Authentication Error. Please Authenticate.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
return_dict = {"auth": self.auth_dict}
if not current_user.is_authenticated():
return_val = return_dict, 401
return return_val
return_dict["settings"] = []
user = User.query.filter(User.id == current_user.get_id()).first()
if user:
sub_marshaled = marshal(user.__dict__, USER_SETTINGS_FIELDS)
account_ids = []
for account in user.accounts:
account_ids.append(account.id)
sub_marshaled = dict(sub_marshaled.items() +
{"accounts": account_ids}.items()
)
return_dict["settings"].append(sub_marshaled)
return return_dict, 200
def post(self):
"""
.. http:post:: /api/1/settings
Change the settings for the current user.
**Example Request**:
.. sourcecode:: http
POST /api/1/settings HTTP/1.1
Host: example.com
Accept: application/json
{
"accounts": [
1,
2,
3,
6,
17,
21,
22
],
"daily_audit_email": true,
"change_report_setting": "ALL"
}
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
Vary: Accept
Content-Type: application/json
{
"auth": {
"authenticated": true,
"user": "[email protected]"
},
"settings": {
"accounts": [
1,
2,
3,
6,
17,
21,
22
],
"daily_audit_email": true,
"change_report_setting": "ALL"
}
}
:statuscode 200: no error
:statuscode 401: Authentication Error. Please Login.
"""
auth, retval = __check_auth__(self.auth_dict)
if auth:
return retval
self.reqparse.add_argument('accounts', required=True, type=list, help='Must provide accounts', location='json')
self.reqparse.add_argument('change_report_setting', required=True, type=str, help='Must provide change_report_setting', location='json')
self.reqparse.add_argument('daily_audit_email', required=True, type=bool, help='Must provide daily_audit_email', location='json')
args = self.reqparse.parse_args()
current_user.daily_audit_email = args['daily_audit_email']
current_user.change_reports = args['change_report_setting']
account_list = []
for account_id in args['accounts']:
account = Account.query.filter(Account.id == account_id).first()
if account:
account_list.append(account)
#current_user.accounts.append(account)
current_user.accounts = account_list
db.session.add(current_user)
db.session.commit()
retdict = {'auth': self.auth_dict}
account_ids = []
for account in current_user.accounts:
account_ids.append(account.id)
retdict['settings'] = {
"accounts": account_ids,
"change_report_setting": current_user.change_reports,
"daily_audit_email": current_user.daily_audit_email
}
return retdict, 200
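
# --- Added illustration, not part of the original module ---
# A minimal sketch of exercising the endpoint with the `requests` library;
# the host and the authenticated session cookie are assumptions, and the
# payload keys mirror the reqparse arguments defined in post() above.
def _demo_update_settings(cookies):
    import requests
    payload = {
        "accounts": [1, 2, 3],
        "change_report_setting": "ISSUES",
        "daily_audit_email": True,
    }
    return requests.post("https://example.com/api/1/settings",
                         json=payload, cookies=cookies)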

# repo: NoahFlowa/glowing-spoon | path: venv/lib/python2.7/site-packages/psycopg2/tests/test_transaction.py | license: apache-2.0
#!/usr/bin/env python
# test_transaction - unit test on transaction behaviour
#
# Copyright (C) 2007-2011 Federico Di Gregorio <[email protected]>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# In addition, as a special exception, the copyright holders give
# permission to link this program with the OpenSSL library (or with
# modified versions of OpenSSL that use the same license as OpenSSL),
# and distribute linked combinations including the two.
#
# You must obey the GNU Lesser General Public License in all respects for
# all of the code used other than OpenSSL.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
import threading
from testutils import unittest, ConnectingTestCase, skip_before_postgres, slow
import psycopg2
from psycopg2.extensions import (
ISOLATION_LEVEL_SERIALIZABLE, STATUS_BEGIN, STATUS_READY)
class TransactionTests(ConnectingTestCase):
def setUp(self):
ConnectingTestCase.setUp(self)
self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
curs = self.conn.cursor()
curs.execute('''
CREATE TEMPORARY TABLE table1 (
id int PRIMARY KEY
)''')
# The constraint is set to deferrable for the commit_failed test
curs.execute('''
CREATE TEMPORARY TABLE table2 (
id int PRIMARY KEY,
table1_id int,
CONSTRAINT table2__table1_id__fk
FOREIGN KEY (table1_id) REFERENCES table1(id) DEFERRABLE)''')
curs.execute('INSERT INTO table1 VALUES (1)')
curs.execute('INSERT INTO table2 VALUES (1, 1)')
self.conn.commit()
def test_rollback(self):
# Test that rollback undoes changes
curs = self.conn.cursor()
curs.execute('INSERT INTO table2 VALUES (2, 1)')
# Rollback takes us from BEGIN state to READY state
self.assertEqual(self.conn.status, STATUS_BEGIN)
self.conn.rollback()
self.assertEqual(self.conn.status, STATUS_READY)
curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2')
self.assertEqual(curs.fetchall(), [])
def test_commit(self):
# Test that commit stores changes
curs = self.conn.cursor()
curs.execute('INSERT INTO table2 VALUES (2, 1)')
# Rollback takes us from BEGIN state to READY state
self.assertEqual(self.conn.status, STATUS_BEGIN)
self.conn.commit()
self.assertEqual(self.conn.status, STATUS_READY)
# Now rollback and show that the new record is still there:
self.conn.rollback()
curs.execute('SELECT id, table1_id FROM table2 WHERE id = 2')
self.assertEqual(curs.fetchall(), [(2, 1)])
def test_failed_commit(self):
# Test that we can recover from a failed commit.
# We use a deferred constraint to cause a failure on commit.
curs = self.conn.cursor()
curs.execute('SET CONSTRAINTS table2__table1_id__fk DEFERRED')
curs.execute('INSERT INTO table2 VALUES (2, 42)')
# The commit should fail, and move the cursor back to READY state
self.assertEqual(self.conn.status, STATUS_BEGIN)
self.assertRaises(psycopg2.IntegrityError, self.conn.commit)
self.assertEqual(self.conn.status, STATUS_READY)
# The connection should be ready to use for the next transaction:
curs.execute('SELECT 1')
self.assertEqual(curs.fetchone()[0], 1)
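
# --- Added illustration, not part of the original module ---
# A minimal standalone sketch of the deferred-constraint pattern exercised
# by test_failed_commit above: the FK violation only surfaces at COMMIT
# time, so IntegrityError is raised by conn.commit(), not by the INSERT.
# The DSN is an assumption; the table/constraint names come from setUp.
def _demo_failed_commit():
    conn = psycopg2.connect("dbname=test")
    curs = conn.cursor()
    curs.execute("SET CONSTRAINTS table2__table1_id__fk DEFERRED")
    curs.execute("INSERT INTO table2 VALUES (2, 42)")  # dangling FK
    try:
        conn.commit()  # the IntegrityError appears here
    except psycopg2.IntegrityError:
        conn.rollback()
    conn.close()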
class DeadlockSerializationTests(ConnectingTestCase):
"""Test deadlock and serialization failure errors."""
def connect(self):
conn = ConnectingTestCase.connect(self)
conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
return conn
def setUp(self):
ConnectingTestCase.setUp(self)
curs = self.conn.cursor()
# Drop table if it already exists
try:
curs.execute("DROP TABLE table1")
self.conn.commit()
except psycopg2.DatabaseError:
self.conn.rollback()
try:
curs.execute("DROP TABLE table2")
self.conn.commit()
except psycopg2.DatabaseError:
self.conn.rollback()
# Create sample data
curs.execute("""
CREATE TABLE table1 (
id int PRIMARY KEY,
name text)
""")
curs.execute("INSERT INTO table1 VALUES (1, 'hello')")
curs.execute("CREATE TABLE table2 (id int PRIMARY KEY)")
self.conn.commit()
def tearDown(self):
curs = self.conn.cursor()
curs.execute("DROP TABLE table1")
curs.execute("DROP TABLE table2")
self.conn.commit()
ConnectingTestCase.tearDown(self)
@slow
def test_deadlock(self):
self.thread1_error = self.thread2_error = None
step1 = threading.Event()
step2 = threading.Event()
def task1():
try:
conn = self.connect()
curs = conn.cursor()
curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE")
step1.set()
step2.wait()
curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE")
except psycopg2.DatabaseError, exc:
self.thread1_error = exc
step1.set()
conn.close()
def task2():
try:
conn = self.connect()
curs = conn.cursor()
step1.wait()
curs.execute("LOCK table2 IN ACCESS EXCLUSIVE MODE")
step2.set()
curs.execute("LOCK table1 IN ACCESS EXCLUSIVE MODE")
except psycopg2.DatabaseError, exc:
self.thread2_error = exc
step2.set()
conn.close()
# Run the threads in parallel. The "step1" and "step2" events
# ensure that the two transactions overlap.
thread1 = threading.Thread(target=task1)
thread2 = threading.Thread(target=task2)
thread1.start()
thread2.start()
thread1.join()
thread2.join()
# Exactly one of the threads should have failed with
# TransactionRollbackError:
self.assertFalse(self.thread1_error and self.thread2_error)
error = self.thread1_error or self.thread2_error
self.assertTrue(isinstance(
error, psycopg2.extensions.TransactionRollbackError))
@slow
def test_serialisation_failure(self):
self.thread1_error = self.thread2_error = None
step1 = threading.Event()
step2 = threading.Event()
def task1():
try:
conn = self.connect()
curs = conn.cursor()
curs.execute("SELECT name FROM table1 WHERE id = 1")
curs.fetchall()
step1.set()
step2.wait()
curs.execute("UPDATE table1 SET name='task1' WHERE id = 1")
conn.commit()
except psycopg2.DatabaseError, exc:
self.thread1_error = exc
step1.set()
conn.close()
def task2():
try:
conn = self.connect()
curs = conn.cursor()
step1.wait()
curs.execute("UPDATE table1 SET name='task2' WHERE id = 1")
conn.commit()
except psycopg2.DatabaseError, exc:
self.thread2_error = exc
step2.set()
conn.close()
# Run the threads in parallel. The "step1" and "step2" events
# ensure that the two transactions overlap.
thread1 = threading.Thread(target=task1)
thread2 = threading.Thread(target=task2)
thread1.start()
thread2.start()
thread1.join()
thread2.join()
# Exactly one of the threads should have failed with
# TransactionRollbackError:
self.assertFalse(self.thread1_error and self.thread2_error)
error = self.thread1_error or self.thread2_error
self.assertTrue(isinstance(
error, psycopg2.extensions.TransactionRollbackError))
class QueryCancellationTests(ConnectingTestCase):
"""Tests for query cancellation."""
def setUp(self):
ConnectingTestCase.setUp(self)
self.conn.set_isolation_level(ISOLATION_LEVEL_SERIALIZABLE)
@skip_before_postgres(8, 2)
def test_statement_timeout(self):
curs = self.conn.cursor()
# Set a low statement timeout, then sleep for a longer period.
curs.execute('SET statement_timeout TO 10')
self.assertRaises(psycopg2.extensions.QueryCanceledError,
curs.execute, 'SELECT pg_sleep(50)')
def test_suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == "__main__":
unittest.main()

# repo: alhashash/odoo | path: addons/mrp/__init__.py | license: agpl-3.0
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import mrp
import stock
import product
import wizard
import report
import company
import procurement
import res_config

# repo: digimarc/django | path: tests/m2m_through_regress/tests.py | license: bsd-3-clause
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import management
from django.test import TestCase
from django.utils.six import StringIO
from .models import (
Car, CarDriver, Driver, Group, Membership, Person, UserMembership,
)
class M2MThroughTestCase(TestCase):
def test_everything(self):
bob = Person.objects.create(name="Bob")
jim = Person.objects.create(name="Jim")
rock = Group.objects.create(name="Rock")
roll = Group.objects.create(name="Roll")
frank = User.objects.create_user("frank", "[email protected]", "password")
jane = User.objects.create_user("jane", "[email protected]", "password")
Membership.objects.create(person=bob, group=rock)
Membership.objects.create(person=bob, group=roll)
Membership.objects.create(person=jim, group=rock)
self.assertQuerysetEqual(
bob.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
self.assertQuerysetEqual(
roll.members.all(), [
"<Person: Bob>",
]
)
self.assertRaises(AttributeError, setattr, bob, "group_set", [])
self.assertRaises(AttributeError, setattr, roll, "members", [])
self.assertRaises(AttributeError, rock.members.create, name="Anne")
self.assertRaises(AttributeError, bob.group_set.create, name="Funk")
UserMembership.objects.create(user=frank, group=rock)
UserMembership.objects.create(user=frank, group=roll)
UserMembership.objects.create(user=jane, group=rock)
self.assertQuerysetEqual(
frank.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
self.assertQuerysetEqual(
roll.user_members.all(), [
"<User: frank>",
]
)
def test_serialization(self):
"m2m-through models aren't serialized as m2m fields. Refs #8134"
p = Person.objects.create(name="Bob")
g = Group.objects.create(name="Roll")
m = Membership.objects.create(person=p, group=g)
pks = {"p_pk": p.pk, "g_pk": g.pk, "m_pk": m.pk}
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(out.getvalue().strip(), """[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": 100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": "Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]""" % pks)
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="xml",
indent=2, stdout=out)
self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
""".strip() % pks)
def test_join_trimming(self):
"Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
bob = Person.objects.create(name="Bob")
jim = Person.objects.create(name="Jim")
rock = Group.objects.create(name="Rock")
roll = Group.objects.create(name="Roll")
Membership.objects.create(person=bob, group=rock)
Membership.objects.create(person=jim, group=rock, price=50)
Membership.objects.create(person=bob, group=roll, price=50)
self.assertQuerysetEqual(
rock.members.filter(membership__price=50), [
"<Person: Jim>",
]
)
self.assertQuerysetEqual(
bob.group_set.filter(membership__price=50), [
"<Group: Roll>",
]
)
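
# --- Added illustration, not part of the original module ---
# A minimal sketch of the pattern the tests above exercise: with an explicit
# `through` model, rows are created via the intermediate Membership model
# rather than with .add(), and extra columns such as `price` ride along.
def _demo_through_usage():
    bob = Person.objects.create(name="Bob")
    rock = Group.objects.create(name="Rock")
    Membership.objects.create(person=bob, group=rock, price=50)
    return rock.members.filter(membership__price=50)  # -> [<Person: Bob>]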
class ToFieldThroughTests(TestCase):
def setUp(self):
self.car = Car.objects.create(make="Toyota")
self.driver = Driver.objects.create(name="Ryan Briscoe")
CarDriver.objects.create(car=self.car, driver=self.driver)
# We are testing if wrong objects get deleted due to using wrong
# field value in m2m queries. So, it is essential that the pk
# numberings do not match.
# Create one intentionally unused driver to mix up the autonumbering
self.unused_driver = Driver.objects.create(name="Barney Gumble")
# And two intentionally unused cars.
self.unused_car1 = Car.objects.create(make="Trabant")
self.unused_car2 = Car.objects.create(make="Wartburg")
def test_to_field(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
def test_to_field_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
def test_to_field_clear_reverse(self):
self.driver.car_set.clear()
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
def test_to_field_clear(self):
self.car.drivers.clear()
self.assertQuerysetEqual(
self.car.drivers.all(), [])
# Low level tests for _add_items and _remove_items. We test these methods
# because .add/.remove aren't available for m2m fields with through, but
# through is the only way to set to_field currently. We do want to make
# sure these methods are ready if the ability to use .add or .remove with
# to_field relations is added some day.
def test_add(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
# Yikes - barney is going to drive...
self.car.drivers._add_items('car', 'driver', self.unused_driver)
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
)
def test_add_null(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
nullcar.drivers._add_items('car', 'driver', self.unused_driver)
def test_add_related_null(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
self.car.drivers._add_items('car', 'driver', nulldriver)
def test_add_reverse(self):
car2 = Car.objects.create(make="Honda")
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._add_items('driver', 'car', car2)
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>", "<Car: Honda>"],
ordered=False
)
def test_add_null_reverse(self):
nullcar = Car.objects.create(make=None)
with self.assertRaises(ValueError):
self.driver.car_set._add_items('driver', 'car', nullcar)
def test_add_null_reverse_related(self):
nulldriver = Driver.objects.create(name=None)
with self.assertRaises(ValueError):
nulldriver.car_set._add_items('driver', 'car', self.car)
def test_remove(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
self.car.drivers._remove_items('car', 'driver', self.driver)
self.assertQuerysetEqual(
self.car.drivers.all(), [])
def test_remove_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
self.driver.car_set._remove_items('driver', 'car', self.car)
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
class ThroughLoadDataTestCase(TestCase):
fixtures = ["m2m_through"]
def test_sequence_creation(self):
"Check that sequences on an m2m_through are created for the through model, not a phantom auto-generated m2m table. Refs #11107"
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(out.getvalue().strip(), """[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user": 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, "model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]""")

# repo: christophlsa/odoo | path: addons/website_mail_group/controllers/main.py | license: agpl-3.0
# -*- coding: utf-8 -*-
import datetime
from dateutil import relativedelta
from openerp import tools, SUPERUSER_ID
from openerp.addons.web import http
from openerp.addons.website.models.website import slug
from openerp.addons.web.http import request
class MailGroup(http.Controller):
_thread_per_page = 20
_replies_per_page = 10
def _get_archives(self, group_id):
MailMessage = request.registry['mail.message']
groups = MailMessage.read_group(
request.cr, request.uid, [('model', '=', 'mail.group'), ('res_id', '=', group_id)], ['subject', 'date'],
groupby="date", orderby="date desc", context=request.context)
for group in groups:
begin_date = datetime.datetime.strptime(group['__domain'][0][2], tools.DEFAULT_SERVER_DATETIME_FORMAT).date()
end_date = datetime.datetime.strptime(group['__domain'][1][2], tools.DEFAULT_SERVER_DATETIME_FORMAT).date()
group['date_begin'] = '%s' % datetime.date.strftime(begin_date, tools.DEFAULT_SERVER_DATE_FORMAT)
group['date_end'] = '%s' % datetime.date.strftime(end_date, tools.DEFAULT_SERVER_DATE_FORMAT)
return groups
@http.route("/groups", type='http', auth="public", website=True)
def view(self, **post):
cr, uid, context = request.cr, request.uid, request.context
group_obj = request.registry.get('mail.group')
mail_message_obj = request.registry.get('mail.message')
group_ids = group_obj.search(cr, uid, [('alias_id', '!=', False), ('alias_id.alias_name', '!=', False)], context=context)
groups = group_obj.browse(cr, uid, group_ids, context)
# compute statistics
month_date = datetime.datetime.today() - relativedelta.relativedelta(months=1)
group_data = dict()
for group in groups:
group_data[group.id] = {
'monthly_message_nbr': mail_message_obj.search(
cr, SUPERUSER_ID,
[('model', '=', 'mail.group'), ('res_id', '=', group.id), ('date', '>=', month_date.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT))],
count=True, context=context)}
values = {'groups': groups, 'group_data': group_data}
return request.website.render('website_mail_group.mail_groups', values)
@http.route(["/groups/subscription/"], type='json', auth="user")
def subscription(self, group_id=0, action=False, **post):
""" TDE FIXME: seems dead code """
cr, uid, context = request.cr, request.uid, request.context
group_obj = request.registry.get('mail.group')
if action:
group_obj.message_subscribe_users(cr, uid, [group_id], context=context)
else:
group_obj.message_unsubscribe_users(cr, uid, [group_id], context=context)
return []
@http.route([
"/groups/<model('mail.group'):group>",
"/groups/<model('mail.group'):group>/page/<int:page>"
], type='http', auth="public", website=True)
def thread_headers(self, group, page=1, mode='thread', date_begin=None, date_end=None, **post):
cr, uid, context = request.cr, request.uid, request.context
thread_obj = request.registry.get('mail.message')
domain = [('model', '=', 'mail.group'), ('res_id', '=', group.id)]
if mode == 'thread':
domain += [('parent_id', '=', False)]
if date_begin and date_end:
domain += [('date', '>=', date_begin), ('date', '<=', date_end)]
thread_count = thread_obj.search_count(cr, uid, domain, context=context)
pager = request.website.pager(
url='/groups/%s' % slug(group),
total=thread_count,
page=page,
step=self._thread_per_page,
url_args={'mode': mode, 'date_begin': date_begin or '', 'date_end': date_end or ''},
)
thread_ids = thread_obj.search(cr, uid, domain, limit=self._thread_per_page, offset=pager['offset'])
messages = thread_obj.browse(cr, uid, thread_ids, context)
values = {
'messages': messages,
'group': group,
'pager': pager,
'mode': mode,
'archives': self._get_archives(group.id),
'date_begin': date_begin,
'date_end': date_end,
'replies_per_page': self._replies_per_page,
}
return request.website.render('website_mail_group.group_messages', values)
@http.route([
'''/groups/<model('mail.group'):group>/<model('mail.message', "[('model','=','mail.group'), ('res_id','=',group[0])]"):message>''',
], type='http', auth="public", website=True)
def thread_discussion(self, group, message, mode='thread', date_begin=None, date_end=None, **post):
cr, uid, context = request.cr, request.uid, request.context
Message = request.registry['mail.message']
if mode == 'thread':
base_domain = [('model', '=', 'mail.group'), ('res_id', '=', group.id), ('parent_id', '=', message.parent_id and message.parent_id.id or False)]
else:
base_domain = [('model', '=', 'mail.group'), ('res_id', '=', group.id)]
next_message = None
next_message_ids = Message.search(cr, uid, base_domain + [('date', '<', message.date)], order="date DESC", limit=1, context=context)
if next_message_ids:
next_message = Message.browse(cr, uid, next_message_ids[0], context=context)
prev_message = None
prev_message_ids = Message.search(cr, uid, base_domain + [('date', '>', message.date)], order="date ASC", limit=1, context=context)
if prev_message_ids:
prev_message = Message.browse(cr, uid, prev_message_ids[0], context=context)
values = {
'message': message,
'group': group,
'mode': mode,
'archives': self._get_archives(group.id),
'date_begin': date_begin,
'date_end': date_end,
'replies_per_page': self._replies_per_page,
'next_message': next_message,
'prev_message': prev_message,
}
return request.website.render('website_mail_group.group_message', values)
@http.route(
'''/groups/<model('mail.group'):group>/<model('mail.message', "[('model','=','mail.group'), ('res_id','=',group[0])]"):message>/get_replies''',
type='json', auth="public", methods=['POST'], website=True)
def render_messages(self, group, message, **post):
last_displayed_id = post.get('last_displayed_id')
if not last_displayed_id:
return False
Message = request.registry['mail.message']
replies_domain = [('id', '<', int(last_displayed_id)), ('parent_id', '=', message.id)]
msg_ids = Message.search(request.cr, request.uid, replies_domain, limit=self._replies_per_page, context=request.context)
msg_count = Message.search(request.cr, request.uid, replies_domain, count=True, context=request.context)
messages = Message.browse(request.cr, request.uid, msg_ids, context=request.context)
values = {
'group': group,
'thread_header': message,
'messages': messages,
'msg_more_count': msg_count - self._replies_per_page,
'replies_per_page': self._replies_per_page,
}
return request.registry['ir.ui.view'].render(request.cr, request.uid, 'website_mail_group.messages_short', values, engine='ir.qweb', context=request.context)
@http.route("/groups/<model('mail.group'):group>/get_alias_info", type='json', auth='public', website=True)
def get_alias_info(self, group, **post):
return {
'alias_name': group.alias_id and group.alias_id.alias_name and group.alias_id.alias_domain and '%s@%s' % (group.alias_id.alias_name, group.alias_id.alias_domain) or False
}
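
# --- Added illustration, not part of the original module ---
# A minimal sketch of what the JSON route above returns for a group whose
# alias is "rock" on domain "example.com"; the values are assumptions.
#   POST /groups/<id>/get_alias_info  ->  {"alias_name": "rock@example.com"}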

# repo: smartdevice475/sdl_core | path: tools/InterfaceGenerator/test/generator/parsers/test_SDLRPCV2.py | license: bsd-3-clause
"""SDLRPCV2 XML parser unit test."""
import os
import unittest
import generator.Model
import generator.parsers.SDLRPCV2
class TestSDLRPCV2Parser(unittest.TestCase):
"""Test for SDLRPCV2 xml parser."""
class _Issue:
def __init__(self, creator, value):
self.creator = creator
self.value = value
def __eq__(self, other):
return self.creator == other.creator and self.value == other.value
def setUp(self):
"""Test initialization."""
self.valid_xml_name = os.path.dirname(os.path.realpath(__file__)) + \
"/valid_SDLRPCV2.xml"
self.parser = generator.parsers.SDLRPCV2.Parser()
def test_valid_xml(self):
"""Test parsing of valid xml."""
interface = self.parser.parse(self.valid_xml_name)
self.assertEqual(2, len(interface.params))
self.assertDictEqual({"attribute1": "value1", "attribute2": "value2"},
interface.params)
# Enumerations
self.assertEqual(3, len(interface.enums))
# Enumeration "FunctionID"
self.assertIn("FunctionID", interface.enums)
enum = interface.enums["FunctionID"]
self.verify_base_item(item=enum,
name="FunctionID",
description=["Description string 1",
"Description string 2"],
todos=['Function id todo'])
self.assertIsNone(enum.internal_scope)
self.assertEqual(2, len(enum.elements))
self.assertIn("Function1_id", enum.elements)
element = enum.elements["Function1_id"]
self.verify_base_item(
item=element,
name="Function1_id",
design_description=["Function1 element design description"])
self.assertIsNone(element.internal_name)
self.assertEqual(10, element.value)
self.assertIn("Function2_id", enum.elements)
element = enum.elements["Function2_id"]
self.verify_base_item(
item=element,
name="Function2_id")
self.assertEqual("Function2_internal", element.internal_name)
self.assertIsNone(element.value)
# Enumeration "messageType"
self.assertIn("messageType", interface.enums)
enum = interface.enums["messageType"]
self.verify_base_item(
item=enum,
name="messageType",
design_description=["messageType design description",
"messageType design description 2"],
issues=[TestSDLRPCV2Parser._Issue(
creator="messageType issue creator",
value="Issue text")])
self.assertIsNone(enum.internal_scope)
self.assertEqual(3, len(enum.elements))
self.assertIn("request", enum.elements)
element = enum.elements["request"]
self.verify_base_item(item=element,
name="request",
todos=["request todo 1", "request todo 2"],
issues=[TestSDLRPCV2Parser._Issue(
creator="issue creator",
value="request issue")])
self.assertIsNone(element.internal_name)
self.assertEqual(0, element.value)
self.assertIn("response", enum.elements)
element = enum.elements["response"]
self.verify_base_item(item=element, name="response")
self.assertIsNone(element.internal_name)
self.assertEqual(1, element.value)
self.assertIn("notification", enum.elements)
element = enum.elements["notification"]
self.verify_base_item(item=element, name="notification")
self.assertIsNone(element.internal_name)
self.assertEqual(2, element.value)
# Enumeration "enum1"
self.assertIn("enum1", interface.enums)
enum = interface.enums["enum1"]
self.verify_base_item(item=enum, name="enum1",
platform="enum1 platform")
self.assertEqual("scope", enum.internal_scope)
self.assertEqual(3, len(enum.elements))
self.assertIn("element1", enum.elements)
element = enum.elements["element1"]
self.verify_base_item(item=element, name="element1")
self.assertIsNone(element.internal_name)
self.assertEqual(10, element.value)
self.assertIn("element2", enum.elements)
element = enum.elements["element2"]
self.verify_base_item(item=element, name="element2")
self.assertEqual("element2_internal", element.internal_name)
self.assertEqual(11, element.value)
self.assertIn("element3", enum.elements)
element = enum.elements["element3"]
self.verify_base_item(
item=element,
name="element3",
design_description=["Element design description"],
platform="element3 platform")
self.assertIsNone(element.internal_name)
self.assertIsNone(element.value)
# Structures
self.assertEqual(2, len(interface.structs))
# Structure "struct1"
self.assertIn("struct1", interface.structs)
struct = interface.structs["struct1"]
self.verify_base_item(
item=struct,
name="struct1",
description=["Struct description"],
issues=[TestSDLRPCV2Parser._Issue(creator="creator1",
value="Issue1"),
TestSDLRPCV2Parser._Issue(creator="creator2",
value="Issue2")])
self.assertEqual(4, len(struct.members))
self.assertIn("member1", struct.members)
member = struct.members["member1"]
self.verify_base_item(
item=member,
name="member1",
description=["Param1 description"])
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Integer)
self.assertIsNone(member.param_type.min_value)
self.assertIsNone(member.param_type.max_value)
self.assertIn("member2", struct.members)
member = struct.members["member2"]
self.verify_base_item(item=member, name="member2",
platform="member2 platform")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Boolean)
self.assertIn("member3", struct.members)
member = struct.members["member3"]
self.verify_base_item(item=member, name="member3")
self.assertEqual(False, member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Double)
self.assertIsNone(member.param_type.min_value)
self.assertAlmostEqual(20.5, member.param_type.max_value)
self.assertIn("member4", struct.members)
member = struct.members["member4"]
self.verify_base_item(item=member, name="member4")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Array)
self.assertIsNone(member.param_type.min_size)
self.assertIsNone(member.param_type.max_size)
self.assertIsInstance(member.param_type.element_type,
generator.Model.Integer)
self.assertEqual(11, member.param_type.element_type.min_value)
self.assertEqual(100, member.param_type.element_type.max_value)
# Structure "struct2"
self.assertIn("struct2", interface.structs)
struct = interface.structs["struct2"]
self.verify_base_item(item=struct,
name="struct2",
description=["Description of struct2"],
platform="struct2 platform")
self.assertEqual(4, len(struct.members))
self.assertIn("m1", struct.members)
member = struct.members["m1"]
self.verify_base_item(item=member, name="m1")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.String)
self.assertIsNone(member.param_type.max_length)
self.assertIn("m2", struct.members)
member = struct.members["m2"]
self.verify_base_item(item=member, name="m2")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Array)
self.assertEqual(1, member.param_type.min_size)
self.assertEqual(50, member.param_type.max_size)
self.assertIsInstance(member.param_type.element_type,
generator.Model.String)
self.assertEqual(100, member.param_type.element_type.max_length)
self.assertIn("m3", struct.members)
member = struct.members["m3"]
self.verify_base_item(item=member, name="m3")
self.assertTrue(member.is_mandatory)
self.assertIs(member.param_type, interface.enums["enum1"])
self.assertIn("m4", struct.members)
member = struct.members["m4"]
self.verify_base_item(item=member, name="m4")
self.assertTrue(member.is_mandatory)
self.assertIsInstance(member.param_type, generator.Model.Array)
self.assertIsNone(member.param_type.min_size)
self.assertEqual(10, member.param_type.max_size)
self.assertIs(member.param_type.element_type,
interface.structs["struct1"])
# Functions
self.assertEqual(3, len(interface.functions))
# Function request "Function1"
self.assertIn(
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["request"]),
interface.functions)
function = interface.functions[
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["request"])]
self.verify_base_item(
item=function,
name="Function1",
description=["Description of request Function1"],
todos=["Function1 request todo"])
self.assertIs(function.function_id,
interface.enums["FunctionID"].elements["Function1_id"])
self.assertIs(function.message_type,
interface.enums["messageType"].elements["request"])
self.assertEqual(3, len(function.params))
self.assertIn("param1", function.params)
param = function.params["param1"]
self.verify_base_item(
item=param,
name="param1",
issues=[TestSDLRPCV2Parser._Issue(creator="", value="")])
self.assertEqual(False, param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.String)
self.assertIsNone(param.param_type.max_length)
self.assertEqual("String default value", param.default_value)
self.assertIn("param2", function.params)
param = function.params["param2"]
self.verify_base_item(
item=param,
name="param2",
description=["Param2 description", ""],
todos=["Param2 todo"],
platform="param2 platform")
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.Integer)
self.assertIsNone(param.param_type.min_value)
self.assertIsNone(param.param_type.max_value)
self.assertIsNone(param.default_value)
self.assertIn("param3", function.params)
param = function.params["param3"]
self.verify_base_item(item=param, name="param3")
self.assertEqual(False, param.is_mandatory)
self.assertIs(param.param_type, interface.structs["struct1"])
self.assertIsNone(param.default_value)
# Function response "Function1"
self.assertIn(
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["response"]),
interface.functions)
function = interface.functions[
(interface.enums["FunctionID"].elements["Function1_id"],
interface.enums["messageType"].elements["response"])]
self.verify_base_item(
item=function,
name="Function1",
issues=[TestSDLRPCV2Parser._Issue(creator="c1", value=""),
TestSDLRPCV2Parser._Issue(creator="c2", value="")],
platform="")
self.assertIs(function.function_id,
interface.enums["FunctionID"].elements["Function1_id"])
self.assertIs(function.message_type,
interface.enums["messageType"].elements["response"])
self.assertEqual(3, len(function.params))
self.assertIn("p1", function.params)
param = function.params["p1"]
self.verify_base_item(item=param, name="p1")
self.assertTrue(param.is_mandatory)
self.assertIs(param.param_type, interface.enums["enum1"])
self.assertIsNone(param.default_value)
self.assertIn("p2", function.params)
param = function.params["p2"]
self.verify_base_item(item=param, name="p2")
self.assertTrue(param.is_mandatory)
self.assertIs(param.param_type, interface.enums["enum1"])
self.assertIs(param.default_value,
interface.enums["enum1"].elements["element2"])
self.assertIn("p3", function.params)
param = function.params["p3"]
self.verify_base_item(item=param, name="p3", design_description=[""])
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.Boolean)
self.assertEqual(False, param.default_value)
# Function notification "Function2"
self.assertIn(
(interface.enums["FunctionID"].elements["Function2_id"],
interface.enums["messageType"].elements["notification"]),
interface.functions)
function = interface.functions[
(interface.enums["FunctionID"].elements["Function2_id"],
interface.enums["messageType"].elements["notification"])]
self.verify_base_item(item=function,
name="Function2",
description=["Function2 description"],
platform="function2 platform")
self.assertIs(function.function_id,
interface.enums["FunctionID"].elements["Function2_id"])
self.assertIs(function.message_type,
interface.enums["messageType"].elements["notification"])
self.assertEqual(3, len(function.params))
self.assertIn("n1", function.params)
param = function.params["n1"]
self.verify_base_item(item=param, name="n1", todos=["n1 todo"])
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.EnumSubset)
self.assertIs(param.param_type.enum, interface.enums["enum1"])
self.assertDictEqual(
{"element2": interface.enums["enum1"].elements["element2"],
"element3": interface.enums["enum1"].elements["element3"]},
param.param_type.allowed_elements)
self.assertIsNone(param.default_value)
self.assertIn("n2", function.params)
param = function.params["n2"]
self.verify_base_item(item=param, name="n2", todos=["n2 todo"])
self.assertTrue(param.is_mandatory)
self.assertIsInstance(param.param_type, generator.Model.Array)
self.assertEqual(1, param.param_type.min_size)
self.assertEqual(100, param.param_type.max_size)
self.assertIsInstance(param.param_type.element_type,
generator.Model.EnumSubset)
self.assertIs(param.param_type.element_type.enum,
interface.enums["enum1"])
self.assertDictEqual(
{"element1": interface.enums["enum1"].elements["element1"],
"element3": interface.enums["enum1"].elements["element3"]},
param.param_type.element_type.allowed_elements)
self.assertIsNone(param.default_value)
self.assertIn("n3", function.params)
param = function.params["n3"]
self.verify_base_item(item=param, name="n3")
self.assertEqual(False, param.is_mandatory)
self.assertIs(param.param_type, interface.structs["struct2"])
self.assertIsNone(param.default_value)
def verify_base_item(self, item, name, description=None,
design_description=None, issues=None, todos=None,
platform=None):
"""Verify base interface item variables."""
self.assertEqual(name, item.name)
self.assertSequenceEqual(self.get_list(description), item.description)
self.assertSequenceEqual(self.get_list(design_description),
item.design_description)
self.assertSequenceEqual(self.get_list(issues), item.issues)
self.assertSequenceEqual(self.get_list(todos), item.todos)
self.assertEqual(platform, item.platform)
@staticmethod
def get_list(list=None):
"""Return provided list or empty list if None is provided."""
return list if list is not None else []
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | 1,349,733,835,912,469,200 | 40.906475 | 78 | 0.610529 | false |
BenTheElder/test-infra | gubernator/filters.py | 14 | 8242 | # Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import datetime
import hashlib
import os
import re
import time
import urllib
import urlparse
import jinja2
GITHUB_VIEW_TEMPLATE = 'https://github.com/%s/blob/%s/%s#L%s'
GITHUB_COMMIT_TEMPLATE = 'https://github.com/%s/commit/%s'
LINKIFY_RE = re.compile(
r'(^\s*/\S*/)(kubernetes/(\S+):(\d+)(?: \+0x[0-9a-f]+)?)$',
flags=re.MULTILINE)
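# Editor's note (illustrative): LINKIFY_RE matches Go stack-trace lines such
# as '/go/src/k8s.io/kubernetes/pkg/util/runtime/runtime.go:58 +0x6c',
# capturing the absolute prefix, the repo-relative 'kubernetes/...' chunk,
# the file path, and the line number; the sample path here is invented.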
def do_timestamp(unix_time, css_class='timestamp', tmpl='%F %H:%M'):
"""Convert an int Unix timestamp into a human-readable datetime."""
t = datetime.datetime.utcfromtimestamp(unix_time)
return jinja2.Markup('<span class="%s" data-epoch="%s">%s</span>' %
(css_class, unix_time, t.strftime(tmpl)))
def do_dt_to_epoch(dt):
return time.mktime(dt.timetuple())
def do_shorttimestamp(unix_time):
t = datetime.datetime.utcfromtimestamp(unix_time)
return jinja2.Markup('<span class="shorttimestamp" data-epoch="%s">%s</span>' %
(unix_time, t.strftime('%m-%d %H:%M')))
def do_duration(seconds):
"""Convert a numeric duration in seconds into a human-readable string."""
hours, seconds = divmod(seconds, 3600)
minutes, seconds = divmod(seconds, 60)
if hours:
return '%dh%dm' % (hours, minutes)
if minutes:
return '%dm%ds' % (minutes, seconds)
else:
if seconds < 10:
return '%.2fs' % seconds
return '%ds' % seconds
def do_slugify(inp):
"""Convert an arbitrary string into a url-safe slug."""
inp = re.sub(r'[^\w\s-]+', '', inp)
return re.sub(r'\s+', '-', inp).lower()
def do_linkify_stacktrace(inp, commit, repo):
"""Add links to a source code viewer for every mentioned source line."""
inp = unicode(jinja2.escape(inp))
if not commit:
return jinja2.Markup(inp) # this was already escaped, mark it safe!
def rep(m):
prefix, full, path, line = m.groups()
return '%s<a href="%s">%s</a>' % (
prefix,
GITHUB_VIEW_TEMPLATE % (repo, commit, path, line),
full)
return jinja2.Markup(LINKIFY_RE.sub(rep, inp))
def do_github_commit_link(commit, repo):
commit_url = jinja2.escape(GITHUB_COMMIT_TEMPLATE % (repo, commit))
return jinja2.Markup('<a href="%s">%s</a>' % (commit_url, commit[:8]))
def do_maybe_linkify(inp):
try:
if urlparse.urlparse(inp).scheme in ('http', 'https'):
inp = unicode(jinja2.escape(inp))
return jinja2.Markup('<a href="%s">%s</a>' % (inp, inp))
except (AttributeError, TypeError):
pass
return inp
def do_testcmd(name):
if name.startswith('k8s.io/'):
try:
pkg, name = name.split(' ')
except ValueError: # don't block the page render
logging.error('Unexpected Go unit test name %r', name)
return name
return 'go test -v %s -run %s$' % (pkg, name)
elif name.startswith('istio.io/'):
return ''
elif name.startswith('//'):
return 'bazel test %s' % name
elif name.startswith('verify '):
return 'make verify WHAT=%s' % name.split(' ')[1]
else:
name = re.sub(r'^\[k8s\.io\] ', '', name)
name_escaped = re.escape(name).replace('\\ ', '\\s')
test_args = ('--ginkgo.focus=%s$' % name_escaped)
return "go run hack/e2e.go -v --test --test_args='%s'" % test_args
def do_parse_pod_name(text):
"""Find the pod name from the failure and return the pod name."""
p = re.search(r' pod (\S+)', text)
if p:
return re.sub(r'[\'"\\:]', '', p.group(1))
else:
return ""
def do_label_attr(labels, name):
"""
    >>> do_label_attr(['needs-rebase', 'size/XS'], 'size')
'XS'
"""
name += '/'
for label in labels:
if label.startswith(name):
return label[len(name):]
return ''
def do_classify_size(payload):
"""
Determine the size class for a PR, based on either its labels or
on the magnitude of its changes.
"""
size = do_label_attr(payload['labels'], 'size')
if not size and 'additions' in payload and 'deletions' in payload:
lines = payload['additions'] + payload['deletions']
# based on mungegithub/mungers/size.go
for limit, label in [
(10, 'XS'),
(30, 'S'),
(100, 'M'),
(500, 'L'),
(1000, 'XL')
]:
if lines < limit:
return label
return 'XXL'
return size
def has_lgtm_without_missing_approval(payload, user):
labels = payload.get('labels', []) or []
return 'lgtm' in labels and not (
user in payload.get('approvers', [])
and 'approved' not in labels)
def do_render_status(payload, user):
states = set()
text = 'Pending'
if has_lgtm_without_missing_approval(payload, user):
text = 'LGTM'
elif user in payload.get('attn', {}):
text = payload['attn'][user].title()
if '#' in text: # strip start/end attn timestamps
text = text[:text.index('#')]
for ctx, (state, _url, desc) in payload.get('status', {}).items():
if ctx == 'Submit Queue' and state == 'pending':
if 'does not have lgtm' in desc.lower():
# Don't show overall status as pending when Submit
# won't continue without LGTM.
continue
if ctx == 'tide' and state == 'pending':
# Ignore pending tide statuses for now.
continue
if ctx == 'code-review/reviewable' and state == 'pending':
# Reviewable isn't a CI, so we don't care if it's pending.
# Its dashboard might replace all of this eventually.
continue
states.add(state)
icon = ''
title = ''
if 'failure' in states:
icon = 'x'
state = 'failure'
title = 'failing tests'
elif 'pending' in states:
icon = 'primitive-dot'
state = 'pending'
title = 'pending tests'
elif 'success' in states:
icon = 'check'
state = 'success'
title = 'tests passing'
if icon:
icon = '<span class="text-%s octicon octicon-%s" title="%s"></span>' % (
state, icon, title)
return jinja2.Markup('%s%s' % (icon, text))
def do_get_latest(payload, user):
text = payload.get('attn', {}).get(user)
if not text:
return None
if '#' not in text:
return None
_text, _start, latest = text.rsplit('#', 2)
return float(latest)
def do_ltrim(s, needle):
if s.startswith(needle):
return s[len(needle):]
return s
def do_select(seq, pred):
return filter(pred, seq)
def do_tg_url(testgrid_query, test_name=''):
if test_name:
regex = '^Overall$|' + re.escape(test_name)
testgrid_query += '&include-filter-by-regex=%s' % urllib.quote(regex)
return 'https://testgrid.k8s.io/%s' % testgrid_query
def do_gcs_browse_url(gcs_path):
if not gcs_path.endswith('/'):
gcs_path += '/'
return 'https://gcsweb.k8s.io/gcs' + gcs_path
static_hashes = {}
def do_static(filename):
filename = 'static/%s' % filename
if filename not in static_hashes:
data = open(filename).read()
static_hashes[filename] = hashlib.sha1(data).hexdigest()[:10]
return '/%s?%s' % (filename, static_hashes[filename])
do_basename = os.path.basename
do_dirname = os.path.dirname
do_quote_plus = urllib.quote_plus
def register(filters):
"""Register do_* functions in this module in a dictionary."""
for name, func in globals().items():
if name.startswith('do_'):
filters[name[3:]] = func
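# --- Editor's illustrative sketch (not part of the original module) ---
# Shows how these filters are meant to be consumed: register() fills a
# mapping such as a Jinja2 environment's `filters` dict. The smoke test
# below only touches pure helpers and assumes no App Engine context.
if __name__ == '__main__':
    demo_filters = {}
    register(demo_filters)
    print demo_filters['duration'](3725)            # -> '1h2m'
    print demo_filters['slugify']('Hello, World!')  # -> 'hello-world'
    print do_classify_size({'labels': [], 'additions': 40, 'deletions': 10})  # -> 'M'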
| apache-2.0 | -669,064,552,855,797,500 | 29.639405 | 83 | 0.586751 | false |
jsoref/django | django/db/backends/oracle/base.py | 20 | 24998 | """
Oracle database backend for Django.
Requires cx_Oracle: http://cx-oracle.sourceforge.net/
"""
from __future__ import unicode_literals
import datetime
import decimal
import os
import platform
import sys
import warnings
from django.conf import settings
from django.db import utils
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.base.validation import BaseDatabaseValidation
from django.utils import six, timezone
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.duration import duration_string
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import cached_property
def _setup_environment(environ):
# Cygwin requires some special voodoo to set the environment variables
# properly so that Oracle will see them.
if platform.system().upper().startswith('CYGWIN'):
try:
import ctypes
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading ctypes: %s; "
"the Oracle backend requires ctypes to "
"operate correctly under Cygwin." % e)
kernel32 = ctypes.CDLL('kernel32')
for name, value in environ:
kernel32.SetEnvironmentVariableA(name, value)
else:
os.environ.update(environ)
_setup_environment([
# Oracle takes client-side character set encoding from the environment.
('NLS_LANG', '.UTF8'),
# This prevents unicode from getting mangled by getting encoded into the
# potentially non-unicode database character set.
('ORA_NCHAR_LITERAL_REPLACE', 'TRUE'),
])
try:
import cx_Oracle as Database
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading cx_Oracle module: %s" % e)
# Some of these import cx_Oracle, so import them after checking if it's installed.
from .client import DatabaseClient # isort:skip
from .creation import DatabaseCreation # isort:skip
from .features import DatabaseFeatures # isort:skip
from .introspection import DatabaseIntrospection # isort:skip
from .operations import DatabaseOperations # isort:skip
from .schema import DatabaseSchemaEditor # isort:skip
from .utils import Oracle_datetime, convert_unicode # isort:skip
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
class _UninitializedOperatorsDescriptor(object):
def __get__(self, instance, cls=None):
# If connection.operators is looked up before a connection has been
# created, transparently initialize connection.operators to avert an
# AttributeError.
if instance is None:
raise AttributeError("operators not available as class attribute")
# Creating a cursor will initialize the operators.
instance.cursor().close()
return instance.__dict__['operators']
class DatabaseWrapper(BaseDatabaseWrapper):
vendor = 'oracle'
# This dictionary maps Field objects to their associated Oracle column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
#
# Any format strings starting with "qn_" are quoted before being used in the
# output (the "qn_" prefix is stripped before the lookup is performed.
data_types = {
'AutoField': 'NUMBER(11)',
'BinaryField': 'BLOB',
'BooleanField': 'NUMBER(1)',
'CharField': 'NVARCHAR2(%(max_length)s)',
'CommaSeparatedIntegerField': 'VARCHAR2(%(max_length)s)',
'DateField': 'DATE',
'DateTimeField': 'TIMESTAMP',
'DecimalField': 'NUMBER(%(max_digits)s, %(decimal_places)s)',
'DurationField': 'INTERVAL DAY(9) TO SECOND(6)',
'FileField': 'NVARCHAR2(%(max_length)s)',
'FilePathField': 'NVARCHAR2(%(max_length)s)',
'FloatField': 'DOUBLE PRECISION',
'IntegerField': 'NUMBER(11)',
'BigIntegerField': 'NUMBER(19)',
'IPAddressField': 'VARCHAR2(15)',
'GenericIPAddressField': 'VARCHAR2(39)',
'NullBooleanField': 'NUMBER(1)',
'OneToOneField': 'NUMBER(11)',
'PositiveIntegerField': 'NUMBER(11)',
'PositiveSmallIntegerField': 'NUMBER(11)',
'SlugField': 'NVARCHAR2(%(max_length)s)',
'SmallIntegerField': 'NUMBER(11)',
'TextField': 'NCLOB',
'TimeField': 'TIMESTAMP',
'URLField': 'VARCHAR2(%(max_length)s)',
'UUIDField': 'VARCHAR2(32)',
}
data_type_check_constraints = {
'BooleanField': '%(qn_column)s IN (0,1)',
'NullBooleanField': '(%(qn_column)s IN (0,1)) OR (%(qn_column)s IS NULL)',
'PositiveIntegerField': '%(qn_column)s >= 0',
'PositiveSmallIntegerField': '%(qn_column)s >= 0',
}
operators = _UninitializedOperatorsDescriptor()
_standard_operators = {
'exact': '= %s',
'iexact': '= UPPER(%s)',
'contains': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'icontains': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'gt': '> %s',
'gte': '>= %s',
'lt': '< %s',
'lte': '<= %s',
'startswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'endswith': "LIKE TRANSLATE(%s USING NCHAR_CS) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'istartswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
'iendswith': "LIKE UPPER(TRANSLATE(%s USING NCHAR_CS)) ESCAPE TRANSLATE('\\' USING NCHAR_CS)",
}
_likec_operators = _standard_operators.copy()
_likec_operators.update({
'contains': "LIKEC %s ESCAPE '\\'",
'icontains': "LIKEC UPPER(%s) ESCAPE '\\'",
'startswith': "LIKEC %s ESCAPE '\\'",
'endswith': "LIKEC %s ESCAPE '\\'",
'istartswith': "LIKEC UPPER(%s) ESCAPE '\\'",
'iendswith': "LIKEC UPPER(%s) ESCAPE '\\'",
})
# The patterns below are used to generate SQL pattern lookup clauses when
# the right-hand side of the lookup isn't a raw string (it might be an expression
# or the result of a bilateral transformation).
# In those cases, special characters for LIKE operators (e.g. \, *, _) should be
# escaped on database side.
#
# Note: we use str.format() here for readability as '%' is used as a wildcard for
# the LIKE operator.
pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\', '\\'), '%%', '\%%'), '_', '\_')"
_pattern_ops = {
'contains': "'%%' || {} || '%%'",
'icontains': "'%%' || UPPER({}) || '%%'",
'startswith': "{} || '%%'",
'istartswith': "UPPER({}) || '%%'",
'endswith': "'%%' || {}",
'iendswith': "'%%' || UPPER({})",
}
_standard_pattern_ops = {k: "LIKE TRANSLATE( " + v + " USING NCHAR_CS)"
" ESCAPE TRANSLATE('\\' USING NCHAR_CS)"
for k, v in _pattern_ops.items()}
_likec_pattern_ops = {k: "LIKEC " + v + " ESCAPE '\\'"
for k, v in _pattern_ops.items()}
Database = Database
SchemaEditorClass = DatabaseSchemaEditor
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.features = DatabaseFeatures(self)
use_returning_into = self.settings_dict["OPTIONS"].get('use_returning_into', True)
self.features.can_return_id_from_insert = use_returning_into
self.ops = DatabaseOperations(self)
self.client = DatabaseClient(self)
self.creation = DatabaseCreation(self)
self.introspection = DatabaseIntrospection(self)
self.validation = BaseDatabaseValidation(self)
def _connect_string(self):
settings_dict = self.settings_dict
if not settings_dict['HOST'].strip():
settings_dict['HOST'] = 'localhost'
if settings_dict['PORT'].strip():
dsn = Database.makedsn(settings_dict['HOST'],
int(settings_dict['PORT']),
settings_dict['NAME'])
else:
dsn = settings_dict['NAME']
return "%s/%s@%s" % (settings_dict['USER'],
settings_dict['PASSWORD'], dsn)
def get_connection_params(self):
conn_params = self.settings_dict['OPTIONS'].copy()
if 'use_returning_into' in conn_params:
del conn_params['use_returning_into']
return conn_params
def get_new_connection(self, conn_params):
conn_string = convert_unicode(self._connect_string())
return Database.connect(conn_string, **conn_params)
def init_connection_state(self):
cursor = self.create_cursor()
# Set the territory first. The territory overrides NLS_DATE_FORMAT
# and NLS_TIMESTAMP_FORMAT to the territory default. When all of
        # these are set in a single statement it isn't clear what is supposed
# to happen.
cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'")
# Set Oracle date to ANSI date format. This only needs to execute
# once when we create a new connection. We also set the Territory
# to 'AMERICA' which forces Sunday to evaluate to a '1' in
# TO_CHAR().
cursor.execute(
"ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'"
" NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'"
+ (" TIME_ZONE = 'UTC'" if settings.USE_TZ else ''))
cursor.close()
if 'operators' not in self.__dict__:
# Ticket #14149: Check whether our LIKE implementation will
# work for this connection or we need to fall back on LIKEC.
# This check is performed only once per DatabaseWrapper
# instance per thread, since subsequent connections will use
# the same settings.
cursor = self.create_cursor()
try:
cursor.execute("SELECT 1 FROM DUAL WHERE DUMMY %s"
% self._standard_operators['contains'],
['X'])
except DatabaseError:
self.operators = self._likec_operators
self.pattern_ops = self._likec_pattern_ops
else:
self.operators = self._standard_operators
self.pattern_ops = self._standard_pattern_ops
cursor.close()
try:
self.connection.stmtcachesize = 20
except AttributeError:
# Django docs specify cx_Oracle version 4.3.1 or higher, but
# stmtcachesize is available only in 4.3.2 and up.
pass
# Ensure all changes are preserved even when AUTOCOMMIT is False.
if not self.get_autocommit():
self.commit()
def create_cursor(self):
return FormatStylePlaceholderCursor(self.connection)
def _commit(self):
if self.connection is not None:
try:
return self.connection.commit()
except Database.DatabaseError as e:
# cx_Oracle 5.0.4 raises a cx_Oracle.DatabaseError exception
# with the following attributes and values:
# code = 2091
# message = 'ORA-02091: transaction rolled back
# 'ORA-02291: integrity constraint (TEST_DJANGOTEST.SYS
# _C00102056) violated - parent key not found'
# We convert that particular case to our IntegrityError exception
x = e.args[0]
if hasattr(x, 'code') and hasattr(x, 'message') \
and x.code == 2091 and 'ORA-02291' in x.message:
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
# Oracle doesn't support releasing savepoints. But we fake them when query
# logging is enabled to keep query counts consistent with other backends.
def _savepoint_commit(self, sid):
if self.queries_logged:
self.queries_log.append({
'sql': '-- RELEASE SAVEPOINT %s (faked)' % self.ops.quote_name(sid),
'time': '0.000',
})
def _set_autocommit(self, autocommit):
with self.wrap_database_errors:
self.connection.autocommit = autocommit
def check_constraints(self, table_names=None):
"""
        To check constraints, we set constraints to immediate. Then, when
        we're done, we must ensure they are returned to deferred.
"""
self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
def is_usable(self):
try:
self.connection.ping()
except Database.Error:
return False
else:
return True
@cached_property
def oracle_full_version(self):
with self.temporary_connection():
return self.connection.version
@cached_property
def oracle_version(self):
try:
return int(self.oracle_full_version.split('.')[0])
except ValueError:
return None
class OracleParam(object):
"""
Wrapper object for formatting parameters for Oracle. If the string
representation of the value is large enough (greater than 4000 characters)
the input size needs to be set as CLOB. Alternatively, if the parameter
has an `input_size` attribute, then the value of the `input_size` attribute
will be used instead. Otherwise, no input size will be set for the
parameter when executing the query.
"""
def __init__(self, param, cursor, strings_only=False):
# With raw SQL queries, datetimes can reach this function
# without being converted by DateTimeField.get_db_prep_value.
if settings.USE_TZ and (isinstance(param, datetime.datetime) and
not isinstance(param, Oracle_datetime)):
if timezone.is_aware(param):
warnings.warn(
"The Oracle database adapter received an aware datetime (%s), "
"probably from cursor.execute(). Update your code to pass a "
"naive datetime in the database connection's time zone (UTC by "
"default).", RemovedInDjango20Warning)
param = param.astimezone(timezone.utc).replace(tzinfo=None)
param = Oracle_datetime.from_datetime(param)
if isinstance(param, datetime.timedelta):
param = duration_string(param)
if ' ' not in param:
param = '0 ' + param
string_size = 0
# Oracle doesn't recognize True and False correctly in Python 3.
# The conversion done below works both in 2 and 3.
if param is True:
param = 1
elif param is False:
param = 0
if hasattr(param, 'bind_parameter'):
self.force_bytes = param.bind_parameter(cursor)
elif isinstance(param, Database.Binary):
self.force_bytes = param
else:
# To transmit to the database, we need Unicode if supported
# To get size right, we must consider bytes.
self.force_bytes = convert_unicode(param, cursor.charset,
strings_only)
if isinstance(self.force_bytes, six.string_types):
# We could optimize by only converting up to 4000 bytes here
string_size = len(force_bytes(param, cursor.charset, strings_only))
if hasattr(param, 'input_size'):
# If parameter has `input_size` attribute, use that.
self.input_size = param.input_size
elif string_size > 4000:
# Mark any string param greater than 4000 characters as a CLOB.
self.input_size = Database.CLOB
else:
self.input_size = None
class VariableWrapper(object):
"""
An adapter class for cursor variables that prevents the wrapped object
from being converted into a string when used to instantiate an OracleParam.
This can be used generally for any other object that should be passed into
Cursor.execute as-is.
"""
def __init__(self, var):
self.var = var
def bind_parameter(self, cursor):
return self.var
def __getattr__(self, key):
return getattr(self.var, key)
def __setattr__(self, key, value):
if key == 'var':
self.__dict__[key] = value
else:
setattr(self.var, key, value)
class FormatStylePlaceholderCursor(object):
"""
Django uses "format" (e.g. '%s') style placeholders, but Oracle uses ":var"
style. This fixes it -- but note that if you want to use a literal "%s" in
a query, you'll need to use "%%s".
We also do automatic conversion between Unicode on the Python side and
UTF-8 -- for talking to Oracle -- in here.
"""
charset = 'utf-8'
def __init__(self, connection):
self.cursor = connection.cursor()
# Necessary to retrieve decimal values without rounding error.
self.cursor.numbersAsStrings = True
# Default arraysize of 1 is highly sub-optimal.
self.cursor.arraysize = 100
def _format_params(self, params):
try:
return {k: OracleParam(v, self, True) for k, v in params.items()}
except AttributeError:
return tuple(OracleParam(p, self, True) for p in params)
def _guess_input_sizes(self, params_list):
# Try dict handling; if that fails, treat as sequence
if hasattr(params_list[0], 'keys'):
sizes = {}
for params in params_list:
for k, value in params.items():
if value.input_size:
sizes[k] = value.input_size
self.setinputsizes(**sizes)
else:
# It's not a list of dicts; it's a list of sequences
sizes = [None] * len(params_list[0])
for params in params_list:
for i, value in enumerate(params):
if value.input_size:
sizes[i] = value.input_size
self.setinputsizes(*sizes)
def _param_generator(self, params):
# Try dict handling; if that fails, treat as sequence
if hasattr(params, 'items'):
return {k: v.force_bytes for k, v in params.items()}
else:
return [p.force_bytes for p in params]
def _fix_for_params(self, query, params):
        # cx_Oracle wants no trailing ';' for SQL statements. For PL/SQL, it
        # does want a trailing ';' but not a trailing '/'. However, these
# characters must be included in the original query in case the query
# is being passed to SQL*Plus.
if query.endswith(';') or query.endswith('/'):
query = query[:-1]
if params is None:
params = []
query = convert_unicode(query, self.charset)
elif hasattr(params, 'keys'):
# Handle params as dict
args = {k: ":%s" % k for k in params.keys()}
query = convert_unicode(query % args, self.charset)
else:
# Handle params as sequence
args = [(':arg%d' % i) for i in range(len(params))]
query = convert_unicode(query % tuple(args), self.charset)
return query, self._format_params(params)
def execute(self, query, params=None):
query, params = self._fix_for_params(query, params)
self._guess_input_sizes([params])
try:
return self.cursor.execute(query, self._param_generator(params))
except Database.DatabaseError as e:
# cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400.
if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError):
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def executemany(self, query, params=None):
if not params:
# No params given, nothing to do
return None
# uniform treatment for sequences and iterables
params_iter = iter(params)
query, firstparams = self._fix_for_params(query, next(params_iter))
# we build a list of formatted params; as we're going to traverse it
# more than once, we can't make it lazy by using a generator
formatted = [firstparams] + [self._format_params(p) for p in params_iter]
self._guess_input_sizes(formatted)
try:
return self.cursor.executemany(query,
[self._param_generator(p) for p in formatted])
except Database.DatabaseError as e:
# cx_Oracle <= 4.4.0 wrongly raises a DatabaseError for ORA-01400.
if hasattr(e.args[0], 'code') and e.args[0].code == 1400 and not isinstance(e, IntegrityError):
six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
raise
def fetchone(self):
row = self.cursor.fetchone()
if row is None:
return row
return _rowfactory(row, self.cursor)
def fetchmany(self, size=None):
if size is None:
size = self.arraysize
return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchmany(size))
def fetchall(self):
return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchall())
def close(self):
try:
self.cursor.close()
except Database.InterfaceError:
# already closed
pass
def var(self, *args):
return VariableWrapper(self.cursor.var(*args))
def arrayvar(self, *args):
return VariableWrapper(self.cursor.arrayvar(*args))
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
else:
return getattr(self.cursor, attr)
def __iter__(self):
return CursorIterator(self.cursor)
class CursorIterator(six.Iterator):
"""
Cursor iterator wrapper that invokes our custom row factory.
"""
def __init__(self, cursor):
self.cursor = cursor
self.iter = iter(cursor)
def __iter__(self):
return self
def __next__(self):
return _rowfactory(next(self.iter), self.cursor)
def _rowfactory(row, cursor):
# Cast numeric values as the appropriate Python type based upon the
# cursor description, and convert strings to unicode.
casted = []
for value, desc in zip(row, cursor.description):
if value is not None and desc[1] is Database.NUMBER:
precision, scale = desc[4:6]
if scale == -127:
if precision == 0:
# NUMBER column: decimal-precision floating point
# This will normally be an integer from a sequence,
# but it could be a decimal value.
if '.' in value:
value = decimal.Decimal(value)
else:
value = int(value)
else:
# FLOAT column: binary-precision floating point.
# This comes from FloatField columns.
value = float(value)
elif precision > 0:
# NUMBER(p,s) column: decimal-precision fixed point.
# This comes from IntField and DecimalField columns.
if scale == 0:
value = int(value)
else:
value = decimal.Decimal(value)
elif '.' in value:
# No type information. This normally comes from a
# mathematical expression in the SELECT list. Guess int
# or Decimal based on whether it has a decimal point.
value = decimal.Decimal(value)
else:
value = int(value)
elif desc[1] in (Database.STRING, Database.FIXED_CHAR,
Database.LONG_STRING):
value = to_unicode(value)
casted.append(value)
return tuple(casted)
def to_unicode(s):
"""
Convert strings to Unicode objects (and return all other data types
unchanged).
"""
if isinstance(s, six.string_types):
return force_text(s)
return s
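# --- Editor's illustrative sketch (not part of the original module) ---
# Isolates the timedelta -> Oracle INTERVAL DAY TO SECOND conversion that
# OracleParam.__init__ performs above. Guarded so it runs only on direct
# execution, which (like importing this module at all) requires cx_Oracle.
if __name__ == '__main__':
    for td in (datetime.timedelta(minutes=5),
               datetime.timedelta(days=2, hours=1)):
        literal = duration_string(td)
        if ' ' not in literal:
            literal = '0 ' + literal  # Oracle wants an explicit day count
        print(literal)  # -> '0 00:05:00', then '2 01:00:00'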
| bsd-3-clause | -3,278,042,799,849,264,000 | 40.115132 | 110 | 0.592607 | false |
xuleiboy1234/autoTitle | tensorflow/tensorflow/contrib/predictor/saved_model_predictor.py | 55 | 6579 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A `Predictor` constructed from a `SavedModel`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from tensorflow.contrib.predictor import predictor
from tensorflow.contrib.saved_model.python.saved_model import reader
from tensorflow.contrib.saved_model.python.saved_model import signature_def_utils
from tensorflow.python.client import session
from tensorflow.python.framework import ops
from tensorflow.python.saved_model import loader
from tensorflow.python.saved_model import signature_constants
DEFAULT_TAGS = 'serve'
_DEFAULT_INPUT_ALTERNATIVE_FORMAT = 'default_input_alternative:{}'
def get_meta_graph_def(saved_model_dir, tags):
"""Gets `MetaGraphDef` from a directory containing a `SavedModel`.
Returns the `MetaGraphDef` for the given tag-set and SavedModel directory.
Args:
saved_model_dir: Directory containing the SavedModel.
tags: Comma separated list of tags used to identify the correct
`MetaGraphDef`.
Raises:
ValueError: An error when the given tags cannot be found.
Returns:
A `MetaGraphDef` corresponding to the given tags.
"""
saved_model = reader.read_saved_model(saved_model_dir)
set_of_tags = set([tag.strip() for tag in tags.split(',')])
for meta_graph_def in saved_model.meta_graphs:
if set(meta_graph_def.meta_info_def.tags) == set_of_tags:
return meta_graph_def
raise ValueError('Could not find MetaGraphDef with tags {}'.format(tags))
def _get_signature_def(signature_def_key, export_dir, tags):
"""Construct a `SignatureDef` proto."""
signature_def_key = (
signature_def_key or
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY)
metagraph_def = get_meta_graph_def(export_dir, tags)
try:
signature_def = signature_def_utils.get_signature_def_by_key(
metagraph_def,
signature_def_key)
except ValueError as e:
try:
formatted_key = _DEFAULT_INPUT_ALTERNATIVE_FORMAT.format(
signature_def_key)
signature_def = signature_def_utils.get_signature_def_by_key(
metagraph_def, formatted_key)
logging.warning('Could not find signature def "%s". '
'Using "%s" instead', signature_def_key, formatted_key)
except ValueError:
raise ValueError(
'Got signature_def_key "{}". Available signatures are {}. '
'Original error:\n{}'.format(
signature_def_key, list(metagraph_def.signature_def), e))
return signature_def
def _check_signature_arguments(signature_def_key,
signature_def,
input_names,
output_names):
"""Validates signature arguments for `SavedModelPredictor`."""
signature_def_key_specified = signature_def_key is not None
signature_def_specified = signature_def is not None
input_names_specified = input_names is not None
output_names_specified = output_names is not None
if input_names_specified != output_names_specified:
raise ValueError(
'input_names and output_names must both be specified or both be '
'unspecified.'
)
if (signature_def_key_specified + signature_def_specified +
input_names_specified > 1):
raise ValueError(
'You must specify at most one of signature_def_key OR signature_def OR'
'(input_names AND output_names).'
)
class SavedModelPredictor(predictor.Predictor):
"""A `Predictor` constructed from a `SavedModel`."""
def __init__(self,
export_dir,
signature_def_key=None,
signature_def=None,
input_names=None,
output_names=None,
tags=None,
graph=None):
"""Initialize a `CoreEstimatorPredictor`.
Args:
export_dir: a path to a directory containing a `SavedModel`.
signature_def_key: Optional string specifying the signature to use. If
`None`, then `DEFAULT_SERVING_SIGNATURE_DEF_KEY` is used. Only one of
`signature_def_key` and `signature_def` should be specified.
signature_def: A `SignatureDef` proto specifying the inputs and outputs
for prediction. Only one of `signature_def_key` and `signature_def`
should be specified.
input_names: A dictionary mapping strings to `Tensor`s in the `SavedModel`
that represent the input. The keys can be any string of the user's
choosing.
output_names: A dictionary mapping strings to `Tensor`s in the
`SavedModel` that represent the output. The keys can be any string of
the user's choosing.
tags: Optional. Comma separated list of tags that will be used to retrieve
the correct `SignatureDef`. Defaults to `DEFAULT_TAGS`.
      graph: Optional. The TensorFlow `graph` in which prediction should be
done.
Raises:
ValueError: If more than one of signature_def_key OR signature_def OR
(input_names AND output_names) is specified.
"""
_check_signature_arguments(
signature_def_key, signature_def, input_names, output_names)
tags = tags or DEFAULT_TAGS
self._graph = graph or ops.Graph()
with self._graph.as_default():
self._session = session.Session()
loader.load(self._session, tags.split(','), export_dir)
if input_names is None:
if signature_def is None:
signature_def = _get_signature_def(signature_def_key, export_dir, tags)
input_names = {k: v.name for k, v in signature_def.inputs.items()}
output_names = {k: v.name for k, v in signature_def.outputs.items()}
self._feed_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in input_names.items()}
self._fetch_tensors = {k: self._graph.get_tensor_by_name(v)
for k, v in output_names.items()}
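# --- Editor's illustrative sketch (not part of the original module) ---
# Typical construction and use of the predictor defined above. The export
# directory and the 'inputs' feed key are placeholders, not values from
# this file; real keys come from the SavedModel's SignatureDef. Predictor
# instances are callable with a dict of feeds (see predictor.Predictor).
if __name__ == '__main__':
  saved_model_predictor = SavedModelPredictor(export_dir='/tmp/my_saved_model')
  predictions = saved_model_predictor({'inputs': [[1.0, 2.0, 3.0]]})
  print(predictions)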
| mit | 4,567,240,406,883,969,000 | 38.39521 | 81 | 0.668947 | false |
lucc/alot | alot/widgets/search.py | 1 | 7118 | # Copyright (C) 2011-2012 Patrick Totzke <[email protected]>
# This file is released under the GNU GPL, version 3 or a later revision.
# For further details see the COPYING file
"""
Widgets specific to search mode
"""
import urwid
from ..settings.const import settings
from ..helper import shorten_author_string
from .utils import AttrFlipWidget
from .globals import TagWidget
class ThreadlineWidget(urwid.AttrMap):
"""
selectable line widget that represents a :class:`~alot.db.Thread`
in the :class:`~alot.buffers.SearchBuffer`.
"""
def __init__(self, tid, dbman):
self.dbman = dbman
self.tid = tid
self.thread = None # will be set by refresh()
self.tag_widgets = []
self.structure = None
self.rebuild()
normal = self.structure['normal']
focussed = self.structure['focus']
urwid.AttrMap.__init__(self, self.columns, normal, focussed)
def rebuild(self):
self.thread = self.dbman.get_thread(self.tid)
self.widgets = []
self.structure = settings.get_threadline_theming(self.thread)
columns = []
# combine width info and widget into an urwid.Column entry
def add_column(width, part):
width_tuple = self.structure[partname]['width']
if width_tuple[0] == 'weight':
columnentry = width_tuple + (part,)
else:
columnentry = ('fixed', width, part)
columns.append(columnentry)
# create a column for every part of the threadline
for partname in self.structure['parts']:
# build widget(s) around this part's content and remember them so
# that self.render() may change local attributes.
if partname == 'tags':
width, part = build_tags_part(self.thread.get_tags(),
self.structure['tags']['normal'],
self.structure['tags']['focus'])
if part:
add_column(width, part)
for w in part.widget_list:
self.widgets.append(w)
else:
width, part = build_text_part(partname, self.thread,
self.structure[partname])
add_column(width, part)
self.widgets.append(part)
self.columns = urwid.Columns(columns, dividechars=1)
self.original_widget = self.columns
def render(self, size, focus=False):
for w in self.widgets:
w.set_map('focus' if focus else 'normal')
return urwid.AttrMap.render(self, size, focus)
def selectable(self):
return True
def keypress(self, size, key):
return key
def get_thread(self):
return self.thread
def build_tags_part(tags, attr_normal, attr_focus):
"""
    create an urwid.Columns widget (wrapped in appropriate Attributes)
to display a list of tag strings, as part of a threadline.
:param tags: list of tag strings to include
:type tags: list of str
:param attr_normal: urwid attribute to use if unfocussed
:param attr_focus: urwid attribute to use if focussed
:return: overall width in characters and a Columns widget.
:rtype: tuple[int, urwid.Columns]
"""
    part_w = None
    tag_widgets = []
    cols = []
    width = -1
# create individual TagWidgets and sort them
tag_widgets = [TagWidget(t, attr_normal, attr_focus) for t in tags]
tag_widgets = sorted(tag_widgets)
for tag_widget in tag_widgets:
if not tag_widget.hidden:
wrapped_tagwidget = tag_widget
tag_width = tag_widget.width()
cols.append(('fixed', tag_width, wrapped_tagwidget))
width += tag_width + 1
if cols:
part_w = urwid.Columns(cols, dividechars=1)
return width, part_w
def build_text_part(name, thread, struct):
"""
    create an urwid.Text widget (wrapped in appropriate Attributes)
    to display a plain text part in a threadline.
:param name: id of part to build
:type name: str
:param thread: the thread to get local info for
:type thread: :class:`alot.db.thread.Thread`
:param struct: theming attributes for this part, as provided by
:class:`alot.settings.theme.Theme.get_threadline_theming`
:type struct: dict
:return: overall width (in characters) and a widget.
:rtype: tuple[int, AttrFliwWidget]
"""
part_w = None
width = None
# extract min and max allowed width from theme
minw = 0
maxw = None
width_tuple = struct['width']
if width_tuple is not None:
if width_tuple[0] == 'fit':
minw, maxw = width_tuple[1:]
content = prepare_string(name, thread, maxw)
# pad content if not long enough
if minw:
alignment = struct['alignment']
if alignment == 'left':
content = content.ljust(minw)
elif alignment == 'center':
content = content.center(minw)
else:
content = content.rjust(minw)
# define width and part_w
text = urwid.Text(content, wrap='clip')
width = text.pack()[0]
part_w = AttrFlipWidget(text, struct)
return width, part_w
def prepare_date_string(thread):
    newest = thread.get_newest_date()
if newest is not None:
datestring = settings.represent_datetime(newest)
return datestring
def prepare_mailcount_string(thread):
return "(%d)" % thread.get_total_messages()
def prepare_authors_string(thread):
return thread.get_authors_string() or '(None)'
def prepare_subject_string(thread):
return thread.get_subject() or ' '
def prepare_content_string(thread):
msgs = sorted(thread.get_messages().keys(),
key=lambda msg: msg.get_date(), reverse=True)
lastcontent = ' '.join(m.get_text_content() for m in msgs)
return lastcontent
def prepare_string(partname, thread, maxw):
"""
extract a content string for part 'partname' from 'thread' of maximal
length 'maxw'.
"""
# map part names to function extracting content string and custom shortener
prep = {
'mailcount': (prepare_mailcount_string, None),
'date': (prepare_date_string, None),
'authors': (prepare_authors_string, shorten_author_string),
'subject': (prepare_subject_string, None),
'content': (prepare_content_string, None),
}
s = ' ' # fallback value
if thread:
# get extractor and shortener
content, shortener = prep[partname]
# get string
s = content(thread)
# sanitize
s = s.replace('\n', ' ')
s = s.replace('\r', '')
# shorten if max width is requested
if maxw:
if len(s) > maxw and shortener:
s = shortener(s, maxw)
else:
s = s[:maxw]
return s
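# --- Editor's illustrative sketch (not part of the original module) ---
# Exercises prepare_string()'s dispatch and clipping with a stand-in
# thread object. _DemoThread is invented here and implements only the
# method the 'subject' part needs; run it as a module so the relative
# imports above resolve.
if __name__ == '__main__':
    class _DemoThread(object):
        def get_subject(self):
            return 'A rather long subject\nspanning two lines'
    print(prepare_string('subject', _DemoThread(), maxw=20))  # clipped to 20 chars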
| gpl-3.0 | 8,841,554,747,410,669,000 | 30.776786 | 79 | 0.605507 | false |
rahku/coreclr | src/ToolBox/SOS/tests/t_cmd_bpmd_methoddesc.py | 43 | 1308 | import lldb
import re
import testutils as test
# bpmd -md <MethodDesc pointer>
def runScenario(assembly, debugger, target):
process = target.GetProcess()
res = lldb.SBCommandReturnObject()
ci = debugger.GetCommandInterpreter()
# Run debugger, wait until libcoreclr is loaded,
# set breakpoint at Test.Main and stop there
test.stop_in_main(debugger, assembly)
md_addr = test.get_methoddesc(debugger, assembly, "Test.UnlikelyInlined")
ci.HandleCommand("bpmd -md %s" % md_addr, res)
out_msg = res.GetOutput()
err_msg = res.GetError()
print(out_msg)
print(err_msg)
# Interpreter must have this command and able to run it
test.assertTrue(res.Succeeded())
# Output is not empty
# Should be at least 'Adding pending breakpoints...'
test.assertTrue(len(out_msg) > 0)
# Error message is empty
test.assertTrue(len(err_msg) == 0)
process.Continue()
    # Process must be stopped at UnlikelyInlined
test.assertEqual(process.GetState(), lldb.eStateStopped)
# The reason of this stop must be a breakpoint
test.assertEqual(process.GetSelectedThread().GetStopReason(),
lldb.eStopReasonBreakpoint)
#
    # Continue current process and check its exit code
test.exit_lldb(debugger, assembly)
| mit | 4,104,002,611,073,469,000 | 28.066667 | 77 | 0.691131 | false |
kkozarev/mwacme | synchrotron_fitting/GS_kappa_function.py | 1 | 2634 | import Get_MW
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import numpy as np
N=10 #number of frequencies
# These values are starting positions for coronal CME radio observations
ParmIn=29*[0] # input array
ParmIn[0] =8e19 # Area, cm^2
ParmIn[1] =5e9 # Depth, cm
ParmIn[2] =3e6 # T_0, K
ParmIn[3] =0.05 # \eps (not used in this example)
ParmIn[4] =6.0 # \kappa (not used in this example)
ParmIn[5] =16 # number of integration nodes
ParmIn[6] =0.1 # E_min, MeV
ParmIn[7] =10.0 # E_max, MeV
ParmIn[8] =1.0 # E_break, MeV (not used in this example)
ParmIn[9] =4.0 # \delta_1
ParmIn[10]=6.0 # \delta_2 (not used in this example)
ParmIn[11]=1e8 # n_0 - thermal electron density, cm^{-3}
ParmIn[12]=1e6 # n_b - nonthermal electron density, cm^{-3}
ParmIn[13]=5.0 # B - magnetic field, G
ParmIn[14]=60.0 # theta - the viewing angle, degrees
ParmIn[15]=8.e7 # starting frequency to calculate spectrum, Hz
ParmIn[16]=0.005 # logarithmic step in frequency
ParmIn[17]=6 # Index of distribution over energy (KAP is chosen)
ParmIn[18]=N # Number of frequencies (specified above)
ParmIn[19]=3 # Index of distribution over pitch-angle (GLC is chosen)
ParmIn[20]=90.0 # loss-cone boundary, degrees
ParmIn[21]=0.0 # beam direction (degrees) in GAU and SGA (not used in this example)
ParmIn[22]=0.2 # \Delta\mu
ParmIn[23]=0.0 # a_4 in SGA (not used in this example)
ParmIn[25]=12.0 # f^C_cr
ParmIn[26]=12.0 # f^WH_cr
ParmIn[27]=1 # matching on
ParmIn[28]=1 # Q-optimization on
def init_frequency_grid(startfreq,endfreq,numfreq=N):
Params = ParmIn
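    # NB (editor's comment): this binds Params to the module-level ParmIn
    # list rather than copying it, so the grid settings written below also
    # persist in ParmIn for later calls such as gs_kappa_func.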
Params[16]=np.log10(endfreq/startfreq)/numfreq
Params[15]=startfreq*1.e6
Params[18]=numfreq
s=Get_MW.GET_MW(Params) # calling the main function
f=s[0] # emission frequency (GHz)
fmhz=[i*1000. for i in f]
return fmhz
def gs_kappa_func(freqgrid, temp=ParmIn[2],dens=ParmIn[11],kappa=ParmIn[4],emax=ParmIn[7],numfreq=N):
Params = ParmIn
Params[2]=temp
Params[4]=kappa
Params[7]=emax
Params[11]=dens
Params[15]=freqgrid[0]/1.e6
Params[17]=6
if not numfreq:
numfreq=len(freqgrid)
Params[16]=np.log10(freqgrid[-1]/freqgrid[0])/numfreq
ParmIn[18]=numfreq
s=Get_MW.GET_MW(ParmIn) # calling the main function
I_O=s[1] # observed (at the Earth) intensity, O-mode (sfu)
k_O=s[2] # exp(-tau), O-mode
#I_X=s[3] # observed (at the Earth) intensity, X-mode (sfu)
#k_X=s[4] # exp(-tau), X-mode
return I_O
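# --- Editor's illustrative driver (not part of the original script) ---
# Builds a frequency grid and evaluates the kappa-distribution spectrum on
# it; requires the compiled Get_MW extension imported above. The 80-300 MHz
# range is an assumption chosen to match the coronal defaults, and the
# 'agg' backend selected above means the plot is written to disk.
if __name__ == '__main__':
    grid = init_frequency_grid(80., 300.)  # MHz in, MHz out (fmhz)
    flux = gs_kappa_func(grid)             # O-mode intensity, sfu
    plt.loglog(grid, flux)
    plt.xlabel('Frequency [MHz]')
    plt.ylabel('O-mode flux [sfu]')
    plt.savefig('gs_kappa_spectrum.png')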
| gpl-2.0 | 8,581,899,702,963,685,000 | 35.628571 | 101 | 0.635156 | false |
eenchev/idea-note-taking-app | env/lib/python2.7/site-packages/werkzeug/posixemulation.py | 364 | 3519 | # -*- coding: utf-8 -*-
r"""
werkzeug.posixemulation
~~~~~~~~~~~~~~~~~~~~~~~
Provides a POSIX emulation for some features that are relevant to
web applications. The main purpose is to simplify support for
systems such as Windows NT that are not 100% POSIX compatible.
Currently this only implements a :func:`rename` function that
follows POSIX semantics. Eg: if the target file already exists it
will be replaced without asking.
This module was introduced in 0.6.1 and is not a public interface.
It might become one in later versions of Werkzeug.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
import errno
import time
import random
from ._compat import to_unicode
from .filesystem import get_filesystem_encoding
can_rename_open_file = False
if os.name == 'nt': # pragma: no cover
_rename = lambda src, dst: False
_rename_atomic = lambda src, dst: False
try:
import ctypes
_MOVEFILE_REPLACE_EXISTING = 0x1
_MOVEFILE_WRITE_THROUGH = 0x8
_MoveFileEx = ctypes.windll.kernel32.MoveFileExW
def _rename(src, dst):
src = to_unicode(src, get_filesystem_encoding())
dst = to_unicode(dst, get_filesystem_encoding())
if _rename_atomic(src, dst):
return True
retry = 0
rv = False
while not rv and retry < 100:
rv = _MoveFileEx(src, dst, _MOVEFILE_REPLACE_EXISTING |
_MOVEFILE_WRITE_THROUGH)
if not rv:
time.sleep(0.001)
retry += 1
return rv
# new in Vista and Windows Server 2008
_CreateTransaction = ctypes.windll.ktmw32.CreateTransaction
_CommitTransaction = ctypes.windll.ktmw32.CommitTransaction
_MoveFileTransacted = ctypes.windll.kernel32.MoveFileTransactedW
_CloseHandle = ctypes.windll.kernel32.CloseHandle
can_rename_open_file = True
def _rename_atomic(src, dst):
ta = _CreateTransaction(None, 0, 0, 0, 0, 1000, 'Werkzeug rename')
if ta == -1:
return False
try:
retry = 0
rv = False
while not rv and retry < 100:
rv = _MoveFileTransacted(src, dst, None, None,
_MOVEFILE_REPLACE_EXISTING |
_MOVEFILE_WRITE_THROUGH, ta)
if rv:
rv = _CommitTransaction(ta)
break
else:
time.sleep(0.001)
retry += 1
return rv
finally:
_CloseHandle(ta)
except Exception:
pass
def rename(src, dst):
# Try atomic or pseudo-atomic rename
if _rename(src, dst):
return
# Fall back to "move away and replace"
try:
os.rename(src, dst)
except OSError as e:
if e.errno != errno.EEXIST:
raise
old = "%s-%08x" % (dst, random.randint(0, sys.maxint))
os.rename(dst, old)
os.rename(src, dst)
try:
os.unlink(old)
except Exception:
pass
else:
rename = os.rename
can_rename_open_file = True
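# --- Editor's illustrative sketch (not part of the original module) ---
# Demonstrates the one guarantee this module adds: rename() silently
# replaces an existing target on every platform, as POSIX rename() does.
# Run as a module (python -m werkzeug.posixemulation) so the relative
# import above resolves; the temp directory is created just for the demo.
if __name__ == '__main__':
    import shutil
    import tempfile
    tmp = tempfile.mkdtemp()
    try:
        src = os.path.join(tmp, 'src.txt')
        dst = os.path.join(tmp, 'dst.txt')
        for path in (src, dst):
            with open(path, 'w') as f:
                f.write(path)
        rename(src, dst)  # dst already exists; no OSError is raised
        with open(dst) as f:
            assert f.read() == src
        print('rename() replaced the existing target')
    finally:
        shutil.rmtree(tmp)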
| mit | 2,454,830,130,427,693,600 | 32.198113 | 78 | 0.539642 | false |
jcftang/ansible | lib/ansible/modules/cloud/ovirt/ovirt_affinity_labels.py | 12 | 6828 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import traceback
try:
import ovirtsdk4.types as otypes
except ImportError:
pass
from collections import defaultdict
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
create_connection,
ovirt_full_argument_spec,
)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ovirt_affinity_labels
short_description: Module to manage affinity labels in oVirt
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "This module manage affinity labels in oVirt. It can also manage assignments
of those labels to hosts and VMs."
options:
name:
description:
- "Name of the the affinity label to manage."
required: true
state:
description:
- "Should the affinity label be present or absent."
choices: ['present', 'absent']
default: present
cluster:
description:
- "Name of the cluster where vms and hosts resides."
vms:
description:
- "List of the VMs names, which should have assigned this affinity label."
hosts:
description:
- "List of the hosts names, which should have assigned this affinity label."
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Create(if not exists) and assign affinity label to vms vm1 and vm2 and host host1
- ovirt_affinity_labels:
name: mylabel
cluster: mycluster
vms:
- vm1
- vm2
hosts:
- host1
# To detach all VMs from label
- ovirt_affinity_labels:
name: mylabel
cluster: mycluster
vms: []
# Remove affinity label
- ovirt_affinity_labels:
state: absent
name: mylabel
'''
RETURN = '''
id:
description: ID of the affinity label which is managed
returned: On success if affinity label is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
affinity_label:
description: "Dictionary of all the affinity label attributes. Affinity label attributes can be found on your oVirt instance
                  at the following URL: https://ovirt.example.com/ovirt-engine/api/model#types/affinity_label."
returned: On success if affinity label is found.
'''
class AffinityLabelsModule(BaseModule):
def build_entity(self):
return otypes.AffinityLabel(name=self._module.params['name'])
def post_create(self, entity):
self.update_check(entity)
def pre_remove(self, entity):
self._module.params['vms'] = []
self._module.params['hosts'] = []
self.update_check(entity)
def _update_label_assignments(self, entity, name, label_obj_type):
objs_service = getattr(self._connection.system_service(), '%s_service' % name)()
if self._module.params[name] is not None:
objs = self._connection.follow_link(getattr(entity, name))
objs_names = defaultdict(list)
for obj in objs:
labeled_entity = objs_service.service(obj.id).get()
if self._module.params['cluster'] is None:
objs_names[labeled_entity.name].append(obj.id)
elif self._connection.follow_link(labeled_entity.cluster).name == self._module.params['cluster']:
objs_names[labeled_entity.name].append(obj.id)
for obj in self._module.params[name]:
if obj not in objs_names:
for obj_id in objs_service.list(
search='name=%s and cluster=%s' % (obj, self._module.params['cluster'])
):
label_service = getattr(self._service.service(entity.id), '%s_service' % name)()
if not self._module.check_mode:
label_service.add(**{
name[:-1]: label_obj_type(id=obj_id.id)
})
self.changed = True
for obj in objs_names:
if obj not in self._module.params[name]:
label_service = getattr(self._service.service(entity.id), '%s_service' % name)()
if not self._module.check_mode:
for obj_id in objs_names[obj]:
label_service.service(obj_id).remove()
self.changed = True
def update_check(self, entity):
self._update_label_assignments(entity, 'vms', otypes.Vm)
self._update_label_assignments(entity, 'hosts', otypes.Host)
return True
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=['present', 'absent'],
default='present',
),
cluster=dict(default=None),
name=dict(default=None, required=True),
vms=dict(default=None, type='list'),
hosts=dict(default=None, type='list'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
('state', 'present', ['cluster']),
],
)
check_sdk(module)
try:
connection = create_connection(module.params.pop('auth'))
affinity_labels_service = connection.system_service().affinity_labels_service()
affinity_labels_module = AffinityLabelsModule(
connection=connection,
module=module,
service=affinity_labels_service,
)
state = module.params['state']
if state == 'present':
ret = affinity_labels_module.create()
elif state == 'absent':
ret = affinity_labels_module.remove()
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=False)
if __name__ == "__main__":
main()
| gpl-3.0 | 8,548,192,868,156,203,000 | 31.985507 | 128 | 0.612332 | false |
Gateworks/platform-external-chromium_org | tools/telemetry/telemetry/core/discover_unittest.py | 25 | 1836 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import unittest
from telemetry.core import discover
from telemetry.core import util
class DiscoverTest(unittest.TestCase):
def setUp(self):
self._base_dir = util.GetUnittestDataDir()
self._start_dir = os.path.join(self._base_dir, 'discoverable_classes')
self._base_class = Exception
def testDiscoverClassesBasic(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'discover_dummyclass': 'DummyException',
'another_discover_dummyclass': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesWithPattern(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
pattern='another*')
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'another_discover_dummyclass': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
def testDiscoverClassesByClassName(self):
classes = discover.DiscoverClasses(
self._start_dir, self._base_dir, self._base_class,
index_by_class_name=True)
actual_classes = dict(
(name, cls.__name__) for name, cls in classes.iteritems())
expected_classes = {
'dummy_exception': 'DummyException',
'dummy_exception_impl1': 'DummyExceptionImpl1',
'dummy_exception_impl2': 'DummyExceptionImpl2',
}
self.assertEqual(actual_classes, expected_classes)
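if __name__ == '__main__':
  # Editor's addition: lets the file run standalone; Telemetry's own test
  # runner normally discovers these cases instead.
  unittest.main()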
| bsd-3-clause | -4,714,378,293,815,855,000 | 34.307692 | 74 | 0.686819 | false |
andersk/zulip | zerver/lib/cache.py | 2 | 28356 | # See https://zulip.readthedocs.io/en/latest/subsystems/caching.html for docs
import hashlib
import logging
import os
import re
import secrets
import sys
import time
import traceback
from functools import lru_cache, wraps
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
Iterable,
List,
Optional,
Sequence,
Tuple,
TypeVar,
cast,
)
from django.conf import settings
from django.core.cache import cache as djcache
from django.core.cache import caches
from django.core.cache.backends.base import BaseCache
from django.db.models import Q
from django.http import HttpRequest
from zerver.lib.utils import make_safe_digest, statsd, statsd_key
if TYPE_CHECKING:
# These modules have to be imported for type annotations but
# they cannot be imported at runtime due to cyclic dependency.
from zerver.models import Message, Realm, UserProfile
MEMCACHED_MAX_KEY_LENGTH = 250
FuncT = TypeVar("FuncT", bound=Callable[..., object])
logger = logging.getLogger()
class NotFoundInCache(Exception):
pass
remote_cache_time_start = 0.0
remote_cache_total_time = 0.0
remote_cache_total_requests = 0
def get_remote_cache_time() -> float:
return remote_cache_total_time
def get_remote_cache_requests() -> int:
return remote_cache_total_requests
def remote_cache_stats_start() -> None:
global remote_cache_time_start
remote_cache_time_start = time.time()
def remote_cache_stats_finish() -> None:
global remote_cache_total_time
global remote_cache_total_requests
global remote_cache_time_start
remote_cache_total_requests += 1
remote_cache_total_time += time.time() - remote_cache_time_start
def get_or_create_key_prefix() -> str:
if settings.PUPPETEER_TESTS:
# This sets the prefix for the benefit of the Puppeteer tests.
#
# Having a fixed key is OK since we don't support running
# multiple copies of the Puppeteer tests at the same time anyway.
return "puppeteer_tests:"
elif settings.TEST_SUITE:
# The Python tests overwrite KEY_PREFIX on each test, but use
# this codepath as well, just to save running the more complex
# code below for reading the normal key prefix.
return "django_tests_unused:"
# directory `var` should exist in production
os.makedirs(os.path.join(settings.DEPLOY_ROOT, "var"), exist_ok=True)
filename = os.path.join(settings.DEPLOY_ROOT, "var", "remote_cache_prefix")
try:
with open(filename, "x") as f:
prefix = secrets.token_hex(16) + ":"
f.write(prefix + "\n")
except FileExistsError:
tries = 1
while tries < 10:
with open(filename) as f:
prefix = f.readline()[:-1]
if len(prefix) == 33:
break
tries += 1
prefix = ""
time.sleep(0.5)
if not prefix:
print("Could not read remote cache key prefix file")
sys.exit(1)
return prefix
KEY_PREFIX: str = get_or_create_key_prefix()
def bounce_key_prefix_for_testing(test_name: str) -> None:
global KEY_PREFIX
KEY_PREFIX = test_name + ":" + str(os.getpid()) + ":"
# We are taking the hash of the KEY_PREFIX to decrease the size of the key.
# Memcached keys should have a length of less than 250.
KEY_PREFIX = hashlib.sha1(KEY_PREFIX.encode("utf-8")).hexdigest() + ":"
def get_cache_backend(cache_name: Optional[str]) -> BaseCache:
if cache_name is None:
return djcache
return caches[cache_name]
def get_cache_with_key(
keyfunc: Callable[..., str],
cache_name: Optional[str] = None,
) -> Callable[[FuncT], FuncT]:
"""
    Read a value from the cache, keyed by keyfunc, much like cache_with_key.
    Because a cached value may legitimately be None, a cache miss is signaled
    by raising NotFoundInCache instead of returning None.
"""
def decorator(func: FuncT) -> FuncT:
@wraps(func)
def func_with_caching(*args: object, **kwargs: object) -> object:
key = keyfunc(*args, **kwargs)
try:
val = cache_get(key, cache_name=cache_name)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
log_invalid_cache_keys(stack_trace, [key])
val = None
if val is not None:
return val[0]
raise NotFoundInCache()
return cast(FuncT, func_with_caching) # https://github.com/python/mypy/issues/1927
return decorator
def cache_with_key(
keyfunc: Callable[..., str],
cache_name: Optional[str] = None,
timeout: Optional[int] = None,
with_statsd_key: Optional[str] = None,
) -> Callable[[FuncT], FuncT]:
"""Decorator which applies Django caching to a function.
Decorator argument is a function which computes a cache key
from the original function's arguments. You are responsible
for avoiding collisions with other uses of this decorator or
other uses of caching."""
def decorator(func: FuncT) -> FuncT:
@wraps(func)
def func_with_caching(*args: object, **kwargs: object) -> object:
key = keyfunc(*args, **kwargs)
try:
val = cache_get(key, cache_name=cache_name)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
log_invalid_cache_keys(stack_trace, [key])
return func(*args, **kwargs)
extra = ""
if cache_name == "database":
extra = ".dbcache"
if with_statsd_key is not None:
metric_key = with_statsd_key
else:
metric_key = statsd_key(key)
status = "hit" if val is not None else "miss"
statsd.incr(f"cache{extra}.{metric_key}.{status}")
# Values are singleton tuples so that we can distinguish
# a result of None from a missing key.
if val is not None:
return val[0]
val = func(*args, **kwargs)
cache_set(key, val, cache_name=cache_name, timeout=timeout)
return val
return cast(FuncT, func_with_caching) # https://github.com/python/mypy/issues/1927
return decorator
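# Illustrative usage sketch (hypothetical names, not upstream code): pair a
# key function with the expensive function it guards.
def _example_settings_cache_key(realm_id: int) -> str:
    return f"example_settings:{realm_id}"
@cache_with_key(_example_settings_cache_key, timeout=3600)
def _example_get_settings(realm_id: int) -> Dict[str, int]:
    # Stand-in for an expensive database query.
    return {"realm_id": realm_id}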
class InvalidCacheKeyException(Exception):
pass
def log_invalid_cache_keys(stack_trace: str, key: List[str]) -> None:
logger.warning(
"Invalid cache key used: %s\nStack trace: %s\n",
key,
stack_trace,
)
def validate_cache_key(key: str) -> None:
if not key.startswith(KEY_PREFIX):
key = KEY_PREFIX + key
# Theoretically memcached can handle non-ascii characters
# and only "control" characters are strictly disallowed, see:
# https://github.com/memcached/memcached/blob/master/doc/protocol.txt
    # However, limiting the characters we allow in keys simplifies things,
# and anyway we use make_safe_digest when forming some keys to ensure
# the resulting keys fit the regex below.
# The regex checks "all characters between ! and ~ in the ascii table",
# which happens to be the set of all "nice" ascii characters.
if not bool(re.fullmatch(r"([!-~])+", key)):
raise InvalidCacheKeyException("Invalid characters in the cache key: " + key)
if len(key) > MEMCACHED_MAX_KEY_LENGTH:
raise InvalidCacheKeyException(f"Cache key too long: {key} Length: {len(key)}")
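# Illustrative note (not upstream code): validate_cache_key("user_profile_by_id:42")
# passes silently, while a key containing a space or a non-ASCII character
# raises InvalidCacheKeyException.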
def cache_set(
key: str, val: Any, cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
final_key = KEY_PREFIX + key
validate_cache_key(final_key)
remote_cache_stats_start()
cache_backend = get_cache_backend(cache_name)
cache_backend.set(final_key, (val,), timeout=timeout)
remote_cache_stats_finish()
def cache_get(key: str, cache_name: Optional[str] = None) -> Any:
final_key = KEY_PREFIX + key
validate_cache_key(final_key)
remote_cache_stats_start()
cache_backend = get_cache_backend(cache_name)
ret = cache_backend.get(final_key)
remote_cache_stats_finish()
return ret
def cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
keys = [KEY_PREFIX + key for key in keys]
for key in keys:
validate_cache_key(key)
remote_cache_stats_start()
ret = get_cache_backend(cache_name).get_many(keys)
remote_cache_stats_finish()
return {key[len(KEY_PREFIX) :]: value for key, value in ret.items()}
def safe_cache_get_many(keys: List[str], cache_name: Optional[str] = None) -> Dict[str, Any]:
"""Variant of cache_get_many that drops any keys that fail
validation, rather than throwing an exception visible to the
caller."""
try:
# Almost always the keys will all be correct, so we just try
# to do normal cache_get_many to avoid the overhead of
# validating all the keys here.
return cache_get_many(keys, cache_name)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
good_keys, bad_keys = filter_good_and_bad_keys(keys)
log_invalid_cache_keys(stack_trace, bad_keys)
return cache_get_many(good_keys, cache_name)
def cache_set_many(
items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
new_items = {}
for key in items:
new_key = KEY_PREFIX + key
validate_cache_key(new_key)
new_items[new_key] = items[key]
items = new_items
remote_cache_stats_start()
get_cache_backend(cache_name).set_many(items, timeout=timeout)
remote_cache_stats_finish()
def safe_cache_set_many(
items: Dict[str, Any], cache_name: Optional[str] = None, timeout: Optional[int] = None
) -> None:
"""Variant of cache_set_many that drops saving any keys that fail
validation, rather than throwing an exception visible to the
caller."""
try:
# Almost always the keys will all be correct, so we just try
# to do normal cache_set_many to avoid the overhead of
# validating all the keys here.
return cache_set_many(items, cache_name, timeout)
except InvalidCacheKeyException:
stack_trace = traceback.format_exc()
good_keys, bad_keys = filter_good_and_bad_keys(list(items.keys()))
log_invalid_cache_keys(stack_trace, bad_keys)
good_items = {key: items[key] for key in good_keys}
return cache_set_many(good_items, cache_name, timeout)
def cache_delete(key: str, cache_name: Optional[str] = None) -> None:
final_key = KEY_PREFIX + key
validate_cache_key(final_key)
remote_cache_stats_start()
get_cache_backend(cache_name).delete(final_key)
remote_cache_stats_finish()
def cache_delete_many(items: Iterable[str], cache_name: Optional[str] = None) -> None:
keys = [KEY_PREFIX + item for item in items]
for key in keys:
validate_cache_key(key)
remote_cache_stats_start()
get_cache_backend(cache_name).delete_many(keys)
remote_cache_stats_finish()
def filter_good_and_bad_keys(keys: List[str]) -> Tuple[List[str], List[str]]:
good_keys = []
bad_keys = []
for key in keys:
try:
validate_cache_key(key)
good_keys.append(key)
except InvalidCacheKeyException:
bad_keys.append(key)
return good_keys, bad_keys
# Generic_bulk_cached fetch and its helpers. We start with declaring
# a few type variables that help define its interface.
# Type for the cache's keys; will typically be int or str.
ObjKT = TypeVar("ObjKT")
# Type for items to be fetched from the database (e.g. a Django model object)
ItemT = TypeVar("ItemT")
# Type for items to be stored in the cache (e.g. a dictionary serialization).
# Will equal ItemT unless a cache_transformer is specified.
CacheItemT = TypeVar("CacheItemT")
# Type for compressed items for storage in the cache. For
# serializable objects, will be the object; if encoded, bytes.
CompressedItemT = TypeVar("CompressedItemT")
# Required arguments are as follows:
# * object_ids: The list of object ids to look up
# * cache_key_function: object_id => cache key
# * query_function: [object_ids] => [objects from database]
# * setter: Function to call before storing items to cache (e.g. compression)
# * extractor: Function to call on items returned from cache
# (e.g. decompression). Should be the inverse of the setter
# function.
# * id_fetcher: Function mapping an object from database => object_id
# (in case we're using a key more complex than obj.id)
# * cache_transformer: Function mapping an object from database =>
# value for cache (in case the values that we're caching are some
# function of the objects, not the objects themselves)
def generic_bulk_cached_fetch(
cache_key_function: Callable[[ObjKT], str],
query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
object_ids: Sequence[ObjKT],
*,
extractor: Callable[[CompressedItemT], CacheItemT],
setter: Callable[[CacheItemT], CompressedItemT],
id_fetcher: Callable[[ItemT], ObjKT],
cache_transformer: Callable[[ItemT], CacheItemT],
) -> Dict[ObjKT, CacheItemT]:
if len(object_ids) == 0:
# Nothing to fetch.
return {}
cache_keys: Dict[ObjKT, str] = {}
for object_id in object_ids:
cache_keys[object_id] = cache_key_function(object_id)
cached_objects_compressed: Dict[str, Tuple[CompressedItemT]] = safe_cache_get_many(
[cache_keys[object_id] for object_id in object_ids],
)
cached_objects: Dict[str, CacheItemT] = {}
for (key, val) in cached_objects_compressed.items():
cached_objects[key] = extractor(cached_objects_compressed[key][0])
needed_ids = [
object_id for object_id in object_ids if cache_keys[object_id] not in cached_objects
]
# Only call query_function if there are some ids to fetch from the database:
if len(needed_ids) > 0:
db_objects = query_function(needed_ids)
else:
db_objects = []
items_for_remote_cache: Dict[str, Tuple[CompressedItemT]] = {}
for obj in db_objects:
key = cache_keys[id_fetcher(obj)]
item = cache_transformer(obj)
items_for_remote_cache[key] = (setter(item),)
cached_objects[key] = item
if len(items_for_remote_cache) > 0:
safe_cache_set_many(items_for_remote_cache)
return {
object_id: cached_objects[cache_keys[object_id]]
for object_id in object_ids
if cache_keys[object_id] in cached_objects
}
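# Illustrative usage sketch (hypothetical names, not upstream code): a minimal
# caller that caches rows keyed by integer id and stores a dict serialization
# of each row.
def _example_bulk_fetch(ids: List[int]) -> Dict[int, Dict[str, int]]:
    return generic_bulk_cached_fetch(
        cache_key_function=lambda obj_id: f"example_row:{obj_id}",
        query_function=lambda needed_ids: [],  # stand-in for a database query
        object_ids=ids,
        extractor=lambda compressed: compressed,
        setter=lambda item: item,
        id_fetcher=lambda row: row.id,
        cache_transformer=lambda row: {"id": row.id},
    )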
def transformed_bulk_cached_fetch(
cache_key_function: Callable[[ObjKT], str],
query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
object_ids: Sequence[ObjKT],
*,
id_fetcher: Callable[[ItemT], ObjKT],
cache_transformer: Callable[[ItemT], CacheItemT],
) -> Dict[ObjKT, CacheItemT]:
return generic_bulk_cached_fetch(
cache_key_function,
query_function,
object_ids,
extractor=lambda obj: obj,
setter=lambda obj: obj,
id_fetcher=id_fetcher,
cache_transformer=cache_transformer,
)
def bulk_cached_fetch(
cache_key_function: Callable[[ObjKT], str],
query_function: Callable[[List[ObjKT]], Iterable[ItemT]],
object_ids: Sequence[ObjKT],
*,
id_fetcher: Callable[[ItemT], ObjKT],
) -> Dict[ObjKT, ItemT]:
return transformed_bulk_cached_fetch(
cache_key_function,
query_function,
object_ids,
id_fetcher=id_fetcher,
cache_transformer=lambda obj: obj,
)
def preview_url_cache_key(url: str) -> str:
return f"preview_url:{make_safe_digest(url)}"
def display_recipient_cache_key(recipient_id: int) -> str:
return f"display_recipient_dict:{recipient_id}"
def display_recipient_bulk_get_users_by_id_cache_key(user_id: int) -> str:
# Cache key function for a function for bulk fetching users, used internally
# by display_recipient code.
return "bulk_fetch_display_recipients:" + user_profile_by_id_cache_key(user_id)
def user_profile_cache_key_id(email: str, realm_id: int) -> str:
return f"user_profile:{make_safe_digest(email.strip())}:{realm_id}"
def user_profile_cache_key(email: str, realm: "Realm") -> str:
return user_profile_cache_key_id(email, realm.id)
def user_profile_delivery_email_cache_key(delivery_email: str, realm: "Realm") -> str:
return f"user_profile_by_delivery_email:{make_safe_digest(delivery_email.strip())}:{realm.id}"
def bot_profile_cache_key(email: str) -> str:
return f"bot_profile:{make_safe_digest(email.strip())}"
def user_profile_by_id_cache_key(user_profile_id: int) -> str:
return f"user_profile_by_id:{user_profile_id}"
def user_profile_by_api_key_cache_key(api_key: str) -> str:
return f"user_profile_by_api_key:{api_key}"
realm_user_dict_fields: List[str] = [
"id",
"full_name",
"email",
"avatar_source",
"avatar_version",
"is_active",
"role",
"is_billing_admin",
"is_bot",
"realm_id",
"timezone",
"date_joined",
"bot_owner_id",
"delivery_email",
"bot_type",
"long_term_idle",
]
def realm_user_dicts_cache_key(realm_id: int) -> str:
return f"realm_user_dicts:{realm_id}"
def get_muting_users_cache_key(muted_user_id: int) -> str:
return f"muting_users_list:{muted_user_id}"
def get_realm_used_upload_space_cache_key(realm: "Realm") -> str:
return f"realm_used_upload_space:{realm.id}"
def active_user_ids_cache_key(realm_id: int) -> str:
return f"active_user_ids:{realm_id}"
def active_non_guest_user_ids_cache_key(realm_id: int) -> str:
return f"active_non_guest_user_ids:{realm_id}"
bot_dict_fields: List[str] = [
"api_key",
"avatar_source",
"avatar_version",
"bot_owner_id",
"bot_type",
"default_all_public_streams",
"default_events_register_stream__name",
"default_sending_stream__name",
"email",
"full_name",
"id",
"is_active",
"realm_id",
]
def bot_dicts_in_realm_cache_key(realm: "Realm") -> str:
return f"bot_dicts_in_realm:{realm.id}"
def get_stream_cache_key(stream_name: str, realm_id: int) -> str:
return f"stream_by_realm_and_name:{realm_id}:{make_safe_digest(stream_name.strip().lower())}"
def delete_user_profile_caches(user_profiles: Iterable["UserProfile"]) -> None:
# Imported here to avoid cyclic dependency.
from zerver.lib.users import get_all_api_keys
from zerver.models import is_cross_realm_bot_email
keys = []
for user_profile in user_profiles:
keys.append(user_profile_by_id_cache_key(user_profile.id))
for api_key in get_all_api_keys(user_profile):
keys.append(user_profile_by_api_key_cache_key(api_key))
keys.append(user_profile_cache_key(user_profile.email, user_profile.realm))
keys.append(
user_profile_delivery_email_cache_key(user_profile.delivery_email, user_profile.realm)
)
if user_profile.is_bot and is_cross_realm_bot_email(user_profile.email):
# Handle clearing system bots from their special cache.
keys.append(bot_profile_cache_key(user_profile.email))
cache_delete_many(keys)
def delete_display_recipient_cache(user_profile: "UserProfile") -> None:
from zerver.models import Subscription # We need to import here to avoid cyclic dependency.
recipient_ids = Subscription.objects.filter(user_profile=user_profile)
recipient_ids = recipient_ids.values_list("recipient_id", flat=True)
keys = [display_recipient_cache_key(rid) for rid in recipient_ids]
keys.append(display_recipient_bulk_get_users_by_id_cache_key(user_profile.id))
cache_delete_many(keys)
def changed(kwargs: Any, fields: List[str]) -> bool:
if kwargs.get("update_fields") is None:
# adds/deletes should invalidate the cache
return True
update_fields = set(kwargs["update_fields"])
for f in fields:
if f in update_fields:
return True
return False
# Called by models.py to flush the user_profile cache whenever we save
# a user_profile object
def flush_user_profile(sender: Any, **kwargs: Any) -> None:
user_profile = kwargs["instance"]
delete_user_profile_caches([user_profile])
# Invalidate our active_users_in_realm info dict if any user has changed
# the fields in the dict or become (in)active
if changed(kwargs, realm_user_dict_fields):
cache_delete(realm_user_dicts_cache_key(user_profile.realm_id))
if changed(kwargs, ["is_active"]):
cache_delete(active_user_ids_cache_key(user_profile.realm_id))
cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))
if changed(kwargs, ["role"]):
cache_delete(active_non_guest_user_ids_cache_key(user_profile.realm_id))
if changed(kwargs, ["email", "full_name", "id", "is_mirror_dummy"]):
delete_display_recipient_cache(user_profile)
# Invalidate our bots_in_realm info dict if any bot has
# changed the fields in the dict or become (in)active
if user_profile.is_bot and changed(kwargs, bot_dict_fields):
cache_delete(bot_dicts_in_realm_cache_key(user_profile.realm))
def flush_muting_users_cache(sender: Any, **kwargs: Any) -> None:
mute_object = kwargs["instance"]
cache_delete(get_muting_users_cache_key(mute_object.muted_user_id))
# Called by models.py to flush various caches whenever we save
# a Realm object. The main tricky thing here is that Realm info is
# generally cached indirectly through user_profile objects.
def flush_realm(sender: Any, from_deletion: bool = False, **kwargs: Any) -> None:
realm = kwargs["instance"]
users = realm.get_active_users()
delete_user_profile_caches(users)
if (
from_deletion
or realm.deactivated
or (kwargs["update_fields"] is not None and "string_id" in kwargs["update_fields"])
):
cache_delete(realm_user_dicts_cache_key(realm.id))
cache_delete(active_user_ids_cache_key(realm.id))
cache_delete(bot_dicts_in_realm_cache_key(realm))
cache_delete(realm_alert_words_cache_key(realm))
cache_delete(realm_alert_words_automaton_cache_key(realm))
cache_delete(active_non_guest_user_ids_cache_key(realm.id))
cache_delete(realm_rendered_description_cache_key(realm))
cache_delete(realm_text_description_cache_key(realm))
elif changed(kwargs, ["description"]):
cache_delete(realm_rendered_description_cache_key(realm))
cache_delete(realm_text_description_cache_key(realm))
def realm_alert_words_cache_key(realm: "Realm") -> str:
return f"realm_alert_words:{realm.string_id}"
def realm_alert_words_automaton_cache_key(realm: "Realm") -> str:
return f"realm_alert_words_automaton:{realm.string_id}"
def realm_rendered_description_cache_key(realm: "Realm") -> str:
return f"realm_rendered_description:{realm.string_id}"
def realm_text_description_cache_key(realm: "Realm") -> str:
return f"realm_text_description:{realm.string_id}"
# Called by models.py to flush the stream cache whenever we save a stream
# object.
def flush_stream(sender: Any, **kwargs: Any) -> None:
from zerver.models import UserProfile
stream = kwargs["instance"]
items_for_remote_cache = {}
if kwargs.get("update_fields") is None:
cache_delete(get_stream_cache_key(stream.name, stream.realm_id))
else:
items_for_remote_cache[get_stream_cache_key(stream.name, stream.realm_id)] = (stream,)
cache_set_many(items_for_remote_cache)
if (
kwargs.get("update_fields") is None
or "name" in kwargs["update_fields"]
and UserProfile.objects.filter(
Q(default_sending_stream=stream) | Q(default_events_register_stream=stream)
).exists()
):
cache_delete(bot_dicts_in_realm_cache_key(stream.realm))
def flush_used_upload_space_cache(sender: Any, **kwargs: Any) -> None:
attachment = kwargs["instance"]
if kwargs.get("created") is None or kwargs.get("created") is True:
cache_delete(get_realm_used_upload_space_cache_key(attachment.owner.realm))
def to_dict_cache_key_id(message_id: int) -> str:
return f"message_dict:{message_id}"
def to_dict_cache_key(message: "Message", realm_id: Optional[int] = None) -> str:
return to_dict_cache_key_id(message.id)
def open_graph_description_cache_key(content: bytes, request: HttpRequest) -> str:
return "open_graph_description_path:{}".format(make_safe_digest(request.META["PATH_INFO"]))
def flush_message(sender: Any, **kwargs: Any) -> None:
message = kwargs["instance"]
cache_delete(to_dict_cache_key_id(message.id))
def flush_submessage(sender: Any, **kwargs: Any) -> None:
submessage = kwargs["instance"]
# submessages are not cached directly, they are part of their
# parent messages
message_id = submessage.message_id
cache_delete(to_dict_cache_key_id(message_id))
def ignore_unhashable_lru_cache(
maxsize: int = 128, typed: bool = False
) -> Callable[[FuncT], FuncT]:
"""
This is a wrapper over lru_cache function. It adds following features on
top of lru_cache:
* It will not cache result of functions with unhashable arguments.
* It will clear cache whenever zerver.lib.cache.KEY_PREFIX changes.
"""
internal_decorator = lru_cache(maxsize=maxsize, typed=typed)
def decorator(user_function: FuncT) -> FuncT:
if settings.DEVELOPMENT and not settings.TEST_SUITE: # nocoverage
# In the development environment, we want every file
# change to refresh the source files from disk.
return user_function
# Casting to Any since we're about to monkey-patch this.
cache_enabled_user_function: Any = internal_decorator(user_function)
def wrapper(*args: object, **kwargs: object) -> object:
if not hasattr(cache_enabled_user_function, "key_prefix"):
cache_enabled_user_function.key_prefix = KEY_PREFIX
if cache_enabled_user_function.key_prefix != KEY_PREFIX:
# Clear cache when cache.KEY_PREFIX changes. This is used in
# tests.
cache_enabled_user_function.cache_clear()
cache_enabled_user_function.key_prefix = KEY_PREFIX
try:
return cache_enabled_user_function(*args, **kwargs)
except TypeError:
# args or kwargs contains an element which is unhashable. In
# this case we don't cache the result.
pass
# Deliberately calling this function from outside of exception
# handler to get a more descriptive traceback. Otherwise traceback
# can include the exception from cached_enabled_user_function as
# well.
return user_function(*args, **kwargs)
setattr(wrapper, "cache_info", cache_enabled_user_function.cache_info)
setattr(wrapper, "cache_clear", cache_enabled_user_function.cache_clear)
return cast(FuncT, wrapper) # https://github.com/python/mypy/issues/1927
return decorator
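# Illustrative usage sketch (hypothetical function, not upstream code): calls
# with hashable arguments are memoized; a call passing an unhashable argument
# such as a dict silently bypasses the cache.
@ignore_unhashable_lru_cache(maxsize=None)
def _example_expensive(key: object) -> str:
    return f"computed:{key!r}"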
def dict_to_items_tuple(user_function: Callable[..., Any]) -> Callable[..., Any]:
"""Wrapper that converts any dict args to dict item tuples."""
def dict_to_tuple(arg: Any) -> Any:
if isinstance(arg, dict):
return tuple(sorted(arg.items()))
return arg
def wrapper(*args: Any, **kwargs: Any) -> Any:
new_args = (dict_to_tuple(arg) for arg in args)
return user_function(*new_args, **kwargs)
return wrapper
def items_tuple_to_dict(user_function: Callable[..., Any]) -> Callable[..., Any]:
"""Wrapper that converts any dict items tuple args to dicts."""
def dict_items_to_dict(arg: Any) -> Any:
if isinstance(arg, tuple):
try:
return dict(arg)
except TypeError:
pass
return arg
def wrapper(*args: Any, **kwargs: Any) -> Any:
new_args = (dict_items_to_dict(arg) for arg in args)
new_kwargs = {key: dict_items_to_dict(val) for key, val in kwargs.items()}
return user_function(*new_args, **new_kwargs)
return wrapper
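# Illustrative usage sketch (hypothetical function, not upstream code):
# stacking the two converters around the cache decorator memoizes a function
# that takes a dict positionally -- the dict becomes a sorted items tuple for
# hashing and is restored before the function body runs.
@dict_to_items_tuple
@ignore_unhashable_lru_cache()
@items_tuple_to_dict
def _example_render(text: str, options: Dict[str, Any]) -> str:
    return f"{text}:{sorted(options)}"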
| apache-2.0 | -7,385,823,118,788,663,000 | 32.959281 | 98 | 0.655875 | false |
leoliujie/odoo | addons/l10n_hn/__openerp__.py | 343 | 2260 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2009-2010 Salvatore Josué Trimarchi Pinto <[email protected]>
# (http://trigluu.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# This module provides a minimal Honduran chart of accounts that can be used
# to build upon a more complex one. It also includes a chart of taxes and
# the Lempira currency.
#
# This module is based on the Guatemalan chart of accounts:
# Copyright (c) 2009-2010 Soluciones Tecnologócias Prisma S.A. All Rights Reserved.
# José Rodrigo Fernández Menegazzo, Soluciones Tecnologócias Prisma S.A.
# (http://www.solucionesprisma.com)
#
# This module works with OpenERP 6.0 to 8.0
#
{
'name': 'Honduras - Accounting',
'version': '0.1',
'category': 'Localization/Account Charts',
'description': """
This is the base module to manage the accounting chart for Honduras.
====================================================================
Agrega una nomenclatura contable para Honduras. También incluye impuestos y la
moneda Lempira. -- Adds accounting chart for Honduras. It also includes taxes
and the Lempira currency.""",
'author': 'Salvatore Josue Trimarchi Pinto',
'website': 'http://trigluu.com',
'depends': ['base', 'account', 'account_chart'],
'data': [
'account_types.xml',
'account_chart.xml',
'account_tax.xml',
'l10n_hn_base.xml',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -9,086,664,144,931,744,000 | 37.862069 | 83 | 0.639752 | false |
jonfoster/pyxb-upstream-mirror | doc/conf.py | 3 | 6355 | # -*- coding: utf-8 -*-
#
# PyXB documentation build configuration file, created by
# sphinx-quickstart on Tue May 19 03:28:52 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'extapi' ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.txt'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyXB'
copyright = u'2009-2013, Peter A. Bigot'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2'
# The full version, including alpha/beta/rc tags.
release = '1.2.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = [ 'W3C', 'api', 'html', 'Images', '_templates' ]
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'PyXBdoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'PyXB.tex', u'PyXB Documentation',
u'Peter A. Bigot', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
| apache-2.0 | 5,365,618,532,357,052,000 | 31.589744 | 80 | 0.709677 | false |
Moriadry/tensorflow | tensorflow/python/lib/io/python_io.py | 112 | 1273 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python functions for directly manipulating TFRecord-formatted files.
See the @{$python/python_io} guide.
@@TFRecordWriter
@@tf_record_iterator
@@TFRecordCompressionType
@@TFRecordOptions
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.lib.io.tf_record import *
# pylint: enable=wildcard-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = []
remove_undocumented(__name__, _allowed_symbols)
| apache-2.0 | -4,112,572,014,698,157,600 | 32.5 | 80 | 0.720346 | false |
z1gm4/desarrollo_web_udp | env/lib/python2.7/site-packages/django/conf/locale/cs/formats.py | 504 | 1702 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. E Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '05.01.2006', '05.01.06'
'%d. %m. %Y', '%d. %m. %y', # '5. 1. 2006', '5. 1. 06'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
# Kept ISO formats as one is in first position
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '04:30:59'
'%H.%M', # '04.30'
'%H:%M', # '04:30'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '05.01.2006 04:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '05.01.2006 04:30:59.000200'
'%d.%m.%Y %H.%M', # '05.01.2006 04.30'
'%d.%m.%Y %H:%M', # '05.01.2006 04:30'
'%d.%m.%Y', # '05.01.2006'
'%d. %m. %Y %H:%M:%S', # '05. 01. 2006 04:30:59'
'%d. %m. %Y %H:%M:%S.%f', # '05. 01. 2006 04:30:59.000200'
'%d. %m. %Y %H.%M', # '05. 01. 2006 04.30'
'%d. %m. %Y %H:%M', # '05. 01. 2006 04:30'
'%d. %m. %Y', # '05. 01. 2006'
'%Y-%m-%d %H.%M', # '2006-01-05 04.30'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| gpl-3.0 | -1,611,089,585,953,104,600 | 36.822222 | 77 | 0.528202 | false |
akash1808/glance | glance/tests/unit/v2/test_image_tags_resource.py | 18 | 4136 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
import glance.api.v2.image_tags
from glance.common import exception
from glance.tests.unit import base
import glance.tests.unit.utils as unit_test_utils
import glance.tests.unit.v2.test_image_data_resource as image_data_tests
import glance.tests.utils as test_utils
class TestImageTagsController(base.IsolatedUnitTest):
def setUp(self):
super(TestImageTagsController, self).setUp()
self.db = unit_test_utils.FakeDB()
self.controller = glance.api.v2.image_tags.Controller(self.db)
def test_create_tag(self):
request = unit_test_utils.get_fake_request()
self.controller.update(request, unit_test_utils.UUID1, 'dink')
context = request.context
tags = self.db.image_tag_get_all(context, unit_test_utils.UUID1)
self.assertEqual(1, len([tag for tag in tags if tag == 'dink']))
def test_create_too_many_tags(self):
self.config(image_tag_quota=0)
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
self.controller.update,
request, unit_test_utils.UUID1, 'dink')
def test_create_duplicate_tag_ignored(self):
request = unit_test_utils.get_fake_request()
self.controller.update(request, unit_test_utils.UUID1, 'dink')
self.controller.update(request, unit_test_utils.UUID1, 'dink')
context = request.context
tags = self.db.image_tag_get_all(context, unit_test_utils.UUID1)
self.assertEqual(1, len([tag for tag in tags if tag == 'dink']))
def test_update_tag_of_non_existing_image(self):
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
request, "abcd", "dink")
def test_delete_tag_forbidden(self):
def fake_get(self):
raise exception.Forbidden()
image_repo = image_data_tests.FakeImageRepo()
image_repo.get = fake_get
def get_fake_repo(self):
return image_repo
self.controller.gateway.get_repo = get_fake_repo
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPForbidden, self.controller.update,
request, unit_test_utils.UUID1, "ping")
def test_delete_tag(self):
request = unit_test_utils.get_fake_request()
self.controller.delete(request, unit_test_utils.UUID1, 'ping')
def test_delete_tag_not_found(self):
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
request, unit_test_utils.UUID1, 'what')
def test_delete_tag_of_non_existing_image(self):
request = unit_test_utils.get_fake_request()
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
request, "abcd", "dink")
class TestImagesSerializer(test_utils.BaseTestCase):
def setUp(self):
super(TestImagesSerializer, self).setUp()
self.serializer = glance.api.v2.image_tags.ResponseSerializer()
def test_create_tag(self):
response = webob.Response()
self.serializer.update(response, None)
self.assertEqual(204, response.status_int)
def test_delete_tag(self):
response = webob.Response()
self.serializer.delete(response, None)
self.assertEqual(204, response.status_int)
| apache-2.0 | -302,333,761,914,808,260 | 38.769231 | 78 | 0.666828 | false |
grdlok/UStar-dl | src/youtube_dl/extractor/screencast.py | 12 | 4228 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
compat_parse_qs,
compat_urllib_request,
)
class ScreencastIE(InfoExtractor):
_VALID_URL = r'https?://www\.screencast\.com/t/(?P<id>[a-zA-Z0-9]+)'
_TESTS = [{
'url': 'http://www.screencast.com/t/3ZEjQXlT',
'md5': '917df1c13798a3e96211dd1561fded83',
'info_dict': {
'id': '3ZEjQXlT',
'ext': 'm4v',
'title': 'Color Measurement with Ocean Optics Spectrometers',
'description': 'md5:240369cde69d8bed61349a199c5fb153',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/V2uXehPJa1ZI',
'md5': 'e8e4b375a7660a9e7e35c33973410d34',
'info_dict': {
'id': 'V2uXehPJa1ZI',
'ext': 'mov',
'title': 'The Amadeus Spectrometer',
'description': 're:^In this video, our friends at.*To learn more about Amadeus, visit',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/aAB3iowa',
'md5': 'dedb2734ed00c9755761ccaee88527cd',
'info_dict': {
'id': 'aAB3iowa',
'ext': 'mp4',
'title': 'Google Earth Export',
'description': 'Provides a demo of a CommunityViz export to Google Earth, one of the 3D viewing options.',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
}, {
'url': 'http://www.screencast.com/t/X3ddTrYh',
'md5': '669ee55ff9c51988b4ebc0877cc8b159',
'info_dict': {
'id': 'X3ddTrYh',
'ext': 'wmv',
'title': 'Toolkit 6 User Group Webinar (2014-03-04) - Default Judgment and First Impression',
'description': 'md5:7b9f393bc92af02326a5c5889639eab0',
'thumbnail': 're:^https?://.*\.(?:gif|jpg)$',
}
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(
r'<embed name="Video".*?src="([^"]+)"', webpage,
'QuickTime embed', default=None)
if video_url is None:
flash_vars_s = self._html_search_regex(
r'<param name="flashVars" value="([^"]+)"', webpage, 'flash vars',
default=None)
if not flash_vars_s:
flash_vars_s = self._html_search_regex(
r'<param name="initParams" value="([^"]+)"', webpage, 'flash vars',
default=None)
if flash_vars_s:
flash_vars_s = flash_vars_s.replace(',', '&')
if flash_vars_s:
flash_vars = compat_parse_qs(flash_vars_s)
video_url_raw = compat_urllib_request.quote(
flash_vars['content'][0])
video_url = video_url_raw.replace('http%3A', 'http:')
if video_url is None:
video_meta = self._html_search_meta(
'og:video', webpage, default=None)
if video_meta:
video_url = self._search_regex(
r'src=(.*?)(?:$|&)', video_meta,
'meta tag video URL', default=None)
if video_url is None:
raise ExtractorError('Cannot find video')
title = self._og_search_title(webpage, default=None)
if title is None:
title = self._html_search_regex(
[r'<b>Title:</b> ([^<]*)</div>',
r'class="tabSeperator">></span><span class="tabText">(.*?)<'],
webpage, 'title')
thumbnail = self._og_search_thumbnail(webpage)
description = self._og_search_description(webpage, default=None)
if description is None:
description = self._html_search_meta('description', webpage)
return {
'id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
}
| unlicense | -4,414,803,871,771,628,000 | 36.75 | 118 | 0.515847 | false |
willingc/oh-mainline | vendor/packages/python-social-auth/social/backends/rdio.py | 79 | 2468 | """
Rdio OAuth1 and OAuth2 backends, docs at:
http://psa.matiasaguirre.net/docs/backends/rdio.html
"""
from social.backends.oauth import BaseOAuth1, BaseOAuth2, OAuthAuth
RDIO_API = 'https://www.rdio.com/api/1/'
class BaseRdio(OAuthAuth):
ID_KEY = 'key'
def get_user_details(self, response):
fullname, first_name, last_name = self.get_user_names(
fullname=response['displayName'],
first_name=response['firstName'],
last_name=response['lastName']
)
return {
'username': response['username'],
'fullname': fullname,
'first_name': first_name,
'last_name': last_name
}
class RdioOAuth1(BaseRdio, BaseOAuth1):
"""Rdio OAuth authentication backend"""
name = 'rdio-oauth1'
REQUEST_TOKEN_URL = 'http://api.rdio.com/oauth/request_token'
AUTHORIZATION_URL = 'https://www.rdio.com/oauth/authorize'
ACCESS_TOKEN_URL = 'http://api.rdio.com/oauth/access_token'
EXTRA_DATA = [
('key', 'rdio_id'),
('icon', 'rdio_icon_url'),
('url', 'rdio_profile_url'),
('username', 'rdio_username'),
('streamRegion', 'rdio_stream_region'),
]
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
params = {'method': 'currentUser',
'extras': 'username,displayName,streamRegion'}
request = self.oauth_request(access_token, RDIO_API,
params, method='POST')
return self.get_json(request.url, method='POST',
data=request.to_postdata())['result']
class RdioOAuth2(BaseRdio, BaseOAuth2):
name = 'rdio-oauth2'
AUTHORIZATION_URL = 'https://www.rdio.com/oauth2/authorize'
ACCESS_TOKEN_URL = 'https://www.rdio.com/oauth2/token'
ACCESS_TOKEN_METHOD = 'POST'
EXTRA_DATA = [
('key', 'rdio_id'),
('icon', 'rdio_icon_url'),
('url', 'rdio_profile_url'),
('username', 'rdio_username'),
('streamRegion', 'rdio_stream_region'),
('refresh_token', 'refresh_token', True),
('token_type', 'token_type', True),
]
def user_data(self, access_token, *args, **kwargs):
return self.get_json(RDIO_API, method='POST', data={
'method': 'currentUser',
'extras': 'username,displayName,streamRegion',
'access_token': access_token
})['result']
| agpl-3.0 | 2,030,584,576,457,099,800 | 33.277778 | 67 | 0.576985 | false |
roshan/thrift | lib/py/src/protocol/TProtocolDecorator.py | 145 | 1540 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from thrift.protocol.TProtocol import TProtocolBase
from types import *
class TProtocolDecorator():
def __init__(self, protocol):
TProtocolBase(protocol)
self.protocol = protocol
def __getattr__(self, name):
if hasattr(self.protocol, name):
member = getattr(self.protocol, name)
if type(member) in [MethodType, UnboundMethodType, FunctionType, LambdaType, BuiltinFunctionType, BuiltinMethodType]:
return lambda *args, **kwargs: self._wrap(member, args, kwargs)
else:
return member
raise AttributeError(name)
def _wrap(self, func, args, kwargs):
if type(func) == MethodType:
result = func(*args, **kwargs)
else:
result = func(self.protocol, *args, **kwargs)
return result
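# Illustrative usage sketch (hypothetical class, not upstream code): a
# subclass overrides one protocol method, and __getattr__ forwards every
# other call to the wrapped protocol.
class ExampleCountingProtocol(TProtocolDecorator):
  def __init__(self, protocol):
    TProtocolDecorator.__init__(self, protocol)
    self.messagesWritten = 0
  def writeMessageBegin(self, name, ttype, seqid):
    self.messagesWritten += 1
    self.protocol.writeMessageBegin(name, ttype, seqid)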
| apache-2.0 | 8,322,687,308,669,466,000 | 35.666667 | 123 | 0.727273 | false |
hgijeon/the_PLAY | test_gameapi.py | 1 | 2612 | import pygame as gameapi
import pygame.midi as piano
import sys, random
import pygame.locals as apiVar
gameapi.init()
fpsClock = gameapi.time.Clock()
windowSurfaceObj = gameapi.display.set_mode((640, 480))
gameapi.display.set_caption('set_caption')
redColor = gameapi.Color(255,0,0)
greenColor = gameapi.Color(0,255,0)
blueColor = gameapi.Color(0,0,255)
mousex, mousey = 0,0
fontObj = gameapi.font.Font('freesansbold.ttf', 32)
mouseposMsg = ""
keypressMsg = "asdfasdfasdf"
piano.init()
piano_id = piano.get_default_input_id()
print (piano_id)
print (piano.get_count())
print(piano.get_device_info(3))
midiInput = piano.Input(3)
gameapi.fastevent.init()
while True:
windowSurfaceObj.fill(greenColor)
randomColor = gameapi.Color(random.randint(0,255),random.randint(0,255),random.randint(0,255))
gameapi.draw.polygon(windowSurfaceObj, redColor, ((0,0), (10, 10), (10,0)))
gameapi.draw.rect(windowSurfaceObj, redColor, (20, 40, 10, 10))
pixArr = gameapi.PixelArray(windowSurfaceObj)
for x in range(100,200,4):
for y in range(100,200,4):
pixArr[x][y] = redColor
del pixArr
msgSurfaceObj = fontObj.render(keypressMsg, False, blueColor)
msgRectobj = msgSurfaceObj.get_rect()
msgRectobj.topleft = (0,0)
windowSurfaceObj.blit(msgSurfaceObj, msgRectobj)
mouseposSurfaceObj = fontObj.render(mouseposMsg, True, randomColor)
windowSurfaceObj.blit(mouseposSurfaceObj, (mousex, mousey))
while midiInput.poll():
midiEvents = midiInput.read(10)
for e in piano.midis2events(midiEvents, piano_id):
gameapi.fastevent.post(e)
for event in gameapi.fastevent.get():
if event.type == apiVar.QUIT:
gameapi.quit()
sys.exit()
elif event.type == piano.MIDIIN:
print(event)
'''
print (event.data1)
print (event.data2)
print (event.data3)
print (event.timestamp)
print (event.vice_id)
'''
elif event.type == apiVar.MOUSEMOTION:
mousex, mousey = event.pos
mouseposMsg = str((mousex, mousey))
elif event.type == apiVar.KEYDOWN:
if event.key in (apiVar.K_LEFT, apiVar.K_RIGHT, apiVar.K_UP, apiVar.K_DOWN):
keypressMsg = 'Arrow key pressed'
elif event.key == apiVar.K_ESCAPE:
gameapi.event.post(gameapi.event.Event(apiVar.QUIT))
else:
keypressMsg = str(event.key)
gameapi.display.update()
fpsClock.tick(30)
| mit | 4,477,830,631,700,586,500 | 28.348315 | 98 | 0.632466 | false |
amwelch/a10sdk-python | a10sdk/core/logging/logging_host.py | 2 | 2576 | from a10sdk.common.A10BaseClass import A10BaseClass
class Host(A10BaseClass):
"""Class Description::
Set remote syslog host DNS name or ip address.
Class host supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param ipv6addr_list: {"minItems": 1, "items": {"type": "ipv6addr"}, "uniqueItems": true, "array": [{"required": ["host-ipv6"], "properties": {"host-ipv6": {"optional": false, "type": "string", "description": "Set syslog host ipv6 address", "format": "ipv6-address"}, "tcp": {"description": "Use TCP as transport protocol", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}, "port": {"description": "Set remote syslog port number", "format": "number", "default": 514, "optional": true, "maximum": 32767, "minimum": 1, "type": "number"}, "use-mgmt-port": {"description": "Use management port for connections", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}}}], "type": "array", "$ref": "/axapi/v3/logging/host/ipv6addr/{host-ipv6}"}
:param ipv4addr_list: {"minItems": 1, "items": {"type": "ipv4addr"}, "uniqueItems": true, "array": [{"required": ["host-ipv4"], "properties": {"tcp": {"description": "Use TCP as transport protocol", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}, "host-ipv4": {"optional": false, "type": "string", "description": "Set syslog host ip address", "format": "host"}, "port": {"description": "Set remote syslog port number", "format": "number", "default": 514, "optional": true, "maximum": 32767, "minimum": 1, "type": "number"}, "use-mgmt-port": {"description": "Use management port for connections", "partition-visibility": "shared", "default": 0, "type": "number", "format": "flag", "optional": true}}}], "type": "array", "$ref": "/axapi/v3/logging/host/ipv4addr/{host-ipv4}"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/logging/host`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "host"
self.a10_url="/axapi/v3/logging/host"
self.DeviceProxy = ""
self.ipv6addr_list = []
self.partition = {}
self.ipv4addr_list = []
for keys, value in kwargs.items():
setattr(self,keys, value)
| apache-2.0 | 3,944,507,077,873,680,400 | 66.789474 | 842 | 0.634317 | false |
xujun10110/MITMf | core/sslstrip/ServerConnectionFactory.py | 26 | 1930 | # Copyright (c) 2014-2016 Moxie Marlinspike, Marcello Salvati
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
#
import logging
from core.logger import logger
from twisted.internet.protocol import ClientFactory
formatter = logging.Formatter("%(asctime)s [ServerConnectionFactory] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
log = logger().setup_logger("ServerConnectionFactory", formatter)
class ServerConnectionFactory(ClientFactory):
def __init__(self, command, uri, postData, headers, client):
self.command = command
self.uri = uri
self.postData = postData
self.headers = headers
self.client = client
def buildProtocol(self, addr):
return self.protocol(self.command, self.uri, self.postData, self.headers, self.client)
def clientConnectionFailed(self, connector, reason):
log.debug("Server connection failed.")
destination = connector.getDestination()
if (destination.port != 443):
log.debug("Retrying via SSL")
self.client.proxyViaSSL(self.headers['host'], self.command, self.uri, self.postData, self.headers, 443)
else:
try:
self.client.finish()
except:
pass
| gpl-3.0 | 6,826,442,213,485,487,000 | 36.843137 | 115 | 0.684974 | false |
Kvle/ardupilot | mk/PX4/Tools/genmsg/src/genmsg/command_line.py | 217 | 1887 | # Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
def includepath_to_dict(includepath):
search_path = {}
if includepath:
for path in includepath:
key = path[:path.find(':')]
value = path[path.find(':')+1:]
if value:
search_path.setdefault(key, []).append(value)
return search_path
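# Illustrative example (not upstream code): each entry splits on the first
# colon and paths are grouped by package name, e.g.
#   includepath_to_dict(['std_msgs:/opt/ros/share/std_msgs/msg',
#                        'std_msgs:/extra/std_msgs/msg'])
#   -> {'std_msgs': ['/opt/ros/share/std_msgs/msg', '/extra/std_msgs/msg']}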
| gpl-3.0 | -9,092,119,296,888,728,000 | 45.02439 | 70 | 0.745628 | false |
daniel198730/reposdmgv | proyectofinal/app/personaMain.py | 1 | 1667 | '''
Created on 19/2/2015
@author: PC29
'''
from app import app
from ec.edu.itsae.dao import PersonaDAO
from flask import render_template, request, redirect, url_for
@app.route("/mainPersona")
def personamain():
objR=PersonaDAO.PersonaDAO().reportarPersona()
return render_template("prueba.html", data=objR)
@app.route("/addPersona", methods=['POST'])
def addPersona():
nombre=request.form.get('nombre', type=str)
apaterno=request.form.get('apaterno', type=str)
amaterno=request.form.get('amaterno', type=str)
cedula=request.form.get('cedula', type=str)
fnacimiento=request.form.get('fnacimiento', type=str)
sexo=request.form.get('sexo', type=str)
direccion=request.form.get('direccion', type=str)
celular=request.form.get('celular', type=str)
estado=request.form.get('estado', type=int)
PersonaDAO.PersonaDAO().insertarPersona(nombre, apaterno, amaterno, cedula, fnacimiento, sexo, direccion, celular, estado)
return redirect(url_for('personamain'))
# The delete call below was unreachable inside addPersona in the original
# source; it is split into its own view here. The route name "/delPersona"
# is an assumption.
@app.route("/delPersona", methods=['POST'])
def delPersona():
    nombre=request.form.get('nombre', type=str)
    apaterno=request.form.get('apaterno', type=str)
    amaterno=request.form.get('amaterno', type=str)
    cedula=request.form.get('cedula', type=str)
    fnacimiento=request.form.get('fnacimiento', type=str)
    sexo=request.form.get('sexo', type=str)
    direccion=request.form.get('direccion', type=str)
    celular=request.form.get('celular', type=str)
    estado=request.form.get('estado', type=int)
    PersonaDAO.PersonaDAO().eliminarPersona(nombre, apaterno, amaterno, cedula, fnacimiento, sexo, direccion, celular, estado)
    return redirect(url_for('personamain'))
@app.route("/buscarauto")
def buscarPersonaAuto():
nombre=str(request.args.get('term'))
objR=PersonaDAO.PersonaDAO().buscarPersonaNombre(nombre)
return objR
@app.route("/buscarDato")
def buscarPersonaDato():
nombre=str(request.args.get('bnombre'))
objR=PersonaDAO.PersonaDAO().buscarPersonaDato(nombre)
return render_template("prueba.html", data=objR)
| lgpl-2.1 | 3,173,741,083,152,962,600 | 27.803571 | 126 | 0.686863 | false |
venicegeo/eventkit-cloud | eventkit_cloud/utils/stats/geomutils.py | 1 | 3906 | from mapproxy import grid as mapproxy_grid
from eventkit_cloud.tasks.models import ExportRun
import logging
import json
import math
logger = logging.getLogger(__name__)
_dbg_geom_cache_misses = 0
def _create_cache_geom_entry(job):
"""
Constructs a geometry cache entry
:param job: job contains the geometry
"""
orm_geom = job.the_geom
geojson = json.loads(orm_geom.json)
bbox = orm_geom.extent
cache_entry = {
"bbox": bbox,
"bbox_area": get_area_bbox(bbox),
"geometry": geojson,
"area": get_area_geojson(geojson),
}
return cache_entry
def lookup_cache_geometry(run, geom_cache):
"""
Cache area information to avoid repeated and expensive database lookups to Job when requesting
area for ExportTasks, DataProviderTasks, or ExportRuns
:param run: A run
:param geom_cache: Object holding cached values, lookup by run.id
:return:
"""
cache_entry = geom_cache.get(run.id)
if not cache_entry:
global _dbg_geom_cache_misses
_dbg_geom_cache_misses += 1
# Important that we only touch 'job' on cache miss
cache_entry = _create_cache_geom_entry(run.job)
geom_cache[run.id] = cache_entry
return cache_entry
def get_area_geojson(geojson, earth_r=6371):
"""
Given a GeoJSON string or object, return an approximation of its geodesic area in km².
    Supports Polygon and MultiPolygon geometries; only the exterior ring of
    each polygon is used (holes are ignored).
Based on Chamberlain and Duquette's algorithm: https://trs.jpl.nasa.gov/bitstream/handle/2014/41271/07-0286.pdf
:param geojson: GeoJSON selection area
:param earth_r: Earth radius in km
:return: area of geojson ring in square kilometers
"""
def rad(d):
return math.pi * d / 180
if isinstance(geojson, str):
geojson = json.loads(geojson)
    if "geometry" in geojson:
geojson = geojson["geometry"]
geom_type = geojson["type"].lower()
if geom_type == "polygon":
polys = [geojson["coordinates"]]
elif geom_type == "multipolygon":
polys = geojson["coordinates"]
else:
        raise RuntimeError("Invalid geometry type: %s" % geom_type)
a = 0
for poly in polys:
ring = poly[0]
if len(ring) < 4:
continue
        ring.append(ring[-2])  # convenient for circular indexing (mutates the input ring)
for i in range(len(ring) - 2):
a += (rad(ring[i + 1][0]) - rad(ring[i - 1][0])) * math.sin(rad(ring[i][1]))
area = abs(a * (earth_r ** 2) / 2)
return area
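# Usage sketch (illustrative polygon, not from the real codebase):
#     square = {"type": "Polygon",
#               "coordinates": [[[0, 0], [1, 0], [1, 1], [0, 1], [0, 0]]]}
#     get_area_geojson(square)  # ~12364 km**2 for a 1x1 degree cell at the equator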
def get_area_bbox(bbox):
"""
:param bbox: bounding box tuple (w, s, e, n)
:return: The area of the bounding box
"""
w, s, e, n = bbox
return get_area_geojson({"type": "Polygon", "coordinates": [[[w, s], [e, s], [e, n], [w, n], [w, s]]]})
def get_bbox_intersect(one, two):
"""
Finds the intersection of two bounding boxes in the same SRS
:param one: The first bbox tuple (w, s, e, n)
:param two: The second bbox tuple (w, s, e, n)
:return: A bounding box tuple where one and two overlap, or None if there is no overlap
"""
a_x0, a_y0, a_x1, a_y1 = one
b_x0, b_y0, b_x1, b_y1 = two
if mapproxy_grid.bbox_intersects(one, two):
return max(a_x0, b_x0), max(a_y0, b_y0), min(a_x1, b_x1), min(a_y1, b_y1)
else:
return None
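# e.g. get_bbox_intersect((0, 0, 2, 2), (1, 1, 3, 3)) -> (1, 1, 2, 2)
#      get_bbox_intersect((0, 0, 1, 1), (2, 2, 3, 3)) -> None (no overlap)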
def prefetch_geometry_cache(geom_cache):
"""
Populates geom_cache with all geometries information from all Jobs indexed by ExportRun.id
:param geom_cache:
"""
for er in ExportRun.objects.select_related("job").only("id", "job__the_geom").all():
geom_cache[er.id] = _create_cache_geom_entry(er.job)
def get_estimate_cache_key(bbox, srs, min_zoom, max_zoom, slug):
estimate_tuple = (tuple(bbox), int(srs), int(min_zoom), int(max_zoom), str(slug))
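    # note: in Python 3, hash() of str values is salted per process unless
    # PYTHONHASHSEED is fixed, so this key is only stable within one process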
hash_val = hash(estimate_tuple)
return str(hash_val)
| bsd-3-clause | -7,206,371,908,155,125,000 | 29.507813 | 115 | 0.626376 | false |
jmarshallnz/xbmc | tools/EventClients/lib/python/ps3/keymaps.py | 245 | 2329 | # -*- coding: utf-8 -*-
# Copyright (C) 2008-2013 Team XBMC
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# PS3 Remote and Controller Keymaps
keymap_remote = {
"16": 'power' ,#EJECT
"64": None ,#AUDIO
"65": None ,#ANGLE
"63": 'subtitle' ,#SUBTITLE
"0f": None ,#CLEAR
"28": None ,#TIME
"00": 'one' ,#1
"01": 'two' ,#2
"02": 'three' ,#3
"03": 'four' ,#4
"04": 'five' ,#5
"05": 'six' ,#6
"06": 'seven' ,#7
"07": 'eight' ,#8
"08": 'nine' ,#9
"09": 'zero' ,#0
"81": 'mytv' ,#RED
"82": 'mymusic' ,#GREEN
"80": 'mypictures' ,#BLUE
"83": 'myvideo' ,#YELLOW
"70": 'display' ,#DISPLAY
"1a": None ,#TOP MENU
"40": 'menu' ,#POP UP/MENU
"0e": None ,#RETURN
"5c": 'menu' ,#OPTIONS/TRIANGLE
"5d": 'back' ,#BACK/CIRCLE
"5e": 'info' ,#X
"5f": 'title' ,#VIEW/SQUARE
"54": 'up' ,#UP
"55": 'right' ,#RIGHT
"56": 'down' ,#DOWN
"57": 'left' ,#LEFT
"0b": 'select' ,#ENTER
"5a": 'volumeplus' ,#L1
"58": 'volumeminus' ,#L2
"51": 'Mute' ,#L3
"5b": 'pageplus' ,#R1
"59": 'pageminus' ,#R2
"52": None ,#R3
"43": None ,#PLAYSTATION
"50": None ,#SELECT
"53": None ,#START
"33": 'reverse' ,#<-SCAN
"34": 'forward' ,# SCAN->
"30": 'skipminus' ,#PREV
"31": 'skipplus' ,#NEXT
"60": None ,#<-SLOW/STEP
"61": None ,# SLOW/STEP->
"32": 'play' ,#PLAY
"38": 'stop' ,#STOP
"39": 'pause' ,#PAUSE
}
| gpl-2.0 | -3,101,409,142,541,438,500 | 27.753086 | 75 | 0.519536 | false |
WeblateOrg/weblate | weblate/trans/stats.py | 2 | 1465 | #
# Copyright © 2012 - 2021 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
def get_project_stats(project):
"""Return stats for project."""
return [
{
"language": str(tup.language),
"code": tup.language.code,
"total": tup.all,
"translated": tup.translated,
"translated_percent": tup.translated_percent,
"total_words": tup.all_words,
"translated_words": tup.translated_words,
"translated_words_percent": tup.translated_words_percent,
"total_chars": tup.all_chars,
"translated_chars": tup.translated_chars,
"translated_chars_percent": tup.translated_chars_percent,
}
for tup in project.stats.get_language_stats()
]
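# Each list entry describes one language; illustrative shape:
#     {"language": "Czech", "code": "cs", "total": 100, "translated": 40,
#      "translated_percent": 40.0, ...}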
| gpl-3.0 | 4,287,441,481,533,199,400 | 37.473684 | 72 | 0.662791 | false |
ravi-sharma/python-api-library | src/kayako/tests/test_api.py | 3 | 6831 | # -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2011, Evan Leis
#
# Distributed under the terms of the Lesser GNU General Public License (LGPL)
#-----------------------------------------------------------------------------
'''
Created on May 5, 2011
@author: evan
'''
from kayako.tests import KayakoAPITest
class TestKayakoAPI(KayakoAPITest):
def test_init_without_url(self):
from kayako.api import KayakoAPI
from kayako.exception import KayakoInitializationError
self.assertRaises(KayakoInitializationError, KayakoAPI, None, 'key', 'secret')
def test_init_without_key(self):
from kayako.api import KayakoAPI
from kayako.exception import KayakoInitializationError
self.assertRaises(KayakoInitializationError, KayakoAPI, 'url', None, 'secret')
def test_init_without_secret(self):
from kayako.api import KayakoAPI
from kayako.exception import KayakoInitializationError
self.assertRaises(KayakoInitializationError, KayakoAPI, 'url', 'key', None)
def test__sanitize_paramter_list(self):
api = self.api
self.assertEqual(api._sanitize_parameter(['a', 'b', '', None, 'c']), ['a', 'b', 'c'])
def test__sanitize_paramter_number(self):
api = self.api
self.assertEqual(api._sanitize_parameter(123), '123')
def test__sanitize_paramter_none(self):
api = self.api
self.assertEqual(api._sanitize_parameter(None), '')
def test__post_data_none(self):
api = self.api
sanitized = api._sanitize_parameters(data=None)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=')
def test__post_data_array(self):
api = self.api
sanitized = api._sanitize_parameters(data=['abc', '', None, '123'])
results = api._post_data(**sanitized)
self.assertEqual(results, 'data[]=abc&data[]=123')
def test__post_data_empty_array(self):
api = self.api
sanitized = api._sanitize_parameters(data=['', None])
results = api._post_data(**sanitized)
self.assertEqual(results, 'data[]=')
def test__post_data_date(self):
import time
from datetime import datetime
api = self.api
date = datetime(2011, 5, 11, 12, 42, 46, 977079)
timestamp = int(time.mktime(date.timetuple()))
sanitized = api._sanitize_parameters(date=date)
results = api._post_data(**sanitized)
self.assertEqual(results, 'date=%s' % timestamp)
def test__post_data_FOREVER(self):
from kayako.core.lib import FOREVER
api = self.api
sanitized = api._sanitize_parameters(date=FOREVER)
results = api._post_data(**sanitized)
self.assertEqual(results, 'date=0')
def test__post_data_int(self):
api = self.api
sanitized = api._sanitize_parameters(data=123)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=123')
def test__post_data_str(self):
api = self.api
sanitized = api._sanitize_parameters(data='abc')
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=abc')
def test__post_data_true(self):
api = self.api
sanitized = api._sanitize_parameters(data=True)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=1')
def test__post_data_false(self):
api = self.api
sanitized = api._sanitize_parameters(data=False)
results = api._post_data(**sanitized)
self.assertEqual(results, 'data=0')
def test_signature(self):
''' Test the signature generation process '''
import hmac
import base64
import urllib
import hashlib
secretkey = "secretkey"
# Generates a random string of ten digits
salt = '1234567890'
# Computes the signature by hashing the salt with the secret key as the key
signature = hmac.new(secretkey, msg=salt, digestmod=hashlib.sha256).digest()
# base64 encode...
encoded_signature = base64.b64encode(signature)
# urlencode...
url_encoded_signature = urllib.quote(encoded_signature)
assert url_encoded_signature == 'VKjt8M54liY6xq1UuhUYH5BFp1RUqHekqytgLPrVEA0%3D'
def test_get(self):
r = self.api._request('/Core/TestAPI', 'GET')
assert r.read()
assert r.getcode() == 200, r.getcode()
r = self.api._request('/Core/TestAPI', 'GET', test='just a test')
assert r.read()
assert r.getcode() == 200, r.getcode()
r = self.api._request('/Core/TestAPI/1', 'GET')
assert r.read()
assert r.getcode() == 200, r.getcode()
r = self.api._request('/Core/TestAPI/1', 'GET', test='just a test')
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_post(self):
r = self.api._request('/Core/TestAPI', 'POST')
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_put(self):
r = self.api._request('/Core/TestAPI/1', 'PUT', x=234)
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_delete(self):
r = self.api._request('/Core/TestAPI/1', 'DELETE')
assert r.read()
assert r.getcode() == 200, r.getcode()
def test_get_department(self):
from kayako.objects import Department
d = self.api.get(Department, 1)
self.assertEqual(d.id, 1)
def test_create_department(self):
from kayako.core.lib import UnsetParameter
from kayako.objects import Department
d = self.api.create(Department)
self.assertEqual(d.id, UnsetParameter)
    def test_create_with_kwargs(self):
from kayako.objects import Department
d = self.api.create(Department, title='test_dept')
assert d.title == 'test_dept'
    def test_create_with_bad_kwargs(self):
from kayako.objects import Department
self.assertRaises(TypeError, self.api.create, Department, bad_kwarg='bad_kwarg')
def test_invalid_url(self):
from kayako import KayakoAPI
from kayako.exception import KayakoRequestError
api = KayakoAPI('http://this.is.just.a.test.1293847987flsjclksjckn32.com', 'api_key', 'secret_key')
raised = False
try:
api._request('testing testing', 'GET')
except KayakoRequestError, error:
self.log(error)
raised = True
assert raised
def test_ticket_search(self):
assert isinstance(self.api.ticket_search('testonly', ticketid=True), list)
def test_ticket_search_full(self):
assert isinstance(self.api.ticket_search_full('testonly'), list)
| bsd-2-clause | -4,984,744,415,136,577,000 | 34.393782 | 107 | 0.609281 | false |
kylepjohnson/cltk | tests/test_morphology.py | 4 | 28253 | """Test cltk.morphology."""
import unittest
from cltk.core.exceptions import CLTKException
from cltk.morphology.lat import CollatinusDecliner
class TestMorphology(unittest.TestCase):
def test_collatinus_decline(self):
""" Ensure lemmatization works well """
decliner = CollatinusDecliner()
def sort_result(result):
return {key: sorted(val) for key, val in result.items()}
self.maxDiff = None
self.assertEqual(
decliner.decline("via", collatinus_dict=True),
{
1: ["via"],
2: ["via"],
3: ["viam"],
4: ["viae"],
5: ["viae"],
6: ["via"],
7: ["viae"],
8: ["viae"],
9: ["vias"],
10: ["viarum"],
11: ["viis"],
12: ["viis"],
},
"Declination of via should be right",
)
self.assertEqual(
decliner.decline("doctus", collatinus_dict=True),
{
13: ["doctus"],
14: ["docte"],
15: ["doctum"],
16: ["docti"],
17: ["docto"],
18: ["docto"],
19: ["docti"],
20: ["docti"],
21: ["doctos"],
22: ["doctorum"],
23: ["doctis"],
24: ["doctis"],
25: ["docta"],
26: ["docta"],
27: ["doctam"],
28: ["doctae"],
29: ["doctae"],
30: ["docta"],
31: ["doctae"],
32: ["doctae"],
33: ["doctas"],
34: ["doctarum"],
35: ["doctis"],
36: ["doctis"],
37: ["doctum"],
38: ["doctum"],
39: ["doctum"],
40: ["docti"],
41: ["docto"],
42: ["docto"],
43: ["docta"],
44: ["docta"],
45: ["docta"],
46: ["doctorum"],
47: ["doctis"],
48: ["doctis"],
49: ["doctior"],
50: ["doctior"],
51: ["doctiorem"],
52: ["doctioris"],
53: ["doctiori"],
54: ["doctiore"],
55: ["doctiores"],
56: ["doctiores"],
57: ["doctiores"],
58: ["doctiorum"],
59: ["doctioribus"],
60: ["doctioribus"],
61: ["doctior"],
62: ["doctior"],
63: ["doctiorem"],
64: ["doctioris"],
65: ["doctiori"],
66: ["doctiore"],
67: ["doctiores"],
68: ["doctiores"],
69: ["doctiores"],
70: ["doctiorum"],
71: ["doctioribus"],
72: ["doctioribus"],
73: ["doctius"],
74: ["doctius"],
75: ["doctius"],
76: ["doctioris"],
77: ["doctiori"],
78: ["doctiore"],
79: ["doctiora"],
80: ["doctiora"],
81: ["doctiora"],
82: ["doctiorum"],
83: ["doctioribus"],
84: ["doctioribus"],
85: ["doctissimus"],
86: ["doctissime"],
87: ["doctissimum"],
88: ["doctissimi"],
89: ["doctissimo"],
90: ["doctissimo"],
91: ["doctissimi"],
92: ["doctissimi"],
93: ["doctissimos"],
94: ["doctissimorum"],
95: ["doctissimis"],
96: ["doctissimis"],
97: ["doctissima"],
98: ["doctissima"],
99: ["doctissimam"],
100: ["doctissimae"],
101: ["doctissimae"],
102: ["doctissima"],
103: ["doctissimae"],
104: ["doctissimae"],
105: ["doctissimas"],
106: ["doctissimarum"],
107: ["doctissimis"],
108: ["doctissimis"],
109: ["doctissimum"],
110: ["doctissimum"],
111: ["doctissimum"],
112: ["doctissimi"],
113: ["doctissimo"],
114: ["doctissimo"],
115: ["doctissima"],
116: ["doctissima"],
117: ["doctissima"],
118: ["doctissimorum"],
119: ["doctissimis"],
120: ["doctissimis"],
},
"Doctus has three radicals and lots of forms",
)
self.assertEqual(
sort_result(decliner.decline("verbex", collatinus_dict=True)),
{
1: ["berbex", "verbex", "vervex"],
2: ["berbex", "verbex", "vervex"],
3: ["berbecem", "verbecem", "vervecem"],
4: ["berbecis", "verbecis", "vervecis"],
5: ["berbeci", "verbeci", "verveci"],
6: ["berbece", "verbece", "vervece"],
7: ["berbeces", "verbeces", "verveces"],
8: ["berbeces", "verbeces", "verveces"],
9: ["berbeces", "verbeces", "verveces"],
10: ["berbecum", "verbecum", "vervecum"],
11: ["berbecibus", "verbecibus", "vervecibus"],
12: ["berbecibus", "verbecibus", "vervecibus"],
}, # Missing 12 ?
"Verbex has two different roots : checking they are taken into account",
)
self.assertEqual(
sort_result(decliner.decline("vendo", collatinus_dict=True)),
{
121: ["vendo"],
122: ["vendis"],
123: ["vendit"],
124: ["vendimus"],
125: ["venditis"],
126: ["vendunt"],
127: ["vendebam"],
128: ["vendebas"],
129: ["vendebat"],
130: ["vendebamus"],
131: ["vendebatis"],
132: ["vendebant"],
133: ["vendam"],
134: ["vendes"],
135: ["vendet"],
136: ["vendemus"],
137: ["vendetis"],
138: ["vendent"],
139: ["vendavi", "vendidi"],
140: ["vendavisti", "vendidisti"],
141: ["vendavit", "vendidit"],
142: ["vendavimus", "vendidimus"],
143: ["vendavistis", "vendidistis"],
144: ["vendavere", "vendaverunt", "vendidere", "vendiderunt"],
145: ["vendaveram", "vendideram"],
146: ["vendaveras", "vendideras"],
147: ["vendaverat", "vendiderat"],
148: ["vendaveramus", "vendideramus"],
149: ["vendaveratis", "vendideratis"],
150: ["vendaverant", "vendiderant"],
151: ["vendavero", "vendidero"],
152: ["vendaveris", "vendideris"],
153: ["vendaverit", "vendiderit"],
154: ["vendaverimus", "vendiderimus"],
155: ["vendaveritis", "vendideritis"],
156: ["vendaverint", "vendiderint"],
157: ["vendam"],
158: ["vendas"],
159: ["vendat"],
160: ["vendamus"],
161: ["vendatis"],
162: ["vendant"],
163: ["venderem"],
164: ["venderes"],
165: ["venderet"],
166: ["venderemus"],
167: ["venderetis"],
168: ["venderent"],
169: ["vendaverim", "vendiderim"],
170: ["vendaveris", "vendideris"],
171: ["vendaverit", "vendiderit"],
172: ["vendaverimus", "vendiderimus"],
173: ["vendaveritis", "vendideritis"],
174: ["vendaverint", "vendiderint"],
175: ["vendavissem", "vendidissem"],
176: ["vendavisses", "vendidisses"],
177: ["vendavisset", "vendidisset"],
178: ["vendavissemus", "vendidissemus"],
179: ["vendavissetis", "vendidissetis"],
180: ["vendavissent", "vendidissent"],
181: ["vende"],
182: ["vendite"],
183: ["vendito"],
184: ["vendito"],
185: ["venditote"],
186: ["vendunto"],
187: ["vendere"],
188: ["vendavisse", "vendidisse"],
189: ["vendens"],
190: ["vendens"],
191: ["vendentem"],
192: ["vendentis"],
193: ["vendenti"],
194: ["vendente"],
195: ["vendentes"],
196: ["vendentes"],
197: ["vendentes"],
198: ["vendentium", "vendentum"],
199: ["vendentibus"],
200: ["vendentibus"],
201: ["vendens"],
202: ["vendens"],
203: ["vendentem"],
204: ["vendentis"],
205: ["vendenti"],
206: ["vendente"],
207: ["vendentes"],
208: ["vendentes"],
209: ["vendentes"],
210: ["vendentium", "vendentum"],
211: ["vendentibus"],
212: ["vendentibus"],
213: ["vendens"],
214: ["vendens"],
215: ["vendens"],
216: ["vendentis"],
217: ["vendenti"],
218: ["vendente"],
219: ["vendentia"],
220: ["vendentia"],
221: ["vendentia"],
222: ["vendentium", "vendentum"],
223: ["vendentibus"],
224: ["vendentibus"],
225: ["vendaturus", "venditurus"],
226: ["vendature", "venditure"],
227: ["vendaturum", "venditurum"],
228: ["vendaturi", "vendituri"],
229: ["vendaturo", "vendituro"],
230: ["vendaturo", "vendituro"],
231: ["vendaturi", "vendituri"],
232: ["vendaturi", "vendituri"],
233: ["vendaturos", "vendituros"],
234: ["vendaturorum", "venditurorum"],
235: ["vendaturis", "vendituris"],
236: ["vendaturis", "vendituris"],
237: ["vendatura", "venditura"],
238: ["vendatura", "venditura"],
239: ["vendaturam", "vendituram"],
240: ["vendaturae", "venditurae"],
241: ["vendaturae", "venditurae"],
242: ["vendatura", "venditura"],
243: ["vendaturae", "venditurae"],
244: ["vendaturae", "venditurae"],
245: ["vendaturas", "vendituras"],
246: ["vendaturarum", "venditurarum"],
247: ["vendaturis", "vendituris"],
248: ["vendaturis", "vendituris"],
249: ["vendaturum", "venditurum"],
250: ["vendaturum", "venditurum"],
251: ["vendaturum", "venditurum"],
252: ["vendaturi", "vendituri"],
253: ["vendaturo", "vendituro"],
254: ["vendaturo", "vendituro"],
255: ["vendatura", "venditura"],
256: ["vendatura", "venditura"],
257: ["vendatura", "venditura"],
258: ["vendaturorum", "venditurorum"],
259: ["vendaturis", "vendituris"],
260: ["vendaturis", "vendituris"],
261: ["vendendum"],
262: ["vendendi"],
263: ["vendendo"],
264: ["vendendo"],
265: ["vendatum", "venditum"],
266: ["vendatu", "venditu"],
267: ["vendor"],
268: ["vendere", "venderis"],
269: ["venditur"],
270: ["vendimur"],
271: ["vendimini"],
272: ["venduntur"],
273: ["vendebar"],
274: ["vendebare", "vendebaris"],
275: ["vendebatur"],
276: ["vendebamur"],
277: ["vendebamini"],
278: ["vendebantur"],
279: ["vendar"],
280: ["vendere", "venderis"],
281: ["vendetur"],
282: ["vendemur"],
283: ["vendemini"],
284: ["vendentur"],
285: ["vendar"],
286: ["vendare", "vendaris"],
287: ["vendatur"],
288: ["vendamur"],
289: ["vendamini"],
290: ["vendantur"],
291: ["venderer"],
292: ["venderere", "vendereris"],
293: ["venderetur"],
294: ["venderemur"],
295: ["venderemini"],
296: ["venderentur"],
297: ["vendere"],
298: ["vendimini"],
299: ["venditor"],
300: ["venditor"],
301: ["venduntor"],
302: ["vendi"],
303: ["vendatus", "venditus"],
304: ["vendate", "vendite"],
305: ["vendatum", "venditum"],
306: ["vendati", "venditi"],
307: ["vendato", "vendito"],
308: ["vendato", "vendito"],
309: ["vendati", "venditi"],
310: ["vendati", "venditi"],
311: ["vendatos", "venditos"],
312: ["vendatorum", "venditorum"],
313: ["vendatis", "venditis"],
314: ["vendatis", "venditis"],
315: ["vendata", "vendita"],
316: ["vendata", "vendita"],
317: ["vendatam", "venditam"],
318: ["vendatae", "venditae"],
319: ["vendatae", "venditae"],
320: ["vendata", "vendita"],
321: ["vendatae", "venditae"],
322: ["vendatae", "venditae"],
323: ["vendatas", "venditas"],
324: ["vendatarum", "venditarum"],
325: ["vendatis", "venditis"],
326: ["vendatis", "venditis"],
327: ["vendatum", "venditum"],
328: ["vendatum", "venditum"],
329: ["vendatum", "venditum"],
330: ["vendati", "venditi"],
331: ["vendato", "vendito"],
332: ["vendato", "vendito"],
333: ["vendata", "vendita"],
334: ["vendata", "vendita"],
335: ["vendata", "vendita"],
336: ["vendatorum", "venditorum"],
337: ["vendatis", "venditis"],
338: ["vendatis", "venditis"],
339: ["vendendus"],
340: ["vendende"],
341: ["vendendum"],
342: ["vendendi"],
343: ["vendendo"],
344: ["vendendo"],
345: ["vendendi"],
346: ["vendendi"],
347: ["vendendos"],
348: ["vendendorum"],
349: ["vendendis"],
350: ["vendendis"],
351: ["vendenda"],
352: ["vendenda"],
353: ["vendendam"],
354: ["vendendae"],
355: ["vendendae"],
356: ["vendenda"],
357: ["vendendae"],
358: ["vendendae"],
359: ["vendendas"],
360: ["vendendarum"],
361: ["vendendis"],
362: ["vendendis"],
363: ["vendendum"],
364: ["vendendum"],
365: ["vendendum"],
366: ["vendendi"],
367: ["vendendo"],
368: ["vendendo"],
369: ["vendenda"],
370: ["vendenda"],
371: ["vendenda"],
372: ["vendendorum"],
373: ["vendendis"],
374: ["vendendis"],
},
"Check verb vendo declines well",
)
self.assertEqual(
decliner.decline("poesis", collatinus_dict=True),
{
1: ["poesis"],
2: ["poesis"],
3: ["poesem", "poesin", "poesim"],
4: ["poesis", "poeseos"],
5: ["poesi"],
6: ["poese"],
7: ["poeses"],
8: ["poeses"],
9: ["poeses", "poesis"],
10: ["poesium"],
11: ["poesibus"],
12: ["poesibus"],
},
"Duplicity of forms should be accepted",
)
self.assertEqual(
sort_result(decliner.decline("hic", collatinus_dict=True)),
{
13: ["hic", "hice", "hicine"],
15: ["hunc"],
16: ["hujus", "hujusce"],
17: ["huic"],
18: ["hoc", "hocine"],
19: ["hi"],
21: ["hos", "hosce"],
22: ["horum"],
23: ["his", "hisce"],
24: ["his", "hisce"],
25: ["haec", "haeccine", "haece", "haecine"],
27: ["hanc"],
28: ["hujus", "hujusce"],
29: ["huic"],
30: ["hac"],
31: ["hae"],
33: ["has", "hasce"],
34: ["harum"],
35: ["his", "hisce"],
36: ["his", "hisce"],
37: ["hoc", "hocine"],
39: ["hoc", "hocine"],
40: ["hujus", "hujusce"],
41: ["huic"],
42: ["hoc", "hocine"],
43: ["haec", "haeccine", "haecine"],
45: ["haec", "haeccine", "haecine"],
46: ["horum"],
47: ["his", "hisce"],
48: ["his", "hisce"],
},
"Check that suffixes are well added",
)
self.assertEqual(
sort_result(decliner.decline("quicumque", collatinus_dict=True)),
{
13: ["quicumque", "quicunque"],
15: ["quemcumque", "quemcunque"],
16: ["cujuscumque", "cujuscunque", "quojuscumque", "quojuscunque"],
17: ["cuicumque", "cuicunque", "quoicumque", "quoicunque"],
18: ["quocumque", "quocunque"],
19: ["quicumque", "quicunque"],
21: ["quoscumque", "quoscunque"],
22: ["quorumcumque", "quorumcunque"],
23: ["quibuscumque", "quibuscunque"],
24: ["quibuscumque", "quibuscunque"],
25: ["quaecumque", "quaecunque"],
27: ["quamcumque", "quamcunque"],
28: ["cujuscumque", "cujuscunque", "quojuscumque", "quojuscunque"],
29: ["cuicumque", "cuicunque", "quoicumque", "quoicunque"],
30: ["quacumque", "quacunque"],
31: ["quaecumque", "quaecunque"],
33: ["quascumque", "quascunque"],
34: ["quarumcumque", "quarumcunque"],
35: ["quibuscumque", "quibuscunque"],
36: ["quibuscumque", "quibuscunque"],
37: ["quodcumque", "quodcunque"],
39: ["quodcumque", "quodcunque"],
40: ["cujuscumque", "cujuscunque", "quojuscumque", "quojuscunque"],
41: ["cuicumque", "cuicunque", "quoicumque", "quoicunque"],
42: ["quocumque", "quocunque"],
43: ["quaecumque", "quaecunque"],
45: ["quaecumque", "quaecunque"],
46: ["quorumcumque", "quorumcunque"],
47: ["quibuscumque", "quibuscunque"],
48: ["quibuscumque", "quibuscunque"],
},
"Constant suffix should be added",
)
self.assertEqual(
decliner.decline("plerique", collatinus_dict=True),
{
19: ["plerique"],
20: ["plerique"],
21: ["plerosque"],
22: ["plerorumque"],
23: ["plerisque"],
24: ["plerisque"],
31: ["pleraeque"],
32: ["pleraeque"],
33: ["plerasque"],
34: ["plerarumque"],
35: ["plerisque"],
36: ["plerisque"],
43: ["pleraque"],
44: ["pleraque"],
45: ["pleraque"],
46: ["plerorumque"],
47: ["plerisque"],
48: ["plerisque"],
},
"Checking abs is applied correctly",
)
self.assertEqual(
decliner.decline("edo", collatinus_dict=True)[122]
+ decliner.decline("edo", collatinus_dict=True)[163],
["edis", "es"] + ["ederem", "essem"],
"Alternative desisences should be added, even with different root",
)
self.assertEqual(
decliner.decline("aggero2")[0],
("aggero", "v1spia---"),
"Lemma with disambiguation indexes should not fail their declension [aggero and not aggeroo]",
)
def test_collatinus_flatten_decline(self):
""" Ensure that flattening decline result is consistant"""
decliner = CollatinusDecliner()
self.assertEqual(
decliner.decline("via", flatten=True),
[
"via",
"via",
"viam",
"viae",
"viae",
"via",
"viae",
"viae",
"vias",
"viarum",
"viis",
"viis",
],
"Declination of via should be right",
)
self.assertEqual(
decliner.decline("poesis", flatten=True),
[
"poesis",
"poesis",
"poesem",
"poesin",
"poesim",
"poesis",
"poeseos",
"poesi",
"poese",
"poeses",
"poeses",
"poeses",
"poesis",
"poesium",
"poesibus",
"poesibus",
],
"Duplicity of forms should be accepted",
)
def test_collatinus_POS_decline(self):
""" Ensure that POS decline result is consistant"""
decliner = CollatinusDecliner()
self.assertEqual(
decliner.decline("via"),
[
("via", "--s----n-"),
("via", "--s----v-"),
("viam", "--s----a-"),
("viae", "--s----g-"),
("viae", "--s----d-"),
("via", "--s----b-"),
("viae", "--p----n-"),
("viae", "--p----v-"),
("vias", "--p----a-"),
("viarum", "--p----g-"),
("viis", "--p----d-"),
("viis", "--p----b-"),
],
"Declination of via should be right",
)
self.assertEqual(
decliner.decline("poesis"),
[
("poesis", "--s----n-"),
("poesis", "--s----v-"),
("poesem", "--s----a-"),
("poesin", "--s----a-"),
("poesim", "--s----a-"),
("poesis", "--s----g-"),
("poeseos", "--s----g-"),
("poesi", "--s----d-"),
("poese", "--s----b-"),
("poeses", "--p----n-"),
("poeses", "--p----v-"),
("poeses", "--p----a-"),
("poesis", "--p----a-"),
("poesium", "--p----g-"),
("poesibus", "--p----d-"),
("poesibus", "--p----b-"),
],
"Duplicity of forms should be accepted",
)
def test_collatinus_multiple_radicals(self):
coll = CollatinusDecliner()
self.assertEqual(
sorted(coll.decline("sandaraca")[:3], key=lambda x: x[0]),
[
("sandaraca", "--s----n-"),
("sandaracha", "--s----n-"),
("sanderaca", "--s----n-"),
],
)
jajunitas = [form for form, _ in coll.decline("jajunitas")]
self.assertIn("jajunitas", jajunitas)
self.assertIn("jejunitas", jajunitas)
self.assertIn("jajunitatem", jajunitas)
self.assertIn("jejunitatem", jajunitas)
def test_collatinus_raise(self):
""" Unknown lemma should raise exception """
def decline():
decliner = CollatinusDecliner()
decliner.decline("this lemma will never exist")
self.assertRaises(CLTKException, decline)
| mit | -5,816,317,965,406,286,000 | 40.358321 | 110 | 0.342017 | false |
SuperTango/TangoLogger | Uploader/requests/requests/compat.py | 1039 | 1469 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
try:
import simplejson as json
except (ImportError, SyntaxError):
# simplejson does not support Python 3.2, it throws a SyntaxError
# because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
| gpl-3.0 | -6,216,641,335,774,923,000 | 22.693548 | 132 | 0.676651 | false |
nilqed/root | tutorials/pyroot/first.py | 28 | 1524 | from ROOT import TCanvas, TF1, TPaveLabel, TPad, TText
from ROOT import gROOT
nut = TCanvas( 'nut', 'FirstSession', 100, 10, 700, 900 )
nut.Range( 0, 0, 20, 24 )
nut.SetFillColor( 10 )
nut.SetBorderSize( 2 )
pl = TPaveLabel( 3, 22, 17, 23.7, 'My first PyROOT interactive session', 'br' )
pl.SetFillColor( 18 )
pl.Draw()
t = TText( 0, 0, 'a' )
t.SetTextFont( 62 )
t.SetTextSize( 0.025 )
t.SetTextAlign( 12 )
t.DrawText( 2, 20.3, 'PyROOT provides ROOT bindings for Python, a powerful interpreter.' )
t.DrawText( 2, 19.3, 'Blocks of lines can be entered typographically.' )
t.DrawText( 2, 18.3, 'Previous typed lines can be recalled.' )
t.SetTextFont( 72 )
t.SetTextSize( 0.026 )
t.DrawText( 3, 17, r'>>> x, y = 5, 7' )
t.DrawText( 3, 16, r'>>> import math; x*math.sqrt(y)' )
t.DrawText( 3, 14, r'>>> for i in range(2,7): print "sqrt(%d) = %f" % (i,math.sqrt(i))' )
t.DrawText( 3, 10, r'>>> import ROOT; f1 = ROOT.TF1( "f1", "sin(x)/x", 0, 10 )' )
t.DrawText( 3, 9, r'>>> f1.Draw()' )
t.SetTextFont( 81 )
t.SetTextSize( 0.018 )
t.DrawText( 4, 15, '13.228756555322953' )
t.DrawText( 4, 13.3, 'sqrt(2) = 1.414214' )
t.DrawText( 4, 12.7, 'sqrt(3) = 1.732051' )
t.DrawText( 4, 12.1, 'sqrt(4) = 2.000000' )
t.DrawText( 4, 11.5, 'sqrt(5) = 2.236068' )
t.DrawText( 4, 10.9, 'sqrt(6) = 2.449490' )
pad = TPad( 'pad', 'pad', .2, .05, .8, .35 )
pad.SetFillColor( 42 )
pad.SetFrameFillColor( 33 )
pad.SetBorderSize( 10 )
pad.Draw()
pad.cd()
pad.SetGrid()
f1 = TF1( 'f1', 'sin(x)/x', 0, 10 )
f1.Draw()
nut.cd()
nut.Update()
| lgpl-2.1 | 385,312,365,967,908,160 | 30.102041 | 90 | 0.619423 | false |
auready/django | django/contrib/auth/context_processors.py | 3 | 1824 | # PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict:
def __init__(self, user, app_label):
self.user, self.app_label = user, app_label
def __repr__(self):
return str(self.user.get_all_permissions())
def __getitem__(self, perm_name):
return self.user.has_perm("%s.%s" % (self.app_label, perm_name))
def __iter__(self):
# To fix 'item in perms.someapp' and __getitem__ interaction we need to
# define __iter__. See #18979 for details.
raise TypeError("PermLookupDict is not iterable.")
def __bool__(self):
return self.user.has_module_perms(self.app_label)
class PermWrapper:
def __init__(self, user):
self.user = user
def __getitem__(self, app_label):
return PermLookupDict(self.user, app_label)
def __iter__(self):
# I am large, I contain multitudes.
raise TypeError("PermWrapper is not iterable.")
def __contains__(self, perm_name):
"""
Lookup by "someapp" or "someapp.someperm" in perms.
"""
if '.' not in perm_name:
# The name refers to module.
return bool(self[perm_name])
app_label, perm_name = perm_name.split('.', 1)
return self[app_label][perm_name]
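# Template usage sketch, with {'perms': PermWrapper(user)} in the context
# (app/permission names are placeholders):
#     {% if perms.someapp %}               -> PermLookupDict.__bool__
#     {% if perms.someapp.someperm %}      -> PermLookupDict.__getitem__
#     {% if 'someapp.someperm' in perms %} -> PermWrapper.__contains__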
def auth(request):
"""
Returns context variables required by apps that use Django's authentication
system.
If there is no 'user' attribute in the request, uses AnonymousUser (from
django.contrib.auth).
"""
if hasattr(request, 'user'):
user = request.user
else:
from django.contrib.auth.models import AnonymousUser
user = AnonymousUser()
return {
'user': user,
'perms': PermWrapper(user),
}
| bsd-3-clause | -8,592,740,546,633,336,000 | 27.952381 | 79 | 0.609649 | false |
ZhangXinNan/tensorflow | tensorflow/contrib/tpu/python/tpu/tpu_system_metadata.py | 13 | 5734 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===================================================================
"""TPU system metadata and associated tooling."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
from tensorflow.contrib.tpu.python.tpu import tpu
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session as session_lib
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.platform import tf_logging as logging
_PINGING_MASTER_TIMEOUT_IN_MS = 60 * 1000 # 1 min
_RETRY_TIMES = 120
_INITIAL_TPU_SYSTEM_TIMEOUT_IN_MS = 300 * 1000 # 5 mins
_TPU_DEVICE_REG = re.compile(r'.*task:(\d+)/.*device:TPU:(\d+)$')
# _TPUSystemMetadata is used by TPUEstimator to hold TPU configuration,
# including num_cores and num_hosts.
_TPUSystemMetadata = collections.namedtuple('_TPUSystemMetadata', [
'num_cores',
'num_hosts',
'num_of_cores_per_host',
'topology',
'devices',
])
def _query_tpu_system_metadata(master_address, cluster_def=None,
query_topology=False):
"""Automatically detects the TPU system metadata in the system."""
tpu_core_count = 0
devices = []
device_dict = collections.defaultdict(list)
retry_count = 1
while True:
logging.info('Querying Tensorflow master (%s) for TPU system metadata.',
master_address)
try:
with ops.Graph().as_default():
with session_lib.Session(
master_address,
config=get_session_config_with_timeout(
_PINGING_MASTER_TIMEOUT_IN_MS,
cluster_def)) as sess:
devices = sess.list_devices()
for device in devices:
match = _TPU_DEVICE_REG.match(device.name)
if match:
host_id = match.group(1)
core_id = match.group(2)
device_dict[host_id].append(core_id)
tpu_core_count += 1
break
except errors.DeadlineExceededError:
msg = ('Failed to connect to the Tensorflow master. The TPU worker may '
'not be ready (still scheduling) or the Tensorflow master address '
'is incorrect: got (%s).' %
(master_address))
# TODO(xiejw): For local or grpc master we might not need retry logic
# here.
if retry_count <= _RETRY_TIMES:
logging.warning('%s', msg)
logging.warning('Retrying (%d/%d).', retry_count, _RETRY_TIMES)
retry_count += 1
else:
raise ValueError(msg)
num_of_cores_per_host = 0
if tpu_core_count:
num_cores_per_host_set = set(
[len(core_ids) for core_ids in device_dict.values()])
if len(num_cores_per_host_set) != 1:
raise RuntimeError(
          'Number of TPU cores on each host is not the same. This should '
          'not happen. devices: {}'.format(devices))
num_of_cores_per_host = num_cores_per_host_set.pop()
topology = None
if query_topology:
if not tpu_core_count:
raise RuntimeError(
'Cannot find any TPU cores in the system (master address {}). '
'This usually means the master address is incorrect or the '
'TPU worker has some problems. Available devices: {}'.format(
master_address, devices))
topology = _obtain_topology(master_address, cluster_def)
metadata = _TPUSystemMetadata(
num_cores=tpu_core_count,
num_hosts=len(device_dict),
num_of_cores_per_host=num_of_cores_per_host,
topology=topology,
devices=devices)
if tpu_core_count:
logging.info('Found TPU system:')
logging.info('*** Num TPU Cores: %d', metadata.num_cores)
logging.info('*** Num TPU Workers: %d', metadata.num_hosts)
logging.info('*** Num TPU Cores Per Worker: %d',
metadata.num_of_cores_per_host)
for device in metadata.devices:
logging.info('*** Available Device: %s', device)
else:
logging.info('Failed to find TPU: %s', metadata)
return metadata
def _obtain_topology(master_address, cluster_def):
"""Obtains TPU fabric topology."""
try:
logging.info('Initializing TPU system (master: %s) to fetch topology '
'for model parallelism. This might take a while.',
master_address)
with ops.Graph().as_default():
session_config = get_session_config_with_timeout(
_INITIAL_TPU_SYSTEM_TIMEOUT_IN_MS, cluster_def)
with session_lib.Session(
master_address, config=session_config) as sess:
topology = sess.run(tpu.initialize_system())
return topology
except errors.DeadlineExceededError:
raise ValueError(
'Fail to initialize TPU system with master (%s). '
'Please double check the TPU system is functional.' % (
master_address))
def get_session_config_with_timeout(timeout_in_secs, cluster_def):
"""Returns a session given a timeout and a cluster configuration."""
config = config_pb2.ConfigProto(
operation_timeout_in_ms=timeout_in_secs, cluster_def=cluster_def)
return config
| apache-2.0 | -6,773,715,563,003,639,000 | 35.993548 | 80 | 0.648239 | false |
samueldotj/TeeRISC-Simulator | tests/configs/realview-switcheroo-full.py | 6 | 2411 | # Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
import switcheroo
root = LinuxArmFSSwitcheroo(
cpu_classes=(AtomicSimpleCPU, TimingSimpleCPU, DerivO3CPU)
).create_root()
# Setup a custom test method that uses the switcheroo tester that
# switches between CPU models.
run_test = switcheroo.run_test
| bsd-3-clause | 1,143,308,448,108,865,800 | 49.229167 | 72 | 0.793861 | false |
YihaoLu/statsmodels | statsmodels/tools/grouputils.py | 25 | 22518 | # -*- coding: utf-8 -*-
"""Tools for working with groups
This provides several functions to work with groups and a Group class that
keeps track of the different representations and has methods to work more
easily with groups.
Author: Josef Perktold,
Author: Nathaniel Smith, recipe for sparse_dummies on scipy user mailing list
Created on Tue Nov 29 15:44:53 2011 : sparse_dummies
Created on Wed Nov 30 14:28:24 2011 : combine_indices
changes: add Group class
Notes
~~~~~
This reverses the class I used before, where the class was for the data and
the group was auxiliary. Here, it is only the group, no data is kept.
sparse_dummies needs checking for corner cases, e.g.
what if a category level has zero elements? This can happen with subset
selection even if the original groups were defined as arange.
Not all methods and options have been tried out yet after refactoring
need more efficient loop if groups are sorted -> see GroupSorted.group_iter
"""
from __future__ import print_function
from statsmodels.compat.python import lrange, lzip, range
import numpy as np
import pandas as pd
from statsmodels.compat.numpy import npc_unique
import statsmodels.tools.data as data_util
from pandas.core.index import Index, MultiIndex
def combine_indices(groups, prefix='', sep='.', return_labels=False):
"""use np.unique to get integer group indices for product, intersection
"""
if isinstance(groups, tuple):
groups = np.column_stack(groups)
else:
groups = np.asarray(groups)
dt = groups.dtype
is2d = (groups.ndim == 2) # need to store
if is2d:
ncols = groups.shape[1]
if not groups.flags.c_contiguous:
groups = np.array(groups, order='C')
groups_ = groups.view([('', groups.dtype)] * groups.shape[1])
else:
groups_ = groups
uni, uni_idx, uni_inv = npc_unique(groups_, return_index=True,
return_inverse=True)
if is2d:
uni = uni.view(dt).reshape(-1, ncols)
# avoiding a view would be
# for t in uni.dtype.fields.values():
# assert (t[0] == dt)
#
# uni.dtype = dt
# uni.shape = (uni.size//ncols, ncols)
if return_labels:
label = [(prefix+sep.join(['%s']*len(uni[0]))) % tuple(ii)
for ii in uni]
return uni_inv, uni_idx, uni, label
else:
return uni_inv, uni_idx, uni
# written for and used in try_covariance_grouploop.py
def group_sums(x, group, use_bincount=True):
"""simple bincount version, again
group : array, integer
assumed to be consecutive integers
no dtype checking because I want to raise in that case
uses loop over columns of x
for comparison, simple python loop
"""
x = np.asarray(x)
if x.ndim == 1:
x = x[:, None]
elif x.ndim > 2 and use_bincount:
raise ValueError('not implemented yet')
if use_bincount:
# re-label groups or bincount takes too much memory
if np.max(group) > 2 * x.shape[0]:
group = pd.factorize(group)[0]
return np.array([np.bincount(group, weights=x[:, col])
for col in range(x.shape[1])])
else:
uniques = np.unique(group)
result = np.zeros([len(uniques)] + list(x.shape[1:]))
for ii, cat in enumerate(uniques):
            result[ii] = x[group == cat].sum(0)
return result
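# e.g. group_sums(np.array([1., 2., 3., 4.]), np.array([0, 0, 1, 1]))
# -> array([[ 3.,  7.]])  (one row per column of x, one column per group)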
def group_sums_dummy(x, group_dummy):
"""sum by groups given group dummy variable
group_dummy can be either ndarray or sparse matrix
"""
if data_util._is_using_ndarray_type(group_dummy, None):
return np.dot(x.T, group_dummy)
else: # check for sparse
return x.T * group_dummy
def dummy_sparse(groups):
"""create a sparse indicator from a group array with integer labels
Parameters
----------
groups: ndarray, int, 1d (nobs,)
an array of group indicators for each observation. Group levels are
assumed to be defined as consecutive integers, i.e. range(n_groups)
where n_groups is the number of group levels. A group level with no
observations for it will still produce a column of zeros.
Returns
-------
indi : ndarray, int8, 2d (nobs, n_groups)
an indicator array with one row per observation, that has 1 in the
column of the group level for that observation
Examples
--------
>>> g = np.array([0, 0, 2, 1, 1, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi
<7x3 sparse matrix of type '<type 'numpy.int8'>'
with 7 stored elements in Compressed Sparse Row format>
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
current behavior with missing groups
>>> g = np.array([0, 0, 2, 0, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
"""
from scipy import sparse
indptr = np.arange(len(groups)+1)
data = np.ones(len(groups), dtype=np.int8)
    indi = sparse.csr_matrix((data, groups, indptr))
return indi
class Group(object):
def __init__(self, group, name=''):
        self.group = np.asarray(group)  # kept so dummy() and interaction() work
        self.name = name
        # integer codes, first-occurrence indices and unique values of the groups
        self.group_int, self.uni_idx, self.uni = combine_indices(group)
self.n_groups = len(self.uni)
# put this here so they can be overwritten before calling labels
self.separator = '.'
self.prefix = self.name
if self.prefix:
self.prefix = self.prefix + '='
# cache decorator
def counts(self):
return np.bincount(self.group_int)
# cache_decorator
def labels(self):
# is this only needed for product of groups (intersection)?
prefix = self.prefix
uni = self.uni
sep = self.separator
if uni.ndim > 1:
label = [(prefix+sep.join(['%s']*len(uni[0]))) % tuple(ii)
for ii in uni]
else:
label = [prefix + '%s' % ii for ii in uni]
return label
def dummy(self, drop_idx=None, sparse=False, dtype=int):
"""
drop_idx is only available if sparse=False
drop_idx is supposed to index into uni
"""
uni = self.uni
if drop_idx is not None:
idx = lrange(len(uni))
del idx[drop_idx]
uni = uni[idx]
group = self.group
if not sparse:
return (group[:, None] == uni[None, :]).astype(dtype)
else:
return dummy_sparse(self.group_int)
def interaction(self, other):
if isinstance(other, self.__class__):
other = other.group
        return self.__class__((self.group, other))
def group_sums(self, x, use_bincount=True):
return group_sums(x, self.group_int, use_bincount=use_bincount)
def group_demean(self, x, use_bincount=True):
nobs = float(len(x))
means_g = group_sums(x / nobs, self.group_int,
use_bincount=use_bincount)
x_demeaned = x - means_g[self.group_int] # check reverse_index?
return x_demeaned, means_g
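# Minimal usage sketch for Group (illustrative values):
#     g = Group(np.array([0, 0, 1, 2, 1]))
#     g.n_groups   # 3
#     g.counts()   # array([2, 2, 1])
#     g.labels()   # ['0', '1', '2'] with the default empty prefix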
class GroupSorted(Group):
def __init__(self, group, name=''):
        # name the class explicitly so subclassing does not recurse
        super(GroupSorted, self).__init__(group, name=name)
idx = (np.nonzero(np.diff(group))[0]+1).tolist()
self.groupidx = lzip([0] + idx, idx + [len(group)])
def group_iter(self):
for low, upp in self.groupidx:
yield slice(low, upp)
def lag_indices(self, lag):
"""return the index array for lagged values
        Warning: if lag is larger than the number of observations for an
individual, then no values for that individual are returned.
TODO: for the unbalanced case, I should get the same truncation for
the array with lag=0. From the return of lag_idx we wouldn't know
which individual is missing.
TODO: do I want the full equivalent of lagmat in tsa?
maxlag or lag or lags.
not tested yet
"""
lag_idx = np.asarray(self.groupidx)[:, 1] - lag # asarray or already?
mask_ok = (lag <= lag_idx)
# still an observation that belongs to the same individual
return lag_idx[mask_ok]
def _is_hierarchical(x):
"""
Checks if the first item of an array-like object is also array-like
If so, we have a MultiIndex and returns True. Else returns False.
"""
item = x[0]
# is there a better way to do this?
if isinstance(item, (list, tuple, np.ndarray, pd.Series, pd.DataFrame)):
return True
else:
return False
def _make_hierarchical_index(index, names):
return MultiIndex.from_tuples(*[index], names=names)
def _make_generic_names(index):
n_names = len(index.names)
pad = str(len(str(n_names))) # number of digits
return [("group{0:0"+pad+"}").format(i) for i in range(n_names)]
class Grouping(object):
def __init__(self, index, names=None):
"""
index : index-like
Can be pandas MultiIndex or Index or array-like. If array-like
and is a MultipleIndex (more than one grouping variable),
groups are expected to be in each row. E.g., [('red', 1),
('red', 2), ('green', 1), ('green', 2)]
names : list or str, optional
The names to use for the groups. Should be a str if only
one grouping variable is used.
Notes
-----
If index is already a pandas Index then there is no copy.
"""
if isinstance(index, (Index, MultiIndex)):
if names is not None:
if hasattr(index, 'set_names'): # newer pandas
index.set_names(names, inplace=True)
else:
index.names = names
self.index = index
else: # array-like
if _is_hierarchical(index):
self.index = _make_hierarchical_index(index, names)
else:
self.index = Index(index, name=names)
if names is None:
names = _make_generic_names(self.index)
if hasattr(self.index, 'set_names'):
self.index.set_names(names, inplace=True)
else:
self.index.names = names
self.nobs = len(self.index)
self.nlevels = len(self.index.names)
self.slices = None
@property
def index_shape(self):
if hasattr(self.index, 'levshape'):
return self.index.levshape
else:
return self.index.shape
@property
def levels(self):
if hasattr(self.index, 'levels'):
return self.index.levels
else:
return pd.Categorical(self.index).levels
@property
def labels(self):
# this was index_int, but that's not a very good name...
if hasattr(self.index, 'labels'):
return self.index.labels
else: # pandas version issue here
# Compat code for the labels -> codes change in pandas 0.15
# FIXME: use .codes directly when we don't want to support
# pandas < 0.15
tmp = pd.Categorical(self.index)
try:
labl = tmp.codes
except AttributeError:
labl = tmp.labels # Old pandsd
return labl[None]
@property
def group_names(self):
return self.index.names
def reindex(self, index=None, names=None):
"""
Resets the index in-place.
"""
# NOTE: this isn't of much use if the rest of the data doesn't change
# This needs to reset cache
if names is None:
names = self.group_names
        # rebuild the grouping in place; rebinding the local name ``self``
        # would be a no-op
        self.__init__(index, names)
def get_slices(self, level=0):
"""
        Sets the slices attribute to be a list of indices of the sorted
        groups for the given index level. I.e., self.slices[0] contains the
        indices of the observations in the first (sorted) group.
"""
# TODO: refactor this
groups = self.index.get_level_values(level).unique()
groups.sort()
if isinstance(self.index, MultiIndex):
self.slices = [self.index.get_loc_level(x, level=level)[0]
for x in groups]
else:
self.slices = [self.index.get_loc(x) for x in groups]
def count_categories(self, level=0):
"""
Sets the attribute counts to equal the bincount of the (integer-valued)
labels.
"""
# TODO: refactor this not to set an attribute. Why would we do this?
self.counts = np.bincount(self.labels[level])
def check_index(self, is_sorted=True, unique=True, index=None):
"""Sanity checks"""
        if index is None:
index = self.index
if is_sorted:
test = pd.DataFrame(lrange(len(index)), index=index)
test_sorted = test.sort()
if not test.index.equals(test_sorted.index):
                raise Exception('Data is not sorted')
if unique:
if len(index) != len(index.unique()):
raise Exception('Duplicate index entries')
def sort(self, data, index=None):
"""Applies a (potentially hierarchical) sort operation on a numpy array
or pandas series/dataframe based on the grouping index or a
user-supplied index. Returns an object of the same type as the
original data as well as the matching (sorted) Pandas index.
"""
if index is None:
index = self.index
if data_util._is_using_ndarray_type(data, None):
if data.ndim == 1:
out = pd.Series(data, index=index, copy=True)
out = out.sort_index()
else:
out = pd.DataFrame(data, index=index)
out = out.sort(inplace=False) # copies
return np.array(out), out.index
elif data_util._is_using_pandas(data, None):
out = data
out = out.reindex(index) # copies?
out = out.sort_index()
return out, out.index
else:
msg = 'data must be a Numpy array or a Pandas Series/DataFrame'
raise ValueError(msg)
def transform_dataframe(self, dataframe, function, level=0, **kwargs):
"""Apply function to each column, by group
Assumes that the dataframe already has a proper index"""
if dataframe.shape[0] != self.nobs:
raise Exception('dataframe does not have the same shape as index')
out = dataframe.groupby(level=level).apply(function, **kwargs)
if 1 in out.shape:
return np.ravel(out)
else:
return np.array(out)
def transform_array(self, array, function, level=0, **kwargs):
"""Apply function to each column, by group
"""
if array.shape[0] != self.nobs:
raise Exception('array does not have the same shape as index')
dataframe = pd.DataFrame(array, index=self.index)
return self.transform_dataframe(dataframe, function, level=level,
**kwargs)
def transform_slices(self, array, function, level=0, **kwargs):
"""Apply function to each group. Similar to transform_array but does
not coerce array to a DataFrame and back and only works on a 1D or 2D
numpy array. function is called function(group, group_idx, **kwargs).
"""
array = np.asarray(array)
if array.shape[0] != self.nobs:
raise Exception('array does not have the same shape as index')
# always reset because level is given. need to refactor this.
self.get_slices(level=level)
processed = []
for s in self.slices:
if array.ndim == 2:
subset = array[s, :]
elif array.ndim == 1:
subset = array[s]
processed.append(function(subset, s, **kwargs))
processed = np.array(processed)
return processed.reshape(-1, processed.shape[-1])
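    # Sketch (hypothetical callable): grouping.transform_slices(
    #     x, lambda subset, row_idx: subset.mean(0), level=0)
    # calls function(group_rows, row_indices, **kwargs) once per sorted group
    # and stacks the per-group results.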
# TODO: this isn't general needs to be a PanelGrouping object
def dummies_time(self):
self.dummy_sparse(level=1)
return self._dummies
def dummies_groups(self, level=0):
self.dummy_sparse(level=level)
return self._dummies
def dummy_sparse(self, level=0):
"""create a sparse indicator from a group array with integer labels
Parameters
----------
groups: ndarray, int, 1d (nobs,) an array of group indicators for each
observation. Group levels are assumed to be defined as consecutive
integers, i.e. range(n_groups) where n_groups is the number of
group levels. A group level with no observations for it will still
produce a column of zeros.
Returns
-------
indi : ndarray, int8, 2d (nobs, n_groups)
an indicator array with one row per observation, that has 1 in the
column of the group level for that observation
Examples
--------
>>> g = np.array([0, 0, 2, 1, 1, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi
<7x3 sparse matrix of type '<type 'numpy.int8'>'
with 7 stored elements in Compressed Sparse Row format>
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
current behavior with missing groups
>>> g = np.array([0, 0, 2, 0, 2, 0])
>>> indi = dummy_sparse(g)
>>> indi.todense()
matrix([[1, 0, 0],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0],
[0, 0, 1],
[1, 0, 0]], dtype=int8)
"""
from scipy import sparse
groups = self.labels[level]
indptr = np.arange(len(groups)+1)
data = np.ones(len(groups), dtype=np.int8)
self._dummies = sparse.csr_matrix((data, groups, indptr))
if __name__ == '__main__':
# ---------- examples combine_indices
from numpy.testing import assert_equal
np.random.seed(985367)
groups = np.random.randint(0, 2, size=(10, 2))
uv, ux, u, label = combine_indices(groups, return_labels=True)
uv, ux, u, label = combine_indices(groups, prefix='g1,g2=', sep=',',
return_labels=True)
group0 = np.array(['sector0', 'sector1'])[groups[:, 0]]
group1 = np.array(['region0', 'region1'])[groups[:, 1]]
uv, ux, u, label = combine_indices((group0, group1),
prefix='sector,region=',
sep=',',
return_labels=True)
uv, ux, u, label = combine_indices((group0, group1), prefix='', sep='.',
return_labels=True)
group_joint = np.array(label)[uv]
group_joint_expected = np.array(['sector1.region0', 'sector0.region1',
'sector0.region0', 'sector0.region1',
'sector1.region1', 'sector0.region0',
'sector1.region0', 'sector1.region0',
'sector0.region1', 'sector0.region0'],
dtype='|S15')
assert_equal(group_joint, group_joint_expected)
"""
>>> uv
array([2, 1, 0, 0, 1, 0, 2, 0, 1, 0])
>>> label
['sector0.region0', 'sector1.region0', 'sector1.region1']
>>> np.array(label)[uv]
array(['sector1.region1', 'sector1.region0', 'sector0.region0',
'sector0.region0', 'sector1.region0', 'sector0.region0',
'sector1.region1', 'sector0.region0', 'sector1.region0',
'sector0.region0'],
dtype='|S15')
>>> np.column_stack((group0, group1))
array([['sector1', 'region1'],
['sector1', 'region0'],
['sector0', 'region0'],
['sector0', 'region0'],
['sector1', 'region0'],
['sector0', 'region0'],
['sector1', 'region1'],
['sector0', 'region0'],
['sector1', 'region0'],
['sector0', 'region0']],
dtype='|S7')
"""
# ------------- examples sparse_dummies
from scipy import sparse
g = np.array([0, 0, 1, 2, 1, 1, 2, 0])
u = lrange(3)
indptr = np.arange(len(g)+1)
data = np.ones(len(g), dtype=np.int8)
a = sparse.csr_matrix((data, g, indptr))
print(a.todense())
print(np.all(a.todense() == (g[:, None] == np.arange(3)).astype(int)))
x = np.arange(len(g)*3).reshape(len(g), 3, order='F')
print('group means')
print(x.T * a)
print(np.dot(x.T, g[:, None] == np.arange(3)))
print(np.array([np.bincount(g, weights=x[:, col]) for col in range(3)]))
for cat in u:
print(x[g == cat].sum(0))
for cat in u:
x[g == cat].sum(0)
cc = sparse.csr_matrix([[0, 1, 0, 1, 0, 0, 0, 0, 0],
[1, 0, 1, 0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 0, 1, 0, 1, 0, 0],
[0, 1, 0, 1, 0, 1, 0, 1, 0],
[0, 0, 1, 0, 1, 0, 0, 0, 1],
[0, 0, 0, 1, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 1, 0, 1, 0, 1],
[0, 0, 0, 0, 0, 1, 0, 1, 0]])
# ------------- groupsums
print(group_sums(np.arange(len(g)*3*2).reshape(len(g), 3, 2), g,
use_bincount=False).T)
print(group_sums(np.arange(len(g)*3*2).reshape(len(g), 3, 2)[:, :, 0], g))
print(group_sums(np.arange(len(g)*3*2).reshape(len(g), 3, 2)[:, :, 1], g))
# ------------- examples class
x = np.arange(len(g)*3).reshape(len(g), 3, order='F')
mygroup = Group(g)
print(mygroup.group_int)
print(mygroup.group_sums(x))
print(mygroup.labels())
| bsd-3-clause | -8,709,583,423,084,470,000 | 33.273973 | 79 | 0.552136 | false |
mgrygoriev/CloudFerry | tests/cloudferrylib/os/actions/test_keypair_migration.py | 1 | 6887 | # Copyright 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import keystoneclient
import mock
from cloudferrylib.os.compute import keypairs
from tests import test
from cloudferrylib.os.actions import transport_compute_resources as tcr
from cloudferrylib.utils import utils as utl
class KeyPairObjectTestCase(test.TestCase):
def test_key_pair_does_not_include_autoincrement_fields(self):
kp_db = (
"Jan 1st 1970", # created_at
None, # updated_at
None, # deleted_at
"keypair-id", # id
"keypair-name", # name
"user-id", # user_id
"aa:bb:cc:dd:ee:ff", # fingerprint
"public-key-data", # public_key
0, # deleted
)
kp = keypairs.KeyPair.from_tuple(kp_db)
kp_dict = kp.to_dict(allow_auto_fields=False)
self.assertTrue('id' not in kp_dict.keys())
def test_all_fields_are_accessible_through_attributes(self):
kp = keypairs.KeyPair()
try:
for field in kp.FIELDS:
getattr(kp, field)
except AttributeError:
self.fail("KeyPair object must have all fields accessible as "
"attributes")
def test_value_error_is_risen_in_case_db_value_is_incorrect(self):
# user id, fingerprint, public key and deleted keys missing
db_kp = ("datetime", None, None, "id", "keypair name")
self.assertRaises(ValueError, keypairs.KeyPair.from_tuple, db_kp)
db_kp = ("datetime", None, None, "id", "keypair name", "user id",
"fingerprint", "public key", 0, "invalid argument")
self.assertRaises(ValueError, keypairs.KeyPair.from_tuple, db_kp)
def test_fields_are_settable_as_attributes(self):
try:
kp = keypairs.KeyPair()
public_key_value = "random public key"
fingerprint_value = "fingerprint"
deleted_value = 1
kp.public_key = public_key_value
kp.fingerprint = fingerprint_value
kp.deleted = deleted_value
self.assertEqual(kp.public_key, public_key_value)
self.assertEqual(kp.fingerprint, fingerprint_value)
self.assertEqual(kp.deleted, deleted_value)
except AttributeError:
self.fail("Key pair fields must be settable as attributes")
def test_key_pair_has_dict_support(self):
try:
kp = keypairs.KeyPair()
public_key_value = "random public key"
fingerprint_value = "fingerprint"
deleted_value = 1
kp['public_key'] = public_key_value
kp['fingerprint'] = fingerprint_value
kp['deleted'] = deleted_value
self.assertEqual(kp['public_key'], public_key_value)
self.assertEqual(kp['fingerprint'], fingerprint_value)
self.assertEqual(kp['deleted'], deleted_value)
except KeyError:
self.fail("Key pair fields must be settable as dict item")
class KeyPairMigrationTestCase(test.TestCase):
@mock.patch('cloudferrylib.os.identity.keystone.'
'get_dst_user_from_src_user_id')
def test_non_existing_user_does_not_break_migration(self, _):
try:
db_broker = mock.Mock()
db_broker.get_all_keypairs.return_value = [keypairs.KeyPair(),
keypairs.KeyPair()]
tkp = tcr.TransportKeyPairs(init=mock.MagicMock(),
kp_db_broker=db_broker)
tkp.src_cloud = mock.MagicMock()
tkp.dst_cloud = mock.MagicMock()
tkp.cfg = mock.Mock()
tkp.cfg.migrate.skip_orphaned_keypairs = True
src_users = tkp.src_cloud.resources[
utl.IDENTITY_RESOURCE].keystone_client.users
src_users.find.side_effect = keystoneclient.exceptions.NotFound
dst_users = tkp.dst_cloud.resources[
utl.IDENTITY_RESOURCE].keystone_client.users
dst_users.find.side_effect = keystoneclient.exceptions.NotFound
tkp.run()
except Exception as e:
self.fail("Unexpected exception caught: %s" % e)
def test_update_sql_gets_called_for_each_keypair(self):
num_keypairs = 5
db_broker = mock.Mock()
db_broker.get_all_keypairs.return_value = [
keypairs.KeyPair() for _ in xrange(num_keypairs)]
db_broker.store_keypair = mock.Mock()
tkp = tcr.TransportKeyPairs(init=mock.MagicMock(),
kp_db_broker=db_broker)
tkp.src_cloud = mock.MagicMock()
tkp.dst_cloud = mock.MagicMock()
tkp.cfg = mock.Mock()
tkp.cfg.migrate.skip_orphaned_keypairs = True
tkp.run()
self.assertTrue(db_broker.store_keypair.call_count == num_keypairs)
class KeyPairForInstancesTestCase(test.TestCase):
def test_does_nothing_if_no_info_provided(self):
db_broker = mock.Mock()
task = tcr.SetKeyPairsForInstances(init=mock.MagicMock(),
kp_db_broker=db_broker)
task.run()
self.assertFalse(db_broker.add_keypair_to_instance.called)
def test_keypair_is_added_to_instance(self):
db_broker = mock.Mock()
num_instances_with_keys = 5
num_instances_without_keys = 5
instances = {
'instance1%d' % i: {
'instance': {
'key_name': 'key%d' % i,
'user_id': 'user%d' % i
}
} for i in xrange(num_instances_with_keys)
}
instances.update({
'instance2%d' % j: {
'instance': {
'user_id': 'user%d' % j
}
} for j in xrange(num_instances_without_keys)}
)
info = {utl.INSTANCES_TYPE: instances}
task = tcr.SetKeyPairsForInstances(init=mock.MagicMock(),
kp_db_broker=db_broker)
task.run(info=info)
self.assertTrue(db_broker.add_keypair_to_instance.called)
self.assertEqual(db_broker.add_keypair_to_instance.call_count,
num_instances_with_keys)
| apache-2.0 | 4,797,559,157,881,220,000 | 36.227027 | 75 | 0.58095 | false |
AstroPrint/AstroBox | src/ext/sockjs/tornado/transports/jsonp.py | 9 | 3642 | # -*- coding: utf-8 -*-
"""
sockjs.tornado.transports.jsonp
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
JSONP transport implementation.
"""
import logging
from tornado.web import asynchronous
from sockjs.tornado import proto
from sockjs.tornado.transports import pollingbase
from sockjs.tornado.util import bytes_to_str, unquote_plus
LOG = logging.getLogger("tornado.general")
class JSONPTransport(pollingbase.PollingTransportBase):
name = 'jsonp'
@asynchronous
def get(self, session_id):
# Start response
self.handle_session_cookie()
self.disable_cache()
# Grab callback parameter
self.callback = self.get_argument('c', None)
if not self.callback:
self.write('"callback" parameter required')
self.set_status(500)
self.finish()
return
# Get or create session without starting heartbeat
if not self._attach_session(session_id, False):
return
# Might get already detached because connection was closed in on_open
if not self.session:
return
if not self.session.send_queue:
self.session.start_heartbeat()
else:
self.session.flush()
def send_pack(self, message, binary=False):
if binary:
raise Exception('binary not supported for JSONPTransport')
self.active = False
try:
# TODO: Just escape
msg = '%s(%s);\r\n' % (self.callback, proto.json_encode(message))
self.set_header('Content-Type', 'application/javascript; charset=UTF-8')
self.set_header('Content-Length', len(msg))
# TODO: Fix me
self.set_header('Etag', 'dummy')
self.write(msg)
self.flush(callback=self.send_complete)
except IOError:
# If connection dropped, make sure we close offending session instead
# of propagating error all way up.
self.session.delayed_close()
class JSONPSendHandler(pollingbase.PollingTransportBase):
def post(self, session_id):
self.preflight()
self.handle_session_cookie()
self.disable_cache()
session = self._get_session(session_id)
if session is None or session.is_closed:
self.set_status(404)
return
data = bytes_to_str(self.request.body)
ctype = self.request.headers.get('Content-Type', '').lower()
if ctype == 'application/x-www-form-urlencoded':
if not data.startswith('d='):
LOG.exception('jsonp_send: Invalid payload.')
self.write("Payload expected.")
self.set_status(500)
return
data = unquote_plus(data[2:])
if not data:
LOG.debug('jsonp_send: Payload expected.')
self.write("Payload expected.")
self.set_status(500)
return
try:
messages = proto.json_decode(data)
except:
# TODO: Proper error handling
LOG.debug('jsonp_send: Invalid json encoding')
self.write("Broken JSON encoding.")
self.set_status(500)
return
try:
session.on_messages(messages)
except Exception:
LOG.exception('jsonp_send: on_message() failed')
session.close()
self.write('Message handler failed.')
self.set_status(500)
return
self.write('ok')
self.set_header('Content-Type', 'text/plain; charset=UTF-8')
self.set_status(200)
| agpl-3.0 | -534,404,830,478,798,600 | 27.677165 | 84 | 0.576881 | false |
EvgeneOskin/taiga-back | taiga/projects/userstories/apps.py | 14 | 5623 | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from django.apps import apps
from django.db.models import signals
from taiga.projects import signals as generic_handlers
from taiga.projects.custom_attributes import signals as custom_attributes_handlers
from . import signals as handlers
def connect_userstories_signals():
# Cached prev object version
signals.pre_save.connect(handlers.cached_prev_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="cached_prev_us")
# Role Points
signals.post_save.connect(handlers.update_role_points_when_create_or_edit_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_role_points_when_create_or_edit_us")
# Tasks
signals.post_save.connect(handlers.update_milestone_of_tasks_when_edit_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_milestone_of_tasks_when_edit_us")
# Open/Close US and Milestone
signals.post_save.connect(handlers.try_to_close_or_open_us_and_milestone_when_create_or_edit_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="try_to_close_or_open_us_and_milestone_when_create_or_edit_us")
signals.post_delete.connect(handlers.try_to_close_milestone_when_delete_us,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="try_to_close_milestone_when_delete_us")
# Tags
signals.pre_save.connect(generic_handlers.tags_normalization,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="tags_normalization_user_story")
signals.post_save.connect(generic_handlers.update_project_tags_when_create_or_edit_taggable_item,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_user_story")
signals.post_delete.connect(generic_handlers.update_project_tags_when_delete_taggable_item,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="update_project_tags_when_delete_taggable_item_user_story")
def connect_userstories_custom_attributes_signals():
signals.post_save.connect(custom_attributes_handlers.create_custom_attribute_value_when_create_user_story,
sender=apps.get_model("userstories", "UserStory"),
dispatch_uid="create_custom_attribute_value_when_create_user_story")
def connect_all_userstories_signals():
connect_userstories_signals()
connect_userstories_custom_attributes_signals()
def disconnect_userstories_signals():
signals.pre_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="cached_prev_us")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_role_points_when_create_or_edit_us")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_milestone_of_tasks_when_edit_us")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="try_to_close_or_open_us_and_milestone_when_create_or_edit_us")
signals.post_delete.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="try_to_close_milestone_when_delete_us")
signals.pre_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="tags_normalization_user_story")
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_project_tags_when_create_or_edit_taggable_item_user_story")
signals.post_delete.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="update_project_tags_when_delete_taggable_item_user_story")
def disconnect_userstories_custom_attributes_signals():
signals.post_save.disconnect(sender=apps.get_model("userstories", "UserStory"), dispatch_uid="create_custom_attribute_value_when_create_user_story")
def disconnect_all_userstories_signals():
disconnect_userstories_signals()
disconnect_userstories_custom_attributes_signals()
class UserStoriesAppConfig(AppConfig):
name = "taiga.projects.userstories"
verbose_name = "User Stories"
def ready(self):
connect_all_userstories_signals()
| agpl-3.0 | -134,019,683,328,046,600 | 56.357143 | 168 | 0.6723 | false |
endlessm/chromium-browser | third_party/angle/src/libANGLE/gen_overlay_fonts.py | 5 | 10022 | #!/usr/bin/env vpython
#
# [VPYTHON:BEGIN]
# wheel: <
# name: "infra/python/wheels/freetype-py/${vpython_platform}"
# version: "version:2.1.0.post1"
# >
# [VPYTHON:END]
# Copyright 2019 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# gen_vk_overlay_fonts.py:
# Code generation for overlay fonts. Should be run if the font file under overlay/ is changed,
# or the font sizes declared in this file are modified. The font is assumed to be monospace.
# The output will contain ASCII characters in order from ' ' to '~'. The output will be images
# with 3 rows of 32 characters each.
# NOTE: don't run this script directly. Run scripts/run_code_generation.py.
from datetime import date
import sys
if len(sys.argv) < 2:
from freetype import *
out_file_cpp = 'Overlay_font_autogen.cpp'
out_file_h = 'Overlay_font_autogen.h'
font_file = 'overlay/DejaVuSansMono-Bold.ttf'
template_out_file_h = u"""// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using {font_file}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {out_file_name}:
// Autogenerated overlay font data.
#include "libANGLE/Overlay.h"
namespace gl
{{
namespace overlay
{{
constexpr int kFontCount = {font_count};
constexpr int kFontGlyphWidths[kFontCount] = {{ {font_glyph_widths} }};
constexpr int kFontGlyphHeights[kFontCount] = {{ {font_glyph_heights} }};
constexpr int kFontCharactersPerRow = 32;
constexpr int kFontCharactersPerCol = 3;
constexpr int kFontCharacters = kFontCharactersPerRow * kFontCharactersPerCol;
constexpr int kFontImageWidth = {max_font_width} * kFontCharactersPerRow;
constexpr int kFontImageHeight = {max_font_height} * kFontCharactersPerCol;
{font_layers}
}} // namespace overlay
}} // namespace gl
"""
template_out_file_cpp = u"""// GENERATED FILE - DO NOT EDIT.
// Generated by {script_name} using images from {font_file}.
//
// Copyright {copyright_year} The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// {out_file_name}:
// Autogenerated overlay font data.
#include "libANGLE/Overlay.h"
#include "libANGLE/Overlay_font_autogen.h"
#include <numeric>
namespace gl
{{
using namespace overlay;
// Save binary size if the font images are never to be used.
#if ANGLE_ENABLE_OVERLAY
namespace
{{
constexpr int kFontWidths[kFontCount] = {{ {font_layer_widths} }};
constexpr int kFontHeights[kFontCount] = {{ {font_layer_heights} }};
{font_data}
// Returns a bit with the value of the pixel.
template<int kFontWidth, int kFontHeight>
uint32_t GetFontLayerPixel(const uint32_t fontImage[kFontHeight][kFontWidth / 32], int x, int y)
{{
ASSERT(x >= 0 && x < kFontWidth && y >= 0 && y < kFontHeight);
return fontImage[y][x / 32] >> (x % 32) & 1;
}}
inline uint32_t GetFontPixel(int layer, int x, int y)
{{
switch (layer)
{{
{get_font_layer_pixel}
default:
UNREACHABLE();
return 0;
}}
}}
}} // anonymous namespace
void OverlayState::initFontData(uint8_t *fontData) const
{{
constexpr int kFontDataLayerSize = kFontImageWidth * kFontImageHeight;
// Unpack the font bitmap into R8_UNORM format. Border pixels are given a 0.5 value for better
// font visibility.
for (int layer = 0; layer < kFontCount; ++layer)
{{
memset(fontData, 0, kFontDataLayerSize);
for (int y = 0; y < kFontHeights[layer]; ++y)
{{
for (int x = 0; x < kFontWidths[layer]; ++x)
{{
uint32_t src = GetFontPixel(layer, x, y);
uint8_t dstValue = src ? 255 : 0;
fontData[y * kFontImageWidth + x] = dstValue;
}}
}}
fontData += kFontDataLayerSize;
}}
}}
#else
void OverlayState::initFontData(uint8_t *fontData) const
{{
memset(fontData, 0, kFontCount * kFontImageWidth * kFontImageHeight * sizeof(*fontData));
}}
#endif
}} // namespace gl
"""
template_get_font_layer_pixel = u"""case {layer}:
return GetFontLayerPixel<kFontWidths[{layer}], kFontHeights[{layer}]>({font_image}, x, y);
"""
def main():
if len(sys.argv) == 2 and sys.argv[1] == 'inputs':
# disabled because of issues on Windows. http://anglebug.com/3892
# print(font_file)
return
if len(sys.argv) == 2 and sys.argv[1] == 'outputs':
print(','.join([out_file_cpp, out_file_h]))
return
font_defs = [('large', 36), ('medium', 23), ('small', 14)]
chars = ' !"#$%&\'()*+,-./0123456789:;<=>?' + \
'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_' + \
'`abcdefghijklmnopqrstuvwxyz{|}~ '
output_rows = 3
output_cols = 32
assert (len(chars) == output_rows * output_cols)
font_glyph_widths = []
font_glyph_heights = []
font_layers = []
font_data = []
get_font_layer_pixel = []
current_font_layer = 0
# Load the font file.
face = Face(font_file)
assert (face.is_fixed_width)
for font_name, font_size in font_defs:
# Since the font is fixed width, we can retrieve its size right away.
face.set_char_size(font_size << 6)
glyph_width = face.size.max_advance >> 6
glyph_ascender = face.size.ascender >> 6
glyph_descender = face.size.descender >> 6
glyph_height = glyph_ascender - glyph_descender
font_tag = font_name.capitalize()
font_layer = str(current_font_layer)
font_layer_symbol = 'kFontLayer' + font_tag
font_array_name = 'kFontImage' + font_tag
font_width = 'kFontWidths[' + font_layer_symbol + ']'
font_height = 'kFontHeights[' + font_layer_symbol + ']'
# Font pixels are packed in 32-bit values.
font_array_width = output_cols * glyph_width / 32
font_array_height = output_rows * glyph_height
font_array = [[0] * font_array_width for i in range(font_array_height)]
for charIndex in range(len(chars)):
char = chars[charIndex]
base_x = (charIndex % output_cols) * glyph_width
base_y = (charIndex / output_cols) * glyph_height
# Render the character.
face.load_char(char)
bitmap = face.glyph.bitmap
left = face.glyph.bitmap_left
top = face.glyph.bitmap_top
width = bitmap.width
rows = bitmap.rows
pitch = bitmap.pitch
offset_x = left
offset_y = glyph_height - (top - glyph_descender)
# '#' in the smallest font generates a larger glyph than the "fixed" font width.
if offset_x + width > glyph_width:
offset_x = glyph_width - width
if offset_x < 0:
width += offset_x
offset_x = 0
base_x += offset_x
base_y += offset_y
assert (offset_x + width <= glyph_width)
assert (offset_y + rows <= glyph_height)
# Write the character bitmap in the font image.
for y in range(rows):
for x in range(width):
pixel_value = bitmap.buffer[y * pitch + x]
output_bit = 1 if pixel_value >= 122 else 0
font_array_row = base_y + y
font_array_col = (base_x + x) / 32
font_array_bit = (base_x + x) % 32
font_array[font_array_row][font_array_col] |= output_bit << font_array_bit
# Output the image to a C array.
data = 'constexpr uint32_t ' + font_array_name + '[' + font_height + '][' + font_width + '/32] = {\n'
for y in range(font_array_height):
data += '{'
for x in range(font_array_width):
data += '0x{:08X}, '.format(font_array[y][x])
data += '},\n'
data += '};\n'
font_glyph_widths.append(glyph_width)
font_glyph_heights.append(glyph_height)
font_layers.append('constexpr int ' + font_layer_symbol + ' = ' + font_layer + ';')
font_data.append(data)
get_font_layer_pixel.append(
template_get_font_layer_pixel.format(
layer=font_layer_symbol, font_image=font_array_name))
current_font_layer += 1
with open(out_file_h, 'w') as outfile:
outfile.write(
template_out_file_h.format(
script_name=__file__,
font_file=font_file,
copyright_year=date.today().year,
out_file_name=out_file_h,
font_count=len(font_data),
font_glyph_widths=','.join(map(str, font_glyph_widths)),
font_glyph_heights=','.join(map(str, font_glyph_heights)),
max_font_width=max(font_glyph_widths),
max_font_height=max(font_glyph_heights),
font_layers='\n'.join(font_layers)))
outfile.close()
font_layer_widths = [
'kFontGlyphWidths[' + str(layer) + '] * kFontCharactersPerRow'
for layer in range(len(font_data))
]
font_layer_heights = [
'kFontGlyphHeights[' + str(layer) + '] * kFontCharactersPerCol'
for layer in range(len(font_data))
]
with open(out_file_cpp, 'w') as outfile:
outfile.write(
template_out_file_cpp.format(
script_name=__file__,
font_file=font_file,
copyright_year=date.today().year,
out_file_name=out_file_cpp,
font_layer_widths=','.join(font_layer_widths),
font_layer_heights=','.join(font_layer_heights),
font_data='\n'.join(font_data),
get_font_layer_pixel=''.join(get_font_layer_pixel)))
outfile.close()
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 8,678,031,032,273,313,000 | 33.321918 | 109 | 0.598084 | false |
lucasmiqueias/speakerfight-1 | deck/migrations/0001_initial.py | 20 | 3687 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('jury', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', django_extensions.db.fields.AutoSlugField(editable=False, populate_from=b'title', max_length=60, blank=True, unique=True, overwrite=True)),
('description', models.TextField(max_length=400, verbose_name='Description')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('is_published', models.BooleanField(default=False, verbose_name='Publish')),
('allow_public_voting', models.BooleanField(default=True, verbose_name='Allow Public Voting')),
('due_date', models.DateTimeField(null=True, blank=True)),
('author', models.ForeignKey(related_name='events', to=settings.AUTH_USER_MODEL)),
('jury', models.OneToOneField(related_name='event', null=True, blank=True, to='jury.Jury')),
],
options={
'verbose_name': 'Event',
'verbose_name_plural': 'Events',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Proposal',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=50, verbose_name='Title')),
('slug', django_extensions.db.fields.AutoSlugField(editable=False, populate_from=b'title', max_length=60, blank=True, unique=True, overwrite=True)),
('description', models.TextField(max_length=400, verbose_name='Description')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('is_published', models.BooleanField(default=False, verbose_name='Publish')),
('author', models.ForeignKey(related_name='proposals', to=settings.AUTH_USER_MODEL)),
('event', models.ForeignKey(related_name='proposals', to='deck.Event')),
],
options={
'verbose_name': 'Proposal',
'verbose_name_plural': 'Proposals',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Vote',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('rate', models.SmallIntegerField(blank=True, null=True, verbose_name='Rate Index', choices=[(-1, b'angry'), (0, b'sleepy'), (1, b'sad'), (2, b'happy'), (3, b'laughing')])),
('proposal', models.ForeignKey(related_name='votes', to='deck.Proposal')),
('user', models.ForeignKey(related_name='votes', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Vote',
'verbose_name_plural': 'Votes',
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='vote',
unique_together=set([('proposal', 'user')]),
),
]
| mit | -6,755,807,652,525,710,000 | 49.506849 | 189 | 0.56984 | false |
sonnyhu/scikit-learn | sklearn/externals/joblib/numpy_pickle.py | 37 | 23222 | """Utilities for fast persistence of big data, with optional compression."""
# Author: Gael Varoquaux <gael dot varoquaux at normalesup dot org>
# Copyright (c) 2009 Gael Varoquaux
# License: BSD Style, 3 clauses.
import pickle
import os
import sys
import warnings
try:
from pathlib import Path
except ImportError:
Path = None
from .numpy_pickle_utils import _COMPRESSORS
from .numpy_pickle_utils import BinaryZlibFile
from .numpy_pickle_utils import Unpickler, Pickler
from .numpy_pickle_utils import _read_fileobject, _write_fileobject
from .numpy_pickle_utils import _read_bytes, BUFFER_SIZE
from .numpy_pickle_compat import load_compatibility
from .numpy_pickle_compat import NDArrayWrapper
# For compatibility with old versions of joblib, we need ZNDArrayWrapper
# to be visible in the current namespace.
# Explicitly skipping next line from flake8 as it triggers an F401 warning
# which we don't care.
from .numpy_pickle_compat import ZNDArrayWrapper # noqa
from ._compat import _basestring, PY3_OR_LATER
###############################################################################
# Utility objects for persistence.
class NumpyArrayWrapper(object):
"""An object to be persisted instead of numpy arrays.
This object is used to hack into the pickle machinery and read numpy
array data from our custom persistence format.
More precisely, this object is used for:
* carrying the information of the persisted array: subclass, shape, order,
dtype. Those ndarray metadata are used to correctly reconstruct the array
with low level numpy functions.
* determining if memmap is allowed on the array.
* reading the array bytes from a file.
* reading the array using memorymap from a file.
* writing the array bytes to a file.
Attributes
----------
subclass: numpy.ndarray subclass
Determine the subclass of the wrapped array.
shape: numpy.ndarray shape
Determine the shape of the wrapped array.
order: {'C', 'F'}
Determine the order of wrapped array data. 'C' is for C order, 'F' is
for fortran order.
dtype: numpy.ndarray dtype
Determine the data type of the wrapped array.
allow_mmap: bool
Determine if memory mapping is allowed on the wrapped array.
Default: False.
"""
def __init__(self, subclass, shape, order, dtype, allow_mmap=False):
"""Constructor. Store the useful information for later."""
self.subclass = subclass
self.shape = shape
self.order = order
self.dtype = dtype
self.allow_mmap = allow_mmap
def write_array(self, array, pickler):
"""Write array bytes to pickler file handle.
This function is an adaptation of the numpy write_array function
available in version 1.10.1 in numpy/lib/format.py.
"""
# Set buffer size to 16 MiB to hide the Python loop overhead.
buffersize = max(16 * 1024 ** 2 // array.itemsize, 1)
if array.dtype.hasobject:
# We contain Python objects so we cannot write out the data
# directly. Instead, we will pickle it out with version 2 of the
# pickle protocol.
pickle.dump(array, pickler.file_handle, protocol=2)
else:
for chunk in pickler.np.nditer(array,
flags=['external_loop',
'buffered',
'zerosize_ok'],
buffersize=buffersize,
order=self.order):
pickler.file_handle.write(chunk.tostring('C'))
def read_array(self, unpickler):
"""Read array from unpickler file handle.
This function is an adaptation of the numpy read_array function
available in version 1.10.1 in numpy/lib/format.py.
"""
if len(self.shape) == 0:
count = 1
else:
count = unpickler.np.multiply.reduce(self.shape)
# Now read the actual data.
if self.dtype.hasobject:
# The array contained Python objects. We need to unpickle the data.
array = pickle.load(unpickler.file_handle)
else:
if (not PY3_OR_LATER and
unpickler.np.compat.isfileobj(unpickler.file_handle)):
# In python 2, gzip.GzipFile is considered as a file so one
# can use numpy.fromfile().
# For file objects, use np.fromfile function.
# This function is faster than the memory-intensive
# method below.
array = unpickler.np.fromfile(unpickler.file_handle,
dtype=self.dtype, count=count)
else:
# This is not a real file. We have to read it the
# memory-intensive way.
# crc32 module fails on reads greater than 2 ** 32 bytes,
# breaking large reads from gzip streams. Chunk reads to
# BUFFER_SIZE bytes to avoid issue and reduce memory overhead
# of the read. In non-chunked case count < max_read_count, so
# only one read is performed.
max_read_count = BUFFER_SIZE // min(BUFFER_SIZE,
self.dtype.itemsize)
array = unpickler.np.empty(count, dtype=self.dtype)
for i in range(0, count, max_read_count):
read_count = min(max_read_count, count - i)
read_size = int(read_count * self.dtype.itemsize)
data = _read_bytes(unpickler.file_handle,
read_size, "array data")
array[i:i + read_count] = \
unpickler.np.frombuffer(data, dtype=self.dtype,
count=read_count)
del data
if self.order == 'F':
array.shape = self.shape[::-1]
array = array.transpose()
else:
array.shape = self.shape
return array
def read_mmap(self, unpickler):
"""Read an array using numpy memmap."""
offset = unpickler.file_handle.tell()
if unpickler.mmap_mode == 'w+':
unpickler.mmap_mode = 'r+'
marray = unpickler.np.memmap(unpickler.filename,
dtype=self.dtype,
shape=self.shape,
order=self.order,
mode=unpickler.mmap_mode,
offset=offset)
# update the offset so that it corresponds to the end of the read array
unpickler.file_handle.seek(offset + marray.nbytes)
return marray
def read(self, unpickler):
"""Read the array corresponding to this wrapper.
Use the unpickler to get all information to correctly read the array.
Parameters
----------
unpickler: NumpyUnpickler
Returns
-------
array: numpy.ndarray
"""
# When requested, only use memmap mode if allowed.
if unpickler.mmap_mode is not None and self.allow_mmap:
array = self.read_mmap(unpickler)
else:
array = self.read_array(unpickler)
# Manage array subclass case
if (hasattr(array, '__array_prepare__') and
self.subclass not in (unpickler.np.ndarray,
unpickler.np.memmap)):
# We need to reconstruct another subclass
new_array = unpickler.np.core.multiarray._reconstruct(
self.subclass, (0,), 'b')
return new_array.__array_prepare__(array)
else:
return array
###############################################################################
# Pickler classes
class NumpyPickler(Pickler):
"""A pickler to persist big data efficiently.
The main features of this object are:
* persistence of numpy arrays in a single file.
* optional compression with a special care on avoiding memory copies.
Attributes
----------
fp: file
File object handle used for serializing the input object.
protocol: int
Pickle protocol used. Default is pickle.DEFAULT_PROTOCOL under
python 3, pickle.HIGHEST_PROTOCOL otherwise.
"""
dispatch = Pickler.dispatch.copy()
def __init__(self, fp, protocol=None):
self.file_handle = fp
self.buffered = isinstance(self.file_handle, BinaryZlibFile)
# By default we want a pickle protocol that only changes with
# the major python version and not the minor one
if protocol is None:
protocol = (pickle.DEFAULT_PROTOCOL if PY3_OR_LATER
else pickle.HIGHEST_PROTOCOL)
Pickler.__init__(self, self.file_handle, protocol=protocol)
# delayed import of numpy, to avoid tight coupling
try:
import numpy as np
except ImportError:
np = None
self.np = np
def _create_array_wrapper(self, array):
"""Create and returns a numpy array wrapper from a numpy array."""
order = 'F' if (array.flags.f_contiguous and
not array.flags.c_contiguous) else 'C'
allow_mmap = not self.buffered and not array.dtype.hasobject
wrapper = NumpyArrayWrapper(type(array),
array.shape, order, array.dtype,
allow_mmap=allow_mmap)
return wrapper
def save(self, obj):
"""Subclass the Pickler `save` method.
This is a total abuse of the Pickler class in order to use the numpy
persistence function `save` instead of the default pickle
implementation. The numpy array is replaced by a custom wrapper in the
pickle persistence stack and the serialized array is written right
after in the file. Warning: the file produced does not follow the
pickle format. As such it can not be read with `pickle.load`.
"""
if self.np is not None and type(obj) in (self.np.ndarray,
self.np.matrix,
self.np.memmap):
if type(obj) is self.np.memmap:
# Pickling doesn't work with memmapped arrays
obj = self.np.asanyarray(obj)
# The array wrapper is pickled instead of the real array.
wrapper = self._create_array_wrapper(obj)
Pickler.save(self, wrapper)
# A framer was introduced with pickle protocol 4 and we want to
# ensure the wrapper object is written before the numpy array
# buffer in the pickle file.
# See https://www.python.org/dev/peps/pep-3154/#framing to get
# more information on the framer behavior.
if self.proto >= 4:
self.framer.commit_frame(force=True)
# And then array bytes are written right after the wrapper.
wrapper.write_array(obj, self)
return
return Pickler.save(self, obj)
class NumpyUnpickler(Unpickler):
"""A subclass of the Unpickler to unpickle our numpy pickles.
Attributes
----------
mmap_mode: str
The memorymap mode to use for reading numpy arrays.
file_handle: file_like
File object to unpickle from.
filename: str
Name of the file to unpickle from. It should correspond to file_handle.
This parameter is required when using mmap_mode.
np: module
Reference to numpy module if numpy is installed else None.
"""
dispatch = Unpickler.dispatch.copy()
def __init__(self, filename, file_handle, mmap_mode=None):
# The next line is for backward compatibility with pickle generated
# with joblib versions less than 0.10.
self._dirname = os.path.dirname(filename)
self.mmap_mode = mmap_mode
self.file_handle = file_handle
# filename is required for numpy mmap mode.
self.filename = filename
self.compat_mode = False
Unpickler.__init__(self, self.file_handle)
try:
import numpy as np
except ImportError:
np = None
self.np = np
def load_build(self):
"""Called to set the state of a newly created object.
We capture it to replace our place-holder objects, NDArrayWrapper or
NumpyArrayWrapper, by the array we are interested in. We
replace them directly in the stack of pickler.
NDArrayWrapper is used for backward compatibility with joblib <= 0.9.
"""
Unpickler.load_build(self)
# For backward compatibility, we support NDArrayWrapper objects.
if isinstance(self.stack[-1], (NDArrayWrapper, NumpyArrayWrapper)):
if self.np is None:
raise ImportError("Trying to unpickle an ndarray, "
"but numpy didn't import correctly")
array_wrapper = self.stack.pop()
# If any NDArrayWrapper is found, we switch to compatibility mode,
# this will be used to raise a DeprecationWarning to the user at
# the end of the unpickling.
if isinstance(array_wrapper, NDArrayWrapper):
self.compat_mode = True
self.stack.append(array_wrapper.read(self))
# Be careful to register our new method.
if PY3_OR_LATER:
dispatch[pickle.BUILD[0]] = load_build
else:
dispatch[pickle.BUILD] = load_build
###############################################################################
# Utility functions
def dump(value, filename, compress=0, protocol=None, cache_size=None):
"""Persist an arbitrary Python object into one file.
Parameters
-----------
value: any Python object
The object to store to disk.
filename: str or pathlib.Path
The path of the file in which it is to be stored. The compression
method corresponding to one of the supported filename extensions ('.z',
'.gz', '.bz2', '.xz' or '.lzma') will be used automatically.
compress: int from 0 to 9 or bool or 2-tuple, optional
Optional compression level for the data. 0 or False is no compression.
Higher value means more compression, but also slower read and
write times. Using a value of 3 is often a good compromise.
See the notes for more details.
If compress is True, the compression level used is 3.
If compress is a 2-tuple, the first element must correspond to a string
between supported compressors (e.g 'zlib', 'gzip', 'bz2', 'lzma'
'xz'), the second element must be an integer from 0 to 9, corresponding
to the compression level.
protocol: positive int
Pickle protocol, see pickle.dump documentation for more details.
cache_size: positive int, optional
This option is deprecated in 0.10 and has no effect.
Returns
-------
filenames: list of strings
The list of file names in which the data is stored. If
compress is false, each array is stored in a different file.
See Also
--------
joblib.load : corresponding loader
Notes
-----
Memmapping on load cannot be used for compressed files. Thus
using compression can significantly slow down loading. In
addition, compressed files take extra extra memory during
dump and load.
"""
if Path is not None and isinstance(filename, Path):
filename = str(filename)
is_filename = isinstance(filename, _basestring)
is_fileobj = hasattr(filename, "write")
compress_method = 'zlib' # zlib is the default compression method.
if compress is True:
# By default, if compress is enabled, we want to be using 3 by default
compress_level = 3
elif isinstance(compress, tuple):
# a 2-tuple was set in compress
if len(compress) != 2:
raise ValueError(
'Compress argument tuple should contain exactly 2 elements: '
'(compress method, compress level), you passed {0}'
.format(compress))
compress_method, compress_level = compress
else:
compress_level = compress
if compress_level is not False and compress_level not in range(10):
# Raising an error if a non valid compress level is given.
raise ValueError(
'Non valid compress level given: "{0}". Possible values are '
'{1}.'.format(compress_level, list(range(10))))
if compress_method not in _COMPRESSORS:
# Raising an error if an unsupported compression method is given.
raise ValueError(
'Non valid compression method given: "{0}". Possible values are '
'{1}.'.format(compress_method, _COMPRESSORS))
if not is_filename and not is_fileobj:
# People keep inverting arguments, and the resulting error is
# incomprehensible
raise ValueError(
'Second argument should be a filename or a file-like object, '
'%s (type %s) was given.'
% (filename, type(filename))
)
if is_filename and not isinstance(compress, tuple):
# In case no explicit compression was requested using both compression
# method and level in a tuple and the filename has an explicit
# extension, we select the corresponding compressor.
if filename.endswith('.z'):
compress_method = 'zlib'
elif filename.endswith('.gz'):
compress_method = 'gzip'
elif filename.endswith('.bz2'):
compress_method = 'bz2'
elif filename.endswith('.lzma'):
compress_method = 'lzma'
elif filename.endswith('.xz'):
compress_method = 'xz'
else:
# no matching compression method found, we unset the variable to
# be sure no compression level is set afterwards.
compress_method = None
if compress_method in _COMPRESSORS and compress_level == 0:
# we choose a default compress_level of 3 in case it was not given
# as an argument (using compress).
compress_level = 3
if not PY3_OR_LATER and compress_method in ('lzma', 'xz'):
raise NotImplementedError("{0} compression is only available for "
"python version >= 3.3. You are using "
"{1}.{2}".format(compress_method,
sys.version_info[0],
sys.version_info[1]))
if cache_size is not None:
# Cache size is deprecated starting from version 0.10
warnings.warn("Please do not set 'cache_size' in joblib.dump, "
"this parameter has no effect and will be removed. "
"You used 'cache_size={0}'".format(cache_size),
DeprecationWarning, stacklevel=2)
if compress_level != 0:
with _write_fileobject(filename, compress=(compress_method,
compress_level)) as f:
NumpyPickler(f, protocol=protocol).dump(value)
elif is_filename:
with open(filename, 'wb') as f:
NumpyPickler(f, protocol=protocol).dump(value)
else:
NumpyPickler(filename, protocol=protocol).dump(value)
# If the target container is a file object, nothing is returned.
if is_fileobj:
return
# For compatibility, the list of created filenames (e.g with one element
# after 0.10.0) is returned by default.
return [filename]
def _unpickle(fobj, filename="", mmap_mode=None):
"""Internal unpickling function."""
# We are careful to open the file handle early and keep it open to
# avoid race-conditions on renames.
# That said, if data is stored in companion files, which can be
# the case with the old persistence format, moving the directory
# will create a race when joblib tries to access the companion
# files.
unpickler = NumpyUnpickler(filename, fobj, mmap_mode=mmap_mode)
obj = None
try:
obj = unpickler.load()
if unpickler.compat_mode:
warnings.warn("The file '%s' has been generated with a "
"joblib version less than 0.10. "
"Please regenerate this pickle file."
% filename,
DeprecationWarning, stacklevel=3)
except UnicodeDecodeError as exc:
# More user-friendly error message
if PY3_OR_LATER:
new_exc = ValueError(
'You may be trying to read with '
'python 3 a joblib pickle generated with python 2. '
'This feature is not supported by joblib.')
new_exc.__cause__ = exc
raise new_exc
# Reraise exception with Python 2
raise
return obj
def load(filename, mmap_mode=None):
"""Reconstruct a Python object from a file persisted with joblib.dump.
Parameters
-----------
filename: str or pathlib.Path
The path of the file from which to load the object
mmap_mode: {None, 'r+', 'r', 'w+', 'c'}, optional
If not None, the arrays are memory-mapped from the disk. This
mode has no effect for compressed files. Note that in this
case the reconstructed object might not longer match exactly
the originally pickled object.
Returns
-------
result: any Python object
The object stored in the file.
See Also
--------
joblib.dump : function to save an object
Notes
-----
This function can load numpy array files saved separately during the
dump. If the mmap_mode argument is given, it is passed to np.load and
arrays are loaded as memmaps. As a consequence, the reconstructed
object might not match the original pickled object. Note that if the
file was saved with compression, the arrays cannot be memmaped.
"""
if Path is not None and isinstance(filename, Path):
filename = str(filename)
if hasattr(filename, "read") and hasattr(filename, "seek"):
with _read_fileobject(filename, "", mmap_mode) as fobj:
obj = _unpickle(fobj)
else:
with open(filename, 'rb') as f:
with _read_fileobject(f, filename, mmap_mode) as fobj:
if isinstance(fobj, _basestring):
# if the returned file object is a string, this means we
# try to load a pickle file generated with an version of
# Joblib so we load it with joblib compatibility function.
return load_compatibility(fobj)
obj = _unpickle(fobj, filename, mmap_mode)
return obj
| bsd-3-clause | -3,702,681,519,317,451,000 | 39.246101 | 79 | 0.5928 | false |
jonyroda97/redbot-amigosprovaveis | lib/youtube_dl/extractor/dispeak.py | 23 | 4368 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_duration,
remove_end,
xpath_element,
xpath_text,
)
class DigitallySpeakingIE(InfoExtractor):
_VALID_URL = r'https?://(?:s?evt\.dispeak|events\.digitallyspeaking)\.com/(?:[^/]+/)+xml/(?P<id>[^.]+)\.xml'
_TESTS = [{
# From http://gdcvault.com/play/1023460/Tenacious-Design-and-The-Interface
'url': 'http://evt.dispeak.com/ubm/gdc/sf16/xml/840376_BQRC.xml',
'md5': 'a8efb6c31ed06ca8739294960b2dbabd',
'info_dict': {
'id': '840376_BQRC',
'ext': 'mp4',
'title': 'Tenacious Design and The Interface of \'Destiny\'',
},
}, {
# From http://www.gdcvault.com/play/1014631/Classic-Game-Postmortem-PAC
'url': 'http://events.digitallyspeaking.com/gdc/sf11/xml/12396_1299111843500GMPX.xml',
'only_matching': True,
}, {
# From http://www.gdcvault.com/play/1013700/Advanced-Material
'url': 'http://sevt.dispeak.com/ubm/gdc/eur10/xml/11256_1282118587281VNIT.xml',
'only_matching': True,
}]
def _parse_mp4(self, metadata):
video_formats = []
video_root = None
mp4_video = xpath_text(metadata, './mp4video', default=None)
if mp4_video is not None:
mobj = re.match(r'(?P<root>https?://.*?/).*', mp4_video)
video_root = mobj.group('root')
if video_root is None:
http_host = xpath_text(metadata, 'httpHost', default=None)
if http_host:
video_root = 'http://%s/' % http_host
if video_root is None:
# Hard-coded in http://evt.dispeak.com/ubm/gdc/sf16/custom/player2.js
# Works for GPUTechConf, too
video_root = 'http://s3-2u.digitallyspeaking.com/'
formats = metadata.findall('./MBRVideos/MBRVideo')
if not formats:
return None
for a_format in formats:
stream_name = xpath_text(a_format, 'streamName', fatal=True)
video_path = re.match(r'mp4\:(?P<path>.*)', stream_name).group('path')
url = video_root + video_path
vbr = xpath_text(a_format, 'bitrate')
video_formats.append({
'url': url,
'vbr': int_or_none(vbr),
})
return video_formats
def _parse_flv(self, metadata):
formats = []
akamai_url = xpath_text(metadata, './akamaiHost', fatal=True)
audios = metadata.findall('./audios/audio')
for audio in audios:
formats.append({
'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
'play_path': remove_end(audio.get('url'), '.flv'),
'ext': 'flv',
'vcodec': 'none',
'format_id': audio.get('code'),
})
slide_video_path = xpath_text(metadata, './slideVideo', fatal=True)
formats.append({
'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
'play_path': remove_end(slide_video_path, '.flv'),
'ext': 'flv',
'format_note': 'slide deck video',
'quality': -2,
'preference': -2,
'format_id': 'slides',
})
speaker_video_path = xpath_text(metadata, './speakerVideo', fatal=True)
formats.append({
'url': 'rtmp://%s/ondemand?ovpfv=1.1' % akamai_url,
'play_path': remove_end(speaker_video_path, '.flv'),
'ext': 'flv',
'format_note': 'speaker video',
'quality': -1,
'preference': -1,
'format_id': 'speaker',
})
return formats
def _real_extract(self, url):
video_id = self._match_id(url)
xml_description = self._download_xml(url, video_id)
metadata = xpath_element(xml_description, 'metadata')
video_formats = self._parse_mp4(metadata)
if video_formats is None:
video_formats = self._parse_flv(metadata)
return {
'id': video_id,
'formats': video_formats,
'title': xpath_text(metadata, 'title', fatal=True),
'duration': parse_duration(xpath_text(metadata, 'endTime')),
'creator': xpath_text(metadata, 'speaker'),
}
| gpl-3.0 | -644,573,189,458,565,600 | 36.016949 | 112 | 0.540751 | false |
damianam/easybuild-framework | easybuild/toolchains/craypgi.py | 5 | 1596 | ##
# Copyright 2014-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
CrayPGI toolchain: Cray compilers (PGI) and MPI via Cray compiler drivers (PrgEnv-pgi) minus LibSci minus Cray FFTW
:author: Jg Piccinali (CSCS)
"""
from easybuild.toolchains.compiler.craype import CrayPEPGI
from easybuild.toolchains.mpi.craympich import CrayMPICH
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
class CrayPGI(CrayPEPGI, CrayMPICH):
"""Compiler toolchain for Cray Programming Environment for Cray Compiling Environment (PGI) (PrgEnv-pgi)."""
NAME = 'CrayPGI'
SUBTOOLCHAIN = DUMMY_TOOLCHAIN_NAME
| gpl-2.0 | -7,995,501,323,763,502,000 | 43.333333 | 115 | 0.764411 | false |
quamilek/django | tests/admin_inlines/tests.py | 118 | 44045 | from __future__ import unicode_literals
import datetime
import warnings
from django.contrib.admin import ModelAdmin, TabularInline
from django.contrib.admin.helpers import InlineAdminForm
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.auth.models import Permission, User
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.test import RequestFactory, TestCase, override_settings
from django.utils.encoding import force_text
from .admin import InnerInline, site as admin_site
from .models import (
Author, BinaryTree, Book, Chapter, Child, ChildModel1, ChildModel2,
Fashionista, FootNote, Holder, Holder2, Holder3, Holder4, Inner, Inner2,
Inner3, Inner4Stacked, Inner4Tabular, Novel, OutfitItem, Parent,
ParentModelWithCustomPk, Person, Poll, Profile, ProfileCollection,
Question, Sighting, SomeChildModel, SomeParentModel, Teacher,
)
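# Fragment of the markup rendered for an inline object's "Change" link; used
# in assertions below.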
INLINE_CHANGELINK_HTML = 'class="inlinechangelink">Change</a>'
class TestDataMixin(object):
@classmethod
def setUpTestData(cls):
        # The password hash below corresponds to the raw password "secret".
User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class TestInline(TestDataMixin, TestCase):
def setUp(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
        result = self.client.login(username='super', password='secret')
        self.assertTrue(result)
self.factory = RequestFactory()
def test_can_delete(self):
"""
        can_delete should be passed through to inlineformset_factory().
"""
holder = Holder.objects.get(dummy=13)
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
inner_formset = response.context['inline_admin_formsets'][0].formset
expected = InnerInline.can_delete
actual = inner_formset.can_delete
        self.assertEqual(expected, actual, 'can_delete must be passed through to the formset')
def test_readonly_stacked_inline_label(self):
"""Bug #13174."""
holder = Holder.objects.create(dummy=42)
Inner.objects.create(holder=holder, dummy=42, readonly='')
response = self.client.get(
reverse('admin:admin_inlines_holder_change', args=(holder.id,))
)
self.assertContains(response, '<label>Inner readonly label:</label>')
def test_many_to_many_inlines(self):
"Autogenerated many-to-many inlines are displayed correctly (#13407)"
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# The heading for the m2m inline block uses the right text
self.assertContains(response, '<h2>Author-book relationships</h2>')
# The "add another" label is correct
self.assertContains(response, 'Add another Author\\u002Dbook relationship')
# The '+' is dropped from the autogenerated form prefix (Author_books+)
self.assertContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't carry her own bags.
data = {
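            # Inline formset management form data: one new form, no existing ones.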
'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0,
'shoppingweakness_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'person': person.id,
'max_weight': 0,
'shoppingweakness_set-0-item': item.id,
}
response = self.client.post(reverse('admin:admin_inlines_fashionista_add'), data)
self.assertEqual(response.status_code, 302)
        self.assertEqual(Fashionista.objects.filter(person__firstname='Imelda').count(), 1)
def test_tabular_non_field_errors(self):
"""
Ensure that non_field_errors are displayed correctly, including the
right value for colspan. Refs #13510.
"""
data = {
'title_set-TOTAL_FORMS': 1,
'title_set-INITIAL_FORMS': 0,
'title_set-MAX_NUM_FORMS': 0,
'_save': 'Save',
'title_set-0-title1': 'a title',
'title_set-0-title2': 'a different title',
}
response = self.client.post(reverse('admin:admin_inlines_titlecollection_add'), data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield">'
            '<li>The two titles must be the same</li></ul></td></tr>'
        )
def test_no_parent_callable_lookup(self):
"""Admin inline `readonly_field` shouldn't invoke parent ModelAdmin callable"""
# Identically named callable isn't present in the parent ModelAdmin,
# rendering of the add view shouldn't explode
response = self.client.get(reverse('admin:admin_inlines_novel_add'))
self.assertEqual(response.status_code, 200)
# View should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="chapter_set-group">')
def test_callable_lookup(self):
"""Admin inline should invoke local callable when its name is listed in readonly_fields"""
response = self.client.get(reverse('admin:admin_inlines_poll_add'))
self.assertEqual(response.status_code, 200)
# Add parent object view should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="question_set-group">')
# The right callable should be used for the inline readonly_fields
# column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>')
def test_help_text(self):
"""
Ensure that the inlines' model field help texts are displayed when
using both the stacked and tabular layouts.
Ref #8190.
"""
response = self.client.get(reverse('admin:admin_inlines_holder4_add'))
self.assertContains(response, '<p class="help">Awesome stacked help text is awesome.</p>', 4)
        self.assertContains(
            response,
            '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" '
            'width="10" height="10" alt="(Awesome tabular help text is awesome.)" '
            'title="Awesome tabular help text is awesome." />',
            1
        )
# ReadOnly fields
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
        self.assertContains(
            response,
            '<img src="/static/admin/img/icon-unknown.gif" class="help help-tooltip" '
            'width="10" height="10" alt="(Help text for ReadOnlyInline)" '
            'title="Help text for ReadOnlyInline" />',
            1
        )
def test_inline_hidden_field_no_column(self):
"""#18263 -- Make sure hidden fields don't get a column in tabular inlines"""
parent = SomeParentModel.objects.create(name='a')
SomeChildModel.objects.create(name='b', position='0', parent=parent)
SomeChildModel.objects.create(name='c', position='1', parent=parent)
response = self.client.get(reverse('admin:admin_inlines_someparentmodel_change', args=(parent.pk,)))
self.assertNotContains(response, '<td class="field-position">')
self.assertContains(response, (
'<input id="id_somechildmodel_set-1-position" '
'name="somechildmodel_set-1-position" type="hidden" value="1" />'))
def test_non_related_name_inline(self):
"""
Ensure that multiple inlines with related_name='+' have correct form
prefixes. Bug #16838.
"""
response = self.client.get(reverse('admin:admin_inlines_capofamiglia_add'))
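        # related_name='+' produces autogenerated formset prefixes ("-1" and "-2").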
self.assertContains(response,
'<input type="hidden" name="-1-0-id" id="id_-1-0-id" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-1-0-capo_famiglia" id="id_-1-0-capo_famiglia" />', html=True)
self.assertContains(response,
'<input id="id_-1-0-name" type="text" class="vTextField" '
'name="-1-0-name" maxlength="100" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-2-0-id" id="id_-2-0-id" />', html=True)
self.assertContains(response,
'<input type="hidden" name="-2-0-capo_famiglia" id="id_-2-0-capo_famiglia" />', html=True)
self.assertContains(response,
'<input id="id_-2-0-name" type="text" class="vTextField" '
'name="-2-0-name" maxlength="100" />', html=True)
@override_settings(USE_L10N=True, USE_THOUSAND_SEPARATOR=True)
def test_localize_pk_shortcut(self):
"""
Ensure that the "View on Site" link is correct for locales that use
thousand separators
"""
holder = Holder.objects.create(pk=123456789, dummy=42)
inner = Inner.objects.create(pk=987654321, holder=holder, dummy=42, readonly='')
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.id,)))
inner_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(inner).pk, inner.pk)
self.assertContains(response, inner_shortcut)
def test_custom_pk_shortcut(self):
"""
Ensure that the "View on Site" link is correct for models with a
custom primary key field. Bug #18433.
"""
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
child1 = ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
child2 = ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
child1_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child1).pk, child1.pk)
child2_shortcut = 'r/%s/%s/' % (ContentType.objects.get_for_model(child2).pk, child2.pk)
self.assertContains(response, child1_shortcut)
self.assertContains(response, child2_shortcut)
def test_create_inlines_on_inherited_model(self):
"""
Ensure that an object can be created with inlines when it inherits
another class. Bug #19524.
"""
data = {
'name': 'Martian',
'sighting_set-TOTAL_FORMS': 1,
'sighting_set-INITIAL_FORMS': 0,
'sighting_set-MAX_NUM_FORMS': 0,
'sighting_set-0-place': 'Zone 51',
'_save': 'Save',
}
response = self.client.post(reverse('admin:admin_inlines_extraterrestrial_add'), data)
self.assertEqual(response.status_code, 302)
self.assertEqual(Sighting.objects.filter(et__name='Martian').count(), 1)
def test_custom_get_extra_form(self):
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
# The maximum number of forms should respect 'get_max_num' on the
# ModelAdmin
max_forms_input = '<input id="id_binarytree_set-MAX_NUM_FORMS" name="binarytree_set-MAX_NUM_FORMS" type="hidden" value="%d" />'
# The total number of forms will remain the same in either case
total_forms_hidden = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="2" />'
response = self.client.get(reverse('admin:admin_inlines_binarytree_add'))
self.assertContains(response, max_forms_input % 3)
self.assertContains(response, total_forms_hidden)
response = self.client.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
self.assertContains(response, max_forms_input % 2)
self.assertContains(response, total_forms_hidden)
def test_min_num(self):
"""
Ensure that min_num and extra determine number of forms.
"""
class MinNumInline(TabularInline):
model = BinaryTree
min_num = 2
extra = 3
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="2" />'
total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="5" />'
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertContains(response, min_forms)
self.assertContains(response, total_forms)
def test_custom_min_num(self):
"""
Ensure that get_min_num is called and used correctly.
"""
bt_head = BinaryTree.objects.create(name="Tree Head")
BinaryTree.objects.create(name="First Child", parent=bt_head)
class MinNumInline(TabularInline):
model = BinaryTree
extra = 3
def get_min_num(self, request, obj=None, **kwargs):
if obj:
return 5
return 2
modeladmin = ModelAdmin(BinaryTree, admin_site)
modeladmin.inlines = [MinNumInline]
min_forms = '<input id="id_binarytree_set-MIN_NUM_FORMS" name="binarytree_set-MIN_NUM_FORMS" type="hidden" value="%d" />'
total_forms = '<input id="id_binarytree_set-TOTAL_FORMS" name="binarytree_set-TOTAL_FORMS" type="hidden" value="%d" />'
request = self.factory.get(reverse('admin:admin_inlines_binarytree_add'))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request)
self.assertContains(response, min_forms % 2)
self.assertContains(response, total_forms % 5)
request = self.factory.get(reverse('admin:admin_inlines_binarytree_change', args=(bt_head.id,)))
request.user = User(username='super', is_superuser=True)
response = modeladmin.changeform_view(request, object_id=str(bt_head.id))
self.assertContains(response, min_forms % 5)
self.assertContains(response, total_forms % 8)
def test_inline_nonauto_noneditable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(response,
'<input id="id_nonautopkbook_set-0-rand_pk" name="nonautopkbook_set-0-rand_pk" type="hidden" />',
html=True)
self.assertContains(response,
'<input id="id_nonautopkbook_set-2-0-rand_pk" name="nonautopkbook_set-2-0-rand_pk" type="hidden" />',
html=True)
def test_inline_editable_pk(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
self.assertContains(response,
'<input class="vIntegerField" id="id_editablepkbook_set-0-manual_pk" name="editablepkbook_set-0-manual_pk" type="text" />',
html=True, count=1)
self.assertContains(response,
'<input class="vIntegerField" id="id_editablepkbook_set-2-0-manual_pk" name="editablepkbook_set-2-0-manual_pk" type="text" />',
html=True, count=1)
def test_stacked_inline_edit_form_contains_has_original_class(self):
holder = Holder.objects.create(dummy=1)
holder.inner_set.create(dummy=1)
response = self.client.get(reverse('admin:admin_inlines_holder_change', args=(holder.pk,)))
self.assertContains(
response,
'<div class="inline-related has_original" id="inner_set-0">',
count=1
)
self.assertContains(
response,
'<div class="inline-related" id="inner_set-1">',
count=1
)
def test_inlines_show_change_link_registered(self):
"Inlines `show_change_link` for registered models when enabled."
holder = Holder4.objects.create(dummy=1)
item1 = Inner4Stacked.objects.create(dummy=1, holder=holder)
item2 = Inner4Tabular.objects.create(dummy=1, holder=holder)
items = (
('inner4stacked', item1.pk),
('inner4tabular', item2.pk),
)
response = self.client.get(reverse('admin:admin_inlines_holder4_change', args=(holder.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
for model, pk in items:
url = reverse('admin:admin_inlines_%s_change' % model, args=(pk,))
self.assertContains(response, '<a href="%s" %s' % (url, INLINE_CHANGELINK_HTML))
def test_inlines_show_change_link_unregistered(self):
"Inlines `show_change_link` disabled for unregistered models."
parent = ParentModelWithCustomPk.objects.create(my_own_pk="foo", name="Foo")
ChildModel1.objects.create(my_own_pk="bar", name="Bar", parent=parent)
ChildModel2.objects.create(my_own_pk="baz", name="Baz", parent=parent)
response = self.client.get(reverse('admin:admin_inlines_parentmodelwithcustompk_change', args=('foo',)))
self.assertFalse(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
def test_tabular_inline_show_change_link_false_registered(self):
"Inlines `show_change_link` disabled by default."
poll = Poll.objects.create(name="New poll")
Question.objects.create(poll=poll)
response = self.client.get(reverse('admin:admin_inlines_poll_change', args=(poll.pk,)))
self.assertTrue(response.context['inline_admin_formset'].opts.has_registered_model)
self.assertNotContains(response, INLINE_CHANGELINK_HTML)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class TestInlineMedia(TestDataMixin, TestCase):
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def test_inline_media_only_base(self):
holder = Holder(dummy=13)
holder.save()
Inner(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
def test_inline_media_only_inline(self):
holder = Holder3(dummy=13)
holder.save()
Inner3(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder3_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_inline_scripts.js')
def test_all_inline_media(self):
holder = Holder2(dummy=13)
holder.save()
Inner2(dummy=42, holder=holder).save()
change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
response = self.client.get(change_url)
self.assertContains(response, 'my_awesome_admin_scripts.js')
self.assertContains(response, 'my_awesome_inline_scripts.js')
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlineAdminForm(TestCase):
def test_immutable_content_type(self):
"""Regression for #9362
The problem depends only on InlineAdminForm and its "original"
argument, so we can safely set the other arguments to None/{}. We just
need to check that the content_type argument of Child isn't altered by
the internals of the inline form."""
sally = Teacher.objects.create(name='Sally')
john = Parent.objects.create(name='John')
joe = Child.objects.create(name='Joe', teacher=sally, parent=john)
iaf = InlineAdminForm(None, None, {}, {}, joe)
parent_ct = ContentType.objects.get_for_model(Parent)
self.assertEqual(iaf.original.content_type, parent_ct)
def test_original_content_type_id_deprecated(self):
"""
#23444 -- Verify a warning is raised when accessing
`original_content_type_id` attribute of `InlineAdminForm` object.
"""
iaf = InlineAdminForm(None, None, {}, {}, None)
poll = Poll.objects.create(name="poll")
iaf2 = InlineAdminForm(None, None, {}, {}, poll)
poll_ct = ContentType.objects.get_for_model(Poll)
with warnings.catch_warnings(record=True) as recorded:
warnings.filterwarnings('always')
with self.assertRaises(AttributeError):
iaf.original_content_type_id
msg = force_text(recorded.pop().message)
self.assertEqual(
msg,
'InlineAdminForm.original_content_type_id is deprecated and will be '
'removed in Django 1.10. If you were using this attribute to construct '
'the "view on site" URL, use the `absolute_url` attribute instead.'
)
self.assertEqual(iaf2.original_content_type_id, poll_ct.id)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class TestInlineProtectedOnDelete(TestDataMixin, TestCase):
def setUp(self):
result = self.client.login(username='super', password='secret')
self.assertEqual(result, True)
def test_deleting_inline_with_protected_delete_does_not_validate(self):
lotr = Novel.objects.create(name='Lord of the rings')
chapter = Chapter.objects.create(novel=lotr, name='Many Meetings')
foot_note = FootNote.objects.create(chapter=chapter, note='yadda yadda')
change_url = reverse('admin:admin_inlines_novel_change', args=(lotr.id,))
response = self.client.get(change_url)
data = {
'name': lotr.name,
'chapter_set-TOTAL_FORMS': 1,
'chapter_set-INITIAL_FORMS': 1,
'chapter_set-MAX_NUM_FORMS': 1000,
'_save': 'Save',
'chapter_set-0-id': chapter.id,
'chapter_set-0-name': chapter.name,
'chapter_set-0-novel': lotr.id,
'chapter_set-0-DELETE': 'on'
}
response = self.client.post(change_url, data)
self.assertEqual(response.status_code, 200)
self.assertContains(response, "Deleting chapter %s would require deleting "
"the following protected related objects: foot note %s"
% (chapter, foot_note))
@override_settings(ROOT_URLCONF="admin_inlines.urls")
class TestInlinePermissions(TestCase):
"""
Make sure the admin respects permissions for objects that are edited
inline. Refs #8060.
"""
def setUp(self):
self.user = User(username='admin')
self.user.is_staff = True
self.user.is_active = True
self.user.set_password('secret')
self.user.save()
self.author_ct = ContentType.objects.get_for_model(Author)
self.holder_ct = ContentType.objects.get_for_model(Holder2)
self.book_ct = ContentType.objects.get_for_model(Book)
self.inner_ct = ContentType.objects.get_for_model(Inner2)
# The user always has permissions to add and change Authors and Holders,
# the main (parent) models of the inlines. Permissions on the inlines
# vary per test.
permission = Permission.objects.get(codename='add_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_author', content_type=self.author_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='add_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_holder2', content_type=self.holder_ct)
self.user.user_permissions.add(permission)
author = Author.objects.create(pk=1, name='The Author')
book = author.books.create(name='The inline Book')
self.author_change_url = reverse('admin:admin_inlines_author_change', args=(author.id,))
# Get the ID of the automatically created intermediate model for the Author-Book m2m
author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk
holder = Holder2.objects.create(dummy=13)
inner2 = Inner2.objects.create(dummy=42, holder=holder)
self.holder_change_url = reverse('admin:admin_inlines_holder2_change', args=(holder.id,))
self.inner2_id = inner2.id
self.assertEqual(
self.client.login(username='admin', password='secret'),
True)
def test_inline_add_m2m_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_noperm(self):
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_change_m2m_noperm(self):
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_change_fk_noperm(self):
response = self.client.get(self.holder_change_url)
# No permissions on Inner2s, so no inline
self.assertNotContains(response, '<h2>Inner2s</h2>')
self.assertNotContains(response, 'Add another Inner2')
self.assertNotContains(response, 'id="id_inner2_set-TOTAL_FORMS"')
def test_inline_add_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_author_add'))
# No change permission on Books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
def test_inline_add_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(reverse('admin:admin_inlines_holder2_add'))
# Add permission on inner2s, so we get the inline
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
def test_inline_change_m2m_add_perm(self):
permission = Permission.objects.get(codename='add_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# No change permission on books, so no inline
self.assertNotContains(response, '<h2>Author-book relationships</h2>')
self.assertNotContains(response, 'Add another Author\\u002DBook Relationship')
self.assertNotContains(response, 'id="id_Author_books-TOTAL_FORMS"')
self.assertNotContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_m2m_change_perm(self):
permission = Permission.objects.get(codename='change_book', content_type=self.book_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.author_change_url)
# We have change perm on books, so we can add/change/delete inlines
self.assertContains(response, '<h2>Author-book relationships</h2>')
self.assertContains(response, 'Add another Author\\u002Dbook relationship')
self.assertContains(response, '<input type="hidden" id="id_Author_books-TOTAL_FORMS" '
'value="4" name="Author_books-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_Author_books-0-id" '
'value="%i" name="Author_books-0-id" />' % self.author_book_auto_m2m_intermediate_id, html=True)
self.assertContains(response, 'id="id_Author_books-0-DELETE"')
def test_inline_change_fk_add_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add permission on inner2s, so we can add but not modify existing
self.assertContains(response, '<h2>Inner2s</h2>')
self.assertContains(response, 'Add another Inner2')
# 3 extra forms only, not the existing instance form
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="3" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertNotContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change permission on inner2s, so we can change existing but not add new
self.assertContains(response, '<h2>Inner2s</h2>')
# Just the one form for existing instances
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
# max-num 0 means we can't add new ones
self.assertContains(response, '<input type="hidden" id="id_inner2_set-MAX_NUM_FORMS" '
'value="0" name="inner2_set-MAX_NUM_FORMS" />', html=True)
def test_inline_change_fk_add_change_perm(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Add/change perm, so we can add new and change existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance and three extra for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
def test_inline_change_fk_change_del_perm(self):
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# Change/delete perm on inner2s, so we can change/delete existing
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, no new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="1" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
def test_inline_change_fk_all_perms(self):
permission = Permission.objects.get(codename='add_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='change_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
permission = Permission.objects.get(codename='delete_inner2', content_type=self.inner_ct)
self.user.user_permissions.add(permission)
response = self.client.get(self.holder_change_url)
# All perms on inner2s, so we can add/change/delete
self.assertContains(response, '<h2>Inner2s</h2>')
# One form for existing instance only, three for new
self.assertContains(response, '<input type="hidden" id="id_inner2_set-TOTAL_FORMS" '
'value="4" name="inner2_set-TOTAL_FORMS" />', html=True)
self.assertContains(response, '<input type="hidden" id="id_inner2_set-0-id" '
'value="%i" name="inner2_set-0-id" />' % self.inner2_id, html=True)
self.assertContains(response, 'id="id_inner2_set-0-DELETE"')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_inlines.urls")
class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):
available_apps = ['admin_inlines'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
# password = "secret"
User.objects.create(
pk=100, username='super', first_name='Super', last_name='User', email='[email protected]',
password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158', is_active=True, is_superuser=True,
is_staff=True, last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def test_add_stackeds(self):
"""
Ensure that the "Add another XXX" link correctly adds items to the
stacked formset.
"""
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_holder4_add')))
inline_id = '#inner4stacked_set-group'
rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
'%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
self.assertEqual(rows_length(), 4)
def test_delete_stackeds(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_holder4_add')))
inline_id = '#inner4stacked_set-group'
rows_length = lambda: len(self.selenium.find_elements_by_css_selector(
'%s .dynamic-inner4stacked_set' % inline_id))
self.assertEqual(rows_length(), 3)
add_button = self.selenium.find_element_by_link_text(
'Add another Inner4 stacked')
add_button.click()
add_button.click()
self.assertEqual(rows_length(), 5, msg="sanity check")
for delete_link in self.selenium.find_elements_by_css_selector(
'%s .inline-deletelink' % inline_id):
delete_link.click()
self.assertEqual(rows_length(), 3)
def test_add_inlines(self):
"""
Ensure that the "Add another XXX" link correctly adds items to the
inline form.
"""
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Check that there's only one inline to start with and that it has the
# correct ID.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 1)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[0].get_attribute('id'),
'profile_set-0')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-0 input[name=profile_set-0-last_name]')), 1)
# Add an inline
self.selenium.find_element_by_link_text('Add another Profile').click()
# Check that the inline has been added, that it has the right id, and
# that it contains the right fields.
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 2)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[1].get_attribute('id'), 'profile_set-1')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-1 input[name=profile_set-1-last_name]')), 1)
# Let's add another one to be sure
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')), 3)
self.assertEqual(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set')[2].get_attribute('id'), 'profile_set-2')
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-first_name]')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'.dynamic-profile_set#profile_set-2 input[name=profile_set-2-last_name]')), 1)
# Enter some data and click 'Save'
self.selenium.find_element_by_name('profile_set-0-first_name').send_keys('0 first name 1')
self.selenium.find_element_by_name('profile_set-0-last_name').send_keys('0 last name 2')
self.selenium.find_element_by_name('profile_set-1-first_name').send_keys('1 first name 1')
self.selenium.find_element_by_name('profile_set-1-last_name').send_keys('1 last name 2')
self.selenium.find_element_by_name('profile_set-2-first_name').send_keys('2 first name 1')
self.selenium.find_element_by_name('profile_set-2-last_name').send_keys('2 last name 2')
self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
self.wait_page_loaded()
# Check that the objects have been created in the database
self.assertEqual(ProfileCollection.objects.all().count(), 1)
self.assertEqual(Profile.objects.all().count(), 3)
def test_delete_inlines(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 5)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-3')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-4')), 1)
# Click on a few delete buttons
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1 td.delete a').click()
self.selenium.find_element_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2 td.delete a').click()
# Verify that they're gone and that the IDs have been re-sequenced
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'#profile_set-group table tr.dynamic-profile_set')), 3)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-0')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-1')), 1)
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
'form#profilecollection_form tr.dynamic-profile_set#profile_set-2')), 1)
def test_alternating_rows(self):
self.admin_login(username='super', password='secret')
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_inlines_profilecollection_add')))
# Add a few inlines
self.selenium.find_element_by_link_text('Add another Profile').click()
self.selenium.find_element_by_link_text('Add another Profile').click()
row_selector = 'form#profilecollection_form tr.dynamic-profile_set'
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row1" % row_selector)), 2, msg="Expect two row1 styled rows")
self.assertEqual(len(self.selenium.find_elements_by_css_selector(
"%s.row2" % row_selector)), 1, msg="Expect one row2 styled row")
class SeleniumChromeTests(SeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumIETests(SeleniumFirefoxTests):
webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
| bsd-3-clause | 4,721,046,444,730,954,000 | 51.12426 | 233 | 0.656533 | false |
dbkaynor/PyCopyMoveTk | auxfiles/Send2Trash/Send2Trash-1.3.0/send2trash/plat_win.py | 2 | 1655 | # Copyright 2013 Hardcoded Software (http://www.hardcoded.net)
# This software is licensed under the "BSD" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.hardcoded.net/licenses/bsd_license
from __future__ import unicode_literals
from ctypes import windll, Structure, byref, c_uint
from ctypes.wintypes import HWND, UINT, LPCWSTR, BOOL
import os.path as op
from .compat import text_type
shell32 = windll.shell32
SHFileOperationW = shell32.SHFileOperationW
class SHFILEOPSTRUCTW(Structure):
_fields_ = [
("hwnd", HWND),
("wFunc", UINT),
("pFrom", LPCWSTR),
("pTo", LPCWSTR),
("fFlags", c_uint),
("fAnyOperationsAborted", BOOL),
("hNameMappings", c_uint),
("lpszProgressTitle", LPCWSTR),
]
FO_MOVE = 1
FO_COPY = 2
FO_DELETE = 3
FO_RENAME = 4
FOF_MULTIDESTFILES = 1
FOF_SILENT = 4
FOF_NOCONFIRMATION = 16
FOF_ALLOWUNDO = 64
FOF_NOERRORUI = 1024
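# The FO_* operation codes and FOF_* flags mirror the Win32 SHFileOperation
# API. send2trash() below combines FOF_ALLOWUNDO (send to the Recycle Bin
# rather than deleting outright) with the silent/no-confirmation/no-error-UI
# flags so the operation never prompts the user.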
def send2trash(path):
if not isinstance(path, text_type):
path = text_type(path, 'mbcs')
if not op.isabs(path):
path = op.abspath(path)
fileop = SHFILEOPSTRUCTW()
fileop.hwnd = 0
fileop.wFunc = FO_DELETE
fileop.pFrom = LPCWSTR(path + '\0')
fileop.pTo = None
fileop.fFlags = FOF_ALLOWUNDO | FOF_NOCONFIRMATION | FOF_NOERRORUI | FOF_SILENT
fileop.fAnyOperationsAborted = 0
fileop.hNameMappings = 0
fileop.lpszProgressTitle = None
result = SHFileOperationW(byref(fileop))
if result:
msg = "Couldn't perform operation. Error code: %d" % result
raise OSError(msg)
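# A minimal usage sketch (hypothetical demo, not part of the original module):
# create a throwaway file and move it to the Recycle Bin via send2trash().
if __name__ == '__main__':
    import os
    import tempfile
    fd, demo_path = tempfile.mkstemp()  # demo_path: a hypothetical temp file
    os.close(fd)
    send2trash(demo_path)
    assert not op.exists(demo_path), "file should now be in the Recycle Bin"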
| gpl-2.0 | 6,834,559,875,260,292,000 | 26.583333 | 87 | 0.670091 | false |
timonwong/foo_uie_wsh_panel_mod.scintilla | test/simpleTests.py | 4 | 74073 | # -*- coding: utf-8 -*-
# Requires Python 2.7 or later
from __future__ import with_statement
from __future__ import unicode_literals
import codecs, ctypes, os, sys, unittest
if sys.platform == "win32":
import XiteWin as Xite
else:
import XiteQt as Xite
class TestSimple(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testLength(self):
self.assertEquals(self.ed.Length, 0)
def testAddText(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.GetCharAt(0), ord("x"))
self.assertEquals(self.ed.GetStyleAt(0), 0)
self.ed.ClearAll()
self.assertEquals(self.ed.Length, 0)
def testDeleteRange(self):
self.ed.AddText(5, b"abcde")
self.assertEquals(self.ed.Length, 5)
self.ed.DeleteRange(1, 2)
self.assertEquals(self.ed.Length, 3)
self.assertEquals(self.ed.Contents(), b"ade")
def testAddStyledText(self):
self.assertEquals(self.ed.EndStyled, 0)
self.ed.AddStyledText(2, b"x\002")
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.GetCharAt(0), ord("x"))
self.assertEquals(self.ed.GetStyleAt(0), 2)
self.assertEquals(self.ed.StyledTextRange(0, 1), b"x\002")
self.ed.ClearDocumentStyle()
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.GetCharAt(0), ord("x"))
self.assertEquals(self.ed.GetStyleAt(0), 0)
self.assertEquals(self.ed.StyledTextRange(0, 1), b"x\0")
def testStyling(self):
self.assertEquals(self.ed.EndStyled, 0)
self.ed.AddStyledText(4, b"x\002y\003")
self.assertEquals(self.ed.StyledTextRange(0, 2), b"x\002y\003")
self.ed.StartStyling(0,0xf)
self.ed.SetStyling(1, 5)
self.assertEquals(self.ed.StyledTextRange(0, 2), b"x\005y\003")
self.ed.StartStyling(0,0xff)
self.ed.SetStylingEx(2, b"\100\101")
self.assertEquals(self.ed.StyledTextRange(0, 2), b"x\100y\101")
def testPosition(self):
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 0)
self.ed.AddText(1, b"x")
# Caret has automatically moved
self.assertEquals(self.ed.CurrentPos, 1)
self.assertEquals(self.ed.Anchor, 1)
self.ed.SelectAll()
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 1)
self.ed.Anchor = 0
self.assertEquals(self.ed.Anchor, 0)
# Check line positions
self.assertEquals(self.ed.PositionFromLine(0), 0)
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.PositionFromLine(1), 1)
self.ed.CurrentPos = 1
self.assertEquals(self.ed.Anchor, 0)
self.assertEquals(self.ed.CurrentPos, 1)
def testBeyondEnd(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 1)
self.assertEquals(self.ed.GetLineEndPosition(2), 1)
def testSelection(self):
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 0)
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 0)
self.ed.AddText(1, b"x")
self.ed.SelectionStart = 0
self.assertEquals(self.ed.CurrentPos, 1)
self.assertEquals(self.ed.Anchor, 0)
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 1)
self.ed.SelectionStart = 1
self.assertEquals(self.ed.CurrentPos, 1)
self.assertEquals(self.ed.Anchor, 1)
self.assertEquals(self.ed.SelectionStart, 1)
self.assertEquals(self.ed.SelectionEnd, 1)
self.ed.SelectionEnd = 0
self.assertEquals(self.ed.CurrentPos, 0)
self.assertEquals(self.ed.Anchor, 0)
def testSetSelection(self):
self.ed.AddText(4, b"abcd")
self.ed.SetSel(1, 3)
self.assertEquals(self.ed.SelectionStart, 1)
self.assertEquals(self.ed.SelectionEnd, 3)
result = self.ed.GetSelText(0)
self.assertEquals(result, b"bc\0")
self.ed.ReplaceSel(0, b"1234")
self.assertEquals(self.ed.Length, 6)
self.assertEquals(self.ed.Contents(), b"a1234d")
def testReadOnly(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.ReadOnly, 0)
self.assertEquals(self.ed.Contents(), b"x")
self.ed.ReadOnly = 1
self.assertEquals(self.ed.ReadOnly, 1)
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.Contents(), b"x")
self.ed.ReadOnly = 0
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.Contents(), b"xx")
self.ed.Null()
self.assertEquals(self.ed.Contents(), b"xx")
def testAddLine(self):
data = b"x" * 70 + b"\n"
for i in range(5):
self.ed.AddText(len(data), data)
self.xite.DoEvents()
self.assertEquals(self.ed.LineCount, i + 2)
self.assert_(self.ed.Length > 0)
def testInsertText(self):
data = b"xy"
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(data, self.ed.ByteRange(0,2))
self.ed.InsertText(1, data)
# Should now be "xxyy"
self.assertEquals(self.ed.Length, 4)
self.assertEquals(b"xxyy", self.ed.ByteRange(0,4))
def testInsertNul(self):
data = b"\0"
self.ed.AddText(1, data)
self.assertEquals(self.ed.Length, 1)
self.assertEquals(data, self.ed.ByteRange(0,1))
def testUndoRedo(self):
data = b"xy"
self.assertEquals(self.ed.Modify, 0)
self.assertEquals(self.ed.UndoCollection, 1)
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.ed.Modify, 1)
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 1)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.ed.Modify, 0)
self.assertEquals(self.ed.CanRedo(), 1)
self.assertEquals(self.ed.CanUndo(), 0)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.ed.Modify, 1)
self.assertEquals(data, self.ed.Contents())
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 1)
def testUndoSavePoint(self):
data = b"xy"
self.assertEquals(self.ed.Modify, 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Modify, 1)
self.ed.SetSavePoint()
self.assertEquals(self.ed.Modify, 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.Modify, 1)
def testUndoCollection(self):
data = b"xy"
self.assertEquals(self.ed.UndoCollection, 1)
self.ed.UndoCollection = 0
self.assertEquals(self.ed.UndoCollection, 0)
self.ed.InsertText(0, data)
self.assertEquals(self.ed.CanRedo(), 0)
self.assertEquals(self.ed.CanUndo(), 0)
self.ed.UndoCollection = 1
def testGetColumn(self):
self.ed.AddText(1, b"x")
self.assertEquals(self.ed.GetColumn(0), 0)
self.assertEquals(self.ed.GetColumn(1), 1)
# Next line caused infinite loop in 1.71
self.assertEquals(self.ed.GetColumn(2), 1)
self.assertEquals(self.ed.GetColumn(3), 1)
def testTabWidth(self):
self.assertEquals(self.ed.TabWidth, 8)
self.ed.AddText(3, b"x\tb")
self.assertEquals(self.ed.GetColumn(0), 0)
self.assertEquals(self.ed.GetColumn(1), 1)
self.assertEquals(self.ed.GetColumn(2), 8)
for col in range(10):
if col == 0:
self.assertEquals(self.ed.FindColumn(0, col), 0)
elif col == 1:
self.assertEquals(self.ed.FindColumn(0, col), 1)
elif col == 8:
self.assertEquals(self.ed.FindColumn(0, col), 2)
elif col == 9:
self.assertEquals(self.ed.FindColumn(0, col), 3)
else:
self.assertEquals(self.ed.FindColumn(0, col), 1)
self.ed.TabWidth = 4
self.assertEquals(self.ed.TabWidth, 4)
self.assertEquals(self.ed.GetColumn(0), 0)
self.assertEquals(self.ed.GetColumn(1), 1)
self.assertEquals(self.ed.GetColumn(2), 4)
def testIndent(self):
self.assertEquals(self.ed.Indent, 0)
self.assertEquals(self.ed.UseTabs, 1)
self.ed.Indent = 8
self.ed.UseTabs = 0
self.assertEquals(self.ed.Indent, 8)
self.assertEquals(self.ed.UseTabs, 0)
self.ed.AddText(3, b"x\tb")
self.assertEquals(self.ed.GetLineIndentation(0), 0)
self.ed.InsertText(0, b" ")
self.assertEquals(self.ed.GetLineIndentation(0), 1)
self.assertEquals(self.ed.GetLineIndentPosition(0), 1)
self.assertEquals(self.ed.Contents(), b" x\tb")
self.ed.SetLineIndentation(0,2)
self.assertEquals(self.ed.Contents(), b" x\tb")
self.assertEquals(self.ed.GetLineIndentPosition(0), 2)
self.ed.UseTabs = 1
self.ed.SetLineIndentation(0,8)
self.assertEquals(self.ed.Contents(), b"\tx\tb")
self.assertEquals(self.ed.GetLineIndentPosition(0), 1)
def testGetCurLine(self):
self.ed.AddText(1, b"x")
data = ctypes.create_string_buffer(b"\0" * 100)
caret = self.ed.GetCurLine(len(data), data)
self.assertEquals(caret, 1)
self.assertEquals(data.value, b"x")
def testGetLine(self):
self.ed.AddText(1, b"x")
data = ctypes.create_string_buffer(b"\0" * 100)
self.ed.GetLine(0, data)
self.assertEquals(data.value, b"x")
def testLineEnds(self):
self.ed.AddText(3, b"x\ny")
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 3)
self.assertEquals(self.ed.LineLength(0), 2)
self.assertEquals(self.ed.LineLength(1), 1)
if sys.platform == "win32":
self.assertEquals(self.ed.EOLMode, self.ed.SC_EOL_CRLF)
else:
self.assertEquals(self.ed.EOLMode, self.ed.SC_EOL_LF)
lineEnds = [b"\r\n", b"\r", b"\n"]
for lineEndType in [self.ed.SC_EOL_CR, self.ed.SC_EOL_LF, self.ed.SC_EOL_CRLF]:
self.ed.EOLMode = lineEndType
self.assertEquals(self.ed.EOLMode, lineEndType)
self.ed.ConvertEOLs(lineEndType)
self.assertEquals(self.ed.Contents(), b"x" + lineEnds[lineEndType] + b"y")
self.assertEquals(self.ed.LineLength(0), 1 + len(lineEnds[lineEndType]))
# Several tests for unicode line ends U+2028 and U+2029
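# For reference: in UTF-8, U+2028 LINE SEPARATOR encodes to b"\xe2\x80\xa8"
# and U+2029 PARAGRAPH SEPARATOR to b"\xe2\x80\xa9", which is where the byte
# literals in the tests below come from.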
def testUnicodeLineEnds(self):
# Add two lines separated by U+2028 and ensure the text is seen as two lines,
# then remove U+2028 and it should be just 1 line
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 5)
self.assertEquals(self.ed.LineLength(0), 4)
self.assertEquals(self.ed.LineLength(1), 1)
self.ed.TargetStart = 1
self.ed.TargetEnd = 4
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.LineLength(0), 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 2)
self.assertEquals(self.ed.LineEndTypesSupported, 1)
def testUnicodeLineEndsWithCodePage0(self):
# Try the Unicode line ends when not in Unicode mode -> should remain 1 line
self.ed.SetCodePage(0)
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 1)
self.ed.AddText(4, b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 1)
def testUnicodeLineEndsSwitchToUnicodeAndBack(self):
# Add the Unicode line ends when not in Unicode mode
self.ed.SetCodePage(0)
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 1)
# Into UTF-8 mode - should now be interpreted as two lines
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.assertEquals(self.ed.LineCount, 2)
# Back to code page 0 and 1 line
self.ed.SetCodePage(0)
self.assertEquals(self.ed.LineCount, 1)
def testUFragmentedEOLCompletion(self):
# Add the first 2 bytes of a UTF-8 line end, then complete it
self.ed.ClearAll()
self.ed.AddText(4, b"x\xe2\x80y")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.GetLineEndPosition(0), 4)
self.ed.SetSel(3,3)
self.ed.AddText(1, b"\xa8")
self.assertEquals(self.ed.Contents(), b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
# Add the first byte of a UTF-8 line end, then complete it
self.ed.ClearAll()
self.ed.AddText(3, b"x\xe2y")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.GetLineEndPosition(0), 3)
self.ed.SetSel(2,2)
self.ed.AddText(2, b"\x80\xa8")
self.assertEquals(self.ed.Contents(), b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
def testUFragmentedEOLStart(self):
# Add the trailing bytes of a UTF-8 line end, then insert the leading byte
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.assertEquals(self.ed.LineCount, 1)
self.ed.AddText(4, b"x\x80\xa8y")
self.assertEquals(self.ed.LineCount, 1)
self.ed.SetSel(1,1)
self.ed.AddText(1, b"\xe2")
self.assertEquals(self.ed.LineCount, 2)
def testUBreakApartEOL(self):
# Add two lines separated by U+2029, then remove and add back each byte,
# ensuring only one line remains after any byte of the line end is removed
# and 2 lines after reinsertion
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
text = b"x\xe2\x80\xa9y";
self.ed.AddText(5, text)
self.assertEquals(self.ed.LineCount, 2)
for i in range(len(text)):
self.ed.TargetStart = i
self.ed.TargetEnd = i + 1
self.ed.ReplaceTarget(0, b"")
if i in [0, 4]:
# Removing text characters does not change number of lines
self.assertEquals(self.ed.LineCount, 2)
else:
# Removing byte from line end, removes 1 line
self.assertEquals(self.ed.LineCount, 1)
self.ed.TargetEnd = i
self.ed.ReplaceTarget(1, text[i:i+1])
self.assertEquals(self.ed.LineCount, 2)
def testURemoveEOLFragment(self):
# Add a UTF-8 line end, then delete each byte in turn, causing the line end to disappear
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
for i in range(3):
self.ed.ClearAll()
self.ed.AddText(5, b"x\xe2\x80\xa8y")
self.assertEquals(self.ed.LineCount, 2)
self.ed.TargetStart = i+1
self.ed.TargetEnd = i+2
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
# Several tests for unicode NEL line ends U+0085
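# For reference: U+0085 NEL (NEXT LINE) encodes to b"\xc2\x85" in UTF-8.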
def testNELLineEnds(self):
# Add two lines separated by U+0085 and ensure the text is seen as two lines,
# then remove U+0085 and it should be just 1 line
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.ed.AddText(4, b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 1)
self.assertEquals(self.ed.GetLineEndPosition(1), 4)
self.assertEquals(self.ed.LineLength(0), 3)
self.assertEquals(self.ed.LineLength(1), 1)
self.ed.TargetStart = 1
self.ed.TargetEnd = 3
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.LineLength(0), 2)
self.assertEquals(self.ed.GetLineEndPosition(0), 2)
def testNELFragmentedEOLCompletion(self):
# Add starting byte of UTF-8 NEL then complete it
self.ed.AddText(3, b"x\xc2y")
self.assertEquals(self.ed.LineCount, 1)
self.assertEquals(self.ed.GetLineEndPosition(0), 3)
self.ed.SetSel(2,2)
self.ed.AddText(1, b"\x85")
self.assertEquals(self.ed.Contents(), b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 2)
def testNELFragmentedEOLStart(self):
# Add the trailing byte of a UTF-8 NEL, then insert the leading byte
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
self.assertEquals(self.ed.LineCount, 1)
self.ed.AddText(4, b"x\x85y")
self.assertEquals(self.ed.LineCount, 1)
self.ed.SetSel(1,1)
self.ed.AddText(1, b"\xc2")
self.assertEquals(self.ed.LineCount, 2)
def testNELBreakApartEOL(self):
# Add two lines separated by U+0085, then remove and add back each byte,
# ensuring only one line remains after any byte of the line end is removed
# and 2 lines after reinsertion
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetCodePage(65001)
self.ed.SetLineEndTypesAllowed(1)
text = b"x\xc2\x85y";
self.ed.AddText(4, text)
self.assertEquals(self.ed.LineCount, 2)
for i in range(len(text)):
self.ed.TargetStart = i
self.ed.TargetEnd = i + 1
self.ed.ReplaceTarget(0, b"")
if i in [0, 3]:
# Removing text characters does not change number of lines
self.assertEquals(self.ed.LineCount, 2)
else:
# Removing byte from line end, removes 1 line
self.assertEquals(self.ed.LineCount, 1)
self.ed.TargetEnd = i
self.ed.ReplaceTarget(1, text[i:i+1])
self.assertEquals(self.ed.LineCount, 2)
def testNELRemoveEOLFragment(self):
# Add a UTF-8 NEL, then delete each byte in turn, causing the line end to disappear
self.ed.SetCodePage(65001)
for i in range(2):
self.ed.ClearAll()
self.ed.AddText(4, b"x\xc2\x85y")
self.assertEquals(self.ed.LineCount, 2)
self.ed.TargetStart = i+1
self.ed.TargetEnd = i+2
self.ed.ReplaceTarget(0, b"")
self.assertEquals(self.ed.LineCount, 1)
def testGoto(self):
self.ed.AddText(5, b"a\nb\nc")
self.assertEquals(self.ed.CurrentPos, 5)
self.ed.GotoLine(1)
self.assertEquals(self.ed.CurrentPos, 2)
self.ed.GotoPos(4)
self.assertEquals(self.ed.CurrentPos, 4)
def testCutCopyPaste(self):
self.ed.AddText(5, b"a1b2c")
self.ed.SetSel(1,3)
self.ed.Cut()
self.xite.DoEvents()
self.assertEquals(self.ed.CanPaste(), 1)
self.ed.SetSel(0, 0)
self.ed.Paste()
self.assertEquals(self.ed.Contents(), b"1ba2c")
self.ed.SetSel(4,5)
self.ed.Copy()
self.ed.SetSel(1,3)
self.ed.Paste()
self.assertEquals(self.ed.Contents(), b"1c2c")
self.ed.SetSel(2,4)
self.ed.Clear()
self.assertEquals(self.ed.Contents(), b"1c")
def testCopyAllowLine(self):
self.xite.DoEvents()
lineEndType = self.ed.EOLMode
self.ed.EOLMode = self.ed.SC_EOL_LF
self.ed.AddText(5, b"a1\nb2")
self.ed.SetSel(1,1)
self.ed.CopyAllowLine()
self.xite.DoEvents()
self.assertEquals(self.ed.CanPaste(), 1)
self.ed.SetSel(0, 0)
self.ed.Paste()
self.ed.EOLMode = lineEndType
self.assertEquals(self.ed.Contents(), b"a1\na1\nb2")
def testDuplicate(self):
self.ed.AddText(3, b"1b2")
self.ed.SetSel(1,2)
self.ed.SelectionDuplicate()
self.assertEquals(self.ed.Contents(), b"1bb2")
def testTransposeLines(self):
self.ed.AddText(8, b"a1\nb2\nc3")
self.ed.SetSel(3,3)
self.ed.LineTranspose()
self.assertEquals(self.ed.Contents(), b"b2\na1\nc3")
def testGetSet(self):
self.ed.SetContents(b"abc")
self.assertEquals(self.ed.TextLength, 3)
result = ctypes.create_string_buffer(b"\0" * 5)
length = self.ed.GetText(4, result)
self.assertEquals(result.value, b"abc")
def testAppend(self):
self.ed.SetContents(b"abc")
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 0)
text = b"12"
self.ed.AppendText(len(text), text)
self.assertEquals(self.ed.SelectionStart, 0)
self.assertEquals(self.ed.SelectionEnd, 0)
self.assertEquals(self.ed.Contents(), b"abc12")
def testTarget(self):
self.ed.SetContents(b"abcd")
self.ed.TargetStart = 1
self.ed.TargetEnd = 3
self.assertEquals(self.ed.TargetStart, 1)
self.assertEquals(self.ed.TargetEnd, 3)
rep = b"321"
self.ed.ReplaceTarget(len(rep), rep)
self.assertEquals(self.ed.Contents(), b"a321d")
self.ed.SearchFlags = self.ed.SCFIND_REGEXP
self.assertEquals(self.ed.SearchFlags, self.ed.SCFIND_REGEXP)
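# Scintilla's basic regex syntax uses \( \) to tag groups; a tagged match can
# be read back with GetTag(n) and referenced as \n in ReplaceTargetRE.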
searchString = b"\([1-9]+\)"
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(1, pos)
tagString = self.ed.GetTag(1)
self.assertEquals(tagString, b"321")
rep = b"\\1"
self.ed.TargetStart = 0
self.ed.TargetEnd = 0
self.ed.ReplaceTargetRE(len(rep), rep)
self.assertEquals(self.ed.Contents(), b"321a321d")
self.ed.SetSel(4,5)
self.ed.TargetFromSelection()
self.assertEquals(self.ed.TargetStart, 4)
self.assertEquals(self.ed.TargetEnd, 5)
def testTargetEscape(self):
# Checks that a literal \ can be in the replacement. Bug #2959876
self.ed.SetContents(b"abcd")
self.ed.TargetStart = 1
self.ed.TargetEnd = 3
rep = b"\\\\n"
self.ed.ReplaceTargetRE(len(rep), rep)
self.assertEquals(self.ed.Contents(), b"a\\nd")
def testPointsAndPositions(self):
self.ed.AddText(1, b"x")
# Start of text
self.assertEquals(self.ed.PositionFromPoint(0,0), 0)
# End of text
self.assertEquals(self.ed.PositionFromPoint(0,100), 1)
def testLinePositions(self):
text = b"ab\ncd\nef"
nl = b"\n"
if sys.version_info[0] == 3:
nl = ord(b"\n")
self.ed.AddText(len(text), text)
self.assertEquals(self.ed.LineFromPosition(-1), 0)
line = 0
for pos in range(len(text)+1):
self.assertEquals(self.ed.LineFromPosition(pos), line)
if pos < len(text) and text[pos] == nl:
line += 1
def testWordPositions(self):
text = b"ab cd\tef"
self.ed.AddText(len(text), text)
self.assertEquals(self.ed.WordStartPosition(3, 0), 2)
self.assertEquals(self.ed.WordStartPosition(4, 0), 3)
self.assertEquals(self.ed.WordStartPosition(5, 0), 3)
self.assertEquals(self.ed.WordStartPosition(6, 0), 5)
self.assertEquals(self.ed.WordEndPosition(2, 0), 3)
self.assertEquals(self.ed.WordEndPosition(3, 0), 5)
self.assertEquals(self.ed.WordEndPosition(4, 0), 5)
self.assertEquals(self.ed.WordEndPosition(5, 0), 6)
self.assertEquals(self.ed.WordEndPosition(6, 0), 8)
MODI = 1
UNDO = 2
REDO = 4
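# Bit flags combined by UndoState() below; e.g. a freshly modified document
# that can be undone but not redone reports MODI | UNDO == 3.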
class TestContainerUndo(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.data = b"xy"
def UndoState(self):
return (MODI if self.ed.Modify else 0) | \
(UNDO if self.ed.CanUndo() else 0) | \
(REDO if self.ed.CanRedo() else 0)
def testContainerActNoCoalesce(self):
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 0)
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
def testContainerActCoalesce(self):
self.ed.InsertText(0, self.data)
self.ed.AddUndoAction(5, 1)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
def testContainerMultiStage(self):
self.ed.InsertText(0, self.data)
self.ed.AddUndoAction(5, 1)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
def testContainerMultiStageNoText(self):
self.ed.AddUndoAction(5, 1)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.UndoState(), REDO)
def testContainerActCoalesceEnd(self):
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
self.ed.Redo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
def testContainerBetweenInsertAndInsert(self):
self.assertEquals(self.ed.Length, 0)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.InsertText(2, self.data)
self.assertEquals(self.ed.Length, 4)
self.assertEquals(self.UndoState(), MODI | UNDO)
# Undoes both insertions and the containerAction in the middle
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
def testContainerNoCoalesceBetweenInsertAndInsert(self):
self.assertEquals(self.ed.Length, 0)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 0)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.InsertText(2, self.data)
self.assertEquals(self.ed.Length, 4)
self.assertEquals(self.UndoState(), MODI | UNDO)
# Undo last insertion
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
# Undo container
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
# Undo first insertion
self.ed.Undo()
self.assertEquals(self.ed.Length, 0)
self.assertEquals(self.UndoState(), REDO)
def testContainerBetweenDeleteAndDelete(self):
self.ed.InsertText(0, self.data)
self.ed.EmptyUndoBuffer()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), 0)
self.ed.SetSel(2,2)
self.ed.DeleteBack()
self.assertEquals(self.ed.Length, 1)
self.ed.AddUndoAction(5, 1)
self.ed.DeleteBack()
self.assertEquals(self.ed.Length, 0)
# Undoes both deletions and the containerAction in the middle
self.ed.Undo()
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), REDO)
def testContainerBetweenInsertAndDelete(self):
self.assertEquals(self.ed.Length, 0)
self.ed.InsertText(0, self.data)
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.AddUndoAction(5, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.SetSel(0,1)
self.ed.Cut()
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.UndoState(), MODI | UNDO)
self.ed.Undo() # Only undoes the deletion
self.assertEquals(self.ed.Length, 2)
self.assertEquals(self.UndoState(), MODI | UNDO | REDO)
class TestKeyCommands(unittest.TestCase):
""" These commands are normally assigned to keys and take no arguments """
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def selRange(self):
return self.ed.CurrentPos, self.ed.Anchor
def testLineMove(self):
self.ed.AddText(8, b"x1\ny2\nz3")
self.ed.SetSel(0,0)
self.ed.ChooseCaretX()
self.ed.LineDown()
self.ed.LineDown()
self.assertEquals(self.selRange(), (6, 6))
self.ed.LineUp()
self.assertEquals(self.selRange(), (3, 3))
self.ed.LineDownExtend()
self.assertEquals(self.selRange(), (6, 3))
self.ed.LineUpExtend()
self.ed.LineUpExtend()
self.assertEquals(self.selRange(), (0, 3))
def testCharMove(self):
self.ed.AddText(8, b"x1\ny2\nz3")
self.ed.SetSel(0,0)
self.ed.CharRight()
self.ed.CharRight()
self.assertEquals(self.selRange(), (2, 2))
self.ed.CharLeft()
self.assertEquals(self.selRange(), (1, 1))
self.ed.CharRightExtend()
self.assertEquals(self.selRange(), (2, 1))
self.ed.CharLeftExtend()
self.ed.CharLeftExtend()
self.assertEquals(self.selRange(), (0, 1))
def testWordMove(self):
self.ed.AddText(10, b"a big boat")
self.ed.SetSel(3,3)
self.ed.WordRight()
self.ed.WordRight()
self.assertEquals(self.selRange(), (10, 10))
self.ed.WordLeft()
self.assertEquals(self.selRange(), (6, 6))
self.ed.WordRightExtend()
self.assertEquals(self.selRange(), (10, 6))
self.ed.WordLeftExtend()
self.ed.WordLeftExtend()
self.assertEquals(self.selRange(), (2, 6))
def testHomeEndMove(self):
self.ed.AddText(10, b"a big boat")
self.ed.SetSel(3,3)
self.ed.Home()
self.assertEquals(self.selRange(), (0, 0))
self.ed.LineEnd()
self.assertEquals(self.selRange(), (10, 10))
self.ed.SetSel(3,3)
self.ed.HomeExtend()
self.assertEquals(self.selRange(), (0, 3))
self.ed.LineEndExtend()
self.assertEquals(self.selRange(), (10, 3))
def testStartEndMove(self):
self.ed.AddText(10, b"a\nbig\nboat")
self.ed.SetSel(3,3)
self.ed.DocumentStart()
self.assertEquals(self.selRange(), (0, 0))
self.ed.DocumentEnd()
self.assertEquals(self.selRange(), (10, 10))
self.ed.SetSel(3,3)
self.ed.DocumentStartExtend()
self.assertEquals(self.selRange(), (0, 3))
self.ed.DocumentEndExtend()
self.assertEquals(self.selRange(), (10, 3))
class TestMarkers(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.AddText(5, b"x\ny\nz")
def testMarker(self):
handle = self.ed.MarkerAdd(1,1)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), 1)
self.ed.MarkerDelete(1,1)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), -1)
def testTwiceAddedDelete(self):
handle = self.ed.MarkerAdd(1,1)
self.assertEquals(self.ed.MarkerGet(1), 2)
handle2 = self.ed.MarkerAdd(1,1)
self.assertEquals(self.ed.MarkerGet(1), 2)
self.ed.MarkerDelete(1,1)
self.assertEquals(self.ed.MarkerGet(1), 2)
self.ed.MarkerDelete(1,1)
self.assertEquals(self.ed.MarkerGet(1), 0)
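# MarkerGet returns a bit mask of the marker numbers present on the line,
# so both handles of marker 1 read back as 1 << 1 == 2 until each add has
# been matched by a delete.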
def testMarkerDeleteAll(self):
h1 = self.ed.MarkerAdd(0,1)
h2 = self.ed.MarkerAdd(1,2)
self.assertEquals(self.ed.MarkerLineFromHandle(h1), 0)
self.assertEquals(self.ed.MarkerLineFromHandle(h2), 1)
self.ed.MarkerDeleteAll(1)
self.assertEquals(self.ed.MarkerLineFromHandle(h1), -1)
self.assertEquals(self.ed.MarkerLineFromHandle(h2), 1)
self.ed.MarkerDeleteAll(-1)
self.assertEquals(self.ed.MarkerLineFromHandle(h1), -1)
self.assertEquals(self.ed.MarkerLineFromHandle(h2), -1)
def testMarkerDeleteHandle(self):
handle = self.ed.MarkerAdd(0,1)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), 0)
self.ed.MarkerDeleteHandle(handle)
self.assertEquals(self.ed.MarkerLineFromHandle(handle), -1)
def testMarkerBits(self):
self.assertEquals(self.ed.MarkerGet(0), 0)
self.ed.MarkerAdd(0,1)
self.assertEquals(self.ed.MarkerGet(0), 2)
self.ed.MarkerAdd(0,2)
self.assertEquals(self.ed.MarkerGet(0), 6)
def testMarkerAddSet(self):
self.assertEquals(self.ed.MarkerGet(0), 0)
self.ed.MarkerAddSet(0,5)
self.assertEquals(self.ed.MarkerGet(0), 5)
self.ed.MarkerDeleteAll(-1)
def testMarkerNext(self):
self.assertEquals(self.ed.MarkerNext(0, 2), -1)
h1 = self.ed.MarkerAdd(0,1)
h2 = self.ed.MarkerAdd(2,1)
self.assertEquals(self.ed.MarkerNext(0, 2), 0)
self.assertEquals(self.ed.MarkerNext(1, 2), 2)
self.assertEquals(self.ed.MarkerNext(2, 2), 2)
self.assertEquals(self.ed.MarkerPrevious(0, 2), 0)
self.assertEquals(self.ed.MarkerPrevious(1, 2), 0)
self.assertEquals(self.ed.MarkerPrevious(2, 2), 2)
def testMarkerNegative(self):
self.assertEquals(self.ed.MarkerNext(-1, 2), -1)
def testLineState(self):
self.assertEquals(self.ed.MaxLineState, 0)
self.assertEquals(self.ed.GetLineState(0), 0)
self.assertEquals(self.ed.GetLineState(1), 0)
self.assertEquals(self.ed.GetLineState(2), 0)
self.ed.SetLineState(1, 100)
self.assertNotEquals(self.ed.MaxLineState, 0)
self.assertEquals(self.ed.GetLineState(0), 0)
self.assertEquals(self.ed.GetLineState(1), 100)
self.assertEquals(self.ed.GetLineState(2), 0)
def testSymbolRetrieval(self):
self.ed.MarkerDefine(1,3)
self.assertEquals(self.ed.MarkerSymbolDefined(1), 3)
class TestIndicators(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testSetIndicator(self):
self.assertEquals(self.ed.IndicGetStyle(0), 1)
self.assertEquals(self.ed.IndicGetFore(0), 0x007f00)
self.ed.IndicSetStyle(0, 2)
self.ed.IndicSetFore(0, 0xff0080)
self.assertEquals(self.ed.IndicGetStyle(0), 2)
self.assertEquals(self.ed.IndicGetFore(0), 0xff0080)
def testIndicatorFill(self):
self.ed.InsertText(0, b"abc")
self.ed.IndicatorCurrent = 3
self.ed.IndicatorFillRange(1,1)
self.assertEquals(self.ed.IndicatorValueAt(3, 0), 0)
self.assertEquals(self.ed.IndicatorValueAt(3, 1), 1)
self.assertEquals(self.ed.IndicatorValueAt(3, 2), 0)
self.assertEquals(self.ed.IndicatorStart(3, 0), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 0), 1)
self.assertEquals(self.ed.IndicatorStart(3, 1), 1)
self.assertEquals(self.ed.IndicatorEnd(3, 1), 2)
self.assertEquals(self.ed.IndicatorStart(3, 2), 2)
self.assertEquals(self.ed.IndicatorEnd(3, 2), 3)
def testIndicatorAtEnd(self):
self.ed.InsertText(0, b"ab")
self.ed.IndicatorCurrent = 3
self.ed.IndicatorFillRange(1,1)
self.assertEquals(self.ed.IndicatorValueAt(3, 0), 0)
self.assertEquals(self.ed.IndicatorValueAt(3, 1), 1)
self.assertEquals(self.ed.IndicatorStart(3, 0), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 0), 1)
self.assertEquals(self.ed.IndicatorStart(3, 1), 1)
self.assertEquals(self.ed.IndicatorEnd(3, 1), 2)
self.ed.DeleteRange(1, 1)
# Only one character remains and it has no indicator, so indicator 3 is a single null run
self.assertEquals(self.ed.IndicatorValueAt(3, 0), 0)
# Since null, remaining calls return 0
self.assertEquals(self.ed.IndicatorStart(3, 0), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 0), 0)
self.assertEquals(self.ed.IndicatorStart(3, 1), 0)
self.assertEquals(self.ed.IndicatorEnd(3, 1), 0)
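def _demoIndicatorRun(self):
# Illustrative sketch, not part of the original suite; the missing
# "test" prefix keeps unittest from collecting it. IndicatorFillRange
# takes (start, length), so this marks positions 1 and 2 only.
self.ed.InsertText(0, b"abcd")
self.ed.IndicatorCurrent = 3
self.ed.IndicatorFillRange(1, 2)
return (self.ed.IndicatorStart(3, 1), self.ed.IndicatorEnd(3, 1)) # (1, 3)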
class TestScrolling(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 150 should be enough lines
self.ed.InsertText(0, b"a" * 150 + b"\n" * 150)
def testTop(self):
self.ed.GotoLine(0)
self.assertEquals(self.ed.FirstVisibleLine, 0)
def testLineScroll(self):
self.ed.GotoLine(0)
self.ed.LineScroll(0, 3)
self.assertEquals(self.ed.FirstVisibleLine, 3)
self.ed.LineScroll(0, -2)
self.assertEquals(self.ed.FirstVisibleLine, 1)
self.assertEquals(self.ed.XOffset, 0)
self.ed.LineScroll(10, 0)
self.assertGreater(self.ed.XOffset, 0)
scroll_width = float(self.ed.XOffset) / 10
self.ed.LineScroll(-2, 0)
self.assertEquals(self.ed.XOffset, scroll_width * 8)
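# The arithmetic above assumes a fixed-width default font: after scrolling
# 10 columns right and 2 back, XOffset should equal 8 columns' width.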
def testVisibleLine(self):
self.ed.FirstVisibleLine = 7
self.assertEquals(self.ed.FirstVisibleLine, 7)
class TestSearch(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.InsertText(0, b"a\tbig boat\t")
def testFind(self):
pos = self.ed.FindBytes(0, self.ed.Length, b"zzz", 0)
self.assertEquals(pos, -1)
pos = self.ed.FindBytes(0, self.ed.Length, b"big", 0)
self.assertEquals(pos, 2)
def testFindEmpty(self):
pos = self.ed.FindBytes(0, self.ed.Length, b"", 0)
self.assertEquals(pos, 0)
def testCaseFind(self):
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"big", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bIg", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bIg",
self.ed.SCFIND_MATCHCASE), -1)
def testWordFind(self):
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi",
self.ed.SCFIND_WHOLEWORD), -1)
def testWordStartFind(self):
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi", 0), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"bi",
self.ed.SCFIND_WORDSTART), 2)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"ig", 0), 3)
self.assertEquals(self.ed.FindBytes(0, self.ed.Length, b"ig",
self.ed.SCFIND_WORDSTART), -1)
def testREFind(self):
flags = self.ed.SCFIND_REGEXP
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b.g", 0))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b.g", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\<b.g\>", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b[A-Z]g",
flags | self.ed.SCFIND_MATCHCASE))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]g", flags))
self.assertEquals(6, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]*t", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"^a", flags))
self.assertEquals(10, self.ed.FindBytes(0, self.ed.Length, b"\t$", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\([a]\).*\0", flags))
def testPosixREFind(self):
flags = self.ed.SCFIND_REGEXP | self.ed.SCFIND_POSIX
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b.g", 0))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b.g", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\<b.g\>", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"b[A-Z]g",
flags | self.ed.SCFIND_MATCHCASE))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]g", flags))
self.assertEquals(6, self.ed.FindBytes(0, self.ed.Length, b"b[a-z]*t", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"^a", flags))
self.assertEquals(10, self.ed.FindBytes(0, self.ed.Length, b"\t$", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"([a]).*\0", flags))
def testPhilippeREFind(self):
# Requires 1.72
flags = self.ed.SCFIND_REGEXP
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\w", flags))
self.assertEquals(1, self.ed.FindBytes(0, self.ed.Length, b"\W", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"\d", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\D", flags))
self.assertEquals(1, self.ed.FindBytes(0, self.ed.Length, b"\s", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\S", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\x62", flags))
def testRENonASCII(self):
self.ed.InsertText(0, b"\xAD")
flags = self.ed.SCFIND_REGEXP
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"\\x10", flags))
self.assertEquals(2, self.ed.FindBytes(0, self.ed.Length, b"\\x09", flags))
self.assertEquals(-1, self.ed.FindBytes(0, self.ed.Length, b"\\xAB", flags))
self.assertEquals(0, self.ed.FindBytes(0, self.ed.Length, b"\\xAD", flags))
class TestRepresentations(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testGetControl(self):
result = self.ed.GetRepresentation(b"\001")
self.assertEquals(result, b"SOH")
def testClearControl(self):
result = self.ed.GetRepresentation(b"\002")
self.assertEquals(result, b"STX")
self.ed.ClearRepresentation(b"\002")
result = self.ed.GetRepresentation(b"\002")
self.assertEquals(result, b"")
def testSetOhm(self):
ohmSign = b"\xe2\x84\xa6"
ohmExplained = b"U+2126 \xe2\x84\xa6"
self.ed.SetRepresentation(ohmSign, ohmExplained)
result = self.ed.GetRepresentation(ohmSign)
self.assertEquals(result, ohmExplained)
class TestProperties(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testSet(self):
self.ed.SetProperty(b"test", b"12")
self.assertEquals(self.ed.GetPropertyInt(b"test"), 12)
result = self.ed.GetProperty(b"test")
self.assertEquals(result, b"12")
self.ed.SetProperty(b"test.plus", b"[$(test)]")
result = self.ed.GetPropertyExpanded(b"test.plus")
self.assertEquals(result, b"[12]")
class TestTextMargin(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.txt = b"abcd"
self.ed.AddText(1, b"x")
def testAscent(self):
lineHeight = self.ed.TextHeight(0)
self.assertEquals(self.ed.ExtraAscent, 0)
self.assertEquals(self.ed.ExtraDescent, 0)
self.ed.ExtraAscent = 1
self.assertEquals(self.ed.ExtraAscent, 1)
self.ed.ExtraDescent = 2
self.assertEquals(self.ed.ExtraDescent, 2)
# Allow line height to recalculate
self.xite.DoEvents()
lineHeightIncreased = self.ed.TextHeight(0)
self.assertEquals(lineHeightIncreased, lineHeight + 2 + 1)
def testTextMargin(self):
self.ed.MarginSetText(0, self.txt)
result = self.ed.MarginGetText(0)
self.assertEquals(result, self.txt)
self.ed.MarginTextClearAll()
def testTextMarginStyle(self):
self.ed.MarginSetText(0, self.txt)
self.ed.MarginSetStyle(0, 33)
self.assertEquals(self.ed.MarginGetStyle(0), 33)
self.ed.MarginTextClearAll()
def testTextMarginStyles(self):
styles = b"\001\002\003\004"
self.ed.MarginSetText(0, self.txt)
self.ed.MarginSetStyles(0, styles)
result = self.ed.MarginGetStyles(0)
self.assertEquals(result, styles)
self.ed.MarginTextClearAll()
def testTextMarginStyleOffset(self):
self.ed.MarginSetStyleOffset(300)
self.assertEquals(self.ed.MarginGetStyleOffset(), 300)
class TestAnnotation(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.txt = b"abcd"
self.ed.AddText(1, b"x")
def testTextAnnotation(self):
self.assertEquals(self.ed.AnnotationGetLines(), 0)
self.ed.AnnotationSetText(0, self.txt)
self.assertEquals(self.ed.AnnotationGetLines(), 1)
result = self.ed.AnnotationGetText(0)
self.assertEquals(len(result), 4)
self.assertEquals(result, self.txt)
self.ed.AnnotationClearAll()
def testTextAnnotationStyle(self):
self.ed.AnnotationSetText(0, self.txt)
self.ed.AnnotationSetStyle(0, 33)
self.assertEquals(self.ed.AnnotationGetStyle(0), 33)
self.ed.AnnotationClearAll()
def testTextAnnotationStyles(self):
styles = b"\001\002\003\004"
self.ed.AnnotationSetText(0, self.txt)
self.ed.AnnotationSetStyles(0, styles)
result = self.ed.AnnotationGetStyles(0)
self.assertEquals(result, styles)
self.ed.AnnotationClearAll()
def testExtendedStyles(self):
start0 = self.ed.AllocateExtendedStyles(0)
self.assertEquals(start0, 256)
start1 = self.ed.AllocateExtendedStyles(10)
self.assertEquals(start1, 256)
start2 = self.ed.AllocateExtendedStyles(20)
self.assertEquals(start2, start1 + 10)
# Reset by changing lexer
self.ed.ReleaseAllExtendedStyles()
start0 = self.ed.AllocateExtendedStyles(0)
self.assertEquals(start0, 256)
def testTextAnnotationStyleOffset(self):
self.ed.AnnotationSetStyleOffset(300)
self.assertEquals(self.ed.AnnotationGetStyleOffset(), 300)
def testTextAnnotationVisible(self):
self.assertEquals(self.ed.AnnotationGetVisible(), 0)
self.ed.AnnotationSetVisible(2)
self.assertEquals(self.ed.AnnotationGetVisible(), 2)
self.ed.AnnotationSetVisible(0)
class TestMultiSelection(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 3 lines of 3 characters
t = b"xxx\nxxx\nxxx"
self.ed.AddText(len(t), t)
def testSelectionCleared(self):
self.ed.ClearSelections()
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 0)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 0)
def test1Selection(self):
self.ed.SetSelection(1, 2)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 2)
self.assertEquals(self.ed.GetSelectionNStart(0), 1)
self.assertEquals(self.ed.GetSelectionNEnd(0), 2)
self.ed.SwapMainAnchorCaret()
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 2)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 1)
def test1SelectionReversed(self):
self.ed.SetSelection(2, 1)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 2)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 1)
self.assertEquals(self.ed.GetSelectionNStart(0), 1)
self.assertEquals(self.ed.GetSelectionNEnd(0), 2)
def test1SelectionByStartEnd(self):
self.ed.SetSelectionNStart(0, 2)
self.ed.SetSelectionNEnd(0, 3)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 2)
self.assertEquals(self.ed.GetSelectionNCaret(0), 3)
self.assertEquals(self.ed.GetSelectionNStart(0), 2)
self.assertEquals(self.ed.GetSelectionNEnd(0), 3)
def test2Selections(self):
self.ed.SetSelection(1, 2)
self.ed.AddSelection(4, 5)
self.assertEquals(self.ed.Selections, 2)
self.assertEquals(self.ed.MainSelection, 1)
self.assertEquals(self.ed.GetSelectionNCaret(0), 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 2)
self.assertEquals(self.ed.GetSelectionNCaret(1), 4)
self.assertEquals(self.ed.GetSelectionNAnchor(1), 5)
self.assertEquals(self.ed.GetSelectionNStart(0), 1)
self.assertEquals(self.ed.GetSelectionNEnd(0), 2)
self.ed.MainSelection = 0
self.assertEquals(self.ed.MainSelection, 0)
self.ed.RotateSelection()
self.assertEquals(self.ed.MainSelection, 1)
def testRectangularSelection(self):
self.ed.RectangularSelectionAnchor = 1
self.assertEquals(self.ed.RectangularSelectionAnchor, 1)
self.ed.RectangularSelectionCaret = 10
self.assertEquals(self.ed.RectangularSelectionCaret, 10)
self.assertEquals(self.ed.Selections, 3)
self.assertEquals(self.ed.MainSelection, 2)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 1)
self.assertEquals(self.ed.GetSelectionNCaret(0), 2)
self.assertEquals(self.ed.GetSelectionNAnchor(1), 5)
self.assertEquals(self.ed.GetSelectionNCaret(1), 6)
self.assertEquals(self.ed.GetSelectionNAnchor(2), 9)
self.assertEquals(self.ed.GetSelectionNCaret(2), 10)
def testVirtualSpace(self):
self.ed.SetSelection(3, 7)
self.ed.SetSelectionNCaretVirtualSpace(0, 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 3)
self.ed.SetSelectionNAnchorVirtualSpace(0, 2)
self.assertEquals(self.ed.GetSelectionNAnchorVirtualSpace(0), 2)
# Does not check that virtual space is valid by being at end of line
self.ed.SetSelection(1, 1)
self.ed.SetSelectionNCaretVirtualSpace(0, 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 3)
def testRectangularVirtualSpace(self):
self.ed.VirtualSpaceOptions=1
self.ed.RectangularSelectionAnchor = 3
self.assertEquals(self.ed.RectangularSelectionAnchor, 3)
self.ed.RectangularSelectionCaret = 7
self.assertEquals(self.ed.RectangularSelectionCaret, 7)
self.ed.RectangularSelectionAnchorVirtualSpace = 1
self.assertEquals(self.ed.RectangularSelectionAnchorVirtualSpace, 1)
self.ed.RectangularSelectionCaretVirtualSpace = 10
self.assertEquals(self.ed.RectangularSelectionCaretVirtualSpace, 10)
self.assertEquals(self.ed.Selections, 2)
self.assertEquals(self.ed.MainSelection, 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 3)
self.assertEquals(self.ed.GetSelectionNAnchorVirtualSpace(0), 1)
self.assertEquals(self.ed.GetSelectionNCaret(0), 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 10)
def testRectangularVirtualSpaceOptionOff(self):
# Same as previous test but virtual space option off so no virtual space in result
self.ed.VirtualSpaceOptions=0
self.ed.RectangularSelectionAnchor = 3
self.assertEquals(self.ed.RectangularSelectionAnchor, 3)
self.ed.RectangularSelectionCaret = 7
self.assertEquals(self.ed.RectangularSelectionCaret, 7)
self.ed.RectangularSelectionAnchorVirtualSpace = 1
self.assertEquals(self.ed.RectangularSelectionAnchorVirtualSpace, 1)
self.ed.RectangularSelectionCaretVirtualSpace = 10
self.assertEquals(self.ed.RectangularSelectionCaretVirtualSpace, 10)
self.assertEquals(self.ed.Selections, 2)
self.assertEquals(self.ed.MainSelection, 1)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 3)
self.assertEquals(self.ed.GetSelectionNAnchorVirtualSpace(0), 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 3)
self.assertEquals(self.ed.GetSelectionNCaretVirtualSpace(0), 0)
def testDropSelectionN(self):
self.ed.SetSelection(1, 2)
# Only one so dropping has no effect
self.ed.DropSelectionN(0)
self.assertEquals(self.ed.Selections, 1)
self.ed.AddSelection(4, 5)
self.assertEquals(self.ed.Selections, 2)
# Outside bounds so no effect
self.ed.DropSelectionN(2)
self.assertEquals(self.ed.Selections, 2)
# Dropping before main so main decreases
self.ed.DropSelectionN(0)
self.assertEquals(self.ed.Selections, 1)
self.assertEquals(self.ed.MainSelection, 0)
self.assertEquals(self.ed.GetSelectionNCaret(0), 4)
self.assertEquals(self.ed.GetSelectionNAnchor(0), 5)
self.ed.AddSelection(10, 11)
self.ed.AddSelection(20, 21)
self.assertEquals(self.ed.Selections, 3)
self.assertEquals(self.ed.MainSelection, 2)
self.ed.MainSelection = 1
# Dropping after main so main does not change
self.ed.DropSelectionN(2)
self.assertEquals(self.ed.MainSelection, 1)
# Dropping the first (main) selection wraps the main selection around to the new last one.
self.ed.AddSelection(30, 31)
self.ed.AddSelection(40, 41)
self.assertEquals(self.ed.Selections, 4)
self.ed.MainSelection = 0
self.ed.DropSelectionN(0)
self.assertEquals(self.ed.MainSelection, 2)
class TestStyleAttributes(unittest.TestCase):
""" These tests are just to ensure that the calls set and retrieve values.
They do not check the visual appearance of the style attributes.
"""
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.testColour = 0x171615
self.testFont = b"Georgia"
def tearDown(self):
self.ed.StyleResetDefault()
def testFont(self):
self.ed.StyleSetFont(self.ed.STYLE_DEFAULT, self.testFont)
self.assertEquals(self.ed.StyleGetFont(self.ed.STYLE_DEFAULT), self.testFont)
def testSize(self):
self.ed.StyleSetSize(self.ed.STYLE_DEFAULT, 12)
self.assertEquals(self.ed.StyleGetSize(self.ed.STYLE_DEFAULT), 12)
self.assertEquals(self.ed.StyleGetSizeFractional(self.ed.STYLE_DEFAULT), 12*self.ed.SC_FONT_SIZE_MULTIPLIER)
self.ed.StyleSetSizeFractional(self.ed.STYLE_DEFAULT, 1234)
self.assertEquals(self.ed.StyleGetSizeFractional(self.ed.STYLE_DEFAULT), 1234)
def testBold(self):
self.ed.StyleSetBold(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetBold(self.ed.STYLE_DEFAULT), 1)
self.assertEquals(self.ed.StyleGetWeight(self.ed.STYLE_DEFAULT), self.ed.SC_WEIGHT_BOLD)
self.ed.StyleSetWeight(self.ed.STYLE_DEFAULT, 530)
self.assertEquals(self.ed.StyleGetWeight(self.ed.STYLE_DEFAULT), 530)
def testItalic(self):
self.ed.StyleSetItalic(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetItalic(self.ed.STYLE_DEFAULT), 1)
def testUnderline(self):
self.assertEquals(self.ed.StyleGetUnderline(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetUnderline(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetUnderline(self.ed.STYLE_DEFAULT), 1)
def testFore(self):
self.assertEquals(self.ed.StyleGetFore(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetFore(self.ed.STYLE_DEFAULT, self.testColour)
self.assertEquals(self.ed.StyleGetFore(self.ed.STYLE_DEFAULT), self.testColour)
def testBack(self):
self.assertEquals(self.ed.StyleGetBack(self.ed.STYLE_DEFAULT), 0xffffff)
self.ed.StyleSetBack(self.ed.STYLE_DEFAULT, self.testColour)
self.assertEquals(self.ed.StyleGetBack(self.ed.STYLE_DEFAULT), self.testColour)
def testEOLFilled(self):
self.assertEquals(self.ed.StyleGetEOLFilled(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetEOLFilled(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetEOLFilled(self.ed.STYLE_DEFAULT), 1)
def testCharacterSet(self):
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_RUSSIAN)
self.assertEquals(self.ed.StyleGetCharacterSet(self.ed.STYLE_DEFAULT), self.ed.SC_CHARSET_RUSSIAN)
def testCase(self):
self.assertEquals(self.ed.StyleGetCase(self.ed.STYLE_DEFAULT), self.ed.SC_CASE_MIXED)
self.ed.StyleSetCase(self.ed.STYLE_DEFAULT, self.ed.SC_CASE_UPPER)
self.assertEquals(self.ed.StyleGetCase(self.ed.STYLE_DEFAULT), self.ed.SC_CASE_UPPER)
self.ed.StyleSetCase(self.ed.STYLE_DEFAULT, self.ed.SC_CASE_LOWER)
self.assertEquals(self.ed.StyleGetCase(self.ed.STYLE_DEFAULT), self.ed.SC_CASE_LOWER)
def testVisible(self):
self.assertEquals(self.ed.StyleGetVisible(self.ed.STYLE_DEFAULT), 1)
self.ed.StyleSetVisible(self.ed.STYLE_DEFAULT, 0)
self.assertEquals(self.ed.StyleGetVisible(self.ed.STYLE_DEFAULT), 0)
def testChangeable(self):
self.assertEquals(self.ed.StyleGetChangeable(self.ed.STYLE_DEFAULT), 1)
self.ed.StyleSetChangeable(self.ed.STYLE_DEFAULT, 0)
self.assertEquals(self.ed.StyleGetChangeable(self.ed.STYLE_DEFAULT), 0)
def testHotSpot(self):
self.assertEquals(self.ed.StyleGetHotSpot(self.ed.STYLE_DEFAULT), 0)
self.ed.StyleSetHotSpot(self.ed.STYLE_DEFAULT, 1)
self.assertEquals(self.ed.StyleGetHotSpot(self.ed.STYLE_DEFAULT), 1)
class TestCharacterNavigation(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
self.ed.SetCodePage(65001)
def tearDown(self):
self.ed.SetCodePage(0)
def testBeforeAfter(self):
t = "aåflﬔ-"
tv = t.encode("UTF-8")
self.ed.SetContents(tv)
pos = 0
for i in range(len(t)-1):
after = self.ed.PositionAfter(pos)
self.assert_(after > i)
back = self.ed.PositionBefore(after)
self.assertEquals(pos, back)
pos = after
def testRelative(self):
# \x61 \xc3\xa5 \xef\xac\x82 \xef\xac\x94 \x2d
t = "aåflﬔ-"
tv = t.encode("UTF-8")
self.ed.SetContents(tv)
self.assertEquals(self.ed.PositionRelative(1, 2), 6)
self.assertEquals(self.ed.PositionRelative(6, -2), 1)
pos = 0
previous = 0
for i in range(1, len(t)):
after = self.ed.PositionRelative(pos, i)
self.assert_(after > pos)
self.assert_(after > previous)
previous = after
pos = len(t)
previous = pos
for i in range(1, len(t)-1):
after = self.ed.PositionRelative(pos, -i)
self.assert_(after < pos)
self.assert_(after < previous)
previous = after
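# Cross-check of the byte layout noted above (plain CPython, no editor
# needed): "aåflﬔ-".encode("UTF-8") is
# b"\x61\xc3\xa5\xef\xac\x82\xef\xac\x94\x2d", so PositionRelative(1, 2)
# skips the 2-byte "å" and the 3-byte "fl" ligature to land on byte 6.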
class TestCaseMapping(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def tearDown(self):
self.ed.SetCodePage(0)
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_DEFAULT)
def testEmpty(self):
# Trying to upper case an empty string caused a crash at one stage
t = b"x"
self.ed.SetContents(t)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), b"x")
def testASCII(self):
t = b"x"
self.ed.SetContents(t)
self.ed.SetSel(0,1)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), b"X")
def testLatin1(self):
t = "å".encode("Latin-1")
r = "Å".encode("Latin-1")
self.ed.SetContents(t)
self.ed.SetSel(0,1)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), r)
def testRussian(self):
if sys.platform == "win32":
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_RUSSIAN)
else:
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_CYRILLIC)
t = "Б".encode("Windows-1251")
r = "б".encode("Windows-1251")
self.ed.SetContents(t)
self.ed.SetSel(0,1)
self.ed.LowerCase()
self.assertEquals(self.ed.Contents(), r)
def testUTF(self):
self.ed.SetCodePage(65001)
t = "å".encode("UTF-8")
r = "Å".encode("UTF-8")
self.ed.SetContents(t)
self.ed.SetSel(0,2)
self.ed.UpperCase()
self.assertEquals(self.ed.Contents(), r)
def testUTFDifferentLength(self):
self.ed.SetCodePage(65001)
t = "ı".encode("UTF-8")
r = "I".encode("UTF-8")
self.ed.SetContents(t)
self.assertEquals(self.ed.Length, 2)
self.ed.SetSel(0,2)
self.ed.UpperCase()
self.assertEquals(self.ed.Length, 1)
self.assertEquals(self.ed.Contents(), r)
def testUTFGrows(self):
# This crashed at one point in debug builds due to looking past end of shorter string
self.ed.SetCodePage(65001)
# ﬖ is a single character ligature taking 3 bytes in UTF8: EF AC 96
t = 'ﬖﬖ'.encode("UTF-8")
self.ed.SetContents(t)
self.assertEquals(self.ed.Length, 6)
self.ed.SetSel(0,self.ed.Length)
self.ed.UpperCase()
# To convert to upper case the ligature is separated into վ and ն then uppercased to Վ and Ն
# each of which takes 2 bytes in UTF-8: D5 8E D5 86
r = 'ՎՆՎՆ'.encode("UTF-8")
self.assertEquals(self.ed.Length, 8)
self.assertEquals(self.ed.Contents(), r)
self.assertEquals(self.ed.SelectionEnd, self.ed.Length)
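# Cross-check with CPython's own case tables (an assumption that they
# agree with Scintilla's): "ﬖ".upper() == "ՎՆ", and "ՎՆՎՆ".encode("UTF-8")
# is indeed 8 bytes.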
def testUTFShrinks(self):
self.ed.SetCodePage(65001)
# fi is a single character ligature taking 3 bytes in UTF8: EF AC 81
t = 'fifi'.encode("UTF-8")
self.ed.SetContents(t)
self.assertEquals(self.ed.Length, 6)
self.ed.SetSel(0,self.ed.Length)
self.ed.UpperCase()
# To convert to upper case the ligature is separated into f and i then uppercased to F and I
# each of which takes 1 byte in UTF-8: 46 49
r = 'FIFI'.encode("UTF-8")
self.assertEquals(self.ed.Length, 4)
self.assertEquals(self.ed.Contents(), r)
self.assertEquals(self.ed.SelectionEnd, self.ed.Length)
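# Likewise "fi".upper() == "FI" in CPython: the 3-byte ligature becomes
# two 1-byte letters, which is the shrink the assertions above rely on.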
class TestCaseInsensitiveSearch(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def tearDown(self):
self.ed.SetCodePage(0)
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_DEFAULT)
def testEmpty(self):
text = b" x X"
searchString = b""
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(0, pos)
def testASCII(self):
text = b" x X"
searchString = b"X"
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(1, pos)
def testLatin1(self):
text = "Frånd Åå".encode("Latin-1")
searchString = "Å".encode("Latin-1")
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(2, pos)
def testRussian(self):
self.ed.StyleSetCharacterSet(self.ed.STYLE_DEFAULT, self.ed.SC_CHARSET_RUSSIAN)
text = "=(Б tex б)".encode("Windows-1251")
searchString = "б".encode("Windows-1251")
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(2, pos)
def testUTF(self):
self.ed.SetCodePage(65001)
text = "Frånd Åå".encode("UTF-8")
searchString = "Å".encode("UTF-8")
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(2, pos)
def testUTFDifferentLength(self):
# Searching for a two byte string finds a single byte
self.ed.SetCodePage(65001)
# two byte string "ſ" single byte "s"
text = "Frånds Ååſ $".encode("UTF-8")
searchString = "ſ".encode("UTF-8")
firstPosition = len("Frånd".encode("UTF-8"))
self.assertEquals(len(searchString), 2)
self.ed.SetContents(text)
self.ed.TargetStart = 0
self.ed.TargetEnd = self.ed.Length-1
self.ed.SearchFlags = 0
pos = self.ed.SearchInTarget(len(searchString), searchString)
self.assertEquals(firstPosition, pos)
self.assertEquals(firstPosition+1, self.ed.TargetEnd)
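# "ſ" (U+017F LATIN SMALL LETTER LONG S) case-folds to a plain "s"
# (in CPython terms, "ſ".casefold() == "s"), which is why the two-byte
# needle can match the one-byte "s" of "Frånds" first.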
class TestLexer(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testLexerNumber(self):
self.ed.Lexer = self.ed.SCLEX_CPP
self.assertEquals(self.ed.GetLexer(), self.ed.SCLEX_CPP)
def testLexerName(self):
self.ed.LexerLanguage = b"cpp"
self.assertEquals(self.ed.GetLexer(), self.ed.SCLEX_CPP)
name = self.ed.GetLexerLanguage(0)
self.assertEquals(name, b"cpp")
def testPropertyNames(self):
propertyNames = self.ed.PropertyNames()
self.assertNotEquals(propertyNames, b"")
# The cpp lexer has a boolean property named lexer.cpp.allow.dollars
propNameDollars = b"lexer.cpp.allow.dollars"
propertyType = self.ed.PropertyType(propNameDollars)
self.assertEquals(propertyType, self.ed.SC_TYPE_BOOLEAN)
propertyDescription = self.ed.DescribeProperty(propNameDollars)
self.assertNotEquals(propertyDescription, b"")
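def _demoSetLexerProperty(self):
# Illustrative sketch, not collected by unittest: lexer properties are
# plain key/value strings set through SetProperty; b"1" enables the
# boolean property described above.
self.ed.Lexer = self.ed.SCLEX_CPP
self.ed.SetProperty(b"lexer.cpp.allow.dollars", b"1")
return self.ed.GetPropertyInt(b"lexer.cpp.allow.dollars") # expected 1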
def testWordListDescriptions(self):
wordSet = self.ed.DescribeKeyWordSets()
self.assertNotEquals(wordSet, b"")
class TestSubStyles(unittest.TestCase):
''' These tests include knowledge of the current implementation in the cpp lexer
and may have to change when that implementation changes.
Currently supports subStyles for IDENTIFIER 11 and COMMENTDOCKEYWORD 17 '''
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testInfo(self):
self.ed.Lexer = self.ed.SCLEX_CPP
bases = self.ed.GetSubStyleBases()
self.assertEquals(bases, b"\x0b\x11") # 11, 17
self.assertEquals(self.ed.DistanceToSecondaryStyles(), 0x40)
def testAllocate(self):
firstSubStyle = 0x80 # Current implementation
self.ed.Lexer = self.ed.SCLEX_CPP
self.assertEquals(self.ed.GetStyleFromSubStyle(firstSubStyle), firstSubStyle)
self.assertEquals(self.ed.GetSubStylesStart(self.ed.SCE_C_IDENTIFIER), 0)
self.assertEquals(self.ed.GetSubStylesLength(self.ed.SCE_C_IDENTIFIER), 0)
numSubStyles = 5
subs = self.ed.AllocateSubStyles(self.ed.SCE_C_IDENTIFIER, numSubStyles)
self.assertEquals(subs, firstSubStyle)
self.assertEquals(self.ed.GetSubStylesStart(self.ed.SCE_C_IDENTIFIER), firstSubStyle)
self.assertEquals(self.ed.GetSubStylesLength(self.ed.SCE_C_IDENTIFIER), numSubStyles)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs), self.ed.SCE_C_IDENTIFIER)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs+numSubStyles-1), self.ed.SCE_C_IDENTIFIER)
self.assertEquals(self.ed.GetStyleFromSubStyle(self.ed.SCE_C_IDENTIFIER), self.ed.SCE_C_IDENTIFIER)
# Now free and check same as start
self.ed.FreeSubStyles()
self.assertEquals(self.ed.GetStyleFromSubStyle(subs), subs)
self.assertEquals(self.ed.GetSubStylesStart(self.ed.SCE_C_IDENTIFIER), 0)
self.assertEquals(self.ed.GetSubStylesLength(self.ed.SCE_C_IDENTIFIER), 0)
def testInactive(self):
firstSubStyle = 0x80 # Current implementation
inactiveDistance = self.ed.DistanceToSecondaryStyles()
self.ed.Lexer = self.ed.SCLEX_CPP
numSubStyles = 5
subs = self.ed.AllocateSubStyles(self.ed.SCE_C_IDENTIFIER, numSubStyles)
self.assertEquals(subs, firstSubStyle)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs), self.ed.SCE_C_IDENTIFIER)
self.assertEquals(self.ed.GetStyleFromSubStyle(subs+inactiveDistance), self.ed.SCE_C_IDENTIFIER+inactiveDistance)
self.ed.FreeSubStyles()
def testSecondary(self):
inactiveDistance = self.ed.DistanceToSecondaryStyles()
self.assertEquals(self.ed.GetPrimaryStyleFromStyle(self.ed.SCE_C_IDENTIFIER+inactiveDistance), self.ed.SCE_C_IDENTIFIER)
class TestCallTip(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 1 line of 4 characters
t = b"fun("
self.ed.AddText(len(t), t)
def testBasics(self):
self.assertEquals(self.ed.CallTipActive(), 0)
self.ed.CallTipShow(1, "fun(int x)")
self.assertEquals(self.ed.CallTipActive(), 1)
self.assertEquals(self.ed.CallTipPosStart(), 4)
self.ed.CallTipSetPosStart(1)
self.assertEquals(self.ed.CallTipPosStart(), 1)
self.ed.CallTipCancel()
self.assertEquals(self.ed.CallTipActive(), 0)
class TestAutoComplete(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 1 line of 3 characters
t = b"xxx\n"
self.ed.AddText(len(t), t)
def testDefaults(self):
self.assertEquals(self.ed.AutoCGetSeparator(), ord(' '))
self.assertEquals(self.ed.AutoCGetMaxHeight(), 5)
self.assertEquals(self.ed.AutoCGetMaxWidth(), 0)
self.assertEquals(self.ed.AutoCGetTypeSeparator(), ord('?'))
self.assertEquals(self.ed.AutoCGetIgnoreCase(), 0)
self.assertEquals(self.ed.AutoCGetAutoHide(), 1)
self.assertEquals(self.ed.AutoCGetDropRestOfWord(), 0)
def testChangeDefaults(self):
self.ed.AutoCSetSeparator(ord('-'))
self.assertEquals(self.ed.AutoCGetSeparator(), ord('-'))
self.ed.AutoCSetSeparator(ord(' '))
self.ed.AutoCSetMaxHeight(100)
self.assertEquals(self.ed.AutoCGetMaxHeight(), 100)
self.ed.AutoCSetMaxHeight(5)
self.ed.AutoCSetMaxWidth(100)
self.assertEquals(self.ed.AutoCGetMaxWidth(), 100)
self.ed.AutoCSetMaxWidth(0)
self.ed.AutoCSetTypeSeparator(ord('@'))
self.assertEquals(self.ed.AutoCGetTypeSeparator(), ord('@'))
self.ed.AutoCSetTypeSeparator(ord('?'))
self.ed.AutoCSetIgnoreCase(1)
self.assertEquals(self.ed.AutoCGetIgnoreCase(), 1)
self.ed.AutoCSetIgnoreCase(0)
self.ed.AutoCSetAutoHide(0)
self.assertEquals(self.ed.AutoCGetAutoHide(), 0)
self.ed.AutoCSetAutoHide(1)
self.ed.AutoCSetDropRestOfWord(1)
self.assertEquals(self.ed.AutoCGetDropRestOfWord(), 1)
self.ed.AutoCSetDropRestOfWord(0)
def testAutoShow(self):
self.assertEquals(self.ed.AutoCActive(), 0)
self.ed.SetSel(0, 0)
self.ed.AutoCShow(0, b"za defn ghi")
self.assertEquals(self.ed.AutoCActive(), 1)
#~ time.sleep(2)
self.assertEquals(self.ed.AutoCPosStart(), 0)
self.assertEquals(self.ed.AutoCGetCurrent(), 0)
t = self.ed.AutoCGetCurrentText(5)
#~ self.assertEquals(l, 3)
self.assertEquals(t, b"za")
self.ed.AutoCCancel()
self.assertEquals(self.ed.AutoCActive(), 0)
def testAutoShowComplete(self):
self.assertEquals(self.ed.AutoCActive(), 0)
self.ed.SetSel(0, 0)
self.ed.AutoCShow(0, b"za defn ghi")
self.ed.AutoCComplete()
self.assertEquals(self.ed.Contents(), b"zaxxx\n")
self.assertEquals(self.ed.AutoCActive(), 0)
def testAutoShowSelect(self):
self.assertEquals(self.ed.AutoCActive(), 0)
self.ed.SetSel(0, 0)
self.ed.AutoCShow(0, b"za defn ghi")
self.ed.AutoCSelect(0, b"d")
self.ed.AutoCComplete()
self.assertEquals(self.ed.Contents(), b"defnxxx\n")
self.assertEquals(self.ed.AutoCActive(), 0)
def testWriteOnly(self):
""" Checks that setting attributes doesn't crash or change tested behaviour
but does not check that the changed attributes are effective. """
self.ed.AutoCStops(0, b"abcde")
self.ed.AutoCSetFillUps(0, b"1234")
class TestDirectAccess(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def testGapPosition(self):
text = b"abcd"
self.ed.SetContents(text)
self.assertEquals(self.ed.GapPosition, 4)
self.ed.TargetStart = 1
self.ed.TargetEnd = 1
rep = b"-"
self.ed.ReplaceTarget(len(rep), rep)
self.assertEquals(self.ed.GapPosition, 2)
def testCharacterPointerAndRangePointer(self):
text = b"abcd"
self.ed.SetContents(text)
characterPointer = self.ed.CharacterPointer
rangePointer = self.ed.GetRangePointer(0,3)
self.assertEquals(characterPointer, rangePointer)
cpBuffer = ctypes.c_char_p(characterPointer)
self.assertEquals(cpBuffer.value, text)
# Gap will not be moved as already moved for CharacterPointer call
rangePointer = self.ed.GetRangePointer(1,3)
cpBuffer = ctypes.c_char_p(rangePointer)
self.assertEquals(cpBuffer.value, text[1:])
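# Background for the gap assertions above: the document is stored in a gap
# buffer, and CharacterPointer moves the gap to the end so the text is
# contiguous; the following GetRangePointer calls can then hand out
# pointers without another move. ctypes.c_char_p is only used to read the
# raw pointers back as bytes for comparison.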
class TestWordChars(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
def tearDown(self):
self.ed.SetCharsDefault()
def _setChars(self, charClass, chars):
""" Wrapper to call self.ed.Set*Chars with the right type
@param charClass {str} the character class, "word", "space", etc.
@param chars {iterable of int} characters to set
"""
if sys.version_info.major == 2:
# Python 2, use latin-1 encoded str
unichars = (unichr(x) for x in chars if x != 0)
# can't use literal u"", that's a syntax error in Py3k
# unicode() doesn't exist in Py3k, but this branch never runs there
result = unicode("").join(unichars).encode("latin-1")
else:
# Python 3, use bytes()
result = bytes(x for x in chars if x != 0)
meth = getattr(self.ed, "Set%sChars" % (charClass.capitalize()))
return meth(None, result)
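# Example (hypothetical values): self._setChars("word", range(0x30, 0x3a))
# would mark only the ASCII digits as word characters.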
def assertCharSetsEqual(self, first, second, *args, **kwargs):
""" Assert that the two character sets are equal.
If either set is an iterable of numbers, convert it to chars
first. """
first_set = set()
for c in first:
first_set.add(chr(c) if isinstance(c, int) else c)
second_set = set()
for c in second:
second_set.add(chr(c) if isinstance(c, int) else c)
return self.assertEqual(first_set, second_set, *args, **kwargs)
def testDefaultWordChars(self):
# check that the default word chars are as expected
import string
data = self.ed.GetWordChars(None)
expected = set(string.digits + string.ascii_letters + '_') | \
set(chr(x) for x in range(0x80, 0x100))
self.assertCharSetsEqual(data, expected)
def testDefaultWhitespaceChars(self):
# check that the default whitespace chars are as expected
import string
data = self.ed.GetWhitespaceChars(None)
expected = (set(chr(x) for x in (range(0, 0x20))) | set(' ')) - \
set(['\r', '\n'])
self.assertCharSetsEqual(data, expected)
def testDefaultPunctuationChars(self):
# check that the default punctuation chars are as expected
import string
data = self.ed.GetPunctuationChars(None)
expected = set(chr(x) for x in range(0x20, 0x80)) - \
set(string.ascii_letters + string.digits + "\r\n_ ")
self.assertCharSetsEqual(data, expected)
def testCustomWordChars(self):
# check that setting things to whitespace chars makes them not words
self._setChars("whitespace", range(1, 0x100))
data = self.ed.GetWordChars(None)
expected = set()
self.assertCharSetsEqual(data, expected)
# and now set something to make sure that works too
expected = set(range(1, 0x100, 2))
self._setChars("word", expected)
data = self.ed.GetWordChars(None)
self.assertCharSetsEqual(data, expected)
def testCustomWhitespaceChars(self):
# check setting whitespace chars to non-default values
self._setChars("word", range(1, 0x100))
# chr(0) can only ever be whitespace; it cannot be reassigned to another class
expected = set([0])
data = self.ed.GetWhitespaceChars(None)
self.assertCharSetsEqual(data, expected)
# now try to set it to something custom
expected = set(range(1, 0x100, 2)) | set([0])
self._setChars("whitespace", expected)
data = self.ed.GetWhitespaceChars(None)
self.assertCharSetsEqual(data, expected)
def testCustomPunctuationChars(self):
# check setting punctuation chars to non-default values
self._setChars("word", range(1, 0x100))
expected = set()
data = self.ed.GetPunctuationChars(0)
self.assertEquals(set(data), expected)
# now try to set it to something custom
expected = set(range(1, 0x100, 1))
self._setChars("punctuation", expected)
data = self.ed.GetPunctuationChars(None)
self.assertCharSetsEqual(data, expected)
class TestExplicitTabStops(unittest.TestCase):
def setUp(self):
self.xite = Xite.xiteFrame
self.ed = self.xite.ed
self.ed.ClearAll()
self.ed.EmptyUndoBuffer()
# 2 lines of 4 characters
self.t = b"fun(\nint)"
self.ed.AddText(len(self.t), self.t)
def testAddingAndClearing(self):
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
# Add a tab stop at 7
self.ed.AddTabStop(0, 7)
# Check added
self.assertEquals(self.ed.GetNextTabStop(0,0), 7)
# Check does not affect line 1
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
# Add a tab stop at 18
self.ed.AddTabStop(0, 18)
# Check added
self.assertEquals(self.ed.GetNextTabStop(0,0), 7)
self.assertEquals(self.ed.GetNextTabStop(0,7), 18)
# Check does not affect line 1
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,7), 0)
# Add a tab stop between others at 13
self.ed.AddTabStop(0, 13)
# Check added
self.assertEquals(self.ed.GetNextTabStop(0,0), 7)
self.assertEquals(self.ed.GetNextTabStop(0,7), 13)
self.assertEquals(self.ed.GetNextTabStop(0,13), 18)
# Check does not affect line 1
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,7), 0)
self.ed.ClearTabStops(0)
# Check back to original state
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
def testLineInsertionDeletion(self):
# Add a tab stop at 7 on line 1
self.ed.AddTabStop(1, 7)
# Check added
self.assertEquals(self.ed.GetNextTabStop(1,0), 7)
# More text at end
self.ed.AddText(len(self.t), self.t)
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,0), 7)
self.assertEquals(self.ed.GetNextTabStop(2,0), 0)
self.assertEquals(self.ed.GetNextTabStop(3,0), 0)
# Another 2 lines before explicit line moves the explicit tab stop
data = b"x\ny\n"
self.ed.InsertText(4, data)
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,0), 0)
self.assertEquals(self.ed.GetNextTabStop(2,0), 0)
self.assertEquals(self.ed.GetNextTabStop(3,0), 7)
self.assertEquals(self.ed.GetNextTabStop(4,0), 0)
self.assertEquals(self.ed.GetNextTabStop(5,0), 0)
# Undo moves the explicit tab stop back
self.ed.Undo()
self.assertEquals(self.ed.GetNextTabStop(0,0), 0)
self.assertEquals(self.ed.GetNextTabStop(1,0), 7)
self.assertEquals(self.ed.GetNextTabStop(2,0), 0)
self.assertEquals(self.ed.GetNextTabStop(3,0), 0)
if __name__ == '__main__':
uu = Xite.main("simpleTests")
#~ for x in sorted(uu.keys()):
#~ print(x, uu[x])
#~ print()
| isc | -2,735,364,442,869,949,000 | 33.864814 | 122 | 0.72912 | false |
qedsoftware/commcare-hq | corehq/apps/app_manager/tests/test_translations.py | 1 | 2099 | # coding=utf-8
import os
from lxml import etree
from django.test import TestCase
from corehq.apps.app_manager.models import Application
from corehq.apps.app_manager.tests.util import SuiteMixin
from corehq.apps.app_manager.translations import escape_output_value
import commcare_translations
class AppManagerTranslationsTest(TestCase, SuiteMixin):
root = os.path.dirname(__file__)
file_path = ('data', 'suite')
def test_escape_output_value(self):
test_cases = [
('hello', '<value>hello</value>'),
('abc < def > abc', '<value>abc &lt; def &gt; abc</value>'),
("bee's knees", "<value>bee's knees</value>"),
('unfortunate <xml expression', '<value>unfortunate &lt;xml expression</value>'),
(u'क्लिक', '<value>क्लिक</value>'),
('&#39', '<value>&amp;#39</value>'),
('question1 is <output value="/data/question1" vellum:value="#form/question1"/> !',
'<value>question1 is <output value="/data/question1" vellum:value="#form/question1"/> !</value>'),
('Here is a ref <output value="/data/no_media"/> with some "trailing" text & that\'s some bad < xml.',
'<value>Here is a ref <output value="/data/no_media"/> with some "trailing" text &amp; that\'s some bad &lt; xml.</value>')
]
for input, expected_output in test_cases:
self.assertEqual(expected_output, etree.tostring(escape_output_value(input)))
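# escape_output_value escapes stray markup but leaves well-formed
# <output/> references intact; e.g. (sketch) escape_output_value('a < b')
# serializes to '<value>a &lt; b</value>'.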
def test_language_names(self):
app_json = self.get_json('app')
app_json['langs'] = ['en', 'fra', 'hin', 'pol']
app = Application.wrap(app_json)
app.create_suite()
app_strings = app.create_app_strings('default')
app_strings_dict = commcare_translations.loads(app_strings)
self.assertEqual(app_strings_dict['en'], 'English')
self.assertEqual(app_strings_dict['fra'], u'Français')
self.assertEqual(app_strings_dict['hin'], u'हिंदी')
self.assertEqual(app_strings_dict['pol'], 'polski')
| bsd-3-clause | 907,193,766,072,327,000 | 45.177778 | 142 | 0.619346 | false |
uannight/reposan | plugin.video.tvalacarta/lib/youtube_dl/extractor/swrmediathek.py | 64 | 4375 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
determine_protocol,
)
class SWRMediathekIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?swrmediathek\.de/(?:content/)?player\.htm\?show=(?P<id>[\da-f]{8}-[\da-f]{4}-[\da-f]{4}-[\da-f]{4}-[\da-f]{12})'
_TESTS = [{
'url': 'http://swrmediathek.de/player.htm?show=849790d0-dab8-11e3-a953-0026b975f2e6',
'md5': '8c5f6f0172753368547ca8413a7768ac',
'info_dict': {
'id': '849790d0-dab8-11e3-a953-0026b975f2e6',
'ext': 'mp4',
'title': 'SWR odysso',
'description': 'md5:2012e31baad36162e97ce9eb3f157b8a',
'thumbnail': r're:^http:.*\.jpg$',
'duration': 2602,
'upload_date': '20140515',
'uploader': 'SWR Fernsehen',
'uploader_id': '990030',
},
}, {
'url': 'http://swrmediathek.de/player.htm?show=0e1a8510-ddf2-11e3-9be3-0026b975f2e6',
'md5': 'b10ab854f912eecc5a6b55cd6fc1f545',
'info_dict': {
'id': '0e1a8510-ddf2-11e3-9be3-0026b975f2e6',
'ext': 'mp4',
'title': 'Nachtcafé - Alltagsdroge Alkohol - zwischen Sektempfang und Komasaufen',
'description': 'md5:e0a3adc17e47db2c23aab9ebc36dbee2',
'thumbnail': r're:http://.*\.jpg',
'duration': 5305,
'upload_date': '20140516',
'uploader': 'SWR Fernsehen',
'uploader_id': '990030',
},
'skip': 'redirect to http://swrmediathek.de/index.htm?hinweis=swrlink',
}, {
'url': 'http://swrmediathek.de/player.htm?show=bba23e10-cb93-11e3-bf7f-0026b975f2e6',
'md5': '4382e4ef2c9d7ce6852535fa867a0dd3',
'info_dict': {
'id': 'bba23e10-cb93-11e3-bf7f-0026b975f2e6',
'ext': 'mp3',
'title': 'Saša Stanišic: Vor dem Fest',
'description': 'md5:5b792387dc3fbb171eb709060654e8c9',
'thumbnail': r're:http://.*\.jpg',
'duration': 3366,
'upload_date': '20140520',
'uploader': 'SWR 2',
'uploader_id': '284670',
},
'skip': 'redirect to http://swrmediathek.de/index.htm?hinweis=swrlink',
}]
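# A minimal sketch of driving this extractor by hand (assumes a
# youtube_dl install; the URL is the first _TESTS entry above):
#
# from youtube_dl import YoutubeDL
# with YoutubeDL({'skip_download': True}) as ydl:
#     info = ydl.extract_info(
#         'http://swrmediathek.de/player.htm?show=849790d0-dab8-11e3-a953-0026b975f2e6')
#     print(info['id'], len(info['formats']))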
def _real_extract(self, url):
video_id = self._match_id(url)
video = self._download_json(
'http://swrmediathek.de/AjaxEntry?ekey=%s' % video_id,
video_id, 'Downloading video JSON')
attr = video['attr']
title = attr['entry_title']
media_type = attr.get('entry_etype')
formats = []
for entry in video.get('sub', []):
if entry.get('name') != 'entry_media':
continue
entry_attr = entry.get('attr', {})
f_url = entry_attr.get('val2')
if not f_url:
continue
codec = entry_attr.get('val0')
if codec == 'm3u8':
formats.extend(self._extract_m3u8_formats(
f_url, video_id, 'mp4', 'm3u8_native',
m3u8_id='hls', fatal=False))
elif codec == 'f4m':
formats.extend(self._extract_f4m_formats(
f_url + '?hdcore=3.7.0', video_id,
f4m_id='hds', fatal=False))
else:
formats.append({
'format_id': determine_protocol({'url': f_url}),
'url': f_url,
'quality': int_or_none(entry_attr.get('val1')),
'vcodec': codec if media_type == 'Video' else 'none',
'acodec': codec if media_type == 'Audio' else None,
})
self._sort_formats(formats)
upload_date = None
entry_pdatet = attr.get('entry_pdatet')
if entry_pdatet:
upload_date = entry_pdatet[:-4]
return {
'id': video_id,
'title': title,
'description': attr.get('entry_descl'),
'thumbnail': attr.get('entry_image_16_9'),
'duration': parse_duration(attr.get('entry_durat')),
'upload_date': upload_date,
'uploader': attr.get('channel_title'),
'uploader_id': attr.get('channel_idkey'),
'formats': formats,
}
| gpl-2.0 | 2,760,507,603,684,567,000 | 37.017391 | 150 | 0.51441 | false |
HydrelioxGitHub/home-assistant | tests/components/sensor/test_rest.py | 2 | 12631 | """The tests for the REST sensor platform."""
import unittest
from pytest import raises
from unittest.mock import patch, Mock
import requests
from requests.exceptions import Timeout, MissingSchema, RequestException
import requests_mock
from homeassistant.exceptions import PlatformNotReady
from homeassistant.setup import setup_component
import homeassistant.components.sensor as sensor
import homeassistant.components.sensor.rest as rest
from homeassistant.helpers.config_validation import template
from tests.common import get_test_home_assistant, assert_setup_component
import pytest
class TestRestSensorSetup(unittest.TestCase):
"""Tests for setting up the REST sensor platform."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_missing_config(self):
"""Test setup with configuration missing required entries."""
with assert_setup_component(0):
assert setup_component(self.hass, sensor.DOMAIN, {
'sensor': {'platform': 'rest'}})
def test_setup_missing_schema(self):
"""Test setup with resource missing schema."""
with pytest.raises(MissingSchema):
rest.setup_platform(self.hass, {
'platform': 'rest',
'resource': 'localhost',
'method': 'GET'
}, None)
@patch('requests.Session.send',
side_effect=requests.exceptions.ConnectionError())
def test_setup_failed_connect(self, mock_req):
"""Test setup when connection error occurs."""
with raises(PlatformNotReady):
rest.setup_platform(self.hass, {
'platform': 'rest',
'resource': 'http://localhost',
}, lambda devices, update=True: None)
@patch('requests.Session.send', side_effect=Timeout())
def test_setup_timeout(self, mock_req):
"""Test setup when connection timeout occurs."""
with raises(PlatformNotReady):
rest.setup_platform(self.hass, {
'platform': 'rest',
'resource': 'http://localhost',
}, lambda devices, update=True: None)
@requests_mock.Mocker()
def test_setup_minimum(self, mock_req):
"""Test setup with minimum configuration."""
mock_req.get('http://localhost', status_code=200)
with assert_setup_component(1, 'sensor'):
assert setup_component(self.hass, 'sensor', {
'sensor': {
'platform': 'rest',
'resource': 'http://localhost'
}
})
assert 2 == mock_req.call_count
@requests_mock.Mocker()
def test_setup_get(self, mock_req):
"""Test setup with valid configuration."""
mock_req.get('http://localhost', status_code=200)
with assert_setup_component(1, 'sensor'):
assert setup_component(self.hass, 'sensor', {
'sensor': {
'platform': 'rest',
'resource': 'http://localhost',
'method': 'GET',
'value_template': '{{ value_json.key }}',
'name': 'foo',
'unit_of_measurement': 'MB',
'verify_ssl': 'true',
'timeout': 30,
'authentication': 'basic',
'username': 'my username',
'password': 'my password',
'headers': {'Accept': 'application/json'}
}
})
assert 2 == mock_req.call_count
@requests_mock.Mocker()
def test_setup_post(self, mock_req):
"""Test setup with valid configuration."""
mock_req.post('http://localhost', status_code=200)
with assert_setup_component(1, 'sensor'):
assert setup_component(self.hass, 'sensor', {
'sensor': {
'platform': 'rest',
'resource': 'http://localhost',
'method': 'POST',
'value_template': '{{ value_json.key }}',
'payload': '{ "device": "toaster"}',
'name': 'foo',
'unit_of_measurement': 'MB',
'verify_ssl': 'true',
'timeout': 30,
'authentication': 'basic',
'username': 'my username',
'password': 'my password',
'headers': {'Accept': 'application/json'}
}
})
assert 2 == mock_req.call_count
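# For reference, the user-facing YAML mirrored by the POST case above
# would look roughly like this (sketch, not validated here):
#
# sensor:
#   - platform: rest
#     resource: http://localhost
#     method: POST
#     payload: '{ "device": "toaster"}'
#     value_template: '{{ value_json.key }}'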
class TestRestSensor(unittest.TestCase):
"""Tests for REST sensor platform."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.initial_state = 'initial_state'
self.rest = Mock('rest.RestData')
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "' + self.initial_state + '" }'))
self.name = 'foo'
self.unit_of_measurement = 'MB'
self.device_class = None
self.value_template = template('{{ value_json.key }}')
self.value_template.hass = self.hass
self.force_update = False
self.sensor = rest.RestSensor(
self.hass, self.rest, self.name, self.unit_of_measurement,
self.device_class, self.value_template, [], self.force_update
)
def tearDown(self):
"""Stop everything that was started."""
self.hass.stop()
def update_side_effect(self, data):
"""Side effect function for mocking RestData.update()."""
self.rest.data = data
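# Example: rebinding the mock as
# Mock(side_effect=self.update_side_effect('{"key": "v"}'))
# makes the next sensor.update() see '{"key": "v"}' as the fetched
# payload, which is how the tests below swap in new REST responses.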
def test_name(self):
"""Test the name."""
assert self.name == self.sensor.name
def test_unit_of_measurement(self):
"""Test the unit of measurement."""
assert self.unit_of_measurement == self.sensor.unit_of_measurement
def test_force_update(self):
"""Test the unit of measurement."""
assert self.force_update == self.sensor.force_update
def test_state(self):
"""Test the initial state."""
self.sensor.update()
assert self.initial_state == self.sensor.state
def test_update_when_value_is_none(self):
"""Test state gets updated to unknown when sensor returns no data."""
self.rest.update = Mock(
'rest.RestData.update', side_effect=self.update_side_effect(None))
self.sensor.update()
assert self.sensor.state is None
assert not self.sensor.available
def test_update_when_value_changed(self):
"""Test state gets updated when sensor returns a new status."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "updated_state" }'))
self.sensor.update()
assert 'updated_state' == self.sensor.state
assert self.sensor.available
def test_update_with_no_template(self):
"""Test update when there is no value template."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'plain_state'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, [],
self.force_update)
self.sensor.update()
assert 'plain_state' == self.sensor.state
assert self.sensor.available
def test_update_with_json_attrs(self):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "some_json_value" }'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert 'some_json_value' == \
self.sensor.device_state_attributes['key']
@patch('homeassistant.components.sensor.rest._LOGGER')
def test_update_with_json_attrs_no_data(self, mock_logger):
"""Test attributes when no JSON result fetched."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(None))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
@patch('homeassistant.components.sensor.rest._LOGGER')
def test_update_with_json_attrs_not_dict(self, mock_logger):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'["list", "of", "things"]'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
@patch('homeassistant.components.sensor.rest._LOGGER')
def test_update_with_json_attrs_bad_JSON(self, mock_logger):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'This is text rather than JSON data.'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class, None, ['key'],
self.force_update)
self.sensor.update()
assert {} == self.sensor.device_state_attributes
assert mock_logger.warning.called
assert mock_logger.debug.called
def test_update_with_json_attrs_and_template(self):
"""Test attributes get extracted from a JSON result."""
self.rest.update = Mock('rest.RestData.update',
side_effect=self.update_side_effect(
'{ "key": "json_state_updated_value" }'))
self.sensor = rest.RestSensor(self.hass, self.rest, self.name,
self.unit_of_measurement,
self.device_class,
self.value_template, ['key'],
self.force_update)
self.sensor.update()
assert 'json_state_updated_value' == self.sensor.state
assert 'json_state_updated_value' == \
self.sensor.device_state_attributes['key'], \
self.force_update
class TestRestData(unittest.TestCase):
"""Tests for RestData."""
def setUp(self):
"""Set up things to be run when tests are started."""
self.method = "GET"
self.resource = "http://localhost"
self.verify_ssl = True
self.timeout = 10
self.rest = rest.RestData(
self.method, self.resource, None, None, None, self.verify_ssl,
self.timeout)
@requests_mock.Mocker()
def test_update(self, mock_req):
"""Test update."""
mock_req.get('http://localhost', text='test data')
self.rest.update()
assert 'test data' == self.rest.data
@patch('requests.Session', side_effect=RequestException)
def test_update_request_exception(self, mock_req):
"""Test update when a request exception occurs."""
self.rest.update()
assert self.rest.data is None
| apache-2.0 | -8,433,154,817,086,836,000 | 40.963455 | 79 | 0.543267 | false |
MaxTyutyunnikov/lino | lino/modlib/cal/__init__.py | 1 | 1851 | # -*- coding: UTF-8 -*-
## Copyright 2011-2013 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, see <http://www.gnu.org/licenses/>.
"""
This module turns Lino into a basic calendar client.
When using this app, you will probably also want to set
:settings:`use_extensible` to True.
"""
#~ class SiteMixin(object):
#~ """
    #~ Class methods and attributes added to a Site by this module.
#~ """
#~ def get_reminder_generators_by_user(self,user):
#~ """
#~ Override this per application to return a list of
        #~ reminder generators from all models for a given user.
        #~ A reminder generator is an object that has an `update_reminders`
#~ method.
#~ """
#~ return []
#~ def get_todo_tables(self,ar):
#~ """
#~ Return or yield a list of tables that should be empty
#~ """
#~ from django.db.models import loading
#~ for mod in loading.get_apps():
#~ meth = getattr(mod,'get_todo_tables',None)
#~ if meth is not None:
#~ dblogger.debug("Running %s of %s", methname, mod.__name__)
#~ for i in meth(self,ar):
#~ yield i
| gpl-3.0 | -8,836,564,157,821,136,000 | 35.77551 | 77 | 0.605078 | false |
josephsnyder/VistA | Scripts/DefaultKIDSBuildInstaller.py | 1 | 30883 | #---------------------------------------------------------------------------
# Copyright 2012-2019 The Open Source Electronic Health Record Alliance
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
from __future__ import division
from __future__ import print_function
from builtins import object
from past.utils import old_div
import sys
import os
import re
import tempfile
import shutil
import argparse
import glob
from PatchInfoParser import installNameToDirName
from VistATestClient import VistATestClientFactory, createTestClientArgParser
from LoggerManager import logger, initConsoleLogging
from VistAPackageInfoFetcher import VistAPackageInfoFetcher
from VistAGlobalImport import VistAGlobalImport, DEFAULT_GLOBAL_IMPORT_TIMEOUT
from ExternalDownloader import obtainKIDSBuildFileBySha1
from ConvertToExternalData import readSha1SumFromSha1File
from ConvertToExternalData import isValidExternalDataFileName
from ConvertToExternalData import isValidGlobalFileSuffix, isValidGlobalSha1Suffix
from ConvertToExternalData import getSha1HashFromExternalDataFileName
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
DEFAULT_CACHE_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, "../"))
from VistAMenuUtil import VistAMenuUtil
DEFAULT_INSTALL_DUZ = 17 # VistA user, "USER,SEVENTEEN"
CHECK_INSTALLATION_PROGRESS_TIMEOUT = 7200 # 7200 seconds or 120 minutes
GLOBAL_IMPORT_BYTE_PER_SEC = 0.5*1024*1024 # import speed is 0.5 MiB per sec
""" Default Installer for KIDS Build """
class DefaultKIDSBuildInstaller(object):
#---------------------------------------------------------------------------#
# Class Constants
#---------------------------------------------------------------------------#
""" A list of tuple, defined the action list corresponding to KIDS Build
questions that might need to act.
each tuple should have three items.
first item: KIDS Menu option text
second item: default answer, use \"\" for default
third item: bool flag to indicate whether to break out of the menu loop
If more menu options is needed, please either add extra option
in the subclass if just specific to that KIDS Build, or add it here if
it is a general question
"""
KIDS_MENU_OPTION_ACTION_LIST = [
("Want to continue installing this build\?","YES", False),
("Enter the Coordinator for Mail Group", "POSTMASTER", False),
("Want KIDS to Rebuild Menu Trees Upon Completion of Install\?",
"", False),
("Want KIDS to INHIBIT LOGONs during the install?",
"NO", False),
("Want to DISABLE Scheduled Options, Menu Options, and Protocols\?",
"NO", False),
("Delay Install \(Minutes\): \(0\-60\):", "0", False),
("do you want to include disabled components\?", "NO", False),
("DEVICE:", None, True)
]
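  # A hypothetical sketch (not part of the original class) of how a
  # subclass could extend the list above for a build-specific prompt;
  # the prompt text and answer below are made up for illustration:
  #
  #   class MyKIDSInstaller(DefaultKIDSBuildInstaller):
  #     KIDS_MENU_OPTION_ACTION_LIST = (
  #       DefaultKIDSBuildInstaller.KIDS_MENU_OPTION_ACTION_LIST[:-1] + [
  #         ("Want to run the sample conversion\?", "NO", False),
  #         ("DEVICE:", None, True),  # keep the breakout entry last
  #       ])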
""" A list of tuple, defined the action list corresponding to KIDS Build
questions that might need to act.
each tuple should have three items.
first item: KIDS Menu option text
second item: default answer, use \"\" for default
third item: bool flag to indicate whether to break out of the menu loop
If more menu options is needed, please either add extra option
in the subclass if just specific to that KIDS Build, or add it here if
it is a general question
"""
KIDS_LOAD_QUESTION_ACTION_LIST = [
("OK to continue with Load","YES", False),
("Want to Continue with Load\?","YES", False),
("Select Installation ","?", True),
("Want to continue installing this build\?","YES", False),
("Want to RUN the Environment Check Routine\? YES//","YES",False)
]
""" option action list for Exit KIDS menu, similar struct as above """
EXIT_KIDS_MENU_ACTION_LIST = [
("Select Installation ", "", False),
("Select Kernel Installation & Distribution System ", "", False),
("Select Programmer Options ", "", False),
("Select Systems Manager Menu ", "", False),
("Do you really want to halt\?", "YES", True)
]
KIDS_FILE_PATH_MAX_LEN = 75 # this might need to be fixed in VistA XPD
#---------------------------------------------------------------------------#
# Class Methods
#---------------------------------------------------------------------------#
""" Constructor
@kidsFile: the absolute path to KIDS Build file
@kidsInstallName: the install name for the KIDS Build
@seqNo: seqNo of the KIDS Build, default is None
@logFile: logFile to store the log information for VistA interaction
@multiBuildList: a python list of install names, only applies to
a multibuilds KIDS Build
@duz: the applier's VistA DUZ, default is set to 17, in VistA FOIA
it is USER SEVENTEEN
@**kargs: any extra information that might be needed
"""
def __init__(self, kidsFile, kidsInstallName, seqNo=None, logFile=None,
multiBuildList = None, duz = DEFAULT_INSTALL_DUZ, **kargs):
assert os.path.exists(kidsFile), ("kids file does not exist %s" % kidsFile)
self._origKidsFile = kidsFile
if len(kidsFile) >= self.KIDS_FILE_PATH_MAX_LEN:
destFilename = os.path.basename(kidsFile)
tempDir = tempfile.gettempdir()
if isValidExternalDataFileName(kidsFile):
# if read directly from inplace, need to replace the name with hash
destFilename = getSha1HashFromExternalDataFileName(kidsFile)
while (len(tempDir)+len(destFilename)+1) >= self.KIDS_FILE_PATH_MAX_LEN:
tempDir = os.path.split(tempDir)[0]
dest = os.path.join(tempDir, destFilename)
shutil.copy(kidsFile, dest)
self._kidsFile = os.path.normpath(dest)
logger.info("new kids file is %s" % self._kidsFile)
else:
self._kidsFile = os.path.normpath(kidsFile)
self._kidsInstallName = kidsInstallName
self._logFile = logFile
self._duz = duz
self._updatePackageLink = False
self._multiBuildList = multiBuildList
    # store all the global files associated with the KIDS build
self._globalFiles = None
if "globals" in kargs:
self._globalFiles = kargs['globals']
self._tgOutputDir = None
if "printTG" in kargs:
self._tgOutputDir = kargs['printTG']
""" set up the log for VistA connection
@connection: a connection from a VistATestClient
"""
def __setupLogFile__(self, connection):
if self._logFile:
connection.logfile = open(self._logFile, "ab")
else:
connection.logfile = sys.stdout
""" Go to KIDS Main Menu
      Always start with ready state (wait for prompt)
"""
def __gotoKIDSMainMenu__(self, vistATestClient):
menuUtil = VistAMenuUtil(self._duz)
menuUtil.gotoKidsMainMenu(vistATestClient)
""" load the KIDS build distribution file via menu
must be called while in KIDS Main Menu
"""
def __loadKIDSBuild__(self, connection):
connection.send("Installation\r")
connection.expect("Select Installation ")
connection.send("1\r") # load the distribution
connection.expect("Enter a Host File:")
connection.send(self._kidsFile+"\r")
""" Answer all the KIDS install questions
"""
def __handleKIDSInstallQuestions__(self, connection, connection2=None):
connection.send("Install\r")
connection.expect("Select INSTALL NAME:")
connection.send(self._kidsInstallName+"\r")
""" handle any questions lastconnection general KIDS installation questions"""
result = self.handleKIDSInstallQuestions(connection)
if not result:
return False
kidsMenuActionLst = self.KIDS_MENU_OPTION_ACTION_LIST
while True:
index = connection.expect([x[0] for x in kidsMenuActionLst])
if index > 0:
sendCmd = kidsMenuActionLst[index][1]
if sendCmd != None:
connection.send("%s\r" % sendCmd)
if kidsMenuActionLst[index][2]:
break
else:
connection.send("")
return True
""" restart the previous installation
"""
def restartInstallation(self, vistATestClient):
logger.warn("restart the previous installation for %s" %
self._kidsInstallName)
connection = vistATestClient.getConnection()
self.__gotoKIDSMainMenu__(vistATestClient)
self.__selectRestartInstallOption__(connection)
index = connection.expect(["DEVICE: ", "Select INSTALL NAME: "])
if index == 0:
self.__installationCommon__(vistATestClient)
return True
else:
logger.error("Restart install %s failed" % self._kidsInstallName)
""" go back to KIDS main menu first """
connection.send('\r')
connection.expect("Select Installation ")
connection.send('\r')
""" try to unload a distribution first """
result = self.unloadDistribution(vistATestClient, False)
if not result:
logger.error("Unload Distribution %s failed" % self._kidsInstallName)
return self.normalInstallation(vistATestClient)
""" go to the restart KIDS build option """
def __selectRestartInstallOption__(self, connection):
connection.send("Installation\r")
connection.expect("Select Installation ")
connection.send("Restart Install of\r") # restart install of package(s)
connection.expect("Select INSTALL NAME: ")
connection.send(self._kidsInstallName+"\r")
""" go to the unload a distribution option """
def __selectUnloadDistributionOption__(self, connection):
#connection.expect("Select Kernel Installation & Distribution System ")
connection.send("installation\r")
connection.expect("Select Installation ")
connection.send("Unload a Distribution\r")
connection.expect("Select INSTALL NAME: ")
connection.send(self._kidsInstallName+"\r")
""" unload a previous loaded distribution """
def unloadDistribution(self, vistATestClient, waitForPrompt=True):
connection = vistATestClient.getConnection()
logger.info("Unload distribution for %s" % self._kidsInstallName)
if waitForPrompt:
self.__gotoKIDSMainMenu__(vistATestClient)
self.__selectUnloadDistributionOption__(connection)
index = connection.expect([
"Want to continue with the Unload of this Distribution\? NO// ",
"Select INSTALL NAME: "])
if index == 1:
connection.send('\r')
self.__exitKIDSMenu__(vistATestClient)
return False
connection.send('YES\r')
self.__exitKIDSMenu__(vistATestClient)
return True
""" Do a fresh load and installation """
def normalInstallation(self, vistATestClient, vistATestClient2=None, reinst=True):
logger.info("Start installing %s" % self._kidsInstallName)
connection = vistATestClient.getConnection()
if vistATestClient2:
connection2 = vistATestClient2.getConnection()
self.__gotoKIDSMainMenu__(vistATestClient)
self.__loadKIDSBuild__(connection)
result = self.__handleKIDSLoadOptions__(connection, reinst)
if not result:
logger.error("Error handling KIDS Load Options %s, %s" %
(self._kidsInstallName, self._kidsFile))
return False
if self._tgOutputDir:
if self._multiBuildList is None:
self.__printTransportGlobal__(vistATestClient,[self._kidsInstallName],self._tgOutputDir)
else:
self.__printTransportGlobal__(vistATestClient,self._multiBuildList,self._tgOutputDir)
if vistATestClient2:
result = self.__handleKIDSInstallQuestions__(connection, connection2)
else:
result = self.__handleKIDSInstallQuestions__(connection)
if not result:
result = self.unloadDistribution(vistATestClient, False)
if not result:
logger.error("Unload %s failed" % self._kidsInstallName)
return False
return self.normalInstallation(vistATestClient, vistATestClient2, reinst)
self.__installationCommon__(vistATestClient)
return True
""" common shared workflow in KIDS installation process """
def __installationCommon__(self, vistATestClient):
connection = vistATestClient.getConnection()
self.setupDevice(connection)
self.__checkInstallationProgress__(connection)
self.__exitKIDSMenu__(vistATestClient)
self.extraFixWork(vistATestClient)
""" Handle options during load KIDS distribution section """
def __handleKIDSLoadOptions__(self, connection, reinst):
loadOptionActionList = self.KIDS_LOAD_QUESTION_ACTION_LIST[:]
""" make sure install completed is the last one """
loadOptionActionList.append(
(self._kidsInstallName + " Install Completed", None))
while True:
index = connection.expect([x[0] for x in loadOptionActionList], 120)
if index == len(loadOptionActionList) - 1:
if not reinst:
return False
else:
connection.send("%s\r" % (loadOptionActionList[index][1]))
if loadOptionActionList[index][2]:
break
return True
""" Exit the KIDS Menu option.
Make sure the VistA connection is in the ready state (wait for prompt)
"""
def __exitKIDSMenu__(self, vistATestClient):
exitMenuActionList = self.EXIT_KIDS_MENU_ACTION_LIST[:]
connection = vistATestClient.getConnection()
""" add wait for prompt """
exitMenuActionList.append((vistATestClient.getPrompt(), "\r", True))
expectList = [x[0] for x in exitMenuActionList]
while True:
idx = connection.expect(expectList,120)
connection.send("%s\r" % exitMenuActionList[idx][1])
if exitMenuActionList[idx][2]:
break
""" Checking the current status of the KIDS build
"""
def __checkInstallationProgress__(self, connection):
KIDS_BUILD_STATUS_ACTION_LIST = [
("Running Pre-Install Routine:",self.runPreInstallationRoutine,False),
("Running Post-Install Routine:",self.runPostInstallationRoutine,False),
("Begin Post-Install:",None,False),
("Starting Menu Rebuild:", None , False),
("Installing Routines:", None , False),
("Installing Data:", None , False),
("Menu Rebuild Complete:", None , False),
("Installing PACKAGE COMPONENTS:", None ,False),
("Send mail to: ", self.handleSendMailToOptions, False),
("Select Installation ", self.handleInstallError, True),
("Install Completed", self.installCompleted, True)
]
""" Bulid the status update action list """
statusActionList = []
installName = self._kidsInstallName
if self._multiBuildList:
for item in self._multiBuildList:
statusActionList.append(
(re.escape("Install Started for %s :" %item), None, False))
statusActionList.append(
(re.escape("%s Installed." % item), None, False))
else:
statusActionList.append(
(re.escape("Install Started for %s :" % installName),
None, False))
statusActionList.append(
(re.escape("%s Installed." % installName), None, False))
statusActionList.extend(KIDS_BUILD_STATUS_ACTION_LIST)
expectList = [x[0] for x in statusActionList]
while True:
index = connection.expect(expectList, CHECK_INSTALLATION_PROGRESS_TIMEOUT)
status = expectList[index].replace("\\","")
logger.info(status)
callback = statusActionList[index][1]
if callback:
callback(connection, status=status)
if statusActionList[index][2]:
break
else:
continue
""" This is the entry point of KIDS installer
It defines the workflow of KIDS installation process
      @reinst: whether to re-install the KIDS build, default is False
@return, True if no error, otherwise False
"""
def runInstallation(self, vistATestClient, vistATestClient2=None, reinst=False):
connection = vistATestClient.getConnection()
self.__setupLogFile__(connection)
infoFetcher = VistAPackageInfoFetcher(vistATestClient)
installStatus = infoFetcher.getInstallationStatus(self._kidsInstallName)
""" select KIDS installation workflow based on install status """
if infoFetcher.isInstallCompleted(installStatus):
logger.warn("install %s is already completed!" %
self._kidsInstallName)
if not reinst:
return True
# run pre-installation preparation
self.preInstallationWork(vistATestClient)
if infoFetcher.isInstallStarted(installStatus):
return self.restartInstallation(vistATestClient)
return self.normalInstallation(vistATestClient,vistATestClient2, reinst)
def __printTGlobalChecksums__(self,testClient,installname,outputDir):
connection = testClient.getConnection()
connection.expect("Select Installation")
connection.send("Verify Checksums\r")
connection.expect("Select INSTALL NAME")
connection.send(installname +"\r")
connection.expect("Want each Routine Listed with Checksums")
connection.send("YES\r")
connection.expect("DEVICE")
connection.send("HFS\r")
connection.expect("HOST FILE NAME")
logfile=os.path.join(outputDir,installNameToDirName(installname)+"Checksums.log")
if testClient.isCache():
logfile=os.path.normpath(logfile)
connection.send(logfile+"\r")
connection.expect("PARAMETERS")
if testClient.isCache():
connection.send("\r")
else:
connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
index = connection.expect(["Select Installation","overwrite it"],600)
if index == 0:
connection.send("?\r")
else:
connection.send('\r')
def __printTGlobalSummary__(self,testClient,installname,outputDir):
connection = testClient.getConnection()
connection.expect("Select Installation")
connection.send("Print Transport Global\r")
connection.expect("Select INSTALL NAME")
connection.send(installname +"\r")
connection.expect("What to Print")
connection.send('2\r')
connection.expect("DEVICE")
connection.send("HFS\r")
connection.expect("HOST FILE NAME")
logfile=os.path.join(outputDir,installNameToDirName(installname)+"Print.log")
if testClient.isCache():
logfile=os.path.normpath(logfile)
connection.send(logfile+"\r")
connection.expect("PARAMETERS")
if testClient.isCache():
connection.send("\r")
else:
connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
index = connection.expect(["Select Installation","overwrite it"],600)
if index == 0:
connection.send("?\r")
else:
connection.send('\r')
def __printTGlobalCompare__(self,testClient,installname,outputDir):
connection = testClient.getConnection()
connection.expect("Select Installation")
connection.send("Compare Transport Global\r")
connection.expect("Select INSTALL NAME")
connection.send(installname +"\r")
connection.expect("Type of Compare")
connection.send("1\r")
connection.expect("DEVICE")
connection.send("HFS\r")
connection.expect("HOST FILE NAME")
logfile=os.path.join(outputDir,installNameToDirName(installname)+"Compare.log")
if testClient.isCache():
logfile=os.path.normpath(logfile)
connection.send(logfile+"\r")
connection.expect("PARAMETERS")
if testClient.isCache():
connection.send("\r")
else:
connection.send("NEWVERSION:NOREADONLY:VARIABLE\r")
index = connection.expect(["Select Installation","overwrite it"],600)
if index == 0:
connection.send("?\r")
else:
connection.send('\r')
''' Print out the checksums and the summary of the transport global '''
def __printTransportGlobal__(self,testClient,installNameList,outputDir):
for installName in installNameList:
self.__printTGlobalChecksums__(testClient,installName,outputDir)
self.__printTGlobalSummary__(testClient,installName,outputDir)
self.__printTGlobalCompare__(testClient,installName,outputDir)
#---------------------------------------------------------------------------#
# Public override methods sections
#---------------------------------------------------------------------------#
""" Set up the KIDS installation result output device
default is to use HOME device
      if you want to use a different device, please override this method
"""
def setupDevice(self, connection):
connection.send("HOME;82;999\r")
""" intended to be implemented by subclass
      this is to handle any build-related questions that
      come up before the general KIDS questions
default implementation is to check the error condition
"""
def handleKIDSInstallQuestions(self, connection, **kargs):
errorCheckTimeout = 5 # 5 seconds
try:
connection.expect("\*\*INSTALL FILE IS CORRUPTED\*\*",errorCheckTimeout)
logger.error("%s:INSTALL FILE IS CORRUPTED" % self._kidsInstallName)
connection.expect("Select Installation ", errorCheckTimeout)
connection.send('\r')
return False
except Exception as ex:
return True
""" intended to be implemented by subclass
      answer questions related to the pre-install routine
"""
def runPreInstallationRoutine(self, connection, **kargs):
pass
""" intended to be implemented by subclass
      answer questions related to the post-install routine
"""
def runPostInstallationRoutine(self, connection, **kargs):
pass
""" intended to be implemented by subclass """
def extraFixWork(self, vistATestClient):
pass
""" default action for Send Mail To option
please override or enhance it if more action is needed
"""
def handleSendMailToOptions(self, connection, **kargs):
connection.send("\r")
connection.expect("Select basket to send to: ")
connection.send("\r")
connection.expect("Send ")
connection.send("\r")
""" default action for install completed
please override or enhance it if more action is needed
"""
def installCompleted(self, connection, **kargs):
    extraInfo = connection.before
logger.debug(extraInfo)
if re.search("No link to PACKAGE file", extraInfo):
self._updatePackageLink = True
logger.warn("You might have to update KIDS build %s to link"
" to Package file" %
(self._kidsInstallName))
""" default action for installation error
please override or enhance it if more action is needed
"""
def handleInstallError(self, connection, **kargs):
logger.error("Installation failed for %s" % self._kidsInstallName)
connection.send("\r")
""" default action for pre-installation preperation.
right now it is just to import the globals file under
the same directory as the KIDs directory
please override or enhance it if more action is needed
"""
def preInstallationWork(self, vistATestClient, **kargs):
""" ignore the multi-build patch for now """
if self._multiBuildList is not None:
return
globalFiles = self.__getGlobalFileList__()
if globalFiles is None or len(globalFiles) == 0:
return
globalImport = VistAGlobalImport()
for glbFile in globalFiles:
logger.info("Import global file %s" % (glbFile))
fileSize = os.path.getsize(glbFile)
importTimeout = DEFAULT_GLOBAL_IMPORT_TIMEOUT
importTimeout += int(old_div(fileSize,GLOBAL_IMPORT_BYTE_PER_SEC))
globalImport.importGlobal(vistATestClient, glbFile, timeout=importTimeout)
#---------------------------------------------------------------------------#
# Utilities Functions
#---------------------------------------------------------------------------#
""" utility function to find the all global files ends with GLB/s """
def __getGlobalFileList__(self):
globalFiles = []
if self._globalFiles is None or len(self._globalFiles) == 0:
return globalFiles
for gFile in self._globalFiles:
if isValidGlobalFileSuffix(gFile):
globalFiles.append(gFile)
continue
if isValidGlobalSha1Suffix(gFile): # external file
sha1Sum = readSha1SumFromSha1File(gFile)
(result, path) = obtainKIDSBuildFileBySha1(gFile,
sha1Sum,
DEFAULT_CACHE_DIR)
if not result:
logger.error("Could not obtain global file for %s" % gFile)
raise Exception("Error getting global file for %s" % gFile)
globalFiles.append(path)
if len(globalFiles) > 0:
logger.info("global file lists %s" % globalFiles)
return globalFiles
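# Illustrative use of the installer class above (a sketch; the KIDS
# file name and install name below are placeholders, not a real patch):
#
#   installer = DefaultKIDSBuildInstaller(
#       "/path/to/PKG-1.0_SEQ-1.KIDs", "PKG*1.0*1",
#       seqNo="1", logFile="install.log")
#   with testClient:  # a client from VistATestClientFactory
#       installer.runInstallation(testClient, reinst=False)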
""" utility function to find the name associated the DUZ """
def getPersonNameByDuz(inputDuz, vistAClient):
logger.info ("inputDuz is %s" % inputDuz)
""" user Kernel User API """
connection = vistAClient.getConnection()
menuUtil = VistAMenuUtil(duz=1)
menuUtil.gotoSystemMenu(vistAClient)
connection.send('Prog\r')
connection.expect('Select Programmer Options')
connection.send('^\r')
menuUtil.exitSystemMenu(vistAClient)
vistAClient.waitForPrompt()
connection.send('W $$NAME^XUSER(%s)\r' % inputDuz)
connection.expect('\)') # get rid of the echo
vistAClient.waitForPrompt()
  result = connection.before.strip(' \r\n')
connection.send('\r')
return result
""" function to add an entry to PACAKGE HISTORY """
def addPackagePatchHistory(packageName, version, seqNo,
patchNo, vistAClient, inputDuz):
logger.info("Adding %s, %s, %s, %s to Package Patch history" %
(packageName, version, seqNo, patchNo))
connection = vistAClient.getConnection()
menuUtil = VistAMenuUtil(duz=1)
menuUtil.gotoFileManEditEnterEntryMenu(vistAClient)
connection.send("9.4\r") # package file
connection.expect("EDIT WHICH FIELD: ")
connection.send("VERSION\r")
connection.expect("EDIT WHICH VERSION SUB-FIELD: ")
connection.send("PATCH APPLICATION HISTORY\r")
connection.expect("EDIT WHICH PATCH APPLICATION HISTORY SUB-FIELD: ")
connection.send("ALL\r")
connection.expect("THEN EDIT VERSION SUB-FIELD: ")
connection.send("\r")
connection.expect("THEN EDIT FIELD: ")
connection.send("\r")
connection.expect("Select PACKAGE NAME: ")
connection.send("%s\r" % packageName)
connection.expect("Select VERSION: %s//" % version)
connection.send("\r")
connection.expect("Select PATCH APPLICATION HISTORY: ")
connection.send("%s SEQ #%s\r" % (patchNo, seqNo))
connection.expect("Are you adding .*\? No//")
connection.send("YES\r")
connection.expect("DATE APPLIED: ")
connection.send("T\r")
connection.expect("APPLIED BY: ")
connection.send("`%s\r" % inputDuz)
connection.expect("DESCRIPTION:")
connection.send("\r")
connection.expect("Select PATCH APPLICATION HISTORY: ")
connection.send("\r")
connection.expect("Select PACKAGE NAME: ")
connection.send("\r")
menuUtil.exitFileManMenu(vistAClient)
""" class KIDSInstallerFactory
    creates KIDS installers via factory methods
"""
class KIDSInstallerFactory(object):
installerDict = {}
@staticmethod
def createKIDSInstaller(kidsFile, kidsInstallName,
seqNo=None, logFile=None,
multiBuildList=None, duz=DEFAULT_INSTALL_DUZ,
**kargs):
return KIDSInstallerFactory.installerDict.get(
kidsInstallName,
DefaultKIDSBuildInstaller)(kidsFile,
kidsInstallName,
seqNo, logFile,
multiBuildList, duz,
**kargs)
@staticmethod
def registerKidsInstaller(kidsInstallName, kidsInstaller):
KIDSInstallerFactory.installerDict[kidsInstallName] = kidsInstaller
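  # Sketch of wiring a custom installer through the factory (the class
  # and install name below are hypothetical):
  #
  #   KIDSInstallerFactory.registerKidsInstaller("LR*5.2*334",
  #                                              LabKIDSBuildInstaller)
  #   installer = KIDSInstallerFactory.createKIDSInstaller(
  #       kidsFile, "LR*5.2*334")  # -> LabKIDSBuildInstaller instance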
""" Test code """
def createTestClient():
testClientParser = createTestClientArgParser()
parser = argparse.ArgumentParser(description='Default KIDS Installer',
parents=[testClientParser])
  result = parser.parse_args()
print (result)
testClient = VistATestClientFactory.createVistATestClientWithArgs(result)
return testClient
def testAddPackagePatchHistory():
testClient = createTestClient()
with testClient:
addPackagePatchHistory("LAB SERVICE", "5.2", "288", "334",
testClient, 17)
""" Test Function getPersonNameByDuz """
def testGetPersonNameByDuz():
testClient = createTestClient()
initConsoleLogging()
with testClient:
result = getPersonNameByDuz(1, testClient)
print ("Name is [%s]" % result)
""" main entry """
def main():
testClientParser = createTestClientArgParser()
parser = argparse.ArgumentParser(description='Default KIDS Installer',
parents=[testClientParser])
parser.add_argument('kidsFile', help='path to KIDS Build file')
parser.add_argument('-l', '--logFile', default=None, help='path to logFile')
parser.add_argument('-r', '--reinstall', default=False, action='store_true',
            help='whether to re-install the KIDS build even if it is already installed')
parser.add_argument('-t', '--tglobalprint', default=None,
help='folder to hold a printout of Transport global information')
parser.add_argument('-g', '--globalFiles', default=None, nargs='*',
help='list of global files that need to import')
parser.add_argument('-d', '--duz', default=DEFAULT_INSTALL_DUZ, type=int,
help='installer\'s VistA instance\'s DUZ')
  result = parser.parse_args()
print (result)
testClient = VistATestClientFactory.createVistATestClientWithArgs(result)
assert testClient
initConsoleLogging()
with testClient:
kidsFile = os.path.abspath(result.kidsFile)
from KIDSBuildParser import KIDSBuildParser
kidsParser = KIDSBuildParser(None)
kidsParser.unregisterSectionHandler(KIDSBuildParser.ROUTINE_SECTION)
kidsParser.parseKIDSBuild(kidsFile)
installNameList = kidsParser.installNameList
installName = installNameList[0]
multiBuildList = installNameList
if len(installNameList) == 1:
multiBuildList = None
defaultKidsInstall = DefaultKIDSBuildInstaller(kidsFile,
installName,
logFile=result.logFile,
multiBuildList=multiBuildList,
duz = result.duz,
globals=result.globalFiles,
printTG=result.tglobalprint)
    defaultKidsInstall.runInstallation(testClient, reinst=result.reinstall)
if __name__ == "__main__":
main()
| apache-2.0 | -6,796,245,939,135,587,000 | 40.903664 | 96 | 0.667455 | false |
jxta/cc | vendor/Twisted-10.0.0/twisted/web/test/test_domhelpers.py | 53 | 11063 | # -*- test-case-name: twisted.web.test.test_domhelpers -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Specific tests for (some of) the methods in L{twisted.web.domhelpers}.
"""
from xml.dom import minidom
from twisted.trial.unittest import TestCase
from twisted.web import microdom
from twisted.web import domhelpers
class DOMHelpersTestsMixin:
"""
A mixin for L{TestCase} subclasses which defines test methods for
domhelpers functionality based on a DOM creation function provided by a
subclass.
"""
dom = None
def test_getElementsByTagName(self):
doc1 = self.dom.parseString('<foo/>')
actual=domhelpers.getElementsByTagName(doc1, 'foo')[0].nodeName
expected='foo'
self.assertEquals(actual, expected)
el1=doc1.documentElement
actual=domhelpers.getElementsByTagName(el1, 'foo')[0].nodeName
self.assertEqual(actual, expected)
doc2_xml='<a><foo in="a"/><b><foo in="b"/></b><c><foo in="c"/></c><foo in="d"/><foo in="ef"/><g><foo in="g"/><h><foo in="h"/></h></g></a>'
doc2 = self.dom.parseString(doc2_xml)
tag_list=domhelpers.getElementsByTagName(doc2, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
expected='abcdefgh'
self.assertEquals(actual, expected)
el2=doc2.documentElement
tag_list=domhelpers.getElementsByTagName(el2, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
self.assertEqual(actual, expected)
doc3_xml='''
<a><foo in="a"/>
<b><foo in="b"/>
<d><foo in="d"/>
<g><foo in="g"/></g>
<h><foo in="h"/></h>
</d>
<e><foo in="e"/>
<i><foo in="i"/></i>
</e>
</b>
<c><foo in="c"/>
<f><foo in="f"/>
<j><foo in="j"/></j>
</f>
</c>
</a>'''
doc3 = self.dom.parseString(doc3_xml)
tag_list=domhelpers.getElementsByTagName(doc3, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
expected='abdgheicfj'
self.assertEquals(actual, expected)
el3=doc3.documentElement
tag_list=domhelpers.getElementsByTagName(el3, 'foo')
actual=''.join([node.getAttribute('in') for node in tag_list])
self.assertEqual(actual, expected)
doc4_xml='<foo><bar></bar><baz><foo/></baz></foo>'
doc4 = self.dom.parseString(doc4_xml)
actual=domhelpers.getElementsByTagName(doc4, 'foo')
root=doc4.documentElement
expected=[root, root.childNodes[-1].childNodes[0]]
self.assertEquals(actual, expected)
actual=domhelpers.getElementsByTagName(root, 'foo')
self.assertEqual(actual, expected)
def test_gatherTextNodes(self):
doc1 = self.dom.parseString('<a>foo</a>')
actual=domhelpers.gatherTextNodes(doc1)
expected='foo'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc1.documentElement)
self.assertEqual(actual, expected)
doc2_xml='<a>a<b>b</b><c>c</c>def<g>g<h>h</h></g></a>'
doc2 = self.dom.parseString(doc2_xml)
actual=domhelpers.gatherTextNodes(doc2)
expected='abcdefgh'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc2.documentElement)
self.assertEqual(actual, expected)
doc3_xml=('<a>a<b>b<d>d<g>g</g><h>h</h></d><e>e<i>i</i></e></b>' +
'<c>c<f>f<j>j</j></f></c></a>')
doc3 = self.dom.parseString(doc3_xml)
actual=domhelpers.gatherTextNodes(doc3)
expected='abdgheicfj'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc3.documentElement)
self.assertEqual(actual, expected)
def test_clearNode(self):
doc1 = self.dom.parseString('<a><b><c><d/></c></b></a>')
a_node=doc1.documentElement
domhelpers.clearNode(a_node)
self.assertEqual(
a_node.toxml(),
self.dom.Element('a').toxml())
doc2 = self.dom.parseString('<a><b><c><d/></c></b></a>')
b_node=doc2.documentElement.childNodes[0]
domhelpers.clearNode(b_node)
actual=doc2.documentElement.toxml()
expected = self.dom.Element('a')
expected.appendChild(self.dom.Element('b'))
self.assertEqual(actual, expected.toxml())
def test_get(self):
doc1 = self.dom.parseString('<a><b id="bar"/><c class="foo"/></a>')
node=domhelpers.get(doc1, "foo")
actual=node.toxml()
expected = self.dom.Element('c')
expected.setAttribute('class', 'foo')
self.assertEqual(actual, expected.toxml())
node=domhelpers.get(doc1, "bar")
actual=node.toxml()
expected = self.dom.Element('b')
expected.setAttribute('id', 'bar')
self.assertEqual(actual, expected.toxml())
self.assertRaises(domhelpers.NodeLookupError,
domhelpers.get,
doc1,
"pzork")
def test_getIfExists(self):
doc1 = self.dom.parseString('<a><b id="bar"/><c class="foo"/></a>')
node=domhelpers.getIfExists(doc1, "foo")
actual=node.toxml()
expected = self.dom.Element('c')
expected.setAttribute('class', 'foo')
self.assertEqual(actual, expected.toxml())
node=domhelpers.getIfExists(doc1, "pzork")
self.assertIdentical(node, None)
def test_getAndClear(self):
doc1 = self.dom.parseString('<a><b id="foo"><c></c></b></a>')
node=domhelpers.getAndClear(doc1, "foo")
actual=node.toxml()
expected = self.dom.Element('b')
expected.setAttribute('id', 'foo')
self.assertEqual(actual, expected.toxml())
def test_locateNodes(self):
doc1 = self.dom.parseString('<a><b foo="olive"><c foo="olive"/></b><d foo="poopy"/></a>')
node_list=domhelpers.locateNodes(
doc1.childNodes, 'foo', 'olive', noNesting=1)
actual=''.join([node.toxml() for node in node_list])
expected = self.dom.Element('b')
expected.setAttribute('foo', 'olive')
c = self.dom.Element('c')
c.setAttribute('foo', 'olive')
expected.appendChild(c)
self.assertEqual(actual, expected.toxml())
node_list=domhelpers.locateNodes(
doc1.childNodes, 'foo', 'olive', noNesting=0)
actual=''.join([node.toxml() for node in node_list])
self.assertEqual(actual, expected.toxml() + c.toxml())
def test_getParents(self):
doc1 = self.dom.parseString('<a><b><c><d/></c><e/></b><f/></a>')
node_list = domhelpers.getParents(
doc1.childNodes[0].childNodes[0].childNodes[0])
actual = ''.join([node.tagName for node in node_list
if hasattr(node, 'tagName')])
self.assertEqual(actual, 'cba')
def test_findElementsWithAttribute(self):
doc1 = self.dom.parseString('<a foo="1"><b foo="2"/><c foo="1"/><d/></a>')
node_list = domhelpers.findElementsWithAttribute(doc1, 'foo')
actual = ''.join([node.tagName for node in node_list])
self.assertEqual(actual, 'abc')
node_list = domhelpers.findElementsWithAttribute(doc1, 'foo', '1')
actual = ''.join([node.tagName for node in node_list])
self.assertEqual(actual, 'ac')
def test_findNodesNamed(self):
doc1 = self.dom.parseString('<doc><foo/><bar/><foo>a</foo></doc>')
node_list = domhelpers.findNodesNamed(doc1, 'foo')
actual = len(node_list)
self.assertEqual(actual, 2)
# NOT SURE WHAT THESE ARE SUPPOSED TO DO..
# def test_RawText FIXME
# def test_superSetAttribute FIXME
# def test_superPrependAttribute FIXME
# def test_superAppendAttribute FIXME
# def test_substitute FIXME
def test_escape(self):
j='this string " contains many & characters> xml< won\'t like'
        expected='this string &quot; contains many &amp; characters&gt; xml&lt; won\'t like'
self.assertEqual(domhelpers.escape(j), expected)
def test_unescape(self):
        j='this string &quot; has &&amp; entities &gt; &lt; and some characters xml won\'t like<'
expected='this string " has && entities > < and some characters xml won\'t like<'
self.assertEqual(domhelpers.unescape(j), expected)
def test_getNodeText(self):
"""
L{getNodeText} returns the concatenation of all the text data at or
beneath the node passed to it.
"""
node = self.dom.parseString('<foo><bar>baz</bar><bar>quux</bar></foo>')
self.assertEqual(domhelpers.getNodeText(node), "bazquux")
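# A sketch of reusing the mixin above with another DOM implementation
# (`somedom` is hypothetical; it only needs a minidom-compatible
# parseString() and Element()):
#
#   class SomeDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
#       dom = somedom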
class MicroDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
dom = microdom
def test_gatherTextNodesDropsWhitespace(self):
"""
Microdom discards whitespace-only text nodes, so L{gatherTextNodes}
returns only the text from nodes which had non-whitespace characters.
"""
doc4_xml='''<html>
<head>
</head>
<body>
stuff
</body>
</html>
'''
doc4 = self.dom.parseString(doc4_xml)
actual = domhelpers.gatherTextNodes(doc4)
expected = '\n stuff\n '
self.assertEqual(actual, expected)
actual = domhelpers.gatherTextNodes(doc4.documentElement)
self.assertEqual(actual, expected)
def test_textEntitiesNotDecoded(self):
"""
Microdom does not decode entities in text nodes.
"""
        doc5_xml='<x>Souffl&amp;</x>'
doc5 = self.dom.parseString(doc5_xml)
actual=domhelpers.gatherTextNodes(doc5)
        expected='Souffl&amp;'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc5.documentElement)
self.assertEqual(actual, expected)
class MiniDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
dom = minidom
def test_textEntitiesDecoded(self):
"""
Minidom does decode entities in text nodes.
"""
        doc5_xml='<x>Souffl&amp;</x>'
doc5 = self.dom.parseString(doc5_xml)
actual=domhelpers.gatherTextNodes(doc5)
expected='Souffl&'
self.assertEqual(actual, expected)
actual=domhelpers.gatherTextNodes(doc5.documentElement)
self.assertEqual(actual, expected)
def test_getNodeUnicodeText(self):
"""
L{domhelpers.getNodeText} returns a C{unicode} string when text
nodes are represented in the DOM with unicode, whether or not there
are non-ASCII characters present.
"""
node = self.dom.parseString("<foo>bar</foo>")
text = domhelpers.getNodeText(node)
self.assertEqual(text, u"bar")
self.assertIsInstance(text, unicode)
node = self.dom.parseString(u"<foo>\N{SNOWMAN}</foo>".encode('utf-8'))
text = domhelpers.getNodeText(node)
self.assertEqual(text, u"\N{SNOWMAN}")
self.assertIsInstance(text, unicode)
| apache-2.0 | 5,335,483,112,513,000,000 | 35.153595 | 146 | 0.610775 | false |
PaulKinlan/cli-caniuse | site/app/scripts/bower_components/jsrepl-build/extern/python/closured/lib/python2.7/json/encoder.py | 103 | 16014 | """Implementation of JSONEncoder
"""
import re
try:
from _json import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from _json import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
'\\': '\\\\',
'"': '\\"',
'\b': '\\b',
'\f': '\\f',
'\n': '\\n',
'\r': '\\r',
'\t': '\\t',
}
for i in range(0x20):
ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
#ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
return '"' + ESCAPE.sub(replace, s) + '"'
def py_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
if isinstance(s, str) and HAS_UTF8.search(s) is not None:
s = s.decode('utf-8')
def replace(match):
s = match.group(0)
try:
return ESCAPE_DCT[s]
except KeyError:
n = ord(s)
if n < 0x10000:
return '\\u{0:04x}'.format(n)
#return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
#return '\\u%04x\\u%04x' % (s1, s2)
return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
encode_basestring_ascii = (
c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method with another method that returns a serializable
object for ``o`` if possible, otherwise it should call the superclass
implementation (to raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
def __init__(self, skipkeys=False, ensure_ascii=True,
check_circular=True, allow_nan=True, sort_keys=False,
indent=None, separators=None, encoding='utf-8', default=None):
"""Constructor for JSONEncoder, with sensible defaults.
If skipkeys is false, then it is a TypeError to attempt
encoding of keys that are not str, int, long, float or None. If
skipkeys is True, such items are simply skipped.
If ensure_ascii is true, the output is guaranteed to be str
objects with all incoming unicode characters escaped. If
ensure_ascii is false, the output will be unicode object.
If check_circular is true, then lists, dicts, and custom encoded
objects will be checked for circular references during encoding to
prevent an infinite recursion (which would cause an OverflowError).
Otherwise, no such check takes place.
If allow_nan is true, then NaN, Infinity, and -Infinity will be
encoded as such. This behavior is not JSON specification compliant,
but is consistent with most JavaScript based encoders and decoders.
Otherwise, it will be a ValueError to encode such floats.
If sort_keys is true, then the output of dictionaries will be
sorted by key; this is useful for regression tests to ensure
that JSON serializations can be compared on a day-to-day basis.
If indent is a non-negative integer, then JSON array
elements and object members will be pretty-printed with that
indent level. An indent level of 0 will only insert newlines.
None is the most compact representation.
If specified, separators should be a (item_separator, key_separator)
tuple. The default is (', ', ': '). To get the most compact JSON
representation you should specify (',', ':') to eliminate whitespace.
If specified, default is a function that gets called for objects
that can't otherwise be serialized. It should return a JSON encodable
version of the object or raise a ``TypeError``.
If encoding is not None, then all input strings will be
transformed into unicode using that encoding prior to JSON-encoding.
The default is UTF-8.
"""
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
self.indent = indent
if separators is not None:
self.item_separator, self.key_separator = separators
if default is not None:
self.default = default
self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
def encode(self, o):
"""Return a JSON string representation of a Python data structure.
>>> JSONEncoder().encode({"foo": ["bar", "baz"]})
'{"foo": ["bar", "baz"]}'
"""
# This is for extremely simple cases and benchmarks.
if isinstance(o, basestring):
if isinstance(o, str):
_encoding = self.encoding
if (_encoding is not None
and not (_encoding == 'utf-8')):
o = o.decode(_encoding)
if self.ensure_ascii:
return encode_basestring_ascii(o)
else:
return encode_basestring(o)
# This doesn't pass the iterator directly to ''.join() because the
# exceptions aren't as detailed. The list call should be roughly
# equivalent to the PySequence_Fast that ''.join() would do.
chunks = self.iterencode(o, _one_shot=True)
if not isinstance(chunks, (list, tuple)):
chunks = list(chunks)
return ''.join(chunks)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
representation as available.
For example::
for chunk in JSONEncoder().iterencode(bigobject):
mysocket.write(chunk)
"""
if self.check_circular:
markers = {}
else:
markers = None
if self.ensure_ascii:
_encoder = encode_basestring_ascii
else:
_encoder = encode_basestring
if self.encoding != 'utf-8':
def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
if isinstance(o, str):
o = o.decode(_encoding)
return _orig_encoder(o)
def floatstr(o, allow_nan=self.allow_nan,
_repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
# Check for specials. Note that this type of test is processor
# and/or platform-specific, so do tests which don't depend on the
# internals.
if o != o:
text = 'NaN'
elif o == _inf:
text = 'Infinity'
elif o == _neginf:
text = '-Infinity'
else:
return _repr(o)
if not allow_nan:
raise ValueError(
"Out of range float values are not JSON compliant: " +
repr(o))
return text
if (_one_shot and c_make_encoder is not None
and self.indent is None and not self.sort_keys):
_iterencode = c_make_encoder(
markers, self.default, _encoder, self.indent,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, self.allow_nan)
else:
_iterencode = _make_iterencode(
markers, self.default, _encoder, self.indent, floatstr,
self.key_separator, self.item_separator, self.sort_keys,
self.skipkeys, _one_shot)
return _iterencode(o, 0)
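# Usage sketch for the extension hook described in the class docstring
# (ComplexEncoder is illustrative, not part of this module; the same
# example appears in the standard json documentation):
#
#   class ComplexEncoder(JSONEncoder):
#       def default(self, o):
#           if isinstance(o, complex):
#               return [o.real, o.imag]
#           return JSONEncoder.default(self, o)
#
#   ComplexEncoder().encode(2 + 1j)  # -> '[2.0, 1.0]'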
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
basestring=basestring,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
long=long,
str=str,
tuple=tuple,
):
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
separator = _item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, basestring):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.iteritems()
for key, value in items:
if isinstance(key, basestring):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, basestring):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + (' ' * (_indent * _current_indent_level))
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, basestring):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
| apache-2.0 | -5,823,712,314,674,305,000 | 35.230769 | 78 | 0.523917 | false |
halfcrazy/sqlalchemy | lib/sqlalchemy/dialects/mysql/mysqlconnector.py | 59 | 5323 | # mysql/mysqlconnector.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+mysqlconnector
:name: MySQL Connector/Python
:dbapi: myconnpy
:connectstring: mysql+mysqlconnector://<user>:<password>@\
<host>[:<port>]/<dbname>
:url: http://dev.mysql.com/downloads/connector/python/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
"""
from .base import (MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer,
BIT)
from ... import util
import re
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
def get_lastrowid(self):
return self.cursor.lastrowid
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod_binary(self, binary, operator, **kw):
if self.dialect._mysqlconnector_double_percents:
return self.process(binary.left, **kw) + " %% " + \
self.process(binary.right, **kw)
else:
return self.process(binary.left, **kw) + " % " + \
self.process(binary.right, **kw)
def post_process_text(self, text):
if self.dialect._mysqlconnector_double_percents:
return text.replace('%', '%%')
else:
return text
def escape_literal_column(self, text):
if self.dialect._mysqlconnector_double_percents:
return text.replace('%', '%%')
else:
return text
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
if self.dialect._mysqlconnector_double_percents:
return value.replace("%", "%%")
else:
return value
class _myconnpyBIT(BIT):
def result_processor(self, dialect, coltype):
"""MySQL-connector already converts mysql bits, so."""
return None
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
supports_unicode_binds = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
default_paramstyle = 'format'
execution_ctx_cls = MySQLExecutionContext_mysqlconnector
statement_compiler = MySQLCompiler_mysqlconnector
preparer = MySQLIdentifierPreparer_mysqlconnector
colspecs = util.update_copy(
MySQLDialect.colspecs,
{
BIT: _myconnpyBIT,
}
)
@util.memoized_property
def supports_unicode_statements(self):
return util.py3k or self._mysqlconnector_version_info > (2, 0)
@classmethod
def dbapi(cls):
from mysql import connector
return connector
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
# unfortunately, MySQL/connector python refuses to release a
# cursor without reading fully, so non-buffered isn't an option
opts.setdefault('buffered', True)
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
if self.dbapi is not None:
try:
from mysql.connector.constants import ClientFlag
client_flags = opts.get(
'client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
except Exception:
pass
return [[], opts]
@util.memoized_property
def _mysqlconnector_version_info(self):
if self.dbapi and hasattr(self.dbapi, '__version__'):
m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
self.dbapi.__version__)
if m:
return tuple(
int(x)
for x in m.group(1, 2, 3)
if x is not None)
@util.memoized_property
def _mysqlconnector_double_percents(self):
return not util.py3k and self._mysqlconnector_version_info < (2, 0)
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
version = dbapi_con.get_server_version()
return tuple(version)
def _detect_charset(self, connection):
return connection.connection.charset
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e, connection, cursor):
errnos = (2006, 2013, 2014, 2045, 2055, 2048)
exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
if isinstance(e, exceptions):
return e.errno in errnos or \
"MySQL Connection not available." in str(e)
else:
return False
def _compat_fetchall(self, rp, charset=None):
return rp.fetchall()
def _compat_fetchone(self, rp, charset=None):
return rp.fetchone()
dialect = MySQLDialect_mysqlconnector
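# Usage sketch (an illustrative addition, not part of the upstream module):
# SQLAlchemy picks the dialect above from the "mysql+mysqlconnector://" URL
# scheme, so the class never needs to be referenced directly.  The host,
# credentials and database name below are hypothetical placeholders.
if __name__ == "__main__":
    from sqlalchemy import create_engine

    engine = create_engine(
        "mysql+mysqlconnector://user:password@localhost:3306/testdb")
    with engine.connect() as conn:
        # Round-trip a trivial statement to prove the DBAPI wiring works.
        print(conn.scalar("SELECT VERSION()"))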
| mit | -4,307,490,482,341,100,500 | 29.244318 | 77 | 0.623708 | false |
ApplauseOSS/djangoevents | djangoevents/__init__.py | 1 | 1816 | import warnings
from eventsourcing.domain.model.entity import EventSourcedEntity
from eventsourcing.domain.model.entity import entity_mutator
from eventsourcing.domain.model.entity import singledispatch
from eventsourcing.domain.model.decorators import subscribe_to
from eventsourcing.domain.model.events import publish as es_publish
from eventsourcing.domain.model.events import subscribe
from eventsourcing.domain.model.events import unsubscribe
from eventsourcing.infrastructure.event_sourced_repo import EventSourcedRepository
from .domain import BaseEntity
from .domain import BaseAggregate
from .domain import DomainEvent
from .app import EventSourcingWithDjango
from .exceptions import EventSchemaError
from .schema import validate_event
from .settings import is_validation_enabled
default_app_config = 'djangoevents.apps.AppConfig'
__all__ = [
'DomainEvent',
'EventSourcedEntity',
'EventSourcedRepository',
'entity_mutator',
'singledispatch',
'publish',
    'store_event',
'subscribe',
'unsubscribe',
'subscribe_to',
'BaseEntity',
'BaseAggregate',
'EventSourcingWithDjango'
]
def publish(event):
    warnings.warn("`publish` is deprecated. Please switch to: `store_event`.", DeprecationWarning)
return es_publish(event)
def store_event(event, force_validate=False):
"""
Store an event to the service's event journal. Optionally validates event
schema if one is provided.
`force_validate` - enforces event schema validation even if configuration disables it globally.
"""
if is_validation_enabled() or force_validate:
is_valid = validate_event(event)
if not is_valid:
msg = "Event: {} does not match its schema.".format(event)
raise EventSchemaError(msg)
return es_publish(event)
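# Usage sketch (an illustrative addition, not part of the module): storing a
# domain event with schema validation forced on.  ``TodoCreated`` and its
# fields are hypothetical names invented for this example.
#
#     from djangoevents import store_event
#
#     event = TodoCreated(entity_id='1234', label='Buy milk')
#     store_event(event, force_validate=True)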
| mit | 7,118,697,928,873,848,000 | 31.428571 | 99 | 0.752203 | false |
4ON91/KnickKnacks | Boolean Algebra Notes/LogicGates.py | 1 | 10004 | import copy
import csv
import os
class Gate:
def __init__(self, Sockets):
self.Sockets = Sockets
self.Inputs = []
self.UniqueInputs = ""
def canPass(self):
return(True)
def getInput(self, I):
if( (type(I) == Input) &
(I.sym().casefold() not in self.UniqueInputs.casefold()) ):
self.UniqueInputs += I.sym()
self.Inputs.append(I.On)
class Input:
def __init__(self, Symbol, On):
self.Symbol = Symbol.upper()[:1]
self.On = On
self.Position = (int, int)
def sym(self):
if(self.On):
return(self.Symbol.upper())
else:
return(self.Symbol.lower())
def csym(self):
return(self.Symbol.casefold())
def __repr__(self):
return(self.sym())
def __invert__(self):
if(self.On):
self.On = False
else:
self.On = True
def canPass(self):
return(False)
def canContinue(self, I):
return(True)
class Output:
def canPass(self):
return(True)
def canContinue(self, I):
return(True)
class AND(Gate):
def canContinue(self, I):
self.getInput(I)
if((True in self.Inputs)&
(False not in self.Inputs)&
(len(self.Inputs) >= self.Sockets)):
return(True)
else:
return(False)
class NAND(Gate):
    def canContinue(self, I):
        self.getInput(I)
        # NAND passes unless every input is on, i.e. when at least one
        # collected input is off.
        if((False in self.Inputs)&
           (len(self.Inputs) >= self.Sockets)):
            return(True)
        else:
            return(False)
class OR(Gate):
def canContinue(self, I):
self.getInput(I)
if( (len(self.Inputs) >= self.Sockets) &
(True in self.Inputs) ):
return(True)
else:
return(False)
class NOR(Gate):
    def canContinue(self, I):
        self.getInput(I)
        # NOR passes only when none of the collected inputs is on.
        if( (len(self.Inputs) >= self.Sockets) &
            (True not in self.Inputs) ):
            return(True)
        else:
            return(False)
class INVERT:
def canPass(self):
return(True)
def canContinue(self, I):
~I
return(True)
class CircuitPath:
def __init__(self, Passable):
self.Passable = Passable
def canPass(self):
return(self.Passable)
def canContinue(self, I):
return(True)
def SwitchStateList(NumberOfSwitches):
binary_string = ""
i = 0
Switches = NumberOfSwitches
Switch_States = []
while( len(binary_string) <= NumberOfSwitches ):
binary_string = str(bin(i))[2:]
i += 1
Switch_States.append(("{:>0%s}"%str(Switches)).format(binary_string))
Switch_States.pop(-1)
return(Switch_States)
def ANDList(NumberOfSwitches):
a = list("ABCDEFGHIJKLMNOPQRSTUVWXYZ")
binary_string = ""
i = 0
Switches = NumberOfSwitches
Switch_States = []
while( len(binary_string) <= NumberOfSwitches ):
binary_string = ("{:>0%s}"%str(Switches)).format(str(bin(i))[2:])
b = ""
for x in range(0, len(binary_string)):
if(int(binary_string[x]) == 0):
b += a[x].lower()
else:
b += a[x].upper()
i += 1
Switch_States.append(b)
Switch_States.pop(-1)
return(Switch_States)
def RunCircuit(file):
OP1 = OR(1)
OP2 = OR(2)
OP3 = OR(3)
ON1 = NOR(1)
ON2 = NOR(2)
ON3 = NOR(3)
AP1 = AND(1)
AP2 = AND(2)
AP3 = AND(3)
AN1 = NAND(1)
AN2 = NAND(2)
AN3 = NAND(3)
CP0 = CircuitPath(False)
CP1 = CircuitPath(True)
I00 = Input("A", False)
I01 = Input("B", True)
I02 = Input("C", True)
OUT = Output()
INV = INVERT()
Circuit_Array = [line for line in csv.reader(open(file, "r"))]
for y in range(0, len(Circuit_Array)):
for x in range(0, len(Circuit_Array[0])):
exec("Circuit_Array[y][x] = " + Circuit_Array[y][x])
Circuit = copy.deepcopy(Circuit_Array)
Row = len(Circuit)-1
Col = len(Circuit[0])-1
Integers = []
Input_List = []
for y in range(0, len(Circuit)):
for x in range(0, len(Circuit[0])):
if(type(Circuit[y][x]) == Input):
Circuit[y][x].Position = (x,y)
Input_List.append(Circuit[y][x])
def BoolMove(Tile, Direction):
if(Tile.canPass()):
return(Direction)
else:
return("")
def GetDirection(Position, Direction):
X, Y = Position
if(Direction == "N"):
X, Y = X, Y-1
if(Direction == "E"):
X, Y = X+1, Y
if(Direction == "S"):
X, Y = X, Y+1
if(Direction == "W"):
X, Y = X-1, Y
return((X, Y))
def FindOutput(Input, CurrentPosition, Directions, Map, Length, Path, Globals):
X, Y = CurrentPosition
while(True):
if len(Directions) >= 2:
for Direction in Directions:
FindOutput(Input, (X,Y), Direction, copy.deepcopy(Map), Length, copy.deepcopy(Path), Globals)
return
Map[Y][X] = CP0
if( Globals[Y][X].canContinue(Input) ):
pass
else:
Integers.append([0, Input.sym(), Length, Path])
return
if(len(Directions) > 0):
Path.append(Directions)
X, Y = GetDirection((X,Y), Directions)
if( type(Globals[Y][X]) == Output):
Integers.append([1, Input.sym(), Length, Path])
return
Directions = ""
if(Y-1 >= 0):
Directions += BoolMove(Map[Y-1][X], "N")
if(X+1 <= Col):
Directions += BoolMove(Map[Y][X+1], "E")
if(Y+1 <= Row):
Directions += BoolMove(Map[Y+1][X], "S")
if(X-1 >= 0):
Directions += BoolMove(Map[Y][X-1], "W")
if len(Directions) == 0:
Integers.append([0, Input.sym(), Length, Path])
return
Length += 1
Input_List.sort(key = Input.csym)
for I in Input_List:
FindOutput(I, I.Position, "", copy.deepcopy(Circuit), 0, [], Circuit_Array)
return(Integers)
EmulatedCircuit = RunCircuit("T01.txt")
for line in EmulatedCircuit:
print(line)
"""
C * ( (A*B) + (a*B) )
C * ( (A*b) + a )
A * ( (B*c) + (b*C) + (a*B) ) * B
C * ( (B*C*a) + (a * (B+C)) )
A - 835
Simplifying the circuit
ab + aB + Ab
a*(B+b) + Ab
a*(1) + Ab
a + Ab
(A+aB)*(B+bA)
(A*B) + (A*bA) + (aB * B) + (aB*bA)
AB + Ab + aB + aBbA (Switches can't be on and off at the same time so we get rid of aBbA)
AB + Ab + aB + 0
AB + Ab + aB
A*(B+b) + aB (We simplify the equation now by grouping like terms)
A(B+b) + aB (and again; Switches can't be on and off at the same time so we get rid of Bb)
A + aB (and we're left with this)
ABc + ABC + aBC
AB(c+C) + aBC = (ABc + ABC + aBC, but simplified)
AB(1) + aBC (Adding a switch's opposite to itself is equal to '1')
AB + aBC (A switch multiplied by 1 is equal to itself)
B(A + aC)
abC + aBC + AbC + ABC
bC(Aa) + BC(Aa)
bC(1) + BC(1)
bC + BC
C(Bb) = bC + BC
C(1)
C
0
1
10
11
100
101
110
111
1000
1001
1010
1011
1100
1101
1110
1111
Ac + a(B+C) + AB(C+b)
Ac + aB + aC + ABC + ABb
Ac + aB + aC + ABC + A(0) ( A switch multiplied by its opposite is equal to '0')
Ac + aB + aC + ABC
A(c+BC) + aB + aC (Rule 17: A + aB = A+B)
A(c+B) + aB + aC
Ac + AB + aB + aC
Ac + B(A+a)
Ac + B + aC (Simplify until you have a set of unique variables)
AbC + AB(aC) + BC(bA)
AbC + ABa + ABC + BCb + BCA
AbC + 0*B + ABC + 0*C + ABC
AbC + ABC + ABC (ABC + ABC = ABC)
AbC + ABC
AC(b+B)
AC(1)
AC
HEM 11 46 105
835
1
ab + aB
a(b + B)
a
2
aB + AB + ab
a(B+b) + AB
a + AB
3
ab + Ab + b(A+a)
ab + Ab + b(1)
ab +Ab + b
b(Aa) + b
b(1) + b
b + b
b
4
Ab + A(B+b) + AB
Ab + AB + Ab + AB
Ab + Ab = Ab
AB + AB = AB
Ab + AB
A(Bb)
A
5
(A+AB)*(B+BA)
(AB) + (A*AB) + (AB*B) + (AB*AB)
AB + (A*A)B + A(B*B) + AB
AB + A(B) + A(B) + AB
AB
6
abC + aBC + AbC
bC(a + A) + aBC
bC(a + A) + aBC
bC(1) + aBC
bC + aBC
C(b + aB)
7
Abc + ABC + aBC
Abc + BC(A+a)
Abc + BC
8
abc + aBC + Abc
bc(a+A) + aBC
bc + aBC
9
abc + abC + Abc + AbC
ab(c+C) + Ab(c+C)
ab + Ab
b(A+a)
b
10
AbC + ABC + ABc + aBc
AbC + ABC + Bc(A+a)
AbC + ABC + Bc
AC(b+B) + Bc
AC + Bc
11
C(AB+Ab) + c(ab+aB)
ABC + AbC + abc + aBc
AC(B+b) + ac(b+B)
AC + ac
12
c(ab + AB + Ab) + A(BC + bC)
abc + ABc + Abc + ABC + AbC
abc + A(Bc + bC) + A(bc+BC)
abc + A + A
abc + A -shallow simplification
c(ab + AB + Ab) + A(BC+ bC)
abc + ABc + Abc + ABC + AbC
bc(a+A) ABc + ABC + AbC
bc + ABc + ABC + AbC
bc + AB(c+C) + AbC
bc + AB + AbC
b(c + AC) + AB
b(c+A) + AB
bc + Ab + AB
bc + A(b+B)
bc + A -deeper simplification
A + bc
AbC * aBc
11.4 106 De Morgan's laws
____ __
(Ab+C)*(a+Bc)
t1: (a+B)*c = ac + Bc
t2: a + (b+C) = a + b + C
(ac+Bc)*(a+b+C)
aac + abc + acC + aBc + Bbc + BcC
ac + abc + 0 + aBc + 0 + 0
ac + abc + aBc
ac(B+b) + ac
ac + ac
ac
__ ___
(aB)+(a+B)
(A+b)+A*b
A+Ab+b (absorption: A+AB = A, regardless of B's state)
A+b
HEM 11.4 E47 107
1
__
(ab)*(aB)
(ab)*(A+b)
Aab + abb
0 + ab
ab
2 __ __
(A+BC)+(AB+C) = a+b+C
((A+b)*c) + (a+b+C)
Ac+bc+a+b+C
(a+Ac)+(b+bc)+C
a+b+C
3
_____ __
(aB+Bc)*(Ab)
((A+b)*(b+C))*(a+B)
(Ab+AC+bb+bC)*(a+B)
Aab+ABb+AaC+ABC+abb+Bbb+abC+BbC
0+0+0+ABC+ab+0+abC+0
ABC+ab+abC (absorption: ab + abC = ab(1+C) = ab)
ABC + ab
4
__ __ __
(Ab+Bc)+(aB)
(a+B+b+C)+(A+b)
a+B+b+C+A+b
(A+a)+(B+b)+C
1 + 1 + C
(C+1) + 1
1 + 1 = 1 (X + 1 = 1 in Boolean algebra, so the whole expression is 1)
5
__ __ __
(Ab+aC)*(aBC) = a(b+c)
(a+B+A+c)*(a*(b+c))
(a+B+A+c)*(ab+ac)
aab+aac+aBb+aBc+Aab+Aac+abc+acc
ab+ac+0+aBc+0+0+abc+ac
ab+ac+aBc+abc+ac
(ac+ac)+(ab+aBc)+(ac+acb)
ac+ab+ac
ac+ab
a(b+c)
"""
| mit | -6,370,016,343,645,416,000 | 18.692913 | 113 | 0.495202 | false |
PrashntS/scikit-learn | examples/calibration/plot_compare_calibration.py | 241 | 5008 | """
========================================
Comparison of Calibration of Classifiers
========================================
Well calibrated classifiers are probabilistic classifiers for which the output
of the predict_proba method can be directly interpreted as a confidence level.
For instance a well calibrated (binary) classifier should classify the samples
such that among the samples to which it gave a predict_proba value close to
0.8, approx. 80% actually belong to the positive class.
LogisticRegression returns well calibrated predictions as it directly
optimizes log-loss. In contrast, the other methods return biased probabilities,
with different biases per method:
* GaussianNaiveBayes tends to push probabilities to 0 or 1 (note the counts in
the histograms). This is mainly because it makes the assumption that features
are conditionally independent given the class, which is not the case in this
dataset which contains 2 redundant features.
* RandomForestClassifier shows the opposite behavior: the histograms show
peaks at approx. 0.2 and 0.9 probability, while probabilities close to 0 or 1
are very rare. An explanation for this is given by Niculescu-Mizil and Caruana
[1]: "Methods such as bagging and random forests that average predictions from
a base set of models can have difficulty making predictions near 0 and 1
because variance in the underlying base models will bias predictions that
should be near zero or one away from these values. Because predictions are
restricted to the interval [0,1], errors caused by variance tend to be one-
sided near zero and one. For example, if a model should predict p = 0 for a
case, the only way bagging can achieve this is if all bagged trees predict
zero. If we add noise to the trees that bagging is averaging over, this noise
will cause some trees to predict values larger than 0 for this case, thus
moving the average prediction of the bagged ensemble away from 0. We observe
this effect most strongly with random forests because the base-level trees
trained with random forests have relatively high variance due to feature
subseting." As a result, the calibration curve shows a characteristic sigmoid
shape, indicating that the classifier could trust its "intuition" more and
typically return probabilities closer to 0 or 1.
* Support Vector Classification (SVC) shows an even more sigmoid curve than
the RandomForestClassifier, which is typical for maximum-margin methods
(compare Niculescu-Mizil and Caruana [1]), which focus on hard samples
that are close to the decision boundary (the support vectors).
.. topic:: References:
.. [1] Predicting Good Probabilities with Supervised Learning,
A. Niculescu-Mizil & R. Caruana, ICML 2005
"""
print(__doc__)
# Author: Jan Hendrik Metzen <[email protected]>
# License: BSD Style.
import numpy as np
np.random.seed(0)
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import LinearSVC
from sklearn.calibration import calibration_curve
X, y = datasets.make_classification(n_samples=100000, n_features=20,
n_informative=2, n_redundant=2)
train_samples = 100 # Samples used for training the models
X_train = X[:train_samples]
X_test = X[train_samples:]
y_train = y[:train_samples]
y_test = y[train_samples:]
# Create classifiers
lr = LogisticRegression()
gnb = GaussianNB()
svc = LinearSVC(C=1.0)
rfc = RandomForestClassifier(n_estimators=100)
###############################################################################
# Plot calibration plots
plt.figure(figsize=(10, 10))
ax1 = plt.subplot2grid((3, 1), (0, 0), rowspan=2)
ax2 = plt.subplot2grid((3, 1), (2, 0))
ax1.plot([0, 1], [0, 1], "k:", label="Perfectly calibrated")
for clf, name in [(lr, 'Logistic'),
(gnb, 'Naive Bayes'),
(svc, 'Support Vector Classification'),
(rfc, 'Random Forest')]:
clf.fit(X_train, y_train)
if hasattr(clf, "predict_proba"):
prob_pos = clf.predict_proba(X_test)[:, 1]
else: # use decision function
prob_pos = clf.decision_function(X_test)
prob_pos = \
(prob_pos - prob_pos.min()) / (prob_pos.max() - prob_pos.min())
fraction_of_positives, mean_predicted_value = \
calibration_curve(y_test, prob_pos, n_bins=10)
ax1.plot(mean_predicted_value, fraction_of_positives, "s-",
label="%s" % (name, ))
ax2.hist(prob_pos, range=(0, 1), bins=10, label=name,
histtype="step", lw=2)
ax1.set_ylabel("Fraction of positives")
ax1.set_ylim([-0.05, 1.05])
ax1.legend(loc="lower right")
ax1.set_title('Calibration plots (reliability curve)')
ax2.set_xlabel("Mean predicted value")
ax2.set_ylabel("Count")
ax2.legend(loc="upper center", ncol=2)
plt.tight_layout()
plt.show()
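###############################################################################
# Follow-up sketch (an addition, not part of the original example): the
# sigmoid-shaped curves above can be corrected with CalibratedClassifierCV,
# which wraps a base estimator and fits a calibration map on held-out folds.
# The choice of method='sigmoid' (Platt scaling) and cv=3 is illustrative.
from sklearn.calibration import CalibratedClassifierCV

calibrated_svc = CalibratedClassifierCV(LinearSVC(C=1.0), method='sigmoid',
                                        cv=3)
calibrated_svc.fit(X_train, y_train)
prob_pos_calibrated = calibrated_svc.predict_proba(X_test)[:, 1]
frac_pos, mean_pred = calibration_curve(y_test, prob_pos_calibrated, n_bins=10)
print("Calibrated SVC reliability points:")
for mp, fp in zip(mean_pred, frac_pos):
    print("  mean predicted %.2f -> fraction of positives %.2f" % (mp, fp))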
| bsd-3-clause | 8,407,790,638,423,949,000 | 40.04918 | 80 | 0.70607 | false |
Ophiuchus1312/enigma2-master | lib/python/Tools/Downloader.py | 3 | 1714 | from twisted.web import client
from twisted.internet import reactor, defer
from twisted.python import failure
class HTTPProgressDownloader(client.HTTPDownloader):
def __init__(self, url, outfile, headers=None):
client.HTTPDownloader.__init__(self, url, outfile, headers=headers, agent="STB_BOX HTTP Downloader")
self.status = None
self.progress_callback = None
self.deferred = defer.Deferred()
def noPage(self, reason):
if self.status == "304":
print reason.getErrorMessage()
client.HTTPDownloader.page(self, "")
else:
client.HTTPDownloader.noPage(self, reason)
def gotHeaders(self, headers):
if self.status == "200":
if headers.has_key("content-length"):
self.totalbytes = int(headers["content-length"][0])
else:
self.totalbytes = 0
self.currentbytes = 0.0
return client.HTTPDownloader.gotHeaders(self, headers)
def pagePart(self, packet):
if self.status == "200":
self.currentbytes += len(packet)
if self.totalbytes and self.progress_callback:
self.progress_callback(self.currentbytes, self.totalbytes)
return client.HTTPDownloader.pagePart(self, packet)
def pageEnd(self):
return client.HTTPDownloader.pageEnd(self)
class downloadWithProgress:
def __init__(self, url, outputfile, contextFactory=None, *args, **kwargs):
scheme, host, port, path = client._parse(url)
self.factory = HTTPProgressDownloader(url, outputfile, *args, **kwargs)
self.connection = reactor.connectTCP(host, port, self.factory)
def start(self):
return self.factory.deferred
def stop(self):
print "[stop]"
self.connection.disconnect()
def addProgress(self, progress_callback):
print "[addProgress]"
self.factory.progress_callback = progress_callback
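# Usage sketch (an illustrative addition, not part of the original module):
# download a file with progress reporting and stop the reactor when the
# transfer finishes or fails.  The URL and output path are placeholders.
if __name__ == "__main__":
    def report(current, total):
        print "[%d / %d bytes]" % (current, total)

    downloader = downloadWithProgress("http://example.com/file.bin", "/tmp/file.bin")
    downloader.addProgress(report)
    downloader.start().addCallbacks(lambda result: reactor.stop(),
                                    lambda failure: reactor.stop())
    reactor.run()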
| gpl-2.0 | -3,469,827,636,337,851,000 | 31.339623 | 102 | 0.736873 | false |
pbaesse/Sissens | lib/python2.7/site-packages/eventlet/support/dns/query.py | 2 | 22949 | # Copyright (C) 2003-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Talk to a DNS server."""
from __future__ import generators
import errno
import select
import socket
import struct
import sys
import time
import dns.exception
import dns.inet
import dns.name
import dns.message
import dns.rcode
import dns.rdataclass
import dns.rdatatype
from ._compat import long, string_types
if sys.version_info > (3,):
select_error = OSError
else:
select_error = select.error
# Function used to create a socket. Can be overridden if needed in special
# situations.
socket_factory = socket.socket
class UnexpectedSource(dns.exception.DNSException):
"""A DNS query response came from an unexpected address or port."""
class BadResponse(dns.exception.FormError):
"""A DNS query response does not respond to the question asked."""
class TransferError(dns.exception.DNSException):
"""A zone transfer response got a non-zero rcode."""
def __init__(self, rcode):
message = 'Zone transfer error: %s' % dns.rcode.to_text(rcode)
super(TransferError, self).__init__(message)
self.rcode = rcode
def _compute_expiration(timeout):
if timeout is None:
return None
else:
return time.time() + timeout
# This module can use either poll() or select() as the "polling backend".
#
# A backend function takes an fd, bools for readability, writability, and
# error detection, and a timeout.
def _poll_for(fd, readable, writable, error, timeout):
"""Poll polling backend."""
event_mask = 0
if readable:
event_mask |= select.POLLIN
if writable:
event_mask |= select.POLLOUT
if error:
event_mask |= select.POLLERR
pollable = select.poll()
pollable.register(fd, event_mask)
if timeout:
event_list = pollable.poll(long(timeout * 1000))
else:
event_list = pollable.poll()
return bool(event_list)
def _select_for(fd, readable, writable, error, timeout):
"""Select polling backend."""
rset, wset, xset = [], [], []
if readable:
rset = [fd]
if writable:
wset = [fd]
if error:
xset = [fd]
if timeout is None:
(rcount, wcount, xcount) = select.select(rset, wset, xset)
else:
(rcount, wcount, xcount) = select.select(rset, wset, xset, timeout)
return bool((rcount or wcount or xcount))
def _wait_for(fd, readable, writable, error, expiration):
# Use the selected polling backend to wait for any of the specified
# events. An "expiration" absolute time is converted into a relative
# timeout.
done = False
while not done:
if expiration is None:
timeout = None
else:
timeout = expiration - time.time()
if timeout <= 0.0:
raise dns.exception.Timeout
try:
if not _polling_backend(fd, readable, writable, error, timeout):
raise dns.exception.Timeout
except select_error as e:
if e.args[0] != errno.EINTR:
raise e
done = True
def _set_polling_backend(fn):
# Internal API. Do not use.
global _polling_backend
_polling_backend = fn
if hasattr(select, 'poll'):
# Prefer poll() on platforms that support it because it has no
# limits on the maximum value of a file descriptor (plus it will
# be more efficient for high values).
_polling_backend = _poll_for
else:
_polling_backend = _select_for
def _wait_for_readable(s, expiration):
_wait_for(s, True, False, True, expiration)
def _wait_for_writable(s, expiration):
_wait_for(s, False, True, True, expiration)
def _addresses_equal(af, a1, a2):
# Convert the first value of the tuple, which is a textual format
# address into binary form, so that we are not confused by different
# textual representations of the same address
try:
n1 = dns.inet.inet_pton(af, a1[0])
n2 = dns.inet.inet_pton(af, a2[0])
except dns.exception.SyntaxError:
return False
return n1 == n2 and a1[1:] == a2[1:]
def _destination_and_source(af, where, port, source, source_port):
# Apply defaults and compute destination and source tuples
# suitable for use in connect(), sendto(), or bind().
if af is None:
try:
af = dns.inet.af_for_address(where)
except Exception:
af = dns.inet.AF_INET
if af == dns.inet.AF_INET:
destination = (where, port)
if source is not None or source_port != 0:
if source is None:
source = '0.0.0.0'
source = (source, source_port)
elif af == dns.inet.AF_INET6:
destination = (where, port, 0, 0)
if source is not None or source_port != 0:
if source is None:
source = '::'
source = (source, source_port, 0, 0)
return (af, destination, source)
def send_udp(sock, what, destination, expiration=None):
"""Send a DNS message to the specified UDP socket.
*sock*, a ``socket``.
*what*, a ``binary`` or ``dns.message.Message``, the message to send.
*destination*, a destination tuple appropriate for the address family
of the socket, specifying where to send the query.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
Returns an ``(int, float)`` tuple of bytes sent and the sent time.
"""
if isinstance(what, dns.message.Message):
what = what.to_wire()
_wait_for_writable(sock, expiration)
sent_time = time.time()
n = sock.sendto(what, destination)
return (n, sent_time)
def receive_udp(sock, destination, expiration=None,
ignore_unexpected=False, one_rr_per_rrset=False,
keyring=None, request_mac=b''):
"""Read a DNS message from a UDP socket.
*sock*, a ``socket``.
*destination*, a destination tuple appropriate for the address family
of the socket, specifying where the associated query was sent.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
*ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
unexpected sources.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*keyring*, a ``dict``, the keyring to use for TSIG.
*request_mac*, a ``binary``, the MAC of the request (for TSIG).
    Raises if the message is malformed, if network errors occur, or if
there is a timeout.
Returns a ``dns.message.Message`` object.
"""
wire = b''
while 1:
_wait_for_readable(sock, expiration)
(wire, from_address) = sock.recvfrom(65535)
if _addresses_equal(sock.family, from_address, destination) or \
(dns.inet.is_multicast(destination[0]) and
from_address[1:] == destination[1:]):
break
if not ignore_unexpected:
raise UnexpectedSource('got a response from '
'%s instead of %s' % (from_address,
destination))
received_time = time.time()
r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
one_rr_per_rrset=one_rr_per_rrset)
return (r, received_time)
def udp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
ignore_unexpected=False, one_rr_per_rrset=False):
"""Return the response obtained after sending a query via UDP.
    *q*, a ``dns.message.Message``, the query to send.
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*timeout*, a ``float`` or ``None``, the number of seconds to wait before the
query times out. If ``None``, the default, wait forever.
    *port*, an ``int``, the port to send the message to. The default is 53.
*af*, an ``int``, the address family to use. The default is ``None``,
which causes the address family to use to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*ignore_unexpected*, a ``bool``. If ``True``, ignore responses from
unexpected sources.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
Returns a ``dns.message.Message``.
"""
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
s = socket_factory(af, socket.SOCK_DGRAM, 0)
received_time = None
sent_time = None
try:
expiration = _compute_expiration(timeout)
s.setblocking(0)
if source is not None:
s.bind(source)
(_, sent_time) = send_udp(s, wire, destination, expiration)
(r, received_time) = receive_udp(s, destination, expiration,
ignore_unexpected, one_rr_per_rrset,
q.keyring, q.mac)
finally:
if sent_time is None or received_time is None:
response_time = 0
else:
response_time = received_time - sent_time
s.close()
r.time = response_time
if not q.is_response(r):
raise BadResponse
return r
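# Example (an illustrative sketch, not part of the module): a simple A query
# over UDP.  The resolver address is a placeholder.
#
#     import dns.message
#     import dns.query
#
#     q = dns.message.make_query('example.com', 'A')
#     response = dns.query.udp(q, '8.8.8.8', timeout=2.0)
#     print(response.answer)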
def _net_read(sock, count, expiration):
"""Read the specified number of bytes from sock. Keep trying until we
either get the desired amount, or we hit EOF.
A Timeout exception will be raised if the operation is not completed
by the expiration time.
"""
s = b''
while count > 0:
_wait_for_readable(sock, expiration)
n = sock.recv(count)
if n == b'':
raise EOFError
count = count - len(n)
s = s + n
return s
def _net_write(sock, data, expiration):
"""Write the specified data to the socket.
A Timeout exception will be raised if the operation is not completed
by the expiration time.
"""
current = 0
l = len(data)
while current < l:
_wait_for_writable(sock, expiration)
current += sock.send(data[current:])
def send_tcp(sock, what, expiration=None):
"""Send a DNS message to the specified TCP socket.
*sock*, a ``socket``.
*what*, a ``binary`` or ``dns.message.Message``, the message to send.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
Returns an ``(int, float)`` tuple of bytes sent and the sent time.
"""
if isinstance(what, dns.message.Message):
what = what.to_wire()
l = len(what)
# copying the wire into tcpmsg is inefficient, but lets us
# avoid writev() or doing a short write that would get pushed
# onto the net
tcpmsg = struct.pack("!H", l) + what
_wait_for_writable(sock, expiration)
sent_time = time.time()
_net_write(sock, tcpmsg, expiration)
return (len(tcpmsg), sent_time)
def receive_tcp(sock, expiration=None, one_rr_per_rrset=False,
keyring=None, request_mac=b''):
"""Read a DNS message from a TCP socket.
*sock*, a ``socket``.
*expiration*, a ``float`` or ``None``, the absolute time at which
a timeout exception should be raised. If ``None``, no timeout will
occur.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
*keyring*, a ``dict``, the keyring to use for TSIG.
*request_mac*, a ``binary``, the MAC of the request (for TSIG).
    Raises if the message is malformed, if network errors occur, or if
there is a timeout.
Returns a ``dns.message.Message`` object.
"""
ldata = _net_read(sock, 2, expiration)
(l,) = struct.unpack("!H", ldata)
wire = _net_read(sock, l, expiration)
received_time = time.time()
r = dns.message.from_wire(wire, keyring=keyring, request_mac=request_mac,
one_rr_per_rrset=one_rr_per_rrset)
return (r, received_time)
def _connect(s, address):
try:
s.connect(address)
except socket.error:
(ty, v) = sys.exc_info()[:2]
if hasattr(v, 'errno'):
v_err = v.errno
else:
v_err = v[0]
if v_err not in [errno.EINPROGRESS, errno.EWOULDBLOCK, errno.EALREADY]:
raise v
def tcp(q, where, timeout=None, port=53, af=None, source=None, source_port=0,
one_rr_per_rrset=False):
"""Return the response obtained after sending a query via TCP.
    *q*, a ``dns.message.Message``, the query to send.
*where*, a ``text`` containing an IPv4 or IPv6 address, where
to send the message.
*timeout*, a ``float`` or ``None``, the number of seconds to wait before the
query times out. If ``None``, the default, wait forever.
    *port*, an ``int``, the port to send the message to. The default is 53.
*af*, an ``int``, the address family to use. The default is ``None``,
which causes the address family to use to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*one_rr_per_rrset*, a ``bool``. If ``True``, put each RR into its own
RRset.
Returns a ``dns.message.Message``.
"""
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
s = socket_factory(af, socket.SOCK_STREAM, 0)
begin_time = None
received_time = None
try:
expiration = _compute_expiration(timeout)
s.setblocking(0)
begin_time = time.time()
if source is not None:
s.bind(source)
_connect(s, destination)
send_tcp(s, wire, expiration)
(r, received_time) = receive_tcp(s, expiration, one_rr_per_rrset,
q.keyring, q.mac)
finally:
if begin_time is None or received_time is None:
response_time = 0
else:
response_time = received_time - begin_time
s.close()
r.time = response_time
if not q.is_response(r):
raise BadResponse
return r
def xfr(where, zone, rdtype=dns.rdatatype.AXFR, rdclass=dns.rdataclass.IN,
timeout=None, port=53, keyring=None, keyname=None, relativize=True,
af=None, lifetime=None, source=None, source_port=0, serial=0,
use_udp=False, keyalgorithm=dns.tsig.default_algorithm):
"""Return a generator for the responses to a zone transfer.
    *where*, a ``text`` containing an IPv4 or IPv6 address, where
    to send the message.
*zone*, a ``dns.name.Name`` or ``text``, the name of the zone to transfer.
*rdtype*, an ``int`` or ``text``, the type of zone transfer. The
default is ``dns.rdatatype.AXFR``. ``dns.rdatatype.IXFR`` can be
used to do an incremental transfer instead.
*rdclass*, an ``int`` or ``text``, the class of the zone transfer.
The default is ``dns.rdataclass.IN``.
*timeout*, a ``float``, the number of seconds to wait for each
response message. If None, the default, wait forever.
    *port*, an ``int``, the port to send the message to. The default is 53.
*keyring*, a ``dict``, the keyring to use for TSIG.
*keyname*, a ``dns.name.Name`` or ``text``, the name of the TSIG
key to use.
*relativize*, a ``bool``. If ``True``, all names in the zone will be
relativized to the zone origin. It is essential that the
relativize setting matches the one specified to
``dns.zone.from_xfr()`` if using this generator to make a zone.
*af*, an ``int``, the address family to use. The default is ``None``,
    which causes the address family to be inferred from the form of
*where*. If the inference attempt fails, AF_INET is used. This
parameter is historical; you need never set it.
*lifetime*, a ``float``, the total number of seconds to spend
doing the transfer. If ``None``, the default, then there is no
limit on the time the transfer may take.
*source*, a ``text`` containing an IPv4 or IPv6 address, specifying
the source address. The default is the wildcard address.
*source_port*, an ``int``, the port from which to send the message.
The default is 0.
*serial*, an ``int``, the SOA serial number to use as the base for
an IXFR diff sequence (only meaningful if *rdtype* is
``dns.rdatatype.IXFR``).
*use_udp*, a ``bool``. If ``True``, use UDP (only meaningful for IXFR).
*keyalgorithm*, a ``dns.name.Name`` or ``text``, the TSIG algorithm to use.
Raises on errors, and so does the generator.
Returns a generator of ``dns.message.Message`` objects.
"""
if isinstance(zone, string_types):
zone = dns.name.from_text(zone)
if isinstance(rdtype, string_types):
rdtype = dns.rdatatype.from_text(rdtype)
q = dns.message.make_query(zone, rdtype, rdclass)
if rdtype == dns.rdatatype.IXFR:
rrset = dns.rrset.from_text(zone, 0, 'IN', 'SOA',
'. . %u 0 0 0 0' % serial)
q.authority.append(rrset)
if keyring is not None:
q.use_tsig(keyring, keyname, algorithm=keyalgorithm)
wire = q.to_wire()
(af, destination, source) = _destination_and_source(af, where, port,
source, source_port)
if use_udp:
if rdtype != dns.rdatatype.IXFR:
raise ValueError('cannot do a UDP AXFR')
s = socket_factory(af, socket.SOCK_DGRAM, 0)
else:
s = socket_factory(af, socket.SOCK_STREAM, 0)
s.setblocking(0)
if source is not None:
s.bind(source)
expiration = _compute_expiration(lifetime)
_connect(s, destination)
l = len(wire)
if use_udp:
_wait_for_writable(s, expiration)
s.send(wire)
else:
tcpmsg = struct.pack("!H", l) + wire
_net_write(s, tcpmsg, expiration)
done = False
delete_mode = True
expecting_SOA = False
soa_rrset = None
if relativize:
origin = zone
oname = dns.name.empty
else:
origin = None
oname = zone
tsig_ctx = None
first = True
while not done:
mexpiration = _compute_expiration(timeout)
if mexpiration is None or mexpiration > expiration:
mexpiration = expiration
if use_udp:
_wait_for_readable(s, expiration)
(wire, from_address) = s.recvfrom(65535)
else:
ldata = _net_read(s, 2, mexpiration)
(l,) = struct.unpack("!H", ldata)
wire = _net_read(s, l, mexpiration)
is_ixfr = (rdtype == dns.rdatatype.IXFR)
r = dns.message.from_wire(wire, keyring=q.keyring, request_mac=q.mac,
xfr=True, origin=origin, tsig_ctx=tsig_ctx,
multi=True, first=first,
one_rr_per_rrset=is_ixfr)
rcode = r.rcode()
if rcode != dns.rcode.NOERROR:
raise TransferError(rcode)
tsig_ctx = r.tsig_ctx
first = False
answer_index = 0
if soa_rrset is None:
if not r.answer or r.answer[0].name != oname:
raise dns.exception.FormError(
"No answer or RRset not for qname")
rrset = r.answer[0]
if rrset.rdtype != dns.rdatatype.SOA:
raise dns.exception.FormError("first RRset is not an SOA")
answer_index = 1
soa_rrset = rrset.copy()
if rdtype == dns.rdatatype.IXFR:
if soa_rrset[0].serial <= serial:
#
# We're already up-to-date.
#
done = True
else:
expecting_SOA = True
#
# Process SOAs in the answer section (other than the initial
# SOA in the first message).
#
for rrset in r.answer[answer_index:]:
if done:
raise dns.exception.FormError("answers after final SOA")
if rrset.rdtype == dns.rdatatype.SOA and rrset.name == oname:
if expecting_SOA:
if rrset[0].serial != serial:
raise dns.exception.FormError(
"IXFR base serial mismatch")
expecting_SOA = False
elif rdtype == dns.rdatatype.IXFR:
delete_mode = not delete_mode
#
# If this SOA RRset is equal to the first we saw then we're
# finished. If this is an IXFR we also check that we're seeing
# the record in the expected part of the response.
#
if rrset == soa_rrset and \
(rdtype == dns.rdatatype.AXFR or
(rdtype == dns.rdatatype.IXFR and delete_mode)):
done = True
elif expecting_SOA:
#
# We made an IXFR request and are expecting another
# SOA RR, but saw something else, so this must be an
# AXFR response.
#
rdtype = dns.rdatatype.AXFR
expecting_SOA = False
if done and q.keyring and not r.had_tsig:
raise dns.exception.FormError("missing TSIG")
yield r
s.close()
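if __name__ == '__main__':
    # Demonstration sketch (an addition for illustration): perform an AXFR and
    # assemble a zone from the generator returned by xfr().  The server
    # address and zone name are placeholders; the target server must allow
    # zone transfers for this to succeed.
    import dns.zone
    z = dns.zone.from_xfr(xfr('10.0.0.1', 'example.com'))
    for name in sorted(z.nodes.keys()):
        print(z[name].to_text(name))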
| gpl-3.0 | -1,404,223,086,796,775,000 | 33.406297 | 80 | 0.601377 | false |
terranodo/geonode | geonode/security/tests.py | 13 | 19666 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from tastypie.test import ResourceTestCase
from django.contrib.auth import get_user_model
from guardian.shortcuts import get_anonymous_user, assign_perm, remove_perm
from geonode.base.populate_test_data import create_models, all_public
from geonode.maps.tests_populate_maplayers import create_maplayers
from geonode.people.models import Profile
from geonode.layers.models import Layer
from geonode.maps.models import Map
from geonode.layers.populate_layers_data import create_layer_data
from geonode.groups.models import Group
class BulkPermissionsTests(ResourceTestCase):
fixtures = ['initial_data.json', 'bobby']
def setUp(self):
super(BulkPermissionsTests, self).setUp()
self.user = 'admin'
self.passwd = 'admin'
self.list_url = reverse(
'api_dispatch_list',
kwargs={
'api_name': 'api',
'resource_name': 'layers'})
self.bulk_perms_url = reverse('bulk_permissions')
create_models(type='layer')
all_public()
self.perm_spec = {
"users": {"admin": ["view_resourcebase"]}, "groups": {}}
def test_set_bulk_permissions(self):
"""Test that after restrict view permissions on two layers
bobby is unable to see them"""
layers = Layer.objects.all()[:2].values_list('id', flat=True)
layers_id = map(lambda x: str(x), layers)
self.client.login(username='admin', password='admin')
resp = self.client.get(self.list_url)
self.assertEquals(len(self.deserialize(resp)['objects']), 8)
data = {
'permissions': json.dumps(self.perm_spec),
'resources': layers_id
}
resp = self.client.post(self.bulk_perms_url, data)
self.assertHttpOK(resp)
self.client.logout()
self.client.login(username='bobby', password='bob')
resp = self.client.get(self.list_url)
self.assertEquals(len(self.deserialize(resp)['objects']), 6)
def test_bobby_cannot_set_all(self):
"""Test that Bobby can set the permissions only only on the ones
for which he has the right"""
layer = Layer.objects.all()[0]
self.client.login(username='admin', password='admin')
# give bobby the right to change the layer permissions
assign_perm('change_resourcebase', Profile.objects.get(username='bobby'), layer.get_self_resource())
self.client.logout()
self.client.login(username='bobby', password='bob')
layer2 = Layer.objects.all()[1]
data = {
'permissions': json.dumps({"users": {"bobby": ["view_resourcebase"]}, "groups": {}}),
'resources': [layer.id, layer2.id]
}
resp = self.client.post(self.bulk_perms_url, data)
self.assertTrue(layer2.title in json.loads(resp.content)['not_changed'])
class PermissionsTest(TestCase):
"""Tests GeoNode permissions
"""
fixtures = ['initial_data.json', 'bobby']
perm_spec = {
"users": {
"admin": [
"change_resourcebase",
"change_resourcebase_permissions",
"view_resourcebase"]},
"groups": {}}
# Permissions Tests
# Users
# - admin (pk=2)
# - bobby (pk=1)
def setUp(self):
self.user = 'admin'
self.passwd = 'admin'
create_models(type='layer')
create_layer_data()
self.anonymous_user = get_anonymous_user()
def test_layer_set_default_permissions(self):
"""Verify that Layer.set_default_permissions is behaving as expected
"""
# Get a Layer object to work with
layer = Layer.objects.all()[0]
# Set the default permissions
layer.set_default_permissions()
# Test that the anonymous user can read
self.assertTrue(
self.anonymous_user.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# Test that the owner user can read
self.assertTrue(
layer.owner.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# Test that the owner user can download it
self.assertTrue(
layer.owner.has_perm(
'download_resourcebase',
layer.get_self_resource()))
# Test that the owner user can edit metadata
self.assertTrue(
layer.owner.has_perm(
'change_resourcebase_metadata',
layer.get_self_resource()))
# Test that the owner user can edit data if is vector type
if layer.storeType == 'dataStore':
self.assertTrue(
layer.owner.has_perm(
'change_layer_data',
layer))
# Test that the owner user can edit styles
self.assertTrue(
layer.owner.has_perm(
'change_layer_style',
layer))
# Test that the owner can manage the layer
self.assertTrue(
layer.owner.has_perm(
'change_resourcebase',
layer.get_self_resource()))
self.assertTrue(
layer.owner.has_perm(
'delete_resourcebase',
layer.get_self_resource()))
self.assertTrue(
layer.owner.has_perm(
'change_resourcebase_permissions',
layer.get_self_resource()))
self.assertTrue(
layer.owner.has_perm(
'publish_resourcebase',
layer.get_self_resource()))
def test_set_layer_permissions(self):
"""Verify that the set_layer_permissions view is behaving as expected
"""
# Get a layer to work with
layer = Layer.objects.all()[0]
# FIXME Test a comprehensive set of permissions specifications
# Set the Permissions
layer.set_permissions(self.perm_spec)
        # Test that the permissions for the anonymous user are set
self.assertFalse(
self.anonymous_user.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# Test that previous permissions for users other than ones specified in
# the perm_spec (and the layers owner) were removed
current_perms = layer.get_all_level_info()
self.assertEqual(len(current_perms['users'].keys()), 2)
# Test that the User permissions specified in the perm_spec were
# applied properly
for username, perm in self.perm_spec['users'].items():
user = get_user_model().objects.get(username=username)
self.assertTrue(user.has_perm(perm, layer.get_self_resource()))
def test_ajax_layer_permissions(self):
"""Verify that the ajax_layer_permissions view is behaving as expected
"""
# Setup some layer names to work with
valid_layer_typename = Layer.objects.all()[0].id
invalid_layer_id = 9999999
        # Test that an invalid layer id is handled properly
response = self.client.post(
reverse(
'resource_permissions', args=(
invalid_layer_id,)), data=json.dumps(
self.perm_spec), content_type="application/json")
self.assertEquals(response.status_code, 404)
# Test that GET returns permissions
response = self.client.get(
reverse(
'resource_permissions',
args=(
valid_layer_typename,
)))
assert('permissions' in response.content)
# Test that a user is required to have maps.change_layer_permissions
# First test un-authenticated
response = self.client.post(
reverse(
'resource_permissions', args=(
valid_layer_typename,)), data=json.dumps(
self.perm_spec), content_type="application/json")
self.assertEquals(response.status_code, 401)
# Next Test with a user that does NOT have the proper perms
logged_in = self.client.login(username='bobby', password='bob')
self.assertEquals(logged_in, True)
response = self.client.post(
reverse(
'resource_permissions', args=(
valid_layer_typename,)), data=json.dumps(
self.perm_spec), content_type="application/json")
self.assertEquals(response.status_code, 401)
# Login as a user with the proper permission and test the endpoint
logged_in = self.client.login(username='admin', password='admin')
self.assertEquals(logged_in, True)
response = self.client.post(
reverse(
'resource_permissions', args=(
valid_layer_typename,)), data=json.dumps(
self.perm_spec), content_type="application/json")
# Test that the method returns 200
self.assertEquals(response.status_code, 200)
# Test that the permissions specification is applied
# Should we do this here, or assume the tests in
# test_set_layer_permissions will handle for that?
def test_perms_info(self):
""" Verify that the perms_info view is behaving as expected
"""
# Test with a Layer object
layer = Layer.objects.all()[0]
layer.set_default_permissions()
# Test that the anonymous user can read
self.assertTrue(
self.anonymous_user.has_perm(
'view_resourcebase',
layer.get_self_resource()))
# Test that layer owner can edit layer
self.assertTrue(
layer.owner.has_perm(
'change_resourcebase',
layer.get_self_resource()))
# TODO Much more to do here once jj0hns0n understands the ACL system
# better
# Test with a Map object
# TODO
# now we test permissions, first on an authenticated user and then on the
# anonymous user
# 1. view_resourcebase
# 2. change_resourcebase
# 3. delete_resourcebase
# 4. change_resourcebase_metadata
# 5. change_resourcebase_permissions
# 6. change_layer_data
# 7. change_layer_style
def test_not_superuser_permissions(self):
# grab bobby
bob = get_user_model().objects.get(username='bobby')
# grab a layer
layer = Layer.objects.all()[0]
layer.set_default_permissions()
# verify bobby has view/change permissions on it but not manage
self.assertFalse(
bob.has_perm(
'change_resourcebase_permissions',
layer.get_self_resource()))
self.assertTrue(self.client.login(username='bobby', password='bob'))
# 1. view_resourcebase
# 1.1 has view_resourcebase: verify that bobby can access the layer
# detail page
self.assertTrue(
bob.has_perm(
'view_resourcebase',
layer.get_self_resource()))
response = self.client.get(reverse('layer_detail', args=(layer.typename,)))
self.assertEquals(response.status_code, 200)
# 1.2 has not view_resourcebase: verify that bobby can not access the
# layer detail page
remove_perm('view_resourcebase', bob, layer.get_self_resource())
anonymous_group = Group.objects.get(name='anonymous')
remove_perm('view_resourcebase', anonymous_group, layer.get_self_resource())
response = self.client.get(reverse('layer_detail', args=(layer.typename,)))
self.assertEquals(response.status_code, 401)
# 2. change_resourcebase
# 2.1 has not change_resourcebase: verify that bobby cannot access the
# layer replace page
response = self.client.get(reverse('layer_replace', args=(layer.typename,)))
self.assertEquals(response.status_code, 401)
# 2.2 has change_resourcebase: verify that bobby can access the layer
# replace page
assign_perm('change_resourcebase', bob, layer.get_self_resource())
self.assertTrue(
bob.has_perm(
'change_resourcebase',
layer.get_self_resource()))
response = self.client.get(reverse('layer_replace', args=(layer.typename,)))
self.assertEquals(response.status_code, 200)
# 3. delete_resourcebase
# 3.1 has not delete_resourcebase: verify that bobby cannot access the
# layer delete page
response = self.client.get(reverse('layer_remove', args=(layer.typename,)))
self.assertEquals(response.status_code, 401)
# 3.2 has delete_resourcebase: verify that bobby can access the layer
# delete page
assign_perm('delete_resourcebase', bob, layer.get_self_resource())
self.assertTrue(
bob.has_perm(
'delete_resourcebase',
layer.get_self_resource()))
response = self.client.get(reverse('layer_remove', args=(layer.typename,)))
self.assertEquals(response.status_code, 200)
# 4. change_resourcebase_metadata
# 4.1 has not change_resourcebase_metadata: verify that bobby cannot
# access the layer metadata page
response = self.client.get(reverse('layer_metadata', args=(layer.typename,)))
self.assertEquals(response.status_code, 401)
# 4.2 has delete_resourcebase: verify that bobby can access the layer
# delete page
assign_perm('change_resourcebase_metadata', bob, layer.get_self_resource())
self.assertTrue(
bob.has_perm(
'change_resourcebase_metadata',
layer.get_self_resource()))
response = self.client.get(reverse('layer_metadata', args=(layer.typename,)))
self.assertEquals(response.status_code, 200)
# 5. change_resourcebase_permissions
# should be impossible for the user without change_resourcebase_permissions
# to change permissions as the permission form is not available in the
# layer detail page?
# 6. change_layer_data
# must be done in integration test sending a WFS-T request with CURL
# 7. change_layer_style
# 7.1 has not change_layer_style: verify that bobby cannot access
# the layer style page
response = self.client.get(reverse('layer_style_manage', args=(layer.typename,)))
self.assertEquals(response.status_code, 401)
# 7.2 has change_layer_style: verify that bobby can access the
# change layer style page
assign_perm('change_layer_style', bob, layer)
self.assertTrue(
bob.has_perm(
'change_layer_style',
layer))
response = self.client.get(reverse('layer_style_manage', args=(layer.typename,)))
self.assertEquals(response.status_code, 200)
    def test_anonymous_permissions(self):
# grab a layer
layer = Layer.objects.all()[0]
layer.set_default_permissions()
# 1. view_resourcebase
# 1.1 has view_resourcebase: verify that anonymous user can access
# the layer detail page
self.assertTrue(
self.anonymous_user.has_perm(
'view_resourcebase',
layer.get_self_resource()))
response = self.client.get(reverse('layer_detail', args=(layer.typename,)))
self.assertEquals(response.status_code, 200)
# 1.2 has not view_resourcebase: verify that anonymous user can not
# access the layer detail page
remove_perm('view_resourcebase', self.anonymous_user, layer.get_self_resource())
anonymous_group = Group.objects.get(name='anonymous')
remove_perm('view_resourcebase', anonymous_group, layer.get_self_resource())
response = self.client.get(reverse('layer_detail', args=(layer.typename,)))
self.assertEquals(response.status_code, 302)
# 2. change_resourcebase
# 2.1 has not change_resourcebase: verify that anonymous user cannot
# access the layer replace page but redirected to login
response = self.client.get(reverse('layer_replace', args=(layer.typename,)))
self.assertEquals(response.status_code, 302)
# 3. delete_resourcebase
# 3.1 has not delete_resourcebase: verify that anonymous user cannot
# access the layer delete page but redirected to login
response = self.client.get(reverse('layer_remove', args=(layer.typename,)))
self.assertEquals(response.status_code, 302)
# 4. change_resourcebase_metadata
# 4.1 has not change_resourcebase_metadata: verify that anonymous user
# cannot access the layer metadata page but redirected to login
response = self.client.get(reverse('layer_metadata', args=(layer.typename,)))
self.assertEquals(response.status_code, 302)
        # 5 N/A? 6 is an integration test...
# 7. change_layer_style
# 7.1 has not change_layer_style: verify that anonymous user cannot access
# the layer style page but redirected to login
response = self.client.get(reverse('layer_style_manage', args=(layer.typename,)))
self.assertEquals(response.status_code, 302)
def test_map_download(self):
"""Test the correct permissions on layers on map download"""
create_models(type='map')
create_maplayers()
# Get a Map
the_map = Map.objects.get(title='GeoNode Default Map')
        # Get a MapLayer, mark it as a local overlay (not a background layer),
        # and make it the only layer in the map
map_layer = the_map.layer_set.get(name='geonode:CA')
map_layer.local = True
map_layer.group = 'overlay'
map_layer.save()
the_map.layer_set.all().delete()
the_map.layer_set.add(map_layer)
# Get the Layer and set the permissions for bobby to it and the map
bobby = Profile.objects.get(username='bobby')
the_layer = Layer.objects.get(typename='geonode:CA')
remove_perm('download_resourcebase', bobby, the_layer.get_self_resource())
remove_perm('download_resourcebase', Group.objects.get(name='anonymous'),
the_layer.get_self_resource())
assign_perm('view_resourcebase', bobby, the_layer.get_self_resource())
assign_perm('download_resourcebase', bobby, the_map.get_self_resource())
self.client.login(username='bobby', password='bob')
response = self.client.get(reverse('map_download', args=(the_map.id,)))
self.assertTrue('Could not find downloadable layers for this map' in response.content)
| gpl-3.0 | 7,167,866,050,882,535,000 | 38.649194 | 108 | 0.613902 | false |
keitaroyam/yamtbx | dxtbx_formats/FormatEMD.py | 1 | 7738 | # Class for reading .emd file by Velox.
# This code was written based on FormatSER.py from https://github.com/cctbx/dxtbx/blob/master/format/FormatSER.py
from __future__ import absolute_import, division, print_function
import struct
import h5py
import numpy
import os
import json
from scitbx.array_family import flex
from dxtbx.format.Format import Format
from dxtbx.format.FormatMultiImage import FormatMultiImage
def get_metadata(metadata):
mds = []
for i in range(metadata.shape[1]):
metadata_array = metadata[:, i].T
mdata_string = metadata_array.tostring().decode("utf-8")
mds.append(json.loads(mdata_string.rstrip('\x00')))
return mds
# get_metadata()
def analyse_angle(metadata):
alphas = []
for i, md in enumerate(metadata):
alpha = numpy.rad2deg(float(md["Stage"]["AlphaTilt"]))
alphas.append(alpha)
if len(alphas) < 2:
return [0,0], 0.
d_alphas = numpy.diff(alphas)
    # Tukey fence: tilt steps more than 1.5*IQR outside the quartiles are
    # treated as outliers when estimating the typical step size.
    q25, q50, q75 = numpy.percentile(d_alphas, [25, 50, 75])
    iqr = q75-q25
    iqrc = 1.5
    lowlim, highlim = q25 - iqrc*iqr, q75 + iqrc*iqr
d_alphas2 = d_alphas[numpy.where(numpy.logical_and(d_alphas>lowlim, d_alphas<highlim))] # outlier rejected
d_alpha_z = abs(d_alphas-numpy.mean(d_alphas2))/numpy.std(d_alphas2)
valid_range = [0, len(metadata)-1]
for i in range(len(metadata)-1):
if d_alpha_z[i] < 3: break
valid_range[0] = i+1
for i in reversed(range(len(metadata)-1)):
if d_alpha_z[i] < 3: break
valid_range[1] = i
if valid_range[0] > valid_range[1]:
valid_range = [0, len(metadata)-1] # reset
mean_alpha_step = (alphas[valid_range[1]] - alphas[valid_range[0]])/(valid_range[1]-valid_range[0])
return valid_range, mean_alpha_step
# analyse_angle()
class FormatEMD(FormatMultiImage, Format):
def __init__(self, image_file, **kwargs):
from dxtbx import IncorrectFormatError
if not self.understand(image_file):
raise IncorrectFormatError(self, image_file)
FormatMultiImage.__init__(self, **kwargs)
Format.__init__(self, image_file, **kwargs)
@staticmethod
def understand(image_file):
try:
h = h5py.File(image_file, "r")
except IOError:
return False
if not "/Data/Image" in h:
return False
keys = list(h["/Data/Image"].keys())
if len(keys) > 1: return False
d = h["/Data/Image"][keys[0]]
if "Data" in d and "Metadata" in d and len(d["Data"].shape) == 3:
return True
return False
@staticmethod
def _read_metadata(image_file):
h = h5py.File(image_file, "r")
ret = {}
image_path = h["/Data/Image"]
assert len(image_path.keys()) == 1
k = list(image_path.keys())[0]
ret["image_file"] = image_file
ret["file_handle"] = h
ret["data_path"] = "/Data/Image/%s/Data" % k
ret["metadata_path"] = "/Data/Image/%s/Metadata" % k
metadata = get_metadata(h[ret["metadata_path"]])
valid_range, mean_alpha_step = analyse_angle(metadata)
data = h[ret["data_path"]]
ret["n_frames"] = data.shape[2]
ret["valid_range"] = valid_range
ret["mean_alpha_step"] = mean_alpha_step
ret["width"], ret["height"] = data.shape[:2]
ret["binning"] = int(metadata[0]["BinaryResult"]["ImageSize"]["width"])//ret["width"]
h, m0, e, c = 6.62607004e-34, 9.10938356e-31, 1.6021766208e-19, 299792458.0
voltage = float(metadata[0]["Optics"]["AccelerationVoltage"])
ret["wavelength"] = h/numpy.sqrt(2*m0*e*voltage*(1.+e*voltage/2./m0/c**2)) * 1.e10
return ret
def _start(self):
"""Open the image file, read useful metadata into an internal dictionary
self._header_dictionary"""
self._header_dictionary = self._read_metadata(self._image_file)
return
def _goniometer(self):
"""Dummy goniometer, 'vertical' as the images are viewed. Not completely
sure about the handedness yet"""
if self._header_dictionary["mean_alpha_step"] > 0: # XXX is this really OK??
return self._goniometer_factory.known_axis((0, -1, 0))
else:
return self._goniometer_factory.known_axis((0, 1, 0))
def _detector(self):
"""Dummy detector"""
image_size = (self._header_dictionary["width"], self._header_dictionary["height"])
binning = self._header_dictionary["binning"]
pixel_size = 0.014*binning, 0.014*binning
distance = 2000
trusted_range = (-4, 65535)
beam_centre = [(p * i) / 2 for p, i in zip(pixel_size, image_size)]
d = self._detector_factory.simple(
"PAD",
distance,
beam_centre,
"+x",
"-y",
pixel_size,
image_size,
trusted_range,
)
# Not sure what the gain is
# for p in d: p.set_gain(8)
return d
def _beam(self):
return self._beam_factory.make_polarized_beam(
sample_to_source=(0.0, 0.0, 1.0),
wavelength=self._header_dictionary["wavelength"],
polarization=(0, 1, 0),
polarization_fraction=0.5,
)
def _scan(self):
"""Dummy scan for this stack"""
vr = self._header_dictionary["valid_range"]
image_range = (vr[0]+1, vr[1]+1)
print("Recommended image_raneg=", image_range)
image_range = (1, self._header_dictionary["n_frames"])
exposure_times = 0.0
nframes = self._header_dictionary["n_frames"] #vr[1]-vr[0]+1
#nframes = vr[1]-vr[0]+1
osc_step = abs(self._header_dictionary["mean_alpha_step"])
oscillation = (osc_step*(vr[0]-1), osc_step)
# FIXME we do actually have acquisition times, might as well use them
epochs = [0] * nframes
#print(len(epochs), self.get_num_images())
return self._scan_factory.make_scan(
image_range, exposure_times, oscillation, epochs, deg=True
)
def get_num_images(self):
#h = self._header_dictionary["file_handle"]
#data_path = self._header_dictionary["data_path"]
#return h[data_path].shape[2]
#vr = self._header_dictionary["valid_range"]
return self._header_dictionary["n_frames"] # vr[1] - vr[0] + 1
#return vr[1] - vr[0] + 1
# This is still required for dials_regression/test.py
def get_detectorbase(self):
pass
def get_goniometer(self, index=None):
return Format.get_goniometer(self)
def get_detector(self, index=None):
return Format.get_detector(self)
def get_beam(self, index=None):
return Format.get_beam(self)
def get_scan(self, index=None):
if index == None:
return Format.get_scan(self)
else:
scan = Format.get_scan(self)
return scan[index]
def get_image_file(self, index=None):
return Format.get_image_file(self)
def get_raw_data(self, index):
#print(self._header_dictionary["valid_range"][0])
#index += self._header_dictionary["valid_range"][0]
h = h5py.File(self._header_dictionary["image_file"], "r")
data_path = self._header_dictionary["data_path"]
        raw_data = h[data_path][:,:,index].astype(numpy.int32) # flex.int does not support int16
offset_key = "DXTBX_EMD_OFFSET"
if os.environ.get(offset_key):
print("DEBUG:: adding %s for %d"%(os.environ[offset_key], index))
raw_data += int(os.environ[offset_key])
return flex.int(raw_data)
| bsd-3-clause | -5,182,075,166,759,346,000 | 31.512605 | 113 | 0.586069 | false |
ec-geolink/glharvest | glharvest/tests/test_scenarios.py | 1 | 6841 | """test_scenarios.py
End-end-end tests for the Harvester.
"""
import sys
import os
import RDF
from glharvest import jobs, registry, void
def test_can_update_a_provider_with_a_new_resource(repository):
"""This test tests the case where a provider gives informationa about one
resource at time t0 then, at time t1, their data dump no longer contains
information about the old resource. In this case, we keep the previous
knowledge and add the new knowledge because we don't allow providers to
completely remove a resource.
"""
# Setup
repository.clear()
provider = 'test'
infile_fmt = 'turtle'
base_uri = 'http://example.org/test/'
parser = RDF.TurtleParser()
state_t0 = """
@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some property' .
"""
state_t1 = """@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:B a test:Thing ;
test:someProperty 'some other property' .
"""
# State t0
for statement in parser.parse_string_as_stream(state_t0, base_uri=base_uri):
print statement.subject
repository.delete_triples_about(statement.subject, context=provider)
repository.import_from_string(state_t0, context=provider, fmt=infile_fmt)
assert repository.size() == 2
# State t1
for statement in parser.parse_string_as_stream(state_t1, base_uri=base_uri):
print statement.subject
repository.delete_triples_about(statement.subject, context=provider)
repository.import_from_string(state_t1, context=provider, fmt=infile_fmt)
assert repository.size() == 4
def test_provide_can_change_knowledge_about_a_previous_resource(repository):
"""This test tests the case where a provider wishes to change the knowledge
about a resource. They do this by making an update datadump with at least
one statement about that resource.
"""
# Setup
repository.clear()
provider = 'test'
infile_fmt = 'turtle'
base_uri = 'http://example.org/test/'
parser = RDF.TurtleParser()
state_t0 = """
@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some property' ;
test:anotherProperty 'just another thing' .
"""
state_t1 = """@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some other property' .
"""
# State t0
for statement in parser.parse_string_as_stream(state_t0, base_uri=base_uri):
repository.delete_triples_about(statement.subject, context=provider)
repository.import_from_string(state_t0, context=provider, fmt=infile_fmt)
assert repository.size() == 3
# State t1
for statement in parser.parse_string_as_stream(state_t1, base_uri=base_uri):
repository.delete_triples_about(statement.subject, context=provider)
assert repository.size() == 0
repository.import_from_string(state_t1, context=provider, fmt=infile_fmt)
assert repository.size() == 2
def test_can_handle_multiple_duplicate_updates(repository):
"""This tests the case where a provider's datadump is updated but contains
the same information as the datadump at a previous time. We'd assume the
result would be that all statements would be first removed then just added
again so the size would go from N to 0 back to N.
"""
# Setup
repository.clear()
provider = 'test'
infile_fmt = 'turtle'
base_uri = 'http://example.org/test/'
parser = RDF.TurtleParser()
state_t0 = """
@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some property' ;
test:anotherProperty 'just another thing' .
"""
state_t1 = """
@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some property' ;
test:anotherProperty 'just another thing' .
"""
# State t0
for statement in parser.parse_string_as_stream(state_t0, base_uri=base_uri):
repository.delete_triples_about(statement.subject, context=provider)
repository.import_from_string(state_t0, context=provider, fmt=infile_fmt)
assert repository.size() == 3
# State t1
for statement in parser.parse_string_as_stream(state_t1, base_uri=base_uri):
repository.delete_triples_about(statement.subject, context=provider)
assert repository.size() == 0
repository.import_from_string(state_t1, context=provider, fmt=infile_fmt)
assert repository.size() == 3
def test_can_handle_multiple_providers(repository):
"""This test tests the case where there are two registered providers. Each
provider should have triples in their respective named graph.
"""
# Setup
repository.clear()
infile_fmt = 'turtle'
base_uri = 'http://example.org/test/'
parser = RDF.TurtleParser()
state_t0 = """
@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some property' ;
test:anotherProperty 'just another thing' .
"""
state_t1 = """
@prefix void: <http://rdfs.org/ns/void#> .
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
@prefix test: <http://example.org/test/> .
test:A a test:Thing ;
test:someProperty 'some property' ;
test:anotherProperty 'just another thing' .
"""
# State t0
provider = 'providerA'
for statement in parser.parse_string_as_stream(state_t0, base_uri=base_uri):
repository.delete_triples_about(statement.subject, context=provider)
repository.import_from_string(state_t0, context=provider, fmt=infile_fmt)
assert repository.size() == 3
# State t1
provider = 'providerB'
for statement in parser.parse_string_as_stream(state_t1, base_uri=base_uri):
repository.delete_triples_about(statement.subject, context=provider)
assert repository.size() == 3
repository.import_from_string(state_t1, context=provider, fmt=infile_fmt)
assert repository.size() == 6
| apache-2.0 | -1,854,745,154,394,514,700 | 30.671296 | 80 | 0.66423 | false |
Duoxilian/home-assistant | homeassistant/helpers/event_decorators.py | 7 | 2746 | """Event Decorators for custom components."""
import functools
import logging
# pylint: disable=unused-import
from typing import Optional # NOQA
from homeassistant.core import HomeAssistant # NOQA
from homeassistant.helpers import event
HASS = None # type: Optional[HomeAssistant]
_LOGGER = logging.getLogger(__name__)
_MSG = 'Event decorators are deprecated. Support will be removed in 0.40.'
def track_state_change(entity_ids, from_state=None, to_state=None):
"""Decorator factory to track state changes for entity id."""
_LOGGER.warning(_MSG)
def track_state_change_decorator(action):
"""Decorator to track state changes."""
event.track_state_change(HASS, entity_ids,
functools.partial(action, HASS),
from_state, to_state)
return action
return track_state_change_decorator
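# Example usage of the (deprecated) decorator; the entity id below is
# hypothetical, and the decorated function receives hass followed by the
# usual state-change callback arguments:
#
#     @track_state_change('light.kitchen', to_state='on')
#     def kitchen_light_on(hass, entity_id, old_state, new_state):
#         ...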
def track_sunrise(offset=None):
"""Decorator factory to track sunrise events."""
_LOGGER.warning(_MSG)
def track_sunrise_decorator(action):
"""Decorator to track sunrise events."""
event.track_sunrise(HASS,
functools.partial(action, HASS),
offset)
return action
return track_sunrise_decorator
def track_sunset(offset=None):
"""Decorator factory to track sunset events."""
_LOGGER.warning(_MSG)
def track_sunset_decorator(action):
"""Decorator to track sunset events."""
event.track_sunset(HASS,
functools.partial(action, HASS),
offset)
return action
return track_sunset_decorator
def track_time_change(year=None, month=None, day=None, hour=None, minute=None,
second=None):
"""Decorator factory to track time changes."""
_LOGGER.warning(_MSG)
def track_time_change_decorator(action):
"""Decorator to track time changes."""
event.track_time_change(HASS,
functools.partial(action, HASS),
year, month, day, hour, minute, second)
return action
return track_time_change_decorator
def track_utc_time_change(year=None, month=None, day=None, hour=None,
minute=None, second=None):
"""Decorator factory to track time changes."""
_LOGGER.warning(_MSG)
def track_utc_time_change_decorator(action):
"""Decorator to track time changes."""
event.track_utc_time_change(HASS,
functools.partial(action, HASS),
year, month, day, hour, minute, second)
return action
return track_utc_time_change_decorator
| mit | -2,787,413,112,130,598,000 | 31.305882 | 78 | 0.606701 | false |
imply/chuu | build/android/pylib/constants.py | 23 | 3998 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Defines a set of constants shared by test runners and other scripts."""
import os
import subprocess
import sys
DIR_SOURCE_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir, os.pardir, os.pardir))
ISOLATE_DEPS_DIR = os.path.join(DIR_SOURCE_ROOT, 'isolate_deps_dir')
EMULATOR_SDK_ROOT = os.path.abspath(os.path.join(DIR_SOURCE_ROOT, os.pardir,
os.pardir))
CHROME_PACKAGE = 'com.google.android.apps.chrome'
CHROME_ACTIVITY = 'com.google.android.apps.chrome.Main'
CHROME_DEVTOOLS_SOCKET = 'chrome_devtools_remote'
CHROME_TESTS_PACKAGE = 'com.google.android.apps.chrome.tests'
LEGACY_BROWSER_PACKAGE = 'com.google.android.browser'
LEGACY_BROWSER_ACTIVITY = 'com.android.browser.BrowserActivity'
CONTENT_SHELL_PACKAGE = 'org.chromium.content_shell_apk'
CONTENT_SHELL_ACTIVITY = 'org.chromium.content_shell_apk.ContentShellActivity'
CHROME_SHELL_PACKAGE = 'org.chromium.chrome.browser.test'
CHROMIUM_TEST_SHELL_PACKAGE = 'org.chromium.chrome.testshell'
CHROMIUM_TEST_SHELL_ACTIVITY = (
'org.chromium.chrome.testshell.ChromiumTestShellActivity')
CHROMIUM_TEST_SHELL_DEVTOOLS_SOCKET = 'chromium_testshell_devtools_remote'
CHROMIUM_TEST_SHELL_HOST_DRIVEN_DIR = os.path.join(
DIR_SOURCE_ROOT, 'chrome', 'android')
GTEST_TEST_PACKAGE_NAME = 'org.chromium.native_test'
GTEST_TEST_ACTIVITY_NAME = 'org.chromium.native_test.ChromeNativeTestActivity'
GTEST_COMMAND_LINE_FILE = 'chrome-native-tests-command-line'
BROWSERTEST_TEST_PACKAGE_NAME = 'org.chromium.content_browsertests_apk'
BROWSERTEST_TEST_ACTIVITY_NAME = (
'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity')
BROWSERTEST_COMMAND_LINE_FILE = 'content-browser-tests-command-line'
# Ports arrangement for various test servers used in Chrome for Android.
# Lighttpd server will attempt to use 9000 as default port, if unavailable it
# will find a free port from 8001 - 8999.
LIGHTTPD_DEFAULT_PORT = 9000
LIGHTTPD_RANDOM_PORT_FIRST = 8001
LIGHTTPD_RANDOM_PORT_LAST = 8999
TEST_SYNC_SERVER_PORT = 9031
# The net test server is started from port 10201.
# TODO(pliard): http://crbug.com/239014. Remove this dirty workaround once
# http://crbug.com/239014 is fixed properly.
TEST_SERVER_PORT_FIRST = 10201
TEST_SERVER_PORT_LAST = 30000
# A file to record next valid port of test server.
TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
TEST_EXECUTABLE_DIR = '/data/local/tmp'
# Directories for common java libraries for SDK build.
# These constants are defined in build/android/ant/common.xml
SDK_BUILD_JAVALIB_DIR = 'lib.java'
SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
SDK_BUILD_APKS_DIR = 'apks'
# The directory on the device where perf test output gets saved to.
DEVICE_PERF_OUTPUT_DIR = '/data/data/' + CHROME_PACKAGE + '/files'
SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
ANDROID_SDK_VERSION = 18
ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
'third_party/android_tools/sdk')
ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
'third_party/android_tools/ndk')
UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
def _GetADBPath():
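  """Returns 'adb' when ANDROID_SDK_ROOT is set or adb runs from $PATH;
  otherwise falls back to the checked-in platform-tools binary."""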
if os.environ.get('ANDROID_SDK_ROOT'):
return 'adb'
# If envsetup.sh hasn't been sourced and there's no adb in the path,
# set it here.
try:
with file(os.devnull, 'w') as devnull:
subprocess.call(['adb', 'version'], stdout=devnull, stderr=devnull)
return 'adb'
except OSError:
print >> sys.stderr, 'No adb found in $PATH, fallback to checked in binary.'
return os.path.join(ANDROID_SDK_ROOT, 'platform-tools', 'adb')
ADB_PATH = _GetADBPath()
# Exit codes
ERROR_EXIT_CODE = 1
WARNING_EXIT_CODE = 88
| bsd-3-clause | 23,727,958,607,356,470 | 37.442308 | 80 | 0.723112 | false |
Laurawly/tvm-1 | python/tvm/te/hybrid/preprocessor.py | 4 | 4750 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Determines the declaration, r/w status, and last use of each variable"""
import ast
import sys
from .runtime import HYBRID_GLOBALS
from .utils import _internal_assert
class PyVariableUsage(ast.NodeVisitor):
"""The vistor class to determine the declaration, r/w status, and last use of each variable"""
# pylint: disable=invalid-name
# pylint: disable=missing-docstring
def __init__(self, args, symbols, closure_vars):
self.status = {}
self.scope_level = []
self._args = {}
self.args = args
self.aug_assign_ = False
self.symbols = symbols
self.closure_vars = closure_vars
def visit_FunctionDef(self, node):
self.scope_level.append(node)
_internal_assert(
len(node.args.args) == len(self.args),
"#arguments passed should be the same as #arguments defined",
)
for idx, arg in enumerate(node.args.args):
_attr = "id" if sys.version_info[0] < 3 else "arg" # To make py2 and 3 compatible
self._args[getattr(arg, _attr)] = self.args[idx]
for i in node.body:
self.visit(i)
def visit_For(self, node):
_internal_assert(isinstance(node.target, ast.Name), "For's iterator should be an id")
self.visit(node.iter)
self.scope_level.append(node)
for i in node.body:
self.visit(i)
self.scope_level.pop()
def visit_Call(self, node):
# No function pointer supported so far
_internal_assert(isinstance(node.func, ast.Name), "Function call should be an id")
func_id = node.func.id
_internal_assert(
func_id
in list(HYBRID_GLOBALS.keys())
+ ["range", "max", "min", "len"]
+ list(self.symbols.keys()),
"Function call id " + func_id + " not in intrinsics' list",
)
for elem in node.args:
self.visit(elem)
def visit_AugAssign(self, node):
self.aug_assign_ = True
self.generic_visit(node)
self.aug_assign_ = False
def visit_Name(self, node):
# If it is True or False, we do not worry about it!
if sys.version_info[0] == 2 and node.id in ["True", "False"]:
return
# If it is from the argument list or loop variable, we do not worry about it!
if node.id in self._args.keys():
return
fors = [loop.target.id for loop in self.scope_level if isinstance(loop, ast.For)]
if node.id in fors:
return
# The loop variable cannot be overwritten when iteration
_internal_assert(
not isinstance(node.ctx, ast.Store) or node.id not in fors,
"Iter var cannot be overwritten",
)
if node.id not in self.status.keys():
# It is a captured value in closure
if node.id in self.closure_vars:
try:
ast.literal_eval(str(self.closure_vars[node.id]))
except ValueError:
raise ValueError("Only support capturing constant values in closure")
return
_internal_assert(isinstance(node.ctx, ast.Store), "Undeclared variable %s" % node.id)
if self.aug_assign_:
raise ValueError('"First store" cannot be an AugAssign')
self.status[node.id] = (node, self.scope_level[-1], set())
else:
decl, loop, usage = self.status[node.id]
usage.add(type(node.ctx))
_internal_assert(
loop in self.scope_level, "%s is used out of the scope it is defined!" % node.id
)
self.status[node.id] = (decl, loop, usage)
def determine_variable_usage(root, args, symbols, closure_vars):
"""The helper function for calling the dedicated visitor."""
visitor = PyVariableUsage(args, symbols, closure_vars)
visitor.visit(root)
return visitor.status
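# The returned status dict maps each variable name to a tuple of
# (declaring AST node, enclosing scope node, set of usage context types,
# i.e. ast.Load / ast.Store), matching how visit_Name records it above.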
| apache-2.0 | 9,144,004,332,674,614,000 | 38.583333 | 98 | 0.616421 | false |
Klafyvel/ISN_2015 | nom molecule.py | 1 | 5422 | # Nomenclature
# Copyright (C) 2015 BOUVIN Valentin, HONNORATY Vincent, LEVY-FALK Hugo
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Created by honnoraty, on 26/01/2015, in Python 3.2
from molecule import *
sourcechaine = dict()
sourcechaine["methan"] = 1,4
sourcechaine["ethan"] = 2,6
sourcechaine["propan"] = 3,8
sourcechaine["butan"] = 4,10
sourcechaine["pentan"] = 5,12
sourcechaine["hexan"] = 6,14
sourcechaine["heptan"] = 7,16
sourcechaine["octan"] = 8,18
sourcechaine["nonan"] = 9,20
sourcechaine["decan"] = 10,22
sourcechaine["undecan"] = 11,24
sourcechaine["dodecan"] = 12,26
sourcechaine["tridecan"] = 13,28
sourcechaine["methyl"] = 1,3
sourcechaine["ethyl"] = 2,5
sourcechaine["dimethyl"] = 2,5
sourcechaine["diethyl"] = 4,9
sourcechaine["trimethyl"] = 3,7
sourcechaine["triethyl"] = 6,12
chainegenerique = ["methan","ethan","propan","butan","hexan","octan","nonan","decan","undecan","dodecan","tridecan"]
branche = ["methyl","ethyl","dimethyl","diethyl","trimethyl","triethyl"]
nomentre = input("- enter the parts of the name")
nomentre = nomentre.split("-")
print(nomentre)
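# Example: entering "2-methyl-butan" splits into ['2', 'methyl', 'butan'],
# which resolves to 5 carbons and 12 hydrogens, i.e. C5H12 (2-methylbutane).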
nbchainegen = 0
listbranche = [""]
ChAtome = [""]
positionbranche = [""]
nomMole = ""
for n, elt in enumerate(chainegenerique):
for i, elt in enumerate(nomentre):
if nomentre[i] == chainegenerique[n]:
nbchainegen = n
nbbranche = 0
n = 0
i = 0
lasti = 0
z = 0
y = 0
positionasign = 1
position = 0
hydroSurC = 0
decahydro = 0
decacarbo = 0
decachaine = 0
for n, elt in enumerate(branche):
for i, elt in enumerate(nomentre):
if nomentre[i] == branche[n]:
listbranche[nbbranche] = branche[n]
nbbranche += 1
listbranche.append(1)
while nomentre[i-positionasign].isdecimal():
positionbranche[position] = int(nomentre[i - positionasign])
positionbranche.append(1)
positionasign += 1
position += 1
positionasign = 0
(carb,hydro) = sourcechaine[chainegenerique[nbchainegen]]
for n in range(nbbranche):
carb, hydro = carb + sourcechaine[listbranche[n]][0], hydro + sourcechaine[listbranche[n]][1] - nbbranche
nomMole = "C" + str(carb) + "H" + str(hydro)
print(nomMole)
for n in range(position):
print(positionbranche[n])
for n in range(carb): # Generate the atom lists
ChAtome.append(1)
ChAtome[n] = CARBONE()
for n in range(hydro):
ChAtome.append(1)
ChAtome[n + carb] = HYDROGENE()
carbChaineg = int(sourcechaine[chainegenerique[nbchainegen]][0])
for n in range(carbChaineg - 1): # Generate the main chain
ChAtome[n].link(ChAtome[n + 1])
#decacarbo = carbChaineg
print("décalage:" ,carbChaineg)
lasti = 0
for n in range(nbbranche): # Add the branches
ChAtome[positionbranche[n] - 1].link(ChAtome[carbChaineg + n])
for i in range(sourcechaine[listbranche[n]][1] + sourcechaine[listbranche[n]][0] - 1):
print("Posi hydro: ",carbChaineg + decachaine + decacarbo)
print("chaine",*ChAtome)
decacarbo += 1
ChAtome[carbChaineg + n + decachaine].link(ChAtome[carbChaineg + decachaine + decacarbo])
print(sourcechaine[listbranche[n]][1] + sourcechaine[listbranche[n]][0])
if ((lasti + 2 == i) and (decachaine == 0 ) or (lasti + 3 == i)):
decachaine += 1
lasti = i
if(i == 2):
decacarbo -= 1
if(i == 5 and (listbranche[n] == "trimethyl")):
decacarbo -= 1
#2-methyl-butan-e
#2-ethyl-butan-e
#2-trimethyl-butan
hydroChaineg = int(sourcechaine[chainegenerique[nbchainegen]][1])
print("yolo")
print(hydroChaineg)
print(len(ChAtome))
print(carbChaineg)
for n in range(carbChaineg):
hydroSurC = 4
for z in range(position):
try:
if(n == positionbranche[n]):
hydroSurC -= 1
except IndexError:
pass
if(((n == 0) or (n == carbChaineg - 1)) and (chainegenerique[nbchainegen] != "methan")):
hydroSurC -= 1
elif (chainegenerique[nbchainegen] != "methan"):
hydroSurC -= 2
print("Hydro sur Carb")
print(hydroSurC)
print(*ChAtome)
print("valeur de:",n)
for y in range(hydroSurC):
print("crab",n)
print(decacarbo)
print("hydro",decahydro + carbChaineg + decacarbo + decachaine)
ChAtome[n].link(ChAtome[n + decahydro + carbChaineg + decacarbo + decachaine])
print("liée")
print(*ChAtome)
decahydro += 1
decahydro -= 1
#molecule=Molecule(*ChAtome)
print(*ChAtome)
#print(molecule.add_atome)
#print(molecule)
#2-methyl-butan-e
| gpl-3.0 | 5,759,920,663,039,216,000 | 25.753846 | 116 | 0.624908 | false |
seann1/portfolio5 | .meteor/dev_bundle/python/Lib/lib-tk/Dialog.py | 187 | 1567 | # dialog.py -- Tkinter interface to the tk_dialog script.
from Tkinter import *
from Tkinter import _cnfmerge
if TkVersion <= 3.6:
DIALOG_ICON = 'warning'
else:
DIALOG_ICON = 'questhead'
class Dialog(Widget):
def __init__(self, master=None, cnf={}, **kw):
cnf = _cnfmerge((cnf, kw))
self.widgetName = '__dialog__'
Widget._setup(self, master, cnf)
self.num = self.tk.getint(
self.tk.call(
'tk_dialog', self._w,
cnf['title'], cnf['text'],
cnf['bitmap'], cnf['default'],
*cnf['strings']))
try: Widget.destroy(self)
except TclError: pass
def destroy(self): pass
def _test():
d = Dialog(None, {'title': 'File Modified',
'text':
'File "Python.h" has been modified'
' since the last time it was saved.'
' Do you want to save it before'
' exiting the application.',
'bitmap': DIALOG_ICON,
'default': 0,
'strings': ('Save File',
'Discard Changes',
'Return to Editor')})
print d.num
if __name__ == '__main__':
t = Button(None, {'text': 'Test',
'command': _test,
Pack: {}})
q = Button(None, {'text': 'Quit',
'command': t.quit,
Pack: {}})
t.mainloop()
| gpl-2.0 | -4,216,742,266,629,097,500 | 30.979592 | 58 | 0.435227 | false |
prathik/thrift | lib/py/setup.py | 46 | 3406 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import sys
try:
from setuptools import setup, Extension
except ImportError:
from distutils.core import setup, Extension, Command
from distutils.command.build_ext import build_ext
from distutils.errors import CCompilerError, DistutilsExecError, DistutilsPlatformError
# Fix to build sdist under vagrant
import os
if 'vagrant' in str(os.environ):
del os.link
include_dirs = []
if sys.platform == 'win32':
include_dirs.append('compat/win32')
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError)
else:
ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError)
class BuildFailed(Exception):
pass
class ve_build_ext(build_ext):
def run(self):
try:
build_ext.run(self)
except DistutilsPlatformError as x:
raise BuildFailed()
def build_extension(self, ext):
try:
build_ext.build_extension(self, ext)
except ext_errors as x:
raise BuildFailed()
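# ve_build_ext converts compiler failures into BuildFailed so that the
# module-level code below can fall back to a pure-Python installation.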
def run_setup(with_binary):
if with_binary:
extensions = dict(
ext_modules = [
Extension('thrift.protocol.fastbinary',
sources = ['src/protocol/fastbinary.c'],
include_dirs = include_dirs,
)
],
cmdclass=dict(build_ext=ve_build_ext)
)
else:
extensions = dict()
setup(name = 'thrift',
version = '1.0.0-dev',
description = 'Python bindings for the Apache Thrift RPC system',
author = 'Thrift Developers',
author_email = '[email protected]',
url = 'http://thrift.apache.org',
license = 'Apache License 2.0',
packages = [
'thrift',
'thrift.protocol',
'thrift.transport',
'thrift.server',
],
package_dir = {'thrift' : 'src'},
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Networking'
],
use_2to3 = True,
**extensions
)
try:
run_setup(True)
except BuildFailed:
print()
print('*' * 80)
print("An error occurred while trying to compile with the C extension enabled")
print("Attempting to build without the extension now")
print('*' * 80)
print()
run_setup(False)
| apache-2.0 | -5,553,007,174,621,648,000 | 29.963636 | 87 | 0.630358 | false |
jonashagstedt/swampdragon | chat_example/chat_example/settings.py | 13 | 2576 | import os
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '))%*y#of%4cnju5=$-1sab!ks(lq=60^rc3oyt-!69c19wl&r_'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'chat_example.chat',
'swampdragon',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'chat_example.urls'
WSGI_APPLICATION = 'chat_example.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev.sqlite3',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static_root')
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static')
]
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_DIRS = [
os.path.join(BASE_DIR, 'templates')
]
SWAMP_DRAGON_CONNECTION = ('chat_example.sockserver.DataConnection', '/data')
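# SWAMP_DRAGON_CONNECTION above is a tuple of
# (dotted path to the SwampDragon connection class, sockjs endpoint URL).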
DRAGON_URL = 'http://localhost:9999/'
| bsd-3-clause | 4,166,193,999,530,727,000 | 24.76 | 77 | 0.720885 | false |
Shao-Feng/crosswalk-test-suite | webapi/tct-csp-w3c-tests/csp-py/csp_object-src_cross-origin_multi_blocked_int-manual.py | 30 | 2479 | def main(request, response):
import simplejson as json
f = file('config.json')
source = f.read()
s = json.JSONDecoder().decode(source)
url1 = "http://" + s['host'] + ":" + str(s['ports']['http'][1])
url2 = "http://" + s['host'] + ":" + str(s['ports']['http'][0])
_CSP = "object-src " + url2 + " https://tizen.org"
response.headers.set("Content-Security-Policy", _CSP)
response.headers.set("X-Content-Security-Policy", _CSP)
response.headers.set("X-WebKit-CSP", _CSP)
return """<!DOCTYPE html>
<!--
Copyright (c) 2013 Intel Corporation.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of works must retain the original copyright notice, this list
of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the original copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors
may be used to endorse or promote products derived from this work without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Authors:
Hao, Yunfei <[email protected]>
-->
<html>
<head>
<title>CSP Test: csp_object-src_cross-origin_multi_blocked_int</title>
<link rel="author" title="Intel" href="http://www.intel.com"/>
<link rel="help" href="http://www.w3.org/TR/2012/CR-CSP-20121115/#object-src"/>
<meta name="flags" content=""/>
<meta charset="utf-8"/>
</head>
<body>
<p>Test passes if there is <strong>no red</strong>.</p>
<object data="support/red-100x100.png"/>
</body>
</html> """
| bsd-3-clause | -5,222,405,871,400,813,000 | 43.267857 | 83 | 0.720048 | false |
erudit/eruditorg | eruditorg/erudit/migrations/0089_thesisprovider.py | 1 | 1911 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-04-24 15:15
from __future__ import unicode_literals
from django.db import migrations, models
def populate_thesis_providers(apps, schema_editor):
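    """Create a ThesisProvider entry for every collection that has theses."""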
ThesisProvider = apps.get_model("erudit", "ThesisProvider")
Collection = apps.get_model("erudit", "Collection")
Thesis = apps.get_model("erudit", "Thesis")
collection_ids = Thesis.objects.values_list("collection_id", flat=True)
collections = Collection.objects.filter(id__in=collection_ids)
for collection in collections.all():
tp = ThesisProvider.objects.create(
code=collection.code,
name=collection.name,
solr_name=collection.name,
logo=collection.logo,
)
class Migration(migrations.Migration):
dependencies = [
("erudit", "0088_remove_article_copyrights"),
]
operations = [
migrations.CreateModel(
name="ThesisProvider",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("code", models.CharField(max_length=10, unique=True, verbose_name="Code")),
("name", models.CharField(max_length=200, verbose_name="Nom")),
(
"solr_name",
models.CharField(db_index=True, max_length=200, verbose_name="Nom dans Solr"),
),
("logo", models.ImageField(blank=True, verbose_name="Logo")),
],
options={
"verbose_name_plural": "Éditeurs de thèses",
"verbose_name": "Éditeur de thèses",
},
),
migrations.RunPython(populate_thesis_providers, reverse_code=migrations.RunPython.noop),
]
| gpl-3.0 | -5,948,758,977,435,991,000 | 34.981132 | 98 | 0.559518 | false |
117111302/PyGithub | github/tests/GitBlob.py | 39 | 2801 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import Framework
class GitBlob(Framework.TestCase):
def setUp(self):
Framework.TestCase.setUp(self)
self.blob = self.g.get_user().get_repo("PyGithub").get_git_blob("53bce9fa919b4544e67275089b3ec5b44be20667")
def testAttributes(self):
self.assertTrue(self.blob.content.startswith("IyEvdXNyL2Jpbi9lbnYgcHl0aG9uCgpmcm9tIGRpc3R1dGlscy5jb3JlIGlt\ncG9ydCBzZXR1cAppbXBvcnQgdGV4dHdyYXAKCnNldHVwKAogICAgbmFtZSA9\n"))
self.assertTrue(self.blob.content.endswith("Z3JhbW1pbmcgTGFuZ3VhZ2UgOjogUHl0aG9uIiwKICAgICAgICAiVG9waWMg\nOjogU29mdHdhcmUgRGV2ZWxvcG1lbnQiLAogICAgXSwKKQo=\n"))
self.assertEqual(len(self.blob.content), 1757)
self.assertEqual(self.blob.encoding, "base64")
self.assertEqual(self.blob.size, 1295)
self.assertEqual(self.blob.sha, "53bce9fa919b4544e67275089b3ec5b44be20667")
self.assertEqual(self.blob.url, "https://api.github.com/repos/jacquev6/PyGithub/git/blobs/53bce9fa919b4544e67275089b3ec5b44be20667")
| gpl-3.0 | -3,403,871,653,206,204,000 | 67.317073 | 181 | 0.531953 | false |
tbeadle/django | tests/forms_tests/widget_tests/test_checkboxselectmultiple.py | 13 | 4796 | from django.forms import CheckboxSelectMultiple
from .base import WidgetTest
class CheckboxSelectMultipleTest(WidgetTest):
widget = CheckboxSelectMultiple
def test_render_value(self):
self.check_html(self.widget(choices=self.beatles), 'beatles', ['J'], html=(
"""<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>"""
))
def test_render_value_multiple(self):
self.check_html(self.widget(choices=self.beatles), 'beatles', ['J', 'P'], html=(
"""<ul>
<li><label><input checked="checked" type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input checked="checked" type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>"""
))
def test_render_none(self):
"""
If the value is None, none of the options are selected.
"""
self.check_html(self.widget(choices=self.beatles), 'beatles', None, html=(
"""<ul>
<li><label><input type="checkbox" name="beatles" value="J" /> John</label></li>
<li><label><input type="checkbox" name="beatles" value="P" /> Paul</label></li>
<li><label><input type="checkbox" name="beatles" value="G" /> George</label></li>
<li><label><input type="checkbox" name="beatles" value="R" /> Ringo</label></li>
</ul>"""
))
def test_nested_choices(self):
nested_choices = (
('unknown', 'Unknown'),
('Audio', (('vinyl', 'Vinyl'), ('cd', 'CD'))),
('Video', (('vhs', 'VHS'), ('dvd', 'DVD'))),
)
html = """
<ul id="media">
<li>
<label for="media_0"><input id="media_0" name="nestchoice" type="checkbox" value="unknown" /> Unknown</label>
</li>
<li>Audio<ul id="media_1">
<li>
<label for="media_1_0">
<input checked="checked" id="media_1_0" name="nestchoice" type="checkbox" value="vinyl" /> Vinyl
</label>
</li>
<li>
<label for="media_1_1"><input id="media_1_1" name="nestchoice" type="checkbox" value="cd" /> CD</label>
</li>
</ul></li>
<li>Video<ul id="media_2">
<li>
<label for="media_2_0"><input id="media_2_0" name="nestchoice" type="checkbox" value="vhs" /> VHS</label>
</li>
<li>
<label for="media_2_1">
<input checked="checked" id="media_2_1" name="nestchoice" type="checkbox" value="dvd" /> DVD
</label>
</li>
</ul></li>
</ul>
"""
self.check_html(
self.widget(choices=nested_choices), 'nestchoice', ('vinyl', 'dvd'),
attrs={'id': 'media'}, html=html,
)
def test_separate_ids(self):
"""
Each input gets a separate ID.
"""
choices = [('a', 'A'), ('b', 'B'), ('c', 'C')]
html = """
<ul id="abc">
<li>
<label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label>
</li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li>
<label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label>
</li>
</ul>
"""
self.check_html(self.widget(choices=choices), 'letters', ['a', 'c'], attrs={'id': 'abc'}, html=html)
def test_separate_ids_constructor(self):
"""
Each input gets a separate ID when the ID is passed to the constructor.
"""
widget = CheckboxSelectMultiple(attrs={'id': 'abc'}, choices=[('a', 'A'), ('b', 'B'), ('c', 'C')])
html = """
<ul id="abc">
<li>
<label for="abc_0"><input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" /> A</label>
</li>
<li><label for="abc_1"><input type="checkbox" name="letters" value="b" id="abc_1" /> B</label></li>
<li>
<label for="abc_2"><input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" /> C</label>
</li>
</ul>
"""
self.check_html(widget, 'letters', ['a', 'c'], html=html)
| bsd-3-clause | -3,166,111,628,266,635,000 | 41.070175 | 117 | 0.5196 | false |
itsjeyd/edx-platform | openedx/core/djangoapps/api_admin/decorators.py | 27 | 1127 | """Decorators for API access management."""
from functools import wraps
from django.core.urlresolvers import reverse
from django.http import HttpResponseNotFound
from django.shortcuts import redirect
from openedx.core.djangoapps.api_admin.models import ApiAccessRequest, ApiAccessConfig
def api_access_enabled_or_404(view_func):
"""If API access management feature is not enabled, return a 404."""
@wraps(view_func)
def wrapped_view(view_obj, *args, **kwargs):
"""Wrapper for the view function."""
if ApiAccessConfig.current().enabled:
return view_func(view_obj, *args, **kwargs)
return HttpResponseNotFound()
return wrapped_view
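# Example usage on a class-based view method (the view class is hypothetical;
# the decorators assume the wrapped callable is a method, with the request as
# the first positional argument after self):
#
#     class ApiRequestView(View):
#         @api_access_enabled_or_404
#         def get(self, request):
#             ...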
def require_api_access(view_func):
"""If the requesting user does not have API access, bounce them to the request form."""
@wraps(view_func)
def wrapped_view(view_obj, *args, **kwargs):
"""Wrapper for the view function."""
if ApiAccessRequest.has_api_access(args[0].user):
return view_func(view_obj, *args, **kwargs)
return redirect(reverse('api_admin:api-request'))
return wrapped_view
| agpl-3.0 | -6,156,260,784,306,147,000 | 36.566667 | 91 | 0.698314 | false |
eduNEXT/edunext-platform | openedx/core/lib/gating/tests/test_api.py | 3 | 17438 | """
Tests for the gating API
"""
import unittest
import six
from completion.models import BlockCompletion
from ddt import data, ddt, unpack
from django.conf import settings
from milestones import api as milestones_api
from milestones.tests.utils import MilestonesTestCaseMixin
from mock import Mock, patch
from lms.djangoapps.gating import api as lms_gating_api
from lms.djangoapps.grades.constants import GradeOverrideFeatureEnum
from lms.djangoapps.grades.models import PersistentSubsectionGrade, PersistentSubsectionGradeOverride
from lms.djangoapps.grades.tests.base import GradeTestBase
from lms.djangoapps.grades.tests.utils import mock_get_score
from openedx.core.lib.gating import api as gating_api
from openedx.core.lib.gating.exceptions import GatingValidationError
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import TEST_DATA_SPLIT_MODULESTORE, ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
@ddt
class TestGatingApi(ModuleStoreTestCase, MilestonesTestCaseMixin):
"""
Tests for the gating API
"""
MODULESTORE = TEST_DATA_SPLIT_MODULESTORE
def setUp(self):
"""
Initial data setup
"""
super(TestGatingApi, self).setUp()
# create course
self.course = CourseFactory.create(
org='edX',
number='EDX101',
run='EDX101_RUN1',
display_name='edX 101'
)
self.course.enable_subsection_gating = True
self.course.save()
# create chapter
self.chapter1 = ItemFactory.create(
parent_location=self.course.location,
category='chapter',
display_name='untitled chapter 1'
)
# create sequentials
self.seq1 = ItemFactory.create(
parent_location=self.chapter1.location,
category='sequential',
display_name='untitled sequential 1'
)
self.seq2 = ItemFactory.create(
parent_location=self.chapter1.location,
category='sequential',
display_name='untitled sequential 2'
)
# create vertical
self.vertical = ItemFactory.create(
parent_location=self.seq1.location,
category='vertical',
display_name='untitled vertical 1'
)
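        # Resulting course tree:
        #   course -> chapter1 -> (seq1 -> vertical, seq2)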
self.generic_milestone = {
'name': 'Test generic milestone',
'namespace': six.text_type(self.seq1.location),
}
@patch('openedx.core.lib.gating.api.log.warning')
def test_get_prerequisite_milestone_returns_none(self, mock_log):
""" Test test_get_prerequisite_milestone_returns_none """
prereq = gating_api._get_prerequisite_milestone(self.seq1.location) # pylint: disable=protected-access
self.assertIsNone(prereq)
self.assertTrue(mock_log.called)
def test_get_prerequisite_milestone_returns_milestone(self):
""" Test test_get_prerequisite_milestone_returns_milestone """
gating_api.add_prerequisite(self.course.id, self.seq1.location)
prereq = gating_api._get_prerequisite_milestone(self.seq1.location) # pylint: disable=protected-access
self.assertIsNotNone(prereq)
@data('', '0', '50', '100')
def test_validate_min_score_is_valid(self, min_score):
""" Test test_validate_min_score_is_valid """
self.assertIsNone(gating_api._validate_min_score(min_score)) # pylint: disable=protected-access
@data('abc', '-10', '110')
def test_validate_min_score_raises(self, min_score):
""" Test test_validate_min_score_non_integer """
with self.assertRaises(GatingValidationError):
gating_api._validate_min_score(min_score) # pylint: disable=protected-access
def test_find_gating_milestones(self):
""" Test test_find_gating_milestones """
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, 100)
milestone = milestones_api.add_milestone(self.generic_milestone)
milestones_api.add_course_content_milestone(self.course.id, self.seq1.location, 'fulfills', milestone)
self.assertEqual(len(gating_api.find_gating_milestones(self.course.id, self.seq1.location, 'fulfills')), 1)
self.assertEqual(len(gating_api.find_gating_milestones(self.course.id, self.seq1.location, 'requires')), 0)
self.assertEqual(len(gating_api.find_gating_milestones(self.course.id, self.seq2.location, 'fulfills')), 0)
self.assertEqual(len(gating_api.find_gating_milestones(self.course.id, self.seq2.location, 'requires')), 1)
def test_get_gating_milestone_not_none(self):
""" Test test_get_gating_milestone_not_none """
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, 100)
self.assertIsNotNone(gating_api.get_gating_milestone(self.course.id, self.seq1.location, 'fulfills'))
self.assertIsNotNone(gating_api.get_gating_milestone(self.course.id, self.seq2.location, 'requires'))
def test_get_gating_milestone_is_none(self):
""" Test test_get_gating_milestone_is_none """
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, 100)
self.assertIsNone(gating_api.get_gating_milestone(self.course.id, self.seq1.location, 'requires'))
self.assertIsNone(gating_api.get_gating_milestone(self.course.id, self.seq2.location, 'fulfills'))
def test_prerequisites(self):
""" Test test_prerequisites """
gating_api.add_prerequisite(self.course.id, self.seq1.location)
prereqs = gating_api.get_prerequisites(self.course.id)
self.assertEqual(len(prereqs), 1)
self.assertEqual(prereqs[0]['block_display_name'], self.seq1.display_name)
self.assertEqual(prereqs[0]['block_usage_key'], six.text_type(self.seq1.location))
self.assertTrue(gating_api.is_prerequisite(self.course.id, self.seq1.location))
gating_api.remove_prerequisite(self.seq1.location)
self.assertEqual(len(gating_api.get_prerequisites(self.course.id)), 0)
self.assertFalse(gating_api.is_prerequisite(self.course.id, self.seq1.location))
def test_required_content(self):
""" Test test_required_content """
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, 100, 100)
prereq_content_key, min_score, min_completion = gating_api.get_required_content(
self.course.id, self.seq2.location
)
self.assertEqual(prereq_content_key, six.text_type(self.seq1.location))
self.assertEqual(min_score, 100)
self.assertEqual(min_completion, 100)
gating_api.set_required_content(self.course.id, self.seq2.location, None, None, None)
prereq_content_key, min_score, min_completion = gating_api.get_required_content(
self.course.id, self.seq2.location
)
self.assertIsNone(prereq_content_key)
self.assertIsNone(min_score)
self.assertIsNone(min_completion)
def test_get_gated_content(self):
"""
Verify staff bypasses gated content and student gets list of unfulfilled prerequisites.
"""
staff = UserFactory(is_staff=True)
student = UserFactory(is_staff=False)
self.assertEqual(gating_api.get_gated_content(self.course, staff), [])
self.assertEqual(gating_api.get_gated_content(self.course, student), [])
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, 100)
milestone = milestones_api.get_course_content_milestones(self.course.id, self.seq2.location, 'requires')[0]
self.assertEqual(gating_api.get_gated_content(self.course, staff), [])
self.assertEqual(gating_api.get_gated_content(self.course, student), [six.text_type(self.seq2.location)])
milestones_api.add_user_milestone({'id': student.id}, milestone)
self.assertEqual(gating_api.get_gated_content(self.course, student), [])
@data(
(100, 0, 50, 0, False),
(100, 0, 100, 0, True),
(0, 100, 0, 50, False),
(0, 100, 0, 100, True),
(100, 100, 50, 100, False),
(100, 100, 100, 50, False),
(100, 100, 100, 100, True),
)
@unpack
def test_is_gate_fulfilled(self, min_score, min_completion, learner_score, learner_completion, is_gate_fulfilled):
"""
Test if prereq section has any unfulfilled milestones
"""
student = UserFactory(is_staff=False)
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(
self.course.id, self.seq2.location, self.seq1.location, min_score, min_completion
)
milestone = milestones_api.add_milestone(self.generic_milestone)
milestones_api.add_course_content_milestone(self.course.id, self.seq1.location, 'fulfills', milestone)
self.assertFalse(gating_api.is_gate_fulfilled(self.course.id, self.seq1.location, student.id))
# complete the prerequisite to unlock the gated content
# this call triggers reevaluation of prerequisites fulfilled by the gating block.
with patch.object(gating_api, 'get_subsection_completion_percentage') as mock_grade:
mock_grade.return_value = learner_completion
lms_gating_api.evaluate_prerequisite(
self.course,
Mock(location=self.seq1.location, percent_graded=learner_score / 100.0),
student,
)
self.assertEqual(
gating_api.is_gate_fulfilled(self.course.id, self.seq1.location, student.id), is_gate_fulfilled
)
@data(
(1, 1, 100),
(0, 0, 0),
(1, 0, 100),
(0, 1, 0),
)
@unpack
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
def test_get_subsection_completion_percentage(self, user_problem_completion, user_html_completion,
expected_completion_percentage):
"""
Test if gating_api.get_subsection_completion_percentage returns expected completion percentage
Note:
            html blocks are ignored in computation of completion_percentage, so it should not affect the result.
"""
student = UserFactory(is_staff=False)
problem_block = ItemFactory.create(
parent_location=self.vertical.location,
category='problem',
display_name='some problem'
)
html_block = ItemFactory.create(
parent_location=self.vertical.location,
category='html',
display_name='some html block'
)
with patch.object(BlockCompletion, 'get_learning_context_completions') as course_block_completions_mock:
course_block_completions_mock.return_value = {
problem_block.location: user_problem_completion,
html_block.location: user_html_completion,
}
completion_percentage = gating_api.get_subsection_completion_percentage(self.seq1.location, student)
self.assertEqual(completion_percentage, expected_completion_percentage)
@data(
('discussion', None, 100),
('html', None, 100),
('html', 1, 100),
('problem', 1, 100),
('problem', 0, 0),
('openassessment', 1, 100),
('openassessment', 0, 0),
)
@unpack
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
def test_get_subsection_completion_percentage_single_component(
self,
component_type,
completed,
expected_completion_percentage
):
"""
Test if gating_api.get_subsection_completion_percentage returns expected completion percentage
when only a single component in a vertical/unit
Note:
html blocks and discussion blocks are ignored in calculations so should always return
100% complete
"""
student = UserFactory(is_staff=False)
component = ItemFactory.create(
parent_location=self.vertical.location,
category=component_type,
display_name=u'{} block'.format(component_type)
)
with patch.object(BlockCompletion, 'get_learning_context_completions') as course_block_completions_mock:
course_block_completions_mock.return_value = {
component.location: completed,
}
completion_percentage = gating_api.get_subsection_completion_percentage(self.seq1.location, student)
self.assertEqual(completion_percentage, expected_completion_percentage)
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
def test_compute_is_prereq_met(self):
"""
Test if prereq has been met and force recompute
"""
student = UserFactory(is_staff=False)
gating_api.add_prerequisite(self.course.id, self.seq1.location)
gating_api.set_required_content(self.course.id, self.seq2.location, self.seq1.location, 100, 0)
# complete the prerequisite to unlock the gated content
# this call triggers reevaluation of prerequisites fulfilled by the gating block.
with patch.object(gating_api, 'get_subsection_grade_percentage') as mock_grade:
mock_grade.return_value = 75
# don't force recompute
prereq_met, prereq_meta_info = gating_api.compute_is_prereq_met(self.seq2.location, student.id, False)
self.assertFalse(prereq_met)
self.assertIsNone(prereq_meta_info['url'])
self.assertIsNone(prereq_meta_info['display_name'])
# force recompute
prereq_met, prereq_meta_info = gating_api.compute_is_prereq_met(self.seq2.location, student.id, True)
self.assertFalse(prereq_met)
self.assertIsNotNone(prereq_meta_info['url'])
self.assertIsNotNone(prereq_meta_info['display_name'])
# change to passing grade
mock_grade.return_value = 100
# don't force recompute
prereq_met, prereq_meta_info = gating_api.compute_is_prereq_met(self.seq2.location, student.id, False)
self.assertFalse(prereq_met)
self.assertIsNone(prereq_meta_info['url'])
self.assertIsNone(prereq_meta_info['display_name'])
# force recompute
prereq_met, prereq_meta_info = gating_api.compute_is_prereq_met(self.seq2.location, student.id, True)
self.assertTrue(prereq_met)
self.assertIsNotNone(prereq_meta_info['url'])
self.assertIsNotNone(prereq_meta_info['display_name'])
class TestGatingGradesIntegration(GradeTestBase):
"""
Tests the integration between the gating API and our Persistent Grades framework.
"""
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
def test_get_subsection_grade_percentage(self):
user = self.request.user
subsection_key = self.sequence.location
with mock_get_score(3, 3):
# this update() call creates a persistent grade
self.subsection_grade_factory.update(self.sequence)
# it's important that we stay in the mock_get_score() context here,
# since get_subsection_grade_percentage() creates its own SubsectionGradeFactory,
# which will in turn make calls to get_score().
grade_percentage = gating_api.get_subsection_grade_percentage(subsection_key, user)
assert 100.0 == grade_percentage
@unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms')
def test_get_subsection_grade_percentage_with_override(self):
user = self.request.user
subsection_key = self.sequence.location
with mock_get_score(3, 3):
# this update() call creates a persistent grade
self.subsection_grade_factory.update(self.sequence)
# there should only be one persistent grade
persistent_grade = PersistentSubsectionGrade.objects.first()
PersistentSubsectionGradeOverride.update_or_create_override(
UserFactory(), # it doesn't matter to us who created the override
persistent_grade,
earned_graded_override=0,
earned_all_override=0,
possible_graded_override=3,
feature=GradeOverrideFeatureEnum.gradebook,
)
# it's important that we stay in the mock_get_score() context here,
# since get_subsection_grade_percentage() creates its own SubsectionGradeFactory,
# which will in turn make calls to get_score().
grade_percentage = gating_api.get_subsection_grade_percentage(subsection_key, user)
assert 0 == grade_percentage
| agpl-3.0 | 8,644,120,898,333,883,000 | 42.595 | 118 | 0.658046 | false |
autosportlabs/podium-api | podium_api/types/venue.py | 1 | 2122 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
class PodiumVenue(object):
"""
Object that represents a Venue.
    **Attributes:**
        **venue_id** (int): Venue Id
        **uri** (string): URI for the Venue.
        **events_uri** (string): URI for the Venue's events.
        **updated** (string): Timestamp of the last update.
        **created** (string): Creation timestamp.
        **name** (string): The Venue's name.
        **centerpoint**: Centerpoint of the Venue, as returned by the API.
        **country_code** (string): Country code for the Venue.
        **configuration**: Track configuration, as returned by the API.
        **track_map_array**: Track map points, as returned by the API.
        **start_finish**: Start/finish line location, as returned by the API.
        **finish**: Finish line location, as returned by the API.
        **sector_points**: Sector locations, as returned by the API.
        **length**: Track length, as returned by the API.
"""
def __init__(self, venue_id, uri, events_uri, updated, created,
name,
centerpoint,
country_code,
configuration,
track_map_array,
start_finish,
finish,
sector_points,
length
):
self.venue_id = venue_id
self.uri = uri
self.events_uri = events_uri
self.updated = updated
self.created = created
self.name = name
self.centerpoint = centerpoint
self.country_code = country_code
self.configuration = configuration
self.track_map_array = track_map_array
self.start_finish = start_finish
self.finish = finish
self.sector_points = sector_points
self.length = length
def get_venue_from_json(json):
"""
Returns a PodiumVenue object from the json dict received from podium api.
Args:
json (dict): Dict of data from REST api
Return:
PodiumVenue: The PodiumVenue object for the data.
"""
return PodiumVenue(json['id'],
json['URI'],
json['events_uri'],
json['updated'],
json['created'],
json.get('name', None),
json.get('centerpoint', None),
json.get('country_code', None),
json.get('configuration', None),
json.get('track_map_array', None),
json.get('start_finish', None),
json.get('finish', None),
json.get('sector_points', None),
json.get('length', None)
)
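# Illustrative use (payload values are hypothetical):
#
#   venue = get_venue_from_json({'id': 7, 'URI': '/api/v1/venues/7',
#                                'events_uri': '/api/v1/venues/7/events',
#                                'updated': '2016-01-01', 'created': '2016-01-01',
#                                'name': 'Test Track'})
#   venue.name  # -> 'Test Track'; missing optional fields default to None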
| mit | 6,597,366,962,457,001,000 | 30.646154 | 77 | 0.465598 | false |
vegetableman/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/steps_unittest.py | 121 | 5674 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.common.config.ports import DeprecatedPort
from webkitpy.tool.mocktool import MockOptions, MockTool
from webkitpy.tool import steps
class StepsTest(unittest.TestCase):
def _step_options(self):
options = MockOptions()
options.non_interactive = True
options.port = 'MOCK port'
options.quiet = True
options.test = True
return options
def _run_step(self, step, tool=None, options=None, state=None):
if not tool:
tool = MockTool()
if not options:
options = self._step_options()
if not state:
state = {}
step(tool, options).run(state)
def test_update_step(self):
tool = MockTool()
options = self._step_options()
options.update = True
expected_logs = "Updating working directory\n"
OutputCapture().assert_outputs(self, self._run_step, [steps.Update, tool, options], expected_logs=expected_logs)
def test_prompt_for_bug_or_title_step(self):
tool = MockTool()
tool.user.prompt = lambda message: 50000
self._run_step(steps.PromptForBugOrTitle, tool=tool)
def _post_diff_options(self):
options = self._step_options()
options.git_commit = None
options.description = None
options.comment = None
options.review = True
options.request_commit = False
options.open_bug = True
return options
def _assert_step_output_with_bug(self, step, bug_id, expected_logs, options=None):
state = {'bug_id': bug_id}
OutputCapture().assert_outputs(self, self._run_step, [step, MockTool(), options, state], expected_logs=expected_logs)
def _assert_post_diff_output_for_bug(self, step, bug_id, expected_logs):
self._assert_step_output_with_bug(step, bug_id, expected_logs, self._post_diff_options())
def test_post_diff(self):
expected_logs = "MOCK add_patch_to_bug: bug_id=78, description=Patch, mark_for_review=True, mark_for_commit_queue=False, mark_for_landing=False\nMOCK: user.open_url: http://example.com/78\n"
self._assert_post_diff_output_for_bug(steps.PostDiff, 78, expected_logs)
def test_post_diff_for_commit(self):
expected_logs = "MOCK add_patch_to_bug: bug_id=78, description=Patch for landing, mark_for_review=False, mark_for_commit_queue=False, mark_for_landing=True\n"
self._assert_post_diff_output_for_bug(steps.PostDiffForCommit, 78, expected_logs)
def test_ensure_bug_is_open_and_assigned(self):
expected_logs = "MOCK reopen_bug 50004 with comment 'Reopening to attach new patch.'\n"
self._assert_step_output_with_bug(steps.EnsureBugIsOpenAndAssigned, 50004, expected_logs)
expected_logs = "MOCK reassign_bug: bug_id=50002, assignee=None\n"
self._assert_step_output_with_bug(steps.EnsureBugIsOpenAndAssigned, 50002, expected_logs)
def test_runtests_args(self):
mock_options = self._step_options()
mock_options.non_interactive = False
tool = MockTool(log_executive=True)
# FIXME: We shouldn't use a real port-object here, but there is too much to mock at the moment.
tool._deprecated_port = DeprecatedPort()
step = steps.RunTests(tool, mock_options)
expected_logs = """Running Python unit tests
MOCK run_and_throw_if_fail: ['Tools/Scripts/test-webkitpy'], cwd=/mock-checkout
Running Perl unit tests
MOCK run_and_throw_if_fail: ['Tools/Scripts/test-webkitperl'], cwd=/mock-checkout
Running JavaScriptCore tests
MOCK run_and_throw_if_fail: ['Tools/Scripts/run-javascriptcore-tests'], cwd=/mock-checkout
Running bindings generation tests
MOCK run_and_throw_if_fail: ['Tools/Scripts/run-bindings-tests'], cwd=/mock-checkout
Running run-webkit-tests
MOCK run_and_throw_if_fail: ['Tools/Scripts/run-webkit-tests', '--quiet'], cwd=/mock-checkout
"""
OutputCapture().assert_outputs(self, step.run, [{}], expected_logs=expected_logs)
| bsd-3-clause | -3,226,710,740,719,380,000 | 47.495726 | 198 | 0.709552 | false |
demon-ru/iml-crm | addons/report_webkit/company.py | 431 | 2562 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
from openerp.osv import fields, osv
class res_company(osv.osv):
"""Override company to add Header object link a company can have many header and logos"""
_inherit = "res.company"
_columns = {
'header_image' : fields.many2many(
'ir.header_img',
'company_img_rel',
'company_id',
'img_id',
'Available Images',
),
'header_webkit' : fields.many2many(
'ir.header_webkit',
'company_html_rel',
'company_id',
'html_id',
'Available html',
),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -2,091,172,529,773,448,000 | 45.581818 | 93 | 0.510148 | false |
jkonecki/autorest | AutoRest/Generators/Python/Azure.Python.Tests/Expected/AcceptanceTests/StorageManagementClient/storagemanagementclient/models/custom_domain.py | 2 | 1213 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CustomDomain(Model):
"""
The custom domain assigned to this storage account. This can be set via
Update.
:param name: Gets or sets the custom domain name. Name is the CNAME
source.
:type name: str
:param use_sub_domain: Indicates whether indirect CName validation is
enabled. Default value is false. This should only be set on updates
:type use_sub_domain: bool
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'use_sub_domain': {'key': 'useSubDomain', 'type': 'bool'},
}
def __init__(self, name=None, use_sub_domain=None, **kwargs):
self.name = name
self.use_sub_domain = use_sub_domain
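    # _attribute_map drives msrest (de)serialization, so a hypothetical
    # CustomDomain(name='www.contoso.com', use_sub_domain=True) travels over
    # the wire under the JSON keys 'name' and 'useSubDomain'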
| mit | 5,302,520,000,750,764,000 | 33.657143 | 76 | 0.587799 | false |
pombreda/https-gitorious.org-appstream-software-center | softwarecenter/ui/gtk3/widgets/cellrenderers.py | 4 | 18064 | # Copyright (C) 2011 Canonical
#
# Authors:
# Matthew McGowan
# Michael Vogt
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from gi.repository import Gtk, Gdk, GObject, Pango
from softwarecenter.utils import utf8
from softwarecenter.ui.gtk3.em import EM
from softwarecenter.ui.gtk3.models.appstore2 import CategoryRowReference
from stars import StarRenderer, StarSize
class CellButtonIDs:
INFO = 0
ACTION = 1
# custom cell renderer to support dynamic grow
class CellRendererAppView(Gtk.CellRendererText):
# x, y offsets for the overlay icon
OVERLAY_XO = OVERLAY_YO = 2
# size of the install overlay icon
OVERLAY_SIZE = 16
# ratings
MAX_STARS = 5
STAR_SIZE = EM
# initialize declared properties (LP: #965937)
application = GObject.Property(
type=GObject.TYPE_PYOBJECT,
nick='document',
blurb='a xapian document containing pkg information',
flags=(GObject.PARAM_READWRITE | GObject.PARAM_CONSTRUCT),
default=None)
isactive = GObject.Property(
type=bool,
nick='isactive',
blurb='is cell active/selected',
flags=(GObject.PARAM_READWRITE | GObject.PARAM_CONSTRUCT),
default=False)
def __init__(self, icons, layout, show_ratings, overlay_icon_name):
Gtk.CellRendererText.__init__(self)
# the icon pixbuf to be displayed in the row
self.icon = None
# geometry-state values
self.pixbuf_width = 0
self.apptitle_width = 0
self.apptitle_height = 0
self.normal_height = 0
self.selected_height = 0
self.show_ratings = show_ratings
# button packing
self.button_spacing = 0
self._buttons = {
Gtk.PackType.START: [],
Gtk.PackType.END: []
}
self._all_buttons = {}
# cache a layout
self._layout = layout
# star painter, paints stars
self._stars = StarRenderer()
self._stars.size = StarSize.SMALL
# icon/overlay jazz
try:
self._installed = icons.load_icon(overlay_icon_name,
self.OVERLAY_SIZE, 0)
except GObject.GError:
# icon not present in theme, probably because running uninstalled
self._installed = icons.load_icon('emblem-system',
self.OVERLAY_SIZE, 0)
def _layout_get_pixel_width(self, layout):
return layout.get_size()[0] / Pango.SCALE
def _layout_get_pixel_height(self, layout):
return layout.get_size()[1] / Pango.SCALE
def _render_category(self,
context, cr, app, cell_area, layout, xpad, ypad, is_rtl):
layout.set_markup('<b>%s</b>' % app.display_name, -1)
# work out max allowable layout width
lw = self._layout_get_pixel_width(layout)
lh = self._layout_get_pixel_height(layout)
if not is_rtl:
x = cell_area.x
else:
x = cell_area.x + cell_area.width - lw
y = cell_area.y + (cell_area.height - lh) / 2
Gtk.render_layout(context, cr, x, y, layout)
def _render_price(self, context, cr, app, layout, cell_area, xpad, ypad,
is_rtl):
layout.set_markup("US$ %s" % self.model.get_price(app), -1)
if is_rtl:
x = cell_area.x + xpad
else:
x = (cell_area.x + cell_area.width - xpad -
self._layout_get_pixel_width(layout))
Gtk.render_layout(context, cr,
x, ypad + cell_area.y, layout)
def _render_icon(self, cr, app, cell_area, xpad, ypad, is_rtl):
# calc offsets so icon is nicely centered
self.icon = self.model.get_icon(app)
self.icon_x_offset = xpad + cell_area.x
self.icon_y_offset = ypad + cell_area.y
xo = (self.pixbuf_width - self.icon.get_width()) / 2
if not is_rtl:
x = cell_area.x + xo + xpad
else:
x = cell_area.x + cell_area.width + xo - self.pixbuf_width - xpad
y = cell_area.y + ypad
# draw appicon pixbuf
Gdk.cairo_set_source_pixbuf(cr, self.icon, x, y)
cr.paint()
# draw overlay if application is installed
if self.model.is_installed(app):
if not is_rtl:
x += (self.pixbuf_width - self.OVERLAY_SIZE + self.OVERLAY_XO)
else:
x -= self.OVERLAY_XO
y += (self.pixbuf_width - self.OVERLAY_SIZE + self.OVERLAY_YO)
Gdk.cairo_set_source_pixbuf(cr, self._installed, x, y)
cr.paint()
def _render_summary(self, context, cr, app,
cell_area, layout, xpad, ypad,
star_width, is_rtl):
layout.set_markup(self.model.get_markup(app), -1)
# work out max allowable layout width
layout.set_width(-1)
lw = self._layout_get_pixel_width(layout)
max_layout_width = cell_area.width - self.pixbuf_width - 3 * xpad
stats = self.model.get_review_stats(app)
if self.show_ratings and stats:
max_layout_width -= star_width + 6 * xpad
if (self.props.isactive and
self.model.get_transaction_progress(app) > 0):
action_btn = self.get_button_by_name(CellButtonIDs.ACTION)
max_layout_width -= (xpad + action_btn.width)
if lw >= max_layout_width:
layout.set_width((max_layout_width) * Pango.SCALE)
layout.set_ellipsize(Pango.EllipsizeMode.END)
lw = max_layout_width
apptitle_extents = layout.get_line_readonly(0).get_pixel_extents()[1]
self.apptitle_width = apptitle_extents.width
self.apptitle_height = apptitle_extents.height
if not is_rtl:
x = cell_area.x + 2 * xpad + self.pixbuf_width
else:
x = (cell_area.x + cell_area.width - lw - self.pixbuf_width -
2 * xpad)
y = cell_area.y + ypad
Gtk.render_layout(context, cr, x, y, layout)
def _render_rating(self, context, cr, app,
cell_area, layout, xpad, ypad,
star_width, star_height, is_rtl):
stats = self.model.get_review_stats(app)
if not stats:
return
sr = self._stars
if not is_rtl:
x = (cell_area.x + 3 * xpad + self.pixbuf_width +
self.apptitle_width)
else:
x = (cell_area.x + cell_area.width
- 3 * xpad
- self.pixbuf_width
- self.apptitle_width
- star_width)
y = cell_area.y + ypad + (self.apptitle_height - self.STAR_SIZE) / 2
sr.rating = stats.ratings_average
sr.render_star(context, cr, x, y)
# and nr-reviews in parenthesis to the right of the title
nreviews = stats.ratings_total
s = "(%i)" % nreviews
layout.set_markup("<small>%s</small>" % s, -1)
if not is_rtl:
x += xpad + star_width
else:
x -= xpad + self._layout_get_pixel_width(layout)
context.save()
context.add_class("cellrenderer-avgrating-label")
Gtk.render_layout(context, cr, x, y, layout)
context.restore()
def _render_progress(self, context, cr, progress, cell_area, ypad, is_rtl):
percent = progress * 0.01
# per the spec, the progressbar should be the width of the action
# button
action_btn = self.get_button_by_name(CellButtonIDs.ACTION)
x, _, w, h = action_btn.allocation
# shift the bar to the top edge
y = cell_area.y + ypad
context.save()
context.add_class("trough")
Gtk.render_background(context, cr, x, y, w, h)
Gtk.render_frame(context, cr, x, y, w, h)
context.restore()
bar_size = w * percent
context.save()
context.add_class("progressbar")
if (bar_size > 0):
if is_rtl:
x += (w - bar_size)
Gtk.render_activity(context, cr, x, y, bar_size, h)
context.restore()
def _render_buttons(self, context, cr, cell_area, layout, xpad, ypad,
is_rtl):
# layout buttons and paint
y = cell_area.y + cell_area.height - ypad
spacing = self.button_spacing
if not is_rtl:
start = Gtk.PackType.START
end = Gtk.PackType.END
xs = cell_area.x + 2 * xpad + self.pixbuf_width
xb = cell_area.x + cell_area.width - xpad
else:
start = Gtk.PackType.END
end = Gtk.PackType.START
xs = cell_area.x + xpad
xb = cell_area.x + cell_area.width - 2 * xpad - self.pixbuf_width
for btn in self._buttons[start]:
btn.set_position(xs, y - btn.height)
btn.render(context, cr, layout)
xs += btn.width + spacing
for btn in self._buttons[end]:
xb -= btn.width
btn.set_position(xb, y - btn.height)
btn.render(context, cr, layout)
xb -= spacing
def set_pixbuf_width(self, w):
self.pixbuf_width = w
def set_button_spacing(self, spacing):
self.button_spacing = spacing
def get_button_by_name(self, name):
if name in self._all_buttons:
return self._all_buttons[name]
def get_buttons(self):
btns = ()
for k, v in self._buttons.items():
btns += tuple(v)
return btns
def button_pack(self, btn, pack_type=Gtk.PackType.START):
self._buttons[pack_type].append(btn)
self._all_buttons[btn.name] = btn
def button_pack_start(self, btn):
self.button_pack(btn, Gtk.PackType.START)
def button_pack_end(self, btn):
self.button_pack(btn, Gtk.PackType.END)
def do_set_property(self, pspec, value):
setattr(self, pspec.name, value)
def do_get_property(self, pspec):
return getattr(self, pspec.name)
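    # GTK sizes rows through this vfunc; reporting a taller height for the
    # active cell is what makes the selected row grow to fit its buttons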
def do_get_preferred_height_for_width(self, treeview, width):
if not self.get_properties("isactive")[0]:
return self.normal_height, self.normal_height
return self.selected_height, self.selected_height
def do_render(self, cr, widget, bg_area, cell_area, flags):
app = self.props.application
if not app:
return
self.model = widget.appmodel
context = widget.get_style_context()
xpad = self.get_property('xpad')
ypad = self.get_property('ypad')
star_width, star_height = self._stars.get_visible_size(context)
is_rtl = widget.get_direction() == Gtk.TextDirection.RTL
layout = self._layout
# important! ensures correct text rendering, esp. when using hicolor
# theme
#~ if (flags & Gtk.CellRendererState.SELECTED) != 0:
#~ # this follows the behaviour that gtk+ uses for states in
#~ # treeviews
#~ if widget.has_focus():
#~ state = Gtk.StateFlags.SELECTED
#~ else:
#~ state = Gtk.StateFlags.ACTIVE
#~ else:
#~ state = Gtk.StateFlags.NORMAL
context.save()
#~ context.set_state(state)
if isinstance(app, CategoryRowReference):
self._render_category(context, cr, app,
cell_area,
layout,
xpad, ypad,
is_rtl)
return
self._render_icon(cr, app,
cell_area,
xpad, ypad,
is_rtl)
self._render_summary(context, cr, app,
cell_area,
layout,
xpad, ypad,
star_width,
is_rtl)
# only show ratings if we have one
if self.show_ratings:
self._render_rating(context, cr, app,
cell_area,
layout,
xpad, ypad,
star_width,
star_height,
is_rtl)
progress = self.model.get_transaction_progress(app)
if progress > 0:
self._render_progress(context, cr, progress,
cell_area,
ypad,
is_rtl)
elif self.model.is_purchasable(app):
self._render_price(context, cr, app, layout,
cell_area, xpad, ypad, is_rtl)
# below is the stuff that is only done for the active cell
if not self.props.isactive:
return
self._render_buttons(context, cr,
cell_area,
layout,
xpad, ypad,
is_rtl)
context.restore()
class CellButtonRenderer(object):
def __init__(self, widget, name, use_max_variant_width=True):
# use_max_variant_width is currently ignored. assumed to be True
self.name = name
self.markup_variants = {}
self.current_variant = None
self.xpad = 12
self.ypad = 4
self.allocation = [0, 0, 1, 1]
self.state = Gtk.StateFlags.NORMAL
self.has_focus = False
self.visible = True
self.widget = widget
def _layout_reset(self, layout):
layout.set_width(-1)
layout.set_ellipsize(Pango.EllipsizeMode.NONE)
@property
def x(self):
return self.allocation[0]
@property
def y(self):
return self.allocation[1]
@property
def width(self):
return self.allocation[2]
@property
def height(self):
return self.allocation[3]
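    # size the button to its largest markup variant so that switching
    # variants (e.g. between "Install" and "Remove" labels) never resizes it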
def configure_geometry(self, layout):
self._layout_reset(layout)
max_size = (0, 0)
for k, variant in self.markup_variants.items():
safe_markup = GObject.markup_escape_text(utf8(variant))
layout.set_markup(safe_markup, -1)
size = layout.get_size()
max_size = max(max_size, size)
w, h = max_size
w /= Pango.SCALE
h /= Pango.SCALE
self.set_size(w + 2 * self.xpad, h + 2 * self.ypad)
def point_in(self, px, py):
x, y, w, h = self.allocation
return (px >= x and px <= x + w and
py >= y and py <= y + h)
def get_size(self):
return self.allocation[2:]
def set_position(self, x, y):
self.allocation[:2] = int(x), int(y)
def set_size(self, w, h):
self.allocation[2:] = int(w), int(h)
def set_state(self, state):
if not isinstance(state, Gtk.StateFlags):
msg = ("state should be of type Gtk.StateFlags, got %s" %
type(state))
raise TypeError(msg)
elif state == self.state:
return
self.state = state
self.widget.queue_draw_area(*self.allocation)
def set_sensitive(self, is_sensitive):
if is_sensitive:
state = Gtk.StateFlags.PRELIGHT
else:
state = Gtk.StateFlags.INSENSITIVE
self.set_state(state)
def show(self):
self.visible = True
def hide(self):
self.visible = False
    def set_markup(self, markup):
        # store the single markup as the sole variant so geometry and
        # rendering treat it exactly like the multi-variant case
        self.set_markup_variants({0: markup})
def set_markup_variants(self, markup_variants):
if not isinstance(markup_variants, dict):
msg = type(markup_variants)
raise TypeError("Expects a dict object, got %s" % msg)
elif not markup_variants:
return
self.markup_variants = markup_variants
self.current_variant = markup_variants.keys()[0]
def set_variant(self, current_var):
self.current_variant = current_var
def is_sensitive(self):
return self.state is not Gtk.StateFlags.INSENSITIVE
def render(self, context, cr, layout):
if not self.visible:
return
x, y, width, height = self.allocation
context.save()
context.add_class("cellrenderer-button")
if self.has_focus:
context.set_state(self.state | Gtk.StateFlags.FOCUSED)
else:
context.set_state(self.state)
# render background and focal frame if has-focus
context.save()
context.add_class(Gtk.STYLE_CLASS_BUTTON)
Gtk.render_background(context, cr, x, y, width, height)
context.restore()
if self.has_focus:
Gtk.render_focus(context, cr,
x + 3, y + 3,
width - 6, height - 6)
# position and render layout markup
context.save()
context.add_class(Gtk.STYLE_CLASS_BUTTON)
layout.set_markup(self.markup_variants[self.current_variant], -1)
layout_width = layout.get_pixel_extents()[1].width
x = x + (width - layout_width) / 2
y += self.ypad
Gtk.render_layout(context, cr, x, y, layout)
context.restore()
context.restore()
| gpl-3.0 | 7,838,097,763,595,269,000 | 30.470383 | 79 | 0.549878 | false |
tx137884746/IzayoiMiku | toughradius/tools/livecd.py | 4 | 4933 | #!/usr/bin/env python
#coding:utf-8
from toughradius.tools.secret import gen_secret
def echo_radiusd_cnf():
return '''[DEFAULT]
debug = 0
tz = CST-8
secret = %s
ssl = 1
privatekey = /var/toughradius/privkey.pem
certificate = /var/toughradius/cacert.pem
[database]
dbtype = mysql
# NOTE: credentials/host below are livecd defaults; change them in production
dburl = mysql://radiusd:radiusd@127.0.0.1/toughradius?charset=utf8
echo = false
pool_size = 120
pool_recycle = 300
[radiusd]
acctport = 1813
adminport = 1815
authport = 1812
cache_timeout = 600
logfile = /var/toughradius/log/radiusd.log
[admin]
port = 1816
logfile = /var/toughradius/log/admin.log
[customer]
port = 1817
logfile = /var/toughradius/log/customer.log
'''%gen_secret(32)
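# Usage sketch (target path illustrative): the echo_* helpers only build
# text, so a livecd provisioning step would persist them, e.g.:
#
#   with open('/etc/radiusd.conf', 'w') as conf_file:
#       conf_file.write(echo_radiusd_cnf())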
def echo_privkey_pem():
return '''-----BEGIN RSA PRIVATE KEY-----
MIIBPAIBAAJBAK+a5EAeEZFJdpwmMdgexCvE/x5HpsSvkyx+CFt9MDI8Gx9sXTsQ
hn+Satm4bNKq9+0yarGL1MoVoXCmzMkv++0CAwEAAQJBAJel139XeCxTmM54XYsZ
5qc11Gs9zVMFnL9Lh8QadEisGBoLNVGRKspVuR21pf9yWK1APJYtxeY+ElxTeN6v
frECIQDlXCN0ZLF2IBOUbOAEBnBEzYA19cnpktaD1EyeD1bpOwIhAMQAY3R+suNO
JE1MvE/g6ICAQVCDeiSW0JBUHbpXT5z3AiBakZqygHyPD7WLm76N+Fjm4lspc6hK
oqAwqGmk1JvWNwIhAJicyNPLV1S/4mpB5pq3v7FWrASZ6wAUYh8PL/qIw1evAiEA
sS5pdElUCN0d7/EdoOPBmEAJL7RHs6SjYEihK5ds4TQ=
-----END RSA PRIVATE KEY-----'''
def echo_cacert_pem():
return '''-----BEGIN CERTIFICATE-----
MIIDTDCCAvagAwIBAgIJAMZsf8cd/CUeMA0GCSqGSIb3DQEBBQUAMIGiMQswCQYD
VQQGEwJDTjEOMAwGA1UECBMFSHVuYW4xETAPBgNVBAcTCENoYW5nc2hhMRgwFgYD
VQQKEw90b3VnaHJhZGl1cy5uZXQxFDASBgNVBAsTC3RvdWdocmFkaXVzMRgwFgYD
VQQDEw90b3VnaHJhZGl1cy5uZXQxJjAkBgkqhkiG9w0BCQEWF3N1cHBvcnRAdG91
Z2hyYWRpdXMubmV0MB4XDTE1MDMxODE2MTg1N1oXDTIwMTAyNTE2MTg1N1owgaIx
CzAJBgNVBAYTAkNOMQ4wDAYDVQQIEwVIdW5hbjERMA8GA1UEBxMIQ2hhbmdzaGEx
GDAWBgNVBAoTD3RvdWdocmFkaXVzLm5ldDEUMBIGA1UECxMLdG91Z2hyYWRpdXMx
GDAWBgNVBAMTD3RvdWdocmFkaXVzLm5ldDEmMCQGCSqGSIb3DQEJARYXc3VwcG9y
dEB0b3VnaHJhZGl1cy5uZXQwXDANBgkqhkiG9w0BAQEFAANLADBIAkEAr5rkQB4R
kUl2nCYx2B7EK8T/HkemxK+TLH4IW30wMjwbH2xdOxCGf5Jq2bhs0qr37TJqsYvU
yhWhcKbMyS/77QIDAQABo4IBCzCCAQcwHQYDVR0OBBYEFK9UjaxgsGyDZqfLEGUl
zYUhZqyzMIHXBgNVHSMEgc8wgcyAFK9UjaxgsGyDZqfLEGUlzYUhZqyzoYGopIGl
MIGiMQswCQYDVQQGEwJDTjEOMAwGA1UECBMFSHVuYW4xETAPBgNVBAcTCENoYW5n
c2hhMRgwFgYDVQQKEw90b3VnaHJhZGl1cy5uZXQxFDASBgNVBAsTC3RvdWdocmFk
aXVzMRgwFgYDVQQDEw90b3VnaHJhZGl1cy5uZXQxJjAkBgkqhkiG9w0BCQEWF3N1
cHBvcnRAdG91Z2hyYWRpdXMubmV0ggkAxmx/xx38JR4wDAYDVR0TBAUwAwEB/zAN
BgkqhkiG9w0BAQUFAANBAF2J27T8NnXptROTUx7IKU3MIBGvRqj6imtwjsus6fQU
GOLwDVfVEaqmv6YE6jg5ummEfeIcwUfkD5fLgrfRQ9s=
-----END CERTIFICATE-----'''
def echo_radiusd_script():
return '''#!/bin/sh
### BEGIN INIT INFO
# Provides: radiusd
# Required-Start: $all
# Required-Stop:
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: starts the radiusd daemon
# Description: starts toughradius using start-stop-daemon
### END INIT INFO
export PATH=$PATH:/usr/local/bin
set -e
set -u
usage ()
{
cat <<EOF
Usage: $0 [OPTIONS]
start start toughradius
stop stop toughradius
restart restart toughradius,
upgrade update toughradius version and restart
All other options are passed to the toughradius program.
EOF
exit 1
}
start()
{
toughctl --start all
}
stop()
{
toughctl --stop all
}
restart()
{
toughctl --restart all
}
upgrade()
{
echo 'starting upgrade...'
pip install -U https://github.com/talkincode/ToughRADIUS/archive/stable.zip
echo 'upgrade done'
}
case "$1" in
help)
usage
;;
start)
start
;;
stop)
stop
;;
restart)
restart
;;
upgrade)
upgrade
;;
*)
usage
;;
esac
exit 0
'''
def echo_mysql_cnf():
return '''[client]
port = 3306
socket = /var/run/mysqld/mysqld.sock
[mysqld_safe]
socket = /var/run/mysqld/mysqld.sock
nice = 0
[mysqld]
user = mysql
pid-file = /var/run/mysqld/mysqld.pid
socket = /var/run/mysqld/mysqld.sock
port = 3306
basedir = /usr
datadir = /var/lib/mysql
tmpdir = /tmp
lc-messages-dir = /usr/share/mysql
skip-external-locking
bind-address = 127.0.0.1
key_buffer = 16M
max_allowed_packet = 16M
thread_stack = 192K
thread_cache_size = 8
myisam-recover = BACKUP
max_connections = 1000
table_cache = 512
#thread_concurrency = 8
#
# * Query Cache Configuration
#
query_cache_limit = 4M
query_cache_size = 64M
server-id = 1
log_bin = /var/log/mysql/mysql-bin.log
expire_logs_days = 10
max_binlog_size = 100M
#
# * InnoDB
#
innodb_buffer_pool_size = 256M
innodb_data_file_path = ibdata1:16M:autoextend
innodb_additional_mem_pool_size = 16M
innodb_thread_concurrency = 8
innodb_flush_log_at_trx_commit = 1
innodb_log_buffer_size = 8M
innodb_log_file_size = 128M
log-error=/var/log/mysqld.log
[mysqldump]
quick
quote-names
max_allowed_packet = 64M
[mysql]
#no-auto-rehash # faster start of mysql but no tab completition
[isamchk]
key_buffer = 16M
!includedir /etc/mysql/conf.d/
''' | agpl-3.0 | 7,071,285,846,787,794,000 | 20.928889 | 79 | 0.750659 | false |
shumik/skencil-c | Sketch/UI/gradientedit.py | 1 | 9796 | # Sketch - A Python-based interactive drawing program
# Copyright (C) 1998, 1999, 2000, 2002 by Bernhard Herzog
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import PIL.Image
import X, pax
from Sketch import _, Publisher, SketchError, _sketch
from Sketch import Blend, CreateRGBColor, MultiGradient
from Sketch.const import DROP_COLOR
from Sketch.warn import pdebug
from Sketch.Graphics import color
from Tkinter import Frame, Button
from Tkinter import BOTTOM, LEFT, RIGHT, BOTH
from tkext import PyWidget, MenuCommand, UpdatedMenu
import tkext
from colordlg import GetColor
from sketchdlg import SKModal
import skpixmaps
pixmaps = skpixmaps.PixmapTk
handle_height = 8
class GradientView(PyWidget, Publisher):
accept_drop = (DROP_COLOR,)
def __init__(self, master, width, height, gradient, **kw):
image = PIL.Image.new('RGB', (width, height))
self.orig_x = handle_height / 2
if not kw.has_key('width'):
kw["width"] = width + handle_height
if not kw.has_key('height'):
kw["height"] = height + handle_height
apply(PyWidget.__init__, (self, master), kw)
self.set_gradient(gradient)
self.update_pending = 0
self.dragging = 0
self.drag_idx = 0
self.drag_start = 0
self.drag_min = self.drag_max = 0.0
self.gc_initialized = 0
self.image = image
self.ximage = None
self.context_menu = None
self.bind('<ButtonPress-3>', self.PopupContextMenu)
self.bind('<ButtonPress>', self.ButtonPressEvent)
self.bind('<Motion>', self.PointerMotionEvent)
self.bind('<ButtonRelease>', self.ButtonReleaseEvent)
def __del__(self):
pdebug('__del__', '__del__', self)
def MapMethod(self):
if not self.gc_initialized:
self.init_gc()
self.tk.call(self._w, 'motionhints')
self.gc_initialized = 1
def DestroyMethod(self):
if self.context_menu is not None:
self.context_menu.clean_up()
self.context_menu = None
PyWidget.DestroyMethod(self)
def init_gc(self):
self.gc = self.tkwin.GetGC()
self.visual = color.skvisual
w = self.tkwin
width, height = self.image.size
depth = self.visual.depth
if depth > 16:
bpl = 4 * width
elif depth > 8:
bpl = ((2 * width + 3) / 4) * 4
elif depth == 8:
bpl = ((width + 3) / 4) * 4
else:
raise SketchError('unsupported depth for images')
self.ximage = w.CreateImage(depth, X.ZPixmap, 0, None, width, height,
32, bpl)
self.set_image(self.image)
def set_image(self, image):
self.image = image
if self.ximage:
ximage = self.ximage
_sketch.copy_image_to_ximage(self.visual, image.im, ximage,
0, 0, ximage.width, ximage.height)
self.UpdateWhenIdle()
def ResizedMethod(self, width, height):
pass
def set_gradient(self, gradient):
gradient = gradient.Colors()
self.gradient = []
for pos, color in gradient:
self.gradient.append((pos, tuple(color)))
def reverse(self):
for i in range(len(self.gradient)):
            self.gradient[i] = (1 - self.gradient[i][0], self.gradient[i][1])
self.gradient.reverse()
self.UpdateWhenIdle()
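    # hit-test: return the index of the handle whose center lies within half
    # a handle-width of x, or -1 when no handle is under the pointer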
def x_to_idx(self, x):
width = self.ximage.width
w2 = handle_height / 2
orig_x = self.orig_x
for i in range(len(self.gradient)):
if abs(x - orig_x - self.gradient[i][0] * width) < w2:
return i
return -1
def ButtonPressEvent(self, event):
if not self.dragging:
self.drag_idx = self.x_to_idx(event.x)
if self.drag_idx < 0:
return
if self.drag_idx == 0:
self.gradient.insert(0, self.gradient[0])
self.drag_idx = self.drag_idx + 1
if self.drag_idx == len(self.gradient) - 1:
self.gradient.append(self.gradient[-1])
self.drag_start = event.x, self.gradient[self.drag_idx][0]
if self.drag_idx > 0:
self.drag_min = self.gradient[self.drag_idx - 1][0]
else:
self.drag_min = 0.0
if self.drag_idx < len(self.gradient) - 1:
self.drag_max = self.gradient[self.drag_idx + 1][0]
else:
self.drag_max = 1.0
self.dragging = self.dragging + 1
def ButtonReleaseEvent(self, event):
if self.dragging:
self.dragging = self.dragging - 1
self.move_to(event.x)
if self.drag_idx == 1 and \
self.gradient[0][0] == self.gradient[1][0]:
del self.gradient[0]
elif self.drag_idx == len(self.gradient) - 2 and \
self.gradient[-1][0] == self.gradient[-2][0]:
del self.gradient[-1]
def PointerMotionEvent(self, event):
if self.dragging:
x = self.tkwin.QueryPointer()[4]
self.move_to(x)
def move_to(self, x):
start_x, start_pos = self.drag_start
pos = x - start_x + start_pos * self.ximage.width
pos = float(pos) / self.ximage.width
if pos < self.drag_min:
pos = self.drag_min
if pos > self.drag_max:
pos = self.drag_max
color = self.gradient[self.drag_idx][-1]
self.gradient[self.drag_idx] = (pos, color)
self.UpdateWhenIdle()
def PopupContextMenu(self, event):
self.context_idx = self.x_to_idx(event.x)
self.context_pos = (event.x - self.orig_x) / float(self.ximage.width)
if self.context_menu is None:
items = [MenuCommand(_("Set Handle Color"), self.set_handle_color,
sensitivecb = self.can_set_handle_color),
MenuCommand(_("Delete Handle"), self.delete_handle,
sensitivecb = self.can_delete_handle),
MenuCommand(_("Insert Handle"), self.insert_handle,
sensitivecb = self.can_insert_handle)]
self.context_menu = UpdatedMenu(self, items)
self.context_menu.Popup(event.x_root, event.y_root)
def delete_handle(self):
if 0 < self.context_idx < len(self.gradient) - 1:
del self.gradient[self.context_idx]
self.UpdateWhenIdle()
def can_delete_handle(self):
return 0 < self.context_idx < len(self.gradient) - 1
def insert_handle(self):
gradient = self.gradient
pos = self.context_pos
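        # blend the two neighbouring stops in proportion to where the click
        # falls between them, so the new handle starts out "invisible"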
if 0.0 <= pos <= 1.0:
for i in range(len(gradient) - 1):
if gradient[i][0] < pos < gradient[i + 1][0]:
p1, c1 = gradient[i]
p2, c2 = gradient[i + 1]
color = Blend(apply(CreateRGBColor, c2),
apply(CreateRGBColor, c1),
(pos - p1) / (p2 - p1))
gradient.insert(i + 1, (pos, tuple(color)))
self.UpdateWhenIdle()
break
def can_insert_handle(self):
return self.context_idx < 0 and 0.0 <= self.context_pos <= 1.0
def set_handle_color(self):
if self.context_idx >= 0:
pos, color = self.gradient[self.context_idx]
color = GetColor(self, apply(CreateRGBColor, color))
if color is not None:
self.gradient[self.context_idx] = (pos, tuple(color))
self.UpdateWhenIdle()
def can_set_handle_color(self):
return self.context_idx >= 0
def update_gradient(self):
_sketch.fill_axial_gradient(self.image.im, self.gradient,
0, 0, self.image.size[0] - 1, 0)
self.set_image(self.image)
def UpdateWhenIdle(self):
if not self.update_pending:
self.update_pending = 1
PyWidget.UpdateWhenIdle(self)
def RedrawMethod(self, region = None):
if self.update_pending:
self.update_gradient()
self.update_pending = 0
pixmap = self.tkwin.CreatePixmap()
width = self.ximage.width
height = self.ximage.height
startx = handle_height / 2
self.gc.SetDrawable(pixmap)
self.tkborder.Fill3DRectangle(pixmap, 0, 0,
self.tkwin.width, self.tkwin.height,
0, pax.TK_RELIEF_FLAT)
self.gc.PutImage(self.ximage, 0, 0, startx, 0, width, height)
border = self.tkborder
win = self.tkwin
w2 = handle_height / 2
bot = handle_height + height
for pos in self.gradient:
pos = pos[0]
x = int(pos * width) + startx
poly = [(x - w2, bot), (x, height), (x + w2, bot)]
border.Draw3DPolygon(pixmap, poly, -2, pax.TK_RELIEF_SUNKEN)
self.gc.SetDrawable(self.tkwin)
pixmap.CopyArea(self.tkwin, self.gc, 0, 0,
self.tkwin.width, self.tkwin.height, 0, 0)
def DropAt(self, x, y, what, data):
if what == DROP_COLOR:
idx = self.x_to_idx(x)
if idx >= 0:
pos, color = self.gradient[idx]
self.gradient[idx] = (pos, tuple(data))
self.UpdateWhenIdle()
def GetGradient(self):
result = []
for pos, color in self.gradient:
result.append((pos, apply(CreateRGBColor, color)))
return MultiGradient(result)
gradient_size = (200, 10)
class EditGradientDlg(SKModal):
title = _("Edit Gradient")
def __init__(self, master, gradient, **kw):
self.gradient = gradient
apply(SKModal.__init__, (self, master), kw)
def build_dlg(self):
top = self.top
frame = Frame(top)
frame.pack(side = BOTTOM, fill = BOTH, expand = 1)
button = Button(frame, text = _("Reverse"), command = self.reverse)
button.pack(side = LEFT, expand = 1)
button = Button(frame, text = _("OK"), command = self.ok)
button.pack(side = LEFT, expand = 1)
button = Button(frame, text = _("Cancel"), command = self.cancel)
button.pack(side = RIGHT, expand = 1)
view = GradientView(top, gradient_size[0], gradient_size[1],
self.gradient)
view.pack(side = LEFT)
self.gradient_view = view
def reverse(self, *args):
self.gradient_view.reverse()
def ok(self, *args):
self.close_dlg(self.gradient_view.GetGradient())
def EditGradient(master, gradient):
dlg = EditGradientDlg(master, gradient)
return dlg.RunDialog(grab = 0)
| gpl-2.0 | -2,294,719,440,032,112,400 | 29.141538 | 74 | 0.667211 | false |
MrNuggles/HeyBoet-Telegram-Bot | temboo/Library/Utilities/DataConversions/XMLToXLS.py | 5 | 2894 | # -*- coding: utf-8 -*-
###############################################################################
#
# XMLToXLS
# Converts an XML file to a Base64 encoded Excel file.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class XMLToXLS(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the XMLToXLS Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(XMLToXLS, self).__init__(temboo_session, '/Library/Utilities/DataConversions/XMLToXLS')
def new_input_set(self):
return XMLToXLSInputSet()
def _make_result_set(self, result, path):
return XMLToXLSResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return XMLToXLSChoreographyExecution(session, exec_id, path)
class XMLToXLSInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the XMLToXLS
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_XML(self, value):
"""
Set the value of the XML input for this Choreo. ((required, xml) The XML file you want to convert to XLS format. See documentation for information on the required XML schema.)
"""
super(XMLToXLSInputSet, self)._set_input('XML', value)
class XMLToXLSResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the XMLToXLS Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_XLS(self):
"""
        Retrieve the value for the "XLS" output from this Choreo execution. (The Base64 encoded Excel data.)
"""
return self._output.get('XLS', None)
class XMLToXLSChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return XMLToXLSResultSet(response, path)
| gpl-3.0 | 7,555,716,233,608,682,000 | 34.292683 | 183 | 0.671044 | false |
umitproject/openmonitor-aggregator | simplejson/tests/test_fail.py | 136 | 3555 | from unittest import TestCase
import simplejson as json
# Fri Dec 30 18:57:26 2005
JSONDOCS = [
# http://json.org/JSON_checker/test/fail1.json
'"A JSON payload should be an object or array, not a string."',
# http://json.org/JSON_checker/test/fail2.json
'["Unclosed array"',
# http://json.org/JSON_checker/test/fail3.json
'{unquoted_key: "keys must be quoted}',
# http://json.org/JSON_checker/test/fail4.json
'["extra comma",]',
# http://json.org/JSON_checker/test/fail5.json
'["double extra comma",,]',
# http://json.org/JSON_checker/test/fail6.json
'[ , "<-- missing value"]',
# http://json.org/JSON_checker/test/fail7.json
'["Comma after the close"],',
# http://json.org/JSON_checker/test/fail8.json
'["Extra close"]]',
# http://json.org/JSON_checker/test/fail9.json
'{"Extra comma": true,}',
# http://json.org/JSON_checker/test/fail10.json
'{"Extra value after close": true} "misplaced quoted value"',
# http://json.org/JSON_checker/test/fail11.json
'{"Illegal expression": 1 + 2}',
# http://json.org/JSON_checker/test/fail12.json
'{"Illegal invocation": alert()}',
# http://json.org/JSON_checker/test/fail13.json
'{"Numbers cannot have leading zeroes": 013}',
# http://json.org/JSON_checker/test/fail14.json
'{"Numbers cannot be hex": 0x14}',
# http://json.org/JSON_checker/test/fail15.json
'["Illegal backslash escape: \\x15"]',
# http://json.org/JSON_checker/test/fail16.json
'["Illegal backslash escape: \\\'"]',
# http://json.org/JSON_checker/test/fail17.json
'["Illegal backslash escape: \\017"]',
# http://json.org/JSON_checker/test/fail18.json
'[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]',
# http://json.org/JSON_checker/test/fail19.json
'{"Missing colon" null}',
# http://json.org/JSON_checker/test/fail20.json
'{"Double colon":: null}',
# http://json.org/JSON_checker/test/fail21.json
'{"Comma instead of colon", null}',
# http://json.org/JSON_checker/test/fail22.json
'["Colon instead of comma": false]',
# http://json.org/JSON_checker/test/fail23.json
'["Bad value", truth]',
# http://json.org/JSON_checker/test/fail24.json
"['single quote']",
# http://code.google.com/p/simplejson/issues/detail?id=3
u'["A\u001FZ control characters in string"]',
]
SKIPS = {
1: "why not have a string payload?",
18: "spec doesn't specify any nesting limitations",
}
class TestFail(TestCase):
def test_failures(self):
for idx, doc in enumerate(JSONDOCS):
idx = idx + 1
if idx in SKIPS:
json.loads(doc)
continue
try:
json.loads(doc)
except json.JSONDecodeError:
pass
else:
#self.fail("Expected failure for fail{0}.json: {1!r}".format(idx, doc))
self.fail("Expected failure for fail%d.json: %r" % (idx, doc))
def test_array_decoder_issue46(self):
# http://code.google.com/p/simplejson/issues/detail?id=46
for doc in [u'[,]', '[,]']:
try:
json.loads(doc)
except json.JSONDecodeError, e:
self.assertEquals(e.pos, 1)
self.assertEquals(e.lineno, 1)
self.assertEquals(e.colno, 1)
except Exception, e:
self.fail("Unexpected exception raised %r %s" % (e, e))
else:
self.fail("Unexpected success parsing '[,]'") | agpl-3.0 | 9,055,653,390,019,539,000 | 38.076923 | 87 | 0.587342 | false |
sadmansk/servo | components/script/dom/bindings/codegen/parser/tests/test_constructor.py | 23 | 9627 | import WebIDL
def WebIDLTest(parser, harness):
def checkArgument(argument, QName, name, type, optional, variadic):
harness.ok(isinstance(argument, WebIDL.IDLArgument),
"Should be an IDLArgument")
harness.check(argument.identifier.QName(), QName, "Argument has the right QName")
harness.check(argument.identifier.name, name, "Argument has the right name")
harness.check(str(argument.type), type, "Argument has the right return type")
harness.check(argument.optional, optional, "Argument has the right optional value")
harness.check(argument.variadic, variadic, "Argument has the right variadic value")
def checkMethod(method, QName, name, signatures,
static=True, getter=False, setter=False,
deleter=False, legacycaller=False, stringifier=False,
chromeOnly=False, htmlConstructor=False):
harness.ok(isinstance(method, WebIDL.IDLMethod),
"Should be an IDLMethod")
harness.ok(method.isMethod(), "Method is a method")
harness.ok(not method.isAttr(), "Method is not an attr")
harness.ok(not method.isConst(), "Method is not a const")
harness.check(method.identifier.QName(), QName, "Method has the right QName")
harness.check(method.identifier.name, name, "Method has the right name")
harness.check(method.isStatic(), static, "Method has the correct static value")
harness.check(method.isGetter(), getter, "Method has the correct getter value")
harness.check(method.isSetter(), setter, "Method has the correct setter value")
harness.check(method.isDeleter(), deleter, "Method has the correct deleter value")
harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value")
harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value")
harness.check(method.getExtendedAttribute("ChromeOnly") is not None, chromeOnly, "Method has the correct value for ChromeOnly")
harness.check(method.isHTMLConstructor(), htmlConstructor, "Method has the correct htmlConstructor value")
harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures")
sigpairs = zip(method.signatures(), signatures)
for (gotSignature, expectedSignature) in sigpairs:
(gotRetType, gotArgs) = gotSignature
(expectedRetType, expectedArgs) = expectedSignature
harness.check(str(gotRetType), expectedRetType,
"Method has the expected return type.")
for i in range(0, len(gotArgs)):
(QName, name, type, optional, variadic) = expectedArgs[i]
checkArgument(gotArgs[i], QName, name, type, optional, variadic)
parser.parse("""
[Constructor]
interface TestConstructorNoArgs {
};
[Constructor(DOMString name)]
interface TestConstructorWithArgs {
};
[Constructor(object foo), Constructor(boolean bar)]
interface TestConstructorOverloads {
};
""")
results = parser.finish()
harness.check(len(results), 3, "Should be three productions")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
harness.ok(isinstance(results[1], WebIDL.IDLInterface),
"Should be an IDLInterface")
harness.ok(isinstance(results[2], WebIDL.IDLInterface),
"Should be an IDLInterface")
checkMethod(results[0].ctor(), "::TestConstructorNoArgs::constructor",
"constructor", [("TestConstructorNoArgs (Wrapper)", [])])
checkMethod(results[1].ctor(), "::TestConstructorWithArgs::constructor",
"constructor",
[("TestConstructorWithArgs (Wrapper)",
[("::TestConstructorWithArgs::constructor::name", "name", "String", False, False)])])
checkMethod(results[2].ctor(), "::TestConstructorOverloads::constructor",
"constructor",
[("TestConstructorOverloads (Wrapper)",
[("::TestConstructorOverloads::constructor::foo", "foo", "Object", False, False)]),
("TestConstructorOverloads (Wrapper)",
[("::TestConstructorOverloads::constructor::bar", "bar", "Boolean", False, False)])])
parser = parser.reset()
parser.parse("""
[ChromeConstructor()]
interface TestChromeConstructor {
};
""")
results = parser.finish()
harness.check(len(results), 1, "Should be one production")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
checkMethod(results[0].ctor(), "::TestChromeConstructor::constructor",
"constructor", [("TestChromeConstructor (Wrapper)", [])],
chromeOnly=True)
parser = parser.reset()
parser.parse("""
[HTMLConstructor]
interface TestHTMLConstructor {
};
""")
results = parser.finish()
harness.check(len(results), 1, "Should be one production")
harness.ok(isinstance(results[0], WebIDL.IDLInterface),
"Should be an IDLInterface")
checkMethod(results[0].ctor(), "::TestHTMLConstructor::constructor",
"constructor", [("TestHTMLConstructor (Wrapper)", [])],
htmlConstructor=True)
parser = parser.reset()
threw = False
try:
parser.parse("""
[Constructor(),
ChromeConstructor(DOMString a)]
interface TestChromeConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a Constructor and a ChromeConstructor")
# Test HTMLConstructor with argument
parser = parser.reset()
threw = False
try:
parser.parse("""
[HTMLConstructor(DOMString a)]
interface TestHTMLConstructorWithArgs {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "HTMLConstructor should take no argument")
# Test HTMLConstructor on a callback interface
parser = parser.reset()
threw = False
try:
parser.parse("""
[HTMLConstructor]
callback interface TestHTMLConstructorOnCallbackInterface {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "HTMLConstructor can't be used on a callback interface")
# Test HTMLConstructor and Constructor
parser = parser.reset()
threw = False
try:
parser.parse("""
[Constructor,
HTMLConstructor]
interface TestHTMLConstructorAndConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a Constructor and a HTMLConstructor")
parser = parser.reset()
threw = False
try:
parser.parse("""
[HTMLConstructor,
Constructor]
interface TestHTMLConstructorAndConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a HTMLConstructor and a Constructor")
parser = parser.reset()
threw = False
try:
parser.parse("""
[HTMLConstructor,
Constructor(DOMString a)]
interface TestHTMLConstructorAndConstructor {
};
""")
        results = parser.finish()
    except:
threw = True
harness.ok(threw, "Can't have both a HTMLConstructor and a Constructor")
parser = parser.reset()
threw = False
try:
parser.parse("""
[Constructor(DOMString a),
HTMLConstructor]
interface TestHTMLConstructorAndConstructor {
};
""")
        results = parser.finish()
    except:
threw = True
harness.ok(threw, "Can't have both a HTMLConstructor and a Constructor")
# Test HTMLConstructor and ChromeConstructor
parser = parser.reset()
threw = False
try:
parser.parse("""
[ChromeConstructor,
HTMLConstructor]
interface TestHTMLConstructorAndChromeConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a HTMLConstructor and a ChromeConstructor")
parser = parser.reset()
threw = False
try:
parser.parse("""
[HTMLConstructor,
ChromeConstructor]
interface TestHTMLConstructorAndChromeConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a HTMLConstructor and a ChromeConstructor")
parser = parser.reset()
threw = False
try:
parser.parse("""
[ChromeConstructor(DOMString a),
HTMLConstructor]
interface TestHTMLConstructorAndChromeConstructor {
};
""")
results = parser.finish()
except:
        threw = True
    harness.ok(threw, "Can't have both a HTMLConstructor and a ChromeConstructor")
parser = parser.reset()
threw = False
try:
parser.parse("""
[HTMLConstructor,
ChromeConstructor(DOMString a)]
interface TestHTMLConstructorAndChromeConstructor {
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw, "Can't have both a HTMLConstructor and a ChromeConstructor")
| mpl-2.0 | -2,174,718,412,636,529,200 | 34.393382 | 135 | 0.608185 | false |
taktik/account-invoicing | account_invoice_shipping_address/tests/test_invoice_shipping_test.py | 30 | 2274 | # -*- coding: utf-8 -*-
##############################################################################
# This file is part of account_invoice_shipping_address, an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# account_invoice_shipping_address is free software: you can redistribute it
# and/or modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# account_invoice_shipping_address is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with account_invoice_shipping_address.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import openerp.tests.common as common
class TestAccountInvoiceShipment(common.TransactionCase):
    def setUp(self):
        super(TestAccountInvoiceShipment, self).setUp()
self.inv_model = self.env['account.invoice']
self.stock_model = self.env['stock.picking']
self.partner_2 = self.ref('base.res_partner_2')
self.partner_address_3 = self.ref('base.res_partner_address_3')
self.shipment4 = self.ref('stock.incomming_shipment4')
self.account_journal = self.ref('account.check_journal')
def test_create_invoice_from_stock(self):
stock = self.stock_model.browse(self.shipment4)
stock.invoice_state = '2binvoiced'
stock.partner_id = self.partner_address_3
stock.move_lines[0].partner_id = self.partner_2
res = stock.action_invoice_create(journal_id=self.account_journal)
self.assertEqual(len(res), 1)
inv_id = res[0]
created_invoice = self.inv_model.browse(inv_id)
self.assertEqual(created_invoice.partner_id.id,
self.partner_address_3)
self.assertEqual(created_invoice.address_shipping_id.id,
self.partner_2)
| agpl-3.0 | 5,404,146,399,965,696,000 | 39.607143 | 79 | 0.62577 | false |
josiah-wolf-oberholtzer/supriya | tests/commands/test_commands_SynthNewRequest.py | 1 | 3544 | import pytest
import uqbar.strings
import supriya
def test_do_not_coerce_arguments():
synth = supriya.realtime.Synth()
group = supriya.realtime.Group()
assert synth.node_id is None
assert group.node_id is None
request = supriya.commands.SynthNewRequest(
node_id=synth, synthdef=synth.synthdef, target_node_id=group
)
assert request.node_id is synth
assert request.target_node_id is group
assert synth.node_id is None
assert group.node_id is None
with pytest.raises(TypeError):
request.to_osc()
def test_allocate_ids_before_remote_application(server):
"""
Local application allocates the synth's ID before we generate the OSC
message.
"""
synth = supriya.realtime.Synth()
group = supriya.realtime.Group().allocate()
assert synth.node_id is None
assert group.node_id == 1000
request = supriya.commands.SynthNewRequest(
node_id=synth, synthdef=synth.synthdef, target_node_id=group
)
assert request.node_id is synth
assert request.target_node_id is group
with server.osc_protocol.capture() as transcript:
request.communicate()
assert [(_.label, _.message) for _ in transcript] == [
("S", supriya.osc.OscMessage("/s_new", "default", 1001, 0, 1000)),
("R", supriya.osc.OscMessage("/n_go", 1001, 1000, -1, -1, 0)),
]
assert synth.node_id == 1001
assert synth.parent is group
assert synth.is_allocated
def test_no_preexisting_synth_object(server):
"""
Communicating without a pre-existing synth creates that synth during local
application.
"""
synthdef = supriya.assets.synthdefs.test.allocate()
group = supriya.realtime.Group().allocate()
request = supriya.commands.SynthNewRequest(
node_id=666, synthdef=synthdef, target_node_id=group
)
assert request.node_id == 666
with server.osc_protocol.capture() as transcript:
request.communicate()
assert [(_.label, _.message) for _ in transcript] == [
("S", supriya.osc.OscMessage("/s_new", "test", 666, 0, 1000)),
("R", supriya.osc.OscMessage("/n_go", 666, 1000, -1, -1, 0)),
]
synth = server[666]
assert synth.parent is group
assert synth.synthdef is synthdef
def test_bus_symbol_mapping(server):
synthdef = supriya.assets.synthdefs.test.allocate()
group = supriya.realtime.Group().allocate()
request = supriya.commands.SynthNewRequest(
node_id=666,
synthdef=synthdef,
target_node_id=group,
amplitude="c0",
frequency="a1",
)
with server.osc_protocol.capture() as transcript:
request.communicate()
assert [(_.label, _.message) for _ in transcript] == [
(
"S",
supriya.osc.OscMessage(
"/s_new", "test", 666, 0, 1000, "amplitude", "c0", "frequency", "a1"
),
),
("R", supriya.osc.OscMessage("/n_go", 666, 1000, -1, -1, 0)),
]
synth = server[666]
assert synth.parent is group
assert synth.synthdef is synthdef
assert str(synth.controls["amplitude"].value) == "c0"
assert str(synth.controls["frequency"].value) == "a1"
server_state = str(server.query_remote_nodes(True))
assert server_state == uqbar.strings.normalize(
"""
NODE TREE 0 group
1 group
1000 group
666 test
amplitude: c0, frequency: a1
"""
)
assert str(server.query_local_nodes(True)) == server_state
| mit | 573,024,230,306,719,400 | 32.433962 | 84 | 0.625 | false |
zsoltdudas/lis-tempest | tempest/services/object_storage/object_client.py | 4 | 9809 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from six.moves import http_client as httplib
from six.moves.urllib import parse as urlparse
from tempest.lib.common import rest_client
class ObjectClient(rest_client.RestClient):
def create_object(self, container, object_name, data,
params=None, metadata=None, headers=None):
"""Create storage object."""
if headers is None:
headers = self.get_headers()
if not data:
headers['content-length'] = '0'
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
url = "%s/%s" % (str(container), str(object_name))
if params:
url += '?%s' % urlparse.urlencode(params)
resp, body = self.put(url, data, headers)
self.expected_success(201, resp.status)
return resp, body
def update_object(self, container, object_name, data):
"""Upload data to replace current storage object."""
resp, body = self.create_object(container, object_name, data)
self.expected_success(201, resp.status)
return resp, body
def delete_object(self, container, object_name, params=None):
"""Delete storage object."""
url = "%s/%s" % (str(container), str(object_name))
if params:
url += '?%s' % urlparse.urlencode(params)
resp, body = self.delete(url, headers={})
self.expected_success([200, 204], resp.status)
return resp, body
def update_object_metadata(self, container, object_name, metadata,
metadata_prefix='X-Object-Meta-'):
"""Add, remove, or change X-Object-Meta metadata for storage object."""
headers = {}
for key in metadata:
headers["%s%s" % (str(metadata_prefix), str(key))] = metadata[key]
url = "%s/%s" % (str(container), str(object_name))
resp, body = self.post(url, None, headers=headers)
self.expected_success(202, resp.status)
return resp, body
def list_object_metadata(self, container, object_name):
"""List all storage object X-Object-Meta- metadata."""
url = "%s/%s" % (str(container), str(object_name))
resp, body = self.head(url)
self.expected_success(200, resp.status)
return resp, body
def get_object(self, container, object_name, metadata=None):
"""Retrieve object's data."""
headers = {}
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
url = "{0}/{1}".format(container, object_name)
resp, body = self.get(url, headers=headers)
self.expected_success([200, 206], resp.status)
return resp, body
def copy_object_in_same_container(self, container, src_object_name,
dest_object_name, metadata=None):
"""Copy storage object's data to the new object using PUT."""
url = "{0}/{1}".format(container, dest_object_name)
headers = {}
headers['X-Copy-From'] = "%s/%s" % (str(container),
str(src_object_name))
headers['content-length'] = '0'
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
resp, body = self.put(url, None, headers=headers)
self.expected_success(201, resp.status)
return resp, body
def copy_object_across_containers(self, src_container, src_object_name,
dst_container, dst_object_name,
metadata=None):
"""Copy storage object's data to the new object using PUT."""
url = "{0}/{1}".format(dst_container, dst_object_name)
headers = {}
headers['X-Copy-From'] = "%s/%s" % (str(src_container),
str(src_object_name))
headers['content-length'] = '0'
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
resp, body = self.put(url, None, headers=headers)
self.expected_success(201, resp.status)
return resp, body
def copy_object_2d_way(self, container, src_object_name, dest_object_name,
metadata=None):
"""Copy storage object's data to the new object using COPY."""
url = "{0}/{1}".format(container, src_object_name)
headers = {}
headers['Destination'] = "%s/%s" % (str(container),
str(dest_object_name))
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
resp, body = self.copy(url, headers=headers)
self.expected_success(201, resp.status)
return resp, body
def create_object_segments(self, container, object_name, segment, data):
"""Creates object segments."""
url = "{0}/{1}/{2}".format(container, object_name, segment)
resp, body = self.put(url, data)
self.expected_success(201, resp.status)
return resp, body
def put_object_with_chunk(self, container, name, contents, chunk_size):
"""Put an object with Transfer-Encoding header"""
if self.base_url is None:
self._set_auth()
headers = {'Transfer-Encoding': 'chunked'}
if self.token:
headers['X-Auth-Token'] = self.token
conn = put_object_connection(self.base_url, container, name, contents,
chunk_size, headers)
resp = conn.getresponse()
body = resp.read()
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
self._error_checker('PUT', None, headers, contents, resp, body)
self.expected_success(201, resp.status)
return resp.status, resp.reason, resp_headers
def create_object_continue(self, container, object_name,
data, metadata=None):
"""Create storage object."""
headers = {}
if metadata:
for key in metadata:
headers[str(key)] = metadata[key]
if not data:
headers['content-length'] = '0'
if self.base_url is None:
self._set_auth()
headers['X-Auth-Token'] = self.token
conn = put_object_connection(self.base_url, str(container),
str(object_name), data, None, headers)
response = conn.response_class(conn.sock,
strict=conn.strict,
method=conn._method)
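        # Read only the raw status line straight off the socket; the caller
        # needs just version/status/reason, not a fully parsed response.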
version, status, reason = response._read_status()
resp = {'version': version,
'status': str(status),
'reason': reason}
return resp
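# Hedged usage sketch (added for illustration, not part of the original
# client; ``auth_provider``, the service name, and the region below are
# placeholders for whatever rest_client.RestClient requires in your setup):
#
#     client = ObjectClient(auth_provider, 'object-storage', 'regionOne')
#     client.create_object('my-container', 'hello.txt', b'hello world')
#     resp, body = client.get_object('my-container', 'hello.txt')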
def put_object_connection(base_url, container, name, contents=None,
chunk_size=65536, headers=None, query_string=None):
"""Helper function to make connection to put object with httplib
:param base_url: base_url of an object client
:param container: container name that the object is in
:param name: object name to put
:param contents: a string or a file like object to read object data
from; if None, a zero-byte put will be done
:param chunk_size: chunk size of data to write; it defaults to 65536;
used only if the contents object has a 'read'
method, eg. file-like objects, ignored otherwise
:param headers: additional headers to include in the request, if any
:param query_string: if set will be appended with '?' to generated path
"""
parsed = urlparse.urlparse(base_url)
if parsed.scheme == 'https':
conn = httplib.HTTPSConnection(parsed.netloc)
else:
conn = httplib.HTTPConnection(parsed.netloc)
path = str(parsed.path) + "/"
path += "%s/%s" % (str(container), str(name))
if query_string:
path += '?' + query_string
if headers:
headers = dict(headers)
else:
headers = {}
if hasattr(contents, 'read'):
conn.putrequest('PUT', path)
for header, value in six.iteritems(headers):
conn.putheader(header, value)
if 'Content-Length' not in headers:
if 'Transfer-Encoding' not in headers:
conn.putheader('Transfer-Encoding', 'chunked')
conn.endheaders()
chunk = contents.read(chunk_size)
while chunk:
conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
chunk = contents.read(chunk_size)
conn.send('0\r\n\r\n')
else:
conn.endheaders()
            left = int(headers['Content-Length'])  # header value may be a str
while left > 0:
size = chunk_size
if size > left:
size = left
chunk = contents.read(size)
conn.send(chunk)
left -= len(chunk)
else:
conn.request('PUT', path, contents, headers)
return conn
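# Hedged usage sketch (illustration only; endpoint, container, and token are
# placeholders). With a file-like ``contents`` and no Content-Length header,
# the helper falls back to chunked transfer encoding:
#
#     conn = put_object_connection('http://swift.example.test/v1/AUTH_demo',
#                                  'my-container', 'my-object',
#                                  contents=open('payload.bin', 'rb'),
#                                  headers={'X-Auth-Token': '<token>'})
#     resp = conn.getresponse()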
| apache-2.0 | -5,059,720,037,875,156,000 | 37.01938 | 79 | 0.567132 | false |
adamncasey/servo | tests/wpt/css-tests/tools/html5lib/html5lib/tests/test_parser.py | 451 | 3612 | from __future__ import absolute_import, division, unicode_literals
import os
import sys
import traceback
import warnings
import re
warnings.simplefilter("error")
from .support import get_data_files
from .support import TestData, convert, convertExpected, treeTypes
from html5lib import html5parser, constants
# Run the parse error checks
checkParseErrors = False
# XXX - There should just be one function here but for some reason the testcase
# format differs from the treedump format by a single space character
def convertTreeDump(data):
return "\n".join(convert(3)(data).split("\n")[1:])
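# Rewrites expected serializations such as "<span>" into the namespaced
# form "<html span>" when namespaceHTMLElements is enabled.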
namespaceExpected = re.compile(r"^(\s*)<(\S+)>", re.M).sub
def runParserTest(innerHTML, input, expected, errors, treeClass,
namespaceHTMLElements):
with warnings.catch_warnings(record=True) as caughtWarnings:
warnings.simplefilter("always")
p = html5parser.HTMLParser(tree=treeClass,
namespaceHTMLElements=namespaceHTMLElements)
try:
if innerHTML:
document = p.parseFragment(input, innerHTML)
else:
document = p.parse(input)
except:
errorMsg = "\n".join(["\n\nInput:", input, "\nExpected:", expected,
"\nTraceback:", traceback.format_exc()])
assert False, errorMsg
otherWarnings = [x for x in caughtWarnings
if not issubclass(x.category, constants.DataLossWarning)]
assert len(otherWarnings) == 0, [(x.category, x.message) for x in otherWarnings]
if len(caughtWarnings):
return
output = convertTreeDump(p.tree.testSerializer(document))
expected = convertExpected(expected)
if namespaceHTMLElements:
expected = namespaceExpected(r"\1<html \2>", expected)
errorMsg = "\n".join(["\n\nInput:", input, "\nExpected:", expected,
"\nReceived:", output])
assert expected == output, errorMsg
errStr = []
for (line, col), errorcode, datavars in p.errors:
assert isinstance(datavars, dict), "%s, %s" % (errorcode, repr(datavars))
errStr.append("Line: %i Col: %i %s" % (line, col,
constants.E[errorcode] % datavars))
errorMsg2 = "\n".join(["\n\nInput:", input,
"\nExpected errors (" + str(len(errors)) + "):\n" + "\n".join(errors),
"\nActual errors (" + str(len(p.errors)) + "):\n" + "\n".join(errStr)])
if checkParseErrors:
assert len(p.errors) == len(errors), errorMsg2
def test_parser():
sys.stderr.write('Testing tree builders ' + " ".join(list(treeTypes.keys())) + "\n")
files = get_data_files('tree-construction')
for filename in files:
testName = os.path.basename(filename).replace(".dat", "")
if testName in ("template",):
continue
tests = TestData(filename, "data")
for index, test in enumerate(tests):
input, errors, innerHTML, expected = [test[key] for key in
('data', 'errors',
'document-fragment',
'document')]
if errors:
errors = errors.split("\n")
for treeName, treeCls in treeTypes.items():
for namespaceHTMLElements in (True, False):
yield (runParserTest, innerHTML, input, expected, errors, treeCls,
namespaceHTMLElements)
| mpl-2.0 | 3,338,820,939,834,580,000 | 36.625 | 98 | 0.576689 | false |
chrishokamp/fuel | fuel/transformers/image.py | 6 | 12409 | from __future__ import division
from io import BytesIO
import math
import numpy
from PIL import Image
from six import PY3
try:
from ._image import window_batch_bchw
window_batch_bchw_available = True
except ImportError:
window_batch_bchw_available = False
from . import ExpectsAxisLabels, SourcewiseTransformer
from .. import config
class ImagesFromBytes(SourcewiseTransformer):
"""Load from a stream of bytes objects representing encoded images.
Parameters
----------
data_stream : instance of :class:`AbstractDataStream`
The wrapped data stream. The individual examples returned by
this should be the bytes (in a `bytes` container on Python 3
or a `str` on Python 2) comprising an image in a format readable
by PIL, such as PNG, JPEG, etc.
color_mode : str, optional
Mode to pass to PIL for color space conversion. Default is RGB.
If `None`, no coercion is performed.
Notes
-----
Images are returned as NumPy arrays converted from PIL objects.
If there is more than one color channel, then the array is transposed
from the `(height, width, channel)` dimension layout native to PIL to
the `(channel, height, width)` layout that is pervasive in the world
of convolutional networks. If there is only one color channel, as for
monochrome or binary images, a leading axis with length 1 is added for
the sake of uniformity/predictability.
This SourcewiseTransformer supports streams returning single examples
as `bytes` objects (`str` on Python 2.x) as well as streams that
return iterables containing such objects. In the case of an
iterable, a list of loaded images is returned.
"""
def __init__(self, data_stream, color_mode='RGB', **kwargs):
kwargs.setdefault('produces_examples', data_stream.produces_examples)
# Acrobatics currently required to correctly set axis labels.
which_sources = kwargs.get('which_sources', data_stream.sources)
axis_labels = self._make_axis_labels(data_stream, which_sources,
kwargs['produces_examples'])
kwargs.setdefault('axis_labels', axis_labels)
super(ImagesFromBytes, self).__init__(data_stream, **kwargs)
self.color_mode = color_mode
def transform_source_example(self, example, source_name):
if PY3:
bytes_type = bytes
else:
bytes_type = str
if not isinstance(example, bytes_type):
raise TypeError("expected {} object".format(bytes_type.__name__))
pil_image = Image.open(BytesIO(example))
if self.color_mode is not None:
pil_image = pil_image.convert(self.color_mode)
image = numpy.array(pil_image)
if image.ndim == 3:
# Transpose to `(channels, height, width)` layout.
return image.transpose(2, 0, 1)
elif image.ndim == 2:
# Add a channels axis of length 1.
image = image[numpy.newaxis]
else:
raise ValueError('unexpected number of axes')
return image
def transform_source_batch(self, batch, source_name):
return [self.transform_source_example(im, source_name) for im in batch]
def _make_axis_labels(self, data_stream, which_sources, produces_examples):
# This is ugly and probably deserves a refactoring of how we handle
# axis labels. It would be simpler to use memoized read-only
# properties, but the AbstractDataStream constructor tries to set
# self.axis_labels currently. We can't use self.which_sources or
# self.produces_examples here, because this *computes* things that
# need to be passed into the superclass constructor, necessarily
# meaning that the superclass constructor hasn't been called.
# Cooperative inheritance is hard, etc.
labels = {}
for source in data_stream.sources:
if source in which_sources:
if produces_examples:
labels[source] = ('channel', 'height', 'width')
else:
labels[source] = ('batch', 'channel', 'height', 'width')
else:
labels[source] = (data_stream.axis_labels[source]
if source in data_stream.axis_labels
else None)
return labels
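# Hedged usage sketch (illustration only; ``raw_stream`` is a hypothetical
# stream whose 'image' source yields PNG/JPEG-encoded bytes):
#
#     stream = ImagesFromBytes(raw_stream, color_mode='RGB',
#                              which_sources=('image',))
#     # each decoded image is a (channel, height, width) ndarray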
class MinimumImageDimensions(SourcewiseTransformer, ExpectsAxisLabels):
"""Resize (lists of) images to minimum dimensions.
Parameters
----------
data_stream : instance of :class:`AbstractDataStream`
The data stream to wrap.
minimum_shape : 2-tuple
The minimum `(height, width)` dimensions every image must have.
Images whose height and width are larger than these dimensions
are passed through as-is.
resample : str, optional
Resampling filter for PIL to use to upsample any images requiring
it. Options include 'nearest' (default), 'bilinear', and 'bicubic'.
See the PIL documentation for more detailed information.
Notes
-----
This transformer expects stream sources returning individual images,
represented as 2- or 3-dimensional arrays, or lists of the same.
The format of the stream is unaltered.
"""
def __init__(self, data_stream, minimum_shape, resample='nearest',
**kwargs):
self.minimum_shape = minimum_shape
try:
self.resample = getattr(Image, resample.upper())
except AttributeError:
raise ValueError("unknown resampling filter '{}'".format(resample))
kwargs.setdefault('produces_examples', data_stream.produces_examples)
kwargs.setdefault('axis_labels', data_stream.axis_labels)
super(MinimumImageDimensions, self).__init__(data_stream, **kwargs)
def transform_source_batch(self, batch, source_name):
self.verify_axis_labels(('batch', 'channel', 'height', 'width'),
self.data_stream.axis_labels[source_name],
source_name)
return [self._example_transform(im, source_name) for im in batch]
def transform_source_example(self, example, source_name):
self.verify_axis_labels(('channel', 'height', 'width'),
self.data_stream.axis_labels[source_name],
source_name)
return self._example_transform(example, source_name)
def _example_transform(self, example, _):
if example.ndim > 3 or example.ndim < 2:
raise NotImplementedError
min_height, min_width = self.minimum_shape
original_height, original_width = example.shape[-2:]
if original_height < min_height or original_width < min_width:
dt = example.dtype
# If we're dealing with a colour image, swap around the axes
# to be in the format that PIL needs.
if example.ndim == 3:
im = example.transpose(1, 2, 0)
else:
im = example
im = Image.fromarray(im)
width, height = im.size
multiplier = max(1, min_width / width, min_height / height)
width = int(math.ceil(width * multiplier))
height = int(math.ceil(height * multiplier))
im = numpy.array(im.resize((width, height))).astype(dt)
# If necessary, undo the axis swap from earlier.
if im.ndim == 3:
example = im.transpose(2, 0, 1)
else:
example = im
return example
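# Hedged usage sketch (illustration only; ``image_stream`` is a hypothetical
# stream of (channel, height, width) images):
#
#     stream = MinimumImageDimensions(image_stream, (256, 256),
#                                     resample='bicubic',
#                                     which_sources=('image',))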
class RandomFixedSizeCrop(SourcewiseTransformer, ExpectsAxisLabels):
"""Randomly crop images to a fixed window size.
Parameters
----------
data_stream : :class:`AbstractDataStream`
The data stream to wrap.
window_shape : tuple
The `(height, width)` tuple representing the size of the output
window.
Notes
-----
This transformer expects to act on stream sources which provide one of
* Single images represented as 3-dimensional ndarrays, with layout
`(channel, height, width)`.
* Batches of images represented as lists of 3-dimensional ndarrays,
possibly of different shapes (i.e. images of differing
heights/widths).
* Batches of images represented as 4-dimensional ndarrays, with
layout `(batch, channel, height, width)`.
    The format of the stream will be unaltered, i.e. if lists are
yielded by `data_stream` then lists will be yielded by this
transformer.
"""
def __init__(self, data_stream, window_shape, **kwargs):
if not window_batch_bchw_available:
raise ImportError('window_batch_bchw not compiled')
self.window_shape = window_shape
self.rng = kwargs.pop('rng', None)
self.warned_axis_labels = False
if self.rng is None:
self.rng = numpy.random.RandomState(config.default_seed)
kwargs.setdefault('produces_examples', data_stream.produces_examples)
kwargs.setdefault('axis_labels', data_stream.axis_labels)
super(RandomFixedSizeCrop, self).__init__(data_stream, **kwargs)
def transform_source_batch(self, source, source_name):
self.verify_axis_labels(('batch', 'channel', 'height', 'width'),
self.data_stream.axis_labels[source_name],
source_name)
windowed_height, windowed_width = self.window_shape
if isinstance(source, list) and all(isinstance(b, numpy.ndarray) and
b.ndim == 3 for b in source):
return [self.transform_source_example(im, source_name)
for im in source]
elif isinstance(source, numpy.ndarray) and source.ndim == 4:
# Hardcoded assumption of (batch, channels, height, width).
# This is what the fast Cython code supports.
out = numpy.empty(source.shape[:2] + self.window_shape,
dtype=source.dtype)
batch_size = source.shape[0]
image_height, image_width = source.shape[2:]
max_h_off = image_height - windowed_height
max_w_off = image_width - windowed_width
if max_h_off < 0 or max_w_off < 0:
raise ValueError("Got ndarray batch with image dimensions {} "
"but requested window shape of {}".format(
source.shape[2:], self.window_shape))
offsets_w = self.rng.random_integers(0, max_w_off, size=batch_size)
offsets_h = self.rng.random_integers(0, max_h_off, size=batch_size)
window_batch_bchw(source, offsets_h, offsets_w, out)
return out
else:
raise ValueError("uninterpretable batch format; expected a list "
"of arrays with ndim = 3, or an array with "
"ndim = 4")
def transform_source_example(self, example, source_name):
self.verify_axis_labels(('channel', 'height', 'width'),
self.data_stream.axis_labels[source_name],
source_name)
windowed_height, windowed_width = self.window_shape
if not isinstance(example, numpy.ndarray) or example.ndim != 3:
raise ValueError("uninterpretable example format; expected "
"ndarray with ndim = 3")
image_height, image_width = example.shape[1:]
if image_height < windowed_height or image_width < windowed_width:
raise ValueError("can't obtain ({}, {}) window from image "
"dimensions ({}, {})".format(
windowed_height, windowed_width,
image_height, image_width))
if image_height - windowed_height > 0:
off_h = self.rng.random_integers(0, image_height - windowed_height)
else:
off_h = 0
if image_width - windowed_width > 0:
off_w = self.rng.random_integers(0, image_width - windowed_width)
else:
off_w = 0
return example[:, off_h:off_h + windowed_height,
off_w:off_w + windowed_width]
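# Hedged usage sketch (illustration only; ``image_stream`` is a hypothetical
# stream with ('batch', 'channel', 'height', 'width') axis labels; ndarray
# batches additionally require the compiled ``window_batch_bchw`` extension):
#
#     stream = RandomFixedSizeCrop(image_stream, (224, 224),
#                                  which_sources=('image',))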
| mit | -2,894,919,595,764,940,300 | 44.621324 | 79 | 0.606737 | false |
jandom/rdkit | rdkit/Chem/FeatMaps/FeatMapParser.py | 12 | 5307 | # $Id$
#
# Copyright (C) 2006 Greg Landrum
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
from rdkit import Geometry
from rdkit.Chem.FeatMaps import FeatMaps, FeatMapPoint
import re
"""
ScoreMode=All
DirScoreMode=Ignore
BeginParams
family=Aromatic radius=2.5 width=1.0 profile=Gaussian
family=Acceptor radius=1.5
EndParams
# optional
BeginPoints
family=Acceptor pos=(1.0, 0.0, 5.0) weight=1.25 dir=(1, 1, 0)
family=Aromatic pos=(0.0,1.0,0.0) weight=2.0 dir=(0,0,1) dir=(0,0,-1)
family=Acceptor pos=(1.0,1.0,2.0) weight=1.25
EndPoints
"""
class FeatMapParseError(ValueError):
pass
class FeatMapParser(object):
data = None
def __init__(self, file=None, data=None):
if file:
self.data = file.readlines()
elif data:
self.SetData(data)
self._lineNum = 0
def SetData(self, data):
if isinstance(data, str):
self.data = data.split('\n')
else:
self.data = data
self._lineNum = 0
def _NextLine(self):
txt = ''
while 1:
try:
l = self.data[self._lineNum].split('#')[0].strip()
except IndexError:
break
self._lineNum += 1
if l:
txt += l
if l[-1] != '\\':
break
return txt
def Parse(self, featMap=None):
if featMap is None:
featMap = FeatMaps.FeatMap()
l = self._NextLine().strip()
while l:
splitL = l.split('=')
if len(splitL) == 1:
keyword = splitL[0].strip().lower()
if keyword == 'beginpoints':
pts = self.ParseFeatPointBlock()
for pt in pts:
featMap.AddFeatPoint(pt)
elif keyword == 'beginparams':
featMap.params = self.ParseParamBlock()
else:
raise FeatMapParseError('Unrecognized keyword %s on line %d' % (keyword, self._lineNum))
else:
keyword = splitL[0].strip().lower()
val = splitL[1].strip()
if keyword == 'scoremode':
try:
featMap.scoreMode = getattr(FeatMaps.FeatMapScoreMode, val)
except AttributeError:
raise FeatMapParseError('ScoreMode %s not recognized on line %d' % (val, self._lineNum))
elif keyword == 'dirscoremode':
try:
featMap.dirScoreMode = getattr(FeatMaps.FeatDirScoreMode, val)
except AttributeError:
raise FeatMapParseError('DirScoreMode %s not recognized on line %d' %
(val, self._lineNum))
else:
raise FeatMapParseError('Unrecognized keyword %s on line %d' % (keyword, self._lineNum))
l = self._NextLine().strip()
return featMap
def ParseParamBlock(self):
paramLineSplitter = re.compile(r'([a-zA-Z]+) *= *(\S+)')
params = {}
l = self._NextLine()
while l and l != 'EndParams':
param = FeatMaps.FeatMapParams()
vals = paramLineSplitter.findall(l)
for name, val in vals:
name = name.lower()
if name == 'family':
family = val
elif name == 'radius':
param.radius = float(val)
elif name == 'width':
param.width = float(val)
elif name == 'profile':
try:
param.featProfile = getattr(param.FeatProfile, val)
except AttributeError:
raise FeatMapParseError('Profile %s not recognized on line %d' % (val, self._lineNum))
else:
raise FeatMapParseError('FeatMapParam option %s not recognized on line %d' %
(name, self._lineNum))
params[family] = param
l = self._NextLine()
if l != 'EndParams':
raise FeatMapParseError('EndParams line not found')
return params
def _parsePoint(self, txt):
txt = txt.strip()
startP = 0
endP = len(txt)
if txt[0] == '(':
startP += 1
if txt[-1] == ')':
endP -= 1
txt = txt[startP:endP]
splitL = txt.split(',')
if len(splitL) != 3:
raise ValueError('Bad location string')
vs = [float(x) for x in splitL]
pt = Geometry.Point3D(vs[0], vs[1], vs[2])
return pt
def ParseFeatPointBlock(self):
featLineSplitter = re.compile(r'([a-zA-Z]+) *= *')
feats = []
l = self._NextLine()
while l and l != 'EndPoints':
vals = featLineSplitter.split(l)
while vals.count(''):
vals.remove('')
p = FeatMapPoint.FeatMapPoint()
i = 0
while i < len(vals):
name = vals[i].lower()
if name == 'family':
i += 1
val = vals[i].strip()
p.SetFamily(val)
elif name == 'weight':
i += 1
val = float(vals[i])
p.weight = val
elif name == 'pos':
i += 1
val = vals[i]
pos = self._parsePoint(val)
p.SetPos(pos)
elif name == 'dir':
i += 1
val = vals[i]
pos = self._parsePoint(val)
p.featDirs.append(pos)
else:
raise FeatMapParseError('FeatPoint option %s not recognized on line %d' %
(name, self._lineNum))
i += 1
feats.append(p)
l = self._NextLine()
return feats
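if __name__ == '__main__':  # pragma: no cover
    # Hedged usage sketch (illustration only): parse a minimal feature map
    # in the format shown in the module-level example above.
    _demo = ("BeginParams\n"
             "family=Acceptor radius=1.5\n"
             "EndParams\n"
             "BeginPoints\n"
             "family=Acceptor pos=(1.0, 0.0, 5.0) weight=1.25\n"
             "EndPoints\n")
    fmap = FeatMapParser(data=_demo).Parse()
    print(sorted(fmap.params))  # -> ['Acceptor']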
| bsd-3-clause | 5,239,492,474,791,536,000 | 26.78534 | 100 | 0.557754 | false |
xingyepei/edx-platform | common/lib/capa/capa/safe_exec/lazymod.py | 193 | 1200 | """A module proxy for delayed importing of modules.
From http://barnesc.blogspot.com/2006/06/automatic-python-imports-with-autoimp.html,
in the public domain.
"""
import sys
class LazyModule(object):
"""A lazy module proxy."""
def __init__(self, modname):
self.__dict__['__name__'] = modname
self._set_mod(None)
def _set_mod(self, mod):
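        # Adopt the real module's __dict__ as our own so later attribute
        # lookups resolve directly, without another __getattr__ round-trip.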
if mod is not None:
self.__dict__ = mod.__dict__
self.__dict__['_lazymod_mod'] = mod
def _load_mod(self):
__import__(self.__name__)
self._set_mod(sys.modules[self.__name__])
def __getattr__(self, name):
if self.__dict__['_lazymod_mod'] is None:
self._load_mod()
mod = self.__dict__['_lazymod_mod']
if hasattr(mod, name):
return getattr(mod, name)
else:
try:
subname = '%s.%s' % (self.__name__, name)
__import__(subname)
submod = getattr(mod, name)
except ImportError:
raise AttributeError("'module' object has no attribute %r" % name)
self.__dict__[name] = LazyModule(subname)
return self.__dict__[name]
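if __name__ == '__main__':
    # Hedged usage sketch (illustration only): wrap the standard-library
    # ``json`` module so the real import is deferred until first use.
    json = LazyModule('json')
    print(json.dumps({'lazy': True}))  # first access triggers the import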
| agpl-3.0 | 2,710,745,150,038,104,600 | 26.906977 | 84 | 0.526667 | false |