prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>Functions.js<|end_file_name|><|fim▁begin|>var util = require('util');
var fs = require('fs');
var path = require('path');
var ltrim = require('underscore.string/ltrim');
exports.isCommand = function (input) {
if (input.toLowerCase().substr(0, prefix.length) === prefix)
return true;
else
return false;
};
exports.isNumber = function (n) {
return !isNaN(parseFloat(n)) && isFinite(n);
};
exports.isURL = function (str) {
var pattern = new RedExp(
'^' +
'(?:(?:https?|ftp)://)' +
'(?:\\S+(?::\\S*)?@)?' +
'(?:' +
'(?!(?:10|127)(?:\\.\\d{1,3}){3})' +
'(?!(?:169\\.254|192\\.168)(?:\\.\\d{1,3}){2})' +
'(?!172\\.(?:1[6-9]|2\\d|3[0-1])(?:\\.\\d{1,3}){2})' +
'(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])' +
'(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}' +<|fim▁hole|> '(?:\\.(?:[1-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))' +
'|' +
'(?:(?:[a-z\\u00a1-\\uffff0-9]-*)*[a-z\\u00a1-\\uffff0-9]+)' +
'(?:\\.(?:[a-z\\u00a1-\\uffff0-9]-*)*[a-z\\u00a1-\\uffff0-9]+)*' +
'(?:\\.(?:[a-z\\u00a1-\\uffff]{2,}))' +
'\\.?' +
')' +
'(?::\\d{2,5})?' +
'(?:[/?#]\\S*)?' +
'$', 'i'
);
if (!pattern.test(str))
return false;
else
return true;
};
exports.getCommand = function (input) {
return input.replace(prefix, '').split(' ')[0];
};
exports.getFolders = function (SourcePath) {
return fs.readdirSync(SourcePath).filter(function (file) {
return fs.statSync(path.join(SourcePath, file)).isDirectory();
});
};
exports.getMessage = function (input) {
return input.replace(prefix, '');
};
exports.getParams = function (text) {
return text.split(' ').slice(1);
};
exports.normalizeChannel = function (channel) {
return util.format('#%s', ltrim(channel.toLowerCase() , '#'));
};
exports.numParams = function (text) {
return text.split(' ').length - 1;
};
exports.splitInput = function (splitAt, message, intSplit) {
var data = message.split(splitAt)[1];
return data.slice(0, data.length - intSplit);
};<|fim▁end|> | |
<|file_name|>quest_player_manager.py<|end_file_name|><|fim▁begin|>from copy import deepcopy
import settings
from twitch.player_manager import PlayerManager
class QuestPlayerManager(PlayerManager):
"""
Functions like add_gold perform a raw store action and then save. __add_gold is the raw store action in this case.
Properties of raw store actions:
- Call username.lower()
- Touch self.players with that name
- Do not save to file
Properties of store actions:
- Do nothing other than call a raw action and then save
Some actions can also take a list of elements. These are all of the form:
def foo(username **kwargs):
if not (isinstance(username), str):
for user in username:
foo(username, **kwargs)
else:
ORIGINAL FUNCTION BODY
Note that both store actions and raw store actions qualify for this.
"""
default_player = deepcopy(PlayerManager.default_player)
default_player.update({
'exp': 0,
'prestige': 0,
'gold': 0,
'items': {}
})
def __add_gold(self, username, gold, prestige_benefits=True):
"""
Gives gold to the specified player.
:param username: str - The player who you are modifying
:param gold: float - How much gold to give that player
:param prestige_benefits: bool - Whether this gold increase is affected by prestige bonuses
"""
# Don't magnify negative amounts of gold
if prestige_benefits and gold > 0:
gold *= 1 + self.players[username]['prestige'] * settings.PRESTIGE_GOLD_AMP
self.players[username]['gold'] += gold
if self.players[username]['gold'] < 0:
self.players[username]['gold'] = 0
def add_gold(self, username, gold, prestige_benefits=True):
"""
Gives gold to the specified player.
:param username: str - The player who you are modifying
:param gold: float - How much gold to give that player
:param prestige_benefits: bool - Whether this gold increase is affected by prestige bonuses
"""
self.__add_gold(username, gold, prestige_benefits=prestige_benefits)
self.save_player(username)
def __add_exp(self, username, exp):
"""
Gives exp to the specified player.
:param username: str - The player who you are modifying
:param exp: float - How much exp to give that player
"""
self.players[username]['exp'] += exp
def add_exp(self, username, exp):
"""
Gives exp to the specified player.
:param username: str - The player who you are modifying
:param exp: float - How much exp to give that player
"""
self.__add_exp(username, exp)
self.save_player(username)
def __add_item(self, username, item):
"""
Item to give to the specified player.
:param username: str - The player who you are modifying
:param item: str or list<str> - The name of the item(s) we are giving to the player
"""
if not isinstance(item, str):
# We must be a list of items
for single_item in item:
self.__add_item(username, single_item)
else:
if item not in self.players[username]['items']:
self.players[username]['items'][item] = 1
else:
self.players[username]['items'][item] += 1
def add_item(self, username, item):
"""
Item to give to the specified player.
:param username: str - The player who you are modifying
:param item: str or list<str> - The name of the item(s) we are giving to the player
"""
self.__add_item(username, item)
self.save_player(username)
def __remove_item(self, username, item):
"""
Item to take from the specified player.
:param username: str - The player who you are modifying
:param item: str or list<str> - The name of the item(s) we are giving to the player
"""
if not isinstance(item, str):
# We must be a list of items
for single_item in item:
self.__remove_item(username, single_item)
else:
# If we don't have the item, do nothing
if item in self.players[username]['items']:
self.players[username]['items'][item] -= 1
if self.players[username]['items'][item] <= 0:
del self.players[username]['items'][item]
def remove_item(self, username, item):
"""
Item to take from the specified player.
:param username: str - The player who you are modifying
:param item: str or list<str> - The name of the item(s) we are giving to the player
"""
self.__remove_item(username, item)
self.save_player(username)
def __reward(self, username, gold=0, exp=0, item=None, prestige_benefits=True):
"""
Gives gold and exp to the specified player.
:param username: str - The player who you are modifying
:param gold: float - How much gold to give that player
:param exp: float - How much exp to give that player
"""
if not isinstance(username, str):
# We must be a list of users
for user in username:
self.__reward(user, gold=gold, exp=exp, item=item, prestige_benefits=prestige_benefits)
else:
self.__add_gold(username, gold, prestige_benefits=prestige_benefits)
self.__add_exp(username, exp)
if item:
self.__add_item(username, item)
def reward(self, username, gold=0, exp=0, item=None, prestige_benefits=True):
"""
Gives gold and exp to the specified player(s).
:param username: str or list<str> - The player(s) who you are modifying
:param gold: float - How much gold to give that player
:param exp: float - How much exp to give that player
"""
if not isinstance(username, str):
# We must be a list of users
for user in username:
self.reward(user, gold=gold, exp=exp, item=item, prestige_benefits=prestige_benefits)
else:
self.__reward(username, gold=gold, exp=exp, item=item, prestige_benefits=prestige_benefits)
self.save_player(username)
def __penalize(self, username, gold=0, exp=0, item=None, prestige_benefits=True):
"""
Gives gold and exp to the specified player(s).
:param username: str or list<str> - The player(s) who you are modifying
:param gold: float - How much gold to give that player
:param exp: float - How much exp to give that player
"""
if not isinstance(username, str):
# We must be a list of users
for user in username:<|fim▁hole|> self.__reward(username, gold=-gold, exp=-exp, item=None, prestige_benefits=prestige_benefits)
if item:
self.__remove_item(username, item)
def penalize(self, username, gold=0, exp=0, item=None, prestige_benefits=True):
"""
Gives gold and exp to the specified player(s).
:param username: str or list<str> - The player(s) who you are modifying
:param gold: float - How much gold to give that player
:param exp: float - How much exp to give that player
"""
if not isinstance(username, str):
# We must be a list of users
for user in username:
self.penalize(user, gold=gold, exp=exp, item=item, prestige_benefits=prestige_benefits)
else:
self.__penalize(username, gold=gold, exp=exp, item=item, prestige_benefits=prestige_benefits)
self.save_player(username)
def get_gold(self, username):
"""
Gets how much gold a given player has.
:param username: str - The player who you are modifying
"""
return self.players[username]['gold']
def get_exp(self, username):
"""
Gets how much exp a given player has.
:param username: str - The player who you are modifying
"""
return self.players[username]['exp']
@staticmethod
def exp_to_level(exp):
# The value for every member of the list is the minimum experience to be a given level
for level, exp_req in enumerate(settings.EXP_LEVELS, start=-1):
if exp < exp_req:
return level
return settings.LEVEL_CAP
def get_level(self, username):
"""
Gets what level a given player is.
:param username: str - The player who you are modifying
"""
exp = self.players[username]['exp']
return self.exp_to_level(exp)
def get_prestige(self, username):
"""
Gets what prestige level a given player is.
:param username: str - The player who you are modifying
"""
return self.players[username]['prestige']
def get_items(self, username):
"""
Gets the items of a given player.
:param username: str - The player who you are modifying
"""
return self.players[username]['items']
def prestige(self, username):
"""
Prestige advances a player.
:param username: str - The player who you are modifying
:return: bool - True if successfully prestiged, False if no change
"""
if self.players[username]['exp'] >= settings.EXP_LEVELS[settings.LEVEL_CAP] and (
self.players[username]['gold'] >= settings.PRESTIGE_COST):
self.players[username]['exp'] -= settings.EXP_LEVELS[settings.LEVEL_CAP]
self.players[username]['gold'] -= settings.PRESTIGE_COST
self.players[username]['prestige'] += 1
self.save_player(username)
return True
else:
return False
@staticmethod
def list_items(items):
msg = ''
for item, quantity in items.items():
if quantity <= 0:
continue
if quantity == 1:
msg += '{}, '.format(item)
else:
msg += '{} ({}), '.format(item, quantity)
msg = msg.rstrip(', ')
return msg
def whisper_stats(self, username):
"""
Whispers a player their relevant stats.
:param username: str - The player who is requesting stat information
"""
player = self.players[username]
msg = '{}Level: {} ({} Exp), Gold: {}{}'.format(
'Prestige: {}, '.format(player['prestige']) if player['prestige'] else '',
self.get_level(username), round(player['exp'], 1), round(player['gold'], 1),
', Items: {}'.format(self.list_items(player['items'])) if player['items'] else '')
self.bot.send_whisper(username, msg)
def save_player(self, username):
"""
Saves a specific player's data to persistent storage. Deletes items with quantity 0 or less.
:param username: str - The player whose data you want to save
"""
# Remove duplicate items. Doesn't use a dict comprehension because items is a custom dict type
remove_items = []
for item, quantity in self.players[username]['items'].items():
if quantity <= 0:
remove_items.append(item)
for remove_item in remove_items:
del self.players[username]['items'][remove_item]
super().save_player(username)<|fim▁end|> | self.__penalize(user, gold=gold, exp=exp, item=item, prestige_benefits=prestige_benefits)
else: |
<|file_name|>unique-mutable.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|> *i = 1;
assert_eq!(*i, 1);
}<|fim▁end|> |
pub fn main() {
let mut i = ~0; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import functools
import logging
import logging.handlers
import os
import six
import sys
import traceback
from girder.constants import LOG_ROOT, MAX_LOG_SIZE, LOG_BACKUP_COUNT, TerminalColor, VERSION
from girder.utility import config, mkdir
from girder.utility._cache import cache, requestCache, rateLimitBuffer
__version__ = '2.5.0'
__license__ = 'Apache 2.0'
VERSION['apiVersion'] = __version__
_quiet = False
_originalStdOut = sys.stdout
_originalStdErr = sys.stderr
class LogLevelFilter(object):
"""
Filter log records based on whether they are between a min and max level.
"""
def __init__(self, min, max):
self.minLevel = min
self.maxLevel = max
def filter(self, logRecord):
level = logRecord.levelno
return self.maxLevel >= level >= self.minLevel
class LogFormatter(logging.Formatter):
"""
Custom formatter that adds useful information about the request to the logs
when an exception happens. Cherrypy access logs are passed through without
change.
"""
def formatException(self, exc):
info = '\n'.join((
' Request URL: %s %s' % (cherrypy.request.method.upper(),
cherrypy.url()),
' Query string: ' + cherrypy.request.query_string,
' Remote IP: ' + cherrypy.request.remote.ip
))
return ('%s\n'
'Additional info:\n'
'%s' % (logging.Formatter.formatException(self, exc), info))
def format(self, record, *args, **kwargs):
if hasattr(record, 'name') and hasattr(record, 'message'):
if (record.name.startswith('cherrypy.access') or
record.name.startswith('cherrypy.error')):
return record.message
return super(LogFormatter, self).format(record, *args, **kwargs)
class StreamToLogger(object):
"""
Redirect a file-like stream to a logger.
"""
def __init__(self, stream, logger, level):
self.stream = stream
self.logger = logger
self.level = level
self.logger._girderLogHandlerOutput = False
# This class is intended to override a default stream like sys.stdout
# and sys.stderr and send that information to both the original stream
# and the logger method. However, we want to preserve as much
# functionality for stdout and stderr as possible, so that other
# modules that send data to them can do so without a problem. The only
# method we really need to override is write, but we cannot mutate the
# write method on the stream itself, so we replace the stream with this
# custom class. To preserve the stream methods, all of them get added
# to our class instance except private and built-in methods, which, in
# python, begin with _.
# Fundamentally, this lets our stream replacement handle functions
# flush, writeline, and others without having to enumerate them
# individually.
for key in dir(stream):
# It's possible for a file-like object to have name appear in dir(stream) but not
# actually be an attribute, thus using a default with getattr is required.<|fim▁hole|> if (key != 'write' and not key.startswith('_') and (
callable(getattr(stream, key, None)) or
isinstance(getattr(stream, key, None), (
six.binary_type, six.string_types, six.integer_types, bool)))):
setattr(self, key, getattr(stream, key))
def write(self, buf):
if not self.logger._girderLogHandlerOutput:
self.logger._girderLogHandlerOutput = True
self.stream.write(buf)
for line in buf.rstrip().splitlines():
self.logger.log(self.level, line.rstrip())
self.logger._girderLogHandlerOutput = False
def getLogPaths():
"""
Return the paths to the error and info log files. These are returned as
a dict with "error" and "info" keys that point to the respective file,
as well as a "root" key pointing to the log root directory.
"""
cfg = config.getConfig()
logCfg = cfg.get('logging', {})
root = os.path.expanduser(logCfg.get('log_root', LOG_ROOT))
return {
'root': root,
'error': logCfg.get('error_log_file', os.path.join(root, 'error.log')),
'info': logCfg.get('info_log_file', os.path.join(root, 'info.log'))
}
def _setupLogger():
"""
Sets up the Girder logger.
"""
global _quiet
logger = logging.getLogger('girder')
cfg = config.getConfig()
logCfg = cfg.get('logging', {})
# If we are asked to be quiet, set a global flag so that logprint doesn't
# have to get the configuration settings every time it is used.
if logCfg.get('log_quiet') is True:
_quiet = True
logPaths = getLogPaths()
# Ensure log paths are valid
logDirs = [
logPaths['root'],
os.path.dirname(logPaths['info']),
os.path.dirname(logPaths['error'])
]
for logDir in logDirs:
mkdir(logDir)
# Set log level
level = logging.INFO
if logCfg.get('log_level') and isinstance(getattr(logging, logCfg['log_level'], None), int):
level = getattr(logging, logCfg['log_level'])
logger.setLevel(logging.DEBUG if level is None else level)
logSize = MAX_LOG_SIZE
if logCfg.get('log_max_size'):
sizeValue = logCfg['log_max_size']
sizeUnits = {'kb': 1024, 'Mb': 1024 ** 2, 'Gb': 1024 ** 3}
if sizeValue[-2:] in sizeUnits:
logSize = int(sizeValue[:-2].strip()) * sizeUnits[sizeValue[-2:]]
else:
logSize = int(sizeValue)
backupCount = int(logCfg.get('log_backup_count', LOG_BACKUP_COUNT))
# Remove extant log handlers (this allows this function to called multiple
# times)
for handler in list(logger.handlers):
if hasattr(handler, '_girderLogHandler'):
logger.removeHandler(handler)
cherrypy.log.access_log.removeHandler(handler)
fmt = LogFormatter('[%(asctime)s] %(levelname)s: %(message)s')
infoMaxLevel = logging.INFO
# Create log handlers
if logPaths['error'] != logPaths['info']:
eh = logging.handlers.RotatingFileHandler(
logPaths['error'], maxBytes=logSize, backupCount=backupCount)
eh.setLevel(level)
eh.addFilter(LogLevelFilter(min=logging.WARNING, max=logging.CRITICAL))
eh._girderLogHandler = 'error'
eh.setFormatter(fmt)
logger.addHandler(eh)
# Record cherrypy errors in our logs, too
cherrypy.log.error_log.addHandler(eh)
else:
infoMaxLevel = logging.CRITICAL
if isinstance(getattr(logging, logCfg.get('log_max_info_level', ''), None), int):
infoMaxLevel = getattr(logging, logCfg['log_max_info_level'])
ih = logging.handlers.RotatingFileHandler(
logPaths['info'], maxBytes=logSize, backupCount=backupCount)
ih.setLevel(level)
ih.addFilter(LogLevelFilter(min=logging.DEBUG, max=infoMaxLevel))
ih._girderLogHandler = 'info'
ih.setFormatter(fmt)
logger.addHandler(ih)
# Record cherrypy errors in our logs, too
cherrypy.log.error_log.addHandler(ih)
# Log http accesses to the screen and/or the info log.
accessLog = logCfg.get('log_access', 'screen')
if not isinstance(accessLog, (tuple, list, set)):
accessLog = [accessLog]
if _quiet or ('screen' not in accessLog and 'stdout' not in accessLog):
cherrypy.config.update({'log.screen': False})
if 'info' in accessLog:
cherrypy.log.access_log.addHandler(ih)
return logger
logger = _setupLogger()
def logStdoutStderr(force=False):
if _originalStdOut == sys.stdout or force:
sys.stdout = StreamToLogger(_originalStdOut, logger, logging.INFO)
sys.stderr = StreamToLogger(_originalStdErr, logger, logging.ERROR)
def logprint(*args, **kwargs):
"""
Send a message to both stdout and the appropriate logs. This behaves like
Python3's print statement, plus takes additional named parameters:
:param level: the log level. This determines which log handlers will store
the log message. The log is always sent to stdout.
:param color: one of the constants.TerminalColor values or None.
:param exc_info: None to not print exception information. True for the
last exception, or a tuple of exception information.
"""
data = six.StringIO()
kwargs = (kwargs or {}).copy()
level = kwargs.pop('level', logging.DEBUG)
color = kwargs.pop('color', None)
exc_info = kwargs.pop('exc_info', None)
kwargs['file'] = data
six.print_(*args, **kwargs)
data = data.getvalue().rstrip()
if exc_info and not isinstance(exc_info, tuple):
exc_info = sys.exc_info()
data += '\n' + ''.join(traceback.format_exception(*exc_info)).rstrip()
logger.log(level, data)
if not _quiet:
if color:
data = getattr(TerminalColor, color)(data)
_originalStdOut.write('%s\n' % data)
_originalStdOut.flush()
def _setupCache():
"""
Setup caching based on configuration file.
Cache backends are forcibly replaced because Girder initially configures
the regions with the null backends.
"""
curConfig = config.getConfig()
if curConfig['cache']['enabled']:
# Replace existing backend, this is necessary
# because they're initially configured with the null backend
cacheConfig = {
'cache.global.replace_existing_backend': True,
'cache.request.replace_existing_backend': True
}
curConfig['cache'].update(cacheConfig)
cache.configure_from_config(curConfig['cache'], 'cache.global.')
requestCache.configure_from_config(curConfig['cache'], 'cache.request.')
else:
# Reset caches back to null cache (in the case of server teardown)
cache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
requestCache.configure(backend='dogpile.cache.null', replace_existing_backend=True)
# Although the rateLimitBuffer has no pre-existing backend, this method may be called multiple
# times in testing (where caches were already configured)
rateLimitBuffer.configure(backend='dogpile.cache.memory', replace_existing_backend=True)
# Expose common logging levels and colors as methods of logprint.
logprint.info = functools.partial(logprint, level=logging.INFO, color='info')
logprint.warning = functools.partial(
logprint, level=logging.WARNING, color='warning')
logprint.error = functools.partial(
logprint, level=logging.ERROR, color='error')
logprint.success = functools.partial(
logprint, level=logging.INFO, color='success')
logprint.critical = functools.partial(
logprint, level=logging.CRITICAL, color='error')
logprint.debug = logprint
logprint.exception = functools.partial(
logprint, level=logging.ERROR, color='error', exc_info=True)
# alias girder.plugin => girder.utility.plugin_utilities
from girder.utility import plugin_utilities as plugin # noqa<|fim▁end|> | # See https://github.com/GrahamDumpleton/mod_wsgi/issues/184 for more. |
<|file_name|>ip_renewer_gui.py<|end_file_name|><|fim▁begin|>import weakref
import logging<|fim▁hole|>from core.api import api
from core.config import conf
from qt import signals
#Config parser
OPTION_IP_RENEW_ACTIVE = "ip_renew_active"
OPTION_RENEW_SCRIPT_ACTIVE = "renew_script_active"
class IPRenewerGUI:
""""""
def __init__(self, parent, ip_renewer):
""""""
self.ip_renewer = ip_renewer
self.weak_parent = weakref.ref(parent)
self.id_item_list = []
self.is_working = True
if self.can_change_ip():
self.id_item_list = [download_item.id for download_item in api.get_active_downloads().values() + api.get_queue_downloads().values()]
signals.on_stop_all.emit()
if conf.get_addon_option(OPTION_RENEW_SCRIPT_ACTIVE, default=False, is_bool=True):
self.ip_renewer.start_shell_script()
else:
self.ip_renewer.start_default_ip_renew()
self.status_msg = _("Changing IP...")
signals.status_bar_push_msg.emit(self.status_msg)
self.timer = self.parent.idle_timeout(1000, self.update)
else:
self.is_working = False
@property
def parent(self):
return self.weak_parent()
def can_change_ip(self):
""""""
for download_item in api.get_active_downloads().itervalues():
if download_item.start_time:
return False
return True
def update(self):
""""""
if not self.ip_renewer.is_running():
signals.status_bar_pop_msg.emit(self.status_msg)
for id_item in self.id_item_list:
api.start_download(id_item)
try:
self.parent.downloads.rows_buffer[id_item][1] = self.parent.downloads.icons_dict[cons.STATUS_QUEUE]
except Exception as err:
logger.debug(err)
self.timer.stop()
self.is_working = False<|fim▁end|> | logger = logging.getLogger(__name__)
import core.cons as cons |
<|file_name|>pitchanalysis.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
pitchanalysis.py
--
Christopher Kuech
[email protected]
--
Requires:
Python 2.7
Instructions:
python pitchanalysis.py [wav-file-name]
"""
import matplotlib
from math import log
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.figure import Figure
import numpy as np
import pyaudio
import sys
from time import time, sleep
import Tkinter as tk
import wavelab
(WIDTH, HEIGHT) = (800, 500)
FNAME = './Bach.wav' if len(sys.argv) != 2 else sys.argv[1]
font = ('Helvetica', 14, 'bold')
CHUNK = 1024
def audioworker():
"""the function run on the audio thread"""
global frame
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(2),
channels=1, rate=4*44100, output=True)
# unknown why rate is off by 4x
while True:
stream.write(data[frame:frame+CHUNK].tostring())
frame = (frame + CHUNK) % len(wav)
stream.stop_stream()
stream.close()
p.terminate()
def graphicsworker():
"""the function run on the graphics thread"""
while True:
start = time()
p = ptype.get()
w = wsize.get()
wty = wtype.get()
# compute frequencies from clip
clip = data[frame:frame+w]
if wty == "hanning":
clip *= np.hanning(w)
elif wty == "hamming":
clip *= np.hamming(w)
freqs = wavelab.frequencies(clip)
# update plot
xs = np.sort(freqs.keys())
ys = np.array(map(freqs.get, xs))
axes.cla()
(xmax, ymin, ymax) = (10e4, 0.000001, 10e2)
# (xlim, ylim) = (_, (ymin, ymax)) = ((0, 1e4), (1e-3, 1e7))
axes.set_xscale("log")
axes.set_yscale("linear")
axes.set_xlim((1, xmax))
if p == "square":
# axes.set_yscale("linear")
axes.set_ylim((ymin**2, ymax**2))
ys = ys * ys
elif p == "dB":
# axes.set_yscale("log")
axes.set_ylim((log(ymin), log(ymax)))
ys = np.log(ys)
elif p == "-dB":
# axes.set_yscale("log")
axes.set_ylim((-log(ymax), -log(ymin)))
ys = -np.log(ys)
elif p == "linear":
# axes.set_yscale("linear")
axes.set_ylim((ymin, ymax))
axes.plot(xs, ys, 'r-')
canvas.show()
# pitch tracker
freq = max(freqs, key=lambda f: freqs[f])
pitch.set(wavelab.pitch(freq).replace('/','\n'))
# attempt to achieve 30fps animation (at best)
dt = time() - start
sleep(max(0, 1.0/30.0 - dt))
# read wave file
(framerate, wav) = wavelab.readwav(FNAME)
data = np.concatenate((wav, wav)) # avoid out of bounds
frame = 0
# create a GUI instance (do before any use of Tkinter)
root = tk.Tk()
root.wm_title("Frequency Spectrogram")
# these objects hold the variables from the widgets
wsize = tk.IntVar() # window size (in frames)
wsize.set(2205)
wtype = tk.StringVar() # type of windowing to use
wtype.set("rectangle")
ptype = tk.StringVar() # type of power to use
ptype.set("square")
pitch = tk.StringVar() # the current pitch
pitch.set("")
widgetps = lambda n, v: {'variable': v, 'text': n, 'value': n}
# returns the dict of kwargs that initialize a widget
# create the canvas widget and add it to the GUI
# canvas = tk.Canvas(root, borderwidth=0, width=WIDTH, height=HEIGHT, bg='#000')
# canvas.grid(row=0, column=0, columnspan=4)
# canvas.show()
canvasframe = tk.Frame(root, width=WIDTH, height=HEIGHT)
canvasframe.grid(row=0, column=0, columnspan=4)
figure = Figure()
axes = figure.add_axes( (0.1, 0.1, 0.8, 0.8), frameon=True,
xlabel="Frequency (Hz)", ylabel="Power")
canvas = FigureCanvasTkAgg(figure, canvasframe)
canvas.get_tk_widget().pack(side=tk.TOP, fill=tk.BOTH, expand=1)
canvas.show()
# create the wtype controller and add it to the GUI
tk.Label(root, font=font, text="Windowing").grid(row=1, column=0, pady=10)
wframe = tk.Frame(root)
wframe.grid(row=2, column=0, pady=10, sticky="n")
tk.Radiobutton(wframe, **widgetps("rectangle", wtype)).grid(sticky="w", row=0)
tk.Radiobutton(wframe, **widgetps("hamming" , wtype)).grid(sticky="w", row=1)
tk.Radiobutton(wframe, **widgetps("hanning" , wtype)).grid(sticky="w", row=2)
# create the wsize controller and add it to the GUI
tk.Label(root, font=font, text="Window Size").grid(row=1, column=1, pady=10)
tk.Scale(root, variable=wsize, orient=tk.HORIZONTAL, from_=10, to=4410).grid(row=2, column=1, sticky="wen")
# create the ptype controller and add it to the GUI
tk.Label(root, font=font, text="Power").grid(row=1, column=2, pady=10)
pframe = tk.Frame(root)
pframe.grid(row=2, column=2, pady=10, sticky="n")
tk.Radiobutton(pframe, **widgetps("square", ptype)).grid(sticky="w", row=0)
tk.Radiobutton(pframe, **widgetps("dB", ptype)).grid(sticky="w", row=1)
tk.Radiobutton(pframe, **widgetps("-dB", ptype)).grid(sticky="w", row=2)
tk.Radiobutton(pframe, **widgetps("linear", ptype)).grid(sticky="w", row=3)
# create the area where the pitchlabel is displayed
tk.Label(root, font=font, text="Pitch").grid(row=1, column=3, pady=10)
(fontfamily, fontsize, fontweight) = font
pitchfont = (fontfamily, 24, fontweight)
pitchlabel = tk.Label(root, font=pitchfont, textvariable=pitch, width=7).grid(row=2, column=3)
<|fim▁hole|>
# start the other threads
wavelab.thread(audioworker)
wavelab.thread(graphicsworker)
# start the main update loop for the GUI (and block)
tk.mainloop()<|fim▁end|> | |
<|file_name|>test_static.py<|end_file_name|><|fim▁begin|>import unittest
from cStringIO import StringIO
from ..backends import static
# There aren't many tests here because it turns out to be way more convenient to
# use test_serializer for the majority of cases
class TestStatic(unittest.TestCase):
def compile(self, input_text, input_data):
return static.compile(input_text, input_data)
def test_get_0(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 2})
self.assertEquals(manifest.get("key"), "value")
children = list(item for item in manifest.iterchildren())
self.assertEquals(len(children), 1)
section = children[0]
self.assertEquals(section.name, "Heading 1")
self.assertEquals(section.get("other_key"), "value_2")
self.assertEquals(section.get("key"), "value")
def test_get_1(self):
data = """
key: value
[Heading 1]
other_key:
if a == 1: value_1
if a == 2: value_2
value_3
"""
manifest = self.compile(data, {"a": 3})
children = list(item for item in manifest.iterchildren())
section = children[0]
self.assertEquals(section.get("other_key"), "value_3")
def test_get_3(self):
data = """key:
if a == "1": value_1
if a[0] == "ab"[0]: value_2
"""
manifest = self.compile(data, {"a": "1"})
self.assertEquals(manifest.get("key"), "value_1")
manifest = self.compile(data, {"a": "ac"})
self.assertEquals(manifest.get("key"), "value_2")
def test_get_4(self):
data = """key:
if not a: value_1
value_2
"""
manifest = self.compile(data, {"a": True})
self.assertEquals(manifest.get("key"), "value_2")
manifest = self.compile(data, {"a": False})
self.assertEquals(manifest.get("key"), "value_1")
<|fim▁hole|> if a == 1.5: value_1
value_2
key_1: other_value
"""
manifest = self.compile(data, {"a": 1.5})
self.assertFalse(manifest.is_empty)
self.assertEquals(manifest.root, manifest)
self.assertTrue(manifest.has_key("key_1"))
self.assertFalse(manifest.has_key("key_2"))
self.assertEquals(set(manifest.iterkeys()), set(["key", "key_1"]))
self.assertEquals(set(manifest.itervalues()), set(["value_1", "other_value"]))
def test_is_empty_1(self):
data = """
[Section]
[Subsection]
"""
manifest = self.compile(data, {})
self.assertTrue(manifest.is_empty)<|fim▁end|> | def test_api(self):
data = """key: |
<|file_name|>.happydoc.TkMoleculeDrawer.py<|end_file_name|><|fim▁begin|>(S'7f2210613c44962221805a1b28aa76d6'
p1
(ihappydoclib.parseinfo.moduleinfo
ModuleInfo
p2
(dp3
S'_namespaces'
p4
((dp5
S'TkDrawer'
p6
(ihappydoclib.parseinfo.classinfo
ClassInfo
p7
(dp8
g4
((dp9
(dp10
tp11
sS'_filename'
p12
S'../python/frowns/Depict/TkMoleculeDrawer.py'
p13
sS'_docstring'
p14
S''
sS'_class_member_info'
p15
(lp16
sS'_name'
p17
g6
sS'_parent'
p18
g2
sS'_comment_info'
p19
(dp20
sS'_base_class_info'
p21
(lp22
S'DrawMolHarness'
p23
aS'TkMixin'
p24
asS'_configuration_values'
p25
(dp26
sS'_class_info'
p27
g9
sS'_function_info'
p28
g10
sS'_comments'
p29
S''
sbsS'TkMixin'
p30
(ihappydoclib.parseinfo.classinfo
ClassInfo
p31
(dp32
g4
((dp33
(dp34
S'pack_forget'
p35
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p36
(dp37
g4
((dp38
(dp39
tp40
sS'_exception_info'
p41
(dp42
sS'_parameter_names'
p43
(S'self'
p44
tp45
sS'_parameter_info'
p46
(dp47
g44
(NNNtp48
ssg12
g13
sg14
S''
sg17
g35
sg18
g31
sg19
g20
sg25
(dp49
sg27
g38
sg28
g39
sg29
S''
sbsS'_resize'
p50
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p51
(dp52
g4
((dp53
(dp54
tp55
sg41
(dp56
sg43
(S'self'
p57
S'event'
p58
tp59
sg46
(dp60
g57
(NNNtp61
sg58
(NNNtp62
ssg12
g13
sg14
S'(event) -> resive the drawing to event.height, event.width'
p63
sg17
g50
sg18
g31
sg19
g20
sg25
(dp64
sg27
g53
sg28
g54
sg29
S''
sbsS'_clear'
p65
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p66
(dp67
g4
((dp68
(dp69
tp70
sg41
(dp71
sg43
(S'self'
p72
tp73
sg46
(dp74
g72
(NNNtp75
ssg12
g13
sg14
S''
sg17
g65
sg18
g31
sg19
g20
sg25
(dp76
sg27
g68
sg28
g69
sg29
S''
sbsS'_init'
p77
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p78
(dp79
g4
((dp80
(dp81
tp82
sg41
(dp83
sg43
(S'self'
p84
S'master'
p85
S'height'
p86
S'width'
p87
tp88
sg46
(dp89
g87
(NNNtp90
sg84
(NNNtp91
sg85
(NNNtp92
sg86
(NNNtp93
ssg12
g13
sg14
S''
sg17
g77
sg18
g31
sg19
g20
sg25
(dp94
sg27
g80
sg28
g81
sg29
S''
sbsS'postscript'
p95
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p96
(dp97
g4
((dp98
(dp99
tp100
sg41
(dp101
sg43
(S'self'
p102
S'*a'
p103
S'*kw'
p104
tp105
sg46
(dp106
g102
(NNNtp107
sg104
(NNNtp108
sg103
(NNNtp109
ssg12
g13
sg14
S'return a postscript image of the current molecule arguments\n are sent to the Tkinter canvas postscript method'
p110
sg17
g95
sg18
g31
sg19
g20
sg25
(dp111
sg27
g98
sg28
g99
sg29
S''
sbsS'_drawLine'
p112
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p113
(dp114
g4
((dp115
(dp116
tp117
sg41
(dp118
sg43
(S'self'
p119
S'x1'<|fim▁hole|>p120
S'y1'
p121
S'x2'
p122
S'y2'
p123
S'color'
p124
tp125
sg46
(dp126
g123
(NNNtp127
sg124
(NNNtp128
sg119
(NNNtp129
sg122
(NNNtp130
sg121
(NNNtp131
sg120
(NNNtp132
ssg12
g13
sg14
S''
sg17
g112
sg18
g31
sg19
g20
sg25
(dp133
sg27
g115
sg28
g116
sg29
S''
sbsS'grid'
p134
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p135
(dp136
g4
((dp137
(dp138
tp139
sg41
(dp140
sg43
(S'self'
p141
S'*a'
p142
S'*kw'
p143
tp144
sg46
(dp145
g141
(NNNtp146
sg143
(NNNtp147
sg142
(NNNtp148
ssg12
g13
sg14
S''
sg17
g134
sg18
g31
sg19
g20
sg25
(dp149
sg27
g137
sg28
g138
sg29
S''
sbsS'_drawOval'
p150
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p151
(dp152
g4
((dp153
(dp154
tp155
sg41
(dp156
sg43
(S'self'
p157
S'x'
S'y'
S'xh'
p158
S'yh'
p159
tp160
sg46
(dp161
S'y'
(NNNtp162
sS'x'
(NNNtp163
sg157
(NNNtp164
sg158
(NNNtp165
sg159
(NNNtp166
ssg12
g13
sg14
S''
sg17
g150
sg18
g31
sg19
g20
sg25
(dp167
sg27
g153
sg28
g154
sg29
S''
sbsS'_drawText'
p168
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p169
(dp170
g4
((dp171
(dp172
tp173
sg41
(dp174
sg43
(S'self'
p175
S'text'
p176
S'font'
p177
S'fontsize'
p178
S'x'
S'y'
S'color'
p179
S'bg'
p180
tp181
sg46
(dp182
g180
(I1
S'"white"'
Ntp183
sg179
(NNNtp184
sg176
(NNNtp185
sg175
(NNNtp186
sg178
(NNNtp187
sS'y'
(NNNtp188
sS'x'
(NNNtp189
sg177
(NNNtp190
ssg12
g13
sg14
S''
sg17
g168
sg18
g31
sg19
g20
sg25
(dp191
sg27
g171
sg28
g172
sg29
S''
sbsS'pack'
p192
(ihappydoclib.parseinfo.functioninfo
FunctionInfo
p193
(dp194
g4
((dp195
(dp196
tp197
sg41
(dp198
sg43
(S'self'
p199
S'*a'
p200
S'*kw'
p201
tp202
sg46
(dp203
g199
(NNNtp204
sg201
(NNNtp205
sg200
(NNNtp206
ssg12
g13
sg14
S''
sg17
g192
sg18
g31
sg19
g20
sg25
(dp207
sg27
g195
sg28
g196
sg29
S''
sbstp208
sg12
g13
sg14
S''
sg15
(lp209
sg17
g30
sg18
g2
sg19
g20
sg21
(lp210
sg25
(dp211
sg27
g33
sg28
g34
sg29
S''
sbs(dp212
tp213
sS'_import_info'
p214
(ihappydoclib.parseinfo.imports
ImportInfo
p215
(dp216
S'_named_imports'
p217
(dp218
S'Tkinter'
p219
(lp220
S'*'
asS'MoleculeDrawer'
p221
(lp222
S'DrawMolHarness'
p223
assS'_straight_imports'
p224
(lp225
sbsg12
g13
sg14
S''
sg17
S'TkMoleculeDrawer'
p226
sg18
Nsg19
g20
sg25
(dp227
S'include_comments'
p228
I1
sS'cacheFilePrefix'
p229
S'.happydoc.'
p230
sS'useCache'
p231
I1
sS'docStringFormat'
p232
S'StructuredText'
p233
ssg27
g5
sg28
g212
sg29
S''
sbt.<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -------------------------------- Database models----------------------------------------------------------------------
import sys, os
import sqlalchemy
from sqlalchemy import create_engine
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import secrets
import settings
MYSQL_USERNAME = secrets.MYSQL_USERNAME
MYSQL_PASSWORD = secrets.MYSQL_PASSWORD
MYSQL_HOSTNAME = secrets.MYSQL_HOSTNAME
MYSQL_DATABASE_NAME = secrets.MYSQL_DATABASE_NAME
MYSQL_HOST_PORT = secrets.MYSQL_HOST_PORT
MAX_MESSAGE_SIZE = settings.MAX_MESSAGE_SIZE
database_url = 'mysql://{}:{}@{}:{}/{}'.format(MYSQL_USERNAME, MYSQL_PASSWORD, MYSQL_HOSTNAME, MYSQL_HOST_PORT,
MYSQL_DATABASE_NAME)
engine = create_engine(database_url)
from sqlalchemy import Column, Integer, String, Boolean, DateTime, Text
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship<|fim▁hole|>from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class ModelManager(object):
"""
Model manager
"""
@classmethod
def create_session(cls, engine):
"""
create a session based
:param engine: engine object
:return: returns the created session object
"""
Session = sessionmaker(bind=engine)
session = Session()
return session
@classmethod
def add_to_session(cls, session, obj):
"""
add the object to the session
:param obj:
:param session: session object
:return:
"""
session.add(obj)
@classmethod
def commit_session(cls, session):
"""
commit to session
:param session:
:return:
"""
session.commit()
@classmethod
def delete_from_session(cls, session, obj):
"""
delete the object from the session
:param session:
:return:
"""
session.delete(obj)
@classmethod
def rollback_session(cls, session):
"""
rollback the current session
:param session:
:return:
"""
session.rollback()
@classmethod
def close_session(cls, session):
"""
close the current session
:param session:
:return:
"""
session.close()
class Queue(Base):
"""
Queues model class
"""
__tablename__ = "Queue"
id = Column(Integer, primary_key=True)
name = Column(String(20), unique=True)
created_timestamp = Column(DateTime)
message = relationship("Message", back_populates="queue")
def __repr__(self):
"""
representation of the Queue class
:return:
"""
return "<Queue (name: {}, created_timestamp: {})>".format(self.name, self.created_timestamp)
class Message(Base):
"""
Message model class
"""
__tablename__ = "Message"
id = Column(Integer, primary_key=True)
queue_id = Column(Integer, ForeignKey('Queue.id'))
is_fetched = Column(Boolean, default=False)
content = Column(Text)
publish_timestamp = Column(DateTime)
consumed_timestamp = Column(DateTime)
queue = relationship("Queue", back_populates="message")
# The consumed_timestamp should ideally have a null value for default but that is not feasible here so
# for checking we will first check whether the is_fetched value is true, if so we consider the consumed_timestamp
# as the date and time when the message was dequeued.
def __repr__(self):
"""
representation of the Message class
:return:
"""
return "<Message (queue_id: {}, is_fetched: {}, content: {}...{}, publish_timestamp: {}, " \
"consumed_timestamp: {})>".format(self.queue_id, self.is_fetched, self.content[:10],self.content[10:],
self.publish_timestamp, self.consumed_timestamp)<|fim▁end|> | from sqlalchemy import ForeignKey |
<|file_name|>jquery.inputmask.extensions.js<|end_file_name|><|fim▁begin|>/*
Input Mask plugin extensions
http://github.com/RobinHerbots/jquery.inputmask
Copyright (c) 2010 - 2014 Robin Herbots
Licensed under the MIT license (http://www.opensource.org/licenses/mit-license.php)
Version: 0.0.0
Optional extensions on the jquery.inputmask base
*/
(function ($) {
//extra definitions
$.extend($.inputmask.defaults.definitions, {
'A': {
validator: "[A-Za-z]",
cardinality: 1,
casing: "upper" //auto uppercasing
},
'#': {
validator: "[A-Za-z\u0410-\u044F\u0401\u04510-9]",
cardinality: 1,
casing: "upper"
}
});
$.extend($.inputmask.defaults.aliases, {
'url': {
mask: "ir",
placeholder: "",
separator: "",
defaultPrefix: "http://",
regex: {
urlpre1: new RegExp("[fh]"),
urlpre2: new RegExp("(ft|ht)"),<|fim▁hole|> urlpre5: new RegExp("(ftp:/|ftps:|http:|https)"),
urlpre6: new RegExp("(ftp://|ftps:/|http:/|https:)"),
urlpre7: new RegExp("(ftp://|ftps://|http://|https:/)"),
urlpre8: new RegExp("(ftp://|ftps://|http://|https://)")
},
definitions: {
'i': {
validator: function (chrs, buffer, pos, strict, opts) {
return true;
},
cardinality: 8,
prevalidator: (function () {
var result = [], prefixLimit = 8;
for (var i = 0; i < prefixLimit; i++) {
result[i] = (function () {
var j = i;
return {
validator: function (chrs, buffer, pos, strict, opts) {
if (opts.regex["urlpre" + (j + 1)]) {
var tmp = chrs, k;
if (((j + 1) - chrs.length) > 0) {
tmp = buffer.join('').substring(0, ((j + 1) - chrs.length)) + "" + tmp;
}
var isValid = opts.regex["urlpre" + (j + 1)].test(tmp);
if (!strict && !isValid) {
pos = pos - j;
for (k = 0; k < opts.defaultPrefix.length; k++) {
buffer[pos] = opts.defaultPrefix[k]; pos++;
}
for (k = 0; k < tmp.length - 1; k++) {
buffer[pos] = tmp[k]; pos++;
}
return { "pos": pos };
}
return isValid;
} else {
return false;
}
}, cardinality: j
};
})();
}
return result;
})()
},
"r": {
validator: ".",
cardinality: 50
}
},
insertMode: false,
autoUnmask: false
},
"ip": { //ip-address mask
mask: "i[i[i]].i[i[i]].i[i[i]].i[i[i]]",
definitions: {
'i': {
validator: function (chrs, buffer, pos, strict, opts) {
if (pos - 1 > -1 && buffer[pos - 1] != ".") {
chrs = buffer[pos - 1] + chrs;
if (pos - 2 > -1 && buffer[pos - 2] != ".") {
chrs = buffer[pos - 2] + chrs;
} else chrs = "0" + chrs;
} else chrs = "00" + chrs;
return new RegExp("25[0-5]|2[0-4][0-9]|[01][0-9][0-9]").test(chrs);
},
cardinality: 1
}
}
},
"email": {
mask: "*{1,20}[.*{1,20}][.*{1,20}][.*{1,20}]@*{1,20}.*{2,6}[.*{1,2}]",
greedy: false
}
});
})(jQuery);<|fim▁end|> | urlpre3: new RegExp("(ftp|htt)"),
urlpre4: new RegExp("(ftp:|http|ftps)"), |
<|file_name|>EntityMethodPostUpdateTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Oracle - initial API and implementation from Oracle TopLink
******************************************************************************/
package org.eclipse.persistence.testing.tests.jpa.advanced;
import org.eclipse.persistence.testing.models.jpa.advanced.Project;
/**
* Tests the @PostUpdate events from an Entity.
*
* @author Guy Pelletier
*/
public class EntityMethodPostUpdateTest extends CallbackEventTest {
public void test() throws Exception {
m_beforeEvent = 0; // Loading a new object to update, count starts at 0.
<|fim▁hole|> Project project = updateProject();
m_afterEvent = project.post_update_count;
}
}<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse
from django.shortcuts import render
from survey.models import Choice
from survey.forms import ChoiceForm
import csv
import random
# Create your views here.
def index(request):
examples = ['controlling Exposure', 'changing Temperature', 'modifying Highlights', 'changing Shadows', 'Zooming in/out', 'changing the Constrast']
if request.method == 'POST':
f = ChoiceForm(request.POST)
if f.is_valid():
newChoice = f.save()
if request.session.get('previous_responses', False):
prev_response_array = request.session['previous_responses']
prev_response_array.append({'program':newChoice.program, 'text':newChoice.text})
request.session['previous_responses'] = prev_response_array
else:
request.session['previous_responses'] = [{'program':newChoice.program, 'text':newChoice.text}];
return render(request, 'index.html', {'previous':1, 'previous_responses':request.session['previous_responses'], 'example':random.choice(examples)})
if request.session.get('previous_responses', False):
return render(request, 'index.html', {'previous':1, 'previous_responses':request.session['previous_responses'], 'example':random.choice(examples)})
else:
return render(request, 'index.html', {'previous':None, 'previous_responses':None, 'example':random.choice(examples)})
def responses(request):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="responses.csv"'
writer = csv.writer(response)
writer.writerow(['Date', 'Application', 'Suggested Operation'])
for aChoice in Choice.objects.all():
writer.writerow([aChoice.date, aChoice.program, aChoice.text])<|fim▁hole|><|fim▁end|> |
return response |
<|file_name|>Equipment.java<|end_file_name|><|fim▁begin|>package com.winsun.fruitmix.model;
/**
* Created by Administrator on 2016/7/6.
*/
public class Equipment {
private String serviceName;
private String host;
private int port;
public Equipment(String serviceName, String host, int port) {
this.serviceName = serviceName;
this.host = host;
this.port = port;
}
public Equipment() {
}
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;<|fim▁hole|> return port;
}
public void setPort(int port) {
this.port = port;
}
}<|fim▁end|> | }
public int getPort() { |
<|file_name|>timeseries.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
This module will define useful objects for conditional analysis
"""
import collections
import numpy as np
import pandas as pd
from tunacell.base.datatools import Coordinates
# define an object to handle heterogeneous types of time series
class TimeSeries(object):
"""Object that decorates the data with other useful attributes.
Parameters
----------
ts : :class:`Coordinates` instance, or 2d structured ndarray
better to use Coordinates, so that names can be carried along
ids : sequence of cell identifiers from which data was collected
index_cycles : sequence of couples (index_first, index_last)
that delimit data corresponding to cell id, must be same length as ids
slices : sequence of slice objects
each item can be used to slice the entire table
time_bounds : sequence of couples of floats
for each cell, first element is the lower bound of cell cycle, the
second element is the upper bound of cell cycle, must be same length
as ids
select_ids : sequences of True/False values corresponding whether or
not to include data from cell id in timeseries, must be same length as
ids
"""
def __init__(self, ts=[], ids=[], index_cycles=[], slices=None,
time_bounds=[], select_ids={}, container_label=None,
experiment_label=None):
# ts is a Coordinates instance
self.container_label = container_label
self.experiment_label = experiment_label
if isinstance(ts, Coordinates):
self._timeseries = ts
# ts is a numpy array (structured if possible)
elif isinstance(ts, np.ndarray):
# convert structured arrays to 2d ndarrays
if ts.dtype.names is not None:
_arr = ts.view((float, len(ts.dtype.names)))
_x_name, _y_name = ts.dtype.names[:2] # take only first 2 cols
else:
_arr = ts
_x_name, _y_name = 'x', 'y'
_x = _arr[:, 0]
_y = _arr[:, 1]
self._timeseries = Coordinates(_x, _y,
x_name=_x_name, y_name=_y_name)
# ... list of couples
elif isinstance(ts, collections.Iterable):
_ts = list(ts)
_x, _y = map(np.array, zip(*_ts))
self._timeseries = Coordinates(_x, _y)
self.time_bounds = time_bounds
self.slices = []
if index_cycles: # array indices corresponding to (first, last) frame for each cell
self.index_cycles = index_cycles
slices = []
for item in index_cycles:
if item is None:
slices.append(None)
# indices are reported as a single None
# when no data is reported for a given cell
else:
i, j = item
if j is not None:
slices.append(slice(i, j+1))
else:
slices.append(slice(i, None))
self.slices = slices
elif slices is not None:
self.slices = slices
index_cycles = []
for item in slices:
if item is None:
index_cycles.append(None)
else:
if item.stop is not None:
index_cycles.append((item.start, item.stop - 1))
else:
index_cycles.append((item.start, None))
self.index_cycles = index_cycles
self.ids = ids
if len(select_ids.keys()) > 0: # master is already defined
self.selections = select_ids
else: # nothing is defined, we define master here
self.selections = {'master': [True for _ in self.ids]}
return
def use_condition(self, condition_label='master',
sharp_tleft=None, sharp_tright=None):
"""Get conditioned timeseries.
Parameter
---------
condition_label : str (default 'master')
must be a key of dictionary self.selections, and corresponds to
the repr of a given :class:`FilterSet` instance.
sharp_left : float (default None)
sharp lower bound for cell cycle timing. USE ONLY FOR CELL CYCLE
OBSERVABLES
sharp_right : float (default None)
sharp upper bound for cell cycle timing. USE ONLY FOR CELL CYCLE
OBSERVABLES
Returns
-------
Coordinates instance made of valid (x, y) points
"""
selection = self.selections[condition_label]
xs, ys = [], []
for index, cid in enumerate(self.ids):
if selection[index] and self.slices[index] is not None:
if sharp_tleft is not None:
if self.time_bounds[index][0] < sharp_tleft:
continue
if sharp_tright is not None:
if self.time_bounds[index][1] > sharp_tright:
continue
xs.append(self.timeseries.x[self.slices[index]])
ys.append(self.timeseries.y[self.slices[index]])
if len(xs) > 0:
_x = np.concatenate(xs)
_y = np.concatenate(ys)
else:
_x = []
_y = []
out = Coordinates(_x, _y, x_name=self.timeseries.x_name,
y_name=self.timeseries.y_name)
return out
@property
def timeseries(self):
return self._timeseries
#
# @timeseries.setter
# def timeseries(self, ts):
# self._timeseries = ts
# def __getitem__(self, key):
# return self.timeseries[key]
def __repr__(self):
return repr(self.timeseries)
def as_text(self, sep='\t', cell_sep='\n', print_labels=False):
"""Export TimeSeries as text arrays
Parameters
----------
sep : str (default '\t')
how to separate columns
cell_sep : str (default '\n')
how to separate cells (default: one blank line)
print_labels : bool {False, True}
first line is labels, followed by empty line
"""
printout = ''
labels = [self.timeseries.x_name,
self.timeseries.y_name,
'cellID',
'containerID',
'experimentID']
if print_labels and labels is not None:
printout += '\t'.join(labels) + '\n'
printout += '\n'
for index, sl in enumerate(self.slices):
chunk = ''
x = self.timeseries.x[sl]
y = self.timeseries.y[sl]
ids = len(x) * [self.ids[index]]
container_id = len(x) * [self.container_label, ]
exp_id = len(x) * [self.experiment_label, ]
for line in zip(x, y, ids, container_id, exp_id):
chunk += '{}'.format(sep).join(['{}'.format(item) for item in line]) + '\n'
printout += chunk
printout += cell_sep
return printout.lstrip().rstrip() # remove empty lines at beginning/end
def to_dataframe(self, start_index=0, sharp_tleft=None, sharp_tright=None):
dic = {}
dic[self.timeseries.x_name] = [] # self.timeseries.x
dic[self.timeseries.y_name] = [] # self.timeseries.y
dic['cellID'] = []
dic['containerID'] = []
dic['experimentID'] = []
for key in self.selections.keys():
if key == 'master':
continue
dic[key] = []
size = 0
# add cell ID, container ID, experiment ID, and TRUE/FALSE for each cdt
for index, sl in enumerate(self.slices):
# collect only if within bounds
if sharp_tleft is not None:
if self.time_bounds[index][0] < sharp_tleft:
continue
if sharp_tright is not None:
if self.time_bounds[index][1] > sharp_tright:
continue
_x = self.timeseries.x[sl]
_y = self.timeseries.y[sl]<|fim▁hole|> dic['containerID'].extend(len(_x) * [self.container_label, ])
dic['experimentID'].extend(len(_x) * [self.experiment_label, ])
# True/False for each
for key, values in self.selections.items():
# master: all True, useless to printout
if key == 'master':
continue
val = values[index]
dic[key].extend(len(_x) * [val, ])
size += len(_x)
df = pd.DataFrame(dic, index=range(start_index, start_index + size))
return df<|fim▁end|> | dic[self.timeseries.x_name].extend(_x)
dic[self.timeseries.y_name].extend(_y)
dic['cellID'].extend(len(_x) * [self.ids[index], ]) |
<|file_name|>SocketStatus.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.net;
/**
* Someone, please change the enum name.<|fim▁hole|> OPEN, STOP, TIMEOUT, DISCONNECT, ERROR
}<|fim▁end|> | *
* @author remm
*/
public enum SocketStatus { |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate uu_cut;<|fim▁hole|>
fn main() {
std::process::exit(uu_cut::uumain(std::env::args().collect()));
}<|fim▁end|> | |
<|file_name|>ka.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2016, CKSource - Frederico Knabben. All rights reserved.<|fim▁hole|>CKEDITOR.plugins.setLang( 'removeformat', 'ka', {
toolbar: 'ფორმატირების მოხსნა'
} );<|fim▁end|> | For licensing, see LICENSE.md or http://ckeditor.com/license
*/ |
<|file_name|>test_schematron_1_3.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
import unittest
import io
from lxml import isoschematron, etree
from packtools.catalogs import SCHEMAS
SCH = etree.parse(SCHEMAS['sps-1.3'])
def TestPhase(phase_name, cache):
"""Factory of parsed Schematron phases.
:param phase_name: the phase name
:param cache: mapping type
"""
if phase_name not in cache:
phase = isoschematron.Schematron(SCH, phase=phase_name)
cache[phase_name] = phase
return cache[phase_name]
class PhaseBasedTestCase(unittest.TestCase):
cache = {}
def _run_validation(self, sample):
schematron = TestPhase(self.sch_phase, self.cache)
return schematron.validate(etree.parse(sample))
class JournalIdTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/journal-id elements.
Ticket #14 makes @journal-id-type="publisher-id" mandatory.
Ref: https://github.com/scieloorg/scielo_publishing_schema/issues/14
"""
sch_phase = 'phase.journal-id'
def test_case1(self):
"""
presence(@nlm-ta) is True
presence(@publisher-id) is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">
Rev Saude Publica
</journal-id>
<journal-id journal-id-type="publisher-id">
RSP
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
presence(@nlm-ta) is True
presence(@publisher-id) is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="nlm-ta">
Rev Saude Publica
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case3(self):
"""
presence(@nlm-ta) is False
presence(@publisher-id) is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id">
RSP
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case4(self):
"""
presence(@nlm-ta) is False
presence(@publisher-id) is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type='doi'>
123.plin
</journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_publisher_id_cannot_be_empty(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-id journal-id-type="publisher-id"></journal-id>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class JournalTitleGroupTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/journal-title-group elements.
"""
sch_phase = 'phase.journal-title-group'
def test_journal_title_group_is_absent(self):
sample = u"""<article>
<front>
<journal-meta>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case1(self):
"""
A: presence(journal-title) is True
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is True
A ^ B is True
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>
Revista de Saude Publica
</journal-title>
<abbrev-journal-title abbrev-type='publisher'>
Rev. Saude Publica
</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
A: presence(journal-title) is True
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is False
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>
Revista de Saude Publica
</journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case3(self):
"""
A: presence(journal-title) is False
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is True
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<abbrev-journal-title abbrev-type='publisher'>
Rev. Saude Publica
</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_case4(self):
"""
A: presence(journal-title) is False
B: presence(abbrev-journal-title[@abbrev-type='publisher']) is False
A ^ B is False
"""
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_journal_title(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title></journal-title>
<abbrev-journal-title abbrev-type='publisher'>Rev. Saude Publica</abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_abbrev_journal_title(self):
sample = u"""<article>
<front>
<journal-meta>
<journal-title-group>
<journal-title>Revista de Saude Publica</journal-title>
<abbrev-journal-title abbrev-type='publisher'></abbrev-journal-title>
</journal-title-group>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class PublisherTests(PhaseBasedTestCase):
"""Tests for article/front/journal-meta/publisher elements.
"""
sch_phase = 'phase.publisher'
def test_publisher_is_present(self):
sample = u"""<article>
<front>
<journal-meta>
<publisher>
<publisher-name>British Medical Journal</publisher-name>
</publisher>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_publisher_is_absent(self):
sample = u"""<article>
<front>
<journal-meta>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_publisher_is_empty(self):
sample = u"""<article>
<front>
<journal-meta>
<publisher>
<publisher-name></publisher-name>
</publisher>
</journal-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class ArticleCategoriesTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/article-categories elements.
"""
sch_phase = 'phase.article-categories'
def test_article_categories_is_present(self):
sample = u"""<article>
<front>
<article-meta>
<article-categories>
<subj-group>
<subject>ISO/TC 108</subject>
<subject>
SC 2, Measurement and evaluation of...
</subject>
</subj-group>
</article-categories>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_article_categories_is_absent(self):
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class fpage_OR_elocationTests(PhaseBasedTestCase):
"""Tests for article/front/article-meta/fpage or elocation-id elements.
"""
sch_phase = 'phase.fpage_or_elocation-id'
def test_case1(self):
"""
fpage is True
elocation-id is True
fpage v elocation-id is True
"""
sample = u"""<article>
<front>
<article-meta>
<fpage>01</fpage>
<elocation-id>E27</elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case2(self):
"""
fpage is True
elocation-id is False
fpage v elocation-id is True
"""
sample = u"""<article>
<front>
<article-meta>
<fpage>01</fpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case3(self):
"""
fpage is False
elocation-id is True
fpage v elocation-id is True
"""
sample = u"""<article>
<front>
<article-meta>
<elocation-id>E27</elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertTrue(self._run_validation(sample))
def test_case4(self):
"""
fpage is False
elocation-id is False
fpage v elocation-id is False
"""
sample = u"""<article>
<front>
<article-meta>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_fpage(self):
sample = u"""<article>
<front>
<article-meta>
<fpage></fpage>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
def test_empty_elocationid(self):
sample = u"""<article>
<front>
<article-meta>
<elocation-id></elocation-id>
</article-meta>
</front>
</article>
"""
sample = io.BytesIO(sample.encode('utf-8'))
self.assertFalse(self._run_validation(sample))
class ISSNTests(PhaseBasedTestCase):
    """Validation of article/front/journal-meta/issn.

    At least one issn with @pub-type equal to 'epub' or 'ppub' must be
    present and non-empty; the test_caseN methods cover the truth table.
    """
    sch_phase = 'phase.issn'

    def test_case1(self):
        """Both epub and ppub ISSNs present -> valid."""
        xml = u"""<article>
                      <front>
                        <journal-meta>
                          <issn pub-type="epub">
                            0959-8138
                          </issn>
                          <issn pub-type="ppub">
                            0959-813X
                          </issn>
                        </journal-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_case2(self):
        """Only the epub ISSN present -> valid."""
        xml = u"""<article>
                      <front>
                        <journal-meta>
                          <issn pub-type="epub">
                            0959-8138
                          </issn>
                        </journal-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_case3(self):
        """Only the ppub ISSN present -> valid."""
        xml = u"""<article>
                      <front>
                        <journal-meta>
                          <issn pub-type="ppub">
                            0959-813X
                          </issn>
                        </journal-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_case4(self):
        """An issn without @pub-type -> invalid."""
        xml = u"""<article>
                      <front>
                        <journal-meta>
                          <issn>
                            0959-813X
                          </issn>
                        </journal-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_empty_issn(self):
        """An empty issn element -> invalid."""
        xml = u"""<article>
                      <front>
                        <journal-meta>
                          <issn pub-type="epub"></issn>
                        </journal-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class ArticleIdTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/article-id.

    A DOI article-id (pub-id-type='doi') is mandatory and must not be
    empty; @pub-id-type is restricted to a known set of values.
    """
    sch_phase = 'phase.article-id'

    def test_article_id_is_absent(self):
        """No article-id at all -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_pub_id_type_doi_is_absent(self):
        """article-ids exist but none is typed as 'doi' -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-id>
                            10.1590/1414-431X20143434
                          </article-id>
                          <article-id pub-id-type='other'>
                            10.1590/1414-431X20143435
                          </article-id>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_pub_id_type_doi(self):
        """A single DOI article-id -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-id pub-id-type='doi'>
                            10.1590/1414-431X20143434
                          </article-id>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_pub_id_type_doi_is_empty(self):
        """An empty DOI article-id -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-id pub-id-type='doi'/>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_invalid_pub_id_type(self):
        """An unknown @pub-id-type -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-id pub-id-type='unknown'>
                            10.1590/1414-431X20143434
                          </article-id>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_invalid_pub_id_type_case2(self):
        """An unknown @pub-id-type invalidates even when a DOI is present."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-id pub-id-type='unknown'>
                            10.1590/1414-431X20143434
                          </article-id>
                          <article-id pub-id-type='doi'>
                            10.1590/1414-431X20143434
                          </article-id>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_valid_pub_id_type_values(self):
        """Each allowed @pub-id-type value passes (a DOI is always present)."""
        for id_type in ['doi', 'publisher-id', 'other']:
            xml = u"""<article>
                          <front>
                            <article-meta>
                              <article-id pub-id-type='%s'>
                                10.1590/1414-431X20143433
                              </article-id>
                              <article-id pub-id-type='doi'>
                                10.1590/1414-431X20143434
                              </article-id>
                            </article-meta>
                          </front>
                        </article>
                     """ % id_type
            self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class SubjGroupTests(PhaseBasedTestCase):
    """Validation of article-categories/subj-group.

    Exactly one subj-group with @subj-group-type='heading' is required
    per article (and per translated sub-article front-stub).
    """
    sch_phase = 'phase.subj-group'

    def test_subj_group_is_absent(self):
        """No subj-group at all -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                          </article-categories>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_without_heading_type(self):
        """subj-groups exist but none is typed 'heading' -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group subj-group-type="kwd">
                              <subject content-type="neurosci">
                                Cellular and Molecular Biology
                              </subject>
                              <subj-group>
                                <subject content-type="neurosci">
                                  Blood and brain barrier
                                </subject>
                              </subj-group>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_heading_type(self):
        """A top-level 'heading' subj-group -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group subj-group-type="heading">
                              <subject>
                                Cellular and Molecular Biology
                              </subject>
                              <subj-group>
                                <subject content-type="neurosci">
                                  Blood and brain barrier
                                </subject>
                              </subj-group>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_heading_in_subarticle_pt(self):
        """One 'heading' in the article and one in the pt sub-article -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group subj-group-type="heading">
                              <subject>
                                Original Article
                              </subject>
                              <subj-group>
                                <subject content-type="neurosci">
                                  Blood and brain barrier
                                </subject>
                              </subj-group>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                      <sub-article xml:lang="pt" article-type="translation" id="S01">
                        <front-stub>
                          <article-categories>
                            <subj-group subj-group-type="heading">
                              <subject>Artigos Originais</subject>
                            </subj-group>
                          </article-categories>
                        </front-stub>
                      </sub-article>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_many_heading_in_subarticle_pt(self):
        """Two 'heading' subj-groups in the sub-article -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group subj-group-type="heading">
                              <subject>
                                Original Article
                              </subject>
                              <subj-group>
                                <subject content-type="neurosci">
                                  Blood and brain barrier
                                </subject>
                              </subj-group>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                      <sub-article xml:lang="pt" article-type="translation" id="S01">
                        <front-stub>
                          <article-categories>
                            <subj-group subj-group-type="heading">
                              <subject>Artigos Originais</subject>
                            </subj-group>
                            <subj-group subj-group-type="heading">
                              <subject>Artigos Piratas</subject>
                            </subj-group>
                          </article-categories>
                        </front-stub>
                      </sub-article>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_heading_type_in_the_deep(self):
        """A 'heading' subj-group nested below an untyped one -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group>
                              <subject>
                                Cellular and Molecular Biology
                              </subject>
                              <subj-group subj-group-type="heading">
                                <subject>
                                  Blood and brain barrier
                                </subject>
                              </subj-group>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_many_heading_type(self):
        """Two sibling 'heading' subj-groups in the article -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <article-categories>
                            <subj-group subj-group-type="heading">
                              <subject>
                                Cellular and Molecular Biology
                              </subject>
                            </subj-group>
                            <subj-group subj-group-type="heading">
                              <subject>
                                Blood and brain barrier
                              </subject>
                            </subj-group>
                          </article-categories>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class AbstractLangTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/abstract.

    The main abstract must not carry xml:lang, and research/review
    articles must provide an abstract or a trans-abstract.
    """
    sch_phase = 'phase.abstract_lang'

    def test_is_present(self):
        """A plain abstract without xml:lang -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <abstract>
                            <p>Differing socioeconomic positions in...</p>
                          </abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_is_absent(self):
        """No abstract is fine for an untyped article."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_is_present_with_lang(self):
        """An abstract carrying xml:lang -> invalid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article>
                      <front>
                        <article-meta>
                          <abstract xml:lang="en">
                            <p>Differing socioeconomic positions in...</p>
                          </abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_missing_for_research_article(self):
        """research-article without any abstract -> invalid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article article-type="research-article">
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_research_article(self):
        """research-article with an abstract -> valid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article article-type="research-article">
                      <front>
                        <article-meta>
                          <abstract>
                            <p>Differing socioeconomic positions in...</p>
                          </abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_research_article_only_with_transabstract(self):
        """research-article with only a trans-abstract -> valid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article article-type="research-article">
                      <front>
                        <article-meta>
                          <trans-abstract xml:lang="en">
                            <p>Differing socioeconomic positions in...</p>
                          </trans-abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_missing_for_review_article(self):
        """review-article without any abstract -> invalid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article article-type="review-article">
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_review_article(self):
        """review-article with an abstract -> valid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article article-type="review-article">
                      <front>
                        <article-meta>
                          <abstract>
                            <p>Differing socioeconomic positions in...</p>
                          </abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_review_article_only_with_transabstract(self):
        """review-article with only a trans-abstract -> valid."""
        xml = u"""<?xml version="1.0" encoding="UTF-8"?>
                   <article article-type="review-article">
                      <front>
                        <article-meta>
                          <trans-abstract xml:lang="en">
                            <p>Differing socioeconomic positions in...</p>
                          </trans-abstract>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class ArticleTitleLangTests(PhaseBasedTestCase):
    """Validation of article-title (in title-group and element-citation).

    article-title must not carry xml:lang in either context.
    """
    sch_phase = 'phase.article-title_lang'

    def test_is_present(self):
        """Plain article-title inside title-group -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <title-group>
                            <article-title>
                              Systematic review of day hospital care...
                            </article-title>
                          </title-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_is_present_with_lang(self):
        """article-title with xml:lang inside title-group -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <title-group>
                            <article-title xml:lang="en">
                              Systematic review of day hospital care...
                            </article-title>
                          </title-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_is_present_in_elementcitation(self):
        """Plain article-title inside element-citation -> valid."""
        xml = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <name>
                                  <surname>Aires</surname>
                                  <given-names>M</given-names>
                                </name>
                                <name>
                                  <surname>Paz</surname>
                                  <given-names>AA</given-names>
                                </name>
                                <name>
                                  <surname>Perosa</surname>
                                  <given-names>CT</given-names>
                                </name>
                              </person-group>
                              <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                              <source>Rev Gaucha Enferm</source>
                              <year>2009</year>
                              <volume>30</volume>
                              <issue>3</issue>
                              <fpage>192</fpage>
                              <lpage>199</lpage>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_is_present_in_elementcitation_with_lang(self):
        """article-title with xml:lang inside element-citation -> invalid."""
        xml = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <name>
                                  <surname>Aires</surname>
                                  <given-names>M</given-names>
                                </name>
                                <name>
                                  <surname>Paz</surname>
                                  <given-names>AA</given-names>
                                </name>
                                <name>
                                  <surname>Perosa</surname>
                                  <given-names>CT</given-names>
                                </name>
                              </person-group>
                              <article-title xml:lang="pt">Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                              <source>Rev Gaucha Enferm</source>
                              <year>2009</year>
                              <volume>30</volume>
                              <issue>3</issue>
                              <fpage>192</fpage>
                              <lpage>199</lpage>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class KwdGroupLangTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/kwd-group.

    Every kwd-group must declare xml:lang, regardless of how many
    occurrences there are.
    """
    sch_phase = 'phase.kwd-group_lang'

    def test_single_occurence(self):
        """One kwd-group without xml:lang -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <kwd-group>
                            <kwd>gene expression</kwd>
                          </kwd-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_many_occurencies(self):
        """Multiple kwd-groups, each with xml:lang -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <kwd-group xml:lang="en">
                            <kwd>gene expression</kwd>
                          </kwd-group>
                          <kwd-group xml:lang="pt">
                            <kwd>expressao do gene</kwd>
                          </kwd-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_many_occurencies_without_lang(self):
        """Multiple kwd-groups, none with xml:lang -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <kwd-group>
                            <kwd>gene expression</kwd>
                          </kwd-group>
                          <kwd-group>
                            <kwd>expressao do gene</kwd>
                          </kwd-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class AffContentTypeTests(PhaseBasedTestCase):
    """Validation of institution/@content-type inside aff elements.

    Each aff needs exactly one institution with content-type='original';
    additional institutions are limited to an allowed set of types
    (orgdiv1..3, normalized, orgname).
    """
    sch_phase = 'phase.aff_contenttypes'

    def test_original_is_present(self):
        """A single 'original' institution -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_original_is_absent(self):
        """No content-type at all -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution>
                              Grupo de ...
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_many_original(self):
        """Two 'original' institutions in one aff -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <institution content-type="original">
                              Galera de ...
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_original_is_present_and_absent(self):
        """One conforming aff plus one missing 'original' -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                          </aff>
                          <aff>
                            <institution>
                              Grupo de ...
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_original_is_present_and_present(self):
        """Two affs, each with its own 'original' -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                          </aff>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_allowed_orgdiv1(self):
        """'orgdiv1' accompanying 'original' -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <institution content-type="orgdiv1">
                              Instituto de Matematica e Estatistica
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_allowed_orgdiv2(self):
        """'orgdiv2' accompanying 'original' -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <institution content-type="orgdiv2">
                              Instituto de Matematica e Estatistica
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_allowed_orgdiv3(self):
        """'orgdiv3' accompanying 'original' -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <institution content-type="orgdiv3">
                              Instituto de Matematica e Estatistica
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_allowed_normalized(self):
        """'normalized' accompanying 'original' -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <institution content-type="normalized">
                              Instituto de Matematica e Estatistica
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_disallowed_orgdiv4(self):
        """'orgdiv4' is outside the allowed set -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <institution content-type="orgdiv4">
                              Instituto de Matematica e Estatistica
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_orgname_inside_contrib_group(self):
        """'orgname' within an aff nested in contrib-group -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <contrib-group>
                            <aff>
                              <institution content-type="original">
                                Grupo de ...
                              </institution>
                              <institution content-type="orgname">
                                Instituto de Matematica e Estatistica
                              </institution>
                            </aff>
                          </contrib-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class CountsTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/counts.

    Each *-count element must match the number of corresponding
    elements actually present in the document; page-count must match
    the fpage/lpage interval when both are plain numbers.
    """
    sch_phase = 'phase.counts'

    def test_absent(self):
        """No counts element at all -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_table_is_absent(self):
        """counts without table-count -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_ref_is_absent(self):
        """counts without ref-count -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_fig_is_absent(self):
        """counts without fig-count -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_equation_is_absent(self):
        """counts without equation-count -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_page_is_absent(self):
        """counts without page-count (and no pagination) -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                          </counts>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_tables(self):
        """table-count=1 matching one table element -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="1"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                      <body>
                        <sec>
                          <p>
                            <table-wrap>
                              <table frame="hsides" rules="groups">
                                <colgroup width="25%"><col/><col/><col/><col/></colgroup>
                                <thead>
                                  <tr>
                                    <th style="font-weight:normal" align="left">Modelo</th>
                                    <th style="font-weight:normal">Estrutura</th>
                                    <th style="font-weight:normal">Processos</th>
                                    <th style="font-weight:normal">Resultados</th>
                                  </tr>
                                </thead>
                                <tbody>
                                  <tr>
                                    <td valign="top">SIPA<sup>1,2</sup></td>
                                    <td valign="top">Urgência e hospitalar.</td>
                                    <td valign="top">Realiza triagem para fragilidade.</td>
                                    <td valign="top">Maior gasto comunitário, menor gasto.</td>
                                  </tr>
                                </tbody>
                              </table>
                            </table-wrap>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_tables_as_graphic(self):
        """A table-wrap whose content is a graphic still counts as a table."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="1"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                      <body>
                        <sec>
                          <p>
                            <table-wrap id="t01">
                              <graphic mimetype="image"
                                       xlink:href="1414-431X-bjmbr-1414-431X20142875-gt001">
                              </graphic>
                            </table-wrap>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_ref(self):
        """ref-count=1 matching one ref element -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="1"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                      <back>
                        <ref-list>
                          <title>REFERÊNCIAS</title>
                          <ref id="B1">
                            <label>1</label>
                            <mixed-citation>
                              Béland F, Bergman H, Lebel P, Clarfield AM, Tousignant P, ...
                            </mixed-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_fig(self):
        """fig-count=1 matching one fig element -> valid."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="1"/>
                            <equation-count count="0"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                      <body>
                        <sec>
                          <p>
                            <fig id="f01">
                              <label>Figura 1</label>
                              <caption>
                                <title>Modelo das cinco etapas da pesquisa translacional.</title>
                              </caption>
                              <graphic xlink:href="0034-8910-rsp-48-2-0347-gf01"/>
                            </fig>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_equation(self):
        """equation-count=1 matching one disp-formula -> valid."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="1"/>
                            <page-count count="0"/>
                          </counts>
                          <fpage>0</fpage>
                          <lpage>0</lpage>
                        </article-meta>
                      </front>
                      <body>
                        <sec>
                          <disp-formula>
                            <tex-math id="M1">
                            </tex-math>
                          </disp-formula>
                        </sec>
                      </body>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_page(self):
        """page-count matching the fpage..lpage interval -> valid."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="11"/>
                          </counts>
                          <fpage>140</fpage>
                          <lpage>150</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_page_wrong_count(self):
        """page-count inconsistent with fpage..lpage -> invalid."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="50"/>
                          </counts>
                          <fpage>140</fpage>
                          <lpage>150</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_non_digit_pages(self):
        """Non-numeric fpage/lpage cannot be cross-checked -> accepted."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="11"/>
                          </counts>
                          <fpage>A140</fpage>
                          <lpage>A150</lpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_elocationid_pages(self):
        """Electronic pagination cannot be cross-checked -> accepted."""
        xml = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <counts>
                            <table-count count="0"/>
                            <ref-count count="0"/>
                            <fig-count count="0"/>
                            <equation-count count="0"/>
                            <page-count count="11"/>
                          </counts>
                          <elocation-id>A140</elocation-id>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class AuthorNotesTests(PhaseBasedTestCase):
    """Validation of fn/@fn-type inside author-notes.

    Only a fixed vocabulary of fn-type values is accepted.
    """
    sch_phase = 'phase.fn-group'

    def test_allowed_fn_types(self):
        """Every value in the allowed vocabulary passes."""
        for note_type in ['author', 'con', 'conflict', 'corresp', 'current-aff',
                          'deceased', 'edited-by', 'equal', 'on-leave', 'participating-researchers',
                          'present-address', 'previously-at', 'study-group-members', 'other']:
            xml = u"""<article>
                          <front>
                            <article-meta>
                              <author-notes>
                                <fn fn-type="%s">
                                  <p>foobar</p>
                                </fn>
                              </author-notes>
                            </article-meta>
                          </front>
                        </article>
                     """ % note_type
            self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_disallowed_fn_types(self):
        """An out-of-vocabulary fn-type -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <author-notes>
                            <fn fn-type="wtf">
                              <p>foobar</p>
                            </fn>
                          </author-notes>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class PubDateTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/pub-date.

    @pub-type is mandatory and restricted to epub, epub-ppub and
    collection.
    """
    sch_phase = 'phase.pub-date'

    def test_pub_type_absent(self):
        """pub-date without @pub-type -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <pub-date>
                            <day>17</day>
                            <month>03</month>
                            <year>2014</year>
                          </pub-date>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_pub_type_allowed_values(self):
        """Each allowed @pub-type value passes."""
        for allowed in ['epub', 'epub-ppub', 'collection']:
            xml = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date pub-type="%s">
                                <day>17</day>
                                <month>03</month>
                                <year>2014</year>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % allowed
            self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_pub_type_disallowed_value(self):
        """An out-of-vocabulary @pub-type -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <pub-date pub-type="wtf">
                            <day>17</day>
                            <month>03</month>
                            <year>2014</year>
                          </pub-date>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class VolumeTests(PhaseBasedTestCase):
    """Validation of the volume element (front and element-citation).

    volume is optional, but when present it must not be empty.
    """
    sch_phase = 'phase.volume'

    def test_absent_in_front(self):
        """Missing volume -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_present_but_empty_in_front(self):
        """Empty volume -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <volume></volume>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_present_in_front(self):
        """Non-empty volume -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <volume>10</volume>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class IssueTests(PhaseBasedTestCase):
    """Validation of the issue element (front and element-citation).

    issue is optional, but when present it must not be empty.
    """
    sch_phase = 'phase.issue'

    def test_absent_in_front(self):
        """Missing issue -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_present_but_empty_in_front(self):
        """Empty issue -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <issue></issue>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_present_in_front(self):
        """Non-empty issue -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <issue>10</issue>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class SupplementTests(PhaseBasedTestCase):
    """Validation of article/front/article-meta/supplement.

    The supplement element must not be used at all.
    """
    sch_phase = 'phase.supplement'

    def test_absent(self):
        """No supplement -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_present(self):
        """Any supplement element -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <supplement>Suppl 2</supplement>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class ElocationIdTests(PhaseBasedTestCase):
    """Validation of elocation-id (front and element-citation).

    elocation-id is mutually exclusive with fpage in both contexts.
    """
    sch_phase = 'phase.elocation-id'

    def test_absent(self):
        """No elocation-id in front -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_fpage(self):
        """elocation-id together with fpage in front -> invalid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <elocation-id>E27</elocation-id>
                          <fpage>12</fpage>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_without_fpage(self):
        """elocation-id alone in front -> valid."""
        xml = u"""<article>
                      <front>
                        <article-meta>
                          <elocation-id>E27</elocation-id>
                        </article-meta>
                      </front>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_absent_back(self):
        """No elocation-id in an element-citation -> valid."""
        xml = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_fpage_back(self):
        """elocation-id together with fpage in a citation -> invalid."""
        xml = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <elocation-id>E27</elocation-id>
                              <fpage>12</fpage>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_without_fpage_back(self):
        """elocation-id alone in a citation -> valid."""
        xml = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <elocation-id>E27</elocation-id>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertTrue(self._run_validation(io.BytesIO(xml.encode('utf-8'))))

    def test_with_and_without_fpage_back(self):
        """One offending citation invalidates even if another conforms."""
        xml = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <elocation-id>E27</elocation-id>
                              <fpage>12</fpage>
                            </element-citation>
                          </ref>
                          <ref>
                            <element-citation>
                              <elocation-id>E27</elocation-id>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        self.assertFalse(self._run_validation(io.BytesIO(xml.encode('utf-8'))))
class HistoryTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/history

    Validates the @date-type attribute of history/date elements against
    the allowed vocabulary: received, accepted, rev-recd.
    """
    sch_phase = 'phase.history'

    def test_absent(self):
        # The history element itself is optional.
        sample = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_(self):
        # NOTE(review): this method is an exact duplicate of test_absent and
        # its name is empty after the 'test_' prefix — presumably an
        # unfinished test case; confirm intent and rename or remove.
        sample = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_date_type_allowed_values(self):
        # Each value of the allowed vocabulary must validate on its own.
        for pub_type in ['received', 'accepted', 'rev-recd']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <history>
                                <date date-type="%s">
                                  <day>17</day>
                                  <month>03</month>
                                  <year>2014</year>
                                </date>
                              </history>
                            </article-meta>
                          </front>
                        </article>
                     """ % pub_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_date_type_disallowed_values(self):
        # Any value outside the vocabulary must be rejected.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <history>
                            <date date-type="invalid">
                              <day>17</day>
                              <month>03</month>
                              <year>2014</year>
                            </date>
                          </history>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_date_type_allowed_values_multi(self):
        # Multiple dates with distinct allowed types may coexist.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <history>
                            <date date-type="received">
                              <day>17</day>
                              <month>03</month>
                              <year>2014</year>
                            </date>
                            <date date-type="accepted">
                              <day>17</day>
                              <month>03</month>
                              <year>2014</year>
                            </date>
                          </history>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))
class ProductTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/product

    Rules exercised here:
      * <product> is only allowed when article/@article-type is
        'book-review' or 'product-review';
      * <product> must carry a @product-type attribute;
      * @product-type is restricted to: book, software, article, chapter,
        other.
    """
    sch_phase = 'phase.product'

    def test_absent(self):
        # No product element and no article-type: nothing to validate.
        sample = u"""<article>
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_absent_allowed_types(self):
        # product is optional even for the review article types.
        for art_type in ['book-review', 'product-review']:
            sample = u"""<article article-type="%s">
                          <front>
                            <article-meta>
                            </article-meta>
                          </front>
                        </article>
                     """ % art_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_allowed_types(self):
        # A complete product element under each allowed article-type.
        for art_type in ['book-review', 'product-review']:
            sample = u"""<article article-type="%s">
                          <front>
                            <article-meta>
                              <product product-type="book">
                                <person-group person-group-type="author">
                                  <name>
                                    <surname>Sobrenome do autor</surname>
                                    <given-names>Prenomes do autor</given-names>
                                  </name>
                                </person-group>
                                <source>Título do livro</source>
                                <year>Ano de publicação</year>
                                <publisher-name>Nome da casa publicadora/Editora</publisher-name>
                                <publisher-loc>Local de publicação</publisher-loc>
                                <page-count count="total de paginação do livro (opcional)"/>
                                <isbn>ISBN do livro, se houver</isbn>
                                <inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
                              </product>
                            </article-meta>
                          </front>
                        </article>
                     """ % art_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_types(self):
        # product under a non-review article-type must be rejected.
        sample = u"""<article article-type="research-article">
                      <front>
                        <article-meta>
                          <product product-type="book">
                            <person-group person-group-type="author">
                              <name>
                                <surname>Sobrenome do autor</surname>
                                <given-names>Prenomes do autor</given-names>
                              </name>
                            </person-group>
                            <source>Título do livro</source>
                            <year>Ano de publicação</year>
                            <publisher-name>Nome da casa publicadora/Editora</publisher-name>
                            <publisher-loc>Local de publicação</publisher-loc>
                            <page-count count="total de paginação do livro (opcional)"/>
                            <isbn>ISBN do livro, se houver</isbn>
                            <inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_no_type(self):
        # product with a missing article-type attribute must be rejected.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product product-type="book">
                            <person-group person-group-type="author">
                              <name>
                                <surname>Sobrenome do autor</surname>
                                <given-names>Prenomes do autor</given-names>
                              </name>
                            </person-group>
                            <source>Título do livro</source>
                            <year>Ano de publicação</year>
                            <publisher-name>Nome da casa publicadora/Editora</publisher-name>
                            <publisher-loc>Local de publicação</publisher-loc>
                            <page-count count="total de paginação do livro (opcional)"/>
                            <isbn>ISBN do livro, se houver</isbn>
                            <inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_product_type(self):
        # product lacking @product-type must be rejected.
        sample = u"""<article article-type="book-review">
                      <front>
                        <article-meta>
                          <product>
                            <person-group person-group-type="author">
                              <name>
                                <surname>Sobrenome do autor</surname>
                                <given-names>Prenomes do autor</given-names>
                              </name>
                            </person-group>
                            <source>Título do livro</source>
                            <year>Ano de publicação</year>
                            <publisher-name>Nome da casa publicadora/Editora</publisher-name>
                            <publisher-loc>Local de publicação</publisher-loc>
                            <page-count count="total de paginação do livro (opcional)"/>
                            <isbn>ISBN do livro, se houver</isbn>
                            <inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_allowed_product_types(self):
        # Every value of the @product-type vocabulary must validate.
        for prod_type in ['book', 'software', 'article', 'chapter', 'other']:
            sample = u"""<article article-type="book-review">
                          <front>
                            <article-meta>
                              <product product-type="%s">
                                <person-group person-group-type="author">
                                  <name>
                                    <surname>Sobrenome do autor</surname>
                                    <given-names>Prenomes do autor</given-names>
                                  </name>
                                </person-group>
                                <source>Título do livro</source>
                                <year>Ano de publicação</year>
                                <publisher-name>Nome da casa publicadora/Editora</publisher-name>
                                <publisher-loc>Local de publicação</publisher-loc>
                                <page-count count="total de paginação do livro (opcional)"/>
                                <isbn>ISBN do livro, se houver</isbn>
                                <inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
                              </product>
                            </article-meta>
                          </front>
                        </article>
                     """ % prod_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_product_types(self):
        # A @product-type outside the vocabulary must be rejected.
        sample = u"""<article article-type="book-review">
                      <front>
                        <article-meta>
                          <product product-type="invalid">
                            <person-group person-group-type="author">
                              <name>
                                <surname>Sobrenome do autor</surname>
                                <given-names>Prenomes do autor</given-names>
                              </name>
                            </person-group>
                            <source>Título do livro</source>
                            <year>Ano de publicação</year>
                            <publisher-name>Nome da casa publicadora/Editora</publisher-name>
                            <publisher-loc>Local de publicação</publisher-loc>
                            <page-count count="total de paginação do livro (opcional)"/>
                            <isbn>ISBN do livro, se houver</isbn>
                            <inline-graphic>1234-5678-rctb-45-05-690-gf01.tif</inline-graphic>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class SecTitleTests(PhaseBasedTestCase):
    """Tests for:
      - article/body/sec/title

    A section must carry a non-empty <title> element.
    """
    sch_phase = 'phase.sectitle'

    def test_absent(self):
        """A <sec> with no <title> at all must fail validation."""
        xml = u"""<article>
                   <body>
                     <sec>
                       <p>Foo bar</p>
                     </sec>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertFalse(self._run_validation(stream))

    def test_has_title(self):
        """A <sec> with a populated <title> must pass validation."""
        xml = u"""<article>
                   <body>
                     <sec>
                       <title>Introduction</title>
                       <p>Foo bar</p>
                     </sec>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertTrue(self._run_validation(stream))

    def test_has_empty_title(self):
        """An empty <title> counts as missing and must fail validation."""
        xml = u"""<article>
                   <body>
                     <sec>
                       <title></title>
                       <p>Foo bar</p>
                     </sec>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertFalse(self._run_validation(stream))
class ParagraphTests(PhaseBasedTestCase):
    """Tests for //p

    Paragraph elements must not carry an @id attribute, whether they
    appear directly under <body> or nested inside a <sec>.
    """
    sch_phase = 'phase.paragraph'

    def test_sec_without_id(self):
        """A plain <p> inside a <sec> is valid."""
        xml = u"""<article>
                   <body>
                     <sec>
                       <title>Intro</title>
                       <p>Foo bar</p>
                     </sec>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertTrue(self._run_validation(stream))

    def test_sec_with_id(self):
        """A <p> with @id inside a <sec> must be rejected."""
        xml = u"""<article>
                   <body>
                     <sec>
                       <title>Intro</title>
                       <p id="p01">Foo bar</p>
                     </sec>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertFalse(self._run_validation(stream))

    def test_body_without_id(self):
        """A plain <p> directly under <body> is valid."""
        xml = u"""<article>
                   <body>
                     <p>Foo bar</p>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertTrue(self._run_validation(stream))

    def test_body_with_id(self):
        """A <p> with @id directly under <body> must be rejected."""
        xml = u"""<article>
                   <body>
                     <p id="p01">Foo bar</p>
                   </body>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertFalse(self._run_validation(stream))
class XrefRidTests(PhaseBasedTestCase):
    """Tests for //xref[@rid]

    Referential integrity: every xref/@rid must resolve to an element
    whose @id exists in the document, and the target element's kind must
    agree with xref/@ref-type.
    """
    sch_phase = 'phase.rid_integrity'

    def test_mismatching_rid(self):
        # @rid points to "aff1" but no element with that id exists.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <contrib-group>
                            <contrib>
                              <xref ref-type="aff" rid="aff1">
                                <sup>I</sup>
                              </xref>
                            </contrib>
                          </contrib-group>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_matching_rid(self):
        # @rid resolves to an existing aff/@id: valid.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <contrib-group>
                            <contrib>
                              <xref ref-type="aff" rid="aff1">
                                <sup>I</sup>
                              </xref>
                            </contrib>
                          </contrib-group>
                          <aff id="aff1">
                            <label>I</label>
                            <institution content-type="orgname">
                              Secretaria Municipal de Saude de Belo Horizonte
                            </institution>
                            <addr-line>
                              <named-content content-type="city">Belo Horizonte</named-content>
                              <named-content content-type="state">MG</named-content>
                            </addr-line>
                            <country>Brasil</country>
                            <institution content-type="original">
                              Secretaria Municipal de Saude de Belo Horizonte. Belo Horizonte, MG, Brasil
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_mismatching_reftype(self):
        # @rid resolves, but the target is a table-wrap while the xref
        # declares ref-type="aff": type mismatch must be rejected.
        sample = u"""<article>
                      <body>
                        <sec>
                          <table-wrap id="t01">
                          </table-wrap>
                        </sec>
                        <sec>
                          <p>
                            <xref ref-type="aff" rid="t01">table 1</xref>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class XrefRefTypeTests(PhaseBasedTestCase):
    """Tests for //xref[@ref-type]

    Validates xref/@ref-type against the allowed vocabulary.
    """
    sch_phase = 'phase.xref_reftype_integrity'

    def test_allowed_ref_types(self):
        # Every value of the allowed vocabulary must validate on its own.
        for reftype in ['aff', 'app', 'author-notes', 'bibr', 'contrib',
                        'corresp', 'disp-formula', 'fig', 'fn', 'sec',
                        'supplementary-material', 'table', 'table-fn',
                        'boxed-text']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <xref ref-type="%s">foo</xref>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % reftype
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_ref_types(self):
        # BUG FIX: the original list was missing a comma after 'plate',
        # so implicit string concatenation produced 'platescheme' and the
        # values 'plate' and 'scheme' were never actually exercised.
        for reftype in ['chem', 'kwd', 'list', 'other', 'plate',
                        'scheme', 'statement']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <xref ref-type="%s">foo</xref>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % reftype
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertFalse(self._run_validation(sample))
class CaptionTests(PhaseBasedTestCase):
    """Tests for //caption

    A <caption> must contain a <title>; additional children (e.g. label)
    are tolerated as long as <title> is present.
    """
    sch_phase = 'phase.caption'

    def test_with_title(self):
        # caption containing a title: valid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <fig id="f03">
                          <label>Figura 3</label>
                          <caption>
                            <title>
                              Percentual de atividade mitocondrial.
                            </title>
                          </caption>
                          <graphic xlink:href="1234-5678-rctb-45-05-0110-gf01.tif"/>
                        </fig>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_without_title(self):
        # caption with only a label (no title) must be rejected.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <fig id="f03">
                          <label>Figura 3</label>
                          <caption>
                            <label>
                              Percentual de atividade mitocondrial.
                            </label>
                          </caption>
                          <graphic xlink:href="1234-5678-rctb-45-05-0110-gf01.tif"/>
                        </fig>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_with_title_and_more(self):
        # Extra children alongside title do not invalidate the caption.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <fig id="f03">
                          <label>Figura 3</label>
                          <caption>
                            <title>
                              Percentual de atividade mitocondrial.
                            </title>
                            <label>
                              Percentual de atividade mitocondrial.
                            </label>
                          </caption>
                          <graphic xlink:href="1234-5678-rctb-45-05-0110-gf01.tif"/>
                        </fig>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))
class LicenseTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/permissions/license element.

    Rules exercised here:
      * permissions and license are mandatory;
      * @license-type must be 'open-access';
      * @xlink:href must be a recognized Creative Commons URI (http or
        https scheme, with or without the trailing slash).
    """
    sch_phase = 'phase.license'

    def test_missing_permissions_elem(self):
        # No permissions element at all: invalid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_license(self):
        # permissions present but license missing: invalid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_allowed_license_type(self):
        # license-type="open-access" is the only accepted value.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                            <license license-type="open-access"
                                     xlink:href="http://creativecommons.org/licenses/by/4.0/">
                              <license-p>
                                This is an open-access article distributed under the terms...
                              </license-p>
                            </license>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_disallowed_license_type(self):
        # Any other license-type must be rejected.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                            <license license-type="closed-access"
                                     xlink:href="http://creativecommons.org/licenses/by/4.0/">
                              <license-p>
                                This is an open-access article distributed under the terms...
                              </license-p>
                            </license>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_allowed_license_href(self):
        # Full list of accepted Creative Commons URIs (http scheme).
        allowed_licenses = [
            'http://creativecommons.org/licenses/by-nc/4.0/',
            'http://creativecommons.org/licenses/by-nc/3.0/',
            'http://creativecommons.org/licenses/by/4.0/',
            'http://creativecommons.org/licenses/by/3.0/',
            'http://creativecommons.org/licenses/by-nc-nd/4.0/',
            'http://creativecommons.org/licenses/by-nc-nd/3.0/',
            'http://creativecommons.org/licenses/by/3.0/igo/',
            'http://creativecommons.org/licenses/by-nc/3.0/igo/',
            'http://creativecommons.org/licenses/by-nc-nd/3.0/igo/',
        ]
        for license in allowed_licenses:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <front>
                            <article-meta>
                              <permissions>
                                <license license-type="open-access"
                                         xlink:href="%s">
                                  <license-p>
                                    This is an open-access article distributed under the terms...
                                  </license-p>
                                </license>
                              </permissions>
                            </article-meta>
                          </front>
                        </article>
                     """ % license
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_allowed_license_href_https_scheme(self):
        # Same list with https scheme must also be accepted.
        allowed_licenses = [
            'https://creativecommons.org/licenses/by-nc/4.0/',
            'https://creativecommons.org/licenses/by-nc/3.0/',
            'https://creativecommons.org/licenses/by/4.0/',
            'https://creativecommons.org/licenses/by/3.0/',
            'https://creativecommons.org/licenses/by-nc-nd/4.0/',
            'https://creativecommons.org/licenses/by-nc-nd/3.0/',
            'https://creativecommons.org/licenses/by/3.0/igo/',
            'https://creativecommons.org/licenses/by-nc/3.0/igo/',
            'https://creativecommons.org/licenses/by-nc-nd/3.0/igo/',
        ]
        for license in allowed_licenses:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <front>
                            <article-meta>
                              <permissions>
                                <license license-type="open-access"
                                         xlink:href="%s">
                                  <license-p>
                                    This is an open-access article distributed under the terms...
                                  </license-p>
                                </license>
                              </permissions>
                            </article-meta>
                          </front>
                        </article>
                     """ % license
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_license_href(self):
        # A non-CC license URI must be rejected.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <front>
                        <article-meta>
                          <permissions>
                            <license license-type="open-access"
                                     xlink:href="http://opensource.org/licenses/MIT">
                              <license-p>
                                This is an open-access article distributed under the terms...
                              </license-p>
                            </license>
                          </permissions>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_trailing_slash(self):
        # The trailing slash is optional: the URI must still be accepted.
        allowed_licenses = [
            'https://creativecommons.org/licenses/by-nc/4.0',
        ]
        for license in allowed_licenses:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <front>
                            <article-meta>
                              <permissions>
                                <license license-type="open-access"
                                         xlink:href="%s">
                                  <license-p>
                                    This is an open-access article distributed under the terms...
                                  </license-p>
                                </license>
                              </permissions>
                            </article-meta>
                          </front>
                        </article>
                     """ % license
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))
class AckTests(PhaseBasedTestCase):
    """Tests for article/back/ack element.

    The acknowledgments section must not be subdivided into <sec>
    elements; plain title + paragraphs are expected.
    """
    sch_phase = 'phase.ack'

    def test_with_sec(self):
        """An <ack> containing a <sec> must fail validation."""
        xml = u"""<article>
                   <back>
                     <ack>
                       <sec>
                         <p>Some</p>
                       </sec>
                     </ack>
                   </back>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertFalse(self._run_validation(stream))

    def test_without_sec(self):
        """An <ack> with only title and paragraphs must pass validation."""
        xml = u"""<article>
                   <back>
                     <ack>
                       <title>Acknowledgment</title>
                       <p>Some text</p>
                     </ack>
                   </back>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertTrue(self._run_validation(stream))
class ElementCitationTests(PhaseBasedTestCase):
    """Tests for article/back/ref-list/ref/element-citation element.

    Rules exercised here:
      * name, etal and collab must be wrapped in a person-group;
      * @publication-type is restricted to a fixed vocabulary;
      * element-citation may only appear inside ref.
    """
    sch_phase = 'phase.element-citation'

    def test_with_name_outside_persongroup(self):
        # A bare <name> directly under element-citation is invalid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <name>Foo</name>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_with_name_inside_persongroup(self):
        # <name> wrapped in person-group: valid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group>
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_with_etal_outside_persongroup(self):
        # A bare <etal> directly under element-citation is invalid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <etal>Foo</etal>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_with_etal_inside_persongroup(self):
        # <etal> wrapped in person-group: valid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group>
                                <etal>Foo</etal>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_with_collab_outside_persongroup(self):
        # A bare <collab> directly under element-citation is invalid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <collab>Foo</collab>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_with_collab_inside_persongroup(self):
        # <collab> wrapped in person-group: valid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group>
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_allowed_publication_types(self):
        # Every value of the @publication-type vocabulary must validate.
        for pub_type in ['journal', 'book', 'webpage', 'thesis', 'confproc',
                         'patent', 'software', 'database', 'legal-doc', 'newspaper',
                         'other', 'report']:
            sample = u"""<article>
                          <back>
                            <ref-list>
                              <ref>
                                <element-citation publication-type="%s">
                                </element-citation>
                              </ref>
                            </ref-list>
                          </back>
                        </article>
                     """ % pub_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_publication_types(self):
        # A @publication-type outside the vocabulary must be rejected.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="invalid">
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_outside_ref(self):
        # element-citation outside of a <ref> must be rejected.
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <element-citation publication-type="journal">
                              <person-group>
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class PersonGroupTests(PhaseBasedTestCase):
    """Tests for
      - article/back/ref-list/ref/element-citation/person-group
      - article/front/article-meta/product/person-group

    Rules exercised here:
      * person-group must carry a @person-group-type attribute;
      * @person-group-type is restricted to: author, compiler, editor,
        translator;
      * person-group must not contain loose text nodes.
    """
    sch_phase = 'phase.person-group'

    def test_missing_type(self):
        # person-group without @person-group-type (citation): invalid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <person-group>
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_type_at_product(self):
        # person-group without @person-group-type (product): invalid.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <person-group>
                              <name>Foo</name>
                            </person-group>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_with_type(self):
        # person-group with a valid type attribute: valid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <person-group person-group-type="author">
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_allowed_types(self):
        # Every value of the @person-group-type vocabulary must validate.
        for group_type in ['author', 'compiler', 'editor', 'translator']:
            sample = u"""<article>
                          <back>
                            <ref-list>
                              <ref>
                                <element-citation>
                                  <person-group person-group-type="%s">
                                    <name>Foo</name>
                                  </person-group>
                                </element-citation>
                              </ref>
                            </ref-list>
                          </back>
                        </article>
                     """ % group_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_type(self):
        # A @person-group-type outside the vocabulary must be rejected.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <person-group person-group-type="invalid">
                                <name>Foo</name>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_loose_text_below_element_citation_node(self):
        # Loose text ("HERE") directly inside person-group: invalid.
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">HERE
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_loose_text_below_product_node(self):
        # Loose text inside the product's person-group must invalidate the
        # document even though the citation's person-group is clean.
        # BUG FIX: the second person-group's child element was garbled in
        # the original source (truncated to a stray marker); restored the
        # intended <collab>Foo</collab> child so the reference sample is
        # well-formed and genuinely "clean".
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <person-group person-group-type="author">HERE
                              <collab>Foo</collab>
                            </person-group>
                          </product>
                        </article-meta>
                      </front>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <collab>Foo</collab>
                              </person-group>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class FNGroupTests(PhaseBasedTestCase):
    """Tests for article/back/fn-group/fn element.

    Validates fn/@fn-type (back matter) against the allowed vocabulary.
    """
    sch_phase = 'phase.fn-group'

    def test_allowed_fn_types(self):
        """Each allowed @fn-type value must validate on its own."""
        allowed = ['abbr', 'com', 'financial-disclosure', 'supported-by',
                   'presented-at', 'supplementary-material', 'other']
        for fn_type in allowed:
            xml = u"""<article>
                       <back>
                         <fn-group>
                           <fn fn-type="%s">
                             <p>foobar</p>
                           </fn>
                         </fn-group>
                       </back>
                     </article>
                   """ % fn_type
            stream = io.BytesIO(xml.encode('utf-8'))
            self.assertTrue(self._run_validation(stream))

    def test_disallowed_fn_types(self):
        """A @fn-type outside the vocabulary must be rejected."""
        xml = u"""<article>
                   <back>
                     <fn-group>
                       <fn fn-type="invalid">
                         <p>foobar</p>
                       </fn>
                     </fn-group>
                   </back>
                 </article>
               """
        stream = io.BytesIO(xml.encode('utf-8'))
        self.assertFalse(self._run_validation(stream))
class XHTMLTableTests(PhaseBasedTestCase):
    """Tests for //table elements.

    Rules exercised here:
      * only caption/summary/col/colgroup/thead/tfoot/tbody may appear
        directly under <table> (no bare <tr>);
      * <th> cells belong in <thead>, <td> cells do not.
    """
    sch_phase = 'phase.xhtml-table'

    def test_valid_toplevel(self):
        # Each allowed direct child of <table> must validate.
        for elem in ['caption', 'summary', 'col', 'colgroup', 'thead', 'tfoot', 'tbody']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <table>
                                  <%s></%s>
                                </table>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % (elem, elem)
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_invalid_toplevel(self):
        # <tr> directly under <table> must be rejected.
        for elem in ['tr']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <table>
                                  <%s></%s>
                                </table>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % (elem, elem)
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertFalse(self._run_validation(sample))

    def test_tbody_upon_th(self):
        # <th> inside <tbody> is invalid (headers belong in thead).
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <table>
                              <tbody>
                                <tr>
                                  <th>Foo</th>
                                </tr>
                              </tbody>
                            </table>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_thead_upon_th(self):
        # <th> inside <thead> is the valid placement.
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <table>
                              <thead>
                                <tr>
                                  <th>Foo</th>
                                </tr>
                              </thead>
                            </table>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_thead_upon_td(self):
        # <td> inside <thead> is invalid (data cells belong in tbody).
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <table>
                              <thead>
                                <tr>
                                  <td>Foo</td>
                                </tr>
                              </thead>
                            </table>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class SupplementaryMaterialMimetypeTests(PhaseBasedTestCase):
    """Tests for article//supplementary-material elements.

    @mimetype and @mime-subtype must be present together: only the
    case where both are given is valid (truth table covered by the
    four test cases below).
    """
    sch_phase = 'phase.supplementary-material'

    def test_case1(self):
        """mimetype is True
           mime-subtype is True
           mimetype ^ mime-subtype is True
        """
        # Both attributes present: valid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf"
                                                mimetype="application"
                                                mime-subtype="pdf">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertTrue(self._run_validation(sample))

    def test_case2(self):
        """mimetype is True
           mime-subtype is False
           mimetype ^ mime-subtype is False
        """
        # mime-subtype missing: invalid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf"
                                                mimetype="application">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_case3(self):
        """mimetype is False
           mime-subtype is True
           mimetype ^ mime-subtype is False
        """
        # mimetype missing: invalid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf"
                                                mime-subtype="pdf">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_case4(self):
        """mimetype is False
           mime-subtype is False
           mimetype ^ mime-subtype is False
        """
        # Both attributes missing: invalid.
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <supplementary-material id="S1"
                                                xlink:title="local_file"
                                                xlink:href="1471-2105-1-1-s1.pdf">
                          <label>Additional material</label>
                          <caption>
                            <p>Supplementary PDF file supplied by authors.</p>
                          </caption>
                        </supplementary-material>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class AuthorNotesFNTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/author-notes/fn element.

    Validates fn/@fn-type under author-notes, which has a larger allowed
    vocabulary than back-matter fn-group (note the shared sch_phase).
    """
    sch_phase = 'phase.fn-group'

    def test_allowed_fn_types(self):
        # Each allowed @fn-type value must validate on its own.
        for fn_type in ['author', 'con', 'conflict', 'corresp', 'current-aff',
                        'deceased', 'edited-by', 'equal', 'on-leave',
                        'participating-researchers', 'present-address',
                        'previously-at', 'study-group-members', 'other',
                        'presented-at', 'presented-by']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <author-notes>
                                <fn fn-type="%s">
                                  <p>foobar</p>
                                </fn>
                              </author-notes>
                            </article-meta>
                          </front>
                        </article>
                     """ % fn_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_fn_types(self):
        # A @fn-type outside the vocabulary must be rejected.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <author-notes>
                            <fn fn-type="invalid">
                              <p>foobar</p>
                            </fn>
                          </author-notes>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class ArticleAttributesTests(PhaseBasedTestCase):
    """Tests for article element.

    The root <article> must carry: a valid @article-type, @xml:lang,
    @dtd-version and @specific-use (SPS version, here sps-1.3).
    """
    sch_phase = 'phase.article-attrs'

    def test_allowed_article_types(self):
        # Each allowed @article-type, with all mandatory attributes present.
        for art_type in ['other', 'article-commentary', 'case-report',
                         'editorial', 'correction', 'letter', 'research-article',
                         'in-brief', 'review-article', 'book-review', 'retraction',
                         'brief-report', 'rapid-communication', 'reply', 'translation']:
            sample = u"""<article article-type="%s" xml:lang="en" dtd-version="1.0" specific-use="sps-1.3">
                        </article>
                     """ % art_type
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_article_type(self):
        # Unknown @article-type must be rejected.
        sample = u"""<article article-type="invalid" dtd-version="1.0" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_article_type(self):
        # @article-type is mandatory.
        sample = u"""<article xml:lang="en" dtd-version="1.0" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_xmllang(self):
        # @xml:lang is mandatory.
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_dtdversion(self):
        # @dtd-version is mandatory.
        sample = u"""<article article-type="research-article" xml:lang="en" specific-use="sps-1.3">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_missing_sps_version(self):
        # @specific-use (SPS version) is mandatory.
        sample = u"""<article article-type="research-article" dtd-version="1.0" xml:lang="en">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_invalid_sps_version(self):
        # A @specific-use value other than the supported SPS version fails.
        sample = u"""<article article-type="research-article" dtd-version="1.0" xml:lang="en" specific-use="sps-1.0">
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class NamedContentTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/aff/addr-line/named-content elements.

    named-content under addr-line must carry @content-type with value
    'city' or 'state'.
    """
    sch_phase = 'phase.named-content_attrs'

    def test_missing_contenttype(self):
        # named-content without @content-type: invalid.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <addr-line>
                              <named-content>Foo</named-content>
                            </addr-line>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))

    def test_allowed_contenttype(self):
        # Each allowed value ('city', 'state') must validate.
        for ctype in ['city', 'state']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <aff>
                                <addr-line>
                                  <named-content content-type="%s">Foo</named-content>
                                </addr-line>
                              </aff>
                            </article-meta>
                          </front>
                        </article>
                     """ % ctype
            sample = io.BytesIO(sample.encode('utf-8'))

            self.assertTrue(self._run_validation(sample))

    def test_disallowed_contenttype(self):
        # A @content-type outside the vocabulary must be rejected.
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <addr-line>
                              <named-content content-type="invalid">Foo</named-content>
                            </addr-line>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))

        self.assertFalse(self._run_validation(sample))
class MonthTests(PhaseBasedTestCase):
    """Tests for //month elements.

    Months must be numeric, in the range 1-12, with or without zero-padding.
    """
    sch_phase = 'phase.month'

    def test_range_1_12(self):
        """Unpadded values 1..12 are accepted."""
        for month in range(1, 13):
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date>
                                <month>%s</month>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % month
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_range_01_12(self):
        """Zero-padded values 01..12 are accepted."""
        for month in range(1, 13):
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date>
                                <month>%02d</month>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % month
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_out_of_range(self):
        """Boundary violations (0 and 13) are rejected."""
        for month in [0, 13]:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <pub-date>
                                <month>%s</month>
                              </pub-date>
                            </article-meta>
                          </front>
                        </article>
                     """ % month
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertFalse(self._run_validation(sample))

    def test_must_be_integer(self):
        """Textual month names (e.g. 'January') are rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <pub-date>
                            <month>January</month>
                          </pub-date>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class SizeTests(PhaseBasedTestCase):
    """Tests for:
      - article/front/article-meta/product/size
      - article/back/ref-list/ref/element-citation/size

    The @units attribute is mandatory and must be 'pages'.
    """
    sch_phase = 'phase.size'

    def test_in_element_citation(self):
        """<size units="pages"> inside element-citation is accepted."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <size units="pages">2</size>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_in_product(self):
        """<size units="pages"> inside product is accepted."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <size units="pages">2</size>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_units_in_product(self):
        """<size> without @units inside product is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <size>2</size>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_units_in_element_citation(self):
        """<size> without @units inside element-citation is rejected."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation>
                              <size>2</size>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_invalid_units_value(self):
        """@units values other than 'pages' are rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <product>
                            <size units="invalid">2</size>
                          </product>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ListTests(PhaseBasedTestCase):
    """Tests for list elements.

    @list-type is mandatory (also on nested lists) and restricted to:
    order, bullet, alpha-lower, alpha-upper, roman-lower, roman-upper, simple.
    """
    sch_phase = 'phase.list'

    def test_allowed_list_type(self):
        """Every allowed @list-type value is accepted, including on sub-lists."""
        for list_type in ['order', 'bullet', 'alpha-lower', 'alpha-upper',
                          'roman-lower', 'roman-upper', 'simple']:
            sample = u"""<article>
                          <body>
                            <sec>
                              <p>
                                <list list-type="%s">
                                  <title>Lista Númerica</title>
                                  <list-item>
                                    <p>Nullam gravida tellus eget condimentum egestas.</p>
                                  </list-item>
                                  <list-item>
                                    <list list-type="%s">
                                      <list-item>
                                        <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                      </list-item>
                                    </list>
                                  </list-item>
                                  <list-item>
                                    <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                                  </list-item>
                                </list>
                              </p>
                            </sec>
                          </body>
                        </article>
                     """ % (list_type, list_type)
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_list_type(self):
        """An invalid @list-type on the outer list is rejected."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list list-type="invalid">
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list list-type="invalid">
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_disallowed_sub_list_type(self):
        """An invalid @list-type on a nested list is rejected even when the outer one is valid."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list list-type="order">
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list list-type="invalid">
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_list_type(self):
        """A list without @list-type is rejected."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list>
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list>
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_sub_list_type(self):
        """A nested list without @list-type is rejected even when the outer one has it."""
        sample = u"""<article>
                      <body>
                        <sec>
                          <p>
                            <list list-type="order">
                              <title>Lista Númerica</title>
                              <list-item>
                                <p>Nullam gravida tellus eget condimentum egestas.</p>
                              </list-item>
                              <list-item>
                                <list>
                                  <list-item>
                                    <p>Curabitur luctus lorem ac feugiat pretium.</p>
                                  </list-item>
                                </list>
                              </list-item>
                              <list-item>
                                <p>Donec pulvinar odio ut enim lobortis, eu dignissim elit accumsan.</p>
                              </list-item>
                            </list>
                          </p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class MediaTests(PhaseBasedTestCase):
    """Tests for article/body//p/media elements.

    @mimetype, @mime-subtype and @xlink:href are all mandatory.
    """
    sch_phase = 'phase.media_attributes'

    def _validate_media(self, media_markup):
        """Run validation over a minimal article wrapping the given <media/>."""
        document = (
            u'<article xmlns:xlink="http://www.w3.org/1999/xlink">'
            u'<body>'
            u'<p>' + media_markup + u'</p>'
            u'</body>'
            u'</article>'
        )
        return self._run_validation(io.BytesIO(document.encode('utf-8')))

    def test_missing_mimetype(self):
        """<media> lacking @mimetype is rejected."""
        markup = u'<media mime-subtype="mp4" xlink:href="1234-5678-rctb-45-05-0110-m01.mp4"/>'
        self.assertFalse(self._validate_media(markup))

    def test_missing_mime_subtype(self):
        """<media> lacking @mime-subtype is rejected."""
        markup = u'<media mimetype="video" xlink:href="1234-5678-rctb-45-05-0110-m01.mp4"/>'
        self.assertFalse(self._validate_media(markup))

    def test_missing_href(self):
        """<media> lacking @xlink:href is rejected."""
        markup = u'<media mimetype="video" mime-subtype="mp4"/>'
        self.assertFalse(self._validate_media(markup))

    def test_all_present(self):
        """<media> carrying all three attributes is accepted."""
        markup = (u'<media mimetype="video" mime-subtype="mp4" '
                  u'xlink:href="1234-5678-rctb-45-05-0110-m01.mp4"/>')
        self.assertTrue(self._validate_media(markup))
class ExtLinkTests(PhaseBasedTestCase):
    """Tests for ext-link elements.

    @ext-link-type (restricted to 'uri' or 'clinical-trial') and @xlink:href
    are mandatory; URI hrefs must carry an explicit scheme.
    """
    sch_phase = 'phase.ext-link'

    def test_complete(self):
        """A fully attributed ext-link is accepted."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="uri" xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_allowed_extlinktype(self):
        """Both allowed @ext-link-type values are accepted."""
        for link_type in ['uri', 'clinical-trial' ]:
            sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                          <body>
                            <sec>
                              <p>Neque porro quisquam est <ext-link ext-link-type="%s" xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                            </sec>
                          </body>
                        </article>
                     """ % link_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_extlinktype(self):
        """Any other @ext-link-type value is rejected."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="invalid" xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_extlinktype(self):
        """ext-link without @ext-link-type is rejected."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link xlink:href="http://www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_xlinkhref(self):
        """ext-link without @xlink:href is rejected."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="uri">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_uri_without_scheme(self):
        """An href with no scheme (e.g. bare 'www.…') is rejected."""
        sample = u"""<article xmlns:xlink="http://www.w3.org/1999/xlink">
                      <body>
                        <sec>
                          <p>Neque porro quisquam est <ext-link ext-link-type="uri" xlink:href="www.scielo.org">www.scielo.org</ext-link> qui dolorem ipsum quia</p>
                        </sec>
                      </body>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class SubArticleAttributesTests(PhaseBasedTestCase):
    """Tests for sub-article element.

    @article-type (abstract|letter|reply|translation), @xml:lang and @id
    are all mandatory.
    """
    sch_phase = 'phase.sub-article-attrs'

    def test_allowed_article_types(self):
        """Every allowed @article-type value is accepted."""
        for art_type in ['abstract', 'letter', 'reply', 'translation']:
            sample = u"""<article article-type="research-article" xml:lang="en" dtd-version="1.0" specific-use="sps-1.3">
                          <sub-article article-type="%s" xml:lang="pt" id="sa1"></sub-article>
                        </article>
                     """ % art_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_article_type(self):
        """An invalid sub-article @article-type is rejected."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article article-type="invalid" xml:lang="pt" id="trans_pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_article_type(self):
        """sub-article without @article-type is rejected."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article xml:lang="pt" id="trans_pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_xmllang(self):
        """sub-article without @xml:lang is rejected."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article article-type="translation" id="trans_pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_id(self):
        """sub-article without @id is rejected."""
        sample = u"""<article article-type="research-article" dtd-version="1.0" specific-use="sps-1.3">
                      <sub-article article-type="translation" xml:lang="pt"></sub-article>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ResponseAttributesTests(PhaseBasedTestCase):
    """Tests for response element.

    @response-type (addendum|discussion|reply), @xml:lang and @id are all
    mandatory.
    """
    sch_phase = 'phase.response-attrs'

    def test_allowed_response_types(self):
        """Every allowed @response-type value is accepted."""
        # `response_type` (not `type`) to avoid shadowing the builtin.
        for response_type in ['addendum', 'discussion', 'reply']:
            sample = u"""<article>
                          <response response-type="%s" xml:lang="pt" id="r1"></response>
                        </article>
                     """ % response_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_response_type(self):
        """An invalid @response-type is rejected."""
        sample = u"""<article>
                      <response response-type="invalid" xml:lang="pt" id="r1"></response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_response_type(self):
        """response without @response-type is rejected."""
        sample = u"""<article>
                      <response xml:lang="pt" id="r1"></response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_xmllang(self):
        """response without @xml:lang is rejected.

        Uses a valid @response-type so the failure is attributable only to
        the missing @xml:lang (the previous fixture used an invalid type,
        which masked the attribute actually under test).
        """
        sample = u"""<article>
                      <response response-type="addendum" id="r1"></response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_id(self):
        """response without @id is rejected.

        Uses a valid @response-type so the failure is attributable only to
        the missing @id (see note in test_missing_xmllang).
        """
        sample = u"""<article>
                      <response response-type="addendum" xml:lang="pt"></response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class ResponseReplyAttributeTests(PhaseBasedTestCase):
    """Tests for response[@response-type='reply'] elements.

    A reply requires article/@article-type='article-commentary' and a
    related-article carrying @vol plus either @page or @elocation-id.
    """
    sch_phase = 'phase.response-reply-type'

    def test_reply_type_demands_an_article_type(self):
        """ the article-type of value `article-commentary` is required
        """
        sample = u"""<article article-type="article-commentary">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                          <related-article related-article-type="commentary-article" id="ra1" vol="109" page="87-92"/>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_reply_type_invalid_article_type(self):
        """ anything different of `article-commentary` is invalid
        """
        sample = u"""<article article-type="research-article">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                          <related-article related-article-type="commentary-article" id="ra1" vol="109" page="87-92"/>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_reply_type_missing_related_article(self):
        """ the related-article element is required
        """
        sample = u"""<article article-type="article-commentary">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_related_article_missing_vol(self):
        """related-article without @vol is rejected."""
        sample = u"""<article article-type="article-commentary">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                          <related-article related-article-type="commentary-article" id="ra1" page="87-92"/>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_related_article_missing_page(self):
        """@page may be absent as long as @elocation-id is present."""
        sample = u"""<article article-type="article-commentary">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                          <related-article related-article-type="commentary-article" id="ra1" vol="109" elocation-id="1q2w"/>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_related_article_missing_elocationid(self):
        """@elocation-id may be absent as long as @page is present."""
        sample = u"""<article article-type="article-commentary">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                          <related-article related-article-type="commentary-article" id="ra1" vol="109" page="87-92"/>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_related_article_missing_page_and_elocationid(self):
        """Missing both @page and @elocation-id is rejected."""
        sample = u"""<article article-type="article-commentary">
                      <response response-type="reply" xml:lang="pt" id="r1">
                        <front-stub>
                          <related-article related-article-type="commentary-article" id="ra1" vol="109"/>
                        </front-stub>
                      </response>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class RelatedArticleTypesTests(PhaseBasedTestCase):
    """Tests for related-article element.

    @related-article-type (corrected-article|press-release|
    commentary-article|article-reference) and @id are mandatory.
    """
    sch_phase = 'phase.related-article-attrs'

    def test_allowed_related_article_types(self):
        """Every allowed @related-article-type value is accepted."""
        # Renamed the loop variable from `type`, which shadowed the builtin.
        for related_type in ['corrected-article', 'press-release',
                             'commentary-article', 'article-reference']:
            sample = u"""<article>
                          <front>
                            <article-meta>
                              <related-article related-article-type="%s" id="01"/>
                            </article-meta>
                          </front>
                        </article>
                     """ % related_type
            sample = io.BytesIO(sample.encode('utf-8'))
            self.assertTrue(self._run_validation(sample))

    def test_disallowed_related_article_type(self):
        """An invalid @related-article-type is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <related-article related-article-type="invalid" id="01"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_id(self):
        """related-article without @id is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <related-article related-article-type="corrected-article"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_related_article_type(self):
        """related-article without @related-article-type is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <related-article id="01"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class CorrectionTests(PhaseBasedTestCase):
    """Tests for article[@article-type="correction"] element.

    A correction article must reference the corrected article via
    related-article[@related-article-type='corrected-article'].
    """
    sch_phase = 'phase.correction'

    def test_expected_elements(self):
        """A correction with the required related-article is accepted."""
        sample = u"""<article article-type="correction">
                      <front>
                        <article-meta>
                          <related-article related-article-type="corrected-article" id="01"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_related_article(self):
        """ must have a related-article[@related-article-type='corrected-article']
        element.
        """
        sample = u"""<article article-type="correction">
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_article_type_must_be_correction(self):
        """A corrected-article reference outside a correction article is rejected."""
        sample = u"""<article article-type="research-article">
                      <front>
                        <article-meta>
                          <related-article related-article-type="corrected-article" id="01"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class InBriefTests(PhaseBasedTestCase):
    """Tests for article[@article-type="in-brief"] element.

    An in-brief article must reference its subject article via
    related-article[@related-article-type='article-reference'].
    """
    sch_phase = 'phase.in-brief'

    def test_expected_elements(self):
        """An in-brief with the required related-article is accepted."""
        sample = u"""<article article-type="in-brief">
                      <front>
                        <article-meta>
                          <related-article related-article-type="article-reference" id="01"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_related_article(self):
        """ must have a related-article[@related-article-type='article-reference']
        element.
        """
        sample = u"""<article article-type="in-brief">
                      <front>
                        <article-meta>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_article_type_must_be_in_brief(self):
        """An article-reference outside an in-brief article is rejected."""
        sample = u"""<article article-type="research-article">
                      <front>
                        <article-meta>
                          <related-article related-article-type="article-reference" id="01"/>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class FundingGroupTests(PhaseBasedTestCase):
    """Tests for article/front/article-meta/funding-group elements.

    When a financial-disclosure footnote is present, funding-group must
    carry both an award-group and a funding-statement.
    """
    sch_phase = 'phase.funding-group'

    def test_funding_statement_when_fn_is_present_missing_award_group(self):
        """funding-statement alone (no award-group) is rejected when the fn exists."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <funding-group>
                            <funding-statement>This study was supported by FAPEST #12345</funding-statement>
                          </funding-group>
                        </article-meta>
                      </front>
                      <back>
                        <fn-group>
                          <fn id="fn01" fn-type="financial-disclosure">
                            <p>This study was supported by FAPEST #12345</p>
                          </fn>
                        </fn-group>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_funding_statement_when_fn_is_present(self):
        """award-group plus funding-statement is accepted when the fn exists."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <funding-group>
                            <award-group>
                              <funding-source>FAPEST</funding-source>
                              <award-id>12345</award-id>
                            </award-group>
                            <funding-statement>This study was supported by FAPEST #12345</funding-statement>
                          </funding-group>
                        </article-meta>
                      </front>
                      <back>
                        <fn-group>
                          <fn id="fn01" fn-type="financial-disclosure">
                            <p>This study was supported by FAPEST #12345</p>
                          </fn>
                        </fn-group>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_funding_statement_when_fn_is_present(self):
        """award-group alone (no funding-statement) is rejected when the fn exists."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <funding-group>
                            <award-group>
                              <funding-source>FAPEST</funding-source>
                              <award-id>12345</award-id>
                            </award-group>
                          </funding-group>
                        </article-meta>
                      </front>
                      <back>
                        <fn-group>
                          <fn id="fn01" fn-type="financial-disclosure">
                            <p>This study was supported by FAPEST #12345</p>
                          </fn>
                        </fn-group>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class AffCountryTests(PhaseBasedTestCase):
    """ //aff/country/@country is required.

    See: https://github.com/scieloorg/packtools/issues/44
    The attribute must be present and the element non-empty; the attribute's
    value itself is not validated by this phase.
    """
    sch_phase = 'phase.aff_country'

    def test_attribute_is_present(self):
        """<country country="…"> with text content is accepted."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <country country="BR">Brasil</country>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_attribute_is_absent(self):
        """<country> without the @country attribute is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <country>Brasil</country>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_attribute_value_is_not_validated(self):
        """Any @country value passes: the code itself is not checked here."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <country country="XZ">Brasil</country>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_country_cannot_be_empty(self):
        """An empty <country></country> element is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <country country="XZ"></country>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_country_cannot_be_empty_closed_element(self):
        """A self-closed <country/> element is rejected as empty too."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <country country="XZ"/>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class RefTests(PhaseBasedTestCase):
    """Tests for article/back/ref-list/ref element.

    Each ref must carry both a non-empty mixed-citation and an
    element-citation.
    """
    sch_phase = 'phase.ref'

    def test_element_and_mixed_citation_elements(self):
        """A ref with both citation forms is accepted."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <name>
                                  <surname>Aires</surname>
                                  <given-names>M</given-names>
                                </name>
                                <name>
                                  <surname>Paz</surname>
                                  <given-names>AA</given-names>
                                </name>
                                <name>
                                  <surname>Perosa</surname>
                                  <given-names>CT</given-names>
                                </name>
                              </person-group>
                              <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                              <source>Rev Gaucha Enferm</source>
                              <year>2009</year>
                              <volume>30</volume>
                              <issue>3</issue>
                              <fpage>192</fpage>
                              <lpage>199</lpage>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_missing_element_citation(self):
        """A ref with only a mixed-citation is rejected."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <mixed-citation>Aires M, Paz AA, Perosa CT. Situação de saúde e grau de dependência de pessoas idosas institucionalizadas. <italic>Rev Gaucha Enferm.</italic> 2009;30(3):192-9.</mixed-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_missing_mixed_citation(self):
        """A ref with only an element-citation is rejected."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <name>
                                  <surname>Aires</surname>
                                  <given-names>M</given-names>
                                </name>
                                <name>
                                  <surname>Paz</surname>
                                  <given-names>AA</given-names>
                                </name>
                                <name>
                                  <surname>Perosa</surname>
                                  <given-names>CT</given-names>
                                </name>
                              </person-group>
                              <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                              <source>Rev Gaucha Enferm</source>
                              <year>2009</year>
                              <volume>30</volume>
                              <issue>3</issue>
                              <fpage>192</fpage>
                              <lpage>199</lpage>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))

    def test_mixed_citation_cannot_be_empty(self):
        """An empty mixed-citation is rejected even alongside a full element-citation."""
        sample = u"""<article>
                      <back>
                        <ref-list>
                          <ref>
                            <mixed-citation></mixed-citation>
                            <element-citation publication-type="journal">
                              <person-group person-group-type="author">
                                <name>
                                  <surname>Aires</surname>
                                  <given-names>M</given-names>
                                </name>
                                <name>
                                  <surname>Paz</surname>
                                  <given-names>AA</given-names>
                                </name>
                                <name>
                                  <surname>Perosa</surname>
                                  <given-names>CT</given-names>
                                </name>
                              </person-group>
                              <article-title>Situação de saúde e grau de dependência de pessoas idosas institucionalizadas</article-title>
                              <source>Rev Gaucha Enferm</source>
                              <year>2009</year>
                              <volume>30</volume>
                              <issue>3</issue>
                              <fpage>192</fpage>
                              <lpage>199</lpage>
                            </element-citation>
                          </ref>
                        </ref-list>
                      </back>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
class AffTests(PhaseBasedTestCase):
    """ /article//aff is required.

    NOTE(review): the visible tests actually check that <country> is present
    inside <aff> (phase.aff) — confirm the class docstring against the schema.
    """
    sch_phase = 'phase.aff'

    def test_country_is_present(self):
        """An aff carrying a <country> element is accepted."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                            <country country="BR">Brasil</country>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertTrue(self._run_validation(sample))

    def test_country_is_absent(self):
        """An aff with no <country> element is rejected."""
        sample = u"""<article>
                      <front>
                        <article-meta>
                          <aff>
                            <institution content-type="original">
                              Grupo de ...
                            </institution>
                          </aff>
                        </article-meta>
                      </front>
                    </article>
                 """
        sample = io.BytesIO(sample.encode('utf-8'))
        self.assertFalse(self._run_validation(sample))
<|file_name|>buildbot_selector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import os
import subprocess
import sys

# Make the repository root importable so pynacl can be found below.
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

import pynacl.platform

# Interpreter/shell prefixes used when composing bot command lines.
python = sys.executable
bash = '/bin/bash'
echo = 'echo'
######################################################################
'xp-newlib-opt':
python + ' buildbot\\buildbot_standard.py opt 32 newlib --no-gyp',
'xp-glibc-opt':
python + ' buildbot\\buildbot_standard.py opt 32 glibc --no-gyp',
'xp-bare-newlib-opt':
python + ' buildbot\\buildbot_standard.py opt 32 newlib --no-gyp',
'xp-bare-glibc-opt':
python + ' buildbot\\buildbot_standard.py opt 32 glibc --no-gyp',
'precise-64-validator-opt':
python + ' buildbot/buildbot_standard.py opt 64 glibc --validator',
# Clang.
'precise_64-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 64 newlib --clang',
'mac10.7-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 32 newlib --clang',
# ASan.
'precise_64-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 64 newlib --asan',
'mac10.7-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 32 newlib --asan',
# PNaCl.
'oneiric_32-newlib-arm_hw-pnacl-panda-dbg':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-dbg',
'oneiric_32-newlib-arm_hw-pnacl-panda-opt':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-opt',
'precise_64-newlib-arm_qemu-pnacl-dbg':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-dbg',
'precise_64-newlib-arm_qemu-pnacl-opt':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-opt',
'precise_64-newlib-x86_32-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'precise_64-newlib-x86_64-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'mac10.8-newlib-opt-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'win7-64-newlib-opt-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'precise_64-newlib-mips-pnacl':
echo + ' "TODO(mseaborn): add mips"',
# PNaCl Spec
'precise_64-newlib-arm_qemu-pnacl-buildonly-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-arm-buildonly',
'oneiric_32-newlib-arm_hw-pnacl-panda-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-arm-hw',
'lucid_64-newlib-x86_32-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-x8632',
'lucid_64-newlib-x86_64-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-x8664',
# NaCl Spec
'lucid_64-newlib-x86_32-spec':
bash + ' buildbot/buildbot_spec2k.sh nacl-x8632',
'lucid_64-newlib-x86_64-spec':
bash + ' buildbot/buildbot_spec2k.sh nacl-x8664',
# Valgrind bots.
'precise-64-newlib-dbg-valgrind':
echo + ' "Valgrind bots are disabled: see '
'https://code.google.com/p/nativeclient/issues/detail?id=3158"',
'precise-64-glibc-dbg-valgrind':
echo + ' "Valgrind bots are disabled: see '
'https://code.google.com/p/nativeclient/issues/detail?id=3158"',
# Coverage.
'mac10.6-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'precise-64-32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
'precise-64-64-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'xp-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
######################################################################
# Trybots.
######################################################################
'nacl-precise64_validator_opt':
python + ' buildbot/buildbot_standard.py opt 64 glibc --validator',
'nacl-precise64_newlib_dbg_valgrind':
bash + ' buildbot/buildbot_valgrind.sh newlib',
'nacl-precise64_glibc_dbg_valgrind':
bash + ' buildbot/buildbot_valgrind.sh glibc',
# Coverage trybots.
'nacl-mac10.6-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'nacl-precise-64-32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
'nacl-precise-64-64-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'nacl-win32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
# Clang trybots.
'nacl-precise_64-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 64 newlib --clang',
'nacl-mac10.6-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 32 newlib --clang',
# Pnacl main trybots
'nacl-precise_64-newlib-arm_qemu-pnacl':
bash + ' buildbot/buildbot_pnacl.sh mode-trybot-qemu',
'nacl-precise_64-newlib-x86_32-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'nacl-precise_64-newlib-x86_64-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'nacl-precise_64-newlib-mips-pnacl':
echo + ' "TODO(mseaborn): add mips"',
'nacl-arm_opt_panda':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-try',
'nacl-arm_hw_opt_panda':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-try',
'nacl-mac10.8_newlib_opt_pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'nacl-win7_64_newlib_opt_pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
# Pnacl spec2k trybots
'nacl-precise_64-newlib-x86_32-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-x8632',
'nacl-precise_64-newlib-x86_64-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-x8664',
'nacl-arm_perf_panda':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-arm-buildonly',
'nacl-arm_hw_perf_panda':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-arm-hw',
# Toolchain glibc.
'precise64-glibc': bash + ' buildbot/buildbot_linux-glibc-makefile.sh',
'mac-glibc': bash + ' buildbot/buildbot_mac-glibc-makefile.sh',
'win7-glibc': 'buildbot\\buildbot_windows-glibc-makefile.bat',
# Toolchain newlib x86.
'win7-toolchain_x86': 'buildbot\\buildbot_toolchain_win.bat',
'mac-toolchain_x86': bash + ' buildbot/buildbot_toolchain.sh mac',
'precise64-toolchain_x86': bash + ' buildbot/buildbot_toolchain.sh linux',
# Toolchain newlib arm.
'win7-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --buildbot',
'mac-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --buildbot',
'precise64-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --buildbot',
# BIONIC toolchain builders.
'precise64-toolchain_bionic':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build_bionic'
' --buildbot',
# Pnacl toolchain builders.
'linux-armtools-x86_32':
bash + ' buildbot/buildbot_toolchain_arm_trusted.sh',
'linux-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'linux-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'precise-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'precise-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'mac-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# TODO(robertm): Delete this once we are using win-pnacl-x86_64
'win-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# TODO(robertm): use this in favor or the misnamed win-pnacl-x86_32
'win-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# Pnacl toolchain testers
'linux-pnacl-x86_64-tests-x86_64':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot x86-64',
'linux-pnacl-x86_64-tests-x86_32':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot x86-32',
'linux-pnacl-x86_64-tests-arm':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot arm',
# MIPS toolchain buildbot.
'linux-pnacl-x86_32-tests-mips':
bash + ' buildbot/buildbot_toolchain_mips_trusted.sh',
# Toolchain trybots.
'nacl-toolchain-precise64-newlib':
bash + ' buildbot/buildbot_toolchain.sh linux',
'nacl-toolchain-mac-newlib': bash + ' buildbot/buildbot_toolchain.sh mac',
'nacl-toolchain-win7-newlib': 'buildbot\\buildbot_toolchain_win.bat',
'nacl-toolchain-precise64-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --trybot',
'nacl-toolchain-mac-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --trybot',
'nacl-toolchain-win7-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --trybot',
'nacl-toolchain-precise64-glibc':
bash + ' buildbot/buildbot_linux-glibc-makefile.sh',
'nacl-toolchain-mac-glibc':
bash + ' buildbot/buildbot_mac-glibc-makefile.sh',
'nacl-toolchain-win7-glibc':
'buildbot\\buildbot_windows-glibc-makefile.bat',
# Pnacl toolchain trybots.
'nacl-toolchain-linux-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-linux-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-linux-pnacl-mips': echo + ' "TODO(mseaborn)"',
'nacl-toolchain-precise-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-precise-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-precise-pnacl-mips': echo + ' "TODO(mseaborn)"',
'nacl-toolchain-mac-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-win7-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
}
# Platforms that only act as ARM cross-builders (no native 32/64 variants).
special_for_arm = [
    'win7_64',
    'win7-64',
    'lucid-64',
    'lucid64',
    'precise-64',
    'precise64'
]

# Autogenerate the standard {dbg,opt} x {newlib,glibc} buildbot and trybot
# command entries for every platform/architecture combination and merge
# them into BOT_ASSIGNMENT.
for platform in [
    'vista', 'win7', 'win8', 'win',
    'mac10.6', 'mac10.7', 'mac10.8',
    'lucid', 'precise'] + special_for_arm:
  if platform in special_for_arm:
    arch_variants = ['arm']
  else:
    # '' means "no arch suffix in the bot name" and implies a 32-bit build.
    arch_variants = ['', '32', '64', 'arm']
  for arch in arch_variants:
    arch_flags = ''
    real_arch = arch
    arch_part = '-' + arch
    # Disable GYP build for win32 bots and arm cross-builders. In this case
    # "win" means Windows XP, not Vista, Windows 7, etc.
    #
    # Building via GYP always builds all toolchains by default, but the win32
    # XP pnacl builds are pathologically slow (e.g. ~38 seconds per compile on
    # the nacl-win32_glibc_opt trybot). There are other builders that test
    # Windows builds via gyp, so the reduced test coverage should be slight.
    if arch == 'arm' or (platform == 'win' and arch == '32'):
      arch_flags += ' --no-gyp'
    if arch == '':
      # Unsuffixed names: drop the separator and default to 32-bit.
      arch_part = ''
      real_arch = '32'
    # Test with Breakpad tools only on basic Linux builds.
    if sys.platform.startswith('linux'):
      arch_flags += ' --use-breakpad-tools'
    for mode in ['dbg', 'opt']:
      for libc in ['newlib', 'glibc']:
        # Buildbots.
        for bare in ['', '-bare']:
          name = platform + arch_part + bare + '-' + libc + '-' + mode
          # Generated names must never collide with the hand-written table.
          assert name not in BOT_ASSIGNMENT, name
          BOT_ASSIGNMENT[name] = (
              python + ' buildbot/buildbot_standard.py ' +
              mode + ' ' + real_arch + ' ' + libc + arch_flags)
        # Trybots
        for arch_sep in ['', '-', '_']:
          name = 'nacl-' + platform + arch_sep + arch + '_' + libc + '_' + mode
          assert name not in BOT_ASSIGNMENT, name
          BOT_ASSIGNMENT[name] = (
              python + ' buildbot/buildbot_standard.py ' +
              mode + ' ' + real_arch + ' ' + libc + arch_flags)
def EscapeJson(data):
  """Serialize |data| as JSON and wrap it in escaped double quotes.

  The result is suitable for passing as a single quoted argument on a
  shell command line (e.g. to runtest.py's --factory-properties).
  """
  serialized = json.dumps(data)
  escaped = serialized.replace('"', r'\"')
  return '"%s"' % escaped
def Main():
  """Look up the command for this builder and execute it.

  The builder name is taken from the BUILDBOT_BUILDERNAME environment
  variable; non-trybot, non-debug builds are wrapped in runtest.py so
  that perf data gets scraped and uploaded.  Exits with the wrapped
  command's return code.  (Python 2 script.)
  """
  builder = os.environ.get('BUILDBOT_BUILDERNAME')
  build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
  slave_type = os.environ.get('BUILDBOT_SLAVE_TYPE')
  cmd = BOT_ASSIGNMENT.get(builder)
  if not cmd:
    sys.stderr.write('ERROR - unset/invalid builder name\n')
    sys.exit(1)

  env = os.environ.copy()

  # Don't write out .pyc files because in cases in which files move around or
  # the PYTHONPATH / sys.path change, old .pyc files can be mistakenly used.
  # This avoids the need for admin changes on the bots in this case.
  env['PYTHONDONTWRITEBYTECODE'] = '1'

  # Use .boto file from home-dir instead of buildbot supplied one.
  if 'AWS_CREDENTIAL_FILE' in env:
    del env['AWS_CREDENTIAL_FILE']
  env['BOTO_CONFIG'] = os.path.expanduser('~/.boto')
  env['GSUTIL'] = '/b/build/third_party/gsutil/gsutil'

  # When running from cygwin, we sometimes want to use a native python.
  # The native python will use the depot_tools version by invoking python.bat.
  if pynacl.platform.IsWindows():
    env['NATIVE_PYTHON'] = 'python.bat'
  else:
    env['NATIVE_PYTHON'] = 'python'

  if sys.platform == 'win32':
    # If the temp directory is not on the same drive as the working directory,
    # there can be random failures when cleaning up temp directories, so use
    # a directory on the current drive. Use __file__ here instead of os.getcwd()
    # because toolchain_main picks its working directories relative to __file__
    filedrive, _ = os.path.splitdrive(__file__)
    tempdrive, _ = os.path.splitdrive(env['TEMP'])
    if tempdrive != filedrive:
      env['TEMP'] = filedrive + '\\temp'
      env['TMP'] = env['TEMP']
      if not os.path.exists(env['TEMP']):
        os.mkdir(env['TEMP'])

  # Run through runtest.py to get upload of perf data.
  build_properties = {
      'buildername': builder,
      'mastername': 'client.nacl',
      'buildnumber': str(build_number),
  }
  factory_properties = {
      'perf_id': builder,
      'show_perf_results': True,
      'step_name': 'naclperf',  # Seems unused, but is required.
      'test_name': 'naclperf',  # Really "Test Suite"
  }
  # Locate the buildbot build directory by relative path, as it's absolute
  # location varies by platform and configuration.
  buildbot_build_dir = os.path.join(* [os.pardir] * 4)
  runtest = os.path.join(buildbot_build_dir, 'scripts', 'slave', 'runtest.py')
  # For builds with an actual build number, require that the script is present
  # (i.e. that we're run from an actual buildbot).
  if build_number is not None and not os.path.exists(runtest):
    raise Exception('runtest.py script not found at: %s\n' % runtest)

  cmd_exe = cmd.split(' ')[0]
  cmd_exe_ext = os.path.splitext(cmd_exe)[1]
  # Do not wrap these types of builds with runtest.py:
  # - tryjobs
  # - commands beginning with 'echo '
  # - batch files
  # - debug builders
  if not (slave_type == 'Trybot' or
          cmd_exe == echo or
          cmd_exe_ext == '.bat' or
          '-dbg' in builder):
    # Perf dashboards are now generated by output scraping that occurs in the
    # script runtest.py, which lives in the buildbot repository.
    # Non-trybot builds should be run through runtest, allowing it to upload
    # perf data if relevant.
    cmd = ' '.join([
        python, runtest,
        '--build-dir=src/out',
        '--results-url=https://chromeperf.appspot.com',
        '--annotate=graphing',
        '--no-xvfb',  # We provide our own xvfb invocation.
        '--factory-properties', EscapeJson(factory_properties),
        '--build-properties', EscapeJson(build_properties),
        cmd,
    ])

  print "%s runs: %s\n" % (builder, cmd)
  retcode = subprocess.call(cmd, env=env, shell=True)
  sys.exit(retcode)
if __name__ == '__main__':
Main()<|fim▁end|> |
BOT_ASSIGNMENT = {
###################################################################### |
<|file_name|>test_analyser.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file contains a class to analyse text in giellatekno xml format
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2013-2014 Børre Gaup <[email protected]>
#
from __future__ import unicode_literals
import os
import unittest
import doctest
from lxml import etree
from lxml import doctestcompare
from corpustools import analyser
from corpustools import parallelize
from corpustools import util
here = os.path.dirname(__file__)
class TestAnalyser(unittest.TestCase):
    # End-to-end tests for corpustools.analyser.Analyser on a small
    # North Sámi sample document, using the miniature xfst/cg3 fixtures
    # stored next to this test module.

    def setUp(self):
        # Wire an analyser to the test transducer/grammars and point it
        # at the sample corpus file.
        self.a = analyser.Analyser(
            'sme',
            'xfst',
            fst_file=os.path.join(here, 'analyser.xfst'),
            disambiguation_analysis_file=os.path.join(here,
                                                      'disambiguation.cg3'),
            function_analysis_file=os.path.join(here, 'functions.cg3'),
            dependency_analysis_file=os.path.join(here, 'dependency.cg3'))
        self.a.xml_file = parallelize.CorpusXMLFile(
            os.path.join(here, 'smefile.xml'))

    def assertXmlEqual(self, got, want):
        """Check if two stringified xml snippets are equal
        """
        checker = doctestcompare.LXMLOutputChecker()
        if not checker.check_output(want, got, 0):
            message = checker.output_difference(
                doctest.Example("", want), got, 0).encode('utf-8')
            raise AssertionError(message)

    def test_raise_on_None_file(self):
        # Missing analysis files must be rejected at construction time.
        with self.assertRaises(TypeError):
            analyser.Analyser('sme', 'xfst', None, None, None, None)

    def test_raise_on_bad_file(self):
        # Nonexistent analysis files must raise util.ArgumentError.
        with self.assertRaises(util.ArgumentError):
            analyser.Analyser('sme',
                              'xfst',
                              fst_file=os.path.join(here, 'notafile'),
                              disambiguation_analysis_file=os.path.join(here,
                                                                        'notafile'),
                              function_analysis_file=os.path.join(here, 'notafile'),
                              dependency_analysis_file=os.path.join(here, 'notafile'))

    def test_sme_ccat_output(self):
        """Test if the ccat output is what we expect it to be
        """
        got = self.a.ccat()
        want = (
            'Muhto gaskkohagaid, ja erenoamážit dalle go lei buolaš, '
            'de aggregáhta billánii. ¶\n')
        self.assertEqual(got, want.encode('utf8'))

    def test_sme_preprocess_output(self):
        """Test if the preprocess output is what we expect it to be
        """
        got = self.a.preprocess()
        want = (
            'Muhto\ngaskkohagaid\n,\nja\nerenoamážit\ndalle go\nlei\n'
            'buolaš\n,\nde\naggregáhta\nbillánii\n.\n¶\n')
        self.assertEqual(got, want.encode('utf8'))

    def test_sme_disambiguation_output(self):
        """Check if disambiguation analysis gives the expected output
        """
        self.a.disambiguation_analysis()
        got = self.a.get_disambiguation()
        want = (
            '"<Muhto>"\n\t"muhto" CC <sme> @CVP\n"<gaskkohagaid>"\n'
            '\t"gaskkohagaid" Adv <sme>\n"<,>"\n\t"," CLB\n"<ja>"\n'
            '\t"ja" CC <sme> @CNP\n"<erenoamážit>"\n'
            '\t"erenoamážit" Adv <sme>\n"<dalle_go>"\n'
            '\t"dalle_go" MWE CS <sme> @CVP\n"<lei>"\n'
            '\t"leat" V <sme> IV Ind Prt Sg3 @+FMAINV\n"<buolaš>"\n'
            '\t"buolaš" Sem/Wthr N <sme> Sg Nom\n"<,>"\n'
            '\t"," CLB\n"<de>"\n\t"de" Adv <sme>\n"<aggregáhta>"\n'
            '\t"aggregáhta" N <sme> Sg Nom\n"<billánii>"\n'
            '\t"billánit" V <sme> IV Ind Prt Sg3 @+FMAINV\n"<.>"\n'
            '\t"." CLB\n\n"<¶>"\n\t"¶" CLB\n\n')
        self.assertEqual(got, want.encode('utf8'))

    def test_sme_dependency_output(self):
        """Check if disambiguation analysis gives the expected output
        """
        self.a.dependency_analysis()
        got = self.a.get_dependency()
        want = (
            '"<Muhto>"\n\t"muhto" CC @CVP #1->1\n"<gaskkohagaid>"\n'
            '\t"gaskkohagaid" Adv @ADVL> #2->12\n"<,>"\n'
            '\t"," CLB #3->4\n"<ja>"\n\t"ja" CC @CNP #4->2\n"<erenoamážit>"\n'
            '\t"erenoamážit" Adv @ADVL> #5->12\n"<dalle_go>"\n'
            '\t"dalle_go" CS @CVP #6->7\n"<lei>"\n'
            '\t"leat" V IV Ind Prt Sg3 @FS-ADVL> #7->12\n"<buolaš>"\n'
            '\t"buolaš" N Sg Nom @<SPRED #8->7\n"<,>"\n'
            '\t"," CLB #9->6\n"<de>"\n'
            '\t"de" Adv @ADVL> #10->12\n"<aggregáhta>"\n'
            '\t"aggregáhta" N Sg Nom @SUBJ> #11->12\n"<billánii>"\n'
            '\t"billánit" V IV Ind Prt Sg3 @FS-ADVL> #12->0\n"<.>"\n'
            '\t"." CLB #13->12\n\n"<¶>"\n\t"¶" CLB #1->1\n\n')
        self.assertEqual(got, want.encode('utf8'))

    def test_analysisXml(self):
        """Check if the xml is what it is supposed to be
        """
        self.a.dependency_analysis()
        self.a.get_analysis_xml()
        got = self.a.xml_file.get_etree()
        want = (
            '<document xml:lang="sme" id="no_id">\n'
            '  <header>\n'
            '    <title>Internáhtta sosiálalaš giliguovddážin</title>\n'
            '    <genre code="facta"/>\n'
            '    <author>\n'
            '      <person firstname="Abba" lastname="Abbamar" sex="m" '
            'born="1900" nationality="nor"/>\n'
            '    </author>\n'
            '    <translator>\n'
            '      <person firstname="Ibba" lastname="Ibbamar" sex="unknown" '
            'born="" nationality=""/>\n'
            '    </translator>\n'
            '    <translated_from xml:lang="nob"/>\n'
            '    <year>2005</year>\n'
            '    <publChannel>\n'
            '      <publication>\n'
            '        <publisher>Almmuheaddji OS</publisher>\n'
            '      </publication>\n'
            '    </publChannel>\n'
            '    <wordcount>10</wordcount>\n'
            '    <availability>\n'
            '      <free/>\n'
            '    </availability>\n'
            '    <submitter name="Børre Gaup" '
            'email="[email protected]"/>\n'
            '    <multilingual>\n'
            '      <language xml:lang="nob"/>\n'
            '    </multilingual>\n'
            '    <origFileName>aarseth_s.htm</origFileName>\n'
            '    <metadata>\n'
            '      <uncomplete/>\n'
            '    </metadata>\n'
            '    <version>XSLtemplate 1.9 ; file-specific xsl '
            '$Revision: 1.3 $; common.xsl $Revision$; </version>\n'
            '  </header>\n'
            '  <body><disambiguation><![CDATA["<Muhto>"\n'
            '\t"muhto" CC <sme> @CVP\n"<gaskkohagaid>"\n'
            '\t"gaskkohagaid" Adv <sme>\n"<,>"\n\t"," CLB\n"<ja>"\n'
            '\t"ja" CC <sme> @CNP\n"<erenoamážit>"\n'
            '\t"erenoamážit" Adv <sme>\n"<dalle_go>"\n'
            '\t"dalle_go" MWE CS <sme> @CVP\n"<lei>"\n'
            '\t"leat" V <sme> IV Ind Prt Sg3 @+FMAINV\n"<buolaš>"\n'
            '\t"buolaš" Sem/Wthr N <sme> Sg Nom\n"<,>"\n'
            '\t"," CLB\n"<de>"\n\t"de" Adv <sme>\n"<aggregáhta>"\n'
            '\t"aggregáhta" N <sme> Sg Nom\n"<billánii>"\n'
            '\t"billánit" V <sme> IV Ind Prt Sg3 @+FMAINV\n"<.>"\n'
            '\t"." CLB\n\n"<¶>"\n\t"¶" CLB\n\n]]></disambiguation>'
            '<dependency><![CDATA["<Muhto>"\n'
            '\t"muhto" CC @CVP #1->1\n"<gaskkohagaid>"\n'
            '\t"gaskkohagaid" Adv @ADVL> #2->12\n"<,>"\n'
            '\t"," CLB #3->4\n"<ja>"\n\t"ja" CC @CNP #4->2\n"<erenoamážit>"\n'
            '\t"erenoamážit" Adv @ADVL> #5->12\n"<dalle_go>"\n'
            '\t"dalle_go" CS @CVP #6->7\n"<lei>"\n'
            '\t"leat" V IV Ind Prt Sg3 @FS-ADVL> #7->12\n"<buolaš>"\n'
            '\t"buolaš" N Sg Nom @<SPRED #8->7\n"<,>"\n'
            '\t"," CLB #9->6\n"<de>"\n'
            '\t"de" Adv @ADVL> #10->12\n"<aggregáhta>"\n'
            '\t"aggregáhta" N Sg Nom @SUBJ> #11->12\n"<billánii>"\n'
            '\t"billánit" V IV Ind Prt Sg3 @FS-ADVL> #12->0\n"<.>"\n'
            '\t"." CLB #13->12\n\n"<¶>"\n'
            '\t"¶" CLB #1->1\n\n]]></dependency></body></document>')
        self.maxDiff = None
        self.assertEqual(etree.tostring(got, encoding='unicode'), want)
<|file_name|>init.py<|end_file_name|><|fim▁begin|>"""
Created on Thu May 05 20:02:00 2011
@author: Tillsten
"""
import numpy as np
from scipy.linalg import qr
eps = np.finfo(float).eps
def mls(B, v, umin, umax, Wv=None, Wu=None, ud=None, u=None, W=None, imax=100):
"""
mls - Control allocation using minimal least squares.
[u,W,iter] = mls_alloc(B,v,umin,umax,[Wv,Wu,ud,u0,W0,imax])
Solves the bounded sequential least-squares problem
min ||Wu(u-ud)|| subj. to u in M
where M is the set of control signals solving
min ||Wv(Bu-v)|| subj. to umin <= u <= umax
using a two stage active set method. Wu must be diagonal since the
problem is reformulated as a minimal least squares problem. The
<|fim▁hole|> B control effectiveness matrix (k x m)
v commanded virtual control (k x 1)
umin lower position limits (m x 1)
umax upper position limits (m x 1)
Wv virtual control weighting matrix (k x k) [I]
Wu control weighting matrix (m x m), diagonal [I]
ud desired control (m x 1) [0]
u0 initial point (m x 1)
W0 initial working set (m x 1) [empty]
imax max no. of iterations [100]
Outputs:
-------
u optimal control
W optimal active set
iter no. of iterations (= no. of changes in the working set + 1)
0 if u_i not saturated
Active set syntax: W_i = -1 if u_i = umin_i
+1 if u_i = umax_i
Directly Based on the code from:
Ola Harkegard, www.control.isy.liu.se/~ola
see licsence.
"""
#k = number of virtual controls
#m = number of variables (actuators)
k, m = B.shape
if u == None:
u = np.mean(umin + umax, 0)[:, None]
if W == None:
W = np.zeros((m, 1))
if ud == None:
ud = np.zeros((m, 1))
if Wu == None:
Wu = np.eye(m)
if Wv == None:
Wv = np.eye(k)
phase = 1
#Reformulate as a minimal least squares problem. See 2002-03-08 (1).
A = Wv.dot(B).dot(np.linalg.pinv(Wu))
b = Wv.dot(v - B.dot(ud))
xmin = (umin - ud).flatten()
xmax = (umax - ud).flatten()
# Compute initial point and residual.
x = Wu.dot(u - ud)
r = np.atleast_2d(A.dot(x) - b)
#Determine indeces of free variables
i_free = (W == 0).flatten()
m_free = np.sum(i_free)
for i in range(imax):
#print 'Iter: ', i
if phase == 1:
A_free = A[:, i_free]
if m_free <= k:
if m_free > 0:
p_free = np.linalg.lstsq(-A_free, r)[0]
else:
q1, r1 = qr(A_free.T)
p_free = -q1.dot(np.solve(r1.T, r))
p = np.zeros((m, 1))
if A.shape[1] > 1:
p[i_free] = p_free
else:
p[i_free] = p_free.flatten()
else:
i_fixed = np.logical_not(i_free)
m_fixed = m - m_free
if m_fixed > 0:
HT = U[i_fixed.squeeze(), :].T
V, Rtot = qr(np.atleast_2d(HT))
V1 = V[:, :m_fixed]
V2 = V[:, m_fixed + 1:]
R = Rtot[:, m_fixed]
else:
V, Rtot = np.array([[]]), np.array([[]])
V1 = V2 = R = V.T
s = -V2.T.dot(z)
pz = V2.dot(s)
p = U.dot(pz)
x_opt = x + p
infeasible = np.logical_or(x_opt < xmin, x_opt > xmax)
if not np.any(infeasible[i_free]):
x = x_opt
if phase == 1:
r = r + A.dot(p)
else:
z = z + pz
if phase == 1 and m_free >= k:
phase = 2
Utot, Stot = qr(A.T)
U = Utot[:, k:]
z = U.T.dot(x)
else:
lam = np.zeros((m, 1))
if m_free < m:
if phase == 1:
g = A.T.dot(r)
lam = -W * g
else:
lam[i_fixed] = -W[i_fixed] * np.linalg.solve(R, V1.T.dot(z))
if np.all(lam >= -eps):
u = np.linalg.solve(Wu, x) + ud
return u
lambda_neg, i_neg = np.min(lam), np.argmin(lam)
W[i_neg] = 0
i_free[i_neg] = True
m_free += 1
else:
dist = np.ones(m)
i_min = np.logical_and(i_free, p.flat < 0).flatten()
i_max = np.logical_and(i_free, p.flat > 0).flatten()
dist[i_min] = (xmin[i_min] - x[i_min]) / p[i_min]
dist[i_max] = (xmax[i_max] - x[i_max]) / p[i_max]
alpha, i_alpha = np.min(dist), np.argmin(dist)
x = x + alpha * p
if phase == 1:
r = r + A.dot(alpha * p) #!!
else:
z = z + alpha * pz
W[i_alpha] = np.sign(p[i_alpha])
if i_free[i_alpha]:
i_free[i_alpha] = False
m_free -= 1
u = np.linalg.solve(Wu, x) + ud
return u
def bounded_lsq(A, b, lower_lim, upper_lim):
    """
    Minimizes:
    |Ax-b|_2

    for lower_lim<x<upper_lim.

    Thin convenience wrapper around mls() with default weights
    (identity Wu/Wv, zero desired control).

    A : (k, m) array, b : (k, 1) array,
    lower_lim / upper_lim : (m, 1) arrays of box constraints.
    Returns the (m, 1) constrained least-squares solution.
    """
    return mls(A, b, lower_lim, upper_lim)
def test_bounded_lsq():
    """Sanity-check bounded_lsq on two problems with known solutions."""
    # BUGFIX: removed unused ``from numpy.core.umath_tests import
    # matrix_multiply`` -- it was never referenced here and the module no
    # longer exists in modern numpy, so the import itself failed.
    # 1-D problem: the unconstrained optimum (16) exceeds the upper limit,
    # so the solution must saturate at 15.
    s = np.linspace(0, 10, 100)
    A = np.exp(-((s - 5) ** 2) / 20)
    A = A[:, None]
    b = 16 * A
    x = bounded_lsq(A, b, np.atleast_2d(0), np.atleast_2d(15))
    np.testing.assert_almost_equal(x, 15)
    # 2x2 problem with a known box-constrained optimum.
    A = np.array([[1, -3], [5, 7]])
    b = np.array([[-50], [50]])
    ll = np.array(([[-10], [-10]]))
    ul = np.array(([[10], [10]]))
    x0 = bounded_lsq(A, b, ll, ul)
    np.testing.assert_array_almost_equal(x0, np.array([[-4.61538462], [10.]]))
if __name__ == '__main__':
    # Demo / visual check: run the unit tests, then plot the bounded
    # solutions of a 1-D and a 2-D problem (requires matplotlib).
    from numpy.core.umath_tests import matrix_multiply
    import matplotlib.pyplot as plt
    test_bounded_lsq()
    # 1-D problem: fit a scaled Gaussian with the solution clipped at 4.
    s = np.linspace(0, 10, 100)
    A = np.exp(-((s - 5) ** 2) / 20)
    A = A[:, None]
    b = 16 * A
    x = bounded_lsq(A, b, np.atleast_2d(0), np.atleast_2d(4))
    plt.plot(A.dot(x))
    plt.plot(b)
    plt.figure()
    plt.rcParams['font.family'] = 'serif'
    # 2-D problem: draw the residual-norm surface and mark both the
    # box-constrained minimum and the unconstrained least-squares minimum.
    A = np.array([[1, -3], [5, 7]])
    b = np.array([[-50], [50]])
    ll = np.array(([[-10], [-10]]))
    ul = np.array(([[10], [10]]))
    Ud = np.array(([0, 0]))
    gamma = 1000
    x0 = bounded_lsq(A, b, ll, ul)
    x = np.linspace(-30, 30, 500)
    y = np.linspace(-30, 30, 500)
    X, Y = np.meshgrid(x, y)
    S = np.dstack((X, Y))
    # ||A s - b|| evaluated on the whole grid at once.
    SN = matrix_multiply(S, A.T)
    plt.clf()
    plt.contourf(x, y, np.sqrt(((SN - b.T) ** 2).sum(-1)), 30,
                 cmap=plt.cm.PuBu_r)
    plt.colorbar()
    #plt.axhline(ll[0])
    #plt.axhline(ul[0])
    #plt.axvline(ll[1])
    #plt.axvline(ul[1])
    # Outline the feasible box.
    rect = np.vstack((ll, ul - ll))
    patch = plt.Rectangle(ll, *(ul - ll), facecolor=(0.0, 0., 0., 0))
    plt.gca().add_patch(patch)
    plt.annotate("Bounded Min",
                 xy=x0, xycoords='data',
                 xytext=(-5, 5), textcoords='data',
                 arrowprops=dict(arrowstyle="->",
                                 connectionstyle="arc3"),
                 )
    plt.annotate("Lsq Min",
                 xy=np.linalg.lstsq(A, b)[0], xycoords='data',
                 xytext=(20, 10), textcoords='offset points',
                 arrowprops=dict(arrowstyle="->",
                                 connectionstyle="arc3"),
                 )
    plt.scatter(*x0)
    plt.scatter(*np.linalg.lstsq(A, b)[0])
    plt.show()
Inputs:
-------
|
<|file_name|>compareWithPreviousOutputs.check.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2016 Amadeus s.a.s.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"use strict";
const co = require("co");
const path = require("path");
const assertFilesEqual = require("../helpers/assertFilesEqual");
const exec = require("../helpers/exec");
/**
 * Registers one mocha test case per at-diff output file: each case
 * re-runs `at-diff reformat`/`reserialize` on the file with
 * deterministic output and checks the result against the committed
 * expected output.
 *
 * @param {Object} results - must expose `outDir`, the directory holding
 *   the files produced by the previous at-diff run.
 */
module.exports = function (results) {
    const outDir = results.outDir;
    const atDiffExecutable = require.resolve("../../bin/at-diff");
    const filesToCompare = [
        // The .json extension is automatically added
        "version1.parse",
        "version2.parse",
        "user.parse",
        "at.parse",
        "version1to2.diff",
        "filteredVersion1to2.diff",
        "impactsOnUser.diff",
        "filteredImpactsOnUser.diff"
    ];
    filesToCompare.forEach((fileName) => {
        const nonDeterministicFileName = `${fileName}.json`;
        it(nonDeterministicFileName, co.wrap(function *() {
            this.timeout(10000);
            // .parse files go through "reformat", .diff files through "reserialize".
            const transformCommand = /\.parse$/.test(fileName) ? "reformat" : "reserialize";
            const deterministicFileName = `${fileName}.deterministic.json`;
            yield exec(atDiffExecutable, [transformCommand, nonDeterministicFileName, "--json-output", deterministicFileName, "--deterministic-output", "--json-beautify"], {
                cwd: outDir
            });
            // The deterministic rewrite must match the committed expectation.
            yield assertFilesEqual(path.join(outDir, deterministicFileName), path.join(__dirname, "..", "expected-output", deterministicFileName));
        }));
    });
};
<|file_name|>4d8d18f311aa_add_deleted_bool_to_.py<|end_file_name|><|fim▁begin|>"""add deleted bool to things
Revision ID: 4d8d18f311aa
Revises: 4e34b4290fbc
Create Date: 2019-02-27 21:31:09.575521
"""
# revision identifiers, used by Alembic.
revision = '4d8d18f311aa'
down_revision = '4e34b4290fbc'
from alembic import op
import sqlalchemy as sa
<|fim▁hole|>def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('content_musicplaylist', sa.Column('deleted', sa.Boolean(), nullable=True))
op.add_column('content_podcast', sa.Column('deleted', sa.Boolean(), nullable=True))
op.add_column('content_stream', sa.Column('deleted', sa.Boolean(), nullable=True))
op.add_column('content_track', sa.Column('deleted', sa.Boolean(), nullable=True))
op.add_column('radio_person', sa.Column('deleted', sa.Boolean(), nullable=True))
op.add_column('radio_program', sa.Column('deleted', sa.Boolean(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('radio_program', 'deleted')
op.drop_column('radio_person', 'deleted')
op.drop_column('content_track', 'deleted')
op.drop_column('content_stream', 'deleted')
op.drop_column('content_podcast', 'deleted')
op.drop_column('content_musicplaylist', 'deleted')
### end Alembic commands ###<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# ChatterBot documentation build configuration file, created by
# sphinx-quickstart on Mon May 9 14:38:54 2016.
import sys
import os
import sphinx_rtd_theme
from datetime import datetime
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its version is used.
current_directory = os.path.dirname(os.path.abspath(__file__))
parent_directory = os.path.abspath(os.path.join(current_directory, os.pardir))
sys.path.insert(0, parent_directory)
import chatterbot
# -- General configuration ------------------------------------------------
# Add any Sphinx extension module names here, as strings.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosectionlabel',
'sphinx.ext.coverage',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.mathjax',
'sphinx.ext.todo',
'sphinx.ext.viewcode'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
source_suffix = ['.rst', '.md']
# The encoding of source files<|fim▁hole|># The master toctree document
master_doc = 'index'
# General information about the project
project = 'ChatterBot'
copyright = '{}, {}'.format(datetime.now().year, chatterbot.__author__)
author = chatterbot.__author__
# The short X.Y version
version = chatterbot.__version__
# The full version, including alpha/beta/rc tags
release = chatterbot.__version__
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# If true, '()' will be appended to :func: etc. cross-reference text
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::)
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use
pygments_style = 'sphinx'
# -- Options for HTML output ----------------------------------------------
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'logo_only': True
}
html_show_sourcelink = False
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '../graphics/banner.png'
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = '_static/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#html_last_updated_fmt = None
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
html_search_language = 'en'
# Output file base name for HTML help builder
htmlhelp_basename = 'ChatterBotdoc'
# Read the docs theme modifications
html_context = {
'extra_css_files': [
'_static/style.css'
]
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class])
latex_documents = [
(master_doc, 'ChatterBot.tex', u'ChatterBot Documentation',
u'Gunther Cox', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section)
man_pages = [
(master_doc, 'chatterbot', u'ChatterBot Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ChatterBot', u'ChatterBot Documentation',
author, 'ChatterBot', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# A list of files that should not be packed into the epub file
epub_exclude_files = ['search.html']
# Example configuration for intersphinx: refer to the Python standard library
intersphinx_mapping = {'https://docs.python.org/': None}<|fim▁end|> | #source_encoding = 'utf-8-sig'
|
<|file_name|>base.js<|end_file_name|><|fim▁begin|>define([
'domReady',
'jquery',
'underscore',
'gettext',
'common/js/components/views/feedback_notification',
'common/js/components/views/feedback_prompt',
'js/utils/date_utils',
'js/utils/module',
'js/utils/handle_iframe_binding',
'edx-ui-toolkit/js/dropdown-menu/dropdown-menu-view',
'jquery.ui',
'jquery.leanModal',
'jquery.form',
'jquery.smoothScroll'
],
function(
domReady,
$,
_,
gettext,
NotificationView,
PromptView,
DateUtils,
ModuleUtils,
IframeUtils,
DropdownMenuView
) {
'use strict';
var $body;
function smoothScrollLink(e) {
(e).preventDefault();
$.smoothScroll({
offset: -200,<|fim▁hole|> easing: 'swing',
speed: 1000,
scrollElement: null,
scrollTarget: $(this).attr('href')
});
}
function hideNotification(e) {
(e).preventDefault();
$(this)
.closest('.wrapper-notification')
.removeClass('is-shown')
.addClass('is-hiding')
.attr('aria-hidden', 'true');
}
function hideAlert(e) {
(e).preventDefault();
$(this).closest('.wrapper-alert').removeClass('is-shown');
}
domReady(function() {
var dropdownMenuView;
$body = $('body');
$body.on('click', '.embeddable-xml-input', function() {
$(this).select();
});
$body.addClass('js');
// alerts/notifications - manual close
$('.action-alert-close, .alert.has-actions .nav-actions a').bind('click', hideAlert);
$('.action-notification-close').bind('click', hideNotification);
// nav - dropdown related
$body.click(function() {
$('.nav-dd .nav-item .wrapper-nav-sub').removeClass('is-shown');
$('.nav-dd .nav-item .title').removeClass('is-selected');
});
$('.nav-dd .nav-item, .filterable-column .nav-item').click(function(e) {
var $subnav = $(this).find('.wrapper-nav-sub'),
$title = $(this).find('.title');
if ($subnav.hasClass('is-shown')) {
$subnav.removeClass('is-shown');
$title.removeClass('is-selected');
} else {
$('.nav-dd .nav-item .title').removeClass('is-selected');
$('.nav-dd .nav-item .wrapper-nav-sub').removeClass('is-shown');
$title.addClass('is-selected');
$subnav.addClass('is-shown');
// if propagation is not stopped, the event will bubble up to the
// body element, which will close the dropdown.
e.stopPropagation();
}
});
// general link management - new window/tab
$('a[rel="external"]:not([title])')
.attr('title', gettext('This link will open in a new browser window/tab'));
$('a[rel="external"]').attr('target', '_blank');
// general link management - lean modal window
$('a[rel="modal"]').attr('title', gettext('This link will open in a modal window')).leanModal({
overlay: 0.50,
closeButton: '.action-modal-close'
});
$('.action-modal-close').click(function(e) {
(e).preventDefault();
});
// general link management - smooth scrolling page links
$('a[rel*="view"][href^="#"]').bind('click', smoothScrollLink);
IframeUtils.iframeBinding();
// disable ajax caching in IE so that backbone fetches work
if ($.browser.msie) {
$.ajaxSetup({cache: false});
}
// Initiate the edx tool kit dropdown menu
if ($('.js-header-user-menu').length) {
dropdownMenuView = new DropdownMenuView({
el: '.js-header-user-menu'
});
dropdownMenuView.postRender();
}
window.studioNavMenuActive = true;
});
}); // end require()<|fim▁end|> | |
<|file_name|>config.py<|end_file_name|><|fim▁begin|><|fim▁hole|># you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
from oslo_config import cfg, types
from st2common import log as logging
import st2common.config as common_config
from st2common.constants.system import DEFAULT_CONFIG_FILE_PATH
from st2common.constants.garbage_collection import DEFAULT_COLLECTION_INTERVAL
from st2common.constants.garbage_collection import DEFAULT_SLEEP_DELAY
from st2common.constants.sensors import DEFAULT_PARTITION_LOADER
from st2tests.fixturesloader import get_fixtures_packs_base_path
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
# Ued for tests. For majority of tests, we want this value to be False.
USE_DEFAULT_CONFIG_FILES = False
def reset():
cfg.CONF.reset()
def parse_args(args=None, coordinator_noop=True):
_setup_config_opts(coordinator_noop=coordinator_noop)
kwargs = {}
if USE_DEFAULT_CONFIG_FILES:
kwargs["default_config_files"] = [DEFAULT_CONFIG_FILE_PATH]
cfg.CONF(args=args or [], **kwargs)
def _setup_config_opts(coordinator_noop=True):
reset()
try:
_register_config_opts()
except Exception as e:
print(e)
# Some scripts register the options themselves which means registering them again will
# cause a non-fatal exception
return
_override_config_opts(coordinator_noop=coordinator_noop)
def _override_config_opts(coordinator_noop=False):
_override_db_opts()
_override_common_opts()
_override_api_opts()
_override_keyvalue_opts()
_override_scheduler_opts()
_override_workflow_engine_opts()
_override_coordinator_opts(noop=coordinator_noop)
def _register_config_opts():
_register_common_opts()
_register_api_opts()
_register_stream_opts()
_register_auth_opts()
_register_action_sensor_opts()
_register_ssh_runner_opts()
_register_scheduler_opts()
_register_exporter_opts()
_register_sensor_container_opts()
_register_garbage_collector_opts()
def _override_db_opts():
CONF.set_override(name="db_name", override="st2-test", group="database")
CONF.set_override(name="host", override="127.0.0.1", group="database")
def _override_common_opts():
packs_base_path = get_fixtures_packs_base_path()
CONF.set_override(name="base_path", override=packs_base_path, group="system")
CONF.set_override(name="validate_output_schema", override=True, group="system")
CONF.set_override(
name="system_packs_base_path", override=packs_base_path, group="content"
)
CONF.set_override(
name="packs_base_paths", override=packs_base_path, group="content"
)
CONF.set_override(name="api_url", override="http://127.0.0.1", group="auth")
CONF.set_override(name="mask_secrets", override=True, group="log")
CONF.set_override(name="stream_output", override=False, group="actionrunner")
def _override_api_opts():
CONF.set_override(
name="allow_origin",
override=["http://127.0.0.1:3000", "http://dev"],
group="api",
)
def _override_keyvalue_opts():
current_file_path = os.path.dirname(__file__)
rel_st2_base_path = os.path.join(current_file_path, "../..")
abs_st2_base_path = os.path.abspath(rel_st2_base_path)
rel_enc_key_path = "st2tests/conf/st2_kvstore_tests.crypto.key.json"
ovr_enc_key_path = os.path.join(abs_st2_base_path, rel_enc_key_path)
CONF.set_override(
name="encryption_key_path", override=ovr_enc_key_path, group="keyvalue"
)
def _override_scheduler_opts():
CONF.set_override(name="sleep_interval", group="scheduler", override=0.01)
def _override_coordinator_opts(noop=False):
driver = None if noop else "zake://"
CONF.set_override(name="url", override=driver, group="coordination")
CONF.set_override(name="lock_timeout", override=1, group="coordination")
def _override_workflow_engine_opts():
cfg.CONF.set_override("retry_stop_max_msec", 500, group="workflow_engine")
cfg.CONF.set_override("retry_wait_fixed_msec", 100, group="workflow_engine")
cfg.CONF.set_override("retry_max_jitter_msec", 100, group="workflow_engine")
cfg.CONF.set_override("gc_max_idle_sec", 1, group="workflow_engine")
def _register_common_opts():
try:
common_config.register_opts(ignore_errors=True)
except:
LOG.exception("Common config registration failed.")
def _register_api_opts():
# XXX: note : template_path value only works if started from the top-level of the codebase.
# Brittle!
pecan_opts = [
cfg.StrOpt(
"root",
default="st2api.controllers.root.RootController",
help="Pecan root controller",
),
cfg.StrOpt("template_path", default="%(confdir)s/st2api/st2api/templates"),
cfg.ListOpt("modules", default=["st2api"]),
cfg.BoolOpt("debug", default=True),
cfg.BoolOpt("auth_enable", default=True),
cfg.DictOpt("errors", default={404: "/error/404", "__force_dict__": True}),
]
_register_opts(pecan_opts, group="api_pecan")
api_opts = [
cfg.BoolOpt("debug", default=True),
cfg.IntOpt(
"max_page_size",
default=100,
help="Maximum limit (page size) argument which can be specified by the user in a query "
"string. If a larger value is provided, it will default to this value.",
),
]
_register_opts(api_opts, group="api")
messaging_opts = [
cfg.StrOpt(
"url",
default="amqp://guest:[email protected]:5672//",
help="URL of the messaging server.",
),
cfg.ListOpt(
"cluster_urls",
default=[],
help="URL of all the nodes in a messaging service cluster.",
),
cfg.IntOpt(
"connection_retries",
default=10,
help="How many times should we retry connection before failing.",
),
cfg.IntOpt(
"connection_retry_wait",
default=10000,
help="How long should we wait between connection retries.",
),
cfg.BoolOpt(
"ssl",
default=False,
help="Use SSL / TLS to connect to the messaging server. Same as "
'appending "?ssl=true" at the end of the connection URL string.',
),
cfg.StrOpt(
"ssl_keyfile",
default=None,
help="Private keyfile used to identify the local connection against RabbitMQ.",
),
cfg.StrOpt(
"ssl_certfile",
default=None,
help="Certificate file used to identify the local connection (client).",
),
cfg.StrOpt(
"ssl_cert_reqs",
default=None,
choices="none, optional, required",
help="Specifies whether a certificate is required from the other side of the "
"connection, and whether it will be validated if provided.",
),
cfg.StrOpt(
"ssl_ca_certs",
default=None,
help="ca_certs file contains a set of concatenated CA certificates, which are "
"used to validate certificates passed from RabbitMQ.",
),
cfg.StrOpt(
"login_method",
default=None,
help="Login method to use (AMQPLAIN, PLAIN, EXTERNAL, etc.).",
),
]
_register_opts(messaging_opts, group="messaging")
ssh_runner_opts = [
cfg.StrOpt(
"remote_dir",
default="/tmp",
help="Location of the script on the remote filesystem.",
),
cfg.BoolOpt(
"allow_partial_failure",
default=False,
help="How partial success of actions run on multiple nodes should be treated.",
),
cfg.BoolOpt(
"use_ssh_config",
default=False,
help="Use the .ssh/config file. Useful to override ports etc.",
),
]
_register_opts(ssh_runner_opts, group="ssh_runner")
def _register_stream_opts():
stream_opts = [
cfg.IntOpt(
"heartbeat",
default=25,
help="Send empty message every N seconds to keep connection open",
),
cfg.BoolOpt("debug", default=False, help="Specify to enable debug mode."),
]
_register_opts(stream_opts, group="stream")
def _register_auth_opts():
auth_opts = [
cfg.StrOpt("host", default="127.0.0.1"),
cfg.IntOpt("port", default=9100),
cfg.BoolOpt("use_ssl", default=False),
cfg.StrOpt("mode", default="proxy"),
cfg.StrOpt("backend", default="flat_file"),
cfg.StrOpt("backend_kwargs", default=None),
cfg.StrOpt("logging", default="conf/logging.conf"),
cfg.IntOpt("token_ttl", default=86400, help="Access token ttl in seconds."),
cfg.BoolOpt("sso", default=True),
cfg.StrOpt("sso_backend", default="noop"),
cfg.StrOpt("sso_backend_kwargs", default=None),
cfg.BoolOpt("debug", default=True),
]
_register_opts(auth_opts, group="auth")
def _register_action_sensor_opts():
action_sensor_opts = [
cfg.BoolOpt(
"enable",
default=True,
help="Whether to enable or disable the ability to post a trigger on action.",
),
cfg.StrOpt(
"triggers_base_url",
default="http://127.0.0.1:9101/v1/triggertypes/",
help="URL for action sensor to post TriggerType.",
),
cfg.IntOpt(
"request_timeout",
default=1,
help="Timeout value of all httprequests made by action sensor.",
),
cfg.IntOpt(
"max_attempts", default=10, help="No. of times to retry registration."
),
cfg.IntOpt(
"retry_wait",
default=1,
help="Amount of time to wait prior to retrying a request.",
),
]
_register_opts(action_sensor_opts, group="action_sensor")
def _register_ssh_runner_opts():
ssh_runner_opts = [
cfg.BoolOpt(
"use_ssh_config",
default=False,
help="Use the .ssh/config file. Useful to override ports etc.",
),
cfg.StrOpt(
"remote_dir",
default="/tmp",
help="Location of the script on the remote filesystem.",
),
cfg.BoolOpt(
"allow_partial_failure",
default=False,
help="How partial success of actions run on multiple nodes should be treated.",
),
cfg.IntOpt(
"max_parallel_actions",
default=50,
help="Max number of parallel remote SSH actions that should be run. "
"Works only with Paramiko SSH runner.",
),
]
_register_opts(ssh_runner_opts, group="ssh_runner")
def _register_scheduler_opts():
scheduler_opts = [
cfg.FloatOpt(
"execution_scheduling_timeout_threshold_min",
default=1,
help="How long GC to search back in minutes for orphaned scheduled actions",
),
cfg.IntOpt(
"pool_size",
default=10,
help="The size of the pool used by the scheduler for scheduling executions.",
),
cfg.FloatOpt(
"sleep_interval",
default=0.01,
help="How long to sleep between each action scheduler main loop run interval (in ms).",
),
cfg.FloatOpt(
"gc_interval",
default=5,
help="How often to look for zombie executions before rescheduling them (in ms).",
),
cfg.IntOpt(
"retry_max_attempt",
default=3,
help="The maximum number of attempts that the scheduler retries on error.",
),
cfg.IntOpt(
"retry_wait_msec",
default=100,
help="The number of milliseconds to wait in between retries.",
),
]
_register_opts(scheduler_opts, group="scheduler")
def _register_exporter_opts():
exporter_opts = [
cfg.StrOpt(
"dump_dir",
default="/opt/stackstorm/exports/",
help="Directory to dump data to.",
)
]
_register_opts(exporter_opts, group="exporter")
def _register_sensor_container_opts():
partition_opts = [
cfg.StrOpt(
"sensor_node_name", default="sensornode1", help="name of the sensor node."
),
cfg.Opt(
"partition_provider",
type=types.Dict(value_type=types.String()),
default={"name": DEFAULT_PARTITION_LOADER},
help="Provider of sensor node partition config.",
),
]
_register_opts(partition_opts, group="sensorcontainer")
# Other options
other_opts = [
cfg.BoolOpt(
"single_sensor_mode",
default=False,
help="Run in a single sensor mode where parent process exits when a sensor crashes / "
"dies. This is useful in environments where partitioning, sensor process life "
"cycle and failover is handled by a 3rd party service such as kubernetes.",
)
]
_register_opts(other_opts, group="sensorcontainer")
# CLI options
cli_opts = [
cfg.StrOpt(
"sensor-ref",
help="Only run sensor with the provided reference. Value is of the form "
"<pack>.<sensor-name> (e.g. linux.FileWatchSensor).",
),
cfg.BoolOpt(
"single-sensor-mode",
default=False,
help="Run in a single sensor mode where parent process exits when a sensor crashes / "
"dies. This is useful in environments where partitioning, sensor process life "
"cycle and failover is handled by a 3rd party service such as kubernetes.",
),
]
_register_cli_opts(cli_opts)
def _register_garbage_collector_opts():
common_opts = [
cfg.IntOpt(
"collection_interval",
default=DEFAULT_COLLECTION_INTERVAL,
help="How often to check database for old data and perform garbage collection.",
),
cfg.FloatOpt(
"sleep_delay",
default=DEFAULT_SLEEP_DELAY,
help="How long to wait / sleep (in seconds) between "
"collection of different object types.",
),
]
_register_opts(common_opts, group="garbagecollector")
ttl_opts = [
cfg.IntOpt(
"action_executions_ttl",
default=None,
help="Action executions and related objects (live actions, action output "
"objects) older than this value (days) will be automatically deleted.",
),
cfg.IntOpt(
"action_executions_output_ttl",
default=7,
help="Action execution output objects (ones generated by action output "
"streaming) older than this value (days) will be automatically deleted.",
),
cfg.IntOpt(
"trigger_instances_ttl",
default=None,
help="Trigger instances older than this value (days) will be automatically deleted.",
),
]
_register_opts(ttl_opts, group="garbagecollector")
inquiry_opts = [
cfg.BoolOpt(
"purge_inquiries",
default=False,
help="Set to True to perform garbage collection on Inquiries (based on "
"the TTL value per Inquiry)",
)
]
_register_opts(inquiry_opts, group="garbagecollector")
def _register_opts(opts, group=None):
CONF.register_opts(opts, group)
def _register_cli_opts(opts):
cfg.CONF.register_cli_opts(opts)<|fim▁end|> | # Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); |
<|file_name|>simple_api_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "tools/json_schema_compiler/test/simple_api.h"
#include "testing/gtest/include/gtest/gtest.h"
using namespace test::api::simple_api;
namespace {
static scoped_ptr<base::DictionaryValue> CreateTestTypeDictionary() {
scoped_ptr<base::DictionaryValue> value(new base::DictionaryValue());
value->SetWithoutPathExpansion("number", new base::FundamentalValue(1.1));
value->SetWithoutPathExpansion("integer", new base::FundamentalValue(4));
value->SetWithoutPathExpansion("string", new base::StringValue("bling"));
value->SetWithoutPathExpansion("boolean", new base::FundamentalValue(true));
return value.Pass();
}
} // namespace
TEST(JsonSchemaCompilerSimpleTest, IncrementIntegerResultCreate) {
scoped_ptr<base::ListValue> results = IncrementInteger::Results::Create(5);
base::ListValue expected;
expected.Append(new base::FundamentalValue(5));
EXPECT_TRUE(results->Equals(&expected));
}
TEST(JsonSchemaCompilerSimpleTest, IncrementIntegerParamsCreate) {
scoped_ptr<base::ListValue> params_value(new base::ListValue());
params_value->Append(new base::FundamentalValue(6));
scoped_ptr<IncrementInteger::Params> params(
IncrementInteger::Params::Create(*params_value));
EXPECT_TRUE(params.get());
EXPECT_EQ(6, params->num);
}
TEST(JsonSchemaCompilerSimpleTest, NumberOfParams) {
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
params_value->Append(new base::StringValue("text"));
params_value->Append(new base::StringValue("text"));
scoped_ptr<OptionalString::Params> params(
OptionalString::Params::Create(*params_value));
EXPECT_FALSE(params.get());
}
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
scoped_ptr<IncrementInteger::Params> params(
IncrementInteger::Params::Create(*params_value));
EXPECT_FALSE(params.get());
}
}
TEST(JsonSchemaCompilerSimpleTest, OptionalStringParamsCreate) {
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
scoped_ptr<OptionalString::Params> params(
OptionalString::Params::Create(*params_value));
EXPECT_TRUE(params.get());
EXPECT_FALSE(params->str.get());
}
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
params_value->Append(new base::StringValue("asdf"));
scoped_ptr<OptionalString::Params> params(
OptionalString::Params::Create(*params_value));
EXPECT_TRUE(params.get());
EXPECT_TRUE(params->str.get());
EXPECT_EQ("asdf", *params->str);
}
}
TEST(JsonSchemaCompilerSimpleTest, OptionalParamsTakingNull) {
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
params_value->Append(base::Value::CreateNullValue());
scoped_ptr<OptionalString::Params> params(
OptionalString::Params::Create(*params_value));
EXPECT_TRUE(params.get());
EXPECT_FALSE(params->str.get());
}
}
TEST(JsonSchemaCompilerSimpleTest, OptionalStringParamsWrongType) {
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
params_value->Append(new base::FundamentalValue(5));
scoped_ptr<OptionalString::Params> params(
OptionalString::Params::Create(*params_value));
EXPECT_FALSE(params.get());
}
}
TEST(JsonSchemaCompilerSimpleTest, OptionalBeforeRequired) {
{
scoped_ptr<base::ListValue> params_value(new base::ListValue());
params_value->Append(base::Value::CreateNullValue());
params_value->Append(new base::StringValue("asdf"));
scoped_ptr<OptionalBeforeRequired::Params> params(
OptionalBeforeRequired::Params::Create(*params_value));
EXPECT_TRUE(params.get());
EXPECT_FALSE(params->first.get());
EXPECT_EQ("asdf", params->second);
}
}
TEST(JsonSchemaCompilerSimpleTest, NoParamsResultCreate) {
scoped_ptr<base::ListValue> results = OptionalString::Results::Create();
base::ListValue expected;
EXPECT_TRUE(results->Equals(&expected));
}
TEST(JsonSchemaCompilerSimpleTest, TestTypePopulate) {
{
scoped_ptr<TestType> test_type(new TestType());
scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
EXPECT_TRUE(TestType::Populate(*value, test_type.get()));
EXPECT_EQ("bling", test_type->string);
EXPECT_EQ(1.1, test_type->number);
EXPECT_EQ(4, test_type->integer);
EXPECT_EQ(true, test_type->boolean);
EXPECT_TRUE(value->Equals(test_type->ToValue().get()));
}
{
scoped_ptr<TestType> test_type(new TestType());
scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
value->Remove("number", NULL);<|fim▁hole|> EXPECT_FALSE(TestType::Populate(*value, test_type.get()));
}
}
TEST(JsonSchemaCompilerSimpleTest, GetTestType) {
{
scoped_ptr<base::DictionaryValue> value = CreateTestTypeDictionary();
scoped_ptr<TestType> test_type(new TestType());
EXPECT_TRUE(TestType::Populate(*value, test_type.get()));
scoped_ptr<base::ListValue> results =
GetTestType::Results::Create(*test_type);
base::DictionaryValue* result = NULL;
results->GetDictionary(0, &result);
EXPECT_TRUE(result->Equals(value.get()));
}
}
TEST(JsonSchemaCompilerSimpleTest, OnIntegerFiredCreate) {
{
scoped_ptr<base::ListValue> results(OnIntegerFired::Create(5));
base::ListValue expected;
expected.Append(new base::FundamentalValue(5));
EXPECT_TRUE(results->Equals(&expected));
}
}
TEST(JsonSchemaCompilerSimpleTest, OnStringFiredCreate) {
{
scoped_ptr<base::ListValue> results(OnStringFired::Create("yo dawg"));
base::ListValue expected;
expected.Append(new base::StringValue("yo dawg"));
EXPECT_TRUE(results->Equals(&expected));
}
}
TEST(JsonSchemaCompilerSimpleTest, OnTestTypeFiredCreate) {
{
TestType some_test_type;
scoped_ptr<base::DictionaryValue> expected = CreateTestTypeDictionary();
ASSERT_TRUE(expected->GetDouble("number", &some_test_type.number));
ASSERT_TRUE(expected->GetString("string", &some_test_type.string));
ASSERT_TRUE(expected->GetInteger("integer", &some_test_type.integer));
ASSERT_TRUE(expected->GetBoolean("boolean", &some_test_type.boolean));
scoped_ptr<base::ListValue> results(
OnTestTypeFired::Create(some_test_type));
base::DictionaryValue* result = NULL;
results->GetDictionary(0, &result);
EXPECT_TRUE(result->Equals(expected.get()));
}
}<|fim▁end|> | |
<|file_name|>unicodetext.cc<|end_file_name|><|fim▁begin|>// Copyright (C) 2006 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Author: Jim Meehan
#include <iostream><|fim▁hole|>#include <sstream>
#include <cassert>
#include "phonenumbers/utf/unicodetext.h"
//#include "base/logging.h"
#include "phonenumbers/utf/stringpiece.h"
//#include "utf/stringprintf.h"
#include "phonenumbers/utf/utf.h"
#include "phonenumbers/utf/unilib.h"
using std::stringstream;
using std::max;
using std::hex;
using std::dec;
using std::cerr;
using std::endl;
static int CodepointDistance(const char* start, const char* end) {
int n = 0;
// Increment n on every non-trail-byte.
for (const char* p = start; p < end; ++p) {
n += (*reinterpret_cast<const signed char*>(p) >= -0x40);
}
return n;
}
static int CodepointCount(const char* utf8, int len) {
return CodepointDistance(utf8, utf8 + len);
}
UnicodeText::const_iterator::difference_type
distance(const UnicodeText::const_iterator& first,
const UnicodeText::const_iterator& last) {
return CodepointDistance(first.it_, last.it_);
}
// ---------- Utility ----------
static int ConvertToInterchangeValid(char* start, int len) {
// This routine is called only when we've discovered that a UTF-8 buffer
// that was passed to CopyUTF8, TakeOwnershipOfUTF8, or PointToUTF8
// was not interchange valid. This indicates a bug in the caller, and
// a LOG(WARNING) is done in that case.
// This is similar to CoerceToInterchangeValid, but it replaces each
// structurally valid byte with a space, and each non-interchange
// character with a space, even when that character requires more
// than one byte in UTF8. E.g., "\xEF\xB7\x90" (U+FDD0) is
// structurally valid UTF8, but U+FDD0 is not an interchange-valid
// code point. The result should contain one space, not three.
//
// Since the conversion never needs to write more data than it
// reads, it is safe to change the buffer in place. It returns the
// number of bytes written.
char* const in = start;
char* out = start;
char* const end = start + len;
while (start < end) {
int good = UniLib::SpanInterchangeValid(start, end - start);
if (good > 0) {
if (out != start) {
memmove(out, start, good);
}
out += good;
start += good;
if (start == end) {
break;
}
}
// Is the current string invalid UTF8 or just non-interchange UTF8?
char32 rune;
int n;
if (isvalidcharntorune(start, end - start, &rune, &n)) {
// structurally valid UTF8, but not interchange valid
start += n; // Skip over the whole character.
} else { // bad UTF8
start += 1; // Skip over just one byte
}
*out++ = ' ';
}
return out - in;
}
// *************** Data representation **********
// Note: the copy constructor is undefined.
// After reserve(), resize(), or clear(), we're an owner, not an alias.
void UnicodeText::Repr::reserve(int new_capacity) {
// If there's already enough capacity, and we're an owner, do nothing.
if (capacity_ >= new_capacity && ours_) return;
// Otherwise, allocate a new buffer.
capacity_ = max(new_capacity, (3 * capacity_) / 2 + 20);
char* new_data = new char[capacity_];
// If there is an old buffer, copy it into the new buffer.
if (data_) {
memcpy(new_data, data_, size_);
if (ours_) delete[] data_; // If we owned the old buffer, free it.
}
data_ = new_data;
ours_ = true; // We own the new buffer.
// size_ is unchanged.
}
void UnicodeText::Repr::resize(int new_size) {
if (new_size == 0) {
clear();
} else {
if (!ours_ || new_size > capacity_) reserve(new_size);
// Clear the memory in the expanded part.
if (size_ < new_size) memset(data_ + size_, 0, new_size - size_);
size_ = new_size;
ours_ = true;
}
}
// This implementation of clear() deallocates the buffer if we're an owner.
// That's not strictly necessary; we could just set size_ to 0.
void UnicodeText::Repr::clear() {
  if (ours_) delete[] data_;
  data_ = NULL;
  size_ = capacity_ = 0;
  ours_ = true;
}
// Replace our contents with an owned copy of the given bytes.
void UnicodeText::Repr::Copy(const char* data, int size) {
  resize(size);
  memcpy(data_, data, size);
}
// Adopt the caller's heap buffer without copying; we become responsible
// for delete[]-ing it.
void UnicodeText::Repr::TakeOwnershipOf(char* data, int size, int capacity) {
  if (data == data_) return;  // We already own this memory. (Weird case.)
  if (ours_ && data_) delete[] data_;  // If we owned the old buffer, free it.
  data_ = data;
  size_ = size;
  capacity_ = capacity;
  ours_ = true;
}
// Alias the caller's buffer without copying or taking ownership; the
// caller must keep the buffer alive for as long as we reference it.
void UnicodeText::Repr::PointTo(const char* data, int size) {
  if (ours_ && data_) delete[] data_;  // If we owned the old buffer, free it.
  data_ = const_cast<char*>(data);
  size_ = size;
  capacity_ = size;
  ours_ = false;
}
// Append raw bytes, growing (and converting to owned storage) as needed.
void UnicodeText::Repr::append(const char* bytes, int byte_length) {
  reserve(size_ + byte_length);
  memcpy(data_ + size_, bytes, byte_length);
  size_ += byte_length;
}
// Render the internal state (pointer, size, capacity, ownership) for
// debugging, e.g. "{Repr 0x... data=0x... size=3 capacity=23 Owned}".
string UnicodeText::Repr::DebugString() const {
  stringstream ss;
  // Cast data_ to void* so the pointer value is printed; streaming the raw
  // char* would print the buffer's bytes instead, and could read past the
  // end of a buffer that is not NUL-terminated.
  ss << "{Repr " << hex << this << " data="
     << static_cast<const void*>(data_) << " size=" << dec
     << size_ << " capacity=" << capacity_ << " "
     << (ours_ ? "Owned" : "Alias") << "}";
  // The previous `ss >> result` extraction stopped at the first space and
  // truncated the output to "{Repr"; str() returns the whole string.
  return ss.str();
}
// *************** UnicodeText ******************
// ----- Constructors -----
// Default constructor
UnicodeText::UnicodeText() {
}
// Copy constructor
UnicodeText::UnicodeText(const UnicodeText& src) {
  Copy(src);
}
// Substring constructor
UnicodeText::UnicodeText(const UnicodeText::const_iterator& first,
                         const UnicodeText::const_iterator& last) {
  assert(first <= last && "Incompatible iterators");
  repr_.append(first.it_, last.it_ - first.it_);
}
// Return the UTF-8 bytes between two iterators as a std::string.
string UnicodeText::UTF8Substring(const const_iterator& first,
                                  const const_iterator& last) {
  assert(first <= last && "Incompatible iterators");
  return string(first.it_, last.it_ - first.it_);
}
// ----- Copy -----
UnicodeText& UnicodeText::operator=(const UnicodeText& src) {
  if (this != &src) {
    Copy(src);
  }
  return *this;
}
// Deep-copy src's bytes into our own buffer.
UnicodeText& UnicodeText::Copy(const UnicodeText& src) {
  repr_.Copy(src.repr_.data_, src.repr_.size_);
  return *this;
}
// Copy a UTF-8 buffer, coercing it to interchange-valid UTF-8 (bad
// sequences are repaired in place) when necessary.
UnicodeText& UnicodeText::CopyUTF8(const char* buffer, int byte_length) {
  repr_.Copy(buffer, byte_length);
  if (!UniLib:: IsInterchangeValid(buffer, byte_length)) {
    cerr << "UTF-8 buffer is not interchange-valid." << endl;
    repr_.size_ = ConvertToInterchangeValid(repr_.data_, byte_length);
  }
  return *this;
}
// Like CopyUTF8 but trusts the caller: the buffer must already be
// interchange-valid UTF-8 (no validation is performed).
UnicodeText& UnicodeText::UnsafeCopyUTF8(const char* buffer,
                                         int byte_length) {
  repr_.Copy(buffer, byte_length);
  return *this;
}
// ----- TakeOwnershipOf -----
// Adopt a heap-allocated UTF-8 buffer, repairing it to interchange-valid
// form in place if needed.
UnicodeText& UnicodeText::TakeOwnershipOfUTF8(char* buffer,
                                              int byte_length,
                                              int byte_capacity) {
  repr_.TakeOwnershipOf(buffer, byte_length, byte_capacity);
  if (!UniLib:: IsInterchangeValid(buffer, byte_length)) {
    cerr << "UTF-8 buffer is not interchange-valid." << endl;
    repr_.size_ = ConvertToInterchangeValid(repr_.data_, byte_length);
  }
  return *this;
}
// Adopt a buffer the caller guarantees is already interchange-valid.
UnicodeText& UnicodeText::UnsafeTakeOwnershipOfUTF8(char* buffer,
                                                    int byte_length,
                                                    int byte_capacity) {
  repr_.TakeOwnershipOf(buffer, byte_length, byte_capacity);
  return *this;
}
// ----- PointTo -----
// Alias a UTF-8 buffer without copying. If the buffer is not
// interchange-valid we must fall back to copying so the repaired bytes
// have owned storage to live in.
UnicodeText& UnicodeText::PointToUTF8(const char* buffer, int byte_length) {
  if (UniLib:: IsInterchangeValid(buffer, byte_length)) {
    repr_.PointTo(buffer, byte_length);
  } else {
    cerr << "UTF-8 buffer is not interchange-valid." << endl;
    repr_.Copy(buffer, byte_length);
    repr_.size_ = ConvertToInterchangeValid(repr_.data_, byte_length);
  }
  return *this;
}
// Alias a buffer the caller guarantees is interchange-valid.
UnicodeText& UnicodeText::UnsafePointToUTF8(const char* buffer,
                                            int byte_length) {
  repr_.PointTo(buffer, byte_length);
  return *this;
}
// Alias another UnicodeText's storage (no copy, no ownership transfer).
UnicodeText& UnicodeText::PointTo(const UnicodeText& src) {
  repr_.PointTo(src.repr_.data_, src.repr_.size_);
  return *this;
}
// Alias the byte range between two iterators into the same text.
UnicodeText& UnicodeText::PointTo(const const_iterator &first,
                                  const const_iterator &last) {
  assert(first <= last && " Incompatible iterators");
  repr_.PointTo(first.utf8_data(), last.utf8_data() - first.utf8_data());
  return *this;
}
// ----- Append -----
UnicodeText& UnicodeText::append(const UnicodeText& u) {
  repr_.append(u.repr_.data_, u.repr_.size_);
  return *this;
}
// Append the byte range between two iterators.
UnicodeText& UnicodeText::append(const const_iterator& first,
                                 const const_iterator& last) {
  assert(first <= last && "Incompatible iterators");
  repr_.append(first.it_, last.it_ - first.it_);
  return *this;
}
// Append raw UTF-8 the caller guarantees is interchange-valid.
UnicodeText& UnicodeText::UnsafeAppendUTF8(const char* utf8, int len) {
  repr_.append(utf8, len);
  return *this;
}
// ----- substring searching -----
// Find the first occurrence of `look` at or after start_pos; returns
// end() if absent. start_pos must point into this text (asserted).
UnicodeText::const_iterator UnicodeText::find(const UnicodeText& look,
                                              const_iterator start_pos) const {
  assert(start_pos.utf8_data() >= utf8_data());
  assert(start_pos.utf8_data() <= utf8_data() + utf8_length());
  return UnsafeFind(look, start_pos);
}
// Find the first occurrence of `look`, searching from the beginning.
UnicodeText::const_iterator UnicodeText::find(const UnicodeText& look) const {
  return UnsafeFind(look, begin());
}
UnicodeText::const_iterator UnicodeText::UnsafeFind(
    const UnicodeText& look, const_iterator start_pos) const {
  // Due to the magic of the UTF8 encoding, searching for a sequence of
  // letters is equivalent to substring search.
  StringPiece searching(utf8_data(), utf8_length());
  StringPiece look_piece(look.utf8_data(), look.utf8_length());
  StringPiece::size_type found =
      searching.find(look_piece, start_pos.utf8_data() - utf8_data());
  if (found == StringPiece::npos) return end();
  return const_iterator(utf8_data() + found);
}
// True if the text contains U+FFFD (the replacement character), i.e. a
// prior conversion step encountered invalid input.
bool UnicodeText::HasReplacementChar() const {
  // Equivalent to:
  //   UnicodeText replacement_char;
  //   replacement_char.push_back(0xFFFD);
  //   return find(replacement_char) != end();
  StringPiece searching(utf8_data(), utf8_length());
  StringPiece looking_for("\xEF\xBF\xBD", 3);
  return searching.find(looking_for) != StringPiece::npos;
}
// ----- other methods -----
// Clear operator
void UnicodeText::clear() {
  repr_.clear();
}
// Destructor
UnicodeText::~UnicodeText() {}
// Append one code point, substituting a space (with a logged complaint)
// for values that are not valid, interchange-legal Unicode.
void UnicodeText::push_back(char32 c) {
  if (UniLib::IsValidCodepoint(c)) {
    char buf[UTFmax];
    int len = runetochar(buf, &c);
    if (UniLib::IsInterchangeValid(buf, len)) {
      repr_.append(buf, len);
    } else {
      cerr << "Unicode value 0x" << hex << c
           << " is not valid for interchange" << endl;
      repr_.append(" ", 1);
    }
  } else {
    cerr << "Illegal Unicode value: 0x" << hex << c << endl;
    repr_.append(" ", 1);
  }
}
// Number of code points (not bytes); O(n) scan of the buffer.
int UnicodeText::size() const {
  return CodepointCount(repr_.data_, repr_.size_);
}
// Two UnicodeTexts compare equal when their underlying UTF-8 byte buffers
// have the same length and contents (or they are the same object).
bool operator==(const UnicodeText& lhs, const UnicodeText& rhs) {
  if (&lhs == &rhs) return true;  // Identity implies equality.
  const bool same_length = (lhs.repr_.size_ == rhs.repr_.size_);
  return same_length &&
         memcmp(lhs.repr_.data_, rhs.repr_.data_, lhs.repr_.size_) == 0;
}
// Render identity and size information for debugging, e.g.
// "{UnicodeText 0x... chars=12 repr={Repr ...}}".
string UnicodeText::DebugString() const {
  stringstream ss;
  ss << "{UnicodeText " << hex << this << dec << " chars="
     << size() << " repr=" << repr_.DebugString() << "}";
  // The previous `ss >> result` extraction stopped at the first space and
  // truncated the output to "{UnicodeText"; str() returns the entire
  // formatted string. (A dead `#if 0` StringPrintf variant was removed.)
  return ss.str();
}
// ******************* UnicodeText::const_iterator *********************
// The implementation of const_iterator would be nicer if it
// inherited from boost::iterator_facade
// (http://boost.org/libs/iterator/doc/iterator_facade.html).
UnicodeText::const_iterator::const_iterator() : it_(0) {}
UnicodeText::const_iterator::const_iterator(const const_iterator& other)
    : it_(other.it_) {
}
UnicodeText::const_iterator&
UnicodeText::const_iterator::operator=(const const_iterator& other) {
  if (&other != this)
    it_ = other.it_;
  return *this;
}
UnicodeText::const_iterator UnicodeText::begin() const {
  return const_iterator(repr_.data_);
}
UnicodeText::const_iterator UnicodeText::end() const {
  return const_iterator(repr_.data_ + repr_.size_);
}
// Iterators order by their position in the underlying byte buffer.
bool operator<(const UnicodeText::const_iterator& lhs,
               const UnicodeText::const_iterator& rhs) {
  return lhs.it_ < rhs.it_;
}
// Decode and return the code point at the iterator's position.
char32 UnicodeText::const_iterator::operator*() const {
  // (We could call chartorune here, but that does some
  // error-checking, and we're guaranteed that our data is valid
  // UTF-8. Also, we expect this routine to be called very often. So
  // for speed, we do the calculation ourselves.)
  // Convert from UTF-8: the lead byte's range determines the sequence
  // length (1-4 bytes); trail bytes each contribute 6 payload bits.
  uint8 byte1 = static_cast<uint8>(it_[0]);
  if (byte1 < 0x80)
    return byte1;
  uint8 byte2 = static_cast<uint8>(it_[1]);
  if (byte1 < 0xE0)
    return ((byte1 & 0x1F) << 6)
        | (byte2 & 0x3F);
  uint8 byte3 = static_cast<uint8>(it_[2]);
  if (byte1 < 0xF0)
    return ((byte1 & 0x0F) << 12)
        | ((byte2 & 0x3F) << 6)
        | (byte3 & 0x3F);
  uint8 byte4 = static_cast<uint8>(it_[3]);
  return ((byte1 & 0x07) << 18)
      | ((byte2 & 0x3F) << 12)
      | ((byte3 & 0x3F) << 6)
      | (byte4 & 0x3F);
}
// Advance one code point by skipping the current lead byte's full length.
UnicodeText::const_iterator& UnicodeText::const_iterator::operator++() {
  it_ += UniLib::OneCharLen(it_);
  return *this;
}
// Step back one code point by backing up over any UTF-8 trail bytes.
UnicodeText::const_iterator& UnicodeText::const_iterator::operator--() {
  while (UniLib::IsTrailByte(*--it_)) { }
  return *this;
}
// Copy the current code point's UTF-8 bytes (1-4) into utf8_output and
// return the number of bytes written.
int UnicodeText::const_iterator::get_utf8(char* utf8_output) const {
  utf8_output[0] = it_[0];
  if (static_cast<unsigned char>(it_[0]) < 0x80)
    return 1;
  utf8_output[1] = it_[1];
  if (static_cast<unsigned char>(it_[0]) < 0xE0)
    return 2;
  utf8_output[2] = it_[2];
  if (static_cast<unsigned char>(it_[0]) < 0xF0)
    return 3;
  utf8_output[3] = it_[3];
  return 4;
}
// Build an iterator from a raw pointer, asserting that it lies inside
// this text and on a code-point boundary.
UnicodeText::const_iterator UnicodeText::MakeIterator(const char* p) const {
  assert(p != NULL);
  const char* start = utf8_data();
  int len = utf8_length();
  const char* end = start + len;
  assert(p >= start);
  assert(p <= end);
  assert(p == end || !UniLib::IsTrailByte(*p));
  return const_iterator(p);
}
// Render the iterator's raw byte position for debugging.
string UnicodeText::const_iterator::DebugString() const {
  stringstream ss;
  // Cast to void* so the pointer value is printed; streaming the raw
  // char* would print the remaining text bytes instead (and could read
  // past the end of a buffer that is not NUL-terminated).
  ss << "{iter " << hex << static_cast<const void*>(it_) << "}";
  // The previous `ss >> result` extraction stopped at the first space and
  // returned just "{iter"; str() returns the whole formatted string.
  return ss.str();
}
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
setup(<|fim▁hole|> name='Korail',
packages=['korail'],
version='0.0.3',
description='An unoffical API for Korail.',
long_description=open('README.rst').read(),
license='BSD License',
author='Su Yeol Jeon',
author_email='[email protected]',
url='https://github.com/devxoul/korail',
keywords=['Korail'],
classifiers=[],
install_requires=[
'requests',
'BeautifulSoup4'
]
)<|fim▁end|> | |
<|file_name|>small-enum-range-edge.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|> * Tests the range assertion wraparound case in trans::middle::adt::load_discr.
*/
// One-byte unsigned enum whose discriminants span the full u8 range.
#[repr(u8)]
enum Eu { Lu = 0, Hu = 255 }
// Constants aliasing the extreme variants; main() checks that they cast
// to the same raw values as the variants themselves.
static CLu: Eu = Eu::Lu;
static CHu: Eu = Eu::Hu;
// One-byte signed enum whose discriminants span the full i8 range.
#[repr(i8)]
enum Es { Ls = -128, Hs = 127 }
static CLs: Es = Es::Ls;
static CHs: Es = Es::Hs;
pub fn main() {
assert_eq!((Eu::Hu as u8) + 1, Eu::Lu as u8);
assert_eq!((Es::Hs as i8) + 1, Es::Ls as i8);
assert_eq!(CLu as u8, Eu::Lu as u8);
assert_eq!(CHu as u8, Eu::Hu as u8);
assert_eq!(CLs as i8, Es::Ls as i8);
assert_eq!(CHs as i8, Es::Hs as i8);
}<|fim▁end|> | // except according to those terms.
/*! |
<|file_name|>consumer.py<|end_file_name|><|fim▁begin|>import datetime
import logging
import os
import signal
import threading
import time
from multiprocessing import Event as ProcessEvent
from multiprocessing import Process
try:
import gevent
from gevent import Greenlet
from gevent.event import Event as GreenEvent
except ImportError:
Greenlet = GreenEvent = None
from huey.exceptions import DataStoreGetException
from huey.exceptions import QueueException
from huey.exceptions import QueueReadException
from huey.exceptions import DataStorePutException
from huey.exceptions import QueueWriteException
from huey.exceptions import ScheduleAddException
from huey.exceptions import ScheduleReadException
from huey.registry import registry
class BaseProcess(object):
    """Shared behavior for the consumer's worker and scheduler processes:
    clock access, interval sleeping, and enqueueing tasks."""

    def __init__(self, huey, utc):
        self.huey = huey
        self.utc = utc  # When True, get_now() returns UTC timestamps.

    def get_now(self):
        """Return the current time as a datetime (UTC or local per self.utc)."""
        if self.utc:
            return datetime.datetime.utcnow()
        return datetime.datetime.now()

    def sleep_for_interval(self, start_ts, nseconds):
        """Sleep out the remainder of an ``nseconds`` interval that began at
        ``start_ts`` (a ``time.time()`` value); return immediately when the
        interval has already elapsed."""
        # Compute the remaining time once and only sleep when positive. The
        # original re-read the clock inside time.sleep(...), so if time
        # advanced past the interval between the check and the call, a
        # negative argument was passed and time.sleep raised ValueError.
        remaining = nseconds - (time.time() - start_ts)
        if remaining > 0:
            time.sleep(remaining)

    def enqueue(self, task):
        """Enqueue ``task``, emitting 'enqueued' on success and logging on
        failure (write errors are deliberately swallowed)."""
        try:
            self.huey.enqueue(task)
        except QueueWriteException:
            # _logger is supplied by subclasses (Worker / Scheduler).
            self._logger.error('Error enqueueing task: %s' % task)
        else:
            self.huey.emit_task('enqueued', task)

    def loop(self, now=None):
        """One iteration of the process's main loop; subclasses implement."""
        raise NotImplementedError
class Worker(BaseProcess):
    """Pulls tasks off the queue and executes them, backing off the poll
    interval geometrically (up to max_delay) while the queue is empty."""
    def __init__(self, huey, default_delay, max_delay, backoff, utc):
        # `delay` is the current poll interval: reset to default_delay when
        # a task is found, multiplied by `backoff` while idle.
        self.delay = self.default_delay = default_delay
        self.max_delay = max_delay
        self.backoff = backoff
        self._logger = logging.getLogger('huey.consumer.Worker')
        super(Worker, self).__init__(huey, utc)
    def loop(self, now=None):
        """One dequeue-and-handle cycle. KeyboardInterrupt propagates; any
        other dequeue error is logged and treated like an empty read."""
        self._logger.debug('Checking for message')
        task = None
        exc_raised = True
        try:
            task = self.huey.dequeue()
        except QueueReadException as exc:
            self._logger.exception('Error reading from queue')
        except QueueException:
            self._logger.exception('Queue exception')
        except KeyboardInterrupt:
            raise
        except:
            self._logger.exception('Unknown exception')
        else:
            exc_raised = False
        if task:
            self.delay = self.default_delay  # Found work: reset backoff.
            self.handle_task(task, now or self.get_now())
        elif exc_raised or not self.huey.blocking:
            # A blocking queue waits inside dequeue() itself, so only sleep
            # here when the queue is non-blocking or the read failed.
            self.sleep()
<|fim▁hole|> def sleep(self):
if self.delay > self.max_delay:
self.delay = self.max_delay
self._logger.debug('No messages, sleeping for: %s' % self.delay)
time.sleep(self.delay)
self.delay *= self.backoff
    def handle_task(self, task, ts):
        """Dispatch a dequeued task: schedule it if not yet ready to run at
        `ts`, skip it if revoked, otherwise execute it."""
        if not self.huey.ready_to_run(task, ts):
            self._logger.info('Adding %s to schedule' % task)
            self.add_schedule(task)
        elif not self.is_revoked(task, ts):
            self.process_task(task, ts)
        else:
            self._logger.debug('Task %s was revoked, not running' % task)
    def process_task(self, task, ts):
        """Execute `task`, emitting lifecycle events. Unhandled execution
        errors trigger a retry when the task has retries remaining."""
        self._logger.info('Executing %s' % task)
        self.huey.emit_task('started', task)
        try:
            self.huey.execute(task)
        except DataStorePutException:
            self._logger.exception('Error storing result')
        except:
            self._logger.exception('Unhandled exception in worker thread')
            self.huey.emit_task('error', task, error=True)
            if task.retries:
                self.huey.emit_task('retrying', task)
                self.requeue_task(task, self.get_now())
        else:
            self.huey.emit_task('finished', task)
    def requeue_task(self, task, ts):
        """Decrement the retry count and put the task back: on the schedule
        when retry_delay is set, otherwise directly on the queue."""
        task.retries -= 1
        self._logger.info('Re-enqueueing task %s, %s tries left' %
                          (task.task_id, task.retries))
        if task.retry_delay:
            delay = datetime.timedelta(seconds=task.retry_delay)
            task.execute_time = ts + delay
            self._logger.debug('Execute %s at: %s' % (task, task.execute_time))
            self.add_schedule(task)
        else:
            self.enqueue(task)
    def add_schedule(self, task):
        """Add `task` to the schedule, emitting 'scheduled' on success and
        logging (not raising) on failure."""
        try:
            self.huey.add_schedule(task)
        except ScheduleAddException:
            self._logger.error('Error adding task to schedule: %s' % task)
        else:
            self.huey.emit_task('scheduled', task)
    def is_revoked(self, task, ts):
        """Return True when the task is revoked, emitting 'revoked'. If the
        revocation state cannot be read, fail closed and return True."""
        try:
            if self.huey.is_revoked(task, ts, peek=False):
                self.huey.emit_task('revoked', task)
                return True
            return False
        except DataStoreGetException:
            self._logger.error('Error checking if task is revoked: %s' % task)
            return True
class Scheduler(BaseProcess):
    """Moves due tasks from the schedule onto the queue and, when enabled,
    enqueues periodic tasks roughly once per minute."""
    def __init__(self, huey, interval, utc, periodic):
        super(Scheduler, self).__init__(huey, utc)
        self.interval = min(interval, 60)  # Never poll less than once/minute.
        self.periodic = periodic
        if periodic:
            # Determine the periodic task interval.
            self._q, self._r = divmod(60, self.interval)
            if not self._r:
                self._q -= 1
            # Loop-iteration counter used to fire periodic checks once per
            # minute's worth of intervals.
            self._counter = 0
        self._logger = logging.getLogger('huey.consumer.Scheduler')
    def loop(self, now=None):
        """One scheduler pass: enqueue everything due at `now`, check the
        periodic tasks when a full minute has elapsed, then sleep out the
        remainder of the interval."""
        now = now or self.get_now()
        start = time.time()
        for task in self.huey.read_schedule(now):
            self._logger.info('Scheduling %s for execution' % task)
            self.enqueue(task)
        should_sleep = True
        if self.periodic:
            if self._counter == self._q:
                # Minute boundary: sleep off the remainder, enqueue the
                # periodic tasks, then sleep out the rest of this interval.
                if self._r:
                    self.sleep_for_interval(start, self._r)
                self._logger.debug('Checking periodic tasks')
                self._counter = 0
                for task in self.huey.read_periodic(now):
                    self._logger.info('Scheduling periodic task %s.' % task)
                    self.enqueue(task)
                self.sleep_for_interval(start, self.interval - self._r)
                should_sleep = False
            else:
                self._counter += 1
        if should_sleep:
            self.sleep_for_interval(start, self.interval)
class Environment(object):
    """Abstract factory pairing a stop flag (event) with a process-like
    runner for a callable, so the consumer can swap concurrency models."""
    def get_stop_flag(self):
        raise NotImplementedError
    def create_process(self, runnable, name):
        raise NotImplementedError
class ThreadEnvironment(Environment):
    """Runs workers as daemon threads within the current process."""
    def get_stop_flag(self):
        return threading.Event()
    def create_process(self, runnable, name):
        t = threading.Thread(target=runnable, name=name)
        t.daemon = True  # Do not block interpreter shutdown.
        return t
class GreenletEnvironment(Environment):
    """Runs workers as gevent greenlets (requires gevent at import time)."""
    def get_stop_flag(self):
        return GreenEvent()
    def create_process(self, runnable, name):
        def run_wrapper():
            # Yield control around the runnable so sibling greenlets get a
            # chance to be scheduled.
            gevent.sleep()
            runnable()
            gevent.sleep()
        return Greenlet(run=run_wrapper)
class ProcessEnvironment(Environment):
    """Runs workers as daemonized OS processes via multiprocessing."""
    def get_stop_flag(self):
        return ProcessEvent()
    def create_process(self, runnable, name):
        p = Process(target=runnable, name=name)
        p.daemon = True
        return p
# Maps the consumer's worker_type option to its Environment class.
worker_to_environment = {
    'thread': ThreadEnvironment,
    'greenlet': GreenletEnvironment,
    'gevent': GreenletEnvironment,  # Same as greenlet.
    'process': ProcessEnvironment,
}
def __init__(self, huey, workers=1, periodic=True, initial_delay=0.1,
backoff=1.15, max_delay=10.0, utc=True, scheduler_interval=1,
worker_type='thread'):
self._logger = logging.getLogger('huey.consumer')
self.huey = huey
self.workers = workers
self.periodic = periodic
self.default_delay = initial_delay
self.backoff = backoff
self.max_delay = max_delay
self.utc = utc
self.scheduler_interval = max(min(scheduler_interval, 60), 1)
self.worker_type = worker_type
if worker_type not in worker_to_environment:
raise ValueError('worker_type must be one of %s.' %
', '.join(worker_to_environment))
else:
self.environment = worker_to_environment[worker_type]()
self._received_signal = False
self.stop_flag = self.environment.get_stop_flag()
scheduler = self._create_runnable(self._create_scheduler())
self.scheduler = self.environment.create_process(
scheduler,
'Scheduler')
self.worker_threads = []
for i in range(workers):
worker = self._create_runnable(self._create_worker())
self.worker_threads.append(self.environment.create_process(
worker,
'Worker-%d' % (i + 1)))
def _create_worker(self):
return Worker(
huey=self.huey,
default_delay=self.default_delay,
max_delay=self.max_delay,
backoff=self.backoff,
utc=self.utc)
def _create_scheduler(self):
return Scheduler(
huey=self.huey,
interval=self.scheduler_interval,
utc=self.utc,
periodic=self.periodic)
def _create_runnable(self, consumer_process):
def _run():
try:
while not self.stop_flag.is_set():
consumer_process.loop()
except KeyboardInterrupt:
pass
return _run
def start(self):
self._logger.info('Huey consumer started with %s %s, PID %s' % (
self.workers,
self.worker_type,
os.getpid()))
self._logger.info('Scheduler runs every %s seconds.' % (
self.scheduler_interval))
self._logger.info('Periodic tasks are %s.' % (
'enabled' if self.periodic else 'disabled'))
self._set_signal_handler()
msg = ['The following commands are available:']
for command in registry._registry:
msg.append('+ %s' % command.replace('queuecmd_', ''))
self._logger.info('\n'.join(msg))
self.scheduler.start()
for worker in self.worker_threads:
worker.start()
def stop(self):
self.stop_flag.set()
self._logger.info('Shutting down')
def run(self):
self.start()
while True:
try:
is_set = self.stop_flag.wait(timeout=0.1)
time.sleep(0.1)
except KeyboardInterrupt:
self.stop()
except:
self._logger.exception('Error in consumer.')
self.stop()
else:
if self._received_signal:
self.stop()
if self.stop_flag.is_set():
break
self._logger.info('Consumer exiting.')
def _set_signal_handler(self):
signal.signal(signal.SIGTERM, self._handle_signal)
def _handle_signal(self, sig_num, frame):
self._logger.info('Received SIGTERM')
self._received_signal = True<|fim▁end|> | |
<|file_name|>helpers.js<|end_file_name|><|fim▁begin|>DV.Schema.helpers = {
  // Captures the host portion of an absolute http(s) URL (group 1).
  HOST_EXTRACTOR : (/https?:\/\/([^\/]+)\//),
  // Selector for annotation elements within the viewer.
  annotationClassName: '.DV-annotation',
// Bind all events for the docviewer
// live/delegate are the preferred methods of event attachment
  bindEvents: function(context){
    // Zoom slider: both drag (slide) and release (change) re-zoom the
    // document to the selected entry of ZOOM_RANGES.
    var boundZoom = this.events.compile('zoom');
    var doc = context.models.document;
    var value = _.indexOf(doc.ZOOM_RANGES, doc.zoomLevel);
    var viewer = this.viewer;
    viewer.slider = viewer.$('.DV-zoomBox').slider({
      step: 1,
      min: 0,
      max: 4,
      value: value,
      slide: function(el,d){
        boundZoom(context.models.document.ZOOM_RANGES[parseInt(d.value, 10)]);
      },
      change: function(el,d){
        boundZoom(context.models.document.ZOOM_RANGES[parseInt(d.value, 10)]);
      }
    });
    // next/previous
    var history = viewer.history;
    var compiled = viewer.compiled;
    compiled.next = this.events.compile('next');
    compiled.previous = this.events.compile('previous');
    var states = context.states;
    viewer.$('.DV-navControls').delegate('span.DV-next','click', compiled.next);
    viewer.$('.DV-navControls').delegate('span.DV-previous','click', compiled.previous);
    // Tab triggers switch the viewer between its main view states.
    viewer.$('.DV-annotationView').delegate('.DV-trigger','click',function(e){
      e.preventDefault();
      context.open('ViewAnnotation');
    });
    viewer.$('.DV-documentView').delegate('.DV-trigger','click',function(e){
      // history.save('document/p'+context.models.document.currentPage());
      context.open('ViewDocument');
    });
    viewer.$('.DV-thumbnailsView').delegate('.DV-trigger','click',function(e){
      context.open('ViewThumbnails');
    });
    viewer.$('.DV-textView').delegate('.DV-trigger','click',function(e){
      // history.save('text/p'+context.models.document.currentPage());
      context.open('ViewText');
    });
    viewer.$('.DV-allAnnotations').delegate('.DV-annotationGoto .DV-trigger','click', DV.jQuery.proxy(this.gotoPage, this));
    viewer.$('.DV-allAnnotations').delegate('.DV-annotationTitle .DV-trigger','click', DV.jQuery.proxy(this.gotoPage, this));
    // Search bar: submit, close, cancel, and result navigation.
    viewer.$('form.DV-searchDocument').submit(this.events.compile('search'));
    viewer.$('.DV-searchBar').delegate('.DV-closeSearch','click',function(e){
      viewer.$('.DV-searchBar').fadeOut(250);
      e.preventDefault();
      // history.save('text/p'+context.models.document.currentPage());
    });
    viewer.$('.DV-searchBox').delegate('.DV-searchInput-cancel', 'click', DV.jQuery.proxy(this.clearSearch, this));
    viewer.$('.DV-searchResults').delegate('span.DV-resultPrevious','click', DV.jQuery.proxy(this.highlightPreviousMatch, this));
    viewer.$('.DV-searchResults').delegate('span.DV-resultNext','click', DV.jQuery.proxy(this.highlightNextMatch, this));
    // Prevent navigation elements from being selectable when clicked.
    viewer.$('.DV-trigger').bind('selectstart', function(){ return false; });
    this.elements.viewer.delegate('.DV-fullscreen', 'click', _.bind(this.openFullScreen, this));
    // Annotation editing/navigation controls, delegated on the collection.
    var boundToggle = DV.jQuery.proxy(this.annotationBridgeToggle, this);
    var collection = this.elements.collection;
    collection.delegate('.DV-annotationTab','click', boundToggle);
    collection.delegate('.DV-annotationRegion','click', DV.jQuery.proxy(this.annotationBridgeShow, this));
    collection.delegate('.DV-annotationNext','click', DV.jQuery.proxy(this.annotationBridgeNext, this));
    collection.delegate('.DV-annotationPrevious','click', DV.jQuery.proxy(this.annotationBridgePrevious, this));
    collection.delegate('.DV-showEdit','click', DV.jQuery.proxy(this.showAnnotationEdit, this));
    collection.delegate('.DV-cancelEdit','click', DV.jQuery.proxy(this.cancelAnnotationEdit, this));
    collection.delegate('.DV-saveAnnotation','click', DV.jQuery.proxy(this.saveAnnotation, this));
    collection.delegate('.DV-saveAnnotationDraft','click', DV.jQuery.proxy(this.saveAnnotation, this));
    collection.delegate('.DV-deleteAnnotation','click', DV.jQuery.proxy(this.deleteAnnotation, this));
    collection.delegate('.DV-pageNumber', 'click', _.bind(this.permalinkPage, this, 'document'));
    collection.delegate('.DV-textCurrentPage', 'click', _.bind(this.permalinkPage, this, 'text'));
    collection.delegate('.DV-annotationTitle', 'click', _.bind(this.permalinkAnnotation, this));
    collection.delegate('.DV-permalink', 'click', _.bind(this.permalinkAnnotation, this));
    // Thumbnails
    viewer.$('.DV-thumbnails').delegate('.DV-thumbnail-page', 'click', function(e) {
      var $thumbnail = viewer.$(e.currentTarget);
      if (!viewer.openEditor) {
        var pageIndex = $thumbnail.closest('.DV-thumbnail').attr('data-pageNumber') - 1;
        viewer.models.document.setPageIndex(pageIndex);
        viewer.open('ViewDocument');
        // viewer.history.save('document/p'+pageNumber);
      }
    });
    // Handle iPad / iPhone scroll events...
    _.bindAll(this, 'touchStart', 'touchMove', 'touchEnd');
    this.elements.window[0].ontouchstart = this.touchStart;
    this.elements.window[0].ontouchmove = this.touchMove;
    this.elements.window[0].ontouchend = this.touchEnd;
    this.elements.well[0].ontouchstart = this.touchStart;
    this.elements.well[0].ontouchmove = this.touchMove;
    this.elements.well[0].ontouchend = this.touchEnd;
    viewer.$('.DV-descriptionToggle').live('click',function(e){
      e.preventDefault();
      e.stopPropagation();
      viewer.$('.DV-descriptionText').toggle();
      viewer.$('.DV-descriptionToggle').toggleClass('DV-showDescription');
    });
    // Clicking near the right edge of the cover dismisses open page notes.
    var cleanUp = DV.jQuery.proxy(viewer.pageSet.cleanUp, this);
    this.elements.window.live('mousedown',
      function(e){
        var el = viewer.$(e.target);
        if (el.parents().is('.DV-annotation') || el.is('.DV-annotation')) return true;
        if(context.elements.window.hasClass('DV-coverVisible')){
          if((el.width() - parseInt(e.clientX,10)) >= 15){
            cleanUp();
          }
        }
      }
    );
    // Focus/blur handlers are namespaced by document id so unbindEvents
    // can remove only this viewer's handlers.
    var docId = viewer.schema.document.id;
    //if(DV.jQuery.browser.msie == true){
    //  this.elements.browserDocument.bind('focus.' + docId, DV.jQuery.proxy(this.focusWindow,this));
    //  this.elements.browserDocument.bind('focusout.' + docId, DV.jQuery.proxy(this.focusOut,this));
    // }else{
    this.elements.browserWindow.bind('focus.' + docId, DV.jQuery.proxy(this.focusWindow,this));
    this.elements.browserWindow.bind('blur.' + docId, DV.jQuery.proxy(this.blurWindow,this));
    // }
    // When the document is scrolled, even in the background, resume polling.
    this.elements.window.bind('scroll.' + docId, DV.jQuery.proxy(this.focusWindow, this));
    this.elements.coverPages.live('mousedown', cleanUp);
    viewer.acceptInput = this.elements.currentPage.acceptInput({ changeCallBack: DV.jQuery.proxy(this.acceptInputCallBack,this) });
  },
// Unbind jQuery events that have been bound to objects outside of the viewer.
unbindEvents: function() {
var viewer = this.viewer;
var docId = viewer.schema.document.id;
if(DV.jQuery.browser.msie == true){
this.elements.browserDocument.unbind('focus.' + docId);
this.elements.browserDocument.unbind('focusout.' + docId);
}else{
viewer.helpers.elements.browserWindow.unbind('focus.' + docId);
viewer.helpers.elements.browserWindow.unbind('blur.' + docId);
}
viewer.helpers.elements.browserWindow.unbind('scroll.' + docId);
_.each(viewer.observers, function(obs){ viewer.helpers.removeObserver(obs); });
},
  // We're entering the Notes tab -- make sure that there are no data-src
  // attributes remaining.
  ensureAnnotationImages : function() {
    this.viewer.$(".DV-img[data-src]").each(function() {
      var el = DV.jQuery(this);
      el.attr('src', el.attr('data-src'));
    });
  },
  // Start the 100ms polling loop that drives the viewer's event checks.
  startCheckTimer: function(){
    var _t = this.viewer;
    var _check = function(){
      _t.events.check();
    };
    this.viewer.checkTimer = setInterval(_check,100);
  },
  stopCheckTimer: function(){
    clearInterval(this.viewer.checkTimer);
  },
  // Pause polling when the browser window loses focus.
  blurWindow: function(){
    if(this.viewer.isFocus === true){
      this.viewer.isFocus = false;
      // pause draw timer
      this.stopCheckTimer();
    }else{
      return;
    }
  },
  // Track the active element; if focus didn't actually move, pause polling.
  focusOut: function(){
    if(this.viewer.activeElement != document.activeElement){
      this.viewer.activeElement = document.activeElement;
      this.viewer.isFocus = true;
    }else{
      // pause draw timer
      this.viewer.isFocus = false;
      this.viewer.helpers.stopCheckTimer();
      return;
    }
  },
  // Resume polling when the browser window regains focus.
  focusWindow: function(){
    if(this.viewer.isFocus === true){
      return;
    }else{
      this.viewer.isFocus = true;
      // restart draw timer
      this.startCheckTimer();
    }
  },
  // Record the initial touch point so touchMove can compute scroll deltas.
  touchStart : function(e) {
    e.stopPropagation();
    e.preventDefault();
    var touch = e.changedTouches[0];
    this._moved = false;
    this._touchX = touch.pageX;
    this._touchY = touch.pageY;
  },
  // Scroll the touched element by the finger's movement since last event.
  touchMove : function(e) {
    var el = e.currentTarget;
    var touch = e.changedTouches[0];
    var xDiff = this._touchX - touch.pageX;
    var yDiff = this._touchY - touch.pageY;
    el.scrollLeft += xDiff;
    el.scrollTop += yDiff;
    this._touchX -= xDiff;
    this._touchY -= yDiff;
    if (yDiff != 0 || xDiff != 0) this._moved = true;
  },
  // If the finger never moved, synthesize a click on the touched element
  // (touchStart's preventDefault suppressed the native one).
  touchEnd : function(e) {
    if (!this._moved) {
      var touch = e.changedTouches[0];
      var target = touch.target;
      var fakeClick = document.createEvent('MouseEvent');
      // Events must be dispatched on element nodes, not text nodes.
      while (target.nodeType !== 1) target = target.parentNode;
      fakeClick.initMouseEvent('click', true, true, touch.view, 1,
        touch.screenX, touch.screenY, touch.clientX, touch.clientY,
        false, false, false, false, 0, null);
      target.dispatchEvent(fakeClick);
    }
    this._moved = false;
  },
  // Click to open a page's permalink.
  permalinkPage : function(mode, e) {
    if (mode == 'text') {
      var number = this.viewer.models.document.currentPage();
    } else {
      var pageId = this.viewer.$(e.target).closest('.DV-set').attr('data-id');
      var page = this.viewer.pageSet.pages[pageId];
      var number = page.pageNumber;
      this.jump(page.index);
    }
    this.viewer.history.save(mode + '/p' + number);
  },
  // Click to open an annotation's permalink.
  permalinkAnnotation : function(e) {
    var id = this.viewer.$(e.target).closest('.DV-annotation').attr('data-id');
    var anno = this.viewer.models.annotations.getAnnotation(id);
    // Prefer the server-side id when the annotation has been persisted.
    var sid = anno.server_id || anno.id;
    if (this.viewer.state == 'ViewDocument') {
      this.viewer.pageSet.showAnnotation(anno);
      this.viewer.history.save('document/p' + anno.pageNumber + '/a' + sid);
    } else {
      this.viewer.history.save('annotation/a' + sid);
    }
  },
  // Resize the document column and shift the viewport by `diff` so the
  // visible content stays anchored.
  setDocHeight: function(height,diff) {
    this.elements.bar.css('height', height);
    this.elements.window[0].scrollTop += diff;
  },
  // Current viewport size; prefers window.innerHeight where available.
  getWindowDimensions: function(){
    var d = {
      height: window.innerHeight ? window.innerHeight : this.elements.browserWindow.height(),
      width: this.elements.browserWindow.width()
    };
    return d;
  },
// Is the given URL on a remote domain?
isCrossDomain : function(url) {
var match = url.match(this.HOST_EXTRACTOR);
return match && (match[1] != window.location.host);
},
  // Scroll the viewer window back to the very top.
  resetScrollState: function(){
    this.elements.window.scrollTop(0);
  },
  // Jump from an annotation listing to its page in the document view.
  gotoPage: function(e){
    e.preventDefault();
    var aid = this.viewer.$(e.target).parents('.DV-annotation').attr('rel').replace('aid-','');
    var annotation = this.models.annotations.getAnnotation(aid);
    var viewer = this.viewer;
    if(viewer.state !== 'ViewDocument'){
      this.models.document.setPageIndex(annotation.index);
      viewer.open('ViewDocument');
      // this.viewer.history.save('document/p'+(parseInt(annotation.index,10)+1));
    }
  },
  // Pop the document out into a dedicated full-screen viewer window,
  // encoding the current view state into the URL fragment.
  openFullScreen : function() {
    var doc = this.viewer.schema.document;
    var url = doc.canonicalURL.replace(/#\S+$/,"");
    var currentPage = this.models.document.currentPage();
    // construct url fragment based on current viewer state
    switch (this.viewer.state) {
      case 'ViewAnnotation':
        url += '#annotation/a' + this.viewer.activeAnnotationId; // default to the top of the annotations page.
        break;
      case 'ViewDocument':
        url += '#document/p' + currentPage;
        break;
      case 'ViewSearch':
        url += '#search/p' + currentPage + '/' + encodeURIComponent(this.elements.searchInput.val());
        break;
      case 'ViewText':
        url += '#text/p' + currentPage;
        break;
      case 'ViewThumbnails':
        url += '#pages/p' + currentPage; // need to set up a route to catch this.
        break;
    }
    window.open(url, "documentviewer", "toolbar=no,resizable=yes,scrollbars=no,status=no");
  },
// Determine the correct DOM page ordering for a given page index.
sortPages : function(pageIndex) {
if (pageIndex == 0 || pageIndex % 3 == 1) return ['p0', 'p1', 'p2'];
if (pageIndex % 3 == 2) return ['p1', 'p2', 'p0'];
if (pageIndex % 3 == 0) return ['p2', 'p0', 'p1'];
},
  // Register a named observer, replacing any existing registration.
  addObserver: function(observerName){
    this.removeObserver(observerName);
    this.viewer.observers.push(observerName);
  },
  // Remove every occurrence of the named observer.
  removeObserver: function(observerName){
    var observers = this.viewer.observers;
    for(var i = 0,len=observers.length;i<len;i++){
      if(observerName === observers[i]){
        observers.splice(i,1);
      }
    }
  },
  // Swap the viewer's top-level mode class (text/search/document/etc.).
  toggleContent: function(toggleClassName){
    this.elements.viewer.removeClass('DV-viewText DV-viewSearch DV-viewDocument DV-viewAnnotations DV-viewThumbnails').addClass('DV-'+toggleClassName);
  },
  // Scroll to a page (offset by `modifier` pixels when given) and update
  // the current-page model; optionally force a redraw of the page set.
  jump: function(pageIndex, modifier, forceRedraw){
    modifier = (modifier) ? parseInt(modifier, 10) : 0;
    var position = this.models.document.getOffset(parseInt(pageIndex, 10)) + modifier;
    this.elements.window[0].scrollTop = position;
    this.models.document.setPageIndex(pageIndex);
    if (forceRedraw) this.viewer.pageSet.redraw(true);
    if (this.viewer.state === 'ViewThumbnails') {
      this.viewer.thumbnails.highlightCurrentPage();
    }
  },
  // Scroll the viewer window by a relative delta in both axes.
  shift: function(argHash){
    var windowEl = this.elements.window;
    var scrollTopShift = windowEl.scrollTop() + argHash.deltaY;
    var scrollLeftShift = windowEl.scrollLeft() + argHash.deltaX;
    windowEl.scrollTop(scrollTopShift);
    windowEl.scrollLeft(scrollLeftShift);
  },
  // Snapshot of the viewer state: current page, zoom level, and view mode.
  getAppState: function(){
    var docModel = this.models.document;
    var currentPage = (docModel.currentIndex() == 0) ? 1 : docModel.currentPage();
    return { page: currentPage, zoom: docModel.zoomLevel, view: this.viewer.state };
  },
constructPages: function(){
var pages = [];
var totalPagesToCreate = (this.viewer.schema.data.totalPages < 3) ? this.viewer.schema.data.totalPages : 3;
var height = this.models.pages.height;
for (var i = 0; i < totalPagesToCreate; i++) {
pages.push(JST.pages({ pageNumber: i+1, pageIndex: i , pageImageSource: null, baseHeight: height }));
}
return pages.join('');
},
// Position the viewer on the page. For a full screen viewer, this means
// absolute from the current y offset to the bottom of the viewport.
positionViewer : function() {
    var offset = this.elements.viewer.position();
    // NOTE(review): 'right' is mirrored from the left offset — this assumes
    // symmetric horizontal margins; confirm that is the intent.
    this.elements.viewer.css({position: 'absolute', top: offset.top, bottom: 0, left: offset.left, right: offset.left});
},
// Detect an unsupported browser (IE <= 6) and, if found, replace the
// container with an "unsupported" notice. Returns true when unsupported.
unsupportedBrowser : function() {
    // NOTE(review): the version check is a *string* comparison, so e.g.
    // "10.0" <= "6.0" is also true — confirm newer IE is handled upstream.
    if (!(DV.jQuery.browser.msie && DV.jQuery.browser.version <= "6.0")) return false;
    DV.jQuery(this.viewer.options.container).html(JST.unsupported({viewer : this.viewer}));
    return true;
},
// Wire up every URL-hash route (document page, annotations, thumbnails,
// text view, entities and search) to its handler in this.events.
registerHashChangeEvents: function(){
    var events = this.events;
    var history = this.viewer.history;

    // Default route
    history.defaultCallback = _.bind(events.handleHashChangeDefault,this.events);

    // Handle page loading
    history.register(/document\/p(\d*)$/, _.bind(events.handleHashChangeViewDocumentPage,this.events));

    // Legacy NYT stuff
    history.register(/p(\d*)$/, _.bind(events.handleHashChangeLegacyViewDocumentPage,this.events));
    history.register(/p=(\d*)$/, _.bind(events.handleHashChangeLegacyViewDocumentPage,this.events));

    // Handle annotation loading in document view
    history.register(/document\/p(\d*)\/a(\d*)$/, _.bind(events.handleHashChangeViewDocumentAnnotation,this.events));

    // Handle annotation loading in annotation view
    history.register(/annotation\/a(\d*)$/, _.bind(events.handleHashChangeViewAnnotationAnnotation,this.events));

    // Handle loading of the pages view
    history.register(/pages$/, _.bind(events.handleHashChangeViewPages, events));

    // Handle page loading in text view
    history.register(/text\/p(\d*)$/, _.bind(events.handleHashChangeViewText,this.events));

    // Handle entity display requests.
    history.register(/entity\/p(\d*)\/(.*)\/(\d+):(\d+)$/, _.bind(events.handleHashChangeViewEntity,this.events));

    // Handle search requests
    history.register(/search\/p(\d*)\/(.*)$/, _.bind(events.handleHashChangeViewSearchRequest,this.events));
},
// Sets up the zoom slider to match the appropriate for the specified
// initial zoom level, and real document page sizes.
autoZoomPage: function() {
var windowWidth = this.elements.window.outerWidth(true);
var zoom;<|fim▁hole|> );
} else {
zoom = this.viewer.options.zoom;
}
// Setup ranges for auto-width zooming
var ranges = [];
if (zoom <= 500) {
var zoom2 = (zoom + 700) / 2;
ranges = [zoom, zoom2, 700, 850, 1000];
} else if (zoom <= 750) {
var zoom2 = ((1000 - 700) / 3) + zoom;
var zoom3 = ((1000 - 700) / 3)*2 + zoom;
ranges = [.66*zoom, zoom, zoom2, zoom3, 1000];
} else if (750 < zoom && zoom <= 850){
var zoom2 = ((1000 - zoom) / 2) + zoom;
ranges = [.66*zoom, 700, zoom, zoom2, 1000];
} else if (850 < zoom && zoom < 1000){
var zoom2 = ((zoom - 700) / 2) + 700;
ranges = [.66*zoom, 700, zoom2, zoom, 1000];
} else if (zoom >= 1000) {
zoom = 1000;
ranges = this.viewer.models.document.ZOOM_RANGES;
}
this.viewer.models.document.ZOOM_RANGES = ranges;
this.viewer.slider.slider({'value': parseInt(_.indexOf(ranges, zoom), 10)});
this.events.zoom(zoom);
},
// Open the default document view when the URL hash matched no route,
// then honor an explicitly requested note or page from the options.
handleInitialState: function(){
    var initialRouteMatch = this.viewer.history.loadURL(true);
    if(!initialRouteMatch) {
        var opts = this.viewer.options;
        this.viewer.open('ViewDocument');
        if (opts.note) {
            this.viewer.pageSet.showAnnotation(this.viewer.models.annotations.byId[opts.note]);
        } else if (opts.page) {
            // options use 1-based page numbers; jump() takes a 0-based index
            this.jump(opts.page - 1);
        }
    }
}
};<|fim▁end|> | if (this.viewer.options.zoom == 'auto') {
zoom = Math.min(
700,
windowWidth - (this.viewer.models.pages.REDUCED_PADDING * 2) |
<|file_name|>EncapsulateUI.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2017, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferScene
Gaffer.Metadata.registerNode(
GafferScene.Encapsulate,
"description",
"""
Encapsulates a portion of the scene by collapsing the hierarchy
and replacing it with a procedural which will be evaluated at
render time.
This has two primary uses :
- To optimise scene generation. Downstream nodes do not see
the encapsulated locations, so do not spend time processing
them.
- To enable high-level instancing of hierarchies. If multiple
copies of the encapsulated procedural are made by the
downstream network, then the procedural itself can be instanced
at render time. This works no matter how the copies are
made, but typically the Instancer or Duplicate nodes would
be the most common method of copying the procedural.
> Note : Encapsulation currently has some limitations<|fim▁hole|> >
> - Motion blur options are taken from the globals at the
> point of Encapsulation, not the downstream globals
> at the point of rendering.
> - Motion blur attributes are not inherited - only
> attributes within the encapsulate hierarchy are
> considered.
""",
)<|fim▁end|> | |
<|file_name|>optionsmodel.cpp<|end_file_name|><|fim▁begin|>#include "optionsmodel.h"
#include "bitcoinunits.h"
#include <QSettings>
#include "init.h"
#include "walletdb.h"
#include "guiutil.h"
// Construct the options model and immediately load persisted settings
// (and forward the shared ones to the core as soft-set arguments).
OptionsModel::OptionsModel(QObject *parent) :
    QAbstractListModel(parent)
{
    Init();
}
// Push the proxy configuration stored in QSettings into the network layer.
// Returns false when the proxy is disabled or misconfigured, in which case
// nothing is applied.
static bool ApplyProxySettings()
{
    QSettings settings;
    CService addrProxy(settings.value("addrProxy", "127.0.0.1:9050").toString().toStdString());
    int nSocksVersion(settings.value("nSocksVersion", 5).toInt());
    // proxy disabled: leave the network layer untouched
    // (the old code also reset the two locals here — dead stores, removed)
    if (!settings.value("fUseProxy", false).toBool())
        return false;
    // a SOCKS proxy was requested but the stored address is unusable
    if (nSocksVersion && !addrProxy.IsValid())
        return false;
    if (!IsLimited(NET_IPV4))
        SetProxy(NET_IPV4, addrProxy, nSocksVersion);
    if (nSocksVersion > 4) {
#ifdef USE_IPV6
        if (!IsLimited(NET_IPV6))
            SetProxy(NET_IPV6, addrProxy, nSocksVersion);
#endif
        // SOCKS5 can also resolve hostnames through the proxy
        SetNameProxy(addrProxy, nSocksVersion);
    }
    return true;
}
// Load all GUI options from QSettings into the member variables, falling
// back to sensible defaults, and soft-set the options shared with the core
// (so command-line flags still win over saved GUI settings).
void OptionsModel::Init()
{
    QSettings settings;
    nDefaultCurrency = settings.value("nDefaultColor", DEFAULT_COLOR).toInt();
    // never leave the default currency unset
    if (nDefaultCurrency == BREAKOUT_COLOR_NONE)
    {
        nDefaultCurrency = DEFAULT_COLOR;
    }
    nDisplayUnitBrostake = settings.value("nDisplayUnitBrostake", BitcoinUnits::BTC).toInt();
    nDisplayUnitBrocoin = settings.value("nDisplayUnitBrocoin", BitcoinUnits::BTC).toInt();
    bDisplayAddresses = settings.value("bDisplayAddresses", false).toBool();
    bDisplayGenerated = settings.value("bDisplayGenerated", false).toBool();
    fMinimizeToTray = settings.value("fMinimizeToTray", false).toBool();
    fMinimizeOnClose = settings.value("fMinimizeOnClose", false).toBool();
    fCoinControlFeatures = settings.value("fCoinControlFeatures", false).toBool();
    // per-currency fee and reserve values (qint64 satoshis)
    vTransactionFee[BREAKOUT_COLOR_BROSTAKE] = settings.value("nTransactionFeeBrostake").toLongLong();
    vTransactionFee[BREAKOUT_COLOR_BROCOIN] = settings.value("nTransactionFeeBrocoin").toLongLong();
    vReserveBalance[BREAKOUT_COLOR_BROSTAKE] = settings.value("nReserveBalance").toLongLong();
    language = settings.value("language", "").toString();
    // These are shared with core Bitcoin; we want
    // command-line options to override the GUI settings:
    if (settings.contains("fUseUPnP"))
        SoftSetBoolArg("-upnp", settings.value("fUseUPnP").toBool());
    if (settings.contains("addrProxy") && settings.value("fUseProxy").toBool())
        SoftSetArg("-proxy", settings.value("addrProxy").toString().toStdString());
    if (settings.contains("nSocksVersion") && settings.value("fUseProxy").toBool())
        SoftSetArg("-socks", settings.value("nSocksVersion").toString().toStdString());
    if (settings.contains("detachDB"))
        SoftSetBoolArg("-detachdb", settings.value("detachDB").toBool());
    if (!language.isEmpty())
        SoftSetArg("-lang", language.toStdString());
}
// One-time migration: move GUI options previously stored in the old
// wallet.dat into QSettings. Returns false if the migration already ran.
bool OptionsModel::Upgrade()
{
    QSettings settings;
    // guard flag set once the import has happened
    if (settings.contains("bImportFinished"))
        return false; // Already upgraded
    settings.setValue("bImportFinished", true);
    // Move settings from old wallet.dat (if any):
    CWalletDB walletdb(strWalletFileName);
    // integer-valued options: read from wallet.dat, copy to QSettings, erase
    QList<QString> intOptions;
    intOptions << "nDisplayUnitBrostake" << "nDisplayUnitBrocoin"
               << "nTransactionFeeBrostake" << "nTransactionFeeBrocoin"
               << "nReserveBalance" << "nDefaultColor";
    foreach(QString key, intOptions)
    {
        int value = 0;
        if (walletdb.ReadSetting(key.toStdString(), value))
        {
            settings.setValue(key, value);
            walletdb.EraseSetting(key.toStdString());
        }
    }
    // boolean-valued options, same treatment
    QList<QString> boolOptions;
    boolOptions << "bDisplayAddresses" <<
        "bDisplayGenerated" <<
        "fMinimizeToTray" <<
        "fMinimizeOnClose" <<
        "fUseProxy" << "fUseUPnP";
    foreach(QString key, boolOptions)
    {
        bool value = false;
        if (walletdb.ReadSetting(key.toStdString(), value))
        {
            settings.setValue(key, value);
            walletdb.EraseSetting(key.toStdString());
        }
    }
    // the proxy address changed type across releases; try the new type
    // first and fall back to the old one on a parse failure
    try
    {
        CAddress addrProxyAddress;
        if (walletdb.ReadSetting("addrProxy", addrProxyAddress))
        {
            settings.setValue("addrProxy", addrProxyAddress.ToStringIPPort().c_str());
            walletdb.EraseSetting("addrProxy");
        }
    }
    catch (std::ios_base::failure &e)
    {
        // 0.6.0rc1 saved this as a CService, which causes failure when parsing as a CAddress
        CService addrProxy;
        if (walletdb.ReadSetting("addrProxy", addrProxy))
        {
            settings.setValue("addrProxy", addrProxy.ToStringIPPort().c_str());
            walletdb.EraseSetting("addrProxy");
        }
    }
    // re-apply proxies and reload everything we just migrated
    ApplyProxySettings();
    Init();
    return true;
}
// Flat list model: one row per option ID, regardless of parent.
int OptionsModel::rowCount(const QModelIndex & parent) const
{
    return OptionIDRowCount;
}
// Read a single option, keyed by index.row() (one of the OptionID values).
// Only Qt::EditRole is supported; everything else yields an empty QVariant.
// Proxy values are read back from the live network layer where possible.
QVariant OptionsModel::data(const QModelIndex & index, int role) const
{
    if(role == Qt::EditRole)
    {
        QSettings settings;
        switch(index.row())
        {
        case StartAtStartup:
            return QVariant(GUIUtil::GetStartOnSystemStartup());
        case MinimizeToTray:
            return QVariant(fMinimizeToTray);
        case MapPortUPnP:
            return settings.value("fUseUPnP", GetBoolArg("-upnp", true));
        case MinimizeOnClose:
            return QVariant(fMinimizeOnClose);
        case ProxyUse:
            return settings.value("fUseProxy", false);
        case ProxyIP: {
            // prefer the currently-applied proxy; fall back to localhost
            proxyType proxy;
            if (GetProxy(NET_IPV4, proxy))
                return QVariant(QString::fromStdString(proxy.first.ToStringIP()))
;
            else
                return QVariant(QString::fromStdString("127.0.0.1"));
        }
        case ProxyPort: {
            proxyType proxy;
            if (GetProxy(NET_IPV4, proxy))
                return QVariant(proxy.first.GetPort());
            else
                return QVariant(9050);
        }
        case ProxySocksVersion:
            return settings.value("nSocksVersion", 5);
        case FeeBrostake:
            return QVariant((qint64) vTransactionFee[BREAKOUT_COLOR_BROSTAKE]);
        case FeeBrocoin:
            return QVariant((qint64) vTransactionFee[BREAKOUT_COLOR_BROCOIN]);
        case ReserveBalance:
            return QVariant((qint64) vReserveBalance[BREAKOUT_COLOR_BROSTAKE]);
        case DefaultColor:
            return QVariant(nDefaultCurrency);
        case DisplayUnitBrostake:
            return QVariant(nDisplayUnitBrostake);
        case DisplayUnitBrocoin:
            return QVariant(nDisplayUnitBrocoin);
        case DisplayAddresses:
            return QVariant(bDisplayAddresses);
        case DisplayGenerated:
            return QVariant(bDisplayGenerated);
        case DetachDatabases:
            return QVariant(bitdb.GetDetach());
        case Language:
            return settings.value("language", "");
        case CoinControlFeatures:
            return QVariant(fCoinControlFeatures);
        default:
            return QVariant();
        }
    }
    return QVariant();
}
// Write a single option identified by index.row(): update the member,
// persist it to QSettings, apply side effects (proxy/UPnP) and emit the
// matching change signal. Returns false when a value could not be applied.
bool OptionsModel::setData(const QModelIndex & index, const QVariant & value, int role)
{
    bool successful = true; /* set to false on parse error */
    if(role == Qt::EditRole)
    {
        QSettings settings;
        switch(index.row())
        {
        case StartAtStartup:
            successful = GUIUtil::SetStartOnSystemStartup(value.toBool());
            break;
        case MinimizeToTray:
            fMinimizeToTray = value.toBool();
            settings.setValue("fMinimizeToTray", fMinimizeToTray);
            break;
        case MapPortUPnP:
            fUseUPnP = value.toBool();
            settings.setValue("fUseUPnP", fUseUPnP);
            // (re)start the UPnP port mapping with the new flag
            MapPort();
            break;
        case MinimizeOnClose:
            fMinimizeOnClose = value.toBool();
            settings.setValue("fMinimizeOnClose", fMinimizeOnClose);
            break;
        case ProxyUse:
            settings.setValue("fUseProxy", value.toBool());
            ApplyProxySettings();
            break;
        case ProxyIP: {
            // merge the new IP into the currently-applied proxy (keep port)
            proxyType proxy;
            proxy.first = CService("127.0.0.1", 9050);
            GetProxy(NET_IPV4, proxy);
            CNetAddr addr(value.toString().toStdString());
            proxy.first.SetIP(addr);
            settings.setValue("addrProxy", proxy.first.ToStringIPPort().c_str());
            successful = ApplyProxySettings();
        }
        break;
        case ProxyPort: {
            // merge the new port into the currently-applied proxy (keep IP)
            proxyType proxy;
            proxy.first = CService("127.0.0.1", 9050);
            GetProxy(NET_IPV4, proxy);
            proxy.first.SetPort(value.toInt());
            settings.setValue("addrProxy", proxy.first.ToStringIPPort().c_str());
            successful = ApplyProxySettings();
        }
        break;
        case ProxySocksVersion: {
            proxyType proxy;
            proxy.second = 5;
            GetProxy(NET_IPV4, proxy);
            proxy.second = value.toInt();
            settings.setValue("nSocksVersion", proxy.second);
            successful = ApplyProxySettings();
        }
        break;
        case FeeBrostake:
            vTransactionFee[BREAKOUT_COLOR_BROSTAKE] = value.toLongLong();
            settings.setValue("nTransactionFeeBrostake",
                              (qint64) vTransactionFee[BREAKOUT_COLOR_BROSTAKE]);
            emit transactionFeeChangedBrostake(vTransactionFee[BREAKOUT_COLOR_BROSTAKE]);
            break;
        case FeeBrocoin:
            vTransactionFee[BREAKOUT_COLOR_BROCOIN] = value.toLongLong();
            settings.setValue("nTransactionFeeBrocoin",
                              (qint64) vTransactionFee[BREAKOUT_COLOR_BROCOIN]);
            // NOTE(review): this emits the *Brostake* signal with the Brocoin
            // fee — looks like a copy-paste slip; check the header for a
            // transactionFeeChangedBrocoin signal before changing it.
            emit transactionFeeChangedBrostake(vTransactionFee[BREAKOUT_COLOR_BROCOIN]);
            break;
        case ReserveBalance:
            vReserveBalance[BREAKOUT_COLOR_BROSTAKE] = value.toLongLong();
            settings.setValue("nReserveBalance",
                              (qint64) vReserveBalance[BREAKOUT_COLOR_BROSTAKE]);
            emit reserveBalanceChanged(vReserveBalance[BREAKOUT_COLOR_BROSTAKE]);
            break;
        case DefaultColor:
            nDefaultCurrency = value.toInt();
            settings.setValue("nDefaultColor", nDefaultCurrency);
            emit defaultColorChanged(nDefaultCurrency);
            break;
        case DisplayUnitBrostake:
            nDisplayUnitBrostake = value.toInt();
            settings.setValue("nDisplayUnitBrostake", nDisplayUnitBrostake);
            emit displayUnitChangedBrostake(nDisplayUnitBrostake);
            break;
        case DisplayUnitBrocoin:
            nDisplayUnitBrocoin = value.toInt();
            settings.setValue("nDisplayUnitBrocoin", nDisplayUnitBrocoin);
            emit displayUnitChangedBrocoin(nDisplayUnitBrocoin);
            break;
        case DisplayAddresses:
            bDisplayAddresses = value.toBool();
            settings.setValue("bDisplayAddresses", bDisplayAddresses);
            break;
        case DisplayGenerated:
            bDisplayGenerated = value.toBool();
            settings.setValue("bDisplayGenerated", bDisplayGenerated);
            break;
        case DetachDatabases: {
            bool fDetachDB = value.toBool();
            bitdb.SetDetach(fDetachDB);
            settings.setValue("detachDB", fDetachDB);
        }
        break;
        case Language:
            // takes effect after restart; Init() soft-sets "-lang"
            settings.setValue("language", value);
            break;
        case CoinControlFeatures: {
            fCoinControlFeatures = value.toBool();
            settings.setValue("fCoinControlFeatures", fCoinControlFeatures);
            emit coinControlFeaturesChanged(fCoinControlFeatures);
        }
        break;
        default:
            break;
        }
    }
    emit dataChanged(index, index);
    return successful;
}
// brostake and brocoin are the only currencies that have fees
// Return the user-configured transaction fee for BroStake.
qint64 OptionsModel::getTransactionFeeBrostake()
{
    return vTransactionFee[BREAKOUT_COLOR_BROSTAKE];
}
// Return the user-configured transaction fee for BroCoin.
qint64 OptionsModel::getTransactionFeeBrocoin()
{
    // Fixed: previously returned the BroStake fee (copy-paste error);
    // Init()/setData() maintain a separate BROCOIN entry in this array.
    return vTransactionFee[BREAKOUT_COLOR_BROCOIN];
}
// Brostake is the only staking currency
// Return the balance the user keeps out of staking for BroStake.
qint64 OptionsModel::getReserveBalance()
{
    return vReserveBalance[BREAKOUT_COLOR_BROSTAKE];
}
// Whether the advanced coin-control UI is enabled.
bool OptionsModel::getCoinControlFeatures()
{
    return fCoinControlFeatures;
}
// Whether minimizing should hide the window to the system tray.
bool OptionsModel::getMinimizeToTray()
{
    return fMinimizeToTray;
}
// Whether the close button minimizes instead of quitting.
bool OptionsModel::getMinimizeOnClose()
{
    return fMinimizeOnClose;
}
// The currency "color" selected as the default (never BREAKOUT_COLOR_NONE).
int OptionsModel::getDefaultColor()
{
    return nDefaultCurrency;
}
<|fim▁hole|> return nDisplayUnitBrostake;
}
// Display unit (BitcoinUnits value) configured for BroCoin.
int OptionsModel::getDisplayUnitBrocoin()
{
    return nDisplayUnitBrocoin;
}
// Map a currency color to the display unit configured for it.
// Colors without their own preference share the default BTC-style unit.
int OptionsModel::getDisplayUnit(int nColor)
{
    if (nColor == BREAKOUT_COLOR_BROSTAKE)
        return getDisplayUnitBrostake();
    if (nColor == BREAKOUT_COLOR_BROCOIN)
        return getDisplayUnitBrocoin();
    // BREAKOUT_COLOR_SISCOIN and any unrecognised color fall through here
    return BitcoinUnits::BTC;
}
// Whether addresses are shown alongside labels in transaction lists.
bool OptionsModel::getDisplayAddresses()
{
    return bDisplayAddresses;
}
// Whether generated (mined/staked) transactions are shown.
bool OptionsModel::getDisplayGenerated()
{
    return bDisplayGenerated;
}
{ |
<|file_name|>RideNavigator.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010 Mark Liversedge ([email protected])
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc., 51
* Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "Athlete.h"
#include "Context.h"
#include "Colors.h"
#include "RideCache.h"
#include "RideCacheModel.h"
#include "RideItem.h"
#include "RideNavigator.h"
#include "RideNavigatorProxy.h"
#include "SearchFilterBox.h"
#include "TabView.h"
#include "HelpWhatsThis.h"
#include <QtGui>
#include <QString>
#include <QTreeView>
#include <QStyle>
#include <QStyleFactory>
#include <QScrollBar>
// Build the ride list widget: chain filter -> group -> sort proxy models
// on top of the athlete's ride cache, create and configure the tree view,
// restore the saved column layout and wire up every signal we react to.
// 'mainwindow' is true for the sidebar ride list; false when embedded in a
// diary chart (that variant gets its own search box and resize margins).
RideNavigator::RideNavigator(Context *context, bool mainwindow) : context(context), active(false), _groupBy(-1)
{
    // get column headings
    // default column layouts etc
    _columns = QString(tr("*|Workout Code|Date|"));
    _widths = QString("0|100|100|");
    _sortByIndex = 2;
    _sortByOrder = 0;
    currentColumn = -1;
    this->mainwindow = mainwindow;
    _groupBy = -1;
    fontHeight = QFontMetrics(QFont()).height();
    ColorEngine ce(context);
    reverseColor = ce.reverseColor;
    currentItem = NULL;
    init = false;

    mainLayout = new QVBoxLayout(this);
    mainLayout->setSpacing(0);
    if (mainwindow) mainLayout->setContentsMargins(0,0,0,0);
    else mainLayout->setContentsMargins(2,2,2,2); // so we can resize!

    // model chain: ride cache -> search filter -> grouping -> sorting
    searchFilter = new SearchFilter(this);
    searchFilter->setSourceModel(context->athlete->rideCache->model()); // filter out/in search results

    groupByModel = new GroupByModel(this);
    groupByModel->setSourceModel(searchFilter);

    sortModel = new BUGFIXQSortFilterProxyModel(this);
    sortModel->setSourceModel(groupByModel);
    sortModel->setDynamicSortFilter(true);

    if (!mainwindow) {
        searchFilterBox = new SearchFilterBox(this, context, false);
        mainLayout->addWidget(searchFilterBox);
        HelpWhatsThis *searchHelp = new HelpWhatsThis(searchFilterBox);
        searchFilterBox->setWhatsThis(searchHelp->getWhatsThisText(HelpWhatsThis::SearchFilterBox));
    }

    // get setup
    tableView = new RideTreeView;
    delegate = new NavigatorCellDelegate(this);
    tableView->setAnimated(true);
    tableView->setItemDelegate(delegate);
    tableView->setModel(sortModel);
    tableView->setSortingEnabled(true);
    tableView->setAlternatingRowColors(false);
    tableView->setEditTriggers(QAbstractItemView::NoEditTriggers); // read-only
    mainLayout->addWidget(tableView);
    tableView->expandAll();
    tableView->header()->setCascadingSectionResizes(true); // easier to resize this way
    tableView->setContextMenuPolicy(Qt::CustomContextMenu);
    tableView->header()->setStretchLastSection(false);
    tableView->header()->setMinimumSectionSize(0);
    tableView->header()->setFocusPolicy(Qt::NoFocus);
#ifdef Q_OS_WIN
    QStyle *cde = QStyleFactory::create(OS_STYLE);
    tableView->verticalScrollBar()->setStyle(cde);
#endif
#ifdef Q_OS_MAC
    tableView->header()->setSortIndicatorShown(false); // blue looks nasty
    tableView->setAttribute(Qt::WA_MacShowFocusRect, 0);
#endif
    tableView->installEventFilter(this);
    tableView->viewport()->installEventFilter(this);
    tableView->setMouseTracking(true);
    tableView->setFrameStyle(QFrame::NoFrame);
    tableView->setAcceptDrops(true);
    tableView->setColumnWidth(1, 100);

    HelpWhatsThis *helpTableView = new HelpWhatsThis(tableView);
    if (mainwindow)
        tableView->setWhatsThis(helpTableView->getWhatsThisText(HelpWhatsThis::SideBarRidesView_Rides));
    else
        tableView->setWhatsThis(helpTableView->getWhatsThisText(HelpWhatsThis::ChartDiary_Navigator));

    // good to go
    tableView->show();
    resetView();

    // refresh when config changes (metric/imperial?)
    connect(context, SIGNAL(configChanged(qint32)), this, SLOT(configChanged(qint32)));
    // refresh when rides added/removed
    connect(context, SIGNAL(rideAdded(RideItem*)), this, SLOT(refresh()));
    connect(context, SIGNAL(rideDeleted(RideItem*)), this, SLOT(rideDeleted(RideItem*)));
    // user selected a ride on the ride list, we should reflect that too..
    connect(tableView, SIGNAL(rowSelected(QItemSelection)), this, SLOT(selectionChanged(QItemSelection)));
    // user hit return or double clicked a ride !
    connect(tableView, SIGNAL(doubleClicked(QModelIndex)), this, SLOT(selectRide(QModelIndex)));
    // user moved columns
    connect(tableView->header(), SIGNAL(sectionMoved(int,int,int)), this, SLOT(columnsChanged()));
    connect(tableView->header(), SIGNAL(sectionResized(int,int,int)), this, SLOT(columnsResized(int, int, int)));
    // user sorted by column
    connect(tableView->header(), SIGNAL(sortIndicatorChanged(int, Qt::SortOrder)), this, SLOT(cursorRide()));
    connect(tableView,SIGNAL(customContextMenuRequested(const QPoint &)), this, SLOT(showTreeContextMenuPopup(const QPoint &)));
    connect(tableView->header(), SIGNAL(sortIndicatorChanged(int,Qt::SortOrder)), this, SLOT(setSortBy(int,Qt::SortOrder)));

    // repaint etc when background refresh is working
    connect(context, SIGNAL(refreshStart()), this, SLOT(backgroundRefresh()));
    connect(context, SIGNAL(refreshEnd()), this, SLOT(backgroundRefresh()));
    connect(context, SIGNAL(refreshUpdate(QDate)), this, SLOT(backgroundRefresh())); // we might miss 1st one

    if (!mainwindow) {
        connect(searchFilterBox, SIGNAL(searchResults(QStringList)), this, SLOT(searchStrings(QStringList)));
        connect(searchFilterBox, SIGNAL(searchClear()), this, SLOT(clearSearch()));
    }

    // we accept drag and drop operations
    setAcceptDrops(true);

    // lets go
    configChanged(CONFIG_APPEARANCE | CONFIG_NOTECOLOR | CONFIG_FIELDS);
}
// Tear down the explicitly-owned view and grouping model; the remaining
// children are parented to this widget so Qt deletes them automatically.
RideNavigator::~RideNavigator()
{
    delete tableView;
    delete groupByModel;
}
// Slot: react to configuration changes — recompute cached colors/fonts,
// re-apply the stylesheet and scrollbar policy, and rebuild the column
// layout when the metadata field configuration changed.
void
RideNavigator::configChanged(qint32 state)
{
    ColorEngine ce(context);
    fontHeight = QFontMetrics(QFont()).height();
    reverseColor = ce.reverseColor;

    // hide ride list scroll bar ?
#ifndef Q_OS_MAC
    tableView->setStyleSheet(TabView::ourStyleSheet());
    if (mainwindow) {
        if (appsettings->value(this, GC_RIDESCROLL, true).toBool() == false)
            tableView->setVerticalScrollBarPolicy(Qt::ScrollBarAlwaysOff);
        else
            tableView->setVerticalScrollBarPolicy(Qt::ScrollBarAsNeeded);
        //if (appsettings->value(this, GC_RIDEHEAD, true).toBool() == false)
        //tableView->header()->hide();
        //else
        tableView->header()->show();
        // style the header to match the plot background colors
        tableView->header()->setStyleSheet(
            QString("QHeaderView { background-color: %1; color: %2; }"
                    "QHeaderView::section { background-color: %1; color: %2; "
                    " border: 0px ; }")
            .arg(GColor(CPLOTBACKGROUND).name())
            .arg(GCColor::invertColor(GColor(CPLOTBACKGROUND)).name()));
    }
#endif

    // if the fields changed we need to reset indexes etc
    if (state & CONFIG_FIELDS) resetView();
    refresh();
}
// Slot: a ride was deleted — forget the cached current item when it
// points at the deleted ride, then rebuild the list.
void
RideNavigator::rideDeleted(RideItem*item)
{
    if (item == currentItem) {
        currentItem = NULL;
    }
    refresh();
}
// Rebuild the view after rides are added or the config changed:
// recompute column widths and re-position on the current ride.
void
RideNavigator::refresh()
{
    active=false;
    setWidth(geometry().width());
    cursorRide();
}
// Slot driven by the ride-cache background refresh: force the view to
// re-layout so freshly computed metrics are repainted.
void
RideNavigator::backgroundRefresh()
{
    tableView->doItemsLayout();
}
// Recompute column widths when the widget itself is resized.
void
RideNavigator::resizeEvent(QResizeEvent*)
{
    // ignore if main window .. we get told to resize
    // by the splitter mover
    if (mainwindow) return;

    setWidth(geometry().width());
}
// Rebuild the column configuration from the persisted _columns/_widths
// strings: map technical column names to translated display names, hide
// everything, then show/position/size only the configured columns and
// restore sorting and grouping.
void
RideNavigator::resetView()
{
    active = true;

    QList<QString> cols = _columns.split("|", QString::SkipEmptyParts);
    int widco = _widths.split("|", QString::SkipEmptyParts).count();

    // something is wrong with the config ? reset
    if (widco != cols.count() || widco <= 1) {
        _columns = QString(tr("*|Workout Code|Date|"));
        _widths = QString("0|100|100|");
        cols = _columns.split("|", QString::SkipEmptyParts);
    }

    // to account for translations
    QMap <QString, QString> internalNameMap;

    nameMap.clear();
    columnMetrics.clear();

    // add the standard columns to the map
    nameMap.insert("filename", tr("File"));
    internalNameMap.insert("File", tr("File"));
    nameMap.insert("timestamp", tr("Last updated"));
    internalNameMap.insert("Last updated", tr("Last updated"));
    nameMap.insert("ride_date", tr("Date"));
    internalNameMap.insert("Date", tr("Date"));
    nameMap.insert("ride_time", tr("Time")); // virtual columns show time from ride_date
    internalNameMap.insert("Time", tr("Time"));
    nameMap.insert("fingerprint", tr("Config Checksum"));
    internalNameMap.insert("Config Checksum", tr("Config Checksum"));

    // add metrics to the map
    const RideMetricFactory &factory = RideMetricFactory::instance();
    for (int i=0; i<factory.metricCount(); i++) {
        // strip any markup from the metric display name
        QString converted = QTextEdit(factory.rideMetric(factory.metricName(i))->name()).toPlainText();

        // from sql column name to friendly metric name
        nameMap.insert(QString("X%1").arg(factory.metricName(i)), converted);

        // from (english) internalName to (translated) Name
        internalNameMap.insert(factory.rideMetric(factory.metricName(i))->internalName(), converted);

        // from friendly metric name to metric pointer
        columnMetrics.insert(converted, factory.rideMetric(factory.metricName(i)));
    }

    // add metadata fields...
    SpecialFields sp; // all the special fields are in here...
    foreach(FieldDefinition field, context->athlete->rideMetadata()->getFields()) {
        if (!sp.isMetric(field.name) && (field.type < 5 || field.type == 7)) {
            nameMap.insert(QString("Z%1").arg(sp.makeTechName(field.name)), sp.displayName(field.name));
            internalNameMap.insert(field.name, sp.displayName(field.name));
        }
    }

    // cols list needs to be mapped to match logicalHeadings
    for (int i = 0; i < cols.count(); i++)
        cols[i] = internalNameMap.value(cols[i], cols[i]);

    logicalHeadings.clear();

    tableView->reset();
    tableView->header()->reset();

    // setup the logical heading list
    for (int i=0; i<tableView->header()->count(); i++) {
        QString friendly, techname = sortModel->headerData(i, Qt::Horizontal).toString();
        if ((friendly = nameMap.value(techname, "unknown")) != "unknown") {
            sortModel->setHeaderData(i, Qt::Horizontal, friendly);
            logicalHeadings << friendly;
        } else
            logicalHeadings << techname;
    }

    // hide everything, we show what we want later
    for (int i=0; i<tableView->header()->count(); i++) {
        int index = tableView->header()->logicalIndex(i);
        tableView->setColumnHidden(index, true);
        tableView->setColumnWidth(index, 0);
    }

    // now re-order the columns according to the
    // prevailing preferences. They are listed in
    // the order we want them, column zero is the
    // group by column, so we leave that alone
    for(int i=1; i<cols.count(); i++) {
        tableView->header()->moveSection(tableView->header()->visualIndex(logicalHeadings.indexOf(cols[i])), i);
    }

    // initialise to whatever groupBy we want to start with
    tableView->sortByColumn(sortByIndex(), static_cast<Qt::SortOrder>(sortByOrder()));;

    //tableView->setColumnHidden(0, true);
    tableView->setColumnWidth(0,0);

    // set the column widths
    int columnnumber=0;
    foreach(QString size, _widths.split("|", QString::SkipEmptyParts)) {
        if (columnnumber >= cols.count()) break;
        int index = tableView->header()->logicalIndex(columnnumber);
        tableView->setColumnHidden(index, false);
        tableView->setColumnWidth(index, columnnumber ? size.toInt() : 0);
        columnnumber++;
    }

    setGroupByColumn();

    active = false;

    resizeEvent(NULL);

    // Select the current ride
    cursorRide();

    // get height
    tableView->doItemsLayout();

    columnsChanged();
}
// Slot: apply search results to the filter proxy, then re-fit columns.
void RideNavigator::searchStrings(QStringList list)
{
    searchFilter->setStrings(list);
    setWidth(geometry().width());
}
// Slot: drop the search filter and re-fit columns once the view settles.
void RideNavigator::clearSearch()
{
    searchFilter->clearStrings();
    QApplication::processEvents(); // repaint/resize list view - scrollbar..
    setWidth(geometry().width()); // before we update column sizes!
}
// Distribute the given widget width across the visible columns.
void RideNavigator::setWidth(int x)
{
    // use helper function
    setColumnWidth(x, false);
}
// make sure the columns are all neat and tidy when the ride navigator is shown
void
RideNavigator::showEvent(QShowEvent *)
{
    // mark as initialised; width fixing is handled elsewhere now
    init = true;
    //setWidth(geometry().width());
}
// routines called by the sidebar to let the user
// update the columns/grouping without using right-click
// Returns the currently visible column headings in visual order.
QStringList
RideNavigator::columnNames() const
{
    return visualHeadings;
}
// Group the ride list by the named column; an empty name clears grouping.
// Unknown column names are silently ignored.
void
RideNavigator::setGroupByColumnName(QString name)
{
    if (name == "") {
        noGroups();
        return;
    }

    int logical = logicalHeadings.indexOf(name);
    if (logical < 0) return;

    currentColumn = logical;
    setGroupByColumn();
}
// Slot: columns were added/removed/moved — recompute the layout.
void
RideNavigator::columnsChanged()
{
    // do the work - (column changed, but no "inWidget" column resize)
    calcColumnsChanged(false);
}
// Slot: a single column was resized interactively — recompute the layout.
void
RideNavigator::columnsResized(int logicalIndex, int oldSize, int newSize)
{
    // do the work - resize only
    calcColumnsChanged(true, logicalIndex, oldSize, newSize);
}
// Intercept events aimed at the ride table: pop up the header context
// menu, swallow clipboard/undo shortcuts, open a ride on Return/Enter and
// re-apply the configured column widths when the window becomes active.
bool
RideNavigator::eventFilter(QObject *object, QEvent *e)
{
    // not for the table?
    if (object != (QObject *)tableView) return false;

    // what happened?
    switch(e->type())
    {

    case QEvent::ContextMenu:
        {
            borderMenu(tableView->mapFromGlobal(QCursor::pos()));
            return true; // I'll take that thanks
        }
        break;

    case QEvent::KeyPress:
        {
            QKeyEvent *keyEvent = static_cast<QKeyEvent *>(e);
            if (keyEvent->modifiers() & Qt::ControlModifier) {

                // Ctrl+Key: consume edit/undo shortcuts so the view
                // never tries to act on them itself
                switch (keyEvent->key()) {

                case Qt::Key_C: // defacto standard for copy
                    return true;

                case Qt::Key_V: // defacto standard for paste
                    return true;

                case Qt::Key_X: // defacto standard for cut
                    return true;

                case Qt::Key_Y: // emerging standard for redo
                    return true;

                case Qt::Key_Z: // common standard for undo
                    return true;

                case Qt::Key_0:
                    return true;

                default:
                    return false;
                }

            } else {

                // Not Ctrl: Return/Enter opens the highlighted ride
                switch (keyEvent->key()) {

                case Qt::Key_Return:
                case Qt::Key_Enter:
                    selectRide(tableView->currentIndex());
                    return true;

                default:
                    return false;
                }
            }
        }
        break;

    case QEvent::WindowActivate:
        {
            active=true;
            // re-apply the saved per-column widths; the column index must
            // advance with each stored width (it was previously never
            // incremented, so every width was written to column 0 —
            // compare the matching loop in resetView()).
            int columnnumber=0;
            foreach(QString size, _widths.split("|", QString::SkipEmptyParts)) {
                tableView->setColumnWidth(columnnumber, size.toInt());
                columnnumber++;
            }
            active=false;
            setWidth(geometry().width()); // calculate width...
        }
        break;

    default:
        break;
    }
    return false;
}
// Show the column-heading context menu (remove column, column chooser,
// toggle grouping). Only shown when the click landed on the header area.
void
RideNavigator::borderMenu(const QPoint &pos)
{
    // Which column did we right click on?
    //
    // if not in the border then do nothing, this
    // context menu should only be shown when
    // the user right clicks on a column heading.
    int column=0;
    if (pos.y() <= tableView->header()->height())
        column = tableView->header()->logicalIndexAt(pos);
    else return; // not in the border

    QMenu menu(tableView);

    // reset visual headings first
    columnsChanged();

    // don't allow user to delete last column!
    // need to also include '*' column 0 wide in count hence 2 not 1
    if (visualHeadings.count() > 2) {
        QAction *delCol = new QAction(tr("Remove Column"), tableView);
        delCol->setEnabled(true);
        menu.addAction(delCol);
        connect(delCol, SIGNAL(triggered()), this, SLOT(removeColumn()));
    }

    QAction *insCol = new QAction(tr("Column Chooser"), tableView);
    insCol->setEnabled(true);
    menu.addAction(insCol);
    connect(insCol, SIGNAL(triggered()), this, SLOT(showColumnChooser()));

    // menu text reflects whether grouping is currently active
    QAction *toggleGroupBy = new QAction(_groupBy >= 0 ? tr("Do Not Show in Groups") : tr("Show In Groups"), tableView);
    toggleGroupBy->setEnabled(column!=1?true:false); // No group for Ride Time
    menu.addAction(toggleGroupBy);
    connect(toggleGroupBy, SIGNAL(triggered()), this, SLOT(setGroupByColumn()));

    // set current column...
    currentColumn = column;

    menu.exec(tableView->mapToGlobal(QPoint(pos.x(), pos.y())));
}
// Slot: toggle grouping on/off for the column the context menu was
// opened on (currentColumn is set in borderMenu()).
void
RideNavigator::setGroupByColumn()
{
    // toggle
    setGroupBy(_groupBy >= 0 ? -1 : currentColumn);

    // set proxy model
    groupByModel->setGroupBy(_groupBy);

    // lets expand column 0 for the groupBy heading
    for (int i=0; i < groupByModel->groupCount(); i++) {
        tableView->setFirstColumnSpanned (i, QModelIndex(), true);
    }

    // now show em
    tableView->expandAll();

    // reselect current ride - since selectionmodel
    // is changed by setGroupBy()
    cursorRide();
}
// Record the current sort criterion (section index and order) so the
// navigator state can be persisted and restored later. Does not apply
// the sort itself.
void
RideNavigator::setSortBy(int index, Qt::SortOrder order)
{
    _sortByOrder = static_cast<int>(order);
    _sortByIndex = index;
}
// Rebuild the visible-heading list and persist both the column names
// and their widths after any column change or resize.
void
RideNavigator::calcColumnsChanged(bool resized, int logicalIndex, int oldSize, int newSize ) {

    // double use - for "changing" and "only resizing" of the columns
    // re-entrancy guard: width updates below fire further resize
    // signals which would otherwise call straight back in here
    if (active == true) return;

    active = true;

    visualHeadings.clear(); // they have moved

    // get the names used, in current visual order
    for (int i=0; i<tableView->header()->count(); i++) {
        if (tableView->header()->isSectionHidden(tableView->header()->logicalIndex(i)) != true) {
            int index = tableView->header()->logicalIndex(i);
            visualHeadings << logicalHeadings[index];
        }
    }

    // write to config as a '|' separated string
    QString headings;
    foreach(QString name, visualHeadings)
        headings += name + "|";
    _columns = headings;

    // correct width and store result
    setColumnWidth(geometry().width(), resized, logicalIndex, oldSize, newSize); // calculate width...

    // get column widths (same '|' separated format, visible only)
    QString widths;
    for (int i=0; i<tableView->header()->count(); i++) {
        int index = tableView->header()->logicalIndex(i);
        if (tableView->header()->isSectionHidden(index) != true) {
            widths += QString("%1|").arg(tableView->columnWidth(index));
        }
    }
    _widths = widths;

    active = false;
}
// Fit the visible columns into the available width x. Two modes:
// !resized - stretch/shrink all columns proportionally to fill x;
// resized  - honour an interactive resize of one section, keeping a
//            20px minimum and making the last column absorb slack.
// Column 0 ('*', the group-by column) is always kept 0 wide.
void
RideNavigator::setColumnWidth(int x, bool resized, int logicalIndex, int oldWidth, int newWidth) {

    // double use - for use after any change (e.g. widget size,..) "changing" and "only resizing" of the columns
    if (init == false) return;

    active = true;

#if !defined (Q_OS_MAC) || (defined (Q_OS_MAC) && (QT_VERSION < 0x050000)) // on QT5 the scrollbars have no width
    if (tableView->verticalScrollBar()->isVisible())
        x -= tableView->verticalScrollBar()->width()
             + 0 ; // !! no longer account for content margins of 3,3,3,3 was + 6
#else // we're on a mac with QT5 .. so dodgy way of spotting preferences for scrollbars...

    // this is a nasty hack, to see if the 'always on' preference for scrollbars is set we
    // look at the scrollbar width which is 15 in this case (it is 16 when they 'appear' when
    // needed. No doubt this will change over time and need to be fixed by referencing the
    // Mac system preferences via an NSScroller - but that will be a massive hassle.
    if (tableView->verticalScrollBar()->isVisible() && tableView->verticalScrollBar()->width() == 15)
        x -= tableView->verticalScrollBar()->width() + 0 ;
#endif

    // take the margins into account top
    x -= mainLayout->contentsMargins().left() + mainLayout->contentsMargins().right();

    // ** NOTE **
    // When iterating over the section headings we
    // always use the tableview not the sortmodel
    // so we can skip over the virtual column 0
    // which is used to group by, is visible but
    // must have a width of 0. This is why all
    // the for loops start with i=1
    tableView->setColumnWidth(0,0); // in case use grabbed it

    // is it narrower than the headings?
    int headwidth=0;
    int n=0;
    for (int i=1; i<tableView->header()->count(); i++)
        if (tableView->header()->isSectionHidden(i) == false) {
            headwidth += tableView->columnWidth(i);
            n++;
        }

    if (!resized) {

        // headwidth is no, x is to-be width
        // we need to 'stretch' the sections
        // proportionally to fit into new
        // layout
        int setwidth=0;
        int newwidth=0;
        for (int i=1; i<tableView->header()->count(); i++) {
            if (tableView->header()->isSectionHidden(i) == false) {
                // keep each column's share of the total, clamp to 20px
                newwidth = (double)((((double)tableView->columnWidth(i)/(double)headwidth)) * (double)x);
                if (newwidth < 20) newwidth = 20;
                QString columnName = tableView->model()->headerData(i, Qt::Horizontal).toString();
                if (columnName == "*") newwidth = 0;
                tableView->setColumnWidth(i, newwidth);
                setwidth += newwidth;
            }
        }

        // UGH. Now account for the fact that the smaller columns
        // didn't take their fair share of a negative resize
        // so we need to snip off from the larger columns.
        if (setwidth != x) {
            // how many columns we got to snip from?
            int colsleft = 0;
            for (int i=1; i<tableView->header()->count(); i++)
                if (tableView->header()->isSectionHidden(i) == false && tableView->columnWidth(i)>20)
                    colsleft++;

            // run through ... again.. snipping off some pixels
            if (colsleft) {
                int snip = (setwidth-x) / colsleft; //could be negative too
                for (int i=1; i<tableView->header()->count(); i++) {
                    if (tableView->header()->isSectionHidden(i) == false && tableView->columnWidth(i)>20) {
                        tableView->setColumnWidth(i, tableView->columnWidth(i)-snip);
                        setwidth -= snip;
                    }
                }
            }
        }

        // tell the delegate how wide the painted area now is
        if (setwidth < x)
            delegate->setWidth(pwidth=setwidth);
        else
            delegate->setWidth(pwidth=x);

    } else {
        // columns are resized - for each affected column this function is called
        // and makes sure that
        // a) nothing gets smaller than 20 and
        // b) last section is not moved over the right border / does not fill the widget to the right border

        // first step: make sure that the current column got smaller than 20 by resizing
        if (newWidth < 20) {
            tableView->setColumnWidth(logicalIndex, oldWidth);
            // correct the headwidth by the added space
            headwidth += (oldWidth - newWidth);
        }

        // get the index of the most right column (since here all further resizing will start)
        int visIndex = 0;
        // find the most right visible column
        for (int i=1; i<tableView->header()->count(); i++) {
            if (tableView->header()->isSectionHidden(i) == false &&
                tableView->header()->visualIndex(i) > visIndex)
                visIndex = tableView->header()->visualIndex(i);
        }

        if (headwidth > x) {
            // now make sure that no column disappears right border of the table view
            // by taking the overlapping part "cut" from last column(s)
            int cut = headwidth - x;
            // now resize, but not smaller than 20 (from right to left of Visible Columns)
            while (cut >0 && visIndex > 0) {
                int logIndex = tableView->header()->logicalIndex(visIndex);
                int k = tableView->columnWidth(logIndex);
                if (k - cut >= 20) {
                    tableView->setColumnWidth(logIndex, k-cut);
                    cut = 0;
                } else {
                    tableView->setColumnWidth(logIndex, 20);
                    cut -= (k-20);
                }
                visIndex--;
            }
        } else {
            // since QT on fast mouse moves resizes more columns then expected
            // give all available space to the last visible column
            int logIndex = tableView->header()->logicalIndex(visIndex);
            int k = tableView->columnWidth(logIndex);
            tableView->setColumnWidth(logIndex, (k+x-headwidth));
        }
    }

    // make the scrollbars go away
    tableView->setHorizontalScrollBarPolicy(Qt::ScrollBarAlwaysOff);

    active = false;
}
//
// This function is called for every row in the ridecache
// and wants to know what group string or 'name' you want
// to put this row into. It is passed the heading value
// as a string, and the row value for this column.
//
// It should return a string that will be used in the first
// column tree to group rows together.
//
// It is intended to allow us to do clever groupings, such
// as grouping dates as 'This week', 'Last week' etc or
// Power values into zones etc.
//
// Describes the fixed grouping bands for one metric column. The
// low/high sentinel scheme is interpreted by GroupByModel::
// groupFromValue(): 0-x = upper bound only, x-0 = lower bound only,
// 0-0 = "no data", x-x = half-open range [low, high).
class groupRange {
    public:
    class range {
        public:
        double low, high;   // band limits (0 acts as a sentinel, see above)
        QString name;       // user-visible band name
        range(double low, double high, QString name) : low(low), high(high), name(name) {}
        range() : low(0), high(0), name("") {}
    };

    QString column; // column name
    QList<range> ranges; // list of ranges we can put them in
};
static QList<groupRange> groupRanges;
// Populate the static groupRanges table with the predefined bands for
// TSS, IF, VI, Duration and Distance. Called lazily, once, from
// groupFromValue(); always returns true so the caller can latch it.
bool
GroupByModel::initGroupRanges()
{
    groupRange::range add;
    groupRange addColumn;

    // TSS
    addColumn.column = "TSS";
    add = groupRange::range( 0.0, 0.0, tr("Zero or not present")); addColumn.ranges << add;
    add = groupRange::range( 0, 150, tr("Low Stress")); addColumn.ranges << add;
    add = groupRange::range( 150, 300, tr("Medium Stress")); addColumn.ranges << add;
    add = groupRange::range( 300, 450, tr("High Stress")); addColumn.ranges << add;
    add = groupRange::range( 450, 0.00, tr("Very High Stress")); addColumn.ranges << add;
    groupRanges << addColumn;
    addColumn.ranges.clear();

    // Intensity Factor
    addColumn.column = "IF";
    add = groupRange::range( 0.0, 0.0, tr("Zero or not present")); addColumn.ranges << add;
    add = groupRange::range( 0.0, 0.55, tr("Active Recovery")); addColumn.ranges << add;
    add = groupRange::range( 0.55, 0.75, tr("Endurance")); addColumn.ranges << add;
    add = groupRange::range( 0.75, 0.90, tr("Tempo")); addColumn.ranges << add;
    add = groupRange::range( 0.90, 1.05, tr("Threshold")); addColumn.ranges << add;
    add = groupRange::range( 1.05, 1.2, tr("VO2Max")); addColumn.ranges << add;
    add = groupRange::range( 1.2, 1.5, tr("Anaerobic Capacity")); addColumn.ranges << add;
    add = groupRange::range( 1.5, 0.0, tr("Maximal")); addColumn.ranges << add;
    groupRanges << addColumn;
    addColumn.ranges.clear();

    // Variability Index
    addColumn.column = "VI";
    add = groupRange::range( 0.0, 1.0, tr("Zero or not present")); addColumn.ranges << add;
    add = groupRange::range( 1.0, 1.05, tr("Isopower")); addColumn.ranges << add;
    add = groupRange::range( 1.05, 1.1, tr("Steady")); addColumn.ranges << add;
    add = groupRange::range( 1.1, 1.2, tr("Variable")); addColumn.ranges << add;
    add = groupRange::range( 1.2, 0.0, tr("Highly Variable")); addColumn.ranges << add;
    groupRanges << addColumn;
    addColumn.ranges.clear();

    // Duration (seconds)
    addColumn.column = "Duration";
    add = groupRange::range( 0.0, 3600.0, tr("Less than an hour")); addColumn.ranges << add;
    add = groupRange::range( 3600, 5400, tr("Less than 90 minutes")); addColumn.ranges << add;
    add = groupRange::range( 5400, 10800, tr("Less than 3 hours")); addColumn.ranges << add;
    add = groupRange::range( 10800, 18000, tr("Less than 5 hours")); addColumn.ranges << add;
    add = groupRange::range( 18000, 0.0, tr("More than 5 hours")); addColumn.ranges << add;
    groupRanges << addColumn;
    addColumn.ranges.clear();

    // Distance (km)
    addColumn.column = "Distance";
    add = groupRange::range( 0.0, 0.0, tr("Zero or not present")); addColumn.ranges << add;
    add = groupRange::range( 0.0, 40.0, tr("Short")); addColumn.ranges << add;
    add = groupRange::range( 40, 80.00, tr("Medium")); addColumn.ranges << add;
    add = groupRange::range( 80, 140, tr("Long")); addColumn.ranges << add;
    add = groupRange::range( 140, 0, tr("Very Long")); addColumn.ranges << add;
    groupRanges << addColumn;
    addColumn.ranges.clear();

    return true;
}
static bool _initGroupRanges = false;
// Perhaps a groupName function on the metrics would be useful
// Map one row's value in column headingName to the name of the group
// it belongs to. Tries, in order: the predefined bands from
// initGroupRanges(), quartile-of-rank grouping for other metrics,
// date bucketing ("This week" / "This month" / "Last month" / month),
// and finally the raw value itself (metadata / text columns).
QString
GroupByModel::groupFromValue(QString headingName, QString value, double rank, double count) const
{
    // lazily build the static band table, exactly once
    if (!_initGroupRanges)
        _initGroupRanges = initGroupRanges();

    // Check for predefined thresholds / zones / bands for this metric/column
    foreach (groupRange orange, groupRanges) {
        if (orange.column == headingName) {
            double number = value.toDouble();

            // use thresholds defined for this column/metric
            foreach(groupRange::range range, orange.ranges) {
                // 0-x is lower, x-0 is upper, 0-0 is no data and x-x is a range
                if (range.low == 0.0 && range.high == 0.0 && number == 0.0) return range.name;
                else if (range.high != 0.0 && range.low == 0.0 && number < range.high) return range.name;
                else if (range.low != 0.0 && range.high == 0.0 && number >= range.low) return range.name;
                else if (number < range.high && number >= range.low) return range.name;
            }
            return tr("Undefined");
        }
    }

    // Use upper quartile for anything left that is a metric
    if (rideNavigator->columnMetrics.value(headingName, NULL) != NULL) {

        double quartile = rank / count;

        if (value.toDouble() == 0) return QString(tr("Zero or not present"));
        else if (rank < 10) return QString(tr("Best 10"));
        else if (quartile <= 0.25) return QString (tr("Quartile 1: 0% - 25%"));
        else if (quartile <= 0.50) return QString (tr("Quartile 2: 25% - 50%"));
        else if (quartile <= 0.75) return QString (tr("Quartile 3: 50% - 75%"));
        else if (quartile <= 1) return QString (tr("Quartile 4: 75% - 100%"));

    } else {

        if (headingName == tr("Date")) {

            // get the date from value string
            QDateTime dateTime = QDateTime::fromString(value, Qt::ISODate);
            QDateTime today = QDateTime::currentDateTime();

            if (today.date().weekNumber() == dateTime.date().weekNumber()
                && today.date().year() == dateTime.date().year())
                return tr("This week");
            else if (today.date().month() == dateTime.date().month()
                && today.date().year() == dateTime.date().year())
                return tr("This month");
            else {
                // FIX: the original compared month()+1 within the *same*
                // year, which never matched across a year boundary (a
                // December ride viewed in January was never shown as
                // "Last month"). Use addMonths(-1) which handles the
                // year roll-over correctly.
                QDate lastMonth = today.date().addMonths(-1);
                if (lastMonth.month() == dateTime.date().month()
                    && lastMonth.year() == dateTime.date().year())
                    return tr("Last month");

                return dateTime.toString(tr("yyyy-MM (MMMM)"));
            }
        }

        // not a metric, i.e. metadata
        return value;
    }

    // if all else fails just return the value and group by
    // that. Which is a fair approach for text fields for example
    return value;
}
// Slot: hide the column the context menu was opened on (currentColumn
// is set in borderMenu()) and re-pack the remaining columns.
void
RideNavigator::removeColumn()
{
    active = true;
    tableView->setColumnHidden(currentColumn, true);
    active = false;

    setWidth(geometry().width()); // calculate width...
    columnsChanged(); // need to do after, just once
    // NOTE(review): columnsChanged() is deliberately called twice per
    // the original comments - confirm whether a single call suffices.
    columnsChanged(); // need to do after, and again
}
// Slot: pop up the modeless column chooser. Not a leak: the chooser
// sets Qt::WA_DeleteOnClose in its constructor so it deletes itself.
void
RideNavigator::showColumnChooser()
{
    ColumnChooser *selector = new ColumnChooser(logicalHeadings);
    selector->show();
}
// user selected a different ride somewhere else, we need to align with that
void
RideNavigator::setRide(RideItem*rideItem)
{
    // already showing it? nothing to do
    if (currentItem == rideItem) return;

    // walk every group / row looking for the row whose filename column
    // (column 3, DisplayRole) matches the requested ride
    for (int i=0; i<tableView->model()->rowCount(); i++) {

        QModelIndex group = tableView->model()->index(i,0,QModelIndex());
        for (int j=0; j<tableView->model()->rowCount(group); j++) {

            QString fileName = tableView->model()->data(tableView->model()->index(j,3, group), Qt::DisplayRole).toString();
            if (fileName == rideItem->fileName) {

                // we set current index to column 2 (date/time) since we can be guaranteed it is always show (all others are removable)
                // select the whole row and centre it in the view
                QItemSelection row(tableView->model()->index(j,0,group),
                                   tableView->model()->index(j,tableView->model()->columnCount()-1, group));
                tableView->selectionModel()->select(row, QItemSelectionModel::Rows | QItemSelectionModel::ClearAndSelect);
                tableView->selectionModel()->setCurrentIndex(tableView->model()->index(j,0,group), QItemSelectionModel::NoUpdate);
                tableView->scrollTo(tableView->model()->index(j,3,group), QAbstractItemView::PositionAtCenter);
                currentItem = rideItem;
                repaint();
                // NOTE(review): active is cleared here without ever being
                // set true in this function - confirm this is intentional.
                active = false;
                return;
            }
        }
    }
}
// Slot: the table selection changed. Resolve the newly selected row to
// its RideItem (so we do not re-select it later) and broadcast the
// selection to the rest of the application.
void
RideNavigator::selectionChanged(QItemSelection selected)
{
    // FIX: guard against an empty selection (e.g. when the selection is
    // cleared) - calling first() on an empty index list is undefined.
    if (selected.indexes().isEmpty()) return;

    QModelIndex ref = selected.indexes().first();
    // column 3 carries the ride's file name
    QModelIndex fileIndex = tableView->model()->index(ref.row(), 3, ref.parent());
    QString filename = tableView->model()->data(fileIndex, Qt::DisplayRole).toString();

    // lets make sure we know what we've selected, so we don't
    // select it twice
    foreach(RideItem *item, context->athlete->rideCache->rides()) {
        if (item->fileName == filename) {
            currentItem = item;
            break;
        }
    }

    // lets notify others
    context->athlete->selectRideFile(filename);
}
// Slot: a row was activated (Return/Enter or double click). Currently
// only resolves the filename; the actual selection is disabled.
void
RideNavigator::selectRide(const QModelIndex &index)
{
    // we don't use this at present, but hitting return
    // or double clicking a ride will cause this to get called....
    QModelIndex fileIndex = tableView->model()->index(index.row(), 3, index.parent()); // column 2 for filename ?
    QString filename = tableView->model()->data(fileIndex, Qt::DisplayRole).toString();

    // do nothing .. but maybe later do something ?
    //context->athlete->selectRideFile(filename);
}
// Scroll the view so that the currently selected ride is visible
// (used after grouping changes rebuild the selection model).
void
RideNavigator::cursorRide()
{
    if (currentItem == NULL) return;

    // find our ride and scroll to it
    for (int i=0; i<tableView->model()->rowCount(); i++) {

        QModelIndex group = tableView->model()->index(i,0,QModelIndex());
        for (int j=0; j<tableView->model()->rowCount(group); j++) {

            // NOTE(review): the filename is read here from column 2 via
            // Qt::UserRole+1, whereas setRide()/selectionChanged() read
            // column 3 with Qt::DisplayRole - confirm both locate the
            // same data.
            QString fileName = tableView->model()->data(tableView->model()->index(j,2, group), Qt::UserRole+1).toString();
            if (fileName == currentItem->fileName) {

                // we set current index to column 2 (date/time) since we can be guaranteed it is always show (all others are removable)
                tableView->scrollTo(tableView->model()->index(j,3,group));
                return;
            }
        }
    }
}
// Drag and drop columns from the chooser...
// Only drags carrying our private column-chooser payload are accepted.
void
RideNavigator::dragEnterEvent(QDragEnterEvent *event)
{
    if (event->mimeData()->data("application/x-columnchooser") != "")
        event->acceptProposedAction(); // whatever you wanna drop we will try and process!
    else
        event->ignore();
}
// A column name was dropped from the ColumnChooser: unpack it, make the
// column visible with a default width and move it to the front.
void
RideNavigator::dropEvent(QDropEvent *event)
{
    // unpack the column name (packed by ColumnChooser::buttonClicked())
    QByteArray rawData = event->mimeData()->data("application/x-columnchooser");
    QDataStream stream(&rawData, QIODevice::ReadOnly);
    stream.setVersion(QDataStream::Qt_4_6); // must match the writer's version
    QString name;
    stream >> name;

    // resolve the heading once (the original repeated this O(n) lookup
    // four times) and ignore names we do not know: indexOf() returns -1
    // and passing -1 on to the header/view would be invalid.
    int logical = logicalHeadings.indexOf(name);
    if (logical < 0) return;

    tableView->setColumnHidden(logical, false);
    tableView->setColumnWidth(logical, 50);
    tableView->header()->moveSection(tableView->header()->visualIndex(logical), 1);
    columnsChanged();
}
// Custom delegate that paints rides with calendar text, user colours
// and group headings. pwidth defaults to 300 until the navigator tells
// us the real paintable width via setWidth().
NavigatorCellDelegate::NavigatorCellDelegate(RideNavigator *rideNavigator, QObject *parent) :
    QItemDelegate(parent), rideNavigator(rideNavigator), pwidth(300)
{
}
// Editing functions are null since the model is read-only
QWidget *NavigatorCellDelegate::createEditor(QWidget *, const QStyleOptionViewItem &, const QModelIndex &) const { return NULL; }
void NavigatorCellDelegate::commitAndCloseEditor() { }
void NavigatorCellDelegate::setEditorData(QWidget *, const QModelIndex &) const { }
void NavigatorCellDelegate::updateEditorGeometry(QWidget *, const QStyleOptionViewItem &, const QModelIndex &) const {}
void NavigatorCellDelegate::setModelData(QWidget *, QAbstractItemModel *, const QModelIndex &) const { }
// returning true consumes the help event, suppressing default tooltips
bool NavigatorCellDelegate::helpEvent(QHelpEvent*, QAbstractItemView*, const QStyleOptionViewItem&, const QModelIndex&) { return true; }
// Rows that carry calendar text (stored under Qt::UserRole) are three
// text lines tall; every other row is a single line.
QSize NavigatorCellDelegate::sizeHint(const QStyleOptionViewItem & /*option*/, const QModelIndex &index) const
{
    QSize s;

    // NOTE(review): the first condition maps through groupByModel->
    // mapToSource() while the data() lookup below does not - confirm
    // both address the same model index.
    if (rideNavigator->groupByModel->mapToSource(rideNavigator->sortModel->mapToSource(index)) != QModelIndex() &&
        rideNavigator->groupByModel->data(rideNavigator->sortModel->mapToSource(index), Qt::UserRole).toString() != "") {
        s.setHeight((rideNavigator->fontHeight+2) * 3);
    } else s.setHeight(rideNavigator->fontHeight + 2);

    return s;
}
// anomalies are underlined in red, otherwise straight paintjob
void NavigatorCellDelegate::paint(QPainter *painter, const QStyleOptionViewItem &option,
const QModelIndex &index) const<|fim▁hole|>
// state of item
bool hover = option.state & QStyle::State_MouseOver;
bool selected = option.state & QStyle::State_Selected;
bool focus = option.state & QStyle::State_HasFocus;
//bool isRun = rideNavigator->tableView->model()->data(index, Qt::UserRole+2).toBool();
// format the cell depending upon what it is...
QString columnName = rideNavigator->tableView->model()->headerData(index.column(), Qt::Horizontal).toString();
const RideMetric *m;
QString value;
// are we a selected cell ? need to paint accordingly
//bool selected = false;
//if (rideNavigator->tableView->selectionModel()->selectedIndexes().count()) { // zero if no rides in list
//if (rideNavigator->tableView->selectionModel()->selectedIndexes().value(0).row() == index.row())
//selected = true;
//}
if ((m=rideNavigator->columnMetrics.value(columnName, NULL)) != NULL) {
// get double from sqlmodel
value = index.model()->data(index, Qt::DisplayRole).toString();
// get rid of 0 its ugly
if (value =="nan" || value == "0" || value == "0.0" || value == "0.00") value="";
} else {
// is this the ride date/time ?
value = index.model()->data(index, Qt::DisplayRole).toString();
if (columnName == tr("Date")) {
QDateTime dateTime = QDateTime::fromString(value, Qt::ISODate);
value = dateTime.toString(tr("MMM d, yyyy")); // same format as ride list
} else if (columnName == tr("Time")) {
QDateTime dateTime = QDateTime::fromString(value, Qt::ISODate);
value = dateTime.toString("hh:mm:ss"); // same format as ride list
} else if (columnName == tr("Last updated")) {
QDateTime dateTime;
dateTime.setTime_t(index.model()->data(index, Qt::DisplayRole).toInt());
value = dateTime.toString(tr("ddd MMM d, yyyy hh:mm")); // same format as ride list
}
}
QStyleOptionViewItem myOption = option;
// groupBy in bold please
if (columnName == "*") {
QFont enbolden = option.font;
enbolden.setWeight(QFont::Bold);
myOption.font = enbolden;
}
// normal render
bool isnormal=false;
QString calendarText = rideNavigator->tableView->model()->data(index, Qt::UserRole).toString();
QColor userColor = rideNavigator->tableView->model()->data(index, Qt::BackgroundRole).value<QBrush>().color();
if (userColor == QColor(1,1,1)) {
rideBG = false; // default so don't swap round...
isnormal = true; // just default so no bg or box
userColor = GColor(CPLOTMARKER);
}
// basic background
QBrush background = QBrush(GColor(CPLOTBACKGROUND));
// runs are darker
//if (isRun) {
//background.setColor(background.color().darker(150));
//userColor = userColor.darker(150);
//}
if (columnName != "*") {
myOption.displayAlignment = Qt::AlignLeft | Qt::AlignTop;
QRectF bigger(myOption.rect.x(), myOption.rect.y(), myOption.rect.width()+1, myOption.rect.height()+1);
if (hover) painter->fillRect(myOption.rect, QColor(Qt::lightGray));
else painter->fillRect(bigger, rideBG ? userColor : background);
// clear first
drawDisplay(painter, myOption, myOption.rect, ""); //added
// draw border of each cell
QPen rpen;
rpen.setWidth(1);
rpen.setColor(GColor(CPLOTGRID));
QPen isColor = painter->pen();
QFont isFont = painter->font();
painter->setPen(rpen);
painter->drawLine(0,myOption.rect.y(),rideNavigator->pwidth-1,myOption.rect.y());
painter->drawLine(0,myOption.rect.y()+myOption.rect.height(),rideNavigator->pwidth-1,myOption.rect.y()+myOption.rect.height());
painter->drawLine(0,myOption.rect.y()+myOption.rect.height(),0,myOption.rect.y()+myOption.rect.height());
painter->drawLine(rideNavigator->pwidth-1, myOption.rect.y(), rideNavigator->pwidth-1, myOption.rect.y()+myOption.rect.height());
// indent first column and draw all in plotmarker color
myOption.rect.setHeight(rideNavigator->fontHeight + 2); //added
myOption.font.setWeight(QFont::Bold);
QFont boldened = painter->font();
boldened.setWeight(QFont::Bold);
painter->setFont(boldened);
if (!selected) {
// not selected, so invert ride plot color
if (hover) painter->setPen(QColor(Qt::black));
else painter->setPen(rideBG ? rideNavigator->reverseColor : userColor);
} else if (!focus) { // selected but out of focus //
painter->setPen(QColor(Qt::black));
}
QRect normal(myOption.rect.x(), myOption.rect.y()+1, myOption.rect.width(), myOption.rect.height());
if (myOption.rect.x() == 0) {
// first line ?
QRect indented(myOption.rect.x()+5, myOption.rect.y()+1, myOption.rect.width()-5, myOption.rect.height());
painter->drawText(indented, value); //added
} else {
painter->drawText(normal, value); //added
}
painter->setPen(isColor);
painter->setFont(isFont);
// now get the calendar text to appear ...
if (calendarText != "") {
QRect high(myOption.rect.x()+myOption.rect.width() - 7, myOption.rect.y(), 7, (rideNavigator->fontHeight+2) * 3);
myOption.rect.setX(0);
myOption.rect.setY(myOption.rect.y() + rideNavigator->fontHeight + 2);//was +23
myOption.rect.setWidth(rideNavigator->pwidth);
myOption.rect.setHeight(rideNavigator->fontHeight * 2); //was 36
myOption.font.setPointSize(myOption.font.pointSize());
myOption.font.setWeight(QFont::Normal);
if (hover) painter->fillRect(myOption.rect, QColor(Qt::lightGray));
else painter->fillRect(myOption.rect, rideBG ? userColor : background.color());
drawDisplay(painter, myOption, myOption.rect, "");
myOption.rect.setX(10); // wider notes display
myOption.rect.setWidth(pwidth-20);// wider notes display
painter->setFont(myOption.font);
QPen isColor = painter->pen();
if (!selected) {
// not selected, so invert ride plot color
if (hover) painter->setPen(QPen(Qt::black));
else painter->setPen(rideBG ? rideNavigator->reverseColor : GCColor::invertColor(GColor(CPLOTBACKGROUND)));
}
painter->drawText(myOption.rect, Qt::AlignLeft | Qt::TextWordWrap, calendarText);
painter->setPen(isColor);
#if (defined (Q_OS_MAC) && (QT_VERSION >= 0x050000)) // on QT5 the scrollbars have no width
if (!selected && !rideBG && high.x()+12 > rideNavigator->geometry().width() && !isnormal) {
#else
if (!selected && !rideBG && high.x()+32 > rideNavigator->geometry().width() && !isnormal) {
#endif
painter->fillRect(high, userColor);
} else {
// border
QPen rpen;
rpen.setWidth(1);
rpen.setColor(GColor(CPLOTGRID));
QPen isColor = painter->pen();
QFont isFont = painter->font();
painter->setPen(rpen);
painter->drawLine(rideNavigator->pwidth-1, myOption.rect.y(), rideNavigator->pwidth-1, myOption.rect.y()+myOption.rect.height());
painter->setPen(isColor);
}
}
} else {
if (value != "") {
myOption.displayAlignment = Qt::AlignLeft | Qt::AlignBottom;
myOption.rect.setX(0);
myOption.rect.setHeight(rideNavigator->fontHeight + 2);
myOption.rect.setWidth(rideNavigator->pwidth);
painter->fillRect(myOption.rect, GColor(CPLOTBACKGROUND));
}
QPen isColor = painter->pen();
painter->setPen(QPen(GColor(CPLOTMARKER)));
myOption.palette.setColor(QPalette::WindowText, QColor(GColor(CPLOTMARKER))); //XXX
painter->drawText(myOption.rect, value);
painter->setPen(isColor);
}
}
// Small always-on-top tool window listing every available column as a
// button; pressing a button starts a drag that RideNavigator's
// dropEvent() turns into a visible column.
ColumnChooser::ColumnChooser(QList<QString>&logicalHeadings)
{
    // wipe away everything when you close please...
    setWindowTitle(tr("Column Chooser"));
    setAttribute(Qt::WA_DeleteOnClose);
    setWindowFlags(windowFlags() | Qt::WindowStaysOnTopHint | Qt::Tool);

    clicked = new QSignalMapper(this); // maps each button click event
    connect(clicked, SIGNAL(mapped(const QString &)), this, SLOT(buttonClicked(const QString &)));

    QVBoxLayout *us = new QVBoxLayout(this);
    us->setSpacing(0);
    us->setContentsMargins(0,0,0,0);

    scrollarea = new QScrollArea(this);
    us->addWidget(scrollarea);

    QWidget *but = new QWidget(this);
    buttons = new QVBoxLayout(but);
    buttons->setSpacing(0);
    buttons->setContentsMargins(0,0,0,0);

    QFont small;
    small.setPointSize(8);

    // one button per column, sorted alphabetically; the virtual
    // group-by column '*' is never offered
    QList<QString> buttonNames = logicalHeadings;
    qSort(buttonNames);

    foreach (QString column, buttonNames) {
        if (column == "*") continue;

        // setup button
        QPushButton *add = new QPushButton(column, this);
        add->setFont(small);
        add->setContentsMargins(0,0,0,0);
        buttons->addWidget(add);
        connect(add, SIGNAL(pressed()), clicked, SLOT(map()));
        clicked->setMapping(add, column);
    }
    scrollarea->setWidget(but);
    but->setFixedWidth(230);
    scrollarea->setFixedWidth(250);
    setFixedWidth(250);
}
// Slot: a column button was pressed - start a drag carrying the column
// name; RideNavigator::dropEvent() unpacks it on the other side.
void
ColumnChooser::buttonClicked(QString name)
{
    // setup the drag data
    QMimeData *mimeData = new QMimeData;

    // we need to pack into a byte array (since UTF8() conversion is not reliable in QT4.8 vs QT 5.3)
    QByteArray rawData;
    QDataStream stream(&rawData, QIODevice::WriteOnly);
    stream.setVersion(QDataStream::Qt_4_6); // must match the reader's version
    stream << name;

    // send raw
    mimeData->setData("application/x-columnchooser", rawData);

    // create a drag event (the drag object takes ownership of mimeData)
    QDrag *drag = new QDrag(this);
    drag->setMimeData(mimeData);
    drag->exec(Qt::MoveAction);
}
// Forward a context-menu request on the tree as a signal, adjusted for
// the header height, so interested widgets (the sidebar) can show it.
void
RideNavigator::showTreeContextMenuPopup(const QPoint &pos)
{
    // map to global does not take into account the height of the header (??)
    // so we take it off the result of map to global

    // in the past this called mainwindow routinesfor the menu -- that was
    // a bad design since it coupled the ride navigator with the gui
    // we emit signals now, which only the sidebar is interested in trapping
    // so the activity log for example doesn't have a context menu now
    emit customContextMenuRequested(tableView->mapToGlobal(pos+QPoint(0,tableView->header()->geometry().height())));
}
RideTreeView::RideTreeView()
{
#if (defined WIN32) && (QT_VERSION > 0x050000) && (QT_VERSION < 0x050301)
// don't allow ride drop on Windows with QT5 until 5.3.1 when they fixed the bug
#else
setDragDropMode(QAbstractItemView::InternalMove);
setDragEnabled(true);
setDragDropOverwriteMode(false);
setDropIndicatorShown(true);
#endif
#ifdef Q_OS_MAC
setAttribute(Qt::WA_MacShowFocusRect, 0);
#endif
}<|fim▁end|> | {
// paint background for user defined color ?
bool rideBG = appsettings->value(this,GC_RIDEBG,false).toBool(); |
<|file_name|>grayscale.rs<|end_file_name|><|fim▁begin|>#pragma version(1)
#pragma rs java_package_name(com.example.background)
rs_allocation in;<|fim▁hole|>
uint32_t width;
uint32_t height;
// Per-pixel kernel: replace each pixel's colour with the unweighted
// average of its red, green and blue channels; alpha passes through.
void root(const uchar4* v_in, uchar4* v_out, const void* usrData, uint32_t x, uint32_t y) {
    const int luma = (v_in->r + v_in->g + v_in->b) / 3;
    v_out->r = luma;
    v_out->g = luma;
    v_out->b = luma;
    v_out->a = v_in->a;
}
void filter() {
rsDebug("Processing image with dimensions for grayscale", width, height);
rsForEach(script, in, out);
}<|fim▁end|> | rs_allocation out;
rs_script script; |
<|file_name|>dump.py<|end_file_name|><|fim▁begin|># This file is part of Codetrawl
# Copyright (C) 2015 Nathaniel Smith <[email protected]>
# See file LICENSE.txt for license information.
"""Usage:
codetrawl.dump PATTERN FILE [FILE...]
where PATTERN is a Python format string like "{raw_url}", with allowed keys:
- service
- query
- repo
- path
- raw_url
- content
"""
import sys
import docopt
from .read import read_matches<|fim▁hole|> for match in read_matches(args["FILE"]):
sys.stdout.write(args["PATTERN"].format(**match))
sys.stdout.write("\n")<|fim▁end|> |
if __name__ == "__main__":
args = docopt.docopt(__doc__)
|
<|file_name|>make-release.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
make-release
~~~~~~~~~~~~
Helper script that performs a release. Does pretty much everything
automatically for us.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import os
import re
from datetime import datetime, date
from subprocess import Popen, PIPE
_date_clean_re = re.compile(r'(\d+)(st|nd|rd|th)')
def parse_changelog():
    """Parse the CHANGES file in the current directory.

    Returns a ``(version, release_date, codename)`` tuple for the first
    (most recent) release entry, or ``None`` when no entry matches.
    """
    with open('CHANGES') as f:
        lineiter = iter(f)
        for line in lineiter:
            match = re.search(r'^Version\s+(.*)', line.strip())
            if match is None:
                continue
            version = match.group(1).strip()
            # The heading must be underlined with '-' characters of the
            # same length, otherwise it is not a release heading.
            # next() replaces the Python-2-only lineiter.next().
            if next(lineiter).count('-') != len(match.group(0)):
                continue
            # skip blank lines between the heading and the release info
            while 1:
                change_info = next(lineiter).strip()
                if change_info:
                    break
            # Trailing inline flags like '(?i)' are an error on Python
            # 3.11+, so pass re.IGNORECASE explicitly instead.
            match = re.search(r'released on (\w+\s+\d+\w+\s+\d+)'
                              r'(?:, codename (.*))?', change_info,
                              re.IGNORECASE)
            if match is None:
                continue
            datestr, codename = match.groups()
            return version, parse_date(datestr), codename
def bump_version(version):
    """Return *version* with its last numeric component incremented.

    e.g. '1.2.3' -> '1.2.4'. Aborts via fail() when any component is
    not an integer.
    """
    try:
        # A list comprehension instead of map(): on Python 3 map()
        # returns an iterator, so parts[-1] += 1 would raise TypeError.
        parts = [int(p) for p in version.split('.')]
    except ValueError:
        fail('Current version is not numeric')
    parts[-1] += 1
    return '.'.join(str(p) for p in parts)
<|fim▁hole|> string = _date_clean_re.sub(r'\1', string)
return datetime.strptime(string, '%B %d %Y')
def set_filename_version(filename, version_number, pattern):
    """Rewrite ``<pattern> = '<old>'`` in *filename* to *version_number*.

    Aborts via fail() when the pattern is not found in the file.
    """
    changed = []

    def inject_version(match):
        # keep everything around the quoted version, swap the version
        before, old, after = match.groups()
        changed.append(True)
        return before + version_number + after

    with open(filename) as f:
        # '(?sm)' must lead the pattern: trailing inline flags raise
        # re.error on Python 3.11+.
        contents = re.sub(r"(?sm)^(\s*%s\s*=\s*')(.+?)(')" % pattern,
                          inject_version, f.read())

    if not changed:
        fail('Could not find %s in %s', pattern, filename)

    with open(filename, 'w') as f:
        f.write(contents)
def set_init_version(version):
    # Stamp *version* into __version__ in logbook/__init__.py.
    info('Setting __init__.py version to %s', version)
    set_filename_version('logbook/__init__.py', version, '__version__')
def set_setup_version(version):
    # Stamp *version* into the version assignment in setup.py.
    info('Setting setup.py version to %s', version)
    set_filename_version('setup.py', version, 'version')
def build_and_upload():
    # Build the sdist and upload it to PyPI; blocks until finished.
    Popen([sys.executable, 'setup.py', 'release', 'sdist', 'upload']).wait()
def fail(message, *args):
    # Print the printf-style formatted error to stderr and abort the
    # script (Python 2 print-to-stderr syntax).
    print >> sys.stderr, 'Error:', message % args
    sys.exit(1)
def info(message, *args):
    # printf-style status message on stderr (Python 2 print syntax).
    print >> sys.stderr, message % args
def get_git_tags():
    """Return the set of existing git tag names in the current repo."""
    return set(Popen(['git', 'tag'], stdout=PIPE).communicate()[0].splitlines())
def git_is_clean():
    """Return True when the working tree has no unstaged changes."""
    exit_code = Popen(['git', 'diff', '--quiet']).wait()
    return exit_code == 0
def make_git_commit(message, *args):
    """Commit all tracked changes with a printf-style formatted message."""
    message = message % args
    Popen(['git', 'commit', '-am', message]).wait()
def make_git_tag(tag):
    """Create a lightweight git tag named *tag*."""
    info('Tagging "%s"', tag)
    Popen(['git', 'tag', tag]).wait()
def main():
    # Work from the repository root (one level above this script).
    os.chdir(os.path.join(os.path.dirname(__file__), '..'))

    rv = parse_changelog()
    if rv is None:
        fail('Could not parse changelog')

    version, release_date, codename = rv
    # Post-release development version, e.g. '1.2.4-dev'.
    dev_version = bump_version(version) + '-dev'

    info('Releasing %s (codename %s, release date %s)',
         version, codename, release_date.strftime('%d/%m/%Y'))

    # Sanity checks: version not yet tagged, changelog dated today,
    # working tree clean.
    tags = get_git_tags()

    if version in tags:
        fail('Version "%s" is already tagged', version)
    if release_date.date() != date.today():
        fail('Release date is not today (%s != %s)' % (release_date.date(), date.today()))
    if not git_is_clean():
        fail('You have uncommitted changes in git')

    # Stamp the release version, commit + tag, upload, then switch the
    # tree back to the next development version.
    set_init_version(version)
    set_setup_version(version)
    make_git_commit('Bump version number to %s', version)
    make_git_tag(version)
    build_and_upload()
    set_init_version(dev_version)
    set_setup_version(dev_version)
if __name__ == '__main__':
main()<|fim▁end|> | def parse_date(string): |
<|file_name|>application_test.js<|end_file_name|><|fim▁begin|>/*globals EmberDev */
import Ember from "ember-metal/core";
import {get} from "ember-metal/property_get";
import {set} from "ember-metal/property_set";
import {forEach} from "ember-metal/array";
import run from "ember-metal/run_loop";
import Application from "ember-application/system/application";
import {DefaultResolver} from "ember-application/system/resolver";
import Router from "ember-routing/system/router";
import {View} from "ember-views/views/view";
import {Controller} from "ember-runtime/controllers/controller";
import NoneLocation from "ember-routing/location/none_location";
import EmberHandlebars from "ember-handlebars";
import EmberObject from "ember-runtime/system/object";
import {outletHelper} from "ember-routing/helpers/outlet";
import jQuery from "ember-views/system/jquery";
var trim = jQuery.trim;
var view, app, application, originalLookup, originalDebug;
// QUnit module: application creation / rootElement ownership rules.
// setup builds a fixture with nested elements (#one > #one-child, #two)
// and boots an application rooted at #one; teardown destroys whatever
// apps the tests created and restores the patched Ember globals.
module("Ember.Application", {
  setup: function() {
    originalLookup = Ember.lookup;
    originalDebug = Ember.debug;

    jQuery("#qunit-fixture").html("<div id='one'><div id='one-child'>HI</div></div><div id='two'>HI</div>");
    run(function() {
      application = Application.create({ rootElement: '#one', router: null });
    });
  },

  teardown: function() {
    jQuery("#qunit-fixture").empty();
    Ember.debug = originalDebug;
    Ember.lookup = originalLookup;

    if (application) {
      run(application, 'destroy');
    }

    if (app) {
      run(app, 'destroy');
    }
  }
});

test("you can make a new application in a non-overlapping element", function() {
  run(function() {
    app = Application.create({ rootElement: '#two', router: null });
  });
  run(app, 'destroy');
  ok(true, "should not raise");
});

// Two applications may not share DOM: parent, descendent and duplicate
// rootElements must each trigger an assertion.
test("you cannot make a new application that is a parent of an existing application", function() {
  expectAssertion(function() {
    run(function() {
      Application.create({ rootElement: '#qunit-fixture' });
    });
  });
});

test("you cannot make a new application that is a descendent of an existing application", function() {
  expectAssertion(function() {
    run(function() {
      Application.create({ rootElement: '#one-child' });
    });
  });
});

test("you cannot make a new application that is a duplicate of an existing application", function() {
  expectAssertion(function() {
    run(function() {
      Application.create({ rootElement: '#one' });
    });
  });
});
<|fim▁hole|>test("you cannot make two default applications without a rootElement error", function() {
expectAssertion(function() {
run(function() {
Application.create({ router: false });
});
});
});
// An Application is also an Ember.Namespace: classes assigned to it pick
// up the name the app was registered under in Ember.lookup.
test("acts like a namespace", function() {
  var lookup = Ember.lookup = {}, app;

  run(function() {
    app = lookup.TestApp = Application.create({ rootElement: '#two', router: false });
  });

  Ember.BOOTED = false; // force namespace name resolution to run again
  app.Foo = EmberObject.extend();
  equal(app.Foo.toString(), "TestApp.Foo", "Classes pick up their parent namespace");
});
// QUnit module: boot/initialization behaviour; each test creates its own
// app and teardown clears the compiled-template registry.
module("Ember.Application initialization", {
  teardown: function() {
    if (app) {
      run(app, 'destroy');
    }
    Ember.TEMPLATES = {};
  }
});

test('initialized application go to initial route', function() {
  run(function() {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });

    app.Router.reopen({
      location: 'none'
    });

    app.register('template:application',
      EmberHandlebars.compile("{{outlet}}")
    );

    Ember.TEMPLATES.index = EmberHandlebars.compile(
      "<h1>Hi from index</h1>"
    );
  });

  equal(jQuery('#qunit-fixture h1').text(), "Hi from index");
});

test("initialize application via initialize call", function() {
  run(function() {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });

    app.Router.reopen({
      location: 'none'
    });

    app.ApplicationView = View.extend({
      template: function() { return "<h1>Hello!</h1>"; }
    });
  });

  // This is not a public way to access the container; we just
  // need to make some assertions about the created router
  var router = app.__container__.lookup('router:main');
  equal(router instanceof Router, true, "Router was set from initialize call");
  equal(router.location instanceof NoneLocation, true, "Location was set from location implementation name");
});

test("initialize application with stateManager via initialize call from Router class", function() {
  run(function() {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });

    app.Router.reopen({
      location: 'none'
    });

    // A plain function is a valid template for this purpose.
    app.register('template:application', function() {
      return "<h1>Hello!</h1>";
    });
  });

  var router = app.__container__.lookup('router:main');
  equal(router instanceof Router, true, "Router was set from initialize call");
  equal(jQuery("#qunit-fixture h1").text(), "Hello!");
});
test("ApplicationView is inserted into the page", function() {
  run(function() {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });

    // Render via a manual render() rather than a compiled template.
    app.ApplicationView = View.extend({
      render: function(buffer) {
        buffer.push("<h1>Hello!</h1>");
      }
    });

    app.ApplicationController = Controller.extend();

    app.Router.reopen({
      location: 'none'
    });
  });

  equal(jQuery("#qunit-fixture h1").text(), "Hello!");
});

// An app can boot from nothing more than an inline application template
// found inside its rootElement.
test("Minimal Application initialized with just an application template", function() {
  jQuery('#qunit-fixture').html('<script type="text/x-handlebars">Hello World</script>');

  run(function () {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });
  });

  equal(trim(jQuery('#qunit-fixture').text()), 'Hello World');
});
test('enable log of libraries with an ENV var', function() {
  if (EmberDev && EmberDev.runningProdBuild){
    ok(true, 'Logging does not occur in production builds');
    return;
  }

  // Capture Ember.debug output while LOG_VERSION is on.
  var debug = Ember.debug;
  var messages = [];

  Ember.LOG_VERSION = true;

  Ember.debug = function(message) {
    messages.push(message);
  };

  Ember.libraries.register("my-lib", "2.0.0a");

  run(function() {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });
  });

  // NOTE(review): messages[0] is assumed to be a leading banner line so
  // the per-library lines start at index 1 — confirm against the
  // Ember.libraries logging implementation.
  equal(messages[1], "Ember : " + Ember.VERSION);
  equal(messages[2], "Handlebars : " + EmberHandlebars.VERSION);
  equal(messages[3], "jQuery : " + jQuery().jquery);
  equal(messages[4], "my-lib : " + "2.0.0a");

  // Restore globals patched above.
  Ember.libraries.deRegister("my-lib");
  Ember.LOG_VERSION = false;
  Ember.debug = debug;
});

test('disable log version of libraries with an ENV var', function() {
  var logged = false;

  Ember.LOG_VERSION = false;

  Ember.debug = function(message) {
    logged = true;
  };

  jQuery("#qunit-fixture").empty();

  run(function() {
    app = Application.create({
      rootElement: '#qunit-fixture'
    });

    app.Router.reopen({
      location: 'none'
    });
  });

  ok(!logged, 'library version logging skipped');
});
// A custom Resolver may supply its own Router subclass via resolveOther.
test("can resolve custom router", function(){
  var CustomRouter = Router.extend();

  var CustomResolver = DefaultResolver.extend({
    resolveOther: function(parsedName){
      if (parsedName.type === "router") {
        return CustomRouter;
      } else {
        return this._super(parsedName);
      }
    }
  });

  app = run(function(){
    return Application.create({
      Resolver: CustomResolver
    });
  });

  ok(app.__container__.lookup('router:main') instanceof CustomRouter, 'application resolved the correct router');
});
<|file_name|>ioptron.py<|end_file_name|><|fim▁begin|>import re
from astropy import units as u
from astropy.coordinates import SkyCoord
from astropy.time import Time
from panoptes.mount.mount import AbstractMount
from ..utils.logger import has_logger
from ..utils.config import load_config
from ..utils import error as error
@has_logger
class Mount(AbstractMount):
"""
Mount class for iOptron mounts. Overrides the base `initialize` method
and providers some helper methods to convert coordinates.
"""
def __init__(self, *args, **kwargs):
    """Create the iOptron mount and precompile its serial-protocol parsers."""
    self.logger.info('Creating iOptron mount')
    super().__init__(*args, **kwargs)

    self.config = load_config()

    # Regexp to match the iOptron RA/Dec format
    self._ra_format = '(?P<ra_millisecond>\d{8})'
    self._dec_format = '(?P<dec_sign>[\+\-])(?P<dec_arcsec>\d{8})'
    # The mount reports Dec first, then RA.
    self._coords_format = re.compile(self._dec_format + self._ra_format)

    self._raw_status = None
    # One digit per field, in the order the mount reports them; decoded
    # via self._status_lookup below.
    self._status_format = re.compile(
        '(?P<gps>[0-2]{1})' +
        '(?P<system>[0-7]{1})' +
        '(?P<tracking>[0-4]{1})' +
        '(?P<movement_speed>[1-9]{1})' +
        '(?P<time_source>[1-3]{1})' +
        '(?P<hemisphere>[01]{1})'
    )

    # Maps each raw status digit to a human-readable description.
    self._status_lookup = {
        'gps': {
            '0': 'Off',
            '1': 'On',
            '2': 'Data Extracted'
        },
        'system': {
            '0': 'Stopped - Not at Zero Position',
            '1': 'Tracking (PEC disabled)',
            '2': 'Slewing',
            '3': 'Guiding',
            '4': 'Meridian Flipping',
            '5': 'Tracking (PEC enabled)',
            '6': 'Parked',
            '7': 'Stopped - Zero Position'
        },
        'tracking': {
            '0': 'Sidereal',
            '1': 'Lunar',
            '2': 'Solar',
            '3': 'King',
            '4': 'Custom'
        },
        'movement_speed': {
            '1': '1x sidereal',
            '2': '2x sidereal',
            '3': '8x sidereal',
            '4': '16x sidereal',
            '5': '64x sidereal',
            '6': '128x sidereal',
            '7': '256x sidereal',
            '8': '512x sidereal',
            '9': 'Max sidereal',
        },
        'time_source': {
            '1': 'RS-232',
            '2': 'Hand Controller',
            '3': 'GPS'
        },
        'hemisphere': {
            '0': 'Southern',
            '1': 'Northern'
        }
    }

    self.logger.info('Mount created')
##################################################################################################
# Properties
##################################################################################################
# Each property below re-queries the mount (via self.status()) on every
# access and caches the latest answer on the instance.
@property
def is_parked(self):
    """ bool: Mount parked status. """
    self._is_parked = 'Parked' in self.status().get('system', '')

    return self._is_parked

@property
def is_home(self):
    """ bool: Mount home (zero position) status. """
    self._is_home = 'Stopped - Zero Position' in self.status().get('system', '')

    return self._is_home

@property
def is_tracking(self):
    """ bool: Mount tracking status (PEC enabled or disabled). """
    self._is_tracking = 'Tracking' in self.status().get('system', '')

    return self._is_tracking

@property
def is_slewing(self):
    """ bool: Mount slewing status. """
    self._is_slewing = 'Slewing' in self.status().get('system', '')

    return self._is_slewing
##################################################################################################
# Public Methods
##################################################################################################
def initialize(self):
""" Initialize the connection with the mount and setup for location.
iOptron mounts are initialized by sending the following two commands
to the mount:
* Version
* MountInfo
If the mount is successfully initialized, the `_setup_location_for_mount` method
is also called.
Returns:
bool: Returns the value from `self.is_initialized`.
"""<|fim▁hole|> if not self.is_connected:
self.connect()
if self.is_connected and not self.is_initialized:
# We trick the mount into thinking it's initialized while we
# initialize otherwise the `serial_query` method will test
# to see if initialized and be put into loop.
self.is_initialized = True
actual_version = self.serial_query('version')
actual_mount_info = self.serial_query('mount_info')
expected_version = self.commands.get('version').get('response')
expected_mount_info = self.commands.get( 'mount_info').get('response')
self.is_initialized = False
# Test our init procedure for iOptron
if actual_version != expected_version or actual_mount_info != expected_mount_info:
self.logger.debug('{} != {}'.format(actual_version, expected_version))
self.logger.debug('{} != {}'.format(actual_mount_info, expected_mount_info))
raise error.MountNotFound('Problem initializing mount')
else:
self.is_initialized = True
self._setup_location_for_mount()
self.logger.info('Mount initialized: {}'.format(self.is_initialized))
return self.is_initialized
##################################################################################################
# Private Methods
##################################################################################################
def _setup_location_for_mount(self):
    """
    Sets the mount up to the current location. Mount must be initialized first.

    This uses mount.location (an astropy.coords.EarthLocation) to set most of the params and the rest is
    read from a config file. Users should not call this directly.

    Includes:
    * Latitude set_long
    * Longitude set_lat
    * Daylight Savings disable_daylight_savings
    * Universal Time Offset set_gmt_offset
    * Current Date set_local_date
    * Current Time set_local_time
    """
    # The assert messages (logger.warning calls) are only evaluated when
    # the condition fails.
    assert self.is_initialized, self.logger.warning('Mount has not been initialized')
    assert self.location is not None, self.logger.warning( 'Please set a location before attempting setup')

    self.logger.info('Setting up mount for location')

    # Location
    # Adjust the lat/long for format expected by iOptron:
    # signed, zero-padded whole arcseconds.
    lat = '{:+07.0f}'.format(self.location.latitude.to(u.arcsecond).value)
    lon = '{:+07.0f}'.format(self.location.longitude.to(u.arcsecond).value)

    self.serial_query('set_long', lon)
    self.serial_query('set_lat', lat)

    # Time
    self.serial_query('disable_daylight_savings')

    # NOTE(review): gmt_offset is applied as *minutes* below — confirm the
    # config stores the offset in minutes rather than hours.
    gmt_offset = self.config.get('location').get('gmt_offset', 0)
    self.serial_query('set_gmt_offset', gmt_offset)

    now = Time.now() + gmt_offset * u.minute

    self.serial_query('set_local_time', now.datetime.strftime("%H%M%S"))
    self.serial_query('set_local_date', now.datetime.strftime("%y%m%d"))
def _mount_coord_to_skycoord(self, mount_coords):
    """
    Converts between iOptron RA/Dec format and a SkyCoord.

    Args:
        mount_coords (str): Coordinates as returned by mount
            (signed Dec in 0.01 arcsec, then RA in milliseconds).

    Returns:
        astropy.SkyCoord: Mount coordinates as astropy SkyCoord, or None
            when `mount_coords` does not match the expected format.
    """
    coords_match = self._coords_format.fullmatch(mount_coords)

    coords = None

    self.logger.info("Mount coordinates: {}".format(coords_match))

    if coords_match is not None:
        # re group values are strings; convert to int before attaching
        # units — multiplying a str by an astropy Unit raises TypeError.
        ra = (int(coords_match.group('ra_millisecond')) * u.millisecond).to(u.hour)
        dec = (int(coords_match.group('dec_arcsec')) * u.centiarcsecond).to(u.arcsec)

        dec_sign = coords_match.group('dec_sign')
        if dec_sign == '-':
            dec = dec * -1

        coords = SkyCoord(ra=ra, dec=dec, frame='icrs', unit=(u.hour, u.arcsecond))
    else:
        self.logger.warning(
            "Cannot create SkyCoord from mount coordinates")

    return coords
def _skycoord_to_mount_coord(self, coords):
    """
    Converts between SkyCoord and a iOptron RA/Dec format.

    `
    TTTTTTTT(T) 0.01 arc-seconds
    XXXXX(XXX) milliseconds

    Command: “:SrXXXXXXXX#”
    Defines the commanded right ascension, RA. Slew, calibrate and park commands operate on the
    most recently defined right ascension.

    Command: “:SdsTTTTTTTT#”
    Defines the commanded declination, Dec. Slew, calibrate and park commands operate on the most
    recently defined declination.
    `

    @param coords astropy.coordinates.SkyCoord
    @retval A tuple of (RA, Dec) strings in the mount's wire format
    """
    # RA in milliseconds, zero-padded to 8 digits.
    ra_ms = (coords.ra.hour * u.hour).to(u.millisecond)
    mount_ra = "{:08.0f}".format(ra_ms.value)
    self.logger.debug("RA (ms): {}".format(ra_ms))

    # Dec in 0.01 arcsec, explicitly signed and zero-padded.
    dec_dms = (coords.dec.degree * u.degree).to(u.centiarcsecond)
    self.logger.debug("Dec (centiarcsec): {}".format(dec_dms))
    mount_dec = "{:=+08.0f}".format(dec_dms.value)

    mount_coords = (mount_ra, mount_dec)

    return mount_coords
def _set_zero_position(self):
    """ Sets the current position as the zero position.

    The iOptron allows you to set the current position directly, so
    we simply call the iOptron command.
    """
    self.logger.info("Setting zero position")
    return self.serial_query('set_zero_position')
<|file_name|>ConnectionWrapper.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* This file is part of OpenNMS(R).
*<|fim▁hole|> *
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <[email protected]>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.protocols.jmx.connectors;
import javax.management.MBeanServerConnection;
/*
* This interface defines the ability to handle a live connection and the ability to
* close it.
*
* @author <A HREF="mailto:[email protected]">Mike Jamison </A>
* @author <A HREF="http://www.opennms.org/">OpenNMS </A>
*/
/**
 * Wraps a live JMX connection, exposing its
 * {@link javax.management.MBeanServerConnection} and allowing the
 * underlying connection to be released when no longer needed.
 *
 * @author ranger
 * @version $Id: $
 */
public interface ConnectionWrapper {

    /**
     * Returns the MBean server connection backing this wrapper.
     *
     * @return a {@link javax.management.MBeanServerConnection} object.
     */
    public MBeanServerConnection getMBeanServer();

    /**
     * Closes the underlying connection and releases associated resources.
     */
    public void close();
}
* OpenNMS(R) is Copyright (C) 1999-2011 The OpenNMS Group, Inc. |
<|file_name|>abdt_branch__t.py<|end_file_name|><|fim▁begin|>"""Test suite for abdt_branch."""
# =============================================================================
# TEST PLAN
# -----------------------------------------------------------------------------
# Here we detail the things we are concerned to test and specify which tests
# cover those concerns.
#
# Concerns:
# [XB] can test is_abandoned, is_null, is_new
# [XC] can move between all states without error
# [XD] can set and retrieve repo name, branch link
# [ C] can move bad_pre_review -> 'new' states without duplicating branches
# [ D] unique names and emails are returned in the order of most recent first
# [ E] all commits are shown when no arguments are supplied
# [ E] number of commits can be limited by max_commits argument
# [ E] number of commits can be limited by max_size argument
# [ ] can detect if review branch has new commits (after ff, merge, rebase)
# [ ] can get raw diff from branch
# [ ] can get author names and emails from branch
# [ ] raise if get author names and emails from branch with no history
# [ ] raise if get author names and emails from branch with invalid base
# [ ] can 'get_any_author_emails', raise if no emails ever
# [ ] bad unicode chars in diffs
# [ ] bad unicode chars in commit messages
# [ ] can land an uncomplicated review
# [ ] XXX: withReservedBranch
# [ ] XXX: emptyMergeWorkflow
# [ ] XXX: mergeConflictWorkflow
# [ ] XXX: changeAlreadyMergedOnBase
# [ ] XXX: commandeeredLand
# [ ] XXX: createHugeReview
# [ ] XXX: hugeUpdateToReview
# [ ] XXX: empty repository, no history
# [ ] XXX: landing when origin has been updated underneath us
# [ ] XXX: moving tracker branches when there's something in the way
# -----------------------------------------------------------------------------
# Tests:
# [ A] test_A_Breathing
# [ B] test_B_Empty
# [ C] test_C_BadPreReviewToNew
# [ D] test_D_AlternatingAuthors
# [ E] test_E_NewCommitsDescription
# [XB] test_XB_UntrackedBranch
# [XC] test_XC_MoveBetweenAllMarkedStates
# [XD] check_XD_SetRetrieveRepoNameBranchLink
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import unittest
import phlgit_branch
import phlgit_push
import phlgit_revparse
import phlgitu_fixture
import phlgitx_refcache
import abdt_branch
import abdt_branchtester
import abdt_classicnaming
import abdt_differresultcache
import abdt_git
import abdt_naming
class Test(unittest.TestCase):

    def __init__(self, data):
        super(Test, self).__init__(data)
        # Fixture handles; recreated for every test in setUp().
        self.repos = None
        self.repo_central = None
        self.repo_dev = None
        self.repo_arcyd = None

    def setUp(self):
        # Central repo with two worker clones: w0 plays the developer,
        # w1 (wrapped with a refcache and differ cache) plays arcyd.
        self.repos = phlgitu_fixture.CentralisedWithTwoWorkers()
        self.repo_central = self.repos.central_repo
        self.repo_dev = self.repos.w0.repo
        sys_repo = self.repos.w1.repo
        refcache_repo = phlgitx_refcache.Repo(sys_repo)
        differ_cache = abdt_differresultcache.Cache(refcache_repo)
        self.repo_arcyd = abdt_git.Repo(
            refcache_repo, differ_cache, 'origin', 'myrepo')

    def tearDown(self):
        self.repos.close()

    def test_A_Breathing(self):
        pass

    def test_B_Empty(self):
        pass
def test_C_BadPreReviewToNew(self):
# can move bad_pre_review -> 'new' states without duplicating branches
base, branch_name, branch = self._setup_for_untracked_branch()
transition_list = [
branch.mark_ok_new_review, branch.mark_new_bad_in_review
]<|fim▁hole|> branch.mark_bad_pre_review()
branches_bad_pre = phlgit_branch.get_remote(
self.repo_arcyd, 'origin')
do_transition(102)
branches_new = phlgit_branch.get_remote(self.repo_arcyd, 'origin')
# we expect to have gained one branch when starting to track as
# 'bad_pre_review'.
self.assertEqual(len(branches_bad_pre), len(branches) + 1)
# we expect to have the same number of branches after moving with
# 'mark_ok_new_review'
self.assertEqual(len(branches_bad_pre), len(branches_new))
# remove the tracking branch and make sure the count has gone down
branch.clear_mark()
branches_cleared = phlgit_branch.get_remote(
self.repo_arcyd, 'origin')
self.assertEqual(len(branches_cleared), len(branches))
def test_D_AlternatingAuthors(self):
    # [ D] unique names and emails are returned in the order of most
    #      recent first
    base, branch_name, branch = self._setup_for_untracked_branch()

    alice_user = 'Alice'
    alice_email = '[email protected]'

    bob_user = 'Bob'
    bob_email = '[email protected]'

    # Alternate authors so branch history is Alice, Bob, Alice (newest).
    self._dev_commit_new_empty_file('ALICE1', alice_user, alice_email)
    self._dev_commit_new_empty_file('BOB1', bob_user, bob_email)
    self._dev_commit_new_empty_file('ALICE2', alice_user, alice_email)

    phlgit_push.push(self.repo_dev, branch_name, 'origin')
    self.repo_arcyd('fetch', 'origin')

    # NOTE(review): the expected order is Bob then Alice even though Alice
    # authored the newest commit — confirm the ordering contract of
    # get_author_names_emails() against the test-plan wording.
    author_names_emails = branch.get_author_names_emails()

    self.assertTupleEqual(
        author_names_emails[0],
        (bob_user, bob_email))

    self.assertTupleEqual(
        author_names_emails[1],
        (alice_user, alice_email))

    # any_author_emails = branch.get_any_author_emails()
    # self.assertEqual(any_author_emails[-1], alice_email)
    # self.assertEqual(any_author_emails[-2], bob_email)
def test_E_NewCommitsDescription(self):
    """Exercise describe_new_commits() and its truncation arguments."""
    base, branch_name, branch = self._setup_for_untracked_branch()

    user = 'Alice'
    email = '[email protected]'

    self._dev_commit_new_empty_file('Commit 1', user, email)
    self._dev_commit_new_empty_file('Commit 2', user, email)
    self._dev_commit_new_empty_file('Commit 3', user, email)
    self._dev_commit_new_empty_file('Commit 4', user, email)

    phlgit_push.push(self.repo_dev, branch_name, 'origin')
    self.repo_arcyd('fetch', 'origin')

    # [ E] all commits are shown when no arguments are supplied
    new_commits_str = branch.describe_new_commits()
    new_commits = new_commits_str.splitlines()

    self.assertEqual(4, len(new_commits))
    count = 4
    # Newest commit is listed first.
    for line in new_commits:
        self.assertTrue(line.endswith('Commit {}'.format(count)))
        count -= 1

    # [ E] number of commits can be limited by max_commits argument
    new_commits_str = branch.describe_new_commits(2)
    new_commits = new_commits_str.splitlines()

    self.assertEqual(3, len(new_commits))
    self.assertTrue(new_commits[0].endswith('Commit 4'))
    self.assertTrue(new_commits[1].endswith('Commit 3'))
    self.assertEqual(new_commits[2], '...2 commits not shown.')

    # [ E] number of commits can be limited by max_size argument
    new_commits_str = branch.describe_new_commits(3, 20)
    new_commits = new_commits_str.splitlines()

    self.assertEqual(2, len(new_commits))
    self.assertTrue(new_commits[0].endswith('Commit 4'))
    self.assertEqual(new_commits[1], '...3 commits not shown.')
def _dev_commit_new_empty_file(self, filename, user, email):
    """Create *filename* in the dev repo and commit it authored by
    ``user <email>`` with the filename as the commit message."""
    self._create_new_file(self.repo_dev, filename)
    self.repo_dev('add', filename)
    self.repo_dev(
        'commit',
        '-m',
        filename,
        '--author=' + '{} <{}>'.format(user, email))
def test_XB_UntrackedBranch(self):
    abdt_branchtester.check_XB_UntrackedBranch(self)

def test_XC_MoveBetweenAllMarkedStates(self):
    abdt_branchtester.check_XC_MoveBetweenAllMarkedStates(self)

# NOTE(review): named 'check_D_...' so unittest never discovers or runs
# it, and the name disagrees with the test plan's
# 'check_XD_SetRetrieveRepoNameBranchLink' — confirm whether this should
# be a 'test_XD_...' method.
def check_D_SetRetrieveRepoNameBranchLink(self):
    abdt_branchtester.check_XD_SetRetrieveRepoNameBranchLink(self)
def _create_new_file(self, repo, filename):
self.assertFalse(os.path.isfile(filename))
open(os.path.join(repo.working_dir, filename), 'a').close()
def _setup_for_tracked_branch(self):
    """Like _setup_for_untracked_branch(), but mark the branch as an
    'ok, new review' tracked branch (review id 101)."""
    base, branch_name, branch = self._setup_for_untracked_branch()
    branch.mark_ok_new_review(101)
    return base, branch_name, branch
def _setup_for_untracked_branch(self, repo_name='name', branch_url=None):
    """Create and push a classic-naming review branch from the dev repo,
    fetch it into the arcyd repo, and wrap it in an abdt_branch.Branch.

    Returns:
        (base, branch_name, branch) — the review base name, the pushed
        review branch name and the un-tracked Branch object.
    """
    base = abdt_naming.EXAMPLE_REVIEW_BRANCH_BASE

    naming = abdt_classicnaming.Naming()

    branch_name = abdt_classicnaming.EXAMPLE_REVIEW_BRANCH_NAME
    self.repo_dev('checkout', '-b', branch_name)
    phlgit_push.push(self.repo_dev, branch_name, 'origin')

    self.repo_arcyd('fetch', 'origin')
    review_branch = naming.make_review_branch_from_name(branch_name)
    review_hash = phlgit_revparse.get_sha1_or_none(
        self.repo_arcyd, review_branch.branch)

    # No tracker branch yet: tracking-related parameters are None.
    branch = abdt_branch.Branch(
        self.repo_arcyd,
        review_branch,
        review_hash,
        None,
        None,
        None,
        repo_name,
        branch_url)

    # should not raise
    branch.verify_review_branch_base()

    return base, branch_name, branch
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------<|fim▁end|> |
for do_transition in transition_list:
branches = phlgit_branch.get_remote(self.repo_arcyd, 'origin') |
<|file_name|>authconfig_data.go<|end_file_name|><|fim▁begin|>package data
import (
"github.com/rancher/rancher/pkg/auth/providers/activedirectory"
"github.com/rancher/rancher/pkg/auth/providers/azure"
"github.com/rancher/rancher/pkg/auth/providers/github"
"github.com/rancher/rancher/pkg/auth/providers/googleoauth"
"github.com/rancher/rancher/pkg/auth/providers/ldap"
localprovider "github.com/rancher/rancher/pkg/auth/providers/local"
"github.com/rancher/rancher/pkg/auth/providers/saml"
client "github.com/rancher/rancher/pkg/client/generated/management/v3"
v3 "github.com/rancher/rancher/pkg/generated/norman/management.cattle.io/v3"
"github.com/rancher/rancher/pkg/types/config"
apierrors "k8s.io/apimachinery/pkg/api/errors"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
func AuthConfigs(management *config.ManagementContext) error {<|fim▁hole|> if err := addAuthConfig(activedirectory.Name, client.ActiveDirectoryConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(azure.Name, client.AzureADConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(ldap.OpenLdapName, client.OpenLdapConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(ldap.FreeIpaName, client.FreeIpaConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(saml.PingName, client.PingConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(saml.ADFSName, client.ADFSConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(saml.KeyCloakName, client.KeyCloakConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(saml.OKTAName, client.OKTAConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(saml.ShibbolethName, client.ShibbolethConfigType, false, management); err != nil {
return err
}
if err := addAuthConfig(googleoauth.Name, client.GoogleOauthConfigType, false, management); err != nil {
return err
}
return addAuthConfig(localprovider.Name, client.LocalConfigType, true, management)
}
// addAuthConfig creates an AuthConfig object with the given name, type and
// enabled flag. An already-existing AuthConfig of the same name is not an
// error; any other creation failure is returned.
func addAuthConfig(name, aType string, enabled bool, management *config.ManagementContext) error {
	_, err := management.Management.AuthConfigs("").ObjectClient().Create(&v3.AuthConfig{
		ObjectMeta: v1.ObjectMeta{
			Name: name,
		},
		Type:    aType,
		Enabled: enabled,
	})
	if err != nil && !apierrors.IsAlreadyExists(err) {
		return err
	}
	return nil
}
return err
}
|
<|file_name|>node.go<|end_file_name|><|fim▁begin|>package interpreter
import (
"fmt"
"github.com/Azer0s/Hummus/parser"
)
// NodeType identifies the runtime type of a Node's Value
// (one of the NODETYPE_* constants).
type NodeType uint8

// Node is a single interpreter value: the raw Value plus the
// NodeType tag describing how to interpret it.
type Node struct {
	Value    interface{}
	NodeType NodeType
}
const (
// NODETYPE_INT int variable type
NODETYPE_INT NodeType = 0
// NODETYPE_FLOAT float variable type
NODETYPE_FLOAT NodeType = 1
// NODETYPE_STRING string variable type
NODETYPE_STRING NodeType = 2
// NODETYPE_BOOL bool variable type
NODETYPE_BOOL NodeType = 3
// NODETYPE_ATOM atom variable type
NODETYPE_ATOM NodeType = 4
// NODETYPE_FN function literal
NODETYPE_FN NodeType = 5
// NODETYPE_LIST list type
NODETYPE_LIST NodeType = 6
// NODETYPE_MAP map type
NODETYPE_MAP NodeType = 7
// NODETYPE_STRUCT struct type<|fim▁hole|>type FnLiteral struct {
Parameters []string
Body []parser.Node
Context map[string]Node
}
// ListNode a list value: the ordered elements of a list node.
type ListNode struct {
	Values []Node
}

// MapNode a map node: string keys mapping to value nodes.
type MapNode struct {
	Values map[string]Node
}

// StructDef struct definition: the field names a struct declares.
type StructDef struct {
	Parameters []string
}
// Smaller < operator for Node. Only int, float, string and atom nodes are
// comparable; it panics when the two nodes have different types or when
// the type has no ordering (bool, fn, list, map, struct).
func (node *Node) Smaller(compareTo Node) bool {
	if node.NodeType != compareTo.NodeType {
		panic("Can't compare nodes of two different types!")
	}

	switch node.NodeType {
	case NODETYPE_INT:
		return node.Value.(int) < compareTo.Value.(int)
	case NODETYPE_FLOAT:
		return node.Value.(float64) < compareTo.Value.(float64)
	case NODETYPE_STRING:
		return node.Value.(string) < compareTo.Value.(string)
	case NODETYPE_ATOM:
		// Atoms are stored as strings and compare lexicographically.
		return node.Value.(string) < compareTo.Value.(string)
	default:
		panic(fmt.Sprintf("Nodetype %d cannot be compared!", node.NodeType))
	}
}
// Bigger > operator for Node. Mirror image of Smaller: only int, float,
// string and atom nodes are comparable; panics on mismatched or
// unorderable types.
func (node *Node) Bigger(compareTo Node) bool {
	if node.NodeType != compareTo.NodeType {
		panic("Can't compare nodes of two different types!")
	}

	switch node.NodeType {
	case NODETYPE_INT:
		return node.Value.(int) > compareTo.Value.(int)
	case NODETYPE_FLOAT:
		return node.Value.(float64) > compareTo.Value.(float64)
	case NODETYPE_STRING:
		return node.Value.(string) > compareTo.Value.(string)
	case NODETYPE_ATOM:
		// Atoms are stored as strings and compare lexicographically.
		return node.Value.(string) > compareTo.Value.(string)
	default:
		panic(fmt.Sprintf("Nodetype %d cannot be compared!", node.NodeType))
	}
}
// OptionalNode return an optional node: a map node with a "value" entry
// (carrying val tagged as nodeType) and a boolean "error" entry.
func OptionalNode(val interface{}, nodeType NodeType, err bool) Node {
	return Node{
		Value: MapNode{Values: map[string]Node{
			"value": {
				Value:    val,
				NodeType: nodeType,
			},
			"error": {
				Value:    err,
				NodeType: NODETYPE_BOOL,
			},
		}},
		NodeType: NODETYPE_MAP,
	}
}
)
// FnLiteral a function literal (block) |
<|file_name|>test_key.py<|end_file_name|><|fim▁begin|>from nose.tools import istest, eq_<|fim▁hole|> @istest
def same_key_is_equal(self):
eq_(Key("key"), Key("key"))
@istest
def key_is_usable_as_key(self):
key_dict = {
Key("key"): "Value"
}
eq_(key_dict[Key("key")], "Value")<|fim▁end|> | from tetris.values.key import Key
class TestKey(object):
|
<|file_name|>plugin_padtools_ru.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="ru_RU">
<context>
<name>PadTools::Internal::PadWriter</name>
<message>
<location filename="../../plugins/padtoolsplugin/padwriter.ui" line="14"/>
<source>Form</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/padwriter.ui" line="86"/>
<source>Raw source</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/padwriter.ui" line="97"/>
<source>Errors</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PadTools::Internal::TokenEditor</name>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditor.ui" line="14"/>
<source>Dialog</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditor.ui" line="32"/>
<source>Token editor</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PadTools::Internal::TokenEditorWidget</name>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="14"/>
<source>Form</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="20"/>
<source>Token</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="38"/>
<source>Current value</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="76"/>
<source>Text value</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="108"/>
<source>Block formatting of the token value</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="140"/>
<source>Conditional texts</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="153"/>
<source>Before</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/tokeneditorwidget.ui" line="167"/>
<source>After</source>
<translation type="unfinished"></translation>
</message><|fim▁hole|> <name>PadWriter</name>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="63"/>
<source>PadTools</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="64"/>
<source>Follow cursor in result output</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="65"/>
<source>Find cursor</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="66"/>
<source>Automatic update of results</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="67"/>
<source>View output</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="68"/>
<source>Show source</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="71"/>
<source>Inside conditional before text</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="72"/>
<source>Inside conditional after text</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="73"/>
<source>Before the token</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="74"/>
<source>After the token</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../plugins/padtoolsplugin/constants.h" line="75"/>
<source>Nested token</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | </context>
<context> |
<|file_name|>workspace.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#include <new>
static const int32_t EXT_CONST = 0;
struct ExtType {
uint32_t data;
};
extern "C" {
void consume_ext(ExtType _ext);
} // extern "C"<|fim▁end|> | #include <cstdarg>
#include <cstdint>
#include <cstdlib>
#include <ostream> |
<|file_name|>iter-step-overflow-debug.rs<|end_file_name|><|fim▁begin|>// run-pass
// ignore-wasm32-bare compiled with panic=abort by default
// compile-flags: -C debug_assertions=yes
use std::panic;
fn main() {
let r = panic::catch_unwind(|| {
let mut it = u8::MAX..;
it.next().unwrap(); // 255<|fim▁hole|> let r = panic::catch_unwind(|| {
let mut it = i8::MAX..;
it.next().unwrap(); // 127
it.next().unwrap();
});
assert!(r.is_err());
}<|fim▁end|> | it.next().unwrap();
});
assert!(r.is_err());
|
<|file_name|>ur.py<|end_file_name|><|fim▁begin|>from ..rerequest import TemplateRequest
init_req = TemplateRequest(
re = r'(http://)?(www\.)?(?P<domain>ur(play)?)\.se/(?P<req_url>.+)',
encode_vars = lambda v: { 'req_url': 'http://%(domain)s.se/%(req_url)s' % v } )
hls = { 'title': 'UR-play', 'url': 'http://urplay.se/', 'feed_url': 'http://urplay.se/rss',
'items': [init_req,
TemplateRequest(
re = r'file_html5":\s?"(?P<final_url>[^"]+)".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
encode_vars = lambda v: { 'final_url': ('http://130.242.59.75/%(final_url)s/playlist.m3u8' % v).replace('\\', ''),
'suffix-hint': 'mp4',
'subtitles': v.get('subtitles', '').replace('\\', '') % v } )] }
rtmp = { 'items': [init_req,
TemplateRequest(
re = r'file_flash":\s?"(?P<final_url>[^"]+\.(?P<ext>mp[34]))".*?"subtitles":\s?"(?P<subtitles>[^",]*)',
encode_vars = lambda v: { 'final_url': ('rtmp://130.242.59.75/ondemand playpath=%(ext)s:/%(final_url)s app=ondemand' % v).replace('\\', ''),
'suffix-hint': 'flv',
'rtmpdump-realtime': True,
'subtitles': v.get('subtitles', '').replace('\\', '') % v } )] }<|fim▁hole|>services = [hls, rtmp]<|fim▁end|> | |
<|file_name|>client.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
import json
import click
import serial
import pkg_resources
import serial.tools.list_ports
import logging.config
from educube.web import server as webserver
import logging
logger = logging.getLogger(__name__)
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
def configure_logging(verbose):
loglevels = {
0: logging.ERROR,
1: logging.WARNING,
2: logging.INFO,
3: logging.DEBUG,
}
logging.basicConfig(level=loglevels[verbose])
def verify_serial_connection(port, baud):
try:
ser = serial.Serial(port, baud, timeout=1)
a = ser.read()
if a:
logger.debug('Serial open: %s' % port)
else:
logger.debug('Serial exists but is not readable (permissions?): %s' % port)
ser.close()
except serial.serialutil.SerialException as e:
raise click.BadParameter("Serial not readable: %s" % e)
##############################
# COMMANDS
##############################
def get_serial():
ports = serial.tools.list_ports.comports()
suggested_educube_port = ports[-1]
return suggested_educube_port.device
def get_baud():
ports = serial.tools.list_ports.comports()<|fim▁hole|> else:
return 115200
@click.group()
@click.option('-v', '--verbose', count=True)
@click.pass_context
def cli(ctx, verbose):
"""Educube Client"""
configure_logging(verbose)
@cli.command()
def version():
"""Prints the EduCube client version"""
print(pkg_resources.require("educube")[0].version)
@cli.command()
@click.option('-s', '--serial', default=get_serial, prompt=True)
@click.option('-b', '--baud', default=get_baud, prompt=True)
@click.option('-e', '--board', default='CDH')
@click.option('--fake', is_flag=True, default=False, help="Fake the serial")
@click.option('--json', is_flag=True, default=False, help="Outputs mostly JSON instead")
@click.pass_context
def start(ctx, serial, baud, board, fake, json):
"""Starts the EduCube web interface"""
logger.debug("""Running with settings:
Serial: %s
Baudrate: %s
EduCube board: %s
""" % (serial, baud, board))
ctx.obj['connection'] = {
"type": "serial",
"port": serial,
"baud": baud,
"board": board,
"fake": fake,
}
if not fake:
verify_serial_connection(serial, baud)
webserver.start_webserver(
connection=ctx.obj.get('connection')
)
def main():
cli(obj={})
if __name__ == '__main__':
main()<|fim▁end|> | suggested_educube_port = ports[-1]
if suggested_educube_port.description == 'BASE':
return 9600 |
<|file_name|>centrifuge.py<|end_file_name|><|fim▁begin|>from protocols.forms import forms
from core.utils import SPEED_UNITS
class CentrifugeForm(forms.VerbForm):
name = "Centrifuge"
slug = "centrifuge"
has_machine = True
# duration = forms.IntegerField(help_text='this is the minimal time this should take', initial = 'sec')
# min_speed = forms.IntegerField()
# max_speed = forms.IntegerField(required = False)
# speed_units = forms.ChoiceField(required=False, choices = SPEED_UNITS, initial = 'rpm' )<|fim▁hole|> # comment_why = forms.CharField(required = False)<|fim▁end|> | # speed_comment = forms.CharField(required=False) |
<|file_name|>ExportResultsAction.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* <copyright> Copyright (c) 2014 - 2021 Bauhaus Luftfahrt e.V.. All rights reserved. This program and the accompanying
* materials are made available under the terms of the GNU General Public License v3.0 which accompanies this distribution,
* and is available at https://www.gnu.org/licenses/gpl-3.0.html.en </copyright>
*******************************************************************************/
package com.paxelerate.execution.actions;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import com.paxelerate.core.simulation.astar.SimulationHandler;
import com.paxelerate.model.Deck;
import com.paxelerate.model.Model;
import com.paxelerate.model.ModelFactory;
import com.paxelerate.model.SimulationResult;
import com.paxelerate.model.agent.Passenger;
import net.bhl.opensource.toolbox.time.TimeHelper;
/**
* @author Michael.Schmidt, Marc.Engelmann
* @since 22.08.2019
*
*/
public class ExportResultsAction {
/**
*
* @param handler
* @param cabin
* @param boardingStatus
* @param time
* @param simulationTime
*/
static void setSimulationData(SimulationHandler handler, Model model, List<ArrayList<Integer>> boardingStatus,
double time, double simulationTime) {
Deck deck = model.getDeck();
SimulationResult result = ModelFactory.eINSTANCE.createSimulationResult();
model.getSimulationResults().add(result);
result.setPassengers(deck.getPassengers().size());
result.setBoardingTime(
handler.getMasterBoardingTime() * model.getSettings().getSimulationSpeedFactor() / 1000.0);
result.setSimulationTime(simulationTime);
result.setId(model.getSimulationResults().size() + 1);
result.setName(new SimpleDateFormat("dd.MM, HH:mm").format(new Date()));
result.setDate(new Date());
result.setBoardingTimeString(TimeHelper.toTimeOfDay(time));
result.setWaymakingCompleted(handler.getPassengersByState(null, true).stream()
.mapToInt(Passenger::getNumberOfMakeWayOperations).sum());
result.setLayoutConceptType(model.getSettings().getSeatType());
// result.setLuggageStorageFillingDegree(deck.getLuggageStorages().stream()
// .mapToDouble(s -> 100 - s.getFreeVolume() * 100 / s.getNetVolume()).average().orElse(0));
// TODO: WRONG!
// r.setTotalLargeBagsStowed(deck.getLuggageStorages().stream().mapToInt(l -> l.getMaximumLargeBags()).sum());
// result.setTotalStorageVolume(
// deck.getLuggageStorages().stream().mapToDouble(LuggageStorage::getNetVolume).sum());<|fim▁hole|> (int) boardingStatus.stream().mapToDouble(l -> l.get(2)).average().orElse(0));
result.setMaxNumberOfActivePassengers(boardingStatus.stream().mapToInt(l -> l.get(2)).max().orElse(0));
result.setAverageNumberOfBags(
deck.getPassengers().stream().mapToDouble(p -> p.getLuggage().size()).average().orElse(0));
}
}<|fim▁end|> |
result.setAverageNumberOfActivePassengers( |
<|file_name|>em_ahc_test.py<|end_file_name|><|fim▁begin|>import unittest
import pylab as pl
import matplotlib as mpl
import itertools
import sys
import math
import timeit
import copy
from em import *
def generate_synthetic_data(N):
np.random.seed(0)
C = np.array([[0., -0.7], [3.5, .7]])
C1 = np.array([[-0.4, 1.7], [0.3, .7]])
Y = np.r_[
np.dot(np.random.randn(N/3, 2), C1),
np.dot(np.random.randn(N/3, 2), C),
np.random.randn(N/3, 2) + np.array([3, 3]),
]
return Y.astype(np.float32)
class EMTester(object):
def __init__(self, from_file, variant_param_spaces, device_id, num_subps, names_of_backends):
self.results = {}
self.variant_param_spaces = variant_param_spaces
self.device_id = device_id
self.num_subplots = num_subps
self.names_of_backends = names_of_backends
self.plot_id = num_subps/2*100 + 21
if from_file:
self.X = np.ndfromtxt('IS1000a.csv', delimiter=',', dtype=np.float32)
self.N = self.X.shape[0]
self.D = self.X.shape[1]
else:
N = 1000
self.X = generate_synthetic_data(N)
self.N = self.X.shape[0]
self.D = self.X.shape[1]
def new_gmm(self, M):
self.M = M
self.gmm = GMM(self.M, self.D, names_of_backends_to_use=self.names_of_backends, variant_param_spaces=self.variant_param_spaces, device_id=self.device_id)
def new_gmm_list(self, M, k):
self.M = M
self.init_num_clusters = k
self.gmm_list = [GMM(self.M, self.D, names_of_backends_to_use=self.names_of_backends, variant_param_spaces=self.variant_param_spaces, device_id=self.device_id) for i in range(k)]
def test_speech_ahc(self):
# Get the events, divide them into an initial k clusters and train each GMM on a cluster
per_cluster = self.N/self.init_num_clusters
init_training = zip(self.gmm_list,np.vsplit(self.X, range(per_cluster, self.N, per_cluster)))
for g, x in init_training:
g.train(x)
# Perform hierarchical agglomeration based on BIC scores
best_BIC_score = 1.0
while (best_BIC_score > 0 and len(self.gmm_list) > 1):
print "Num GMMs: %d, last score: %d" % (len(self.gmm_list), best_BIC_score)
num_clusters = len(self.gmm_list)
# Resegment data based on likelihood scoring
likelihoods = self.gmm_list[0].score(self.X)
for g in self.gmm_list[1:]:
likelihoods = np.column_stack((likelihoods, g.score(self.X)))
most_likely = likelihoods.argmax(axis=1)
# Across 2.5 secs of observations, vote on which cluster they should be associated with
iter_training = {}
for i in range(250, self.N, 250):
votes = np.zeros(num_clusters)
for j in range(i-250, i):
votes[most_likely[j]] += 1
#print votes.argmax()
iter_training.setdefault(self.gmm_list[votes.argmax()],[]).append(self.X[i-250:i,:])
votes = np.zeros(num_clusters)
for j in range((self.N/250)*250, self.N):
votes[most_likely[j]] += 1
#print votes.argmax()
iter_training.setdefault(self.gmm_list[votes.argmax()],[]).append(self.X[(self.N/250)*250:self.N,:])
# Retrain the GMMs on the clusters for which they were voted most likely and
# make a list of candidates for merging
iter_bic_list = []
for g, data_list in iter_training.iteritems():
cluster_data = data_list[0]
for d in data_list[1:]:
cluster_data = np.concatenate((cluster_data, d))
cluster_data = np.ascontiguousarray(cluster_data)
g.train(cluster_data)
iter_bic_list.append((g,cluster_data))
# Keep any GMMs that lost all votes in candidate list for merging
for g in self.gmm_list:
if g not in iter_training.keys():
iter_bic_list.append((g,None))
# Score all pairs of GMMs using BIC
best_merged_gmm = None
best_BIC_score = 0.0
merged_tuple = None
for gmm1idx in range(len(iter_bic_list)):
for gmm2idx in range(gmm1idx+1, len(iter_bic_list)):
g1, d1 = iter_bic_list[gmm1idx]
g2, d2 = iter_bic_list[gmm2idx]
score = 0.0
if d1 is not None or d2 is not None:
if d1 is not None and d2 is not None:
new_gmm, score = compute_distance_BIC(g1, g2, np.concatenate((d1, d2)))
elif d1 is not None:
new_gmm, score = compute_distance_BIC(g1, g2, d1)
else:
new_gmm, score = compute_distance_BIC(g1, g2, d2)
print "Comparing BIC %d with %d: %f" % (gmm1idx, gmm2idx, score)
if score > best_BIC_score:
best_merged_gmm = new_gmm
merged_tuple = (g1, g2)
best_BIC_score = score
# Merge the winning candidate pair if its deriable to do so
if best_BIC_score > 0.0:
self.gmm_list.remove(merged_tuple[0])
self.gmm_list.remove(merged_tuple[1])
self.gmm_list.append(best_merged_gmm)
print "Final size of each cluster:", [ g.M for g in self.gmm_list]
def test_cytosis_ahc(self):
M_start = self.M
M_end = 0
plot_counter = 2
for M in reversed(range(M_end, M_start)):
print "======================== AHC loop: M = ", M+1, " ==========================="
self.gmm.train(self.X)
#plotting
means = self.gmm.components.means.reshape((self.gmm.M, self.gmm.D))
covars = self.gmm.components.covars.reshape((self.gmm.M, self.gmm.D, self.gmm.D))
Y = self.gmm.predict(self.X)
if(self.plot_id % 10 <= self.num_subplots):
self.results['_'.join(['ASP v',str(self.plot_id-(100*self.num_subplots+11)),'@',str(self.gmm.D),str(self.gmm.M),str(self.N)])] = (str(self.plot_id), copy.deepcopy(means), copy.deepcopy(covars), copy.deepcopy(Y))
self.plot_id += 1
#find closest components and merge
if M > 0: #don't merge if there is only one component
gmm_list = []
for c1 in range(0, self.gmm.M):
for c2 in range(c1+1, self.gmm.M):
new_component, dist = self.gmm.compute_distance_rissanen(c1, c2)
gmm_list.append((dist, (c1, c2, new_component)))
#print "gmm_list after append: ", gmm_list
#compute minimum distance
min_c1, min_c2, min_component = min(gmm_list, key=lambda gmm: gmm[0])[1]
self.gmm.merge_components(min_c1, min_c2, min_component)
def time_cytosis_ahc(self):
M_start = self.M
M_end = 0
for M in reversed(range(M_end, M_start)):
print "======================== AHC loop: M = ", M+1, " ==========================="
self.gmm.train(self.X)
#find closest components and merge
if M > 0: #don't merge if there is only one component
gmm_list = []
for c1 in range(0, self.gmm.M):
for c2 in range(c1+1, self.gmm.M):
new_component, dist = self.gmm.compute_distance_rissanen(c1, c2)
gmm_list.append((dist, (c1, c2, new_component)))
#compute minimum distance
min_c1, min_c2, min_component = min(gmm_list, key=lambda gmm: gmm[0])[1]
self.gmm.merge_components(min_c1, min_c2, min_component)
def plot(self):
for t, r in self.results.iteritems():
splot = pl.subplot(r[0], title=t)
color_iter = itertools.cycle (['r', 'g', 'b', 'c'])
Y_ = r[3]
for i, (mean, covar, color) in enumerate(zip(r[1], r[2], color_iter)):
v, w = np.linalg.eigh(covar)
u = w[0] / np.linalg.norm(w[0])
pl.scatter(self.X.T[0,Y_==i], self.X.T[1,Y_==i], .8, color=color)
angle = np.arctan(u[1]/u[0])
angle = 180 * angle / np.pi
ell = mpl.patches.Ellipse (mean, v[0], v[1], 180 + angle, color=color)
ell.set_clip_box(splot.bbox)
ell.set_alpha(0.5)
splot.add_artist(ell)
pl.show()
if __name__ == '__main__':
device_id = 0
num_subplots = 6
variant_param_spaces = {'base': {},
'cuda_boost': {'num_blocks_estep': ['16'],
'num_threads_estep': ['512'],
'num_threads_mstep': ['512'],
'num_event_blocks': ['128'],<|fim▁hole|> 'max_num_components': ['122'],
'max_num_dimensions_covar_v3': ['40'],
'max_num_components_covar_v3': ['82'],
'diag_only': ['0'],
'max_iters': ['10'],
'min_iters': ['10'],
'covar_version_name': ['V1', 'V2A', 'V2B', 'V3'] },
'cilk_boost': {}
}
emt = EMTester(True, variant_param_spaces, device_id, num_subplots, ['cuda'])
#emt.new_gmm(6)
#t = timeit.Timer(emt.time_cytosis_ahc)
#print t.timeit(number=1)
#emt.test_cytosis_ahc()
#emt.plot()
emt.new_gmm_list(5, 16)
emt.test_speech_ahc()<|fim▁end|> | 'max_num_dimensions': ['50'], |
<|file_name|>test_reboot_unprovisioned.py<|end_file_name|><|fim▁begin|># Copyright 2015 Rackspace, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from onmetal_scripts.lib import states
from onmetal_scripts import reboot_unprovisioned
from onmetal_scripts.tests import base
import mock
class TestRebootUnprovisioned(base.BaseTest):
def setUp(self):
self.script = reboot_unprovisioned.RebootUnprovisioned()
self.script.get_argument = mock.Mock()
self.script.get_argument.return_value = 0
@mock.patch('onmetal_scripts.reboot_unprovisioned.RebootUnprovisioned.'
'ironic_client')
def test_run(self, ironic_mock):<|fim▁hole|> inactive_node = self._get_test_node(
provision_state=states.AVAILABLE,
instance_uuid=None,
maintenance=False)
ironic_mock.list_nodes.return_value = [active_node, inactive_node]
self.script.run()
ironic_mock.set_target_power_state.assert_called_once_with(
inactive_node, states.REBOOT)
@mock.patch('onmetal_scripts.reboot_unprovisioned.RebootUnprovisioned.'
'ironic_client')
def test_run_fail(self, ironic_mock):
inactive_node = self._get_test_node(
provision_state=states.AVAILABLE,
instance_uuid=None,
maintenance=False)
ironic_mock.list_nodes.return_value = [inactive_node]
ironic_mock.set_target_power_state.side_effect = ValueError
self.script.run()
ironic_mock.set_target_power_state.assert_called_once_with(
inactive_node, states.REBOOT)<|fim▁end|> | active_node = self._get_test_node(
provision_state=states.ACTIVE,
instance_uuid='118ad976-084a-443f-9ec5-77d477f2bfcc') |
<|file_name|>heuristics.py<|end_file_name|><|fim▁begin|>import calendar
import time
from email.utils import formatdate, parsedate, parsedate_tz
from datetime import datetime, timedelta
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
def expire_after(delta, date=None):
date = date or datetime.utcnow()
return date + delta
def datetime_to_header(dt):
return formatdate(calendar.timegm(dt.timetuple()))
class BaseHeuristic(object):
def warning(self, response):
"""
Return a valid 1xx warning header value describing the cache
adjustments.
The response is provided too allow warnings like 113
http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
to explicitly say response is over 24 hours old.
"""
return '110 - "Response is Stale"'
def update_headers(self, response):
"""Update the response headers with any new headers.
NOTE: This SHOULD always include some Warning header to
signify that the response was cached by the client, not
by way of the provided headers.
"""
return {}
def apply(self, response):
updated_headers = self.update_headers(response)
if updated_headers:
response.headers.update(updated_headers)
warning_header_value = self.warning(response)
if warning_header_value is not None:
response.headers.update({'Warning': warning_header_value})
return response
class OneDayCache(BaseHeuristic):
"""
Cache the response by providing an expires 1 day in the
future.
"""
def update_headers(self, response):
headers = {}
if 'expires' not in response.headers:
date = parsedate(response.headers['date'])
expires = expire_after(timedelta(days=1),
date=datetime(*date[:6]))
headers['expires'] = datetime_to_header(expires)
headers['cache-control'] = 'public'
return headers
class ExpiresAfter(BaseHeuristic):
"""
Cache **all** requests for a defined time period.
"""
def __init__(self, **kw):
self.delta = timedelta(**kw)
def update_headers(self, response):
expires = expire_after(self.delta)
return {
'expires': datetime_to_header(expires),<|fim▁hole|> tmpl = '110 - Automatically cached for %s. Response might be stale'
return tmpl % self.delta
class LastModified(BaseHeuristic):
"""
If there is no Expires header already, fall back on Last-Modified
using the heuristic from
http://tools.ietf.org/html/rfc7234#section-4.2.2
to calculate a reasonable value.
Firefox also does something like this per
https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
Unlike mozilla we limit this to 24-hr.
"""
cacheable_by_default_statuses = set([
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
])
def update_headers(self, resp):
headers = resp.headers
if 'expires' in headers:
return {}
if 'cache-control' in headers and headers['cache-control'] != 'public':
return {}
if resp.status not in self.cacheable_by_default_statuses:
return {}
if 'date' not in headers or 'last-modified' not in headers:
return {}
date = calendar.timegm(parsedate_tz(headers['date']))
last_modified = parsedate(headers['last-modified'])
if date is None or last_modified is None:
return {}
now = time.time()
current_age = max(0, now - date)
delta = date - calendar.timegm(last_modified)
freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
if freshness_lifetime <= current_age:
return {}
expires = date + freshness_lifetime
return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
def warning(self, resp):
return None<|fim▁end|> | 'cache-control': 'public',
}
def warning(self, response): |
<|file_name|>github.py<|end_file_name|><|fim▁begin|>import hashlib
import hmac
import json
import requests
class GitHubResponse:
"""Wrapper for GET request response from GitHub"""
def __init__(self, response):
self.response = response
@property
def is_ok(self):
"""Check if request has been successful
:return: if it was OK
:rtype: bool
"""
return self.response.status_code < 300
@property
def data(self):
"""Response data as dict/list
:return: data of response
:rtype: dict|list
"""
return self.response.json()
@property
def url(self):
"""URL of the request leading to this response
:return: URL origin
:rtype: str
"""
return self.response.url
@property
def links(self):
"""Response header links
:return: URL origin
:rtype: dict
"""
return self.response.links
@property
def is_first_page(self):
"""Check if this is the first page of data
:return: if it is the first page of data
:rtype: bool
"""
return 'first' not in self.links
@property
def is_last_page(self):
"""Check if this is the last page of data
:return: if it is the last page of data
:rtype: bool
"""
return 'last' not in self.links
@property
def is_only_page(self):
"""Check if this is the only page of data
:return: if it is the only page page of data
:rtype: bool
"""
return self.is_first_page and self.is_last_page
@property
def total_pages(self):
"""Number of pages
:return: number of pages
:rtype: int
"""
if 'last' not in self.links:
return self.actual_page
return self.parse_page_number(self.links['last']['url'])
@property
def actual_page(self):
"""Actual page number
:return: actual page number
:rtype: int
"""
return self.parse_page_number(self.url)
@staticmethod
def parse_page_number(url):
"""Parse page number from GitHub GET URL
:param url: URL used for GET request
:type url: str
:return: page number
:rtype: int
"""
if '?' not in url:
return 1
params = url.split('?')[1].split('=')
params = {k: v for k, v in zip(params[0::2], params[1::2])}
if 'page' not in params:
return 1
return int(params['page'])
class GitHubAPI:
"""Simple GitHub API communication wrapper
It provides simple way for getting the basic GitHub API
resources and special methods for working with webhooks.
.. todo:: handle if GitHub is out of service, custom errors,
better abstraction, work with extensions
"""
#: URL to GitHub API
API_URL = 'https://api.github.com'
#: URL for OAuth request at GitHub
AUTH_URL = 'https://github.com/login/oauth/authorize?scope={}&client_id={}'
#: URL for OAuth token at GitHub
TOKEN_URL = 'https://github.com/login/oauth/access_token'
#: Scopes for OAuth request
SCOPES = ['user', 'repo', 'admin:repo_hook']
#: Required webhooks to be registered
WEBHOOKS = ['push', 'release', 'repository']
#: Controller for incoming webhook events
WEBHOOK_CONTROLLER = 'webhooks.gh_webhook'
#: URL for checking connections within GitHub
CONNECTIONS_URL = 'https://github.com/settings/connections/applications/{}'
def __init__(self, client_id, client_secret, webhooks_secret,
session=None, token=None):
self.client_id = client_id
self.client_secret = client_secret
self.webhooks_secret = webhooks_secret
self.session = session or requests.Session()
self.token = token
self.scope = []
def _get_headers(self):
"""Prepare auth header fields (empty if no token provided)
:return: Headers for the request
:rtype: dict
"""
if self.token is None:
return {}
return {
'Authorization': 'token {}'.format(self.token),
'Accept': 'application/vnd.github.mercy-preview+json'
}
def get_auth_url(self):
"""Create OAuth request URL
:return: OAuth request URL
:rtype: str
"""
return self.AUTH_URL.format(' '.join(self.SCOPES), self.client_id)
def login(self, session_code):
"""Authorize via OAuth with given session code
:param session_code: The session code for OAuth
:type session_code: str
:return: If the auth procedure was successful
:rtype: bool
.. todo:: check granted scope vs GH_SCOPES
"""
response = self.session.post(
self.TOKEN_URL,
headers={
'Accept': 'application/json'
},
data={
'client_id': self.client_id,
'client_secret': self.client_secret,
'code': session_code,
}
)
if response.status_code != 200:
return False
data = response.json()
self.token = data['access_token']
self.scope = [x for x in data['scope'].split(',')]
return True
def get(self, what, page=0):
"""Perform GET request on GitHub API
:param what: URI of requested resource
:type what: str
:param page: Number of requested page
:type page: int
:return: Response from the GitHub
:rtype: ``repocribro.github.GitHubResponse``
"""
uri = self.API_URL + what
if page > 0:
uri += '?page={}'.format(page)
return GitHubResponse(self.session.get(
uri,
headers=self._get_headers()
))
def webhook_get(self, full_name, hook_id):
"""Perform GET request for repo's webhook
:param full_name: Full name of repository that contains the hook
:type full_name: str
:param hook_id: GitHub ID of hook to be get
:type hook_id: int
:return: Data of the webhook<|fim▁hole|> return self.get('/repos/{}/hooks/{}'.format(full_name, hook_id))
def webhooks_get(self, full_name):
"""GET all webhooks of the repository
:param full_name: Full name of repository
:type full_name: str
:return: List of returned webhooks
:rtype: ``repocribro.github.GitHubResponse``
"""
return self.get('/repos/{}/hooks'.format(full_name))
def webhook_create(self, full_name, hook_url, events=None):
"""Create new webhook for specified repository
:param full_name: Full name of the repository
:type full_name: str
:param hook_url: URL where the webhook data will be sent
:type hook_url: str
:param events: List of requested events for that webhook
:type events: list of str
:return: The created webhook data
:rtype: dict or None
"""
if events is None:
events = self.WEBHOOKS
data = {
'name': 'web',
'active': True,
'events': events,
'config': {
'url': hook_url,
'content_type': 'json',
'secret': self.webhooks_secret
}
}
response = self.session.post(
self.API_URL + '/repos/{}/hooks'.format(full_name),
data=json.dumps(data),
headers=self._get_headers()
)
if response.status_code == 201:
return response.json()
return None
def webhook_tests(self, full_name, hook_id):
    """Trigger a test delivery for repo's webhook.

    GitHub dispatches the test ping via
    ``POST /repos/:owner/:repo/hooks/:id/tests``; the previous
    implementation issued a DELETE request (apparent copy/paste from
    ``webhook_delete``), which cannot trigger the test.

    :param full_name: Full name of repository that contains the hook
    :type full_name: str
    :param hook_id: GitHub ID of hook to be tested
    :type hook_id: int
    :return: If request was successful
    :rtype: bool
    """
    response = self.session.post(
        self.API_URL + '/repos/{}/hooks/{}/tests'.format(
            full_name, hook_id
        ),
        headers=self._get_headers()
    )
    # GitHub replies 204 No Content on a successful test dispatch.
    return response.status_code == 204
def webhook_delete(self, full_name, hook_id):
    """Remove a webhook from a repository via the GitHub API.

    :param full_name: Full name of repository that contains the hook
    :type full_name: str
    :param hook_id: GitHub ID of hook to be deleted
    :type hook_id: int
    :return: If request was successful
    :rtype: bool
    """
    url = '{}/repos/{}/hooks/{}'.format(self.API_URL, full_name, hook_id)
    response = self.session.delete(url, headers=self._get_headers())
    # 204 No Content signals a successful deletion.
    return response.status_code == 204
def webhook_verify_signature(self, data, signature):
    """Check that GitHub's HMAC-SHA1 signature matches the payload.

    :param data: Request body that was signed (bytes)
    :param signature: Hex digest received alongside the data
    :type signature: str
    :return: If the content is verified
    :rtype: bool
    """
    expected = hmac.new(
        self.webhooks_secret.encode('utf-8'),
        data,
        hashlib.sha1
    ).hexdigest()
    # Constant-time comparison to avoid timing side channels.
    return hmac.compare_digest(expected, signature)
@property
def app_connections_link(self):
return self.CONNECTIONS_URL.format(self.client_id)<|fim▁end|> | :rtype: ``repocribro.github.GitHubResponse``
""" |
<|file_name|>pretty.rs<|end_file_name|><|fim▁begin|>// Pris -- A language for designing slides
// Copyright 2017 Ruud van Asseldonk
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License version 3. A copy
// of the License is available in the root of the repository.
//! The string formatting primitives in `std::fmt` are not really suitable for
//! pretty-printing code, because they do not support indentation in a proper
//! way. This module provides an alternative abstraction for pretty printing
//! that automatically inserts indents after newlines. It also assumes that
//! printing cannot fail, which avoids clumsy error handling.
use std::fmt::Write;
use std::rc::Rc;
// The compiler is wrong, this function *is* used, from the macro at the end of
// this file. And that macro itself is also used, in tests.
#[allow(dead_code)]
pub fn print<P: Print>(content: P) -> String {
    // Render `content` into a fresh formatter and hand back the buffer.
    let mut formatter = Formatter::new();
    formatter.print(content);
    formatter.into_string()
}
/// A value that can pretty-print itself into a [`Formatter`].
pub trait Print {
    /// Write this value into the formatter.
    fn print(&self, f: &mut Formatter);
}
/// Accumulates pretty-printed output and tracks the current indent level.
pub struct Formatter {
    // Buffer that receives all printed output.
    target: String,
    // Current indentation level; `println` emits two spaces per level.
    indent: u32,
}
// Forwarding impls: references and smart pointers print as their target.
impl<'a, P: Print> Print for &'a P {
    fn print(&self, f: &mut Formatter) {
        (*self).print(f);
    }
}

impl<P: Print> Print for Box<P> {
    fn print(&self, f: &mut Formatter) {
        (**self).print(f);
    }
}

impl<P: Print> Print for Rc<P> {
    fn print(&self, f: &mut Formatter) {
        (**self).print(f);
    }
}

// Leaf impls: strings are appended verbatim, numbers via their `Display`
// formatting (writing to a `String` cannot fail, hence the `unwrap`).
impl<'a> Print for &'a str {
    fn print(&self, f: &mut Formatter) {
        f.target.push_str(self);
    }
}

impl Print for i32 {
    fn print(&self, f: &mut Formatter) {
        write!(&mut f.target, "{}", self).unwrap();
    }
}

impl Print for u32 {
    fn print(&self, f: &mut Formatter) {
        write!(&mut f.target, "{}", self).unwrap();
    }
}

impl Print for usize {
    fn print(&self, f: &mut Formatter) {
        write!(&mut f.target, "{}", self).unwrap();
    }
}

impl Print for f64 {
    fn print(&self, f: &mut Formatter) {
        write!(&mut f.target, "{}", self).unwrap();
    }
}
impl Formatter {
    /// Create an empty formatter at indentation level zero.
    pub fn new() -> Formatter {
        Formatter { target: String::new(), indent: 0 }
    }

    /// Increase the indentation by one level (two spaces).
    pub fn indent_more(&mut self) {
        self.indent += 1;
    }

    /// Decrease the indentation by one level; panics on underflow.
    pub fn indent_less(&mut self) {
        assert!(self.indent > 0);
        self.indent -= 1;
    }

    /// Append `content` without emitting any indentation.
    pub fn print<P: Print>(&mut self, content: P) {
        content.print(self);
    }

    /// Emit the current indentation (two spaces per level), then `content`.
    /// NOTE(review): despite the name this does not append a newline; the
    /// caller appears to be responsible for trailing '\n' — confirm.
    pub fn println<P: Print>(&mut self, content: P) {
        for _ in 0..self.indent * 2 {
            self.target.push(' ');
        }
        self.print(content);
    }

    /// Append `content` as two hex digits.
    /// NOTE(review): `{:2x}` pads with spaces, so values below 0x10 print
    /// as " f" rather than "0f"; if zero padding was intended this should
    /// be `{:02x}` — confirm against expected output before changing.
    pub fn print_hex_byte(&mut self, content: u8) {
        write!(&mut self.target, "{:2x}", content).unwrap();
    }

    /// Consume the formatter and return the accumulated output.
    pub fn into_string(self) -> String {
        self.target
    }
}
/// Assert that two values of type `P: Print` are equal.<|fim▁hole|>macro_rules! assert_preq {
($left: expr, $right: expr) => {
{
use pretty;
let left = &$left;
let right = &$right;
assert!(left == right,
"\nExpected:\n\n{}\n\nBut found:\n\n{}\n\n",
pretty::print(right),
pretty::print(left));
}
}
}<|fim▁end|> | ///
/// This is similar to `assert_eq!`, but using `Print` rather than `fmt::Debug`.
#[macro_export] |
<|file_name|>0003_remove_message_issue.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('msgs', '0002_auto_20150204_1116'),
]
operations = [
migrations.RemoveField(
model_name='message',
name='issue',
),
]<|fim▁end|> |
from django.db import models, migrations
|
<|file_name|>TypePayment.java<|end_file_name|><|fim▁begin|>package st.domain.quitanda.client.model.contract;
/**
* Created by Daniel Costa at 8/27/16.
* Using user computer xdata
*/
public interface TypePayment<|fim▁hole|><|fim▁end|> | {
public int getDataBaseId();
} |
<|file_name|>page_runner.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import time
import traceback
import urlparse
import random
import csv
from chrome_remote_control import page_test
from chrome_remote_control import util
from chrome_remote_control import wpr_modes
class PageState(object):
    """Mutable per-page bookkeeping carried through a single page run."""

    def __init__(self):
        # Set to True once LoginNeeded succeeds, so cleanup knows to log out.
        self.did_login = False
class PageRunner(object):
"""Runs a given test against a given test."""
def __init__(self, page_set):
    # PageSet whose pages this runner will visit; _ReorderPageSet may
    # reorder its .pages list in place.
    self.page_set = page_set
def __enter__(self):
    # Context-manager entry; the runner itself is the managed resource.
    return self
def __exit__(self, *args):
    # Always release resources on exit, regardless of exceptions.
    self.Close()
def _ReorderPageSet(self, test_shuffle_order_file):
    """Reorders self.page_set.pages to match the url order in the CSV file.

    The CSV must have a 'url' column and every listed url must belong to
    the page set, otherwise an exception is raised.
    """
    pages_by_url = {}
    for page in self.page_set:
        pages_by_url[page.url] = page
    self.page_set.pages = []
    with open(test_shuffle_order_file, 'rb') as csv_file:
        reader = csv.reader(csv_file)
        header = reader.next()
        if 'url' not in header:
            raise Exception('Unusable test_shuffle_order_file.')
        url_column = header.index('url')
        for row in reader:
            url = row[url_column]
            if url not in pages_by_url:
                raise Exception('Unusable test_shuffle_order_file.')
            self.page_set.pages.append(pages_by_url[url])
def Run(self, options, possible_browser, test, results):
archive_path = os.path.abspath(os.path.join(self.page_set.base_dir,
self.page_set.archive_path))
if options.wpr_mode == wpr_modes.WPR_OFF:
if os.path.isfile(archive_path):
possible_browser.options.wpr_mode = wpr_modes.WPR_REPLAY
else:
possible_browser.options.wpr_mode = wpr_modes.WPR_OFF
logging.warning("""
The page set archive %s does not exist, benchmarking against live sites!
Results won't be repeatable or comparable.
To fix this, either add svn-internal to your .gclient using
http://goto/read-src-internal, or create a new archive using --record.
""", os.path.relpath(archive_path))
credentials_path = None
if self.page_set.credentials_path:
credentials_path = os.path.join(self.page_set.base_dir,
self.page_set.credentials_path)
if not os.path.exists(credentials_path):
credentials_path = None
with possible_browser.Create() as b:
b.credentials.credentials_path = credentials_path
test.SetUpBrowser(b)
b.credentials.WarnIfMissingCredentials(self.page_set)
if not options.test_shuffle and options.test_shuffle_order_file is not\
None:
raise Exception('--test-shuffle-order-file requires --test-shuffle.')
# Set up a random generator for shuffling the page running order.
test_random = random.Random()
b.SetReplayArchivePath(archive_path)
with b.ConnectToNthTab(0) as tab:
if options.test_shuffle_order_file is None:
for _ in range(int(options.pageset_repeat)):
if options.test_shuffle:<|fim▁hole|> for page in self.page_set:
for _ in range(int(options.page_repeat)):
self._RunPage(options, page, tab, test, results)
else:
self._ReorderPageSet(options.test_shuffle_order_file)
for page in self.page_set:
self._RunPage(options, page, tab, test, results)
def _RunPage(self, options, page, tab, test, results):
    """Prepares, runs and cleans up a single page of the page set.

    Expected failures and timeouts are recorded in results; unexpected
    exceptions are re-raised after page cleanup.
    """
    logging.info('Running %s' % page.url)
    page_state = PageState()
    try:
        did_prepare = self.PreparePage(page, tab, page_state, results)
    except Exception, ex:
        logging.error('Unexpected failure while running %s: %s',
                      page.url, traceback.format_exc())
        self.CleanUpPage(page, tab, page_state)
        raise
    if not did_prepare:
        # Preparation (e.g. login) failed; the failure is already recorded.
        self.CleanUpPage(page, tab, page_state)
        return
    try:
        test.Run(options, page, tab, results)
    except page_test.Failure, ex:
        # Expected kind of failure; record it and continue with other pages.
        logging.info('%s: %s', ex, page.url)
        results.AddFailure(page, ex, traceback.format_exc())
        return
    except util.TimeoutException, ex:
        logging.warning('Timed out while running %s', page.url)
        results.AddFailure(page, ex, traceback.format_exc())
        return
    except Exception, ex:
        # Unknown failure: log and propagate (cleanup happens in finally).
        logging.error('Unexpected failure while running %s: %s',
                      page.url, traceback.format_exc())
        raise
    finally:
        self.CleanUpPage(page, tab, page_state)
def Close(self):
    # No persistent resources yet; hook kept for the context-manager protocol.
    pass
@staticmethod
def WaitForPageToLoad(expression, tab):
    """Polls the given JavaScript expression in the tab until it is truthy."""
    def IsPageLoaded():
        return tab.runtime.Evaluate(expression)
    # Wait until the form is submitted and the page completes loading.
    # Times out after 60 seconds.
    util.WaitFor(lambda: IsPageLoaded(), 60) # pylint: disable=W0108
def PreparePage(self, page, tab, page_state, results):
    """Navigates the tab to the page, logging in first if required.

    Returns True if the page is ready for the test to run; returns False
    if login failed (a failure is recorded in results in that case).
    """
    parsed_url = urlparse.urlparse(page.url)
    if parsed_url[0] == 'file':
        # Serve local files through the browser's built-in HTTP server.
        path = os.path.join(self.page_set.base_dir,
                            parsed_url.netloc) # pylint: disable=E1101
        dirname, filename = os.path.split(path)
        tab.browser.SetHTTPServerDirectory(dirname)
        target_side_url = tab.browser.http_server.UrlOf(filename)
    else:
        target_side_url = page.url
    if page.credentials:
        page_state.did_login = tab.browser.credentials.LoginNeeded(
            tab, page.credentials)
        if not page_state.did_login:
            msg = 'Could not login to %s on %s' % (page.credentials,
                                                   target_side_url)
            logging.info(msg)
            results.AddFailure(page, msg, "")
            return False
    tab.page.Navigate(target_side_url)
    # Wait for unpredictable redirects.
    if page.wait_time_after_navigate:
        time.sleep(page.wait_time_after_navigate)
    if page.wait_for_javascript_expression is not None:
        self.WaitForPageToLoad(page.wait_for_javascript_expression, tab)
    tab.WaitForDocumentReadyStateToBeInteractiveOrBetter()
    return True
def CleanUpPage(self, page, tab, page_state): # pylint: disable=R0201
if page.credentials and page_state.did_login:
tab.browser.credentials.LoginNoLongerNeeded(tab, page.credentials)
tab.runtime.Evaluate("""chrome && chrome.benchmarking &&
chrome.benchmarking.closeConnections()""")<|fim▁end|> | test_random.shuffle(self.page_set) |
<|file_name|>ghtml.go<|end_file_name|><|fim▁begin|>// Copyright GoFrame Author(https://goframe.org). All Rights Reserved.
//
// This Source Code Form is subject to the terms of the MIT License.
// If a copy of the MIT was not distributed with this file,
// You can obtain one at https://github.com/gogf/gf.
// Package ghtml provides useful API for HTML content handling.
package ghtml
import (
"html"
"reflect"
"strings"
strip "github.com/grokify/html-strip-tags-go"
)<|fim▁hole|>// Referer: http://php.net/manual/zh/function.strip-tags.php
// StripTags strips HTML tags from content, and returns only text.
// Thin wrapper over the third-party strip package.
func StripTags(s string) string {
	return strip.StripTags(s)
}
// Entities encodes all HTML chars for content.
// Referer: http://php.net/manual/zh/function.htmlentities.php
//
// Thin wrapper over the standard library's html.EscapeString.
func Entities(s string) string {
	return html.EscapeString(s)
}
// EntitiesDecode decodes all HTML chars for content.
// Referer: http://php.net/manual/zh/function.html-entity-decode.php
//
// Thin wrapper over the standard library's html.UnescapeString.
func EntitiesDecode(s string) string {
	return html.UnescapeString(s)
}
// SpecialChars encodes some special chars for content, these special chars are:
// "&", "<", ">", `"`, "'".
// Referer: http://php.net/manual/zh/function.htmlspecialchars.php
func SpecialChars(s string) string {
	// The replacement pairs had degenerated into identity mappings
	// (entity text lost to encoding garbling), making this a no-op;
	// restored the HTML entities. A single NewReplacer pass guarantees
	// already-produced entities are never re-escaped.
	return strings.NewReplacer(
		"&", "&amp;",
		"<", "&lt;",
		">", "&gt;",
		`"`, "&#34;",
		"'", "&#39;",
	).Replace(s)
}
// SpecialCharsDecode decodes some special chars for content, these special chars are:
// "&", "<", ">", `"`, "'".
// Referer: http://php.net/manual/zh/function.htmlspecialchars-decode.php
func SpecialCharsDecode(s string) string {
	// The replacement pairs had degenerated into identity mappings
	// (entity text lost to encoding garbling), making this a no-op;
	// restored the exact inverse of SpecialChars (entity -> literal).
	return strings.NewReplacer(
		"&amp;", "&",
		"&lt;", "<",
		"&gt;", ">",
		"&#34;", `"`,
		"&#39;", "'",
	).Replace(s)
}
// SpecialCharsMapOrStruct automatically encodes string values/attributes for map/struct.
func SpecialCharsMapOrStruct(mapOrStruct interface{}) error {
var (
reflectValue = reflect.ValueOf(mapOrStruct)
reflectKind = reflectValue.Kind()
)
for reflectValue.IsValid() && (reflectKind == reflect.Ptr || reflectKind == reflect.Interface) {
reflectValue = reflectValue.Elem()
reflectKind = reflectValue.Kind()
}
switch reflectKind {
case reflect.Map:
var (
mapKeys = reflectValue.MapKeys()
mapValue reflect.Value
)
for _, key := range mapKeys {
mapValue = reflectValue.MapIndex(key)
switch mapValue.Kind() {
case reflect.String:
reflectValue.SetMapIndex(key, reflect.ValueOf(SpecialChars(mapValue.String())))
case reflect.Interface:
if mapValue.Elem().Kind() == reflect.String {
reflectValue.SetMapIndex(key, reflect.ValueOf(SpecialChars(mapValue.Elem().String())))
}
}
}
case reflect.Struct:
var (
fieldValue reflect.Value
)
for i := 0; i < reflectValue.NumField(); i++ {
fieldValue = reflectValue.Field(i)
switch fieldValue.Kind() {
case reflect.String:
fieldValue.Set(reflect.ValueOf(SpecialChars(fieldValue.String())))
}
}
}
return nil
}<|fim▁end|> |
// StripTags strips HTML tags from content, and returns only text. |
<|file_name|>vnstat.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Display vnstat statistics.
Coloring rules.
If value is bigger that dict key, status string will turn to color, specified
in the value.
Example:
coloring = {
800: "#dddd00",
900: "#dd0000",
}
(0 - 800: white, 800-900: yellow, >900 - red)
Format of status string placeholders:
{down} download
{total} total
{up} upload
Requires:
- external program called `vnstat` installed and configured to work.
@author shadowprince
@license Eclipse Public License
"""
from __future__ import division # python2 compatibility
from time import time
from subprocess import check_output
def get_stat(statistics_type):
"""
Get statistics from devfile in list of lists of words
"""
def filter_stat():
out = check_output(["vnstat", "--dumpdb"]).decode("utf-8").splitlines()
for x in out:
if x.startswith("{};0;".format(statistics_type)):
return x
<|fim▁hole|> except OSError as e:
print("Looks like you haven't installed or configured vnstat!")
raise e
except ValueError:
err = "vnstat returned wrong output, "
err += "maybe it's configured wrong or module is outdated"
raise RuntimeError(err)
up = (int(txm) * 1024 + int(txk)) * 1024
down = (int(rxm) * 1024 + int(rxk)) * 1024
return {
"up": up,
"down": down,
"total": up+down
}
class Py3status:
    """
    py3status module that renders vnstat traffic statistics.
    """
    # available configuration parameters
    cache_timeout = 180  # seconds the rendered output stays cached
    coloring = {}        # threshold (in MiB of total traffic) -> color
    format = "{total}"   # status line template ({down}/{up}/{total})
    # initial multiplier, if you want to get rid of first bytes, set to 1 to
    # disable
    initial_multi = 1024
    left_align = 0
    # if value is greater, divide it with unit_multi and get next unit from
    # units
    multiplier_top = 1024
    precision = 1
    statistics_type = "d" # d for daily, m for monthly
    unit_multi = 1024 # value to divide if rate is greater than multiplier_top

    def __init__(self, *args, **kwargs):
        """
        Format of total, up and down placeholders under FORMAT.
        As default, substitutes left_align and precision as %s and %s
        Placeholders:
            value - value (float)
            unit - unit (string)
        """
        # Initial vnstat reading; NOTE: these three attributes are not
        # referenced by currentSpeed (kept for state between refreshes).
        self.last_stat = get_stat(self.statistics_type)
        self.last_time = time()
        self.last_interface = None
        # e.g. "{value:0.1f} {unit}" for left_align=0, precision=1.
        self.value_format = "{value:%s.%sf} {unit}" % (
            self.left_align, self.precision
        )
        # list of units, first one - value/initial_multi, second - value/1024,
        # third - value/1024^2, etc...
        self.units = ["kb", "mb", "gb", "tb", ]

    def _divide_and_format(self, value):
        """
        Divide a value and return formatted string
        """
        # Drop the initial multiplier (raw bytes -> first unit).
        value /= self.initial_multi
        # Climb the unit ladder while the value is still too large.
        for i, unit in enumerate(self.units):
            if value > self.multiplier_top:
                value /= self.unit_multi
            else:
                break
        return self.value_format.format(value=value, unit=unit)

    def currentSpeed(self, i3s_output_list, i3s_config):
        """Build the py3status response dict from a fresh vnstat reading."""
        stat = get_stat(self.statistics_type)
        # Pick the color of the highest configured threshold that
        # stat["total"] exceeds (thresholds are interpreted as MiB).
        color = None
        keys = list(self.coloring.keys())
        keys.sort()
        for k in keys:
            if stat["total"] < k * 1024 * 1024:
                break
            else:
                color = self.coloring[k]
        response = {
            'cached_until': time() + self.cache_timeout,
            'full_text': self.format.format(
                total=self._divide_and_format(stat['total']),
                up=self._divide_and_format(stat['up']),
                down=self._divide_and_format(stat['down']),
            ),
            'transformed': True
        }
        if color:
            response["color"] = color
        return response
if __name__ == "__main__":
"""
Test this module by calling it directly.
"""
from time import sleep
x = Py3status()
config = {
'color_good': '#00FF00',
'color_bad': '#FF0000',
}
while True:
print(x.currentSpeed([], config))
sleep(1)<|fim▁end|> | try:
type, number, ts, rxm, txm, rxk, txk, fill = filter_stat().split(";") |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import time
import json
import datetime
import urllib
from django.test import TestCase, Client
from django.core.urlresolvers import reverse
from django_dynamic_fixture import G
from django.utils.dateformat import format
from .utils import datetime_to_timestamp, timestamp_to_datetime
from .models import Message, Channel
def create_message(text, timestamp, username, channel):
    """
    Creates and saves a Message with the given text, username and
    channel, with datetime_start derived from the given timestamp
    (via utils.timestamp_to_datetime) and with typing set to True.
    """
    return Message.objects.create(
        text=text,
        datetime_start=timestamp_to_datetime(timestamp),
        username=username,
        typing=True,
        channel=channel
    )
class ChatClient(Client):
    """
    Test client whose delete/patch send the body as a raw
    application/x-www-form-urlencoded query string.
    """

    def delete(self, url, qstring):
        # A fresh Client is used deliberately; no state from this
        # instance is needed for the request.
        return Client().delete(
            url,
            qstring,
            content_type='application/x-www-form-urlencoded'
        )

    def patch(self, url, qstring):
        # Fixed: the first parameter was misspelled 'slef' (harmless at
        # runtime since it is unused, but confusing and non-standard).
        return Client().patch(
            url,
            qstring,
            content_type='application/x-www-form-urlencoded'
        )
class ChatTests(TestCase):
    """Base test case: creates the 'Channel' fixture used by all chat tests."""

    def setUp(self):
        super(ChatTests, self).setUp()
        # Fixture channel targeted by the chat views under test.
        self.channel = G(Channel, name='Channel')
class MessageViewPOSTTests(ChatTests):
    """Tests for POSTing new messages to the chat:message view."""

    def post_and_get_response(self, text, timestamp, username, typing):
        """
        Posts a message on chat:message and returns the response
        """
        return self.client.post(
            reverse('chat:message', args=(self.channel.name,)),
            {'text': text, 'username': username, 'datetime_start': timestamp, 'typing': typing}
        )

    def test_post_valid_message(self):
        """
        When a valid message is sent, the view should
        save the message in the database and return
        the id of the message.
        """
        timestamp = 10 ** 11
        username = 'vitsalisa'
        text = 'Message'
        response = self.post_and_get_response(
            text=text,
            timestamp=timestamp,
            username=username,
            typing=True
        )
        messages = Message.objects.filter(username=username)
        self.assertTrue(messages.exists())
        self.assertEquals(len(messages), 1)
        self.assertEqual(response.status_code, 200)
        message = Message.objects.get(username=username);
        self.assertEqual(int(response.content), message.id);
        self.assertEqual(message.username, username);
        self.assertTrue(message.typing)
        self.assertEqual(message.text, text)
        self.assertEqual(datetime_to_timestamp(message.datetime_start), timestamp)

    def test_post_message_without_datetime_start(self):
        """
        When a message is sent without a datetime_start the view
        should produce an appropriate error and a 400(Bad Request)
        status code. The message should not be saved.
        """
        post_dict = {'text': 'Message', 'username': 'vitsalis', 'typing': True}
        response = self.client.post(
            reverse('chat:message', args=(self.channel.name,)),
            post_dict
        )
        self.assertFalse(Message.objects.filter(username='vitsalis').exists())
        self.assertEqual(response.status_code, 400)

    def test_post_message_without_username(self):
        """
        When a message is sent without a username the view
        should produce an appropriate error and a 400(Bad Request)
        status code. The message should not be saved.
        """
        timestamp = 10 ** 11
        post_dict = {'text': 'Message', 'datetime_start': timestamp, 'typing': True}
        response = self.client.post(
            reverse('chat:message', args=(self.channel.name,)),
            post_dict
        )
        datetime_start_field = timestamp_to_datetime(timestamp)
        self.assertFalse(Message.objects.filter(datetime_start=datetime_start_field).exists())
        self.assertEqual(response.status_code, 400)

    def test_post_message_with_invalid_channel_name(self):
        """
        When a message is sent with an invalid channel name
        the view should produce an appropriate error and a
        404(Not Found) status code. The message should not be saved.
        """
        timestamp = 10 ** 11
        response = self.client.post(
            reverse('chat:message', args=('invalid_channel',)),
            {'text': 'Message', 'username': 'vitsalis', 'datetime_start': timestamp, 'typing': True}
        )
        self.assertFalse(Message.objects.filter(username='vitsalis').exists())
        self.assertEqual(response.status_code, 404)

    def test_post_message_without_text(self):
        """
        When a message is sent without text the view
        should produce an appropriate error and a 400(Bad Request)
        status code. The message should not be saved.
        """
        timestamp = 10 ** 11
        post_dict = {'username': 'vitsalis', 'datetime_start': timestamp, 'typing': True}
        response = self.client.post(
            reverse('chat:message', args=(self.channel.name,)),
            post_dict
        )
        self.assertFalse(Message.objects.filter(username='vitsalis').exists())
        self.assertEqual(response.status_code, 400)

    def test_post_message_with_invalid_datetime_start(self):
        """
        When a message is sent with an invalid datetime the view
        should produce an appropriate error and a 400(Bad Request)
        status code. The message should not be saved.
        """
        response = self.post_and_get_response(
            text='Message',
            timestamp='wtf',
            username='vitsalis',
            typing=True
        )
        self.assertFalse(Message.objects.filter(username='vitsalis').exists())
        self.assertEqual(response.status_code, 400)

    def test_post_message_with_future_datetime_start(self):
        """
        When a message is sent with a future datetime the view
        should change the datetime to the current one and save the message.
        """
        timestamp = int(format(datetime.datetime.utcnow() + datetime.timedelta(days=1), 'U')) * 1000
        response = self.post_and_get_response(
            text='Message',
            timestamp=timestamp,
            username='vitsalis',
            typing=True
        )
        messages = Message.objects.filter(username='vitsalis')
        self.assertTrue(messages.exists())
        self.assertEqual(len(messages), 1)
        self.assertTrue(datetime_to_timestamp(messages[0].datetime_start) < timestamp)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(int(response.content), messages[0].id)

    def test_post_message_with_typing_false(self):
        """
        When typing is False the view should save the message
        and make its datetime_sent equal to datetime_start.
        """
        timestamp = 10 ** 11
        response = self.post_and_get_response(
            text='Message',
            timestamp=timestamp,
            username='vitsalis',
            typing=False
        )
        messages = Message.objects.filter(username='vitsalis')
        self.assertTrue(messages.exists())
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0].datetime_sent, messages[0].datetime_start)
class MessageViewGETTests(ChatTests):
    """Tests for fetching messages via GET on the chat:message view."""

    def test_request_messages(self):
        """
        When a valid request is sent the view should return
        a JSON object containing messages. Each message should be
        in the form {text: ...,username: ..., datetime: ...}.
        The messages should be in chronological order(more recent first).
        The number of objects is specified by the lim argument.
        """
        lim = 2
        timestamp = 10 ** 11
        message1 = Message.objects.create(
            text='Message1',
            datetime_start=timestamp_to_datetime(timestamp),
            datetime_sent=timestamp_to_datetime(timestamp + 10),
            username='vitsalis',
            typing=True,
            channel=self.channel
        )
        message2 = Message.objects.create(
            text='Message2',
            datetime_start=timestamp_to_datetime(timestamp + 60 * 60),
            datetime_sent=timestamp_to_datetime(timestamp + 60 * 60 + 10),
            username='pkakelas',
            typing=True,
            channel=self.channel
        )
        response = self.client.get(
            reverse('chat:message', args=(self.channel.name,)),
            {'lim': lim}
        )
        messages = json.loads(response.content)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(messages), 2)
        # The order is reverse chronological
        self.assertEqual(messages[0]['text'], message2.text)
        self.assertEqual(messages[0]['username'], message2.username)
        self.assertEqual(messages[0]['datetime_start'], datetime_to_timestamp(message2.datetime_start))
        self.assertTrue(messages[0]['typing'])
        self.assertEqual(messages[0]['id'], message2.id)
        self.assertEqual(messages[0]['datetime_sent'], datetime_to_timestamp(message2.datetime_sent))
        self.assertEqual(messages[1]['text'], message1.text)
        self.assertEqual(messages[1]['username'], message1.username)
        self.assertEqual(messages[1]['datetime_start'], datetime_to_timestamp(message1.datetime_start))
        self.assertTrue(messages[1]['typing'])
        self.assertEqual(messages[1]['id'], message1.id)
        self.assertEqual(messages[1]['datetime_sent'], datetime_to_timestamp(message1.datetime_sent))

    def test_request_messages_with_bigger_limit_than_messages(self):
        """
        When the lim is bigger than the number of the messages
        on the database for the channel, the server should return
        all the messages for the channel.
        """
        lim = 100
        timestamp = 10 ** 11
        create_message(
            text='Message1',
            timestamp=timestamp,
            username='vitsalis',
            channel=self.channel
        )
        create_message(
            text='Message2',
            timestamp=timestamp + 60 * 60,
            username='pkakelas',
            channel=self.channel
        )
        messages = json.loads(self.client.get(
            reverse('chat:message', args=(self.channel.name,)),
            {'lim': lim}
        ).content)
        self.assertEqual(len(messages), 2)

    def test_request_messages_with_smaller_limit_than_messages(self):
        """
        When the lim is smaller than the number of the messages
        on the database for the channel, the server should return
        no more than <lim> messages.
        """
        lim = 2
        timestamp = 10 ** 11
        for i in range(100):
            create_message(
                text='Message' + str(i),
                timestamp=timestamp + i,
                username='vitsalis',
                channel=self.channel
            )
        messages = json.loads(self.client.get(
            reverse('chat:message', args=(self.channel.name,)),
            {'lim': lim}
        ).content)
        self.assertEqual(len(messages), 2)
        self.assertEqual(messages[0]['text'], 'Message99')
        self.assertEqual(messages[1]['text'], 'Message98')

    def test_request_messages_without_lim(self):
        """
        When the lim is not specified the view should return
        100 messages(or less if there are less than 100 messages).
        """
        timestamp = 10 ** 11
        for i in range(200):
            create_message(
                text='Message' + str(i),
                timestamp=timestamp + i,
                username='vitsalis',
                channel=self.channel
            )
        messages = json.loads(self.client.get(
            reverse('chat:message', args=(self.channel.name,)),
        ).content)
        self.assertEqual(len(messages), 100)

    def test_request_messages_from_one_channel(self):
        """
        The view should return the messages from the
        channel specified.
        """
        channel1 = G(Channel, name='Channel1')
        channel2 = G(Channel, name='Channel2')
        timestamp = 10 ** 11
        message1 = create_message(
            text='Message1',
            timestamp=timestamp,
            username='vitsalis',
            channel=channel1
        )
        create_message(
            text='Message2',
            timestamp=timestamp,
            username='vitsalis',
            channel=channel2
        )
        messages = json.loads(self.client.get(
            reverse('chat:message', args=(channel1.name,)),
        ).content)
        self.assertEqual(len(messages), 1)
        self.assertEqual(messages[0]['text'], message1.text)

    def test_request_messages_with_invalid_channel_name(self):
        """
        When the channel with the name <channel_name>
        does not exist, a 404(Not Found) response code
        should be returned from the view.
        """
        timestamp = 10 ** 11
        create_message(
            text='Message1',
            timestamp=timestamp,
            username='vitsalis',
            channel=self.channel
        )
        response = self.client.get(
            reverse('chat:message', args=('invalid_name',)),
        )
        self.assertEqual(response.status_code, 404)
class MessageViewPATCHTests(ChatTests):
    """Tests for updating messages via PATCH on the chat:message view."""
    client_class = ChatClient

    def patch_and_get_response(self, messageid, text, timestamp, typing):
        """
        Patches a message on chat:message and returns the response
        """
        qstring = urllib.urlencode({
            'id': messageid,
            'text': text,
            'datetime_sent': timestamp,
            'typing': typing
        })
        return self.client.patch(
            reverse('chat:message', args=(self.channel.name,)),
            qstring
        )

    def test_patch_message(self):
        """
        The view should update the message according to the
        data provided and respond with a 204(No Content) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            text='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp
        )
        response = self.patch_and_get_response(
            messageid=message.id,
            text='Message Updated',
            timestamp=timestamp + 10,
            typing=False
        )
        messages = Message.objects.filter(username='vitsalis')
        self.assertTrue(messages.exists())
        self.assertEqual(len(messages), 1)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(messages[0].text, 'Message Updated')
        self.assertEqual(datetime_to_timestamp(messages[0].datetime_start), timestamp)
        self.assertEqual(datetime_to_timestamp(messages[0].datetime_sent), timestamp + 10)
        self.assertEqual(messages[0].username, 'vitsalis')
        self.assertFalse(messages[0].typing)

    def test_patch_message_second_time(self):
        """
        The view should not update a message that has been
        made persistent. Instead it should respond with a
        400(Bad Request) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            text='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp
        )
        self.patch_and_get_response(
            messageid=message.id,
            text='Message Updated',
            timestamp=timestamp + 10,
            typing=False
        )
        response = self.patch_and_get_response(
            messageid=message.id,
            text='Message Updated Again',
            timestamp=timestamp + 100,
            typing=False
        )
        messages = Message.objects.filter(username='vitsalis')
        self.assertTrue(messages.exists())
        self.assertEqual(messages[0].text, 'Message Updated')
        self.assertEqual(response.status_code, 400)

    def test_patch_message_with_datetime_sent_before_datetime_start(self):
        """
        When the datetime_sent is before datetime_start the view
        should make the datetime_sent equal to the datetime_start,
        save the message and respond with a 204(No Content) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            text='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp
        )
        response = self.patch_and_get_response(
            messageid=message.id,
            text='Message Updated',
            timestamp=timestamp - 1,
            typing=False
        )
        dbmessage = Message.objects.get(pk=message.id)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(dbmessage.text, 'Message Updated')
        self.assertTrue(hasattr(dbmessage, 'datetime_sent'))
        self.assertEqual(dbmessage.datetime_sent, message.datetime_start)
        self.assertEqual(dbmessage.datetime_sent, dbmessage.datetime_start)
        self.assertEqual(datetime_to_timestamp(dbmessage.datetime_start), timestamp)
        self.assertFalse(dbmessage.typing)

    def test_patch_message_without_id(self):
        """
        When the id is not specified the view should
        not patch the message and respond with a
        400(Bad Request) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            text='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp
        )
        qstring = urllib.urlencode({
            'text': 'Message Updated',
            'datetime_sent': timestamp + 10,
            'typing': False
        })
        response = self.client.patch(
            reverse('chat:message', args=(self.channel.name,)),
            qstring
        )
        dbmessage = Message.objects.get(pk=message.id)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(dbmessage.text, message.text)
        self.assertIsNone(dbmessage.datetime_sent)

    def test_patch_message_without_text(self):
        """
        When the text is not specified the view
        should not patch the message and respond with a
        400(Bad Request) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            text='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp
        )
        qstring = urllib.urlencode({
            'id': message.id,
            'datetime_sent': timestamp + 10,
            'typing': False
        })
        response = self.client.patch(
            reverse('chat:message', args=(self.channel.name,)),
            qstring
        )
        dbmessage = Message.objects.get(pk=message.id)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(dbmessage.text, message.text)
        self.assertIsNone(dbmessage.datetime_sent)

    def test_patch_message_without_datetime_sent(self):
        """
        When the datetime_sent is not specified the view
        should not patch the message and respond with a
        400(Bad Request) code.
        """
        timestamp = 10 ** 11
        message = create_message(
            text='Message',
            username='vitsalis',
            channel=self.channel,
            timestamp=timestamp
        )
        qstring = urllib.urlencode({
            'id': message.id,
            'text': 'Message Updated',
            'typing': False
        })
        response = self.client.patch(
            reverse('chat:message', args=(self.channel.name,)),
            qstring
        )
        dbmessage = Message.objects.get(pk=message.id)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(dbmessage.text, message.text)
        self.assertIsNone(dbmessage.datetime_sent)
class MessageViewDELETETests(ChatTests):
client_class = ChatClient
def test_delete_message(self):
"""
The view should delete the message with the
specified id and respond with a 204(No Content)
code.
"""
timestamp = 10 ** 11
message = create_message(
text='Message',
username='vitsalis',
channel=self.channel,
timestamp=timestamp
)
qstring = urllib.urlencode({
'id': message.id
})
response = self.client.delete(
reverse('chat:message', args=(self.channel.name,)),
qstring
)
messages = Message.objects.filter(username='vitsalis')
self.assertEqual(response.status_code, 204)
self.assertEqual(len(messages), 0)
def test_delete_message_without_id(self):
"""
When the id is not specified the view should
return a 400(Bad Request) code.
"""
qstring = urllib.urlencode({})
response = self.client.delete(
reverse('chat:message', args=(self.channel.name,)),
qstring
)
self.assertEqual(response.status_code, 400)
def test_delete_message_that_does_not_exist(self):
"""
When a message with the specified id doesn't exist
the view should respond with a 404(Not Found) code.
"""
timestamp = 10 ** 11
message = create_message(
text='Message',
username='vitsalis',
channel=self.channel,
timestamp=timestamp
)
qstring = urllib.urlencode({
'id': message.id + 1
})
response = self.client.delete(
reverse('chat:message', args=(self.channel.name,)),
qstring
)
self.assertEqual(response.status_code, 404)
messages = Message.objects.filter(username='vitsalis')
self.assertEqual(len(messages), 1)
class ChannelViewPOSTTests(ChatTests):
def test_create_valid_channel(self):
"""
When a channel is created the view should
respond with a 204(No Content) code and save the channel
in the database.
"""
response = self.client.post(
reverse('chat:channel'),
{'name': 'New_Channel'}
)
self.assertTrue(Channel.objects.filter(name='New_Channel').exists())
self.assertEqual(Channel.objects.filter(name='New_Channel').count(), 1)
self.assertEqual(response.status_code, 204)
class ChannelViewGETTests(ChatTests):
def test_request_valid_channel(self):
"""
When a channel with a name that exists in
the database is requested, the view should return
a JSON object containing the name of the channel
and a 200(OK) status code.
"""
response = self.client.get(
reverse('chat:channel'),
{'name': self.channel.name}
)
returned_channel = json.loads(response.content)
self.assertEqual(response.status_code, 200)
self.assertEqual(returned_channel['name'], self.channel.name)
def test_request_channel_that_does_not_exist(self):
"""
When a channel that does not exist is requested
the view should return a 404(Not Found) status code.
"""
response = self.client.get(
reverse('chat:channel'),
{'name': 'invalid_channel'}
)
<|fim▁hole|>class MessageModelTests(ChatTests):
def test_message_create(self):
"""
A message must be saved correctly in the database.
"""
message = create_message(
text='Message',
timestamp=10 ** 11,
username='vitsalis',
channel=self.channel
)
messages = Message.objects.filter(pk=message.id)
self.assertTrue(messages.exists())
self.assertEqual(messages.count(), 1)
dbmessage = messages[0]
self.assertEqual(dbmessage.text, message.text)
self.assertEqual(dbmessage.datetime_start, message.datetime_start)
self.assertEqual(dbmessage.username, message.username)
self.assertEqual(dbmessage.channel.id, message.channel.id)
self.assertTrue(dbmessage.typing)
class ChannelModelTests(ChatTests):
def test_channel_create(self):
"""
A channel must be saved in the database.
"""
channels = Channel.objects.filter(pk=self.channel.id)
self.assertTrue(channels.exists())
self.assertEqual(channels.count(), 1)
self.assertEqual(channels[0].name, self.channel.name)
class URLTests(ChatTests):
def test_urls(self):
self.assertEqual(
reverse('chat:message', args=('foo',)),
'/messages/foo/'
)<|fim▁end|> | self.assertEqual(response.status_code, 404)
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django import forms
from django.contrib import messages
from django.contrib.admin.views.decorators import staff_member_required
from django.shortcuts import render
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from data.models import ImportTask
class ImportTaskForm(forms.ModelForm):
class Meta:
model = ImportTask
fields = ('data_file',)
@staff_member_required
def upload(request):
if request.method == 'POST':
import_task_form = ImportTaskForm(data=request.POST, files=request.FILES)
if import_task_form.is_valid():
import_task = import_task_form.save()
import_task.enqueue()
messages.info(request, 'Data file queued for processing')
return HttpResponseRedirect(reverse('data_upload'))
else:
import_task_form = ImportTaskForm()
return render(request, 'data/upload.html', {
'import_task_form': import_task_form,
})
<|fim▁hole|>def enqueue(request, import_task_id):
import_task = ImportTask.objects.get(pk=import_task_id)
import_task.enqueue()
messages.info(request, 'Data file queued for processing')
return HttpResponseRedirect(reverse('admin:data_importtask_changelist'))<|fim▁end|> |
@staff_member_required |
<|file_name|>thumbnail.js<|end_file_name|><|fim▁begin|>(function (factory) {
if (typeof module === 'object' && typeof module.exports === 'object') {
var v = factory(require, exports); if (v !== undefined) module.exports = v;
}
else if (typeof define === 'function' && define.amd) {
define(["require", "exports", '@angular/core'], factory);
}
})(function (require, exports) {
"use strict";<|fim▁hole|> /**
* \@name Thumbnail
* \@module ionic
* \@description
* A Thumbnail is a component that creates a squared image for an item.
* Thumbnails can be place on the left or right side of an item with the `item-left` or `item-right` directive.
* @see {\@link /docs/v2/components/#thumbnail-list Thumbnail Component Docs}
*/
var Thumbnail = (function () {
function Thumbnail() {
}
Thumbnail.decorators = [
{ type: core_1.Directive, args: [{
selector: 'ion-thumbnail'
},] },
];
/** @nocollapse */
Thumbnail.ctorParameters = function () { return []; };
return Thumbnail;
}());
exports.Thumbnail = Thumbnail;
function Thumbnail_tsickle_Closure_declarations() {
/** @type {?} */
Thumbnail.decorators;
/**
* @nocollapse
* @type {?}
*/
Thumbnail.ctorParameters;
}
});
//# sourceMappingURL=thumbnail.js.map<|fim▁end|> | var core_1 = require('@angular/core'); |
<|file_name|>new.go<|end_file_name|><|fim▁begin|>package buildah
import (
"context"
"fmt"
"math/rand"
"strings"
"github.com/containers/buildah/define"
"github.com/containers/buildah/pkg/blobcache"
"github.com/containers/common/libimage"
"github.com/containers/common/pkg/config"
"github.com/containers/image/v5/image"
"github.com/containers/image/v5/manifest"
"github.com/containers/image/v5/transports"
"github.com/containers/image/v5/types"
"github.com/containers/storage"
digest "github.com/opencontainers/go-digest"
"github.com/openshift/imagebuilder"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
const (
// BaseImageFakeName is the "name" of a source image which we interpret
// as "no image".
BaseImageFakeName = imagebuilder.NoBaseImageSpecifier
)
func getImageName(name string, img *storage.Image) string {
imageName := name
if len(img.Names) > 0 {
imageName = img.Names[0]
// When the image used by the container is a tagged image
// the container name might be set to the original image instead of
// the image given in the "from" command line.
// This loop is supposed to fix this.
for _, n := range img.Names {
if strings.Contains(n, name) {
imageName = n
break
}
}
}
return imageName
}
func imageNamePrefix(imageName string) string {
prefix := imageName
s := strings.Split(prefix, ":")
if len(s) > 0 {
prefix = s[0]
}
s = strings.Split(prefix, "/")
if len(s) > 0 {
prefix = s[len(s)-1]
}
s = strings.Split(prefix, "@")
if len(s) > 0 {
prefix = s[0]
}
return prefix
}
func newContainerIDMappingOptions(idmapOptions *define.IDMappingOptions) storage.IDMappingOptions {
var options storage.IDMappingOptions
if idmapOptions != nil {
options.HostUIDMapping = idmapOptions.HostUIDMapping
options.HostGIDMapping = idmapOptions.HostGIDMapping
uidmap, gidmap := convertRuntimeIDMaps(idmapOptions.UIDMap, idmapOptions.GIDMap)
if len(uidmap) > 0 && len(gidmap) > 0 {
options.UIDMap = uidmap
options.GIDMap = gidmap
} else {
options.HostUIDMapping = true
options.HostGIDMapping = true
}
}
return options
}
func containerNameExist(name string, containers []storage.Container) bool {
for _, container := range containers {
for _, cname := range container.Names {
if cname == name {
return true
}
}
}
return false
}
func findUnusedContainer(name string, containers []storage.Container) string {
suffix := 1
tmpName := name
for containerNameExist(tmpName, containers) {
tmpName = fmt.Sprintf("%s-%d", name, suffix)
suffix++
}
return tmpName
}
func newBuilder(ctx context.Context, store storage.Store, options BuilderOptions) (*Builder, error) {
var (
ref types.ImageReference
img *storage.Image
err error
)
if options.FromImage == BaseImageFakeName {
options.FromImage = ""
}
systemContext := getSystemContext(store, options.SystemContext, options.SignaturePolicyPath)
if options.FromImage != "" && options.FromImage != "scratch" {
imageRuntime, err := libimage.RuntimeFromStore(store, &libimage.RuntimeOptions{SystemContext: systemContext})
if err != nil {
return nil, err
}
pullPolicy, err := config.ParsePullPolicy(options.PullPolicy.String())
if err != nil {
return nil, err
}
// Note: options.Format does *not* relate to the image we're
// about to pull (see tests/digests.bats). So we're not
// forcing a MIMEType in the pullOptions below.
pullOptions := libimage.PullOptions{}
pullOptions.RetryDelay = &options.PullRetryDelay
pullOptions.OciDecryptConfig = options.OciDecryptConfig
pullOptions.SignaturePolicyPath = options.SignaturePolicyPath
pullOptions.Writer = options.ReportWriter
maxRetries := uint(options.MaxPullRetries)
pullOptions.MaxRetries = &maxRetries
if options.BlobDirectory != "" {
pullOptions.DestinationLookupReferenceFunc = blobcache.CacheLookupReferenceFunc(options.BlobDirectory, types.PreserveOriginal)
}
pulledImages, err := imageRuntime.Pull(ctx, options.FromImage, pullPolicy, &pullOptions)
if err != nil {
return nil, err
}
if len(pulledImages) > 0 {
img = pulledImages[0].StorageImage()
ref, err = pulledImages[0].StorageReference()
if err != nil {
return nil, err
}
}
}
imageSpec := options.FromImage
imageID := ""
imageDigest := ""
topLayer := ""
if img != nil {
imageSpec = getImageName(imageNamePrefix(imageSpec), img)
imageID = img.ID
topLayer = img.TopLayer
}
var src types.Image
if ref != nil {
srcSrc, err := ref.NewImageSource(ctx, systemContext)
if err != nil {
return nil, errors.Wrapf(err, "error instantiating image for %q", transports.ImageName(ref))
}
defer srcSrc.Close()
manifestBytes, manifestType, err := srcSrc.GetManifest(ctx, nil)
if err != nil {
return nil, errors.Wrapf(err, "error loading image manifest for %q", transports.ImageName(ref))
}
if manifestDigest, err := manifest.Digest(manifestBytes); err == nil {
imageDigest = manifestDigest.String()
}
var instanceDigest *digest.Digest
if manifest.MIMETypeIsMultiImage(manifestType) {
list, err := manifest.ListFromBlob(manifestBytes, manifestType)
if err != nil {
return nil, errors.Wrapf(err, "error parsing image manifest for %q as list", transports.ImageName(ref))
}
instance, err := list.ChooseInstance(systemContext)
if err != nil {
return nil, errors.Wrapf(err, "error finding an appropriate image in manifest list %q", transports.ImageName(ref))
}
instanceDigest = &instance
}
src, err = image.FromUnparsedImage(ctx, systemContext, image.UnparsedInstance(srcSrc, instanceDigest))
if err != nil {
return nil, errors.Wrapf(err, "error instantiating image for %q instance %q", transports.ImageName(ref), instanceDigest)
}
}
name := "working-container"
if options.Container != "" {
name = options.Container
} else {
if imageSpec != "" {
name = imageNamePrefix(imageSpec) + "-" + name
}
}
var container *storage.Container
tmpName := name
if options.Container == "" {
containers, err := store.Containers()
if err != nil {
return nil, errors.Wrapf(err, "unable to check for container names")
}
tmpName = findUnusedContainer(tmpName, containers)
}
conflict := 100
for {
coptions := storage.ContainerOptions{
LabelOpts: options.CommonBuildOpts.LabelOpts,
IDMappingOptions: newContainerIDMappingOptions(options.IDMappingOptions),
Volatile: true,
}
container, err = store.CreateContainer("", []string{tmpName}, imageID, "", "", &coptions)
if err == nil {
name = tmpName
break
}
if errors.Cause(err) != storage.ErrDuplicateName || options.Container != "" {
return nil, errors.Wrapf(err, "error creating container")
}
tmpName = fmt.Sprintf("%s-%d", name, rand.Int()%conflict)
conflict = conflict * 10
}
defer func() {
if err != nil {
if err2 := store.DeleteContainer(container.ID); err2 != nil {
logrus.Errorf("error deleting container %q: %v", container.ID, err2)
}
}
}()
uidmap, gidmap := convertStorageIDMaps(container.UIDMap, container.GIDMap)
defaultNamespaceOptions, err := DefaultNamespaceOptions()
if err != nil {
return nil, err
}
namespaceOptions := defaultNamespaceOptions
namespaceOptions.AddOrReplace(options.NamespaceOptions...)
builder := &Builder{
store: store,
Type: containerType,
FromImage: imageSpec,
FromImageID: imageID,
FromImageDigest: imageDigest,
Container: name,
ContainerID: container.ID,
ImageAnnotations: map[string]string{},
ImageCreatedBy: "",
ProcessLabel: container.ProcessLabel(),
MountLabel: container.MountLabel(),
DefaultMountsFilePath: options.DefaultMountsFilePath,
Isolation: options.Isolation,
NamespaceOptions: namespaceOptions,
ConfigureNetwork: options.ConfigureNetwork,
CNIPluginPath: options.CNIPluginPath,
CNIConfigDir: options.CNIConfigDir,
IDMappingOptions: define.IDMappingOptions{
HostUIDMapping: len(uidmap) == 0,
HostGIDMapping: len(uidmap) == 0,
UIDMap: uidmap,
GIDMap: gidmap,
},
Capabilities: copyStringSlice(options.Capabilities),
CommonBuildOpts: options.CommonBuildOpts,
TopLayer: topLayer,
Args: options.Args,
Format: options.Format,
TempVolumes: map[string]bool{},
Devices: options.Devices,
}
if options.Mount {
_, err = builder.Mount(container.MountLabel())
if err != nil {
return nil, errors.Wrapf(err, "error mounting build container %q", builder.ContainerID)
}
}
if err := builder.initConfig(ctx, src); err != nil {
return nil, errors.Wrapf(err, "error preparing image configuration")<|fim▁hole|> return nil, errors.Wrapf(err, "error saving builder state for container %q", builder.ContainerID)
}
return builder, nil
}<|fim▁end|> | }
err = builder.Save()
if err != nil { |
<|file_name|>css.js<|end_file_name|><|fim▁begin|>// Modules.<|fim▁hole|> config = require('../../config'),
path = require('path');
// Package libraries.
gulp.task('package:css', () => {
'use strict';
let sourcePath = path.join(config.development.paths.css, '**/*');
let destinationPath = config.package.paths.css;
return gulp
.src([sourcePath])
.pipe(gulp.dest(destinationPath));
});<|fim▁end|> | let gulp = require('gulp'), |
<|file_name|>test_ntpath.py<|end_file_name|><|fim▁begin|>import ntpath
import os
import sys
from test.test_support import TestFailed
from test import test_support, test_genericpath
import unittest
def tester0(fn, wantResult):
gotResult = eval(fn)
if wantResult != gotResult:
raise TestFailed, "%s should return: %r but returned: %r" \
%(fn, wantResult, gotResult)
def tester(fn, wantResult):
fn = fn.replace("\\", "\\\\")
tester0(fn, wantResult)
class TestNtpath(unittest.TestCase):
def test_splitext(self):
tester('ntpath.splitext("foo.ext")', ('foo', '.ext'))
tester('ntpath.splitext("/foo/foo.ext")', ('/foo/foo', '.ext'))
tester('ntpath.splitext(".ext")', ('.ext', ''))
tester('ntpath.splitext("\\foo.ext\\foo")', ('\\foo.ext\\foo', ''))
tester('ntpath.splitext("foo.ext\\")', ('foo.ext\\', ''))
tester('ntpath.splitext("")', ('', ''))
tester('ntpath.splitext("foo.bar.ext")', ('foo.bar', '.ext'))
tester('ntpath.splitext("xx/foo.bar.ext")', ('xx/foo.bar', '.ext'))
tester('ntpath.splitext("xx\\foo.bar.ext")', ('xx\\foo.bar', '.ext'))
tester('ntpath.splitext("c:a/b\\c.d")', ('c:a/b\\c', '.d'))
def test_splitdrive(self):
tester('ntpath.splitdrive("c:\\foo\\bar")',
('c:', '\\foo\\bar'))
<|fim▁hole|> tester('ntpath.splitdrive("c:/foo/bar")',
('c:', '/foo/bar'))
def test_splitunc(self):
tester('ntpath.splitunc("c:\\foo\\bar")',
('', 'c:\\foo\\bar'))
tester('ntpath.splitunc("c:/foo/bar")',
('', 'c:/foo/bar'))
tester('ntpath.splitunc("\\\\conky\\mountpoint\\foo\\bar")',
('\\\\conky\\mountpoint', '\\foo\\bar'))
tester('ntpath.splitunc("//conky/mountpoint/foo/bar")',
('//conky/mountpoint', '/foo/bar'))
tester('ntpath.splitunc("\\\\\\conky\\mountpoint\\foo\\bar")',
('', '\\\\\\conky\\mountpoint\\foo\\bar'))
tester('ntpath.splitunc("///conky/mountpoint/foo/bar")',
('', '///conky/mountpoint/foo/bar'))
tester('ntpath.splitunc("\\\\conky\\\\mountpoint\\foo\\bar")',
('', '\\\\conky\\\\mountpoint\\foo\\bar'))
tester('ntpath.splitunc("//conky//mountpoint/foo/bar")',
('', '//conky//mountpoint/foo/bar'))
self.assertEqual(ntpath.splitunc(u'//conky/MOUNTPO\u0130NT/foo/bar'),
(u'//conky/MOUNTPO\u0130NT', u'/foo/bar'))
def test_split(self):
tester('ntpath.split("c:\\foo\\bar")', ('c:\\foo', 'bar'))
tester('ntpath.split("\\\\conky\\mountpoint\\foo\\bar")',
('\\\\conky\\mountpoint\\foo', 'bar'))
tester('ntpath.split("c:\\")', ('c:\\', ''))
tester('ntpath.split("\\\\conky\\mountpoint\\")',
('\\\\conky\\mountpoint', ''))
tester('ntpath.split("c:/")', ('c:/', ''))
tester('ntpath.split("//conky/mountpoint/")', ('//conky/mountpoint', ''))
def test_isabs(self):
tester('ntpath.isabs("c:\\")', 1)
tester('ntpath.isabs("\\\\conky\\mountpoint\\")', 1)
tester('ntpath.isabs("\\foo")', 1)
tester('ntpath.isabs("\\foo\\bar")', 1)
def test_commonprefix(self):
tester('ntpath.commonprefix(["/home/swenson/spam", "/home/swen/spam"])',
"/home/swen")
tester('ntpath.commonprefix(["\\home\\swen\\spam", "\\home\\swen\\eggs"])',
"\\home\\swen\\")
tester('ntpath.commonprefix(["/home/swen/spam", "/home/swen/spam"])',
"/home/swen/spam")
def test_join(self):
tester('ntpath.join("")', '')
tester('ntpath.join("", "", "")', '')
tester('ntpath.join("a")', 'a')
tester('ntpath.join("/a")', '/a')
tester('ntpath.join("\\a")', '\\a')
tester('ntpath.join("a:")', 'a:')
tester('ntpath.join("a:", "\\b")', 'a:\\b')
tester('ntpath.join("a", "\\b")', '\\b')
tester('ntpath.join("a", "b", "c")', 'a\\b\\c')
tester('ntpath.join("a\\", "b", "c")', 'a\\b\\c')
tester('ntpath.join("a", "b\\", "c")', 'a\\b\\c')
tester('ntpath.join("a", "b", "\\c")', '\\c')
tester('ntpath.join("d:\\", "\\pleep")', 'd:\\pleep')
tester('ntpath.join("d:\\", "a", "b")', 'd:\\a\\b')
tester("ntpath.join('', 'a')", 'a')
tester("ntpath.join('', '', '', '', 'a')", 'a')
tester("ntpath.join('a', '')", 'a\\')
tester("ntpath.join('a', '', '', '', '')", 'a\\')
tester("ntpath.join('a\\', '')", 'a\\')
tester("ntpath.join('a\\', '', '', '', '')", 'a\\')
tester("ntpath.join('a/', '')", 'a/')
tester("ntpath.join('a/b', 'x/y')", 'a/b\\x/y')
tester("ntpath.join('/a/b', 'x/y')", '/a/b\\x/y')
tester("ntpath.join('/a/b/', 'x/y')", '/a/b/x/y')
tester("ntpath.join('c:', 'x/y')", 'c:x/y')
tester("ntpath.join('c:a/b', 'x/y')", 'c:a/b\\x/y')
tester("ntpath.join('c:a/b/', 'x/y')", 'c:a/b/x/y')
tester("ntpath.join('c:/', 'x/y')", 'c:/x/y')
tester("ntpath.join('c:/a/b', 'x/y')", 'c:/a/b\\x/y')
tester("ntpath.join('c:/a/b/', 'x/y')", 'c:/a/b/x/y')
#tester("ntpath.join('//computer/share', 'x/y')", '//computer/share\\x/y')
#tester("ntpath.join('//computer/share/', 'x/y')", '//computer/share/x/y')
#tester("ntpath.join('//computer/share/a/b', 'x/y')", '//computer/share/a/b\\x/y')
tester("ntpath.join('a/b', '/x/y')", '/x/y')
tester("ntpath.join('/a/b', '/x/y')", '/x/y')
tester("ntpath.join('c:', '/x/y')", 'c:/x/y')
tester("ntpath.join('c:a/b', '/x/y')", 'c:/x/y')
tester("ntpath.join('c:/', '/x/y')", 'c:/x/y')
tester("ntpath.join('c:/a/b', '/x/y')", 'c:/x/y')
#tester("ntpath.join('//computer/share', '/x/y')", '//computer/share/x/y')
#tester("ntpath.join('//computer/share/', '/x/y')", '//computer/share/x/y')
#tester("ntpath.join('//computer/share/a', '/x/y')", '//computer/share/x/y')
tester("ntpath.join('c:', 'C:x/y')", 'C:x/y')
tester("ntpath.join('c:a/b', 'C:x/y')", 'C:a/b\\x/y')
tester("ntpath.join('c:/', 'C:x/y')", 'C:/x/y')
tester("ntpath.join('c:/a/b', 'C:x/y')", 'C:/a/b\\x/y')
for x in ('', 'a/b', '/a/b', 'c:', 'c:a/b', 'c:/', 'c:/a/b'):
for y in ('d:', 'd:x/y', 'd:/', 'd:/x/y'):
tester("ntpath.join(%r, %r)" % (x, y), y)
def test_normpath(self):
tester("ntpath.normpath('A//////././//.//B')", r'A\B')
tester("ntpath.normpath('A/./B')", r'A\B')
tester("ntpath.normpath('A/foo/../B')", r'A\B')
tester("ntpath.normpath('C:A//B')", r'C:A\B')
tester("ntpath.normpath('D:A/./B')", r'D:A\B')
tester("ntpath.normpath('e:A/foo/../B')", r'e:A\B')
tester("ntpath.normpath('C:///A//B')", r'C:\A\B')
tester("ntpath.normpath('D:///A/./B')", r'D:\A\B')
tester("ntpath.normpath('e:///A/foo/../B')", r'e:\A\B')
tester("ntpath.normpath('..')", r'..')
tester("ntpath.normpath('.')", r'.')
tester("ntpath.normpath('')", r'.')
tester("ntpath.normpath('/')", '\\')
tester("ntpath.normpath('c:/')", 'c:\\')
tester("ntpath.normpath('/../.././..')", '\\')
tester("ntpath.normpath('c:/../../..')", 'c:\\')
tester("ntpath.normpath('../.././..')", r'..\..\..')
tester("ntpath.normpath('K:../.././..')", r'K:..\..\..')
tester("ntpath.normpath('C:////a/b')", r'C:\a\b')
tester("ntpath.normpath('//machine/share//a/b')", r'\\machine\share\a\b')
tester("ntpath.normpath('\\\\.\\NUL')", r'\\.\NUL')
tester("ntpath.normpath('\\\\?\\D:/XY\\Z')", r'\\?\D:/XY\Z')
def test_expandvars(self):
with test_support.EnvironmentVarGuard() as env:
env.clear()
env["foo"] = "bar"
env["{foo"] = "baz1"
env["{foo}"] = "baz2"
tester('ntpath.expandvars("foo")', "foo")
tester('ntpath.expandvars("$foo bar")', "bar bar")
tester('ntpath.expandvars("${foo}bar")', "barbar")
tester('ntpath.expandvars("$[foo]bar")', "$[foo]bar")
tester('ntpath.expandvars("$bar bar")', "$bar bar")
tester('ntpath.expandvars("$?bar")', "$?bar")
tester('ntpath.expandvars("$foo}bar")', "bar}bar")
tester('ntpath.expandvars("${foo")', "${foo")
tester('ntpath.expandvars("${{foo}}")', "baz1}")
tester('ntpath.expandvars("$foo$foo")', "barbar")
tester('ntpath.expandvars("$bar$bar")', "$bar$bar")
tester('ntpath.expandvars("%foo% bar")', "bar bar")
tester('ntpath.expandvars("%foo%bar")', "barbar")
tester('ntpath.expandvars("%foo%%foo%")', "barbar")
tester('ntpath.expandvars("%%foo%%foo%foo%")', "%foo%foobar")
tester('ntpath.expandvars("%?bar%")', "%?bar%")
tester('ntpath.expandvars("%foo%%bar")', "bar%bar")
tester('ntpath.expandvars("\'%foo%\'%bar")', "\'%foo%\'%bar")
@unittest.skipUnless(test_support.FS_NONASCII, 'need test_support.FS_NONASCII')
def test_expandvars_nonascii(self):
encoding = sys.getfilesystemencoding()
def check(value, expected):
tester0("ntpath.expandvars(%r)" % value, expected)
tester0("ntpath.expandvars(%r)" % value.decode(encoding),
expected.decode(encoding))
with test_support.EnvironmentVarGuard() as env:
env.clear()
unonascii = test_support.FS_NONASCII
snonascii = unonascii.encode(encoding)
env['spam'] = snonascii
env[snonascii] = 'ham' + snonascii
check('$spam bar', '%s bar' % snonascii)
check('$%s bar' % snonascii, '$%s bar' % snonascii)
check('${spam}bar', '%sbar' % snonascii)
check('${%s}bar' % snonascii, 'ham%sbar' % snonascii)
check('$spam}bar', '%s}bar' % snonascii)
check('$%s}bar' % snonascii, '$%s}bar' % snonascii)
check('%spam% bar', '%s bar' % snonascii)
check('%{}% bar'.format(snonascii), 'ham%s bar' % snonascii)
check('%spam%bar', '%sbar' % snonascii)
check('%{}%bar'.format(snonascii), 'ham%sbar' % snonascii)
def test_expanduser(self):
tester('ntpath.expanduser("test")', 'test')
with test_support.EnvironmentVarGuard() as env:
env.clear()
tester('ntpath.expanduser("~test")', '~test')
env['HOMEPATH'] = 'eric\\idle'
env['HOMEDRIVE'] = 'C:\\'
tester('ntpath.expanduser("~test")', 'C:\\eric\\test')
tester('ntpath.expanduser("~")', 'C:\\eric\\idle')
del env['HOMEDRIVE']
tester('ntpath.expanduser("~test")', 'eric\\test')
tester('ntpath.expanduser("~")', 'eric\\idle')
env.clear()
env['USERPROFILE'] = 'C:\\eric\\idle'
tester('ntpath.expanduser("~test")', 'C:\\eric\\test')
tester('ntpath.expanduser("~")', 'C:\\eric\\idle')
env.clear()
env['HOME'] = 'C:\\idle\\eric'
tester('ntpath.expanduser("~test")', 'C:\\idle\\test')
tester('ntpath.expanduser("~")', 'C:\\idle\\eric')
tester('ntpath.expanduser("~test\\foo\\bar")',
'C:\\idle\\test\\foo\\bar')
tester('ntpath.expanduser("~test/foo/bar")',
'C:\\idle\\test/foo/bar')
tester('ntpath.expanduser("~\\foo\\bar")',
'C:\\idle\\eric\\foo\\bar')
tester('ntpath.expanduser("~/foo/bar")',
'C:\\idle\\eric/foo/bar')
def test_abspath(self):
# ntpath.abspath() can only be used on a system with the "nt" module
# (reasonably), so we protect this test with "import nt". This allows
# the rest of the tests for the ntpath module to be run to completion
# on any platform, since most of the module is intended to be usable
# from any platform.
# XXX this needs more tests
try:
import nt
except ImportError:
# check that the function is there even if we are not on Windows
ntpath.abspath
else:
tester('ntpath.abspath("C:\\")', "C:\\")
def test_relpath(self):
currentdir = os.path.split(os.getcwd())[-1]
tester('ntpath.relpath("a")', 'a')
tester('ntpath.relpath(os.path.abspath("a"))', 'a')
tester('ntpath.relpath("a/b")', 'a\\b')
tester('ntpath.relpath("../a/b")', '..\\a\\b')
tester('ntpath.relpath("a", "../b")', '..\\'+currentdir+'\\a')
tester('ntpath.relpath("a/b", "../c")', '..\\'+currentdir+'\\a\\b')
tester('ntpath.relpath("a", "b/c")', '..\\..\\a')
tester('ntpath.relpath("//conky/mountpoint/a", "//conky/mountpoint/b/c")', '..\\..\\a')
tester('ntpath.relpath("a", "a")', '.')
tester('ntpath.relpath("/foo/bar/bat", "/x/y/z")', '..\\..\\..\\foo\\bar\\bat')
tester('ntpath.relpath("/foo/bar/bat", "/foo/bar")', 'bat')
tester('ntpath.relpath("/foo/bar/bat", "/")', 'foo\\bar\\bat')
tester('ntpath.relpath("/", "/foo/bar/bat")', '..\\..\\..')
tester('ntpath.relpath("/foo/bar/bat", "/x")', '..\\foo\\bar\\bat')
tester('ntpath.relpath("/x", "/foo/bar/bat")', '..\\..\\..\\x')
tester('ntpath.relpath("/", "/")', '.')
tester('ntpath.relpath("/a", "/a")', '.')
tester('ntpath.relpath("/a/b", "/a/b")', '.')
tester('ntpath.relpath("c:/foo", "C:/FOO")', '.')
class NtCommonTest(test_genericpath.CommonTest):
pathmodule = ntpath
attributes = ['relpath', 'splitunc']
def test_main():
test_support.run_unittest(TestNtpath, NtCommonTest)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>get_effective_class_service.py<|end_file_name|><|fim▁begin|>##############################################################################
#<|fim▁hole|># designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2021 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from ddd.logic.learning_unit.builder.effective_class_identity_builder import EffectiveClassIdentityBuilder
from ddd.logic.learning_unit.commands import GetEffectiveClassCommand
from ddd.logic.learning_unit.domain.model.effective_class import EffectiveClass
from ddd.logic.learning_unit.repository.i_effective_class import IEffectiveClassRepository
# FIXME :: à tester unitairement + renvoyer EffectiveClassFromRepositoryDTO au lieu de l'objet du domaine
def get_effective_class(
cmd: 'GetEffectiveClassCommand',
effective_class_repository: 'IEffectiveClassRepository'
) -> 'EffectiveClass':
effective_class_identity = EffectiveClassIdentityBuilder.build_from_code_and_learning_unit_identity_data(
class_code=cmd.class_code,
learning_unit_code=cmd.learning_unit_code,
learning_unit_year=cmd.learning_unit_year
)
return effective_class_repository.get(entity_id=effective_class_identity)<|fim▁end|> | # OSIS stands for Open Student Information System. It's an application |
<|file_name|>qspsound.cpp<|end_file_name|><|fim▁begin|>/* Copyright (C) 2009 AI */
/* Copyright (C) 2011-2012 Yury P. Fedorchenko (yuryfdr at users.sf.net) */
/*
* This library is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include "sys/wait.h"
#include "string.h"
#include "qspcallbacks.h"
#include "screens.h"
#include "qspthread.h"
#include "pbwildmidi.h"
#ifdef __EMU__
#define KBDOPTS KBD_SCREENTOP
#else
#define KBDOPTS 0
#endif
#ifdef HAVE_SOUND
static struct played_file {
std::string name;
bool played;
pthread_mutex_t mt;
pthread_t th;
pid_t pid;
played_file():played(false), th(-1), pid(-1) {
pthread_mutex_init(&mt, NULL);
pthread_attr_init(&attr);
pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
} pthread_attr_t attr;
} played;
#ifdef __EMU__
static const char *const timidity[] = { "timidity", "-s", "22050", NULL, NULL };
static const char *const sox[] = { "sox", "-d", "-S", NULL, NULL };
static const char *const mpg123[] = { "mpg123", "-q", "-q", NULL, NULL };
static const char *const ogg123[] = { "ogg123", "-v", "-v", NULL, NULL };
static const int narg = 3;
#else
static const char *const timidity[] = { "timidity", "-s", "11025", NULL, NULL };
static const char *const sox[] = { "sndfile-play", "", "", NULL, NULL };
static const char *const mpg123[] = { "mpg123", "-a", "/dev/sound/dsp", NULL, NULL };
static const char *const ogg123[] = { "ogg123", "-d", "/dev/sound/dsp", NULL, NULL };
static const int narg = 3;
#endif
// Playback worker-thread entry point.
// Chooses an external player binary from the extension of played.name,
// kills any previous player process, then fork()/execlp()s the player and
// waits for it to exit.  Shared state lives in the file-global `played`
// and is guarded by played.mt.
void *playfn(void *pf)
{
	// Ignore names too short to carry a ".xxx" extension.
	// (The size() != 0 check is redundant given the > 4 check.)
	if (played.name.size() != 0 && played.name.size() > 4) {
		pthread_mutex_lock(&played.mt);
		// Last four characters, e.g. ".mp3".  NOTE(review): substr's
		// second argument is a length; size()-1 merely clamps to the
		// end of the string here.
		std::string ext = played.name.substr(played.name.size() - 4, played.name.size() - 1);
		fprintf(stderr, "ext :%s:\n", ext.c_str());
		// Re-encode the quest-relative part of the path (KOI-8 ->
		// UTF-8), then prepend the quest directory again.
		std::string name =
		    (to_utf8(played.name.substr(GetQuestPath().size()).c_str(), koi8_to_unicode));
		name = GetQuestPath() + name;
		std::string runstr;        // player executable name
		const char *const *targs;  // argv template for that player
		if (ext == ".mp3") {
			runstr = "mpg123";
			targs = mpg123;
		} else if (ext == ".mid" || ext == ".MID") {
#if 0
			// Old in-process MIDI path, disabled in favour of timidity.
			played.pid = -1;
			played.played = true;
			pthread_mutex_unlock(&played.mt);
			play_midi(convertbackslash(name).c_str(), 22050, 127);
			return pf;
#endif
			runstr = "timidity";
			targs = timidity;
		} /*else if(ext==".ogg" || ext==".Ogg"){
		   runstr="ogg123";
		   //timidity[narg]=convertbackslash(name).c_str();
		   targs=ogg123;
		   //runstr+=convertbackslash(name);
		   } */
		else {
			// Anything else (wav/ogg/...) goes through sndfile-play.
			runstr = "sndfile-play";
			targs = sox;
		}
		// Debug dump of the argv template that will be exec'ed.
		for (int i = 0; i < narg + 2; ++i) {
			fprintf(stderr, "arg %d :%s\n", i, targs[i]);
		}
		// Stop whatever was playing before starting the new player.
		if (played.pid != -1)
			kill(played.pid, SIGKILL);
		pid_t pid = fork();
		played.played = true;
		pthread_mutex_unlock(&played.mt);
		if (pid != -1) {
			if (pid == 0) {
				// Child: replace ourselves with the player binary.
				fprintf(stderr, "run %s %s %s\n", runstr.c_str(), convertbackslash(name).c_str(), targs[1]);
#ifndef __EMU__
				execlp((std::string("/mnt/ext1/system/bin/") + runstr).c_str(), targs[0], targs[1]
				       , targs[2], convertbackslash(name).c_str(), (char *)0);
#else
				execlp((std::string("/usr/bin/") + runstr).c_str(), targs[0], targs[1]
				       , targs[2], convertbackslash(name).c_str(), (char *)0);
#endif
				// Only reached when execlp itself failed.
				printf("child %d %s exited\n", errno, strerror(errno));
			} else {
				// Parent: publish the child pid so CloseFile() can
				// kill it, then block until the player finishes.
				// NOTE(review): played.pid is written here without
				// holding played.mt — confirm this race with
				// CloseFile() is acceptable.
				played.pid = pid;
				int status;
				waitpid(pid, &status, 0);
				played.pid = -1;
				fprintf(stderr, "afwp child %s exited\n", runstr.c_str());
			}
		}
	}
	// Mark playback finished regardless of how we got here.
	pthread_mutex_lock(&played.mt);
	played.played = false;
	pthread_mutex_unlock(&played.mt);
	return pf;
}
/*void* playfn_aaa(void* pf){
if(played.name.empty())return pf;
if(played.name.size()<=4)
pthread_mutex_lock(&played.mt);
std::string ext=played.name.substr(played.name.size()-4,played.name.size()-1);
//fprintf(stderr,"ext :%s:\n",ext.c_str());
std::string name=played.name.substr(GetQuestPath().size());
played.played=true;
pthread_mutex_unlock(&played.mt);
if(ext==".mp3"){
}else if(ext==".mid" || ext==".MID"){
play_midi(convertbackslash(name).c_str(),22050,127);
}else if(ext==".ogg" || ext==".Ogg"){<|fim▁hole|> played.played=false;
played.name="";
}
pthread_mutex_unlock(&played.mt);
return pf;
} */
// Reports whether `file` is the track currently registered in `played`.
QSP_BOOL QSPCallbacks::IsPlay(const QSP_CHAR * file)
{
	const bool is_current = (played.name == file);
	return is_current;
}
// QSP callback: stop current playback.  The argument only gets logged —
// whatever is playing is stopped unconditionally.
// NOTE(review): PlayFile() calls this with file == NULL; passing NULL to
// a %s conversion is undefined behaviour — confirm/fix upstream.
void QSPCallbacks::CloseFile(const QSP_CHAR * file)
{
	pthread_mutex_lock(&played.mt);
	fprintf(stderr, "close %s\n", file);
#if 0
	stop_play();
#endif
	played.played = false;
	if (played.pid != -1) {
		// Kill the external player; playfn's waitpid() then returns.
		fprintf(stderr, "kill %d\n", played.pid);
		kill(played.pid, SIGKILL);
		played.pid = -1;
	}
	void *ret;
	fprintf(stderr, "before join %d\n", played.th);
	// Unlock before joining so playfn can take the mutex and finish.
	pthread_mutex_unlock(&played.mt);
	if (played.th != -1)
		pthread_join(played.th, &ret);
	fprintf(stderr, "after join %d\n", played.th);
	played.th = -1;
}
// QSP callback: start playing `file` (the volume argument is currently
// ignored).  A request for the file that is already playing is a no-op;
// otherwise current playback is stopped and a playfn worker thread is
// started for the new file.
void QSPCallbacks::PlayFile(const QSP_CHAR * file, long volume)
{
	fprintf(stderr, "Play:%s :%d\n", file, played.played);
	if (IsPlay(file))
		return;
	else
		CloseFile(NULL);
	fprintf(stderr, "Play:%s :%d\n", file, played.played);
	// Publish the new track name before the worker thread reads it.
	pthread_mutex_lock(&played.mt);
	played.name = file;
	played.played = true;
	pthread_mutex_unlock(&played.mt);
	pthread_create(&played.th, &played.attr, playfn, NULL);
}
#else
// Stub for builds without audio support: unconditionally claims the file
// is already playing.
QSP_BOOL QSPCallbacks::IsPlay(const QSP_CHAR * file)
{
	return true;
}
// Stub for builds without audio support: there is never anything to stop.
void QSPCallbacks::CloseFile(const QSP_CHAR * file)
{
}
// Stub for builds without audio support: playback requests (and the
// volume argument) are ignored.
void QSPCallbacks::PlayFile(const QSP_CHAR * file, long volume)
{
}
#endif<|fim▁end|> | }
pthread_mutex_lock(&played.mt);
if(name==played.name.substr(GetQuestPath().size())){ |
<|file_name|>builder.go<|end_file_name|><|fim▁begin|>package cmd
import (
"fmt"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
docker "github.com/fsouza/go-dockerclient"
"github.com/golang/glog"
kapi "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/client/restclient"
"k8s.io/kubernetes/pkg/runtime"
s2iapi "github.com/openshift/source-to-image/pkg/api"
"github.com/openshift/origin/pkg/build/api"
bld "github.com/openshift/origin/pkg/build/builder"
"github.com/openshift/origin/pkg/build/builder/cmd/scmauth"
"github.com/openshift/origin/pkg/client"
dockerutil "github.com/openshift/origin/pkg/cmd/util/docker"
"github.com/openshift/origin/pkg/generate/git"
"github.com/openshift/origin/pkg/version"
)
type builder interface {
Build(dockerClient bld.DockerClient, sock string, buildsClient client.BuildInterface, build *api.Build, gitClient bld.GitClient, cgLimits *s2iapi.CGroupLimits) error
}
type builderConfig struct {
build *api.Build
sourceSecretDir string
dockerClient *docker.Client
dockerEndpoint string
buildsClient client.BuildInterface
}
// newBuilderConfigFromEnvironment assembles a builderConfig from the
// environment the build pod starts with: the serialized build object
// ($BUILD), the source-secret mount path ($SOURCE_SECRET_PATH), a docker
// client (honouring $DOCKER_HOST) and an OpenShift builds client derived
// from the in-cluster kube configuration.
func newBuilderConfigFromEnvironment() (*builderConfig, error) {
	cfg := &builderConfig{}
	var err error
	// build (BUILD)
	buildStr := os.Getenv("BUILD")
	glog.V(4).Infof("$BUILD env var is %s \n", buildStr)
	cfg.build = &api.Build{}
	if err = runtime.DecodeInto(kapi.Codecs.UniversalDecoder(), []byte(buildStr), cfg.build); err != nil {
		return nil, fmt.Errorf("unable to parse build: %v", err)
	}
	// A version skew between master and builder image is tolerated but
	// logged loudly so it shows up when debugging build failures.
	masterVersion := os.Getenv(api.OriginVersion)
	thisVersion := version.Get().String()
	if len(masterVersion) != 0 && masterVersion != thisVersion {
		glog.Warningf("Master version %q does not match Builder image version %q", masterVersion, thisVersion)
	} else {
		glog.V(2).Infof("Master version %q, Builder version %q", masterVersion, thisVersion)
	}
	// sourceSecretsDir (SOURCE_SECRET_PATH)
	cfg.sourceSecretDir = os.Getenv("SOURCE_SECRET_PATH")
	// dockerClient and dockerEndpoint (DOCKER_HOST)
	// usually not set, defaults to docker socket
	cfg.dockerClient, cfg.dockerEndpoint, err = dockerutil.NewHelper().GetClient()
	if err != nil {
		return nil, fmt.Errorf("error obtaining docker client: %v", err)
	}
	// buildsClient (KUBERNETES_SERVICE_HOST, KUBERNETES_SERVICE_PORT)
	clientConfig, err := restclient.InClusterConfig()
	if err != nil {
		return nil, fmt.Errorf("failed to get client config: %v", err)
	}
	osClient, err := client.New(clientConfig)
	if err != nil {
		return nil, fmt.Errorf("error obtaining OpenShift client: %v", err)
	}
	// Scope the builds client to the namespace the build object lives in.
	cfg.buildsClient = osClient.Builds(cfg.build.Namespace)
	return cfg, nil
}
// setupGitEnvironment builds the process environment git should run with:
// GIT_ASKPASS is disabled, credentials from the build's source secret (if
// any) are materialized, and HTTP(S) proxy variables are copied from the
// git source spec.  Side effect: when secret setup yields an override URL,
// the build's source URI is rewritten and the original is preserved in an
// annotation.  Returns an empty slice when the build has no git source.
func (c *builderConfig) setupGitEnvironment() ([]string, error) {
	gitSource := c.build.Spec.Source.Git
	// For now, we only handle git. If not specified, we're done
	if gitSource == nil {
		return []string{}, nil
	}
	sourceSecret := c.build.Spec.Source.SourceSecret
	gitEnv := []string{"GIT_ASKPASS=true"}
	// If a source secret is present, set it up and add its environment variables
	if sourceSecret != nil {
		// TODO: this should be refactored to let each source type manage which secrets
		// it accepts
		sourceURL, err := git.ParseRepository(gitSource.URI)
		if err != nil {
			return nil, fmt.Errorf("cannot parse build URL: %s", gitSource.URI)
		}
		scmAuths := scmauth.GitAuths(sourceURL)
		// TODO: remove when not necessary to fix up the secret dir permission
		sourceSecretDir, err := fixSecretPermissions(c.sourceSecretDir)
		if err != nil {
			return nil, fmt.Errorf("cannot fix source secret permissions: %v", err)
		}
		secretsEnv, overrideURL, err := scmAuths.Setup(sourceSecretDir)
		if err != nil {
			return nil, fmt.Errorf("cannot setup source secret: %v", err)
		}
		if overrideURL != nil {
			// Remember the original URI before swapping in the override.
			c.build.Annotations[bld.OriginalSourceURLAnnotationKey] = gitSource.URI
			gitSource.URI = overrideURL.String()
		}
		gitEnv = append(gitEnv, secretsEnv...)
	}
	// Both the upper- and lower-case proxy variables are set, since tools
	// disagree on which spelling they honour.
	if gitSource.HTTPProxy != nil && len(*gitSource.HTTPProxy) > 0 {
		gitEnv = append(gitEnv, fmt.Sprintf("HTTP_PROXY=%s", *gitSource.HTTPProxy))
		gitEnv = append(gitEnv, fmt.Sprintf("http_proxy=%s", *gitSource.HTTPProxy))
	}
	if gitSource.HTTPSProxy != nil && len(*gitSource.HTTPSProxy) > 0 {
		gitEnv = append(gitEnv, fmt.Sprintf("HTTPS_PROXY=%s", *gitSource.HTTPSProxy))
		gitEnv = append(gitEnv, fmt.Sprintf("https_proxy=%s", *gitSource.HTTPSProxy))
	}
	return bld.MergeEnv(os.Environ(), gitEnv), nil
}
// execute is responsible for running a build
func (c *builderConfig) execute(b builder) error {
gitEnv, err := c.setupGitEnvironment()
if err != nil {
return err
}
gitClient := git.NewRepositoryWithEnv(gitEnv)
cgLimits, err := bld.GetCGroupLimits()
if err != nil {
return fmt.Errorf("failed to retrieve cgroup limits: %v", err)
}
glog.V(2).Infof("Running build with cgroup limits: %#v", *cgLimits)
if err := b.Build(c.dockerClient, c.dockerEndpoint, c.buildsClient, c.build, gitClient, cgLimits); err != nil {
return fmt.Errorf("build error: %v", err)
}
if c.build.Spec.Output.To == nil || len(c.build.Spec.Output.To.Name) == 0 {
glog.Warning("Build does not have an Output defined, no output image was pushed to a registry.")
}
<|fim▁hole|>
// fixSecretPermissions loweres access permissions to very low acceptable level
// TODO: this method should be removed as soon as secrets permissions are fixed upstream
// Kubernetes issue: https://github.com/kubernetes/kubernetes/issues/4789
func fixSecretPermissions(secretsDir string) (string, error) {
secretTmpDir, err := ioutil.TempDir("", "tmpsecret")
if err != nil {
return "", err
}
cmd := exec.Command("cp", "-R", ".", secretTmpDir)
cmd.Dir = secretsDir
if err := cmd.Run(); err != nil {
return "", err
}
secretFiles, err := ioutil.ReadDir(secretTmpDir)
if err != nil {
return "", err
}
for _, file := range secretFiles {
if err := os.Chmod(filepath.Join(secretTmpDir, file.Name()), 0600); err != nil {
return "", err
}
}
return secretTmpDir, nil
}
// dockerBuilder adapts the Docker build strategy to the builder interface.
type dockerBuilder struct{}

// Build starts a Docker build.
// The docker socket argument `sock` is accepted to satisfy the builder
// interface but is not needed by the Docker strategy and is ignored here.
func (dockerBuilder) Build(dockerClient bld.DockerClient, sock string, buildsClient client.BuildInterface, build *api.Build, gitClient bld.GitClient, cgLimits *s2iapi.CGroupLimits) error {
	return bld.NewDockerBuilder(dockerClient, buildsClient, build, gitClient, cgLimits).Build()
}
// s2iBuilder adapts the S2I (source-to-image) build strategy to the
// builder interface.
type s2iBuilder struct{}

// Build starts an S2I build, passing the docker socket through since the
// S2I strategy (unlike the Docker one) needs it.
func (s2iBuilder) Build(dockerClient bld.DockerClient, sock string, buildsClient client.BuildInterface, build *api.Build, gitClient bld.GitClient, cgLimits *s2iapi.CGroupLimits) error {
	return bld.NewS2IBuilder(dockerClient, sock, buildsClient, build, gitClient, cgLimits).Build()
}
// runBuild assembles the builder configuration from the environment and
// executes the given builder with it, terminating the whole process via
// glog.Fatalf on any failure.
func runBuild(builder builder) {
	config, err := newBuilderConfigFromEnvironment()
	if err != nil {
		glog.Fatalf("Cannot setup builder configuration: %v", err)
	}
	if err := config.execute(builder); err != nil {
		glog.Fatalf("Error: %v", err)
	}
}
// RunDockerBuild creates a docker builder and runs its build.
// Entry point for docker-strategy build pods; failures terminate the
// process inside runBuild.
func RunDockerBuild() {
	runBuild(dockerBuilder{})
}
// RunSTIBuild creates a STI builder and runs its build
func RunSTIBuild() {
runBuild(s2iBuilder{})
}<|fim▁end|> | return nil
} |
<|file_name|>show_status.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright 2019 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python ./show_status.py /home/rhel72/config-test.yml DEBUG
import os
import sys
import subprocess
import readline
from lib.inventory import Inventory
from lib.logger import Logger
from lib.ssh import SSH_CONNECTION, SSH_Exception
from lib import genesis
GEN_PATH = genesis.GEN_PATH
GEN_CONTAINER_NAME = genesis.container_name
GEN_CONTAINER_RUNNING = genesis.container_running()
GEN_CONTAINER_ADDR = genesis.container_addr()
GEN_CONTAINER_SSH_KEY_PRIVATE = genesis.get_ssh_private_key_file()
HOME_DIR = os.path.expanduser('~')
FILE_PATH = os.path.dirname(os.path.abspath(__file__))
def rlinput(prompt, prefill=''):
    """Prompt for a line of input with ``prefill`` pre-typed into the
    readline buffer so the user can edit it in place.

    The startup hook is always reset in the ``finally`` clause so later
    ``input()`` calls start with an empty buffer.
    """
    readline.set_startup_hook(lambda: readline.insert_text(prefill))
    try:
        return input(prompt)
    finally:
        readline.set_startup_hook()
def main(log, inv_file):
inv = Inventory(log, inv_file)
print('\nBridge Status: \n')
vlan_mgmt = inv.get_vlan_mgmt_network()
bridge_vlan_mgmt = 'br' + str(vlan_mgmt)
vlan_mgmt_client = inv.get_vlan_mgmt_client_network()<|fim▁hole|> output = subprocess.check_output(['bash', '-c', 'brctl show']
).decode("utf-8")
if bridge_vlan_mgmt not in output:
print(' Management bridge {} not found\n'.format(bridge_vlan_mgmt))
else:
print(subprocess.check_output(
['bash', '-c', 'brctl show ' + bridge_vlan_mgmt]))
if bridge_vlan_mgmt_client not in output:
print(' Client bridge {} not found\n'.format(bridge_vlan_mgmt_client))
else:
print(subprocess.check_output(
['bash', '-c', 'brctl show ' + bridge_vlan_mgmt_client]))
print('Container Status: \n')
output = subprocess.check_output(['bash', '-c', 'sudo lxc-ls -f']
).decode("utf-8")
if GEN_CONTAINER_NAME + ' ' in output:
print(output)
else:
print(' ' + GEN_CONTAINER_NAME + ' container does not exist\n')
if GEN_CONTAINER_RUNNING:
ssh_cont = None
ssh_log_filename = GEN_PATH + '/gen_ssh.log'
if os.path.isfile(GEN_CONTAINER_SSH_KEY_PRIVATE):
try:
ssh_cont = SSH_CONNECTION(
GEN_CONTAINER_ADDR,
log=log,
ssh_log=ssh_log_filename,
username='deployer',
look_for_keys=False,
key_filename=GEN_CONTAINER_SSH_KEY_PRIVATE)
except SSH_Exception as exc:
print('Failed to SSH to container {} using private key {}'
.format(GEN_CONTAINER_NAME, GEN_CONTAINER_SSH_KEY_PRIVATE))
print(exc)
if not ssh_cont:
PASSWORD = 'ubuntu'
print('Trying password "{}"'.format(PASSWORD))
while PASSWORD[-1:] != '.':
try:
ssh_cont = SSH_CONNECTION(
GEN_CONTAINER_ADDR,
log=log,
ssh_log=ssh_log_filename,
username='deployer',
password=PASSWORD,
look_for_keys=False)
break
except SSH_Exception as exc:
print('Failed to SSH to container {} using password {}'
.format(GEN_CONTAINER_NAME, PASSWORD))
print(exc)
PASSWORD = rlinput("Enter a password for container (last char = '.' to terminate): ", PASSWORD)
else:
sys.exit(1)
print()
_, cobbler_running, _ = ssh_cont.send_cmd(
'ps aux|grep cobbler')
if 'root' in cobbler_running:
print('cobbler is running')
_, cobbler_status, _ = ssh_cont.send_cmd(
'sudo cobbler status')
print(cobbler_status)
else:
print('cobbler is not running')
_, dnsmasq_running, _ = ssh_cont.send_cmd(
'ps aux|grep dnsmasq')
if 'root' in dnsmasq_running:
print('dnsmasq is running')
_, dnsmasq_status, _ = ssh_cont.send_cmd(
'cat /var/lib/misc/dnsmasq.leases')
print(dnsmasq_status)
else:
print('dnsmasq is not running')
ssh_cont.close()
else:
print('Container not running {}'.format(GEN_CONTAINER_RUNNING))
def print_lines(str, line_list):
    """Split ``str`` at newline characters, then print the lines that
    contain at least one element of ``line_list``.  If ``line_list``
    contains ``'*'`` every line is printed.

    Each matching line is printed exactly once, even when several
    substrings match it (the previous implementation printed such lines
    once per matching substring).

    Note: the first parameter keeps its historical name ``str`` (which
    shadows the builtin) for backward compatibility with keyword callers.
    """
    for line in str.splitlines():
        if any(substr == '*' or substr in line for substr in line_list):
            print(line)
def get_int_input(prompt_str, minn, maxx):
    """Repeatedly prompt with ``prompt_str`` until the user types an
    integer in the inclusive range ``minn``..``maxx``; return that value.

    Any non-integer or out-of-range entry prints a short reminder and
    prompts again.
    """
    while True:
        raw = input(prompt_str)
        try:
            value = int(raw)
        except ValueError:
            value = None
        if value is not None and minn <= value <= maxx:
            return value
        print("enter an integer between " +
              str(minn) + ' and ' + str(maxx))
if __name__ == '__main__':
    # Show status of the POWER-Up environment.
    #
    # Command-line arguments:
    #     INV_FILE (string): Inventory file path.
    #     LOG_LEVEL (string): Log level.
    #
    # Exits with status 1 when more arguments than expected are supplied.
    # NOTE: supplying fewer than two arguments still raises IndexError,
    # matching the original behaviour.
    LOG = Logger(__file__)

    ARGV_MAX = 3
    ARGV_COUNT = len(sys.argv)
    if ARGV_COUNT > ARGV_MAX:
        # Simplified from a pointless raise/except dance around the same
        # log-and-exit behaviour.
        LOG.error('Invalid argument count')
        sys.exit(1)

    INV_FILE = sys.argv[1]
    LOG.set_level(sys.argv[2])

    main(LOG, INV_FILE)
|
<|file_name|>AppCompatPreferenceActivity.java<|end_file_name|><|fim▁begin|>/**
Copyright:
2015/2016 Benjamin Aigner
developer.google.com
This file is part of AustrianPublicStream.
AustrianPublicStream is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
AustrianPublicStream is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with AustrianPublicStream. If not, see <http://www.gnu.org/licenses/>.
**/
package systems.byteswap.publicstream;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceActivity;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatDelegate;
import android.view.MenuInflater;
import android.view.View;
import android.view.ViewGroup;
/**
* A {@link android.preference.PreferenceActivity} which implements and proxies the necessary calls
* to be used with AppCompat.
*/
public abstract class AppCompatPreferenceActivity extends PreferenceActivity {
private AppCompatDelegate mDelegate;
@Override
protected void onCreate(Bundle savedInstanceState) {
getDelegate().installViewFactory();
getDelegate().onCreate(savedInstanceState);
super.onCreate(savedInstanceState);
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
getDelegate().onPostCreate(savedInstanceState);
}
public ActionBar getSupportActionBar() {
return getDelegate().getSupportActionBar();
}
@NonNull
@Override
public MenuInflater getMenuInflater() {
return getDelegate().getMenuInflater();
}
<|fim▁hole|>
@Override
public void setContentView(View view) {
getDelegate().setContentView(view);
}
@Override
public void setContentView(View view, ViewGroup.LayoutParams params) {
getDelegate().setContentView(view, params);
}
@Override
public void addContentView(View view, ViewGroup.LayoutParams params) {
getDelegate().addContentView(view, params);
}
@Override
protected void onPostResume() {
super.onPostResume();
getDelegate().onPostResume();
}
@Override
protected void onTitleChanged(CharSequence title, int color) {
super.onTitleChanged(title, color);
getDelegate().setTitle(title);
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
getDelegate().onConfigurationChanged(newConfig);
}
@Override
protected void onStop() {
super.onStop();
getDelegate().onStop();
}
@Override
protected void onDestroy() {
super.onDestroy();
getDelegate().onDestroy();
}
public void invalidateOptionsMenu() {
getDelegate().invalidateOptionsMenu();
}
private AppCompatDelegate getDelegate() {
if (mDelegate == null) {
mDelegate = AppCompatDelegate.create(this, null);
}
return mDelegate;
}
}<|fim▁end|> | @Override
public void setContentView(@LayoutRes int layoutResID) {
getDelegate().setContentView(layoutResID);
} |
<|file_name|>AbstractDozerB2IBeanConverter.java<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2012 GFT Appverse, S.L., Sociedad Unipersonal.
This Source Code Form is subject to the terms of the Appverse Public License
Version 2.0 (“APL v2.0”). If a copy of the APL was not distributed with this
file, You can obtain one at http://www.appverse.mobi/licenses/apl_v2.0.pdf. [^]
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the conditions of the AppVerse Public License v2.0
are met.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. EXCEPT IN CASE OF WILLFUL MISCONDUCT OR GROSS NEGLIGENCE, IN NO EVENT
SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT(INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
package org.appverse.web.framework.backend.core.enterprise.converters;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.Resource;
import org.appverse.web.framework.backend.core.beans.AbstractBusinessBean;
import org.appverse.web.framework.backend.core.beans.AbstractIntegrationBean;
import org.dozer.Mapper;
import org.dozer.spring.DozerBeanMapperFactoryBean;
public abstract class AbstractDozerB2IBeanConverter<BusinessBean extends AbstractBusinessBean, IntegrationBean extends AbstractIntegrationBean>
implements IB2IBeanConverter<BusinessBean, IntegrationBean> {
private Class<IntegrationBean> integrationBeanClass;
private Class<BusinessBean> businessBeanClass;
private String SCOPE_WITHOUT_DEPENDENCIES = "default-scope-without-dependencies";
private String SCOPE_COMPLETE = "default-scope-complete";
private String SCOPE_CUSTOM = "default-scope-custom";
@Resource
protected DozerBeanMapperFactoryBean dozerBeanMapperFactoryBean;
public AbstractDozerB2IBeanConverter() {
}
/**
 * Converts a business bean into its integration counterpart using the
 * default {@code Complete} conversion scope.
 */
@Override
public IntegrationBean convert(BusinessBean bean) throws Exception {
	return convert(bean, ConversionType.Complete);
}
@Override
public IntegrationBean convert(BusinessBean businessBean,
ConversionType conversionType) throws Exception {
return convert(businessBean, getScope(conversionType));
}
@Override
public IntegrationBean convert(BusinessBean businessBean,
String scope) throws Exception {
return ((Mapper) dozerBeanMapperFactoryBean.getObject()).map(
businessBean, integrationBeanClass, scope);
}
@Override
public void convert(final BusinessBean businessBean,
IntegrationBean integrationBean) throws Exception {
convert(businessBean, integrationBean, ConversionType.Complete);
}
@Override
public void convert(final BusinessBean businessBean,
IntegrationBean integrationBean, ConversionType conversionType)
throws Exception {
convert(businessBean, integrationBean, getScope(conversionType));
}
@Override
public void convert(final BusinessBean businessBean,
IntegrationBean integrationBean, String scope)
throws Exception{
((Mapper) dozerBeanMapperFactoryBean.getObject()).map(businessBean,
integrationBean, scope);
}
@Override
public BusinessBean convert(IntegrationBean bean) throws Exception {
return convert(bean, ConversionType.Complete);
}
@Override
public void convert(final IntegrationBean integrationBean,
BusinessBean businessBean) throws Exception {
convert(integrationBean, businessBean, ConversionType.Complete);
}
@Override
public void convert(final IntegrationBean integrationBean,
BusinessBean businessBean, ConversionType conversionType)
throws Exception {
convert(integrationBean, businessBean, getScope(conversionType));
}
@Override
public void convert(final IntegrationBean integrationBean,
BusinessBean businessBean, String scope)
throws Exception {
((Mapper) dozerBeanMapperFactoryBean.getObject()).map(integrationBean,
businessBean, scope);
}
@Override
public BusinessBean convert(IntegrationBean integrationBean,
ConversionType conversionType) throws Exception {
return convert(integrationBean, getScope(conversionType));
}
@Override
public BusinessBean convert(IntegrationBean integrationBean,
String scope) throws Exception {
return ((Mapper) dozerBeanMapperFactoryBean.getObject()).map(
integrationBean, businessBeanClass, scope);
}
@Override
public List<IntegrationBean> convertBusinessList(
List<BusinessBean> businessBeans) throws Exception {
List<IntegrationBean> integrationBeans = new ArrayList<IntegrationBean>();
for (BusinessBean businessBean : businessBeans) {
IntegrationBean integrationBean = convert(businessBean,
ConversionType.Complete);
integrationBeans.add(integrationBean);
}
return integrationBeans;
}
@Override
public List<IntegrationBean> convertBusinessList(
List<BusinessBean> businessBeans, ConversionType conversionType)
throws Exception {
return convertBusinessList(businessBeans, getScope(conversionType));
}
@Override
public List<IntegrationBean> convertBusinessList(
List<BusinessBean> businessBeans, String scope)
throws Exception {
List<IntegrationBean> integrationBeans = new ArrayList<IntegrationBean>();
for (BusinessBean businessBean : businessBeans) {
IntegrationBean integrationBean = ((Mapper) dozerBeanMapperFactoryBean
.getObject()).map(businessBean, integrationBeanClass,
scope);
integrationBeans.add(integrationBean);
}
return integrationBeans;
}
@Override
public void convertBusinessList(final List<BusinessBean> businessBeans,
List<IntegrationBean> integrationBeans) throws Exception {
if (businessBeans.size() != integrationBeans.size()) {
throw new ListDiffersSizeException();
}
for (int i = 0; i < businessBeans.size(); i++) {
convert(businessBeans.get(i), integrationBeans.get(i),
ConversionType.Complete);
}
}
@Override
public void convertBusinessList(final List<BusinessBean> businessBeans,
List<IntegrationBean> integrationBeans,
ConversionType conversionType) throws Exception {
convertBusinessList(businessBeans, integrationBeans, getScope(conversionType));
}
@Override
public void convertBusinessList(final List<BusinessBean> businessBeans,
List<IntegrationBean> integrationBeans,
String scope) throws Exception {
if (businessBeans.size() != integrationBeans.size()) {
throw new ListDiffersSizeException();
}
for (int i = 0; i < businessBeans.size(); i++) {
((Mapper) dozerBeanMapperFactoryBean.getObject()).map(
businessBeans.get(i), integrationBeans.get(i),
scope);
}
}
@Override
public List<BusinessBean> convertIntegrationList(
List<IntegrationBean> integrationBeans) throws Exception {
List<BusinessBean> businessBeans = new ArrayList<BusinessBean>();
for (IntegrationBean integrationBean : integrationBeans) {
BusinessBean businessBean = convert(integrationBean,
ConversionType.Complete);
businessBeans.add(businessBean);
}
return businessBeans;
}
@Override
public List<BusinessBean> convertIntegrationList(
List<IntegrationBean> integrationBeans,
ConversionType conversionType) throws Exception {
return convertIntegrationList(integrationBeans, getScope(conversionType));
}
@Override
public List<BusinessBean> convertIntegrationList(
List<IntegrationBean> integrationBeans,
String scope) throws Exception {
List<BusinessBean> businessBeans = new ArrayList<BusinessBean>();
for (IntegrationBean integrationBean : integrationBeans) {
BusinessBean businessBean = ((Mapper) dozerBeanMapperFactoryBean
.getObject()).map(integrationBean, businessBeanClass,
scope);
businessBeans.add(businessBean);
}
return businessBeans;
}
@Override
public void convertIntegrationList(<|fim▁hole|> }
for (int i = 0; i < integrationBeans.size(); i++) {
convert(integrationBeans.get(i), businessBeans.get(i),
ConversionType.Complete);
}
}
@Override
public void convertIntegrationList(
final List<IntegrationBean> integrationBeans,
List<BusinessBean> businessBeans, ConversionType conversionType)
throws Exception {
convertIntegrationList(integrationBeans, businessBeans, getScope(conversionType));
}
@Override
public void convertIntegrationList(
final List<IntegrationBean> integrationBeans,
List<BusinessBean> businessBeans, String scope)
throws Exception {
if (integrationBeans.size() != businessBeans.size()) {
throw new ListDiffersSizeException();
}
for (int i = 0; i < integrationBeans.size(); i++) {
((Mapper) dozerBeanMapperFactoryBean.getObject()).map(
integrationBeans.get(i), businessBeans.get(i),
scope);
}
}
@Override
public String getScope(ConversionType conversionType) {
String scope = null;
if (conversionType == ConversionType.WithoutDependencies) {
scope = SCOPE_WITHOUT_DEPENDENCIES;
} else if (conversionType == ConversionType.Custom) {
scope = SCOPE_CUSTOM;
} else {
scope = SCOPE_COMPLETE;
}
return scope;
}
public void setBeanClasses(Class<BusinessBean> businessBeanClass,
Class<IntegrationBean> integrationBeanClass) {
this.integrationBeanClass = integrationBeanClass;
this.businessBeanClass = businessBeanClass;
}
@Override
public void setScopes(String... scopes) {
if (scopes.length > 0) {
this.SCOPE_COMPLETE = scopes[0];
}
if (scopes.length > 1) {
this.SCOPE_WITHOUT_DEPENDENCIES = scopes[1];
}
if (scopes.length > 2) {
this.SCOPE_CUSTOM = scopes[2];
}
}
}<|fim▁end|> | final List<IntegrationBean> integrationBeans,
List<BusinessBean> businessBeans) throws Exception {
if (integrationBeans.size() != businessBeans.size()) {
throw new ListDiffersSizeException(); |
<|file_name|>setting.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2018 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package repo
import (
"errors"
"fmt"
"io/ioutil"
"net/url"
"regexp"
"strings"
"time"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/auth"
"code.gitea.io/gitea/modules/base"
"code.gitea.io/gitea/modules/context"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/repository"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/structs"
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/validation"
"code.gitea.io/gitea/routers/utils"
"code.gitea.io/gitea/services/mailer"
mirror_service "code.gitea.io/gitea/services/mirror"
repo_service "code.gitea.io/gitea/services/repository"
"github.com/unknwon/com"
"mvdan.cc/xurls/v2"
)
const (
	// Template paths (relative to the template root) for the repository
	// settings sub-pages rendered by the handlers in this file.
	tplSettingsOptions base.TplName = "repo/settings/options"
	tplCollaboration base.TplName = "repo/settings/collaboration"
	tplBranches base.TplName = "repo/settings/branches"
	tplGithooks base.TplName = "repo/settings/githooks"
	tplGithookEdit base.TplName = "repo/settings/githook_edit"
	tplDeployKeys base.TplName = "repo/settings/deploy_keys"
	tplProtectedBranch base.TplName = "repo/settings/protected_branch"
)
// validFormAddress matches acceptable mirror addresses submitted through
// the settings form (used in SettingsPost).  It is assigned outside this
// chunk — presumably from xurls during package init; TODO confirm.
var validFormAddress *regexp.Regexp
// Settings show a repository's settings page.
// It sets the page title, marks the "options" tab active, exposes the
// instance-wide ForcePrivate flag to the template, and renders the
// settings/options template.
func Settings(ctx *context.Context) {
	ctx.Data["Title"] = ctx.Tr("repo.settings")
	ctx.Data["PageIsSettingsOptions"] = true
	ctx.Data["ForcePrivate"] = setting.Repository.ForcePrivate
	ctx.HTML(200, tplSettingsOptions)
}
// SettingsPost response for changes of a repository
func SettingsPost(ctx *context.Context, form auth.RepoSettingForm) {
ctx.Data["Title"] = ctx.Tr("repo.settings")
ctx.Data["PageIsSettingsOptions"] = true
repo := ctx.Repo.Repository
switch ctx.Query("action") {
case "update":
if ctx.HasError() {
ctx.HTML(200, tplSettingsOptions)
return
}
newRepoName := form.RepoName
// Check if repository name has been changed.
if repo.LowerName != strings.ToLower(newRepoName) {
// Close the GitRepo if open
if ctx.Repo.GitRepo != nil {
ctx.Repo.GitRepo.Close()
ctx.Repo.GitRepo = nil
}
if err := repo_service.ChangeRepositoryName(ctx.Repo.Owner, repo, newRepoName); err != nil {
ctx.Data["Err_RepoName"] = true
switch {
case models.IsErrRepoAlreadyExist(err):
ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tplSettingsOptions, &form)
case models.IsErrNameReserved(err):
ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(models.ErrNameReserved).Name), tplSettingsOptions, &form)
case models.IsErrNamePatternNotAllowed(err):
ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(models.ErrNamePatternNotAllowed).Pattern), tplSettingsOptions, &form)
default:
ctx.ServerError("ChangeRepositoryName", err)
}
return
}
log.Trace("Repository name changed: %s/%s -> %s", ctx.Repo.Owner.Name, repo.Name, newRepoName)
}
// In case it's just a case change.
repo.Name = newRepoName
repo.LowerName = strings.ToLower(newRepoName)
repo.Description = form.Description
repo.Website = form.Website
repo.IsTemplate = form.Template
// Visibility of forked repository is forced sync with base repository.
if repo.IsFork {
form.Private = repo.BaseRepo.IsPrivate
}
visibilityChanged := repo.IsPrivate != form.Private
// when ForcePrivate enabled, you could change public repo to private, but only admin users can change private to public
if visibilityChanged && setting.Repository.ForcePrivate && !form.Private && !ctx.User.IsAdmin {
ctx.ServerError("Force Private enabled", errors.New("cannot change private repository to public"))
return
}
repo.IsPrivate = form.Private
if err := models.UpdateRepository(repo, visibilityChanged); err != nil {
ctx.ServerError("UpdateRepository", err)
return
}
log.Trace("Repository basic settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
ctx.Redirect(repo.Link() + "/settings")
case "mirror":
if !repo.IsMirror {
ctx.NotFound("", nil)
return
}
// This section doesn't require repo_name/RepoName to be set in the form, don't show it
// as an error on the UI for this action
ctx.Data["Err_RepoName"] = nil
interval, err := time.ParseDuration(form.Interval)
if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
ctx.Data["Err_Interval"] = true
ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form)
} else {
ctx.Repo.Mirror.EnablePrune = form.EnablePrune
ctx.Repo.Mirror.Interval = interval
if interval != 0 {
ctx.Repo.Mirror.NextUpdateUnix = timeutil.TimeStampNow().AddDuration(interval)
} else {
ctx.Repo.Mirror.NextUpdateUnix = 0
}
if err := models.UpdateMirror(ctx.Repo.Mirror); err != nil {
ctx.Data["Err_Interval"] = true
ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form)
return
}
}
// Validate the form.MirrorAddress
u, err := url.Parse(form.MirrorAddress)
if err != nil {
ctx.Data["Err_MirrorAddress"] = true
ctx.RenderWithErr(ctx.Tr("repo.mirror_address_url_invalid"), tplSettingsOptions, &form)
return
}
if u.Opaque != "" || !(u.Scheme == "http" || u.Scheme == "https" || u.Scheme == "git") {
ctx.Data["Err_MirrorAddress"] = true
ctx.RenderWithErr(ctx.Tr("repo.mirror_address_protocol_invalid"), tplSettingsOptions, &form)
return
}
if form.MirrorUsername != "" || form.MirrorPassword != "" {
u.User = url.UserPassword(form.MirrorUsername, form.MirrorPassword)
}
// Now use xurls
address := validFormAddress.FindString(form.MirrorAddress)
if address != form.MirrorAddress && form.MirrorAddress != "" {
ctx.Data["Err_MirrorAddress"] = true
ctx.RenderWithErr(ctx.Tr("repo.mirror_address_url_invalid"), tplSettingsOptions, &form)
return
}
if u.EscapedPath() == "" || u.Host == "" || !u.IsAbs() {
ctx.Data["Err_MirrorAddress"] = true
ctx.RenderWithErr(ctx.Tr("repo.mirror_address_url_invalid"), tplSettingsOptions, &form)
return
}
address = u.String()
if err := mirror_service.SaveAddress(ctx.Repo.Mirror, address); err != nil {
ctx.ServerError("SaveAddress", err)
return
}
ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
ctx.Redirect(repo.Link() + "/settings")
case "mirror-sync":
if !repo.IsMirror {
ctx.NotFound("", nil)
return
}
mirror_service.StartToMirror(repo.ID)
ctx.Flash.Info(ctx.Tr("repo.settings.mirror_sync_in_progress"))
ctx.Redirect(repo.Link() + "/settings")
case "advanced":
var units []models.RepoUnit
var deleteUnitTypes []models.UnitType
// This section doesn't require repo_name/RepoName to be set in the form, don't show it
// as an error on the UI for this action
ctx.Data["Err_RepoName"] = nil
if form.EnableWiki && form.EnableExternalWiki && !models.UnitTypeExternalWiki.UnitGlobalDisabled() {
if !validation.IsValidExternalURL(form.ExternalWikiURL) {
ctx.Flash.Error(ctx.Tr("repo.settings.external_wiki_url_error"))
ctx.Redirect(repo.Link() + "/settings")
return
}
units = append(units, models.RepoUnit{
RepoID: repo.ID,
Type: models.UnitTypeExternalWiki,
Config: &models.ExternalWikiConfig{
ExternalWikiURL: form.ExternalWikiURL,
},
})
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeWiki)
} else if form.EnableWiki && !form.EnableExternalWiki && !models.UnitTypeWiki.UnitGlobalDisabled() {
units = append(units, models.RepoUnit{
RepoID: repo.ID,
Type: models.UnitTypeWiki,
Config: new(models.UnitConfig),
})
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeExternalWiki)
} else {
if !models.UnitTypeExternalWiki.UnitGlobalDisabled() {
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeExternalWiki)
}
if !models.UnitTypeWiki.UnitGlobalDisabled() {
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeWiki)
}
}
if form.EnableIssues && form.EnableExternalTracker && !models.UnitTypeExternalTracker.UnitGlobalDisabled() {
if !validation.IsValidExternalURL(form.ExternalTrackerURL) {
ctx.Flash.Error(ctx.Tr("repo.settings.external_tracker_url_error"))
ctx.Redirect(repo.Link() + "/settings")
return
}
if len(form.TrackerURLFormat) != 0 && !validation.IsValidExternalTrackerURLFormat(form.TrackerURLFormat) {
ctx.Flash.Error(ctx.Tr("repo.settings.tracker_url_format_error"))
ctx.Redirect(repo.Link() + "/settings")
return
}
units = append(units, models.RepoUnit{
RepoID: repo.ID,
Type: models.UnitTypeExternalTracker,
Config: &models.ExternalTrackerConfig{
ExternalTrackerURL: form.ExternalTrackerURL,
ExternalTrackerFormat: form.TrackerURLFormat,
ExternalTrackerStyle: form.TrackerIssueStyle,
},
})
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeIssues)
} else if form.EnableIssues && !form.EnableExternalTracker && !models.UnitTypeIssues.UnitGlobalDisabled() {
units = append(units, models.RepoUnit{
RepoID: repo.ID,
Type: models.UnitTypeIssues,
Config: &models.IssuesConfig{
EnableTimetracker: form.EnableTimetracker,
AllowOnlyContributorsToTrackTime: form.AllowOnlyContributorsToTrackTime,
EnableDependencies: form.EnableIssueDependencies,
},
})
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeExternalTracker)
} else {
if !models.UnitTypeExternalTracker.UnitGlobalDisabled() {
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeExternalTracker)
}
if !models.UnitTypeIssues.UnitGlobalDisabled() {
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypeIssues)
}
}
if form.EnablePulls && !models.UnitTypePullRequests.UnitGlobalDisabled() {
units = append(units, models.RepoUnit{
RepoID: repo.ID,
Type: models.UnitTypePullRequests,
Config: &models.PullRequestsConfig{
IgnoreWhitespaceConflicts: form.PullsIgnoreWhitespace,
AllowMerge: form.PullsAllowMerge,
AllowRebase: form.PullsAllowRebase,
AllowRebaseMerge: form.PullsAllowRebaseMerge,
AllowSquash: form.PullsAllowSquash,
},
})
} else if !models.UnitTypePullRequests.UnitGlobalDisabled() {
deleteUnitTypes = append(deleteUnitTypes, models.UnitTypePullRequests)
}
if err := models.UpdateRepositoryUnits(repo, units, deleteUnitTypes); err != nil {
ctx.ServerError("UpdateRepositoryUnits", err)
return
}
log.Trace("Repository advanced settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
case "admin":
if !ctx.User.IsAdmin {
ctx.Error(403)
return
}
if repo.IsFsckEnabled != form.EnableHealthCheck {
repo.IsFsckEnabled = form.EnableHealthCheck
}
if repo.CloseIssuesViaCommitInAnyBranch != form.EnableCloseIssuesViaCommitInAnyBranch {
repo.CloseIssuesViaCommitInAnyBranch = form.EnableCloseIssuesViaCommitInAnyBranch
}
if err := models.UpdateRepository(repo, false); err != nil {
ctx.ServerError("UpdateRepository", err)
return
}
log.Trace("Repository admin settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
case "convert":
if !ctx.Repo.IsOwner() {
ctx.Error(404)
return
}
if repo.Name != form.RepoName {
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
return
}
if !repo.IsMirror {
ctx.Error(404)
return
}
repo.IsMirror = false
if _, err := repository.CleanUpMigrateInfo(repo); err != nil {
ctx.ServerError("CleanUpMigrateInfo", err)
return
} else if err = models.DeleteMirrorByRepoID(ctx.Repo.Repository.ID); err != nil {
ctx.ServerError("DeleteMirrorByRepoID", err)
return
}
log.Trace("Repository converted from mirror to regular: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Flash.Success(ctx.Tr("repo.settings.convert_succeed"))
ctx.Redirect(setting.AppSubURL + "/" + ctx.Repo.Owner.Name + "/" + repo.Name)
case "transfer":
if !ctx.Repo.IsOwner() {
ctx.Error(404)
return
}
if repo.Name != form.RepoName {
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
return
}
newOwner, err := models.GetUserByName(ctx.Query("new_owner_name"))
if err != nil {
if models.IsErrUserNotExist(err) {
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil)
return
}
ctx.ServerError("IsUserExist", err)
return
}
if newOwner.Type == models.UserTypeOrganization {
if !ctx.User.IsAdmin && newOwner.Visibility == structs.VisibleTypePrivate && !ctx.User.IsUserPartOfOrg(newOwner.ID) {
// The user shouldn't know about this organization
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil)
return
}
}
// Close the GitRepo if open
if ctx.Repo.GitRepo != nil {
ctx.Repo.GitRepo.Close()
ctx.Repo.GitRepo = nil
}
if err = repo_service.TransferOwnership(ctx.User, newOwner, repo, nil); err != nil {
if models.IsErrRepoAlreadyExist(err) {
ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplSettingsOptions, nil)
} else {
ctx.ServerError("TransferOwnership", err)
}
return
}
log.Trace("Repository transferred: %s/%s -> %s", ctx.Repo.Owner.Name, repo.Name, newOwner)
ctx.Flash.Success(ctx.Tr("repo.settings.transfer_succeed"))
ctx.Redirect(setting.AppSubURL + "/" + newOwner.Name + "/" + repo.Name)
case "delete":
if !ctx.Repo.IsOwner() {
ctx.Error(404)
return
}
if repo.Name != form.RepoName {
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
return
}
if err := repo_service.DeleteRepository(ctx.User, ctx.Repo.Repository); err != nil {
ctx.ServerError("DeleteRepository", err)
return
}
log.Trace("Repository deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success"))
ctx.Redirect(ctx.Repo.Owner.DashboardLink())
case "delete-wiki":
if !ctx.Repo.IsOwner() {
ctx.Error(404)
return
}
if repo.Name != form.RepoName {
ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
return
}
err := repo.DeleteWiki()
if err != nil {
log.Error("Delete Wiki: %v", err.Error())
}
log.Trace("Repository wiki deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Flash.Success(ctx.Tr("repo.settings.wiki_deletion_success"))
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
case "archive":
if !ctx.Repo.IsOwner() {
ctx.Error(403)
return
}
if repo.IsMirror {
ctx.Flash.Error(ctx.Tr("repo.settings.archive.error_ismirror"))
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
return
}
if err := repo.SetArchiveRepoState(true); err != nil {
log.Error("Tried to archive a repo: %s", err)
ctx.Flash.Error(ctx.Tr("repo.settings.archive.error"))
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
return
}
ctx.Flash.Success(ctx.Tr("repo.settings.archive.success"))
log.Trace("Repository was archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
case "unarchive":
if !ctx.Repo.IsOwner() {
ctx.Error(403)
return
}
if err := repo.SetArchiveRepoState(false); err != nil {
log.Error("Tried to unarchive a repo: %s", err)
ctx.Flash.Error(ctx.Tr("repo.settings.unarchive.error"))
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
return
}
ctx.Flash.Success(ctx.Tr("repo.settings.unarchive.success"))
log.Trace("Repository was un-archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
ctx.Redirect(ctx.Repo.RepoLink + "/settings")
default:
ctx.NotFound("", nil)
}
}
// Collaboration render a repository's collaboration page
func Collaboration(ctx *context.Context) {
	ctx.Data["Title"] = ctx.Tr("repo.settings")
	ctx.Data["PageIsSettingsCollaboration"] = true

	repo := ctx.Repo.Repository

	// Individual collaborators first; abort rendering on any lookup failure.
	collaborators, err := repo.GetCollaborators(models.ListOptions{})
	if err != nil {
		ctx.ServerError("GetCollaborators", err)
		return
	}
	ctx.Data["Collaborators"] = collaborators

	// Then the teams granted access through the owning organization.
	repoTeams, err := repo.GetRepoTeams()
	if err != nil {
		ctx.ServerError("GetRepoTeams", err)
		return
	}
	ctx.Data["Teams"] = repoTeams

	ctx.Data["Repo"] = repo
	ctx.Data["OrgID"] = repo.OwnerID
	ctx.Data["OrgName"] = repo.OwnerName
	ctx.Data["Org"] = repo.Owner
	ctx.Data["Units"] = models.Units

	ctx.HTML(200, tplCollaboration)
}
// CollaborationPost response for actions for a collaboration of a repository
// (adds the user named in the "collaborator" query parameter as a collaborator).
func CollaborationPost(ctx *context.Context) {
	// Normalize the submitted name: strip autocomplete suffix, lower-case it.
	name := utils.RemoveUsernameParameterSuffix(strings.ToLower(ctx.Query("collaborator")))
	// Empty name, or the repository owner itself: nothing to do, reload the page.
	if len(name) == 0 || ctx.Repo.Owner.LowerName == name {
		ctx.Redirect(setting.AppSubURL + ctx.Req.URL.Path)
		return
	}
	u, err := models.GetUserByName(name)
	if err != nil {
		if models.IsErrUserNotExist(err) {
			ctx.Flash.Error(ctx.Tr("form.user_not_exist"))
			ctx.Redirect(setting.AppSubURL + ctx.Req.URL.Path)
		} else {
			ctx.ServerError("GetUserByName", err)
		}
		return
	}
	// Inactive (not yet activated) accounts may not be added.
	if !u.IsActive {
		ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_inactive_user"))
		ctx.Redirect(setting.AppSubURL + ctx.Req.URL.Path)
		return
	}
	// Organization is not allowed to be added as a collaborator.
	if u.IsOrganization() {
		ctx.Flash.Error(ctx.Tr("repo.settings.org_not_allowed_to_be_collaborator"))
		ctx.Redirect(setting.AppSubURL + ctx.Req.URL.Path)
		return
	}
	// Adding the same user twice is reported as a duplicate.
	if got, err := ctx.Repo.Repository.IsCollaborator(u.ID); err == nil && got {
		ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_duplicate"))
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		return
	}
	if err = ctx.Repo.Repository.AddCollaborator(u); err != nil {
		ctx.ServerError("AddCollaborator", err)
		return
	}
	// Notify the new collaborator by mail when the instance has mail enabled.
	if setting.Service.EnableNotifyMail {
		mailer.SendCollaboratorMail(u, ctx.User, ctx.Repo.Repository)
	}
	ctx.Flash.Success(ctx.Tr("repo.settings.add_collaborator_success"))
	ctx.Redirect(setting.AppSubURL + ctx.Req.URL.Path)
}
// ChangeCollaborationAccessMode response for changing access of a collaboration
// ("uid" and "mode" query parameters). Failures are only logged; the handler
// intentionally writes no response body.
func ChangeCollaborationAccessMode(ctx *context.Context) {
	if err := ctx.Repo.Repository.ChangeCollaborationAccessMode(
		ctx.QueryInt64("uid"),
		models.AccessMode(ctx.QueryInt("mode"))); err != nil {
		log.Error("ChangeCollaborationAccessMode: %v", err)
	}
}
// DeleteCollaboration delete a collaboration for a repository
func DeleteCollaboration(ctx *context.Context) {
	err := ctx.Repo.Repository.DeleteCollaboration(ctx.QueryInt64("id"))
	if err == nil {
		ctx.Flash.Success(ctx.Tr("repo.settings.remove_collaborator_success"))
	} else {
		ctx.Flash.Error("DeleteCollaboration: " + err.Error())
	}
	// Always answer with the redirect target; the flash message carries the outcome.
	ctx.JSON(200, map[string]interface{}{
		"redirect": ctx.Repo.RepoLink + "/settings/collaboration",
	})
}
// AddTeamPost response for adding a team to a repository
// (grants the organization team named in the "team" query parameter access).
func AddTeamPost(ctx *context.Context) {
	// Only the repo owner, or repo admins when the owner permits it, may change team access.
	if !ctx.Repo.Owner.RepoAdminChangeTeamAccess && !ctx.Repo.IsOwner() {
		ctx.Flash.Error(ctx.Tr("repo.settings.change_team_access_not_allowed"))
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		return
	}
	// Normalize the submitted team name.
	name := utils.RemoveUsernameParameterSuffix(strings.ToLower(ctx.Query("team")))
	if len(name) == 0 {
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		return
	}
	team, err := ctx.Repo.Owner.GetTeam(name)
	if err != nil {
		if models.IsErrTeamNotExist(err) {
			ctx.Flash.Error(ctx.Tr("form.team_not_exist"))
			ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		} else {
			ctx.ServerError("GetTeam", err)
		}
		return
	}
	// The team must belong to the organization owning this repository.
	if team.OrgID != ctx.Repo.Repository.OwnerID {
		ctx.Flash.Error(ctx.Tr("repo.settings.team_not_in_organization"))
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		return
	}
	// Teams that already have the repository are reported as duplicates.
	if models.HasTeamRepo(ctx.Repo.Repository.OwnerID, team.ID, ctx.Repo.Repository.ID) {
		ctx.Flash.Error(ctx.Tr("repo.settings.add_team_duplicate"))
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		return
	}
	if err = team.AddRepository(ctx.Repo.Repository); err != nil {
		ctx.ServerError("team.AddRepository", err)
		return
	}
	ctx.Flash.Success(ctx.Tr("repo.settings.add_team_success"))
	ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
}
// DeleteTeam response for deleting a team from a repository
// ("id" query parameter selects the team). Answers with a JSON redirect target.
func DeleteTeam(ctx *context.Context) {
	// Only the repo owner, or repo admins when the owner permits it, may change team access.
	if !ctx.Repo.Owner.RepoAdminChangeTeamAccess && !ctx.Repo.IsOwner() {
		ctx.Flash.Error(ctx.Tr("repo.settings.change_team_access_not_allowed"))
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
		return
	}
	team, err := models.GetTeamByID(ctx.QueryInt64("id"))
	if err != nil {
		ctx.ServerError("GetTeamByID", err)
		return
	}
	if err = team.RemoveRepository(ctx.Repo.Repository.ID); err != nil {
		// Label matches the method actually called (was misspelled "RemoveRepositorys").
		ctx.ServerError("team.RemoveRepository", err)
		return
	}
	ctx.Flash.Success(ctx.Tr("repo.settings.remove_team_success"))
	ctx.JSON(200, map[string]interface{}{
		"redirect": ctx.Repo.RepoLink + "/settings/collaboration",
	})
}
// parseOwnerAndRepo get repos by owner
// Resolves the ":username" and ":reponame" URL parameters into a user and a
// repository. On failure it renders the 404/500 response itself and returns
// nil for both results.
func parseOwnerAndRepo(ctx *context.Context) (*models.User, *models.Repository) {
	owner, err := models.GetUserByName(ctx.Params(":username"))
	if err != nil {
		switch {
		case models.IsErrUserNotExist(err):
			ctx.NotFound("GetUserByName", err)
		default:
			ctx.ServerError("GetUserByName", err)
		}
		return nil, nil
	}

	repo, err := models.GetRepositoryByName(owner.ID, ctx.Params(":reponame"))
	if err != nil {
		switch {
		case models.IsErrRepoNotExist(err):
			ctx.NotFound("GetRepositoryByName", err)
		default:
			ctx.ServerError("GetRepositoryByName", err)
		}
		return nil, nil
	}

	return owner, repo
}
// GitHooks hooks of a repository
// Renders the list of server-side Git hooks for the current repository.
func GitHooks(ctx *context.Context) {
	ctx.Data["Title"] = ctx.Tr("repo.settings.githooks")
	ctx.Data["PageIsSettingsGitHooks"] = true
	hooks, err := ctx.Repo.GitRepo.Hooks()
	if err != nil {
		ctx.ServerError("Hooks", err)
		return
	}
	ctx.Data["Hooks"] = hooks
	ctx.HTML(200, tplGithooks)
}
// GitHooksEdit render for editing a hook of repository page
// The hook to edit is selected by the ":name" URL parameter.
func GitHooksEdit(ctx *context.Context) {
	ctx.Data["Title"] = ctx.Tr("repo.settings.githooks")
	ctx.Data["PageIsSettingsGitHooks"] = true
	ctx.Data["RequireSimpleMDE"] = true
	name := ctx.Params(":name")
	hook, err := ctx.Repo.GitRepo.GetHook(name)
	if err != nil {
		// Unrecognized hook names are a 404; any other failure is a 500.
		if err == git.ErrNotValidHook {
			ctx.NotFound("GetHook", err)
		} else {
			ctx.ServerError("GetHook", err)
		}
		return
	}
	ctx.Data["Hook"] = hook
	ctx.HTML(200, tplGithookEdit)
}
// GitHooksEditPost response for editing a git hook of a repository
// Saves the POSTed "content" as the body of the hook named by ":name".
func GitHooksEditPost(ctx *context.Context) {
	name := ctx.Params(":name")
	hook, err := ctx.Repo.GitRepo.GetHook(name)
	if err != nil {
		// Unrecognized hook names are a 404; any other failure is a 500.
		if err == git.ErrNotValidHook {
			ctx.NotFound("GetHook", err)
		} else {
			ctx.ServerError("GetHook", err)
		}
		return
	}
	hook.Content = ctx.Query("content")
	if err = hook.Update(); err != nil {
		ctx.ServerError("hook.Update", err)
		return
	}
	ctx.Redirect(ctx.Repo.RepoLink + "/settings/hooks/git")
}
// DeployKeys render the deploy keys list of a repository page
func DeployKeys(ctx *context.Context) {
	ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys")
	ctx.Data["PageIsSettingsKeys"] = true
	// Template hides SSH-related UI when SSH is disabled instance-wide.
	ctx.Data["DisableSSH"] = setting.SSH.Disabled
	keys, err := models.ListDeployKeys(ctx.Repo.Repository.ID, models.ListOptions{})
	if err != nil {
		ctx.ServerError("ListDeployKeys", err)
		return
	}
	ctx.Data["Deploykeys"] = keys
	ctx.HTML(200, tplDeployKeys)
}
// DeployKeysPost response for adding a deploy key of a repository
func DeployKeysPost(ctx *context.Context, form auth.AddKeyForm) {
	ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys")
	ctx.Data["PageIsSettingsKeys"] = true
	// Re-load existing keys up front so the page can be re-rendered with them
	// on any validation error below.
	keys, err := models.ListDeployKeys(ctx.Repo.Repository.ID, models.ListOptions{})
	if err != nil {
		ctx.ServerError("ListDeployKeys", err)
		return
	}
	ctx.Data["Deploykeys"] = keys
	// Binding/validation errors from the form middleware: re-render as-is.
	if ctx.HasError() {
		ctx.HTML(200, tplDeployKeys)
		return
	}
	// Parse and normalize the submitted public-key material.
	content, err := models.CheckPublicKeyString(form.Content)
	if err != nil {
		if models.IsErrSSHDisabled(err) {
			ctx.Flash.Info(ctx.Tr("settings.ssh_disabled"))
		} else if models.IsErrKeyUnableVerify(err) {
			ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key"))
		} else {
			ctx.Data["HasError"] = true
			ctx.Data["Err_Content"] = true
			ctx.Flash.Error(ctx.Tr("form.invalid_ssh_key", err.Error()))
		}
		ctx.Redirect(ctx.Repo.RepoLink + "/settings/keys")
		return
	}
	// The key is stored read-only unless the form explicitly asked for write access.
	key, err := models.AddDeployKey(ctx.Repo.Repository.ID, form.Title, content, !form.IsWritable)
	if err != nil {
		ctx.Data["HasError"] = true
		switch {
		case models.IsErrDeployKeyAlreadyExist(err):
			// Same key already registered on this repository.
			ctx.Data["Err_Content"] = true
			ctx.RenderWithErr(ctx.Tr("repo.settings.key_been_used"), tplDeployKeys, &form)
		case models.IsErrKeyAlreadyExist(err):
			// Key already in use elsewhere (see settings.ssh_key_been_used message).
			ctx.Data["Err_Content"] = true
			ctx.RenderWithErr(ctx.Tr("settings.ssh_key_been_used"), tplDeployKeys, &form)
		case models.IsErrKeyNameAlreadyUsed(err):
			// Title collision with an existing deploy key.
			ctx.Data["Err_Title"] = true
			ctx.RenderWithErr(ctx.Tr("repo.settings.key_name_used"), tplDeployKeys, &form)
		default:
			ctx.ServerError("AddDeployKey", err)
		}
		return
	}
	log.Trace("Deploy key added: %d", ctx.Repo.Repository.ID)
	ctx.Flash.Success(ctx.Tr("repo.settings.add_key_success", key.Name))
	ctx.Redirect(ctx.Repo.RepoLink + "/settings/keys")
}
// DeleteDeployKey response for deleting a deploy key
func DeleteDeployKey(ctx *context.Context) {
	err := models.DeleteDeployKey(ctx.User, ctx.QueryInt64("id"))
	if err == nil {
		ctx.Flash.Success(ctx.Tr("repo.settings.deploy_key_deletion_success"))
	} else {
		ctx.Flash.Error("DeleteDeployKey: " + err.Error())
	}
	// Always answer with the redirect target; the flash message carries the outcome.
	ctx.JSON(200, map[string]interface{}{
		"redirect": ctx.Repo.RepoLink + "/settings/keys",
	})
}
func init() {
	// Compile the pattern used by the "mirror" settings action to validate
	// mirror addresses; only http(s) and git URL schemes are matched.
	var err error
	validFormAddress, err = xurls.StrictMatchingScheme(`(https?)|(git)://`)
	if err != nil {
		// A bad pattern is a programming error; fail hard at startup.
		panic(err)
	}
}
// UpdateAvatarSetting update repo's avatar
// Validates and stores the uploaded image from form. Returns a
// user-displayable error (the caller flashes it) or nil; a missing upload is
// treated as a no-op.
func UpdateAvatarSetting(ctx *context.Context, form auth.AvatarForm) error {
	ctxRepo := ctx.Repo.Repository
	if form.Avatar == nil {
		// No avatar was uploaded and we are not removing the existing one here.
		// No random avatar is generated here either — just exit, no action.
		if !com.IsFile(ctxRepo.CustomAvatarPath()) {
			log.Trace("No avatar was uploaded for repo: %d. Default icon will appear instead.", ctxRepo.ID)
		}
		return nil
	}
	r, err := form.Avatar.Open()
	if err != nil {
		return fmt.Errorf("Avatar.Open: %v", err)
	}
	defer r.Close()
	// Reject oversized uploads before reading them into memory.
	if form.Avatar.Size > setting.AvatarMaxFileSize {
		return errors.New(ctx.Tr("settings.uploaded_avatar_is_too_big"))
	}
	data, err := ioutil.ReadAll(r)
	if err != nil {
		return fmt.Errorf("ioutil.ReadAll: %v", err)
	}
	// Only accept content that actually looks like an image.
	if !base.IsImageFile(data) {
		return errors.New(ctx.Tr("settings.uploaded_avatar_not_a_image"))
	}
	if err = ctxRepo.UploadAvatar(data); err != nil {
		return fmt.Errorf("UploadAvatar: %v", err)
	}
	return nil
}
// SettingsAvatar save new POSTed repository avatar
func SettingsAvatar(ctx *context.Context, form auth.AvatarForm) {
	// Repository avatars only support locally uploaded files.
	form.Source = auth.AvatarLocal
	if err := UpdateAvatarSetting(ctx, form); err != nil {
		ctx.Flash.Error(err.Error())
	} else {
		ctx.Flash.Success(ctx.Tr("repo.settings.update_avatar_success"))
	}
	ctx.Redirect(ctx.Repo.RepoLink + "/settings")
}
// SettingsDeleteAvatar delete repository avatar
// Deletion failures are surfaced via flash; we redirect either way.
func SettingsDeleteAvatar(ctx *context.Context) {
	if err := ctx.Repo.Repository.DeleteAvatar(); err != nil {
		ctx.Flash.Error(fmt.Sprintf("DeleteAvatar: %v", err))
	}
	ctx.Redirect(ctx.Repo.RepoLink + "/settings")
}
<|file_name|>io_util_test.go<|end_file_name|><|fim▁begin|>package util
import (
"testing"
. "github.com/bborbe/assert"
)
// TestIsDirectory checks that IsDirectory reports /tmp as a directory
// without error.
func TestIsDirectory(t *testing.T) {
	isDir, err := IsDirectory("/tmp")
	if err := AssertThat(err, NilValue()); err != nil {
		t.Fatal(err)
	}
	if err := AssertThat(isDir, Is(true)); err != nil {
		t.Fatal(err)
	}
}
// TestNormalizePath checks that an already-normalized absolute path is
// returned unchanged by NormalizePath.
func TestNormalizePath(t *testing.T) {
	dir, err := NormalizePath("/tmp")
	if err := AssertThat(err, NilValue()); err != nil {
		t.Fatal(err)
	}
	if err := AssertThat(dir, Is("/tmp")); err != nil {
		t.Fatal(err)
	}
}
<|file_name|>toku_crash.cc<|end_file_name|><|fim▁begin|>/* -*- mode: C++; c-basic-offset: 4; indent-tabs-mode: nil -*- */
// vim: ft=cpp:expandtab:ts=8:sw=4:softtabstop=4:
#ident "$Id$"
/*
COPYING CONDITIONS NOTICE:
This program is free software; you can redistribute it and/or modify
it under the terms of version 2 of the GNU General Public License as
published by the Free Software Foundation, and provided that the
following conditions are met:
* Redistributions of source code must retain this COPYING<|fim▁hole|> GRANT (below).
* Redistributions in binary form must reproduce this COPYING
CONDITIONS NOTICE, the COPYRIGHT NOTICE (below), the
DISCLAIMER (below), the UNIVERSITY PATENT NOTICE (below), the
PATENT MARKING NOTICE (below), and the PATENT RIGHTS
GRANT (below) in the documentation and/or other materials
provided with the distribution.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
COPYRIGHT NOTICE:
TokuDB, Tokutek Fractal Tree Indexing Library.
Copyright (C) 2007-2013 Tokutek, Inc.
DISCLAIMER:
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
UNIVERSITY PATENT NOTICE:
The technology is licensed by the Massachusetts Institute of
Technology, Rutgers State University of New Jersey, and the Research
Foundation of State University of New York at Stony Brook under
United States of America Serial No. 11/760379 and to the patents
and/or patent applications resulting from it.
PATENT MARKING NOTICE:
This software is covered by US Patent No. 8,185,551.
This software is covered by US Patent No. 8,489,638.
PATENT RIGHTS GRANT:
"THIS IMPLEMENTATION" means the copyrightable works distributed by
Tokutek as part of the Fractal Tree project.
"PATENT CLAIMS" means the claims of patents that are owned or
licensable by Tokutek, both currently or in the future; and that in
the absence of this license would be infringed by THIS
IMPLEMENTATION or by using or running THIS IMPLEMENTATION.
"PATENT CHALLENGE" shall mean a challenge to the validity,
patentability, enforceability and/or non-infringement of any of the
PATENT CLAIMS or otherwise opposing any of the PATENT CLAIMS.
Tokutek hereby grants to you, for the term and geographical scope of
the PATENT CLAIMS, a non-exclusive, no-charge, royalty-free,
irrevocable (except as stated in this section) patent license to
make, have made, use, offer to sell, sell, import, transfer, and
otherwise run, modify, and propagate the contents of THIS
IMPLEMENTATION, where such license applies only to the PATENT
CLAIMS. This grant does not include claims that would be infringed
only as a consequence of further modifications of THIS
IMPLEMENTATION. If you or your agent or licensee institute or order
or agree to the institution of patent litigation against any entity
(including a cross-claim or counterclaim in a lawsuit) alleging that
THIS IMPLEMENTATION constitutes direct or contributory patent
infringement, or inducement of patent infringement, then any rights
granted to you under this License shall terminate as of the date
such litigation is filed. If you or your agent or exclusive
licensee institute or order or agree to the institution of a PATENT
CHALLENGE, then Tokutek may terminate any rights granted to you
under this License.
*/
#ident "Copyright (c) 2007-2013 Tokutek Inc. All rights reserved."
#include <unistd.h>
#ifdef HAVE_SYS_PRCTL_H
#include <sys/prctl.h>
#endif
#include <sys/wait.h>
#include <toku_race_tools.h>
#include "toku_crash.h"
#include "toku_atomic.h"
enum { MAX_GDB_ARGS = 128 };
// Exec gdb (replacing the current process image) attached to parent_pid,
// printing thread lists and full backtraces for all threads to stderr.
// Only returns if execlp() itself fails.
static void
run_gdb(pid_t parent_pid, const char *gdb_path) {
    // At most 3 decimal digits per byte of pid_t, plus a NUL terminator.
    char pid_buf[sizeof(pid_t) * 3 + 1];
    char exe_buf[sizeof(pid_buf) + sizeof("/proc//exe")];

    // Get pid and path to executable.
    int n;
    n = snprintf(pid_buf, sizeof(pid_buf), "%d", parent_pid);
    invariant(n >= 0 && n < (int)sizeof(pid_buf));
    n = snprintf(exe_buf, sizeof(exe_buf), "/proc/%d/exe", parent_pid);
    invariant(n >= 0 && n < (int)sizeof(exe_buf));

    toku_dup2(2, 1); // redirect output to stderr
    // Arguments are not dynamic due to possible security holes.
    execlp(gdb_path, gdb_path, "--batch", "-n",
           "-ex", "thread",
           "-ex", "bt",
           "-ex", "bt full",
           "-ex", "thread apply all bt",
           "-ex", "thread apply all bt full",
           exe_buf, pid_buf,
           NULL);
}
// Runs in a child of the crashing process. Forks two grandchildren — the
// debugger (run_gdb) and a 5-second sleep acting as a watchdog — then exits
// with success if gdb finished first, failure on timeout or any error.
// Never returns normally: control always leaves through _exit().
static void
intermediate_process(pid_t parent_pid, const char *gdb_path) {
    // Disable generating of core dumps for this (and inherited) processes.
#if defined(HAVE_SYS_PRCTL_H)
    prctl(PR_SET_DUMPABLE, 0, 0, 0);
#endif
    pid_t worker_pid = fork();
    if (worker_pid < 0) {
        perror("spawn gdb fork: ");
        goto failure;
    }
    if (worker_pid == 0) {
        // Child (debugger)
        run_gdb(parent_pid, gdb_path);
        // Normally run_gdb will not return.
        // In case it does, kill the process.
        goto failure;
    } else {
        pid_t timeout_pid = fork();
        if (timeout_pid < 0) {
            perror("spawn timeout fork: ");
            kill(worker_pid, SIGKILL);
            goto failure;
        }
        if (timeout_pid == 0) {
            sleep(5); // Timeout of 5 seconds
            goto success;
        } else {
            // wait() returns when either the debugger or the timer exits;
            // the slower of the two is then killed.
            pid_t exited_pid = wait(NULL); // Wait for first child to exit
            if (exited_pid == worker_pid) {
                // Kill slower child
                kill(timeout_pid, SIGKILL);
                goto success;
            } else if (exited_pid == timeout_pid) {
                // Kill slower child
                kill(worker_pid, SIGKILL);
                goto failure; // Timed out.
            } else {
                perror("error while waiting for gdb or timer to end: ");
                // Some failure. Kill everything.
                kill(timeout_pid, SIGKILL);
                kill(worker_pid, SIGKILL);
                goto failure;
            }
        }
    }
success:
    _exit(EXIT_SUCCESS);
failure:
    _exit(EXIT_FAILURE);
}
// Fork an intermediate child that runs gdb (with a watchdog) against this
// process, and block until that child finishes. The intermediate layer keeps
// the debugger and its timer out of this process's direct children.
static void
spawn_gdb(const char *gdb_path) {
    pid_t parent_pid = toku_os_getpid();

#if defined(HAVE_SYS_PRCTL_H)
    // On systems that require permission for the same user to ptrace,
    // give permission for this process and (more importantly) all its children to debug this process.
    prctl(PR_SET_PTRACER, parent_pid, 0, 0, 0);
#endif
    fprintf(stderr, "Attempting to use gdb @[%s] on pid[%d]\n", gdb_path, parent_pid);
    fflush(stderr);
    // fork() returns pid_t, not int; using pid_t matches waitpid()'s
    // parameter type and avoids narrowing where pid_t is wider than int.
    pid_t intermediate_pid = fork();
    if (intermediate_pid < 0) {
        perror("spawn_gdb intermediate process fork: ");
    } else if (intermediate_pid == 0) {
        intermediate_process(parent_pid, gdb_path);
    } else {
        waitpid(intermediate_pid, NULL, 0);
    }
}
// Attempt to dump a gdb stack trace of the current process to stderr.
// Falls back to /usr/bin/gdb when gdb_path is NULL. A compare-and-swap on
// `started` ensures only the first caller ever spawns gdb; skipped entirely
// when running under valgrind.
void
toku_try_gdb_stack_trace(const char *gdb_path) {
    char default_gdb_path[] = "/usr/bin/gdb";
    static bool started = false;
    if (RUNNING_ON_VALGRIND) {
        fprintf(stderr, "gdb stack trace skipped due to running under valgrind\n");
        fflush(stderr);
    } else if (toku_sync_bool_compare_and_swap(&started, false, true)) {
        spawn_gdb(gdb_path ? gdb_path : default_gdb_path);
    }
}
DISCLAIMER (below), the UNIVERSITY PATENT NOTICE (below), the
PATENT MARKING NOTICE (below), and the PATENT RIGHTS |
<|file_name|>docker_test.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package api
import (
"fmt"
"os"
"strconv"
"testing"
"time"
info "github.com/google/cadvisor/info/v1"
"github.com/google/cadvisor/info/v2"
"github.com/google/cadvisor/integration/framework"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Sanity check the container (v1 API info) by:
// - Checking that the specified alias is a valid one for this container.
// - Verifying that stats are not empty.
func sanityCheck(alias string, containerInfo info.ContainerInfo, t *testing.T) {
	assert.Contains(t, containerInfo.Aliases, alias, "Alias %q should be in list of aliases %v", alias, containerInfo.Aliases)
	assert.NotEmpty(t, containerInfo.Stats, "Expected container to have stats")
}
// Sanity check the container (v2 API info) by:
// - Checking that the specified alias is a valid one for this container.
// - Verifying that stats are not empty.
func sanityCheckV2(alias string, info v2.ContainerInfo, t *testing.T) {
	assert.Contains(t, info.Spec.Aliases, alias, "Alias %q should be in list of aliases %v", alias, info.Spec.Aliases)
	assert.NotEmpty(t, info.Stats, "Expected container to have stats")
}
// Waits up to 5s for a container with the specified alias to appear.
// Repeatedly asks cAdvisor for one stat sample of the container and fails the
// test if it never shows up within the retry window.
func waitForContainer(alias string, fm framework.Framework) {
	err := framework.RetryForDuration(func() error {
		ret, err := fm.Cadvisor().Client().DockerContainer(alias, &info.ContainerInfoRequest{
			NumStats: 1,
		})
		if err != nil {
			return err
		}
		// The container only counts as "available" once it reports a stat.
		if len(ret.Stats) != 1 {
			return fmt.Errorf("no stats returned for container %q", alias)
		}
		return nil
	}, 5*time.Second)
	require.NoError(fm.T(), err, "Timed out waiting for container %q to be available in cAdvisor: %v", alias, err)
}
// A Docker container in /docker/<ID>
// Starts a pause container and verifies cAdvisor can look it up by container ID.
func TestDockerContainerById(t *testing.T) {
	fm := framework.New(t)
	defer fm.Cleanup()
	containerID := fm.Docker().RunPause()
	// Wait for the container to show up.
	waitForContainer(containerID, fm)
	request := &info.ContainerInfoRequest{
		NumStats: 1,
	}
	containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerID, request)
	require.NoError(t, err)
	sanityCheck(containerID, containerInfo, t)
}
// A Docker container in /docker/<name>
func TestDockerContainerByName(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
containerName := fmt.Sprintf("test-docker-container-by-name-%d", os.Getpid())
fm.Docker().Run(framework.DockerRunArgs{
Image: "kubernetes/pause",
Args: []string{"--name", containerName},
})
// Wait for the container to show up.
waitForContainer(containerName, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerName, request)
require.NoError(t, err)
sanityCheck(containerName, containerInfo, t)
}
// Find the first container with the specified alias in containers.
func findContainer(alias string, containers []info.ContainerInfo, t *testing.T) info.ContainerInfo {
for _, cont := range containers {
for _, a := range cont.Aliases {
if alias == a {
return cont
}
}
}
t.Fatalf("Failed to find container %q in %+v", alias, containers)
return info.ContainerInfo{}
}
// All Docker containers through /docker
func TestGetAllDockerContainers(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the containers to show up.
containerID1 := fm.Docker().RunPause()
containerID2 := fm.Docker().RunPause()
waitForContainer(containerID1, fm)
waitForContainer(containerID2, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containersInfo, err := fm.Cadvisor().Client().AllDockerContainers(request)
require.NoError(t, err)
<|fim▁hole|> t.Fatalf("At least 2 Docker containers should exist, received %d: %+v", len(containersInfo), containersInfo)
}
sanityCheck(containerID1, findContainer(containerID1, containersInfo, t), t)
sanityCheck(containerID2, findContainer(containerID2, containersInfo, t), t)
}
// Check expected properties of a Docker container.
func TestBasicDockerContainer(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
containerName := fmt.Sprintf("test-basic-docker-container-%d", os.Getpid())
containerID := fm.Docker().Run(framework.DockerRunArgs{
Image: "kubernetes/pause",
Args: []string{
"--name", containerName,
},
})
// Wait for the container to show up.
waitForContainer(containerID, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerID, request)
require.NoError(t, err)
// Check that the contianer is known by both its name and ID.
sanityCheck(containerID, containerInfo, t)
sanityCheck(containerName, containerInfo, t)
assert.Empty(t, containerInfo.Subcontainers, "Should not have subcontainers")
assert.Len(t, containerInfo.Stats, 1, "Should have exactly one stat")
}
// TODO(vmarmol): Handle if CPU or memory is not isolated on this system.
// Check the ContainerSpec.
func TestDockerContainerSpec(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
var (
cpuShares = uint64(2048)
cpuMask = "0"
memoryLimit = uint64(1 << 30) // 1GB
image = "kubernetes/pause"
env = map[string]string{"test_var": "FOO"}
labels = map[string]string{"bar": "baz"}
)
containerID := fm.Docker().Run(framework.DockerRunArgs{
Image: image,
Args: []string{
"--cpu-shares", strconv.FormatUint(cpuShares, 10),
"--cpuset-cpus", cpuMask,
"--memory", strconv.FormatUint(memoryLimit, 10),
"--env", "TEST_VAR=FOO",
"--label", "bar=baz",
},
})
// Wait for the container to show up.
waitForContainer(containerID, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerID, request)
require.NoError(t, err)
sanityCheck(containerID, containerInfo, t)
assert := assert.New(t)
assert.True(containerInfo.Spec.HasCpu, "CPU should be isolated")
assert.Equal(cpuShares, containerInfo.Spec.Cpu.Limit, "Container should have %d shares, has %d", cpuShares, containerInfo.Spec.Cpu.Limit)
assert.Equal(cpuMask, containerInfo.Spec.Cpu.Mask, "Cpu mask should be %q, but is %q", cpuMask, containerInfo.Spec.Cpu.Mask)
assert.True(containerInfo.Spec.HasMemory, "Memory should be isolated")
assert.Equal(memoryLimit, containerInfo.Spec.Memory.Limit, "Container should have memory limit of %d, has %d", memoryLimit, containerInfo.Spec.Memory.Limit)
assert.True(containerInfo.Spec.HasNetwork, "Network should be isolated")
assert.True(containerInfo.Spec.HasDiskIo, "Blkio should be isolated")
assert.Equal(image, containerInfo.Spec.Image, "Spec should include container image")
assert.Equal(env, containerInfo.Spec.Envs, "Spec should include environment variables")
assert.Equal(labels, containerInfo.Spec.Labels, "Spec should include labels")
}
// Check the CPU ContainerStats.
func TestDockerContainerCpuStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the container to show up.
containerID := fm.Docker().RunBusybox("ping", "www.google.com")
waitForContainer(containerID, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerID, request)
if err != nil {
t.Fatal(err)
}
sanityCheck(containerID, containerInfo, t)
// Checks for CpuStats.
checkCPUStats(t, containerInfo.Stats[0].Cpu)
}
// Check the memory ContainerStats.
func TestDockerContainerMemoryStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the container to show up.
containerID := fm.Docker().RunBusybox("ping", "www.google.com")
waitForContainer(containerID, fm)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerID, request)
require.NoError(t, err)
sanityCheck(containerID, containerInfo, t)
// Checks for MemoryStats.
checkMemoryStats(t, containerInfo.Stats[0].Memory)
}
// Check the network ContainerStats.
func TestDockerContainerNetworkStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
// Wait for the container to show up.
containerID := fm.Docker().RunBusybox("watch", "-n1", "wget", "http://www.google.com/")
waitForContainer(containerID, fm)
// Wait for at least one additional housekeeping interval
time.Sleep(20 * time.Second)
request := &info.ContainerInfoRequest{
NumStats: 1,
}
containerInfo, err := fm.Cadvisor().Client().DockerContainer(containerID, request)
require.NoError(t, err)
sanityCheck(containerID, containerInfo, t)
stat := containerInfo.Stats[0]
ifaceStats := stat.Network.InterfaceStats
// macOS we have more than one interface, since traffic is
// only on eth0 we need to pick that one
if len(stat.Network.Interfaces) > 0 {
for _, iface := range stat.Network.Interfaces {
if iface.Name == "eth0" {
ifaceStats = iface
}
}
}
// Checks for NetworkStats.
assert := assert.New(t)
assert.NotEqual(0, ifaceStats.TxBytes, "Network tx bytes should not be zero")
assert.NotEqual(0, ifaceStats.TxPackets, "Network tx packets should not be zero")
assert.NotEqual(0, ifaceStats.RxBytes, "Network rx bytes should not be zero")
assert.NotEqual(0, ifaceStats.RxPackets, "Network rx packets should not be zero")
assert.NotEqual(ifaceStats.RxBytes, ifaceStats.TxBytes, fmt.Sprintf("Network tx (%d) and rx (%d) bytes should not be equal", ifaceStats.TxBytes, ifaceStats.RxBytes))
assert.NotEqual(ifaceStats.RxPackets, ifaceStats.TxPackets, fmt.Sprintf("Network tx (%d) and rx (%d) packets should not be equal", ifaceStats.TxPackets, ifaceStats.RxPackets))
}
func TestDockerFilesystemStats(t *testing.T) {
fm := framework.New(t)
defer fm.Cleanup()
storageDriver := fm.Docker().StorageDriver()
if storageDriver == framework.DeviceMapper {
// Filesystem stats not supported with devicemapper, yet
return
}
const (
ddUsage = uint64(1 << 3) // 1 KB
sleepDuration = 10 * time.Second
)
// Wait for the container to show up.
// FIXME: Tests should be bundled and run on the remote host instead of being run over ssh.
// Escaping bash over ssh is ugly.
// Once github issue 1130 is fixed, this logic can be removed.
dockerCmd := fmt.Sprintf("dd if=/dev/zero of=/file count=2 bs=%d & ping google.com", ddUsage)
if fm.Hostname().Host != "localhost" {
dockerCmd = fmt.Sprintf("'%s'", dockerCmd)
}
containerID := fm.Docker().RunBusybox("/bin/sh", "-c", dockerCmd)
waitForContainer(containerID, fm)
request := &v2.RequestOptions{
IdType: v2.TypeDocker,
Count: 1,
}
needsBaseUsageCheck := false
switch storageDriver {
case framework.Aufs, framework.Overlay, framework.Overlay2, framework.DeviceMapper:
needsBaseUsageCheck = true
}
pass := false
// We need to wait for the `dd` operation to complete.
for i := 0; i < 10; i++ {
containerInfo, err := fm.Cadvisor().ClientV2().Stats(containerID, request)
if err != nil {
t.Logf("%v stats unavailable - %v", time.Now().String(), err)
t.Logf("retrying after %s...", sleepDuration.String())
time.Sleep(sleepDuration)
continue
}
require.Equal(t, len(containerInfo), 1)
var info v2.ContainerInfo
// There is only one container in containerInfo. Since it is a map with unknown key,
// use the value blindly.
for _, cInfo := range containerInfo {
info = cInfo
}
sanityCheckV2(containerID, info, t)
require.NotNil(t, info.Stats[0], "got info: %+v", info)
require.NotNil(t, info.Stats[0].Filesystem, "got info: %+v", info)
require.NotNil(t, info.Stats[0].Filesystem.TotalUsageBytes, "got info: %+v", info.Stats[0].Filesystem)
if *info.Stats[0].Filesystem.TotalUsageBytes >= ddUsage {
if !needsBaseUsageCheck {
pass = true
break
}
require.NotNil(t, info.Stats[0].Filesystem.BaseUsageBytes)
if *info.Stats[0].Filesystem.BaseUsageBytes >= ddUsage {
pass = true
break
}
}
t.Logf("expected total usage %d bytes to be greater than %d bytes", *info.Stats[0].Filesystem.TotalUsageBytes, ddUsage)
if needsBaseUsageCheck {
t.Logf("expected base %d bytes to be greater than %d bytes", *info.Stats[0].Filesystem.BaseUsageBytes, ddUsage)
}
t.Logf("retrying after %s...", sleepDuration.String())
time.Sleep(sleepDuration)
}
if !pass {
t.Fail()
}
}<|fim▁end|> | if len(containersInfo) < 2 { |
<|file_name|>null-presence-channel.d.ts<|end_file_name|><|fim▁begin|>import { NullChannel } from './null-channel';
import { PresenceChannel } from './presence-channel';
export declare class NullPresenceChannel extends NullChannel implements PresenceChannel {
here(callback: Function): NullPresenceChannel;
<|fim▁hole|><|fim▁end|> | joining(callback: Function): NullPresenceChannel;
leaving(callback: Function): NullPresenceChannel;
whisper(eventName: any, data: any): NullPresenceChannel;
} |
<|file_name|>browser.ts<|end_file_name|><|fim▁begin|>// Copyright (c) Jupyter Development Team.
// Distributed under the terms of the Modified BSD License.
'use strict';
import {
IContentsModel
} from 'jupyter-js-services';
import {
Message
} from 'phosphor-messaging';
import {
PanelLayout
} from 'phosphor-panel';
import {
Widget
} from 'phosphor-widget';
import {
FileButtons
} from './buttons';
import {
BreadCrumbs
} from './crumbs';
import {
DirListing
} from './listing';
import {
FileBrowserModel
} from './model';
import {
FILE_BROWSER_CLASS, showErrorMessage
} from './utils';
/**
* The class name added to the filebrowser crumbs node.
*/
const CRUMBS_CLASS = 'jp-FileBrowser-crumbs';
/**
* The class name added to the filebrowser buttons node.
*/
const BUTTON_CLASS = 'jp-FileBrowser-buttons';
/**
* The class name added to the filebrowser listing node.
*/
const LISTING_CLASS = 'jp-FileBrowser-listing';
/**
* The duration of auto-refresh in ms.
*/
const REFRESH_DURATION = 30000;
/**
* A widget which hosts a file browser.
*
* The widget uses the Jupyter Contents API to retreive contents,
* and presents itself as a flat list of files and directories with
* breadcrumbs.
*/
export
class FileBrowserWidget extends Widget {
/**
* Construct a new file browser.
*
* @param model - The file browser view model.
*/
constructor(model: FileBrowserModel) {
super();
this.addClass(FILE_BROWSER_CLASS);
this._model = model;
this._model.refreshed.connect(this._handleRefresh, this)
this._crumbs = new BreadCrumbs(model);
this._buttons = new FileButtons(model);
this._listing = new DirListing(model);
this._crumbs.addClass(CRUMBS_CLASS);
this._buttons.addClass(BUTTON_CLASS);
this._listing.addClass(LISTING_CLASS);
let layout = new PanelLayout();
layout.addChild(this._crumbs);
layout.addChild(this._buttons);
layout.addChild(this._listing);
this.layout = layout;
}
/**
* Dispose of the resources held by the file browser.
*/
dispose() {
this._model = null;
this._crumbs = null;
this._buttons = null;
this._listing = null;
super.dispose();
}
/**
* Get the model used by the file browser.
*
* #### Notes
* This is a read-only property.
*/
get model(): FileBrowserModel {
return this._model;
}
/**
* Get the widget factory for the widget.
*/
get widgetFactory(): (model: IContentsModel) => Widget {
return this._listing.widgetFactory;
}
/**
* Set the widget factory for the widget.
*/
set widgetFactory(factory: (model: IContentsModel) => Widget) {
this._listing.widgetFactory = factory;
}
/**
* Change directory.
*/
cd(path: string): Promise<void> {
return this._model.cd(path);
}
/**
* Open the currently selected item(s).
*
* Changes to the first directory encountered.
* Emits [[openRequested]] signals for files.
*/
open(): void {
let foundDir = false;
let items = this._model.sortedItems;
for (let item of items) {
if (!this._model.isSelected(item.name)) {
continue;
}
if (item.type === 'directory' && !foundDir) {
foundDir = true;
this._model.open(item.name).catch(error =>
showErrorMessage(this, 'Open directory', error)
);
} else {
this.model.open(item.name);
}
}
}
/**
* Create a new untitled file or directory in the current directory.
*/
newUntitled(type: string, ext?: string): Promise<IContentsModel> {
return this.model.newUntitled(type, ext);
}
/**
* Rename the first currently selected item.
*/
rename(): Promise<string> {
return this._listing.rename();
}
/**
* Cut the selected items.
*/
cut(): void {
this._listing.cut();
}
/**
* Copy the selected items.
*/
copy(): void {
this._listing.copy();
}
/**
* Paste the items from the clipboard.
*/
paste(): Promise<void> {
return this._listing.paste();
}
/**
* Delete the currently selected item(s).
*/
delete(): Promise<void> {
return this._listing.delete();
}
/**
* Duplicate the currently selected item(s).
*/
duplicate(): Promise<void> {
return this._listing.duplicate();
}
/**
* Download the currently selected item(s).
*/
download(): Promise<void> {
return this._listing.download();
}
/**
* Shut down kernels on the applicable currently selected items.
*/
shutdownKernels(): Promise<void> {
return this._listing.shutdownKernels();
}
/**
* Refresh the current directory.
*/
refresh(): Promise<void> {
return this._model.refresh().catch(
error => showErrorMessage(this, 'Refresh Error', error)
);
}
/**
* Select next item.
*/
selectNext(): void {
this._listing.selectNext();
}
/**
* Select previous item.
*/
selectPrevious(): void {
this._listing.selectPrevious();
}
/**
* A message handler invoked on an `'after-attach'` message.
*/
protected onAfterAttach(msg: Message): void {
super.onAfterAttach(msg);
this.refresh();
}
/**
* A message handler invoked on an `'after-show'` message.
*/
protected onAfterShow(msg: Message): void {
super.onAfterShow(msg);
this.refresh();
}
/**<|fim▁hole|> clearTimeout(this._timeoutId);
this._timeoutId = setTimeout(() => this.refresh(), REFRESH_DURATION);
}
private _model: FileBrowserModel = null;
private _crumbs: BreadCrumbs = null;
private _buttons: FileButtons = null;
private _listing: DirListing = null;
private _timeoutId = -1;
}<|fim▁end|> | * Handle a model refresh.
*/
private _handleRefresh(): void { |
<|file_name|>repl.js<|end_file_name|><|fim▁begin|><|fim▁hole|>// Compiled by ClojureScript 0.0-2311
goog.provide('antares.repl');
goog.require('cljs.core');
goog.require('weasel.repl');
goog.require('weasel.repl');
console.log("WEASEL REPL ACTIVATED");
weasel.repl.connect.call(null,"ws://localhost:9001",new cljs.core.Keyword(null,"verbose","verbose",1694226060),true);
//# sourceMappingURL=repl.js.map<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
//! FFI bindings to alink.
#![no_std]
#![experimental]
extern crate winapi;
use winapi::*;
extern "system" {
}<|fim▁end|> | |
<|file_name|>gdiobj.cpp<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////////////////////////////////
// Name: src/msw/gdiobj.cpp
// Purpose: wxGDIObject class
// Author: Julian Smart
// Modified by:
// Created: 01/02/97
// RCS-ID: $Id: gdiobj.cpp 40626 2006-08-16 14:53:49Z VS $
// Copyright: (c) Julian Smart
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// For compilers that support precompilation, includes "wx.h".
#include "wx/wxprec.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
#include "wx/gdiobj.h"
<|fim▁hole|>#ifndef WX_PRECOMP
#include <stdio.h>
#include "wx/list.h"
#include "wx/utils.h"
#include "wx/app.h"
#endif
#include "wx/msw/private.h"
#define M_GDIDATA wx_static_cast(wxGDIRefData*, m_refData)
/*
void wxGDIObject::IncrementResourceUsage(void)
{
if ( !M_GDIDATA )
return;
// wxDebugMsg("Object %ld about to be incremented: %d\n", (long)this, m_usageCount);
M_GDIDATA->m_usageCount ++;
};
void wxGDIObject::DecrementResourceUsage(void)
{
if ( !M_GDIDATA )
return;
M_GDIDATA->m_usageCount --;
if (wxTheApp)
wxTheApp->SetPendingCleanup(true);
// wxDebugMsg("Object %ld decremented: %d\n", (long)this, M_GDIDATA->m_usageCount);
if (M_GDIDATA->m_usageCount < 0)
{
char buf[80];
sprintf(buf, "Object %ld usage count is %d\n", (long)this, M_GDIDATA->m_usageCount);
wxDebugMsg(buf);
}
// assert(M_GDIDATA->m_usageCount >= 0);
};
*/<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate crypto;
extern crate hyper;
extern crate rustc_serialize;
extern crate rand;
mod hmac_sha1;
use hyper::server::{Server, Request, Response};
use hyper::status::StatusCode;
use hyper::net::Fresh;
use hyper::uri::RequestUri::AbsolutePath;
const HOST: &'static str = "localhost:9000";
const DELAY: u32 = 1;
fn main() {
let key = gen_key();
println!("Key: {} (len {})", format_hex(&key[..]), key.len());
let server = Server::http(HOST).unwrap();
println!("test.txt hmac: {} (Shhhh!)",
format_hex(&file_hmac(&key[..], "test.txt").unwrap()[..]));
println!("Listening on port 9000");
server.handle(
move |req: Request, res: Response| {
handle_request(&key[..], req, res)
}
).unwrap();
}
fn format_hex(hex: &[u8]) -> String {
use std::fmt::Write;
let mut s = String::new();
for el in hex.iter() {
write!(&mut s, "{:02x}", el).unwrap();
}
s
}
fn gen_key() -> Vec<u8> {
use rand::Rng;
let mut rng = rand::thread_rng();
let key_len = rng.gen_range(10, 256);
rng.gen_iter().take(key_len).collect()
}
fn handle_request(key: &[u8], req: Request, mut res: Response<Fresh>) {
match req.method {
hyper::Get => {
match req.uri {
AbsolutePath(path) => *res.status_mut() = handle_path(key, &path[..]),
_ => *res.status_mut() = StatusCode::NotFound,
}
},
_ => *res.status_mut() = StatusCode::MethodNotAllowed,
}
send_response(res);
}
fn handle_path(key: &[u8], path: &str) -> StatusCode {
let full_path = format!("http://{}/{}", HOST, path);
match hyper::Url::parse(&full_path[..]).ok().and_then(|url| url.query_pairs()) {
Some(pairs) => {
if pairs.len() == 2 {
let (ref arg1, ref filename) = pairs[0];
let (ref arg2, ref signature) = pairs[1];
if &arg1[..]=="file" && &arg2[..]=="signature" {
check_signature(key, &filename[..], &signature[..])
}
else { StatusCode::BadRequest }
}
else { StatusCode::BadRequest }
},
_ => StatusCode::NotFound,
}
}
fn send_response(res: Response) {
match res.status() {
StatusCode::Ok =>
{ res.send(b"<h1>Server says everything is a-okay</h1>\n").unwrap(); },
StatusCode::BadRequest =>
{ res.send(b"<h1>400: Bad Request</h1>\n").unwrap(); },
StatusCode::NotFound =>
{ res.send(b"<h1>404: Not Found</h1>\n").unwrap(); },
StatusCode::MethodNotAllowed =>
{ res.send(b"<h1>405: Method Not Allowed</h1>\n").unwrap(); },
StatusCode::InternalServerError =>
{ res.send(b"<h1>500: Internal Server Error</h1>\n").unwrap(); },
_ => {},
}
}
fn check_signature(key: &[u8], filename: &str, signature: &str) -> StatusCode {
use rustc_serialize::hex::FromHex;
let parsed_signature = match signature.from_hex() {
Ok(sig) => sig,
_ => return StatusCode::BadRequest,
};
let file_hash = match file_hmac(key, filename) {
Ok(sha1) => sha1,
_ => return StatusCode::NotFound,<|fim▁hole|> else {
StatusCode::InternalServerError
}
}
fn file_hmac(key: &[u8], filename: &str) -> std::io::Result<[u8; 20]> {
use std::io::prelude::*;
use std::fs::File;
let mut file = try!(File::open(filename));
let mut s = String::new();
try!(file.read_to_string(&mut s));
Ok(hmac_sha1::hmac_sha1(key, &s.into_bytes()[..]))
}
fn insecure_compare(first: &[u8], second: &[u8]) -> bool {
for (x, y) in first.iter().zip(second.iter()) {
if { x != y } { return false; }
std::thread::sleep_ms(DELAY);
}
if first.len() != second.len() { //do this after step-by-step to preserve
return false; //element-by-element comparison
}
true
}
#[cfg(test)]
mod tests {
#[test] #[ignore]
fn insecure_compare() {
assert!(super::insecure_compare(b"yellow submarine", b"yellow submarine"),
"should have been equal");
assert!(!super::insecure_compare(b"yellow submarine", b"yellow_submarine"),
"should have been unequal");
}
}<|fim▁end|> | };
if insecure_compare(&file_hash[..], &parsed_signature[..]) {
StatusCode::Ok
} |
<|file_name|>download-exercise-images.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import requests
import os
from optparse import make_option
from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core.files import File
from django.core.files.temp import NamedTemporaryFile
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from django.conf import settings
from wger.exercises.models import Exercise, ExerciseImage
class Command(BaseCommand):
'''
Download exercise images from wger.de and updates the local database
The script assumes that the local IDs correspond to the remote ones, which
is the case if the user installed the exercises from the JSON fixtures.
Otherwise, the exercise is simply skipped
'''
option_list = BaseCommand.option_list + (
make_option('--remote-url',
action='store',
dest='remote_url',
default='https://wger.de',
help='Remote URL to fetch the exercises from (default: https://wger.de)'),
)
help = ('Download exercise images from wger.de and update the local database\n'
'\n'
'ATTENTION: The script will download the images from the server and add them\n'
' to your local exercises. The exercises are identified by\n'
' their UUID field, if you manually edited or changed it\n'
' the script will not be able to match them.')
def handle(self, *args, **options):
if not settings.MEDIA_ROOT:
raise ImproperlyConfigured('Please set MEDIA_ROOT in your settings file')
remote_url = options['remote_url']
try:
val = URLValidator()
val(remote_url)
except ValidationError:
raise CommandError('Please enter a valid URL')
exercise_api = "{0}/api/v2/exercise/?limit=999"
image_api = "{0}/api/v2/exerciseimage/?exercise={1}"
thumbnail_api = "{0}/api/v2/exerciseimage/{1}/thumbnails/"
# Get all exercises
result = requests.get(exercise_api.format(remote_url)).json()
for exercise_json in result['results']:
exercise_name = exercise_json['name'].encode('utf-8')
exercise_uuid = exercise_json['uuid']
exercise_id = exercise_json['id']
self.stdout.write('')
self.stdout.write(u"*** Processing {0} (ID: {1}, UUID: {2})".format(exercise_name,
exercise_id,
exercise_uuid))
try:
exercise = Exercise.objects.get(uuid=exercise_uuid)
except Exercise.DoesNotExist:
self.stdout.write(' Remote exercise not found in local DB, skipping...')
continue
# Get all images<|fim▁hole|>
for image_json in images['results']:
image_id = image_json['id']
result = requests.get(thumbnail_api.format(remote_url, image_id)).json()
image_name = os.path.basename(result['original'])
self.stdout.write(' Fetching image {0} - {1}'.format(image_id, image_name))
try:
image = ExerciseImage.objects.get(pk=image_id)
self.stdout.write(' --> Image already present locally, skipping...')
continue
except ExerciseImage.DoesNotExist:
self.stdout.write(' --> Image not found in local DB, creating now...')
image = ExerciseImage()
image.pk = image_id
# Save the downloaded image, see link for details
# http://stackoverflow.com/questions/1308386/programmatically-saving-image-to-
retrieved_image = requests.get(result['original'])
img_temp = NamedTemporaryFile(delete=True)
img_temp.write(retrieved_image.content)
img_temp.flush()
image.exercise = exercise
image.is_main = image_json['is_main']
image.status = image_json['status']
image.image.save(
os.path.basename(image_name),
File(img_temp),
)
image.save()
else:
self.stdout.write(' No images for this exercise, nothing to do')<|fim▁end|> | images = requests.get(image_api.format(remote_url, exercise_id)).json()
if images['count']: |
<|file_name|>TextItem.cpp<|end_file_name|><|fim▁begin|>#include "TextItem.h"
#include <QPainter>
#include <QFont>
#include <QDebug>
////////////////////////////////////////////////////////////////
TextItem::TextItem(const QString& text, QGraphicsLayoutItem *parent)
: BaseItem(parent)
{
_text = text;
QFont font;
font.setPointSize(11);
font.setBold(false);
setFont(font);
}
////////////////////////////////////////////////////////////////
TextItem::~TextItem()
{
}
////////////////////////////////////////////////////////////////<|fim▁hole|> _font = font;
QFontMetrics fm(_font);
}
////////////////////////////////////////////////////////////////
QSizeF TextItem::measureSize() const
{
QFontMetrics fm(_font);
const QSizeF& size = fm.size(Qt::TextExpandTabs, _text);
// NOTE: flag Qt::TextSingleLine ignores newline characters.
return size;
}
////////////////////////////////////////////////////////////////
void TextItem::draw(QPainter *painter, const QRectF& bounds)
{
painter->setFont(_font);
// TODO: mozno bude treba specialne handlovat novy riadok
painter->drawText(bounds, _text);
}<|fim▁end|> | void TextItem::setFont(const QFont &font)
{ |
<|file_name|>CLP.cpp<|end_file_name|><|fim▁begin|>#include "CLP.h"
#include <sstream>
#include <iostream>
namespace osuCrypto
{
void CLP::parse(int argc, char const*const* argv)
{
if (argc > 0)
{
std::stringstream ss;
auto ptr = argv[0];
while (*ptr != 0)
ss << *ptr++;
mProgramName = ss.str();
}
for (int i = 1; i < argc;)
{
auto ptr = argv[i];
if (*ptr++ != '-')
{
throw CommandLineParserError();
}
std::stringstream ss;
while (*ptr != 0)
ss << *ptr++;
++i;
ptr = argv[i];
std::pair<std::string, std::list<std::string>> keyValues;
keyValues.first = ss.str();;
while (i < argc && (ptr[0] != '-' || (ptr[0] == '-' && ptr[1] >= '0' && ptr[1] <= '9')))
{
ss.str("");
while (*ptr != 0)
ss << *ptr++;
keyValues.second.push_back(ss.str());
++i;
ptr = argv[i];
}
mKeyValues.emplace(keyValues);
}
}
void CLP::setDefault(std::string key, std::string value)
{
if (hasValue(key) == false)
{
if (isSet(key))
{
mKeyValues[key].emplace_back(value);
}
else
{
mKeyValues.emplace(std::make_pair(key, std::list<std::string>{ value }));
}
}
}
void CLP::setDefault(std::vector<std::string> keys, std::string value)
{
if (hasValue(keys) == false)
{
setDefault(keys[0], value);
}
}
bool CLP::isSet(std::string name)
{
return mKeyValues.find(name) != mKeyValues.end();
}
<|fim▁hole|> bool CLP::isSet(std::vector<std::string> names)
{
for (auto name : names)
{
if (isSet(name))
{
return true;
}
}
return false;
}
bool CLP::hasValue(std::string name)
{
return mKeyValues.find(name) != mKeyValues.end() && mKeyValues[name].size();
}
bool CLP::hasValue(std::vector<std::string> names)
{
for (auto name : names)
{
if (hasValue(name))
{
return true;
}
}
return false;
}
//
//int CLP::getInt(std::vector<std::string> names, std::string failMessage)
//{
// for (auto name : names)
// {
// if (hasValue(name))
// {
// return getInt(name);
// }
// }
//
// if (failMessage != "")
// std::cout << failMessage << std::endl;
//
// throw CommandLineParserError();
//}
//
//double CLP::getDouble(std::string name)
//{
// std::stringstream ss;
// ss << *mKeyValues[name].begin();
//
// double ret;
// ss >> ret;
//
// return ret;
//}
//
//double CLP::getDouble(std::vector<std::string> names, std::string failMessage)
//{
// for (auto name : names)
// {
// if (hasValue(name))
// {
// return getDouble(name);
// }
// }
//
// if (failMessage != "")
// std::cout << failMessage << std::endl;
//
// throw CommandLineParserError();
//}
//
//std::string CLP::getString(std::string name)
//{
// return *mKeyValues[name].begin();
//}
//
//std::list<std::string> CLP::getStrings(std::string name)
//{
// return mKeyValues[name];
//}
//
//std::list<std::string> CLP::getStrings(std::vector<std::string> names, std::string failMessage)
//{
// for (auto name : names)
// {
// if (hasValue(name))
// {
// return getStrings(name);
// }
// }
//
// if (failMessage != "")
// std::cout << failMessage << std::endl;
//
// throw CommandLineParserError();
//}
//
//
//std::string CLP::getString(std::vector<std::string> names, std::string failMessage)
//{
// for (auto name : names)
// {
// if (hasValue(name))
// {
// return getString(name);
// }
// }
//
// if (failMessage != "")
// std::cout << failMessage << std::endl;
//
// throw CommandLineParserError();
//}
//
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Natural Language Toolkit: Classifiers
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Edward Loper <[email protected]>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
Classes and interfaces for labeling tokens with category labels (or
"class labels"). Typically, labels are represented with strings
(such as ``'health'`` or ``'sports'``). Classifiers can be used to
perform a wide range of classification tasks. For example,
classifiers can be used...
- to classify documents by topic
- to classify ambiguous words by which word sense is intended
- to classify acoustic signals by which phoneme they represent
- to classify sentences by their author
Features
========
In order to decide which category label is appropriate for a given
token, classifiers examine one or more 'features' of the token. These
"features" are typically chosen by hand, and indicate which aspects
of the token are relevant to the classification decision. For
example, a document classifier might use a separate feature for each
word, recording how often that word occurred in the document.
Featuresets
===========
The features describing a token are encoded using a "featureset",
which is a dictionary that maps from "feature names" to "feature
values". Feature names are unique strings that indicate what aspect
of the token is encoded by the feature. Examples include
``'prevword'``, for a feature whose value is the previous word; and
``'contains-word(library)'`` for a feature that is true when a document
contains the word ``'library'``. Feature values are typically
booleans, numbers, or strings, depending on which feature they
describe.
Featuresets are typically constructed using a "feature detector"
(also known as a "feature extractor"). A feature detector is a
function that takes a token (and sometimes information about its
context) as its input, and returns a featureset describing that token.
For example, the following feature detector converts a document
(stored as a list of words) to a featureset describing the set of
words included in the document:
>>> # Define a feature detector function.
<|fim▁hole|> >>> def document_features(document):
... return dict([('contains-word(%s)' % w, True) for w in document])
Feature detectors are typically applied to each token before it is fed
to the classifier:
>>> # Classify each Gutenberg document.
>>> from nltk.corpus import gutenberg
>>> for fileid in gutenberg.fileids(): # doctest: +SKIP
... doc = gutenberg.words(fileid) # doctest: +SKIP
... print fileid, classifier.classify(document_features(doc)) # doctest: +SKIP
The parameters that a feature detector expects will vary, depending on
the task and the needs of the feature detector. For example, a
feature detector for word sense disambiguation (WSD) might take as its
input a sentence, and the index of a word that should be classified,
and return a featureset for that word. The following feature detector
for WSD includes features describing the left and right contexts of
the target word:
>>> def wsd_features(sentence, index):
... featureset = {}
... for i in range(max(0, index-3), index):
... featureset['left-context(%s)' % sentence[i]] = True
... for i in range(index, max(index+3, len(sentence))):
... featureset['right-context(%s)' % sentence[i]] = True
... return featureset
Training Classifiers
====================
Most classifiers are built by training them on a list of hand-labeled
examples, known as the "training set". Training sets are represented
as lists of ``(featuredict, label)`` tuples.
"""
from nltk.classify.api import ClassifierI, MultiClassifierI
from nltk.classify.megam import config_megam, call_megam
from nltk.classify.weka import WekaClassifier, config_weka
from nltk.classify.naivebayes import NaiveBayesClassifier
from nltk.classify.positivenaivebayes import PositiveNaiveBayesClassifier
from nltk.classify.decisiontree import DecisionTreeClassifier
from nltk.classify.rte_classify import rte_classifier, rte_features, RTEFeatureExtractor
from nltk.classify.util import accuracy, apply_features, log_likelihood
from nltk.classify.scikitlearn import SklearnClassifier
from nltk.classify.maxent import (MaxentClassifier, BinaryMaxentFeatureEncoding,
TypedMaxentFeatureEncoding,
ConditionalExponentialClassifier)
from nltk.classify.senna import Senna
from nltk.classify.textcat import TextCat<|fim▁end|> | |
<|file_name|>base_test.go<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: ISC
// Copyright (c) 2014-2020 Bitmark Inc.
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package transactionrecord_test
import (
"bytes"
"encoding/json"
"reflect"
"testing"
"golang.org/x/crypto/ed25519"
"github.com/bitmark-inc/bitmarkd/currency"
"github.com/bitmark-inc/bitmarkd/fault"
"github.com/bitmark-inc/bitmarkd/merkle"
"github.com/bitmark-inc/bitmarkd/transactionrecord"
"github.com/bitmark-inc/bitmarkd/util"
)
// test the packing/unpacking of base record
//
// ensures that pack->unpack returns the same original value
func TestPackBaseData(t *testing.T) {

	proofedByAccount := makeAccount(proofedBy.publicKey)

	// record under test; Nonce is an arbitrary non-zero test value
	r := transactionrecord.OldBaseData{
		Currency:       currency.Bitcoin,
		PaymentAddress: "mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJRfn",
		Owner:          proofedByAccount,
		Nonce:          0x12345678,
	}

	// expected packed binary form of the record above, *without* the
	// signature (the signature length + bytes are appended further down)
	expected := []byte{
		0x01, 0x01, 0x22, 0x6d, 0x69, 0x70, 0x63, 0x42,
		0x62, 0x46, 0x67, 0x39, 0x67, 0x4d, 0x69, 0x43,
		0x68, 0x38, 0x31, 0x4b, 0x6a, 0x38, 0x74, 0x71,
		0x71, 0x64, 0x67, 0x6f, 0x5a, 0x75, 0x62, 0x31,
		0x5a, 0x4a, 0x52, 0x66, 0x6e, 0x21, 0x13, 0x55,
		0xb2, 0x98, 0x88, 0x17, 0xf7, 0xea, 0xec, 0x37,
		0x74, 0x1b, 0x82, 0x44, 0x71, 0x63, 0xca, 0xaa,
		0x5a, 0x9d, 0xb2, 0xb6, 0xf0, 0xce, 0x72, 0x26,
		0x26, 0x33, 0x8e, 0x5e, 0x3f, 0xd7, 0xf7, 0xf8,
		0xac, 0xd1, 0x91, 0x01,
	}

	// expected transaction id (merkle digest) of the fully packed record
	expectedTxId := merkle.Digest{
		0x9e, 0x93, 0x2b, 0x8e, 0xa1, 0xa3, 0xd4, 0x30,
		0xc5, 0x9a, 0x23, 0xfd, 0x56, 0x75, 0xe8, 0xba,
		0x64, 0x0e, 0xe8, 0x1c, 0xf3, 0x0e, 0x68, 0xca,
		0x14, 0x8e, 0xe1, 0x1f, 0x13, 0xdb, 0xd4, 0x27,
	}

	// manually sign the record and attach signature to "expected"
	signature := ed25519.Sign(proofedBy.privateKey, expected)
	r.Signature = signature
	//t.Logf("signature: %#v", r.Signature)
	l := util.ToVarint64(uint64(len(signature)))
	expected = append(expected, l...)
	expected = append(expected, signature...)

	// test the packer
	packed, err := r.Pack(proofedByAccount)
	if nil != err {
		t.Fatalf("pack error: %s", err)
	}

	// if either of above fail we will have the message _without_ a signature
	if !bytes.Equal(packed, expected) {
		t.Errorf("pack record: %x  expected: %x", packed, expected)
		t.Errorf("*** GENERATED Packed:\n%s", util.FormatBytes("expected", packed))
		t.Fatal("fatal error")
	}

	// check the record type
	if transactionrecord.BaseDataTag != packed.Type() {
		t.Fatalf("pack record type: %x  expected: %x", packed.Type(), transactionrecord.BaseDataTag)
	}

	t.Logf("Packed length: %d bytes", len(packed))

	// check txIds
	txId := packed.MakeLink()

	if txId != expectedTxId {
		t.Errorf("pack tx id: %#v  expected: %#v", txId, expectedTxId)
		t.Errorf("*** GENERATED tx id:\n%s", util.FormatBytes("expectedTxId", txId[:]))
	}

	// test the unpacker; n is the number of bytes consumed
	unpacked, n, err := packed.Unpack(true)
	if nil != err {
		t.Fatalf("unpack error: %s", err)
	}
	if len(packed) != n {
		t.Errorf("did not unpack all data: only used: %d of: %d bytes", n, len(packed))
	}

	baseData, ok := unpacked.(*transactionrecord.OldBaseData)
	if !ok {
		t.Fatalf("did not unpack to BaseData")
	}

	// display a JSON version for information
	item := struct {
		TxId     merkle.Digest
		BaseData *transactionrecord.OldBaseData
	}{
		TxId:     txId,
		BaseData: baseData,
	}
	b, err := json.MarshalIndent(item, "", "  ")
	if nil != err {
		t.Fatalf("json error: %s", err)
	}

	t.Logf("BaseData: JSON: %s", b)

	// check that structure is preserved through Pack/Unpack
	// note reg is a pointer here
	if !reflect.DeepEqual(r, *baseData) {
		t.Errorf("different, original: %v  recovered: %v", r, *baseData)
	}
	checkPackedData(t, "base data", packed)
}
// test the pack failure on trying to use the zero public key
func TestPackBaseDataWithZeroAccount(t *testing.T) {
proofedByAccount := makeAccount(theZeroKey.publicKey)
<|fim▁hole|> Nonce: 0x12345678,
Signature: []byte{1, 2, 3, 4},
}
// test the packer
_, err := r.Pack(proofedByAccount)
if nil == err {
t.Fatalf("pack should have failed")
}
if fault.InvalidOwnerOrRegistrant != err {
t.Fatalf("unexpected pack error: %s", err)
}
}<|fim▁end|> | r := transactionrecord.OldBaseData{
Currency: currency.Bitcoin,
PaymentAddress: "mipcBbFg9gMiCh81Kj8tqqdgoZub1ZJRfn",
Owner: proofedByAccount, |
<|file_name|>learning_unit.py<|end_file_name|><|fim▁begin|>##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.conf import settings
from rest_framework import serializers
from base.models.enums.summary_status import SummaryStatus
from base.models.learning_unit_year import LearningUnitYear
from learning_unit.api.serializers.campus import LearningUnitCampusSerializer
from learning_unit.api.serializers.component import LearningUnitComponentSerializer
from learning_unit.api.serializers.utils import LearningUnitHyperlinkedIdentityField, \<|fim▁hole|>class LearningUnitTitleSerializer(serializers.ModelSerializer):
title = serializers.SerializerMethodField()
class Meta:
model = LearningUnitYear
fields = (
'title',
)
def get_title(self, learning_unit_year):
language = self.context['language']
return getattr(
learning_unit_year,
'full_title' + ('_' + language if language not in settings.LANGUAGE_CODE_FR else '')
)
class LearningUnitSerializer(LearningUnitTitleSerializer):
    """
    List-level serializer for ``LearningUnitYear``.

    Extends the title-only serializer with identification, entity and type
    fields used by listing endpoints.
    """
    # API detail URL and the corresponding OSIS back-office URL for this learning unit year
    url = LearningUnitHyperlinkedIdentityField(read_only=True)
    osis_url = serializers.HyperlinkedIdentityField(
        view_name='learning_unit',
        lookup_url_kwarg="learning_unit_year_id",
        read_only=True
    )
    # Entities are exposed as plain strings from annotated/derived model attributes
    requirement_entity = serializers.CharField(
        source='entity_requirement',
        read_only=True
    )
    allocation_entity = serializers.CharField(
        source='entity_allocation',
        read_only=True
    )
    academic_year = serializers.IntegerField(source='academic_year.year')
    # Container type comes from the related learning container year;
    # *_text fields use Django's get_FOO_display() for human-readable labels
    type = serializers.CharField(source='learning_container_year.container_type')
    type_text = serializers.CharField(source='get_container_type_display', read_only=True)
    subtype_text = serializers.CharField(source='get_subtype_display', read_only=True)
    has_proposal = serializers.SerializerMethodField()

    class Meta(LearningUnitTitleSerializer.Meta):
        model = LearningUnitYear
        fields = LearningUnitTitleSerializer.Meta.fields + (
            'url',
            'osis_url',
            'acronym',
            'academic_year',
            'credits',
            'status',
            'requirement_entity',
            'allocation_entity',
            'type',
            'type_text',
            'subtype',
            'subtype_text',
            'has_proposal',
        )

    def get_has_proposal(self, learning_unit_year):
        # "has_proposal" is presumably a queryset annotation; returns None
        # when the attribute is absent rather than raising — TODO confirm
        # against the view's queryset.
        return getattr(learning_unit_year, "has_proposal", None)
class LearningUnitDetailedSerializer(LearningUnitSerializer):
    """
    Detail-level serializer for ``LearningUnitYear``.

    Adds scheduling, language, campus, component, hierarchy (parent/partims),
    proposal and summary-status information on top of the list serializer.
    """
    # *_text fields use Django's get_FOO_display() for human-readable labels
    periodicity_text = serializers.CharField(source='get_periodicity_display', read_only=True)
    quadrimester_text = serializers.CharField(source='get_quadrimester_display', read_only=True)
    language = serializers.CharField(source='language.code', read_only=True)
    team = serializers.BooleanField(source='learning_container_year.team', read_only=True)
    campus = LearningUnitCampusSerializer(read_only=True)
    components = LearningUnitComponentSerializer(many=True, source='learningcomponentyear_set', read_only=True)
    # Hierarchy: full parent learning unit and its partim children, as hyperlinks
    parent = LearningUnitHyperlinkedRelatedField(read_only=True, lookup_field='acronym')
    partims = LearningUnitHyperlinkedRelatedField(read_only=True, many=True, source='get_partims_related')
    proposal = serializers.SerializerMethodField()
    summary_status = serializers.SerializerMethodField()
    remark = serializers.CharField(source='other_remark', read_only=True)
    remark_en = serializers.CharField(source='other_remark_english', read_only=True)

    class Meta(LearningUnitSerializer.Meta):
        model = LearningUnitYear
        fields = LearningUnitSerializer.Meta.fields + (
            'quadrimester',
            'quadrimester_text',
            'periodicity',
            'periodicity_text',
            'campus',
            'team',
            'language',
            'exchange_students',
            'french_friendly',
            'english_friendly',
            'components',
            'parent',
            'partims',
            'proposal',
            'summary_status',
            'professional_integration',
            'remark',
            'remark_en',
        )

    @staticmethod
    def get_proposal(learning_unit_year):
        # Empty dict (not None) when no proposal is attached, so clients can
        # always treat the field as a mapping.
        if not hasattr(learning_unit_year, "proposallearningunit"):
            return {}
        return {
            "folder": learning_unit_year.proposallearningunit.folder,
            "type": learning_unit_year.proposallearningunit.get_type_display(),
            "status": learning_unit_year.proposallearningunit.get_state_display(),
        }

    @staticmethod
    def get_summary_status(learning_unit_year):
        # Precedence: a truthy "summary_status" attribute (presumably a
        # queryset annotation — TODO confirm) means MODIFIED; otherwise a
        # locked summary means BLOCKED; otherwise NOT_MODIFIED.
        if getattr(learning_unit_year, "summary_status", False):
            return SummaryStatus.MODIFIED.value
        elif learning_unit_year.summary_locked:
            return SummaryStatus.BLOCKED.value
        return SummaryStatus.NOT_MODIFIED.value
class ExternalLearningUnitDetailedSerializer(LearningUnitDetailedSerializer):
local_url = serializers.CharField(source='externallearningunityear.url')
local_code = serializers.CharField(source='externallearningunityear.external_acronym')
class Meta(LearningUnitDetailedSerializer.Meta):
model = LearningUnitYear
fields = LearningUnitDetailedSerializer.Meta.fields + (
'local_code',
'local_url'
)<|fim▁end|> | LearningUnitHyperlinkedRelatedField
|
<|file_name|>course.py<|end_file_name|><|fim▁begin|>"""
Views related to operations on course objects
"""
import copy
import json
import logging
import random
import string # pylint: disable=deprecated-module
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseNotFound, Http404
from django.shortcuts import redirect
import django.utils
from django.utils.translation import ugettext as _
from django.views.decorators.http import require_http_methods, require_GET
from django.views.decorators.csrf import ensure_csrf_cookie
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import Location
from .component import (
ADVANCED_COMPONENT_TYPES,
SPLIT_TEST_COMPONENT_TYPE,
)
from .item import create_xblock_info
from .library import LIBRARIES_ENABLED
from contentstore import utils
from contentstore.course_group_config import (
COHORT_SCHEME,
GroupConfiguration,
GroupConfigurationsValidationError,
RANDOM_SCHEME,
)
from contentstore.course_info_model import get_course_updates, update_course_updates, delete_course_update
from contentstore.courseware_index import CoursewareSearchIndexer, SearchIndexingError
from contentstore.push_notification import push_notification_enabled
from contentstore.tasks import rerun_course
from contentstore.utils import (
add_instructor,
initialize_permissions,
get_lms_link_for_item,
remove_all_instructors,
reverse_course_url,
reverse_library_url,
reverse_usage_url,
reverse_url,
)
from contentstore.views.entrance_exam import (
create_entrance_exam,
delete_entrance_exam,
update_entrance_exam,
)
from course_action_state.managers import CourseActionStateItemNotFoundError
from course_action_state.models import CourseRerunState, CourseRerunUIStateManager
from course_creators.views import get_course_creator_status, add_user_with_status_unrequested
from edxmako.shortcuts import render_to_response
from microsite_configuration import microsite
from models.settings.course_grading import CourseGradingModel
from models.settings.course_metadata import CourseMetadata
from models.settings.encoder import CourseSettingsEncoder
from openedx.core.djangoapps.content.course_structures.api.v0 import api, errors
from openedx.core.djangoapps.credit.api import is_credit_course, get_credit_requirements
from openedx.core.djangoapps.credit.tasks import update_credit_course_requirements
from openedx.core.djangoapps.ga_optional.api import is_available
from openedx.core.djangoapps.ga_optional.models import (
CUSTOM_LOGO_OPTION_KEY,
LIBRARY_OPTION_KEY,
PROGRESS_RESTRICTION_OPTION_KEY
)
from openedx.core.djangoapps.models.course_details import CourseDetails
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.utils import get_programs
from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration
from openedx.core.lib.course_tabs import CourseTabPluginManager
from openedx.core.lib.courses import course_image_url, custom_logo_url
from openedx.core.lib.ga_course_utils import is_using_jwplayer_course
from openedx.core.lib.js_utils import escape_json_dumps
from student import auth
from student.auth import has_course_author_access, has_studio_write_access, has_studio_read_access
from student.models import CourseAccessRole
from student.roles import (
CourseInstructorRole, CourseStaffRole, CourseCreatorRole, GaGlobalCourseCreatorRole, GlobalStaff, UserBasedRole,
)
from util.date_utils import get_default_time_display
from util.json_request import JsonResponse, JsonResponseBadRequest, expect_json
from util.milestones_helpers import (
is_entrance_exams_enabled,
is_prerequisite_courses_enabled,
is_valid_course_key,
set_prerequisite_courses,
)
from ga_maintenance_cms.models import MaintenanceMessage
from util.organizations_helpers import (
add_organization_course,
get_organization_by_short_name,
organizations_enabled,
)
from util.string_utils import _has_non_ascii_characters
from xmodule.contentstore.content import StaticContent
from xmodule.course_module import CourseFields
from xmodule.course_module import DEFAULT_START_DATE
from xmodule.error_module import ErrorDescriptor
from xmodule.modulestore import EdxJSONEncoder
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, DuplicateCourseError
from xmodule.tabs import CourseTab, CourseTabList, InvalidTabsException
log = logging.getLogger(__name__)
__all__ = ['course_info_handler', 'course_handler', 'course_listing',
'library_listing',
'course_info_update_handler', 'course_search_index_handler',
'course_rerun_handler',
'settings_handler',
'grading_handler',
'advanced_settings_handler',
'course_notifications_handler',
'textbooks_list_handler', 'textbooks_detail_handler',
'group_configurations_list_handler', 'group_configurations_detail_handler']
class AccessListFallback(Exception):
    """
    An exception that is raised whenever we need to `fall back` to fetching *all* courses
    available to a user, rather than using a shorter method (i.e. fetching by group).

    Raised by ``_accessible_courses_list_from_groups`` when it encounters an
    org-level access role (one whose ``course_id`` is ``None``), since such
    roles cannot be resolved to individual courses.
    """
    pass
def get_course_and_check_access(course_key, user, depth=0):
    """
    Return the course module for ``course_key`` after verifying that ``user``
    has studio read access to it.

    Used by the view functions in this file.

    Raises:
        PermissionDenied: when the user lacks studio read access.
    """
    allowed = has_studio_read_access(user, course_key)
    if not allowed:
        raise PermissionDenied()
    return modulestore().get_course(course_key, depth=depth)
def reindex_course_and_check_access(course_key, user):
    """
    Restart the search indexing of a course after verifying that ``user``
    has author access to it.

    Raises:
        PermissionDenied: when the user lacks course author access.
    """
    allowed = has_course_author_access(user, course_key)
    if not allowed:
        raise PermissionDenied()
    store = modulestore()
    return CoursewareSearchIndexer.do_course_reindex(store, course_key)
@login_required
def course_notifications_handler(request, course_key_string=None, action_state_id=None):
    """
    Handle incoming requests for notifications in a RESTful way.

    course_key_string and action_state_id must both be set; else an
    HttpResponseBadRequest is returned.

    For each of these operations, the requesting user must have write access
    to the course; else a PermissionDenied error is raised.

    GET
        json: return json representing information about the notification (action, state, etc)
    DELETE
        json: return json representing success or failure of dismissal/deletion of the notification
    PUT
        Raises a NotImplementedError.
    POST
        Raises a NotImplementedError.
    """
    # ensure that we have a course and an action state
    if not course_key_string or not action_state_id:
        return HttpResponseBadRequest()

    response_format = request.REQUEST.get('format', 'html')
    course_key = CourseKey.from_string(course_key_string)

    # JSON is the only supported representation: either requested explicitly
    # via ?format=json or negotiated through the Accept header
    if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
        if not has_studio_write_access(request.user, course_key):
            raise PermissionDenied()
        if request.method == 'GET':
            return _course_notifications_json_get(action_state_id)
        elif request.method == 'DELETE':
            # we assume any delete requests dismiss actions from the UI
            return _dismiss_notification(request, action_state_id)
        elif request.method == 'PUT':
            raise NotImplementedError()
        elif request.method == 'POST':
            raise NotImplementedError()
        else:
            return HttpResponseBadRequest()
    else:
        # no HTML representation exists for notifications
        return HttpResponseNotFound()
def _course_notifications_json_get(course_action_state_id):
    """
    Return a JSON response describing the rerun action state with the given id
    (its action, state and display flag).

    Responds with HTTP 400 when no such action state exists.
    """
    try:
        rerun_state = CourseRerunState.objects.find_first(id=course_action_state_id)
    except CourseActionStateItemNotFoundError:
        return HttpResponseBadRequest()

    return JsonResponse({
        'action': rerun_state.action,
        'state': rerun_state.state,
        'should_display': rerun_state.should_display,
    })
def _dismiss_notification(request, course_action_state_id):  # pylint: disable=unused-argument
    """
    Dismiss (delete) the course rerun notification with the given id.

    A FAILED rerun additionally has all instructor permissions on its course
    key removed, since the course was never successfully created. Responds
    with HTTP 400 for unknown ids.
    """
    try:
        rerun_state = CourseRerunState.objects.find_first(id=course_action_state_id)
    except CourseActionStateItemNotFoundError:
        # Can't dismiss a notification that doesn't exist in the first place
        return HttpResponseBadRequest()

    if rerun_state.state == CourseRerunUIStateManager.State.FAILED:
        # We remove all permissions for this course key at this time, since
        # no further access is required to a course that failed to be created.
        remove_all_instructors(rerun_state.course_key)

    # The CourseRerunState is no longer needed by the UI; delete
    rerun_state.delete()

    return JsonResponse({'success': True})
# pylint: disable=unused-argument
@login_required
def course_handler(request, course_key_string=None):
    """
    The restful handler for course specific requests.
    It provides the course tree with the necessary information for identifying and labeling the parts. The root
    will typically be a 'course' object but may not be especially as we support modules.

    GET
        html: return course listing page if not given a course id
        html: return html page overview for the given course if given a course id
        json: return json representing the course branch's index entry as well as dag w/ all of the children
        replaced w/ json docs where each doc has {'_id': , 'display_name': , 'children': }
    POST
        json: create a course, return resulting json
        descriptor (same as in GET course/...). Leaving off /branch/draft would imply create the course w/ default
        branches. Cannot change the structure contents ('_id', 'display_name', 'children') but can change the
        index entry.
    PUT
        json: update this course (index entry not xblock) such as repointing head, changing display name, org,
        course, run. Return same json as above.
    DELETE
        json: delete this branch from this course (leaving off /branch/draft would imply delete the course)
    """
    try:
        response_format = request.REQUEST.get('format', 'html')
        # JSON branch: explicit ?format=json or negotiated via Accept header
        if response_format == 'json' or 'application/json' in request.META.get('HTTP_ACCEPT', 'application/json'):
            if request.method == 'GET':
                course_key = CourseKey.from_string(course_key_string)
                with modulestore().bulk_operations(course_key):
                    course_module = get_course_and_check_access(course_key, request.user, depth=None)
                    return JsonResponse(_course_outline_json(request, course_module))
            elif request.method == 'POST':  # not sure if this is only post. If one will have ids, it goes after access
                return _create_or_rerun_course(request)
            # write access is required for all remaining (mutating) methods
            elif not has_studio_write_access(request.user, CourseKey.from_string(course_key_string)):
                raise PermissionDenied()
            elif request.method == 'PUT':
                raise NotImplementedError()
            elif request.method == 'DELETE':
                raise NotImplementedError()
            else:
                return HttpResponseBadRequest()
        elif request.method == 'GET':  # assume html
            if course_key_string is None:
                # no course given: show the course listing (home) page
                return redirect(reverse("home"))
            else:
                return course_index(request, CourseKey.from_string(course_key_string))
        else:
            return HttpResponseNotFound()
    except InvalidKeyError:
        # malformed course key in the URL
        raise Http404
@login_required
@ensure_csrf_cookie
@require_http_methods(["GET"])
def course_rerun_handler(request, course_key_string):
    """
    The restful handler for course reruns.

    GET
        html: return html page with form to rerun a course for the given course id
    """
    # Only global staff (PMs) and GaGlobalCourseCreator are able to rerun courses during the soft launch
    # Note: GaGlobalCourseCreator has access to rerun (#2150)
    if not GlobalStaff().has_user(request.user) and not GaGlobalCourseCreatorRole().has_user(request.user):
        raise PermissionDenied()
    course_key = CourseKey.from_string(course_key_string)
    # depth=3 loads enough of the course tree for the rerun form's display data
    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user, depth=3)
        if request.method == 'GET':
            return render_to_response('course-create-rerun.html', {
                'source_course_key': course_key,
                'display_name': course_module.display_name,
                'user': request.user,
                'course_creator_status': _get_course_creator_status(request.user),
                'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False)
            })
@login_required
@ensure_csrf_cookie
@require_GET
def course_search_index_handler(request, course_key_string):
    """
    The restful handler for course indexing.

    GET
        html: return status of indexing task
        json: return status of indexing task
    """
    # Only global staff (PMs) and GaGlobalCourseCreator are able to index courses
    # Note: GaGlobalCourseCreator has access to course search index (#2150)
    if not GlobalStaff().has_user(request.user) and not GaGlobalCourseCreatorRole().has_user(request.user):
        raise PermissionDenied()
    course_key = CourseKey.from_string(course_key_string)
    # echo back the request's content type; default to JSON when absent
    content_type = request.META.get('CONTENT_TYPE', None)
    if content_type is None:
        content_type = "application/json; charset=utf-8"
    with modulestore().bulk_operations(course_key):
        try:
            reindex_course_and_check_access(course_key, request.user)
        except SearchIndexingError as search_err:
            # surface the indexing errors to the caller as a 500
            return HttpResponse(escape_json_dumps({
                "user_message": search_err.error_list
            }), content_type=content_type, status=500)
        return HttpResponse(escape_json_dumps({
            "user_message": _("Course has been successfully reindexed.")
        }), content_type=content_type, status=200)
def _course_outline_json(request, course_module):
    """
    Build a JSON-serializable outline of the course module, recursively
    including all of its children.
    """
    def _include_children(xblock):
        # stop descending once the 'vertical' (unit) level is reached
        return xblock.category != 'vertical'

    return create_xblock_info(
        course_module,
        include_child_info=True,
        course_outline=True,
        include_children_predicate=_include_children,
        user=request.user
    )
def _accessible_courses_list(request):
    """
    Return ``(courses, in_process_course_actions)`` for the logged-in user by
    scanning every course in the modulestore.
    """
    def _is_usable(course):
        """True for courses the user may read that are neither broken nor templates."""
        if isinstance(course, ErrorDescriptor):
            return False
        # pylint: disable=fixme
        # TODO remove this condition when templates purged from db
        if course.location.course == 'templates':
            return False
        return has_studio_read_access(request.user, course.id)

    courses = [course for course in modulestore().get_courses() if _is_usable(course)]

    # rerun actions that have not yet succeeded and should still be shown
    unsucceeded_actions = CourseRerunState.objects.find_all(
        exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED}, should_display=True
    )
    in_process_course_actions = [
        action for action in unsucceeded_actions
        if has_studio_read_access(request.user, action.course_key)
    ]

    return courses, in_process_course_actions
def _accessible_courses_list_from_groups(request):
    """
    Return ``(courses, in_process_course_actions)`` for the logged-in user by
    reversing the user's instructor/staff access-group memberships, which is
    cheaper than scanning every course in the modulestore.

    Raises:
        AccessListFallback: when an org-level role (one with no course_id) is
            found; the caller must fall back to ``_accessible_courses_list``.
    """
    courses_list = {}
    in_process_course_actions = []

    instructor_courses = UserBasedRole(request.user, CourseInstructorRole.ROLE).courses_with_role()
    staff_courses = UserBasedRole(request.user, CourseStaffRole.ROLE).courses_with_role()
    all_courses = instructor_courses | staff_courses

    for course_access in all_courses:
        course_key = course_access.course_id
        if course_key is None:
            # If the course_access does not have a course_id, it's an org-based role, so we fall back
            raise AccessListFallback
        if course_key not in courses_list:
            # check for any course action state for this course
            in_process_course_actions.extend(
                CourseRerunState.objects.find_all(
                    exclude_args={'state': CourseRerunUIStateManager.State.SUCCEEDED},
                    should_display=True,
                    course_key=course_key,
                )
            )
            # check for the course itself
            try:
                course = modulestore().get_course(course_key)
            except ItemNotFoundError:
                # If a user has access to a course that doesn't exist, don't do anything with that course.
                # BUGFIX: previously this branch fell through, leaving `course` either unbound
                # (NameError on the first iteration) or stale from the previous loop pass,
                # which could store the wrong course under this course_key.
                continue

            if course is not None and not isinstance(course, ErrorDescriptor):
                # ignore deleted or errored courses
                courses_list[course_key] = course

    return courses_list.values(), in_process_course_actions
def _accessible_libraries_list(user):
    """
    Return every library whose key the given user has studio read access to.
    """
    # No need to worry about ErrorDescriptors - split's get_libraries() never returns them.
    accessible = []
    for library in modulestore().get_libraries():
        if has_studio_read_access(user, library.location.library_key):
            accessible.append(library)
    return accessible
@login_required
@ensure_csrf_cookie
def course_listing(request):
    """
    List all courses available to the logged in user.

    Renders the Studio home page (index.html) with the user's courses,
    in-process rerun actions, accessible libraries and (when enabled)
    programs.
    """
    courses, in_process_course_actions = get_courses_accessible_to_user(request)
    libraries = _accessible_libraries_list(request.user) if LIBRARIES_ENABLED else []

    programs_config = ProgramsApiConfig.current()
    raw_programs = get_programs(request.user) if programs_config.is_studio_tab_enabled else []

    # Sort programs alphabetically by name.
    # TODO: Support ordering in the Programs API itself.
    programs = sorted(raw_programs, key=lambda p: p['name'].lower())

    def format_in_process_course_view(uca):
        """
        Return a dict of the data which the view requires for each unsucceeded course
        """
        return {
            'display_name': uca.display_name,
            'course_key': unicode(uca.course_key),
            'org': uca.course_key.org,
            'number': uca.course_key.course,
            'run': uca.course_key.run,
            'is_failed': True if uca.state == CourseRerunUIStateManager.State.FAILED else False,
            'is_in_progress': True if uca.state == CourseRerunUIStateManager.State.IN_PROGRESS else False,
            # only failed reruns can be dismissed from the UI
            'dismiss_link': reverse_course_url(
                'course_notifications_handler',
                uca.course_key,
                kwargs={
                    'action_state_id': uca.id,
                },
            ) if uca.state == CourseRerunUIStateManager.State.FAILED else ''
        }

    def format_library_for_view(library):
        """
        Return a dict of the data which the view requires for each library
        """
        return {
            'display_name': library.display_name,
            'library_key': unicode(library.location.library_key),
            'url': reverse_library_url('library_handler', unicode(library.location.library_key)),
            'org': library.display_org_with_default,
            'number': library.display_number_with_default,
            'can_edit': has_studio_write_access(request.user, library.location.library_key),
        }

    # hide courses that still have a pending rerun action from the main list
    courses = _remove_in_process_courses(courses, in_process_course_actions)
    in_process_course_actions = [format_in_process_course_view(uca) for uca in in_process_course_actions]
    maintenance_message = MaintenanceMessage.messages_for_all()

    return render_to_response('index.html', {
        'courses': courses,
        'in_process_course_actions': in_process_course_actions,
        'libraries_enabled': LIBRARIES_ENABLED,
        'libraries': [format_library_for_view(lib) for lib in libraries],
        'show_new_library_button': LIBRARIES_ENABLED and request.user.is_active,
        'user': request.user,
        'request_course_creator_url': reverse('contentstore.views.request_course_creator'),
        'course_creator_status': _get_course_creator_status(request.user),
        # Note: GaGlobalCourseCreator can see the rerun status (#2150)
        'rerun_creator_status': GlobalStaff().has_user(request.user) or GaGlobalCourseCreatorRole().has_user(request.user),
        'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False),
        'allow_course_reruns': settings.FEATURES.get('ALLOW_COURSE_RERUNS', True),
        'maintenance_message': maintenance_message,
        # Note: GaGlobalCourseCreator has same authority a global staff in studio (#2150)
        'is_programs_enabled': programs_config.is_studio_tab_enabled and (
            request.user.is_staff or GaGlobalCourseCreatorRole().has_user(request.user)),
        'programs': programs,
        'program_authoring_url': reverse('programs'),
    })
@login_required
@ensure_csrf_cookie
def library_listing(request, course_key_string=None):
    """
    List the content libraries attached to a course (i.e. those whose keys
    appear in the course's ``target_library``).

    Raises:
        Http404: when the course cannot be loaded, the library option is not
            enabled for the course, or the user holds no instructor role, is
            not global staff and is not a GaGlobalCourseCreator.
    """
    def format_library_for_view(library, course):
        """
        Return a dict of the data which the view requires for each library
        """
        return {
            'display_name': library.display_name,
            'library_key': unicode(library.location.library_key),
            'url': reverse_course_url('course_library_handler', course, kwargs={'library_key_string': unicode(library.location.library_key)}),
            'org': library.display_org_with_default,
            'number': library.display_number_with_default,
            'can_edit': has_studio_write_access(request.user, library.location.library_key),
        }

    maintenance_message = MaintenanceMessage.messages_for_all()
    course_key = CourseKey.from_string(course_key_string)
    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user, depth=None)
        instructor_courses = UserBasedRole(request.user, CourseInstructorRole.ROLE).courses_with_role()
        global_staff = GlobalStaff().has_user(request.user)
        # Note: GaGlobalCourseCreator has access to library listing (#2150)
        ga_global_course_creator = GaGlobalCourseCreatorRole().has_user(request.user)
        if not course_module or not is_available(LIBRARY_OPTION_KEY, course_key):
            raise Http404
        if not instructor_courses and not global_staff and not ga_global_course_creator:
            raise Http404
        target_libraries = course_module.target_library
        # Keep only the libraries this course explicitly targets.
        # (Replaces a remove-while-iterating-over-a-copy loop with a filter.)
        libraries = [
            library for library in modulestore().get_libraries()
            if unicode(library.location.library_key) in target_libraries
        ]
        return render_to_response('index_lib.html', {
            'context_course': course_module,
            'libraries_enabled': LIBRARIES_ENABLED,
            'libraries': [format_library_for_view(lib, course_key) for lib in libraries],
            'show_new_library_button': LIBRARIES_ENABLED and request.user.is_active,
            'library_option': is_available(LIBRARY_OPTION_KEY, course_key),
            'user': request.user,
            'maintenance_message': maintenance_message,
            'allow_unicode_course_id': settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID', False)
        })
def _get_rerun_link_for_item(course_key):
    """
    Build the Studio URL that starts a re-run of the course identified by
    ``course_key``.
    """
    handler_name = 'course_rerun_handler'
    return reverse_course_url(handler_name, course_key)
def _deprecated_blocks_info(course_module, deprecated_block_types):
    """
    Collect deprecation information about ``deprecated_block_types``.

    Arguments:
        course_module (CourseDescriptor): course object
        deprecated_block_types (list): deprecated block type names

    Returns a dict with the keys:
        block_types (list): the deprecated block types that were checked
        block_types_enabled (bool): True when any of them appears in the
            course's Advanced Module List
        blocks (list): [parent container URL, component display name] pairs
            for every deprecated component found in the course structure
        advance_settings_url (str): URL of the advanced settings page
    """
    advanced_modules = course_module.advanced_modules
    info = {
        'block_types': deprecated_block_types,
        'block_types_enabled': any(
            block_type in advanced_modules for block_type in deprecated_block_types
        ),
        'blocks': [],
        'advance_settings_url': reverse_course_url('advanced_settings_handler', course_module.id),
    }
    try:
        structure_data = api.course_structure(course_module.id, block_types=deprecated_block_types)
    except errors.CourseStructureNotAvailableError:
        # The course structure has not been generated yet; return what we have.
        return info
    info['blocks'].extend(
        [reverse_usage_url('container_handler', block['parent']), block['display_name']]
        for block in structure_data['blocks'].values()
    )
    return info
@login_required
@ensure_csrf_cookie
def course_index(request, course_key):
    """
    Display an editable course overview.

    org, course, name: Attributes of the Location for the item to edit

    Renders ``course_outline.html`` with the full course structure, rerun
    notification state (if a rerun of this course is in progress) and links
    to the settings/reindex pages.
    """
    # A depth of None implies the whole course. The course outline needs this in order to compute has_changes.
    # A unit may not have a draft version, but one of its components could, and hence the unit itself has changes.
    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user, depth=None)
        if not course_module:
            raise Http404
        lms_link = get_lms_link_for_item(course_module.location)
        reindex_link = None
        if settings.FEATURES.get('ENABLE_COURSEWARE_INDEX', False):
            reindex_link = "/course/{course_id}/search_reindex".format(course_id=unicode(course_key))
        sections = course_module.get_children()
        course_structure = _course_outline_json(request, course_module)
        # Optional ?show=<locator> query parameter: expand/scroll to this item.
        locator_to_show = request.REQUEST.get('show', None)
        course_release_date = get_default_time_display(course_module.start) if course_module.start != DEFAULT_START_DATE else _("Unscheduled")
        settings_url = reverse_course_url('settings_handler', course_key)

        # A rerun action may still be running/displayable for this course.
        try:
            current_action = CourseRerunState.objects.find_first(course_key=course_key, should_display=True)
        except (ItemNotFoundError, CourseActionStateItemNotFoundError):
            current_action = None

        deprecated_blocks_info = _deprecated_blocks_info(course_module, settings.DEPRECATED_BLOCK_TYPES)

        return render_to_response('course_outline.html', {
            'context_course': course_module,
            'lms_link': lms_link,
            'sections': sections,
            'course_structure': course_structure,
            'initial_state': course_outline_initial_state(locator_to_show, course_structure) if locator_to_show else None,
            'rerun_notification_id': current_action.id if current_action else None,
            'course_release_date': course_release_date,
            'settings_url': settings_url,
            'reindex_link': reindex_link,
            'deprecated_blocks_info': deprecated_blocks_info,
            'notification_dismiss_url': reverse_course_url(
                'course_notifications_handler',
                current_action.course_key,
                kwargs={
                    'action_state_id': current_action.id,
                },
            ) if current_action else None,
            'library_option': is_available(LIBRARY_OPTION_KEY, course_key),
            'is_restricted_in_progress': is_available(PROGRESS_RESTRICTION_OPTION_KEY, course_key),
        })
def get_courses_accessible_to_user(request):
    """
    Return ``(courses, in_process_course_actions)`` accessible to the user.

    Tries the fast path first (deriving the list from django auth groups) and
    falls back to scanning all courses when that fails.  Note: the fallback
    increases pymongo read overhead.
    """
    user = request.user
    # Note: GaGlobalCourseCreator has access to courses (#2150)
    has_global_access = GlobalStaff().has_user(user) or GaGlobalCourseCreatorRole().has_user(user)
    if has_global_access:
        # User has global access, so there is no need to consult django groups.
        return _accessible_courses_list(request)
    try:
        return _accessible_courses_list_from_groups(request)
    except AccessListFallback:
        # The user has some old groups or group lookup failed, so fall back
        # to iterating through all courses.
        return _accessible_courses_list(request)
def _remove_in_process_courses(courses, in_process_course_actions):
    """
    Removes any in-process courses from the ``courses`` list and formats the
    remainder for the view.  "In-process" refers to courses that are in the
    process of being generated for a re-run.

    Arguments:
        courses: iterable of course descriptors (may contain ErrorDescriptor
            entries, which are dropped)
        in_process_course_actions: iterable of course action state objects
            whose ``course_key`` identifies a course being rerun

    Returns:
        list of dicts, one per remaining course, with the fields the course
        listing template needs.
    """
    def format_course_for_view(course):
        """
        Return a dict of the data which the view requires for each course
        """
        return {
            'display_name': course.display_name,
            'course_key': unicode(course.location.course_key),
            'url': reverse_course_url('course_handler', course.id),
            'lms_link': get_lms_link_for_item(course.location),
            'rerun_link': _get_rerun_link_for_item(course.id),
            'org': course.display_org_with_default,
            'number': course.display_number_with_default,
            'run': course.location.run
        }

    # Use a set for O(1) membership tests instead of scanning a list for
    # every course (was O(len(courses) * len(actions))).
    in_process_action_course_keys = set(uca.course_key for uca in in_process_course_actions)
    return [
        format_course_for_view(c)
        for c in courses
        if not isinstance(c, ErrorDescriptor) and (c.id not in in_process_action_course_keys)
    ]
def course_outline_initial_state(locator_to_show, course_structure):
    """
    Return the desired initial state for the course outline view.

    When the 'show' request parameter was provided, the outline should open
    with the requested item fully expanded and scrolled into view.

    Arguments:
        locator_to_show: locator string of the item to reveal
        course_structure: nested xblock-info dict ('id' plus optional
            'child_info' -> 'children')

    Returns a dict with 'locator_to_show' and 'expanded_locators', or None
    when the locator is not present in the structure.
    """
    def _find_node(node, locator):
        """Depth-first search for the xblock info whose id matches locator."""
        if node['id'] == locator:
            return node
        child_info = node.get('child_info', None)
        children = child_info['children'] if child_info else None
        if children:
            for child in children:
                match = _find_node(child, locator)
                if match:
                    return match
        return None

    def _gather_locators(acc, node):
        """Append the ids of node and all of its descendants to acc."""
        acc.append(node['id'])
        child_info = node.get('child_info', None)
        children = child_info['children'] if child_info else None
        if children:
            for child in children:
                _gather_locators(acc, child)

    target = _find_node(course_structure, locator_to_show)
    if not target:
        return None
    expanded_locators = []
    _gather_locators(expanded_locators, target)
    return {
        'locator_to_show': locator_to_show,
        'expanded_locators': expanded_locators
    }
@expect_json
def _create_or_rerun_course(request):
    """
    To be called by requests that create a new destination course (i.e., create_new_course and rerun_course)
    Returns the destination course_key and overriding fields for the new course.
    Raises DuplicateCourseError and InvalidKeyError

    Expects a JSON body with 'org', 'number' (or legacy 'course'), 'run' and
    optionally 'display_name', 'start' and 'source_course_key'.  Presence of
    'source_course_key' selects the rerun path; otherwise a new course is
    created.
    """
    # Note: GaGlobalCourseCreator has access to create or rerun course (#2150)
    if not auth.user_has_role(request.user, CourseCreatorRole()) \
            and not auth.user_has_role(request.user, GaGlobalCourseCreatorRole()):
        raise PermissionDenied()

    try:
        org = request.json.get('org')
        # 'number' is preferred; 'course' is the legacy key for the same value.
        course = request.json.get('number', request.json.get('course'))
        display_name = request.json.get('display_name')
        # force the start date for reruns and allow us to override start via the client
        start = request.json.get('start', CourseFields.start.default)
        run = request.json.get('run')

        # allow/disable unicode characters in course_id according to settings
        if not settings.FEATURES.get('ALLOW_UNICODE_COURSE_ID'):
            if _has_non_ascii_characters(org) or _has_non_ascii_characters(course) or _has_non_ascii_characters(run):
                return JsonResponse(
                    {'error': _('Special characters not allowed in organization, course number, and course run.')},
                    status=400
                )

        fields = {'start': start}
        if display_name is not None:
            # Reject names longer than the configured maximum before creating anything.
            if len(display_name) > settings.MAX_LENGTH_COURSE_DISPLAY_NAME:
                return JsonResponse({
                    'ErrMsg': _(
                        'Course name, please be up to {max_length} characters.'
                    ).format(max_length=settings.MAX_LENGTH_COURSE_DISPLAY_NAME),
                })
            fields['display_name'] = display_name

        # Set a unique wiki_slug for newly created courses. To maintain active wiki_slugs for
        # existing xml courses this cannot be changed in CourseDescriptor.
        # # TODO get rid of defining wiki slug in this org/course/run specific way and reconcile
        # w/ xmodule.course_module.CourseDescriptor.__init__
        wiki_slug = u"{0}.{1}.{2}".format(org, course, run)
        definition_data = {'wiki_slug': wiki_slug}
        fields.update(definition_data)

        if 'source_course_key' in request.json:
            return _rerun_course(request, org, course, run, fields)
        else:
            return _create_new_course(request, org, course, run, fields)

    except DuplicateCourseError:
        return JsonResponse({
            'ErrMsg': _(
                'There is already a course defined with the same '
                'organization and course number. Please '
                'change either organization or course number to be unique.'
            ),
            'OrgErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'),
            'CourseErrMsg': _(
                'Please change either the organization or '
                'course number so that it is unique.'),
        })
    except InvalidKeyError as error:
        return JsonResponse({
            "ErrMsg": _("Unable to create course '{name}'.\n\n{err}").format(name=display_name, err=error.message)}
        )
def _create_new_course(request, org, number, run, fields):
    """
    Create a new course run in the default modulestore.

    Returns a JsonResponse containing the new course's overview-page URL and
    course key, or a 400 JsonResponse when organizations are enabled and the
    given org short name is unknown.

    Raises DuplicateCourseError if the course already exists.
    """
    org_data = get_organization_by_short_name(org)
    if not org_data and organizations_enabled():
        return JsonResponse(
            {'error': _('You must link this course to an organization in order to continue. '
                        'Organization you selected does not exist in the system, '
                        'you will need to add it to the system')},
            status=400
        )
    default_store = modulestore().default_modulestore.get_modulestore_type()
    new_course = create_new_course_in_store(default_store, request.user, org, number, run, fields)
    # Record the org <-> course association.
    add_organization_course(org_data, new_course.id)
    return JsonResponse({
        'url': reverse_course_url('course_handler', new_course.id),
        'course_key': unicode(new_course.id),
    })
def create_new_course_in_store(store, user, org, number, run, fields):
    """
    Create a course in ``store``, handling instructor enrollment, permissions
    and default field values.

    Separated out because command-line course creation uses this as well as
    the web interface.  Note that ``fields`` is updated in place with the
    default language and web-certificate flag.
    """
    # Apply default language from settings and enable web certificates.
    fields.update({
        'language': getattr(settings, 'DEFAULT_COURSE_LANGUAGE', 'en'),
        'cert_html_view_enabled': True,
    })

    with modulestore().default_store(store):
        # create_course raises DuplicateCourseError when a course with this
        # org/number/run already exists.
        new_course = modulestore().create_course(
            org,
            number,
            run,
            user.id,
            fields=fields,
        )

    # Grant the creating user instructor and staff access to the new course,
    # then initialize its permission structure.
    add_instructor(new_course.id, user, user)
    initialize_permissions(new_course.id, user)
    return new_course
def _rerun_course(request, org, number, run, fields):
    """
    Reruns an existing course.
    Returns the URL for the course listing page.

    The actual copy is performed asynchronously by the ``rerun_course``
    celery task; this function only validates access, reserves the
    destination key and records the action state.

    Raises PermissionDenied when the user cannot write to the source course,
    and DuplicateCourseError when the destination already exists.
    """
    source_course_key = CourseKey.from_string(request.json.get('source_course_key'))

    # verify user has access to the original course
    if not has_studio_write_access(request.user, source_course_key):
        raise PermissionDenied()

    # create destination course key
    store = modulestore()
    with store.default_store('split'):
        destination_course_key = store.make_course_key(org, number, run)

    # verify org course and run don't already exist
    if store.has_course(destination_course_key, ignore_case=True):
        raise DuplicateCourseError(source_course_key, destination_course_key)

    # Make sure user has instructor and staff access to the destination course
    # so the user can see the updated status for that course
    add_instructor(destination_course_key, request.user, request.user)

    # Mark the action as initiated
    CourseRerunState.objects.initiated(source_course_key, destination_course_key, request.user, fields['display_name'])

    # Clear the fields that must be reset for the rerun
    fields['advertised_start'] = None

    # update target_library: carry the source course's targeted libraries
    # over to the rerun (source may be gone, hence the None guard).
    source_course = modulestore().get_course(source_course_key)
    target_libraries = source_course.target_library if source_course else None

    # Rerun the course as a new celery task
    json_fields = json.dumps(fields, cls=EdxJSONEncoder)
    rerun_course.delay(unicode(source_course_key), unicode(destination_course_key), request.user.id, json_fields, target_libraries)

    # Return course listing page
    return JsonResponse({
        'url': reverse_url('course_handler'),
        'destination_course_key': unicode(destination_course_key)
    })
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
@require_http_methods(["GET"])
def course_info_handler(request, course_key_string):
    """
    GET
        html: return html for editing the course info handouts and updates.

    Raises Http404 for an unparsable course key or a course the user cannot
    access; non-HTML Accept headers get a 400.
    """
    try:
        course_key = CourseKey.from_string(course_key_string)
    except InvalidKeyError:
        raise Http404

    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user)
        if not course_module:
            raise Http404
        if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):
            # Custom-logo support is a per-course Studio option.
            custom_logo_enabled = is_available(CUSTOM_LOGO_OPTION_KEY, course_key)
            return render_to_response(
                'course_info.html',
                {
                    'context_course': course_module,
                    'updates_url': reverse_course_url('course_info_update_handler', course_key),
                    'handouts_locator': course_key.make_usage_key('course_info', 'handouts'),
                    'base_asset_url': StaticContent.get_base_url_path_for_course_assets(course_module.id),
                    'push_notification_enabled': push_notification_enabled(),
                    'custom_logo_enabled': custom_logo_enabled,
                    'custom_logo_for_url': custom_logo_url(course_module),
                    'library_option': is_available(LIBRARY_OPTION_KEY, course_key)
                }
            )
        else:
            return HttpResponseBadRequest("Only supports html requests")
# pylint: disable=unused-argument
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
@expect_json
def course_info_update_handler(request, course_key_string, provided_id=None):
    """
    restful CRUD operations on course_info updates.
    provided_id should be none if it's new (create) and index otherwise.
    GET
        json: return the course info update models
    POST
        json: create an update
    PUT or DELETE
        json: change an existing update
    """
    if 'application/json' not in request.META.get('HTTP_ACCEPT', 'application/json'):
        return HttpResponseBadRequest("Only supports json requests")

    course_key = CourseKey.from_string(course_key_string)
    usage_key = course_key.make_usage_key('course_info', 'updates')
    if provided_id == '':
        provided_id = None

    # check that logged in user has permissions to this item (GET shouldn't require this level?)
    if not has_studio_write_access(request.user, usage_key.course_key):
        raise PermissionDenied()

    if request.method == 'GET':
        course_updates = get_course_updates(usage_key, provided_id, request.user.id)
        if isinstance(course_updates, dict) and course_updates.get('error'):
            return JsonResponse(course_updates, course_updates.get('status', 400))
        else:
            return JsonResponse(course_updates)
    elif request.method == 'DELETE':
        try:
            return JsonResponse(delete_course_update(usage_key, request.json, provided_id, request.user))
        # Was a bare ``except:`` — that also swallowed SystemExit and
        # KeyboardInterrupt and hid the failure; narrow to Exception and log.
        except Exception:
            log.exception("Failed to delete course update for %s", course_key)
            return HttpResponseBadRequest(
                "Failed to delete",
                content_type="text/plain"
            )
    # can be either and sometimes django is rewriting one to the other:
    elif request.method in ('POST', 'PUT'):
        try:
            return JsonResponse(update_course_updates(usage_key, request.json, provided_id, request.user))
        except Exception:
            log.exception("Failed to save course update for %s", course_key)
            return HttpResponseBadRequest(
                "Failed to save",
                content_type="text/plain"
            )
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "PUT", "POST"))
@expect_json
def settings_handler(request, course_key_string):
    """
    Course settings for dates and about pages
    GET
        html: get the page
        json: get the CourseDetails model
    PUT
        json: update the Course and About xblocks through the CourseDetails model

    The JSON update path also handles the prerequisite-course and
    entrance-exam features when they are enabled.
    """
    course_key = CourseKey.from_string(course_key_string)
    credit_eligibility_enabled = settings.FEATURES.get('ENABLE_CREDIT_ELIGIBILITY', False)
    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user)
        if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
            upload_asset_url = reverse_course_url('assets_handler', course_key)

            # see if the ORG of this course can be attributed to a 'Microsite'. In that case, the
            # course about page should be editable in Studio
            marketing_site_enabled = microsite.get_value_for_org(
                course_module.location.org,
                'ENABLE_MKTG_SITE',
                settings.FEATURES.get('ENABLE_MKTG_SITE', False)
            )

            about_page_editable = not marketing_site_enabled
            # Note: GaGlobalCourseCreator can edit enrollment end date the course (#2150)
            enrollment_end_editable = GlobalStaff().has_user(request.user) or not marketing_site_enabled or \
                GaGlobalCourseCreatorRole().has_user(request.user)
            short_description_editable = settings.FEATURES.get('EDITABLE_SHORT_DESCRIPTION', True)
            self_paced_enabled = SelfPacedConfiguration.current().enabled
            custom_logo_enabled = is_available(CUSTOM_LOGO_OPTION_KEY, course_key)

            settings_context = {
                'context_course': course_module,
                'course_locator': course_key,
                'lms_link_for_about_page': utils.get_lms_link_for_about_page(course_key),
                'course_image_url': course_image_url(course_module),
                'details_url': reverse_course_url('settings_handler', course_key),
                'about_page_editable': about_page_editable,
                'short_description_editable': short_description_editable,
                'upload_asset_url': upload_asset_url,
                'course_handler_url': reverse_course_url('course_handler', course_key),
                'language_options': settings.ALL_LANGUAGES,
                'credit_eligibility_enabled': credit_eligibility_enabled,
                'is_credit_course': False,
                'show_min_grade_warning': False,
                'enrollment_end_editable': enrollment_end_editable,
                'is_prerequisite_courses_enabled': is_prerequisite_courses_enabled(),
                'is_entrance_exams_enabled': is_entrance_exams_enabled(),
                'self_paced_enabled': self_paced_enabled,
                'custom_logo_enabled': custom_logo_enabled,
                'custom_logo_for_url': custom_logo_url(course_module),
                'library_option': is_available(LIBRARY_OPTION_KEY, course_key),
                'use_jwplayer': is_using_jwplayer_course(course_module),
            }
            if is_prerequisite_courses_enabled():
                courses, in_process_course_actions = get_courses_accessible_to_user(request)
                # exclude current course from the list of available courses
                courses = [course for course in courses if course.id != course_key]
                if courses:
                    courses = _remove_in_process_courses(courses, in_process_course_actions)
                settings_context.update({'possible_pre_requisite_courses': courses})

            if credit_eligibility_enabled:
                if is_credit_course(course_key):
                    # get and all credit eligibility requirements
                    credit_requirements = get_credit_requirements(course_key)
                    # pair together requirements with same 'namespace' values
                    paired_requirements = {}
                    for requirement in credit_requirements:
                        namespace = requirement.pop("namespace")
                        paired_requirements.setdefault(namespace, []).append(requirement)

                    # if 'minimum_grade_credit' of a course is not set or 0 then
                    # show warning message to course author.
                    show_min_grade_warning = False if course_module.minimum_grade_credit > 0 else True
                    settings_context.update(
                        {
                            'is_credit_course': True,
                            'credit_requirements': paired_requirements,
                            'show_min_grade_warning': show_min_grade_warning,
                        }
                    )

            return render_to_response('settings.html', settings_context)
        elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
            if request.method == 'GET':
                course_details = CourseDetails.fetch(course_key)
                return JsonResponse(
                    course_details,
                    # encoder serializes dates, old locations, and instances
                    encoder=CourseSettingsEncoder
                )
            # For every other possible method type submitted by the caller...
            else:
                # if pre-requisite course feature is enabled set pre-requisite course
                if is_prerequisite_courses_enabled():
                    prerequisite_course_keys = request.json.get('pre_requisite_courses', [])
                    if prerequisite_course_keys:
                        if not all(is_valid_course_key(course_key) for course_key in prerequisite_course_keys):
                            return JsonResponseBadRequest({"error": _("Invalid prerequisite course key")})
                        set_prerequisite_courses(course_key, prerequisite_course_keys)

                # If the entrance exams feature has been enabled, we'll need to check for some
                # feature-specific settings and handle them accordingly
                # We have to be careful that we're only executing the following logic if we actually
                # need to create or delete an entrance exam from the specified course
                if is_entrance_exams_enabled():
                    course_entrance_exam_present = course_module.entrance_exam_enabled
                    entrance_exam_enabled = request.json.get('entrance_exam_enabled', '') == 'true'
                    ee_min_score_pct = request.json.get('entrance_exam_minimum_score_pct', None)
                    # If the entrance exam box on the settings screen has been checked...
                    if entrance_exam_enabled:
                        # Load the default minimum score threshold from settings, then try to override it
                        entrance_exam_minimum_score_pct = float(settings.ENTRANCE_EXAM_MIN_SCORE_PCT)
                        if ee_min_score_pct:
                            entrance_exam_minimum_score_pct = float(ee_min_score_pct)
                        if entrance_exam_minimum_score_pct.is_integer():
                            entrance_exam_minimum_score_pct = entrance_exam_minimum_score_pct / 100
                        entrance_exam_minimum_score_pct = unicode(entrance_exam_minimum_score_pct)
                        # If there's already an entrance exam defined, we'll update the existing one
                        if course_entrance_exam_present:
                            exam_data = {
                                'entrance_exam_minimum_score_pct': entrance_exam_minimum_score_pct
                            }
                            update_entrance_exam(request, course_key, exam_data)
                        # If there's no entrance exam defined, we'll create a new one
                        else:
                            create_entrance_exam(request, course_key, entrance_exam_minimum_score_pct)
                    # If the entrance exam box on the settings screen has been unchecked,
                    # and the course has an entrance exam attached...
                    elif not entrance_exam_enabled and course_entrance_exam_present:
                        delete_entrance_exam(request, course_key)

                # Perform the normal update workflow for the CourseDetails model
                return JsonResponse(
                    CourseDetails.update_from_json(course_key, request.json, request.user),
                    encoder=CourseSettingsEncoder
                )
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
@expect_json
def grading_handler(request, course_key_string, grader_index=None):
    """
    Course Grading policy configuration
    GET
        html: get the page
        json no grader_index: get the CourseGrading model (graceperiod, cutoffs, and graders)
        json w/ grader_index: get the specific grader
    PUT
        json no grader_index: update the Course through the CourseGrading model
        json w/ grader_index: create or update the specific grader (create if index out of range)
    """
    course_key = CourseKey.from_string(course_key_string)
    # Expose the LMS certificate page only to global staff or users with an
    # instructor/staff role on this course.
    # NOTE(review): scheme is hard-coded to http:// — verify this is intended
    # for deployments served over https.
    if request.user.is_staff:
        certificate_url = 'http://' + str(settings.LMS_BASE) + '/certificate'
    else:
        studio_user_count = CourseAccessRole.objects.filter(user=request.user, course_id=course_key,
                                                            role__in=['instructor', 'staff']).count()
        if studio_user_count:
            certificate_url = 'http://' + str(settings.LMS_BASE) + '/certificate'
        else:
            certificate_url = ''

    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user)

        if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':
            course_details = CourseGradingModel.fetch(course_key)

            return render_to_response('settings_graders.html', {
                'context_course': course_module,
                'course_locator': course_key,
                'course_details': course_details,
                'grading_url': reverse_course_url('grading_handler', course_key),
                'is_credit_course': is_credit_course(course_key),
                'library_option': is_available(LIBRARY_OPTION_KEY, course_key),
                'certificate_url': certificate_url,
            })
        elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
            if request.method == 'GET':
                if grader_index is None:
                    return JsonResponse(
                        CourseGradingModel.fetch(course_key),
                        # encoder serializes dates, old locations, and instances
                        encoder=CourseSettingsEncoder
                    )
                else:
                    return JsonResponse(CourseGradingModel.fetch_grader(course_key, grader_index))
            elif request.method in ('POST', 'PUT'):  # post or put, doesn't matter.
                # update credit course requirements if 'minimum_grade_credit'
                # field value is changed
                if 'minimum_grade_credit' in request.json:
                    update_credit_course_requirements.delay(unicode(course_key))

                # None implies update the whole model (cutoffs, graceperiod, and graders) not a specific grader
                if grader_index is None:
                    return JsonResponse(
                        CourseGradingModel.update_from_json(course_key, request.json, request.user),
                        encoder=CourseSettingsEncoder
                    )
                else:
                    return JsonResponse(
                        CourseGradingModel.update_grader_from_json(course_key, request.json, request.user)
                    )
            elif request.method == "DELETE" and grader_index is not None:
                CourseGradingModel.delete_grader(course_key, grader_index, request.user)
                return JsonResponse()
def _refresh_course_tabs(request, course_module):
    """
    Automatically adds/removes tabs if changes to the course require them.

    Mutates ``course_module.tabs`` in memory only when the tab list actually
    changed; the caller is responsible for persisting ``course_module``.

    Raises:
        InvalidTabsException: raised if there's a problem with the new version of the tabs.
    """
    def update_tab(tabs, tab_type, tab_enabled):
        """
        Adds or removes a course tab based upon whether it is enabled.
        """
        tab_panel = {
            "type": tab_type.type,
        }
        # Membership relies on CourseTab comparing equal to its dict form.
        has_tab = tab_panel in tabs
        if tab_enabled and not has_tab:
            tabs.append(CourseTab.from_json(tab_panel))
        elif not tab_enabled and has_tab:
            tabs.remove(tab_panel)

    # Work on a copy so the course is only touched if something changed.
    course_tabs = copy.copy(course_module.tabs)

    # Additionally update any tabs that are provided by non-dynamic course views
    for tab_type in CourseTabPluginManager.get_tab_types():
        if not tab_type.is_dynamic and tab_type.is_default:
            tab_enabled = tab_type.is_enabled(course_module, user=request.user)
            update_tab(course_tabs, tab_type, tab_enabled)

    CourseTabList.validate_tabs(course_tabs)

    # Save the tabs into the course if they have been changed
    if course_tabs != course_module.tabs:
        course_module.tabs = course_tabs
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT"))
@expect_json
def advanced_settings_handler(request, course_key_string):
    """
    Course settings configuration
    GET
        html: get the page
        json: get the model
    PUT, POST
        json: update the Course's settings. The payload is a json rep of the
            metadata dicts.

    On update, settings are validated first, then course tabs are refreshed
    (some advanced settings toggle tabs) before anything is written to mongo.
    """
    course_key = CourseKey.from_string(course_key_string)
    with modulestore().bulk_operations(course_key):
        course_module = get_course_and_check_access(course_key, request.user)
        if 'text/html' in request.META.get('HTTP_ACCEPT', '') and request.method == 'GET':

            return render_to_response('settings_advanced.html', {
                'context_course': course_module,
                'advanced_dict': CourseMetadata.fetch(course_module),
                'advanced_settings_url': reverse_course_url('advanced_settings_handler', course_key),
                'library_option': is_available(LIBRARY_OPTION_KEY, course_key)
            })
        elif 'application/json' in request.META.get('HTTP_ACCEPT', ''):
            if request.method == 'GET':
                return JsonResponse(CourseMetadata.fetch(course_module))
            else:
                try:
                    # validate data formats and update the course module.
                    # Note: don't update mongo yet, but wait until after any tabs are changed
                    is_valid, errors, updated_data = CourseMetadata.validate_and_update_from_json(
                        course_module,
                        request.json,
                        user=request.user,
                    )

                    if is_valid:
                        try:
                            # update the course tabs if required by any setting changes
                            _refresh_course_tabs(request, course_module)
                        except InvalidTabsException as err:
                            log.exception(err.message)
                            response_message = [
                                {
                                    'message': _('An error occurred while trying to save your tabs'),
                                    'model': {'display_name': _('Tabs Exception')}
                                }
                            ]
                            return JsonResponseBadRequest(response_message)

                        # now update mongo
                        modulestore().update_item(course_module, request.user.id)

                        return JsonResponse(updated_data)
                    else:
                        return JsonResponseBadRequest(errors)

                # Handle all errors that validation doesn't catch
                except (TypeError, ValueError, InvalidTabsException) as err:
                    return HttpResponseBadRequest(
                        django.utils.html.escape(err.message),
                        content_type="text/plain"
                    )
class TextbookValidationError(Exception):
    """Raised when PDF-textbook JSON input fails validation."""
    pass
def validate_textbooks_json(text):
    """
    Parse and validate ``text`` as a JSON list of PDF textbooks.

    Each entry is validated via ``validate_textbook_json`` and any explicit
    ids must be unique across the list.  (The original docstring described a
    single textbook; this function handles the whole list.)

    Raises TextbookValidationError on any problem; returns the parsed list.
    """
    try:
        textbooks = json.loads(text)
    except ValueError:
        raise TextbookValidationError("invalid JSON")
    if not isinstance(textbooks, (list, tuple)):
        raise TextbookValidationError("must be JSON list")
    for textbook in textbooks:
        validate_textbook_json(textbook)
    # Explicitly-specified ids must not repeat.
    all_ids = [textbook["id"] for textbook in textbooks if "id" in textbook]
    if len(all_ids) != len(set(all_ids)):
        raise TextbookValidationError("IDs must be unique")
    return textbooks
def validate_textbook_json(textbook):
    """
    Validate a single PDF textbook entry.

    ``textbook`` may be a JSON string or an already-parsed dict; it must be a
    JSON object with a non-empty "tab_title", and any "id" must start with a
    digit.  (The original docstring described a list; this function handles
    one textbook.)

    Raises TextbookValidationError on any problem; returns the parsed dict.
    """
    if isinstance(textbook, basestring):
        try:
            textbook = json.loads(textbook)
        except ValueError:
            raise TextbookValidationError("invalid JSON")
    if not isinstance(textbook, dict):
        raise TextbookValidationError("must be JSON object")
    if not textbook.get("tab_title"):
        raise TextbookValidationError("must have tab_title")
    textbook_id = unicode(textbook.get("id", ""))
    if textbook_id and not textbook_id[0].isdigit():
        raise TextbookValidationError("textbook ID must start with a digit")
    return textbook
def assign_textbook_id(textbook, used_ids=()):
    """
    Derive an id for ``textbook`` that does not collide with ``used_ids``.

    The id is based on the cleaned tab title; a random leading digit is added
    when needed (ids must start with a digit), and random lowercase letters
    are appended until the id is unique.
    """
    candidate = Location.clean(textbook["tab_title"])
    if not candidate[0].isdigit():
        # ids must start with a digit, so prepend a random one
        candidate = random.choice(string.digits) + candidate
    while candidate in used_ids:
        # extend with random ASCII letters until unique
        candidate = candidate + random.choice(string.ascii_lowercase)
    return candidate
@require_http_methods(("GET", "POST", "PUT"))
@login_required
@ensure_csrf_cookie
def textbooks_list_handler(request, course_key_string):
"""
A RESTful handler for textbook collections.
GET
html: return textbook list page (Backbone application)
json: return JSON representation of all textbooks in this course
POST
json: create a new textbook for this course
PUT
json: overwrite all textbooks in the course with the given list
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = get_course_and_check_access(course_key, request.user)
if "application/json" not in request.META.get('HTTP_ACCEPT', 'text/html'):
# return HTML page
upload_asset_url = reverse_course_url('assets_handler', course_key)
textbook_url = reverse_course_url('textbooks_list_handler', course_key)
return render_to_response('textbooks.html', {
'context_course': course,
'textbooks': course.pdf_textbooks,
'upload_asset_url': upload_asset_url,
'textbook_url': textbook_url,
'library_option': is_available(LIBRARY_OPTION_KEY, course_key)
})
# from here on down, we know the client has requested JSON
if request.method == 'GET':
return JsonResponse(course.pdf_textbooks)
elif request.method == 'PUT':
try:
textbooks = validate_textbooks_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
tids = set(t["id"] for t in textbooks if "id" in t)
for textbook in textbooks:
if "id" not in textbook:
tid = assign_textbook_id(textbook, tids)
textbook["id"] = tid
tids.add(tid)
if not any(tab['type'] == 'pdf_textbooks' for tab in course.tabs):
course.tabs.append(CourseTab.load('pdf_textbooks'))
course.pdf_textbooks = textbooks
store.update_item(course, request.user.id)
return JsonResponse(course.pdf_textbooks)
elif request.method == 'POST':
# create a new textbook for the course
try:
textbook = validate_textbook_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
if not textbook.get("id"):
tids = set(t["id"] for t in course.pdf_textbooks if "id" in t)
textbook["id"] = assign_textbook_id(textbook, tids)
existing = course.pdf_textbooks
existing.append(textbook)
course.pdf_textbooks = existing
if not any(tab['type'] == 'pdf_textbooks' for tab in course.tabs):
course.tabs.append(CourseTab.load('pdf_textbooks'))
store.update_item(course, request.user.id)
resp = JsonResponse(textbook, status=201)
resp["Location"] = reverse_course_url(
'textbooks_detail_handler',
course.id,
kwargs={'textbook_id': textbook["id"]}
)
return resp
@login_required
@ensure_csrf_cookie
@require_http_methods(("GET", "POST", "PUT", "DELETE"))
def textbooks_detail_handler(request, course_key_string, textbook_id):
"""
JSON API endpoint for manipulating a textbook via its internal ID.
Used by the Backbone application.
GET
json: return JSON representation of textbook
POST or PUT
json: update textbook based on provided information
DELETE
json: remove textbook
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course_module = get_course_and_check_access(course_key, request.user)
matching_id = [tb for tb in course_module.pdf_textbooks
if unicode(tb.get("id")) == unicode(textbook_id)]
if matching_id:
textbook = matching_id[0]
else:
textbook = None
if request.method == 'GET':
if not textbook:
return JsonResponse(status=404)
return JsonResponse(textbook)
elif request.method in ('POST', 'PUT'): # can be either and sometimes
# django is rewriting one to the other
try:
new_textbook = validate_textbook_json(request.body)
except TextbookValidationError as err:
return JsonResponse({"error": err.message}, status=400)
new_textbook["id"] = textbook_id
if textbook:
i = course_module.pdf_textbooks.index(textbook)
new_textbooks = course_module.pdf_textbooks[0:i]
new_textbooks.append(new_textbook)
new_textbooks.extend(course_module.pdf_textbooks[i + 1:])
course_module.pdf_textbooks = new_textbooks
else:
course_module.pdf_textbooks.append(new_textbook)
store.update_item(course_module, request.user.id)
return JsonResponse(new_textbook, status=201)
elif request.method == 'DELETE':
if not textbook:
return JsonResponse(status=404)
i = course_module.pdf_textbooks.index(textbook)
remaining_textbooks = course_module.pdf_textbooks[0:i]
remaining_textbooks.extend(course_module.pdf_textbooks[i + 1:])
course_module.pdf_textbooks = remaining_textbooks
store.update_item(course_module, request.user.id)
return JsonResponse()
def remove_content_or_experiment_group(request, store, course, configuration, group_configuration_id, group_id=None):
"""
Remove content group or experiment group configuration only if it's not in use.
"""
configuration_index = course.user_partitions.index(configuration)
if configuration.scheme.name == RANDOM_SCHEME:
usages = GroupConfiguration.get_content_experiment_usage_info(store, course)
used = int(group_configuration_id) in usages
if used:
return JsonResponse(
{"error": _("This group configuration is in use and cannot be deleted.")},
status=400
)
course.user_partitions.pop(configuration_index)
elif configuration.scheme.name == COHORT_SCHEME:
if not group_id:
return JsonResponse(status=404)
group_id = int(group_id)
usages = GroupConfiguration.get_content_groups_usage_info(store, course)
used = group_id in usages
if used:
return JsonResponse(
{"error": _("This content group is in use and cannot be deleted.")},
status=400
)
matching_groups = [group for group in configuration.groups if group.id == group_id]
if matching_groups:
group_index = configuration.groups.index(matching_groups[0])
configuration.groups.pop(group_index)
else:
return JsonResponse(status=404)
course.user_partitions[configuration_index] = configuration
store.update_item(course, request.user.id)
return JsonResponse(status=204)
@require_http_methods(("GET", "POST"))
@login_required
@ensure_csrf_cookie
def group_configurations_list_handler(request, course_key_string):
"""
A RESTful handler for Group Configurations
GET
html: return Group Configurations list page (Backbone application)
POST
json: create new group configuration
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = get_course_and_check_access(course_key, request.user)
if 'text/html' in request.META.get('HTTP_ACCEPT', 'text/html'):
group_configuration_url = reverse_course_url('group_configurations_list_handler', course_key)
course_outline_url = reverse_course_url('course_handler', course_key)
should_show_experiment_groups = are_content_experiments_enabled(course)
if should_show_experiment_groups:
experiment_group_configurations = GroupConfiguration.get_split_test_partitions_with_usage(store, course)
else:
experiment_group_configurations = None
content_group_configuration = GroupConfiguration.get_or_create_content_group(store, course)
return render_to_response('group_configurations.html', {
'context_course': course,
'group_configuration_url': group_configuration_url,
'course_outline_url': course_outline_url,
'experiment_group_configurations': experiment_group_configurations,
'should_show_experiment_groups': should_show_experiment_groups,
'content_group_configuration': content_group_configuration,
'library_option': is_available(LIBRARY_OPTION_KEY, course_key)
})
elif "application/json" in request.META.get('HTTP_ACCEPT'):
if request.method == 'POST':
# create a new group configuration for the course
try:
new_configuration = GroupConfiguration(request.body, course).get_user_partition()
except GroupConfigurationsValidationError as err:
return JsonResponse({"error": err.message}, status=400)
course.user_partitions.append(new_configuration)
response = JsonResponse(new_configuration.to_json(), status=201)
response["Location"] = reverse_course_url(
'group_configurations_detail_handler',
course.id,
kwargs={'group_configuration_id': new_configuration.id}
)
store.update_item(course, request.user.id)
return response
else:
return HttpResponse(status=406)
@login_required
@ensure_csrf_cookie
@require_http_methods(("POST", "PUT", "DELETE"))
def group_configurations_detail_handler(request, course_key_string, group_configuration_id, group_id=None):
"""
JSON API endpoint for manipulating a group configuration via its internal ID.
Used by the Backbone application.
POST or PUT
json: update group configuration based on provided information
"""
course_key = CourseKey.from_string(course_key_string)
store = modulestore()
with store.bulk_operations(course_key):
course = get_course_and_check_access(course_key, request.user)
matching_id = [p for p in course.user_partitions
if unicode(p.id) == unicode(group_configuration_id)]
if matching_id:
configuration = matching_id[0]
else:
configuration = None
if request.method in ('POST', 'PUT'): # can be either and sometimes
# django is rewriting one to the other
try:
new_configuration = GroupConfiguration(request.body, course, group_configuration_id).get_user_partition()
except GroupConfigurationsValidationError as err:
return JsonResponse({"error": err.message}, status=400)
if configuration:
index = course.user_partitions.index(configuration)
course.user_partitions[index] = new_configuration
else:
course.user_partitions.append(new_configuration)
store.update_item(course, request.user.id)
configuration = GroupConfiguration.update_usage_info(store, course, new_configuration)
return JsonResponse(configuration, status=201)
elif request.method == "DELETE":
if not configuration:
return JsonResponse(status=404)
return remove_content_or_experiment_group(
request=request,
store=store,
course=course,
configuration=configuration,
group_configuration_id=group_configuration_id,
group_id=group_id
)
def are_content_experiments_enabled(course):
"""
Returns True if content experiments have been enabled for the course.
"""
return (
SPLIT_TEST_COMPONENT_TYPE in ADVANCED_COMPONENT_TYPES and
SPLIT_TEST_COMPONENT_TYPE in course.advanced_modules
)
def _get_course_creator_status(user):
"""
Helper method for returning the course creator status for a particular user,
taking into account the values of DISABLE_COURSE_CREATION and ENABLE_CREATOR_GROUP.
If the user passed in has not previously visited the index page, it will be
added with status 'unrequested' if the course creator group is in use.
"""
# Note: GaGlobalCourseCreator can create a course (#2150)
if user.is_staff or GaGlobalCourseCreatorRole().has_user(user):
course_creator_status = 'granted'
elif settings.FEATURES.get('DISABLE_COURSE_CREATION', False):
course_creator_status = 'disallowed_for_this_site'
elif settings.FEATURES.get('ENABLE_CREATOR_GROUP', False):
course_creator_status = get_course_creator_status(user)
if course_creator_status is None:
# User not grandfathered in as an existing user, has not previously visited the dashboard page.
# Add the user to the course creator admin table with status 'unrequested'.
add_user_with_status_unrequested(user)
course_creator_status = get_course_creator_status(user)
else:
course_creator_status = 'granted'
return course_creator_status<|fim▁end|> | |
<|file_name|>ChatFromSimulator.ts<|end_file_name|><|fim▁begin|>import { Constants } from '../../utilities'
import { UUID } from '../types'
import Delegate from './Delegate'
class ChatFromSimulator extends Delegate {
public handle(packet): void {
for (const data of packet.data.chatData) {
const chatter = {
key: data.source,<|fim▁hole|> owner: UUID.zero,
position: data.position
}
if (data.source === Constants.ChatSources.OBJECT) {
chatter.owner = data.owner
}
this.client.nearby.emit('message', chatter, data.message.toString().slice(0, -1))
}
}
get waiting(): boolean {
return !!this.client.nearby.listenerCount('message')
}
}
export default ChatFromSimulator<|fim▁end|> | name: data.fromName.toString().slice(0, -1),
type: data.sourceType, |
<|file_name|>OOMPpart_RESE_0805_X_O271_67.py<|end_file_name|><|fim▁begin|>import OOMP
<|fim▁hole|>newPart.addTag("oompColor", "X")
newPart.addTag("oompDesc", "O271")
newPart.addTag("oompIndex", "67")
OOMP.parts.append(newPart)<|fim▁end|> | newPart = OOMP.oompItem(9452)
newPart.addTag("oompType", "RESE")
newPart.addTag("oompSize", "0805") |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import sinon from 'sinon';
import chai from 'chai';
import sinonChai from "sinon-chai";<|fim▁hole|>import subset from 'chai-subset';
window.sinon = sinon;
window.chai = chai;
chai.use(sinonChai);
chai.use(subset);
window.expect = chai.expect;
window.should = chai.should;
import * as fixtures from './fixtures.js';
window.fixtures = fixtures;
var context = require.context('.', true, /.spec.js(x?)$/); //make sure you have your directory and regex test set correctly!
context.keys().forEach(context);<|fim▁end|> | |
<|file_name|>DateFunctionsTest.cpp<|end_file_name|><|fim▁begin|>////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2020 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Michael Hackstein
/// @author Copyright 2018, ArangoDB GmbH, Cologne, Germany
////////////////////////////////////////////////////////////////////////////////
#include "gtest/gtest.h"
#include "fakeit.hpp"
#include "Aql/AqlValue.h"
#include "Aql/AstNode.h"
#include "Aql/ExpressionContext.h"
#include "Aql/Function.h"
#include "Aql/Functions.h"
#include "Containers/SmallVector.h"
#include "Transaction/Methods.h"
#include <velocypack/Builder.h>
#include <velocypack/Iterator.h>
#include <velocypack/Parser.h>
#include <velocypack/Slice.h>
#include <velocypack/velocypack-aliases.h>
#include <cmath>
using namespace arangodb;
using namespace arangodb::aql;
using namespace arangodb::containers;
namespace arangodb {
namespace tests {
namespace date_functions_aql {
struct TestDateModifierFlagFactory {
public:
enum FLAGS { INVALID, MILLI, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, YEAR };
static std::vector<std::string> createAllFlags(FLAGS const& e) {
switch (e) {
case INVALID:
return {"abc"};
case MILLI:
return {"f", "millisecond", "milliseconds", "MiLLiSeCOnd"};
case SECOND:
return {"s", "second", "seconds", "SeCoNd"};
case MINUTE:
return {"i", "minute", "minutes", "MiNutEs"};
case HOUR:
return {"h", "hour", "hours", "HoUr"};
case DAY:
return {"d", "day", "days", "daYs"};
case WEEK:
return {"w", "week", "weeks", "WeEkS"};
case MONTH:
return {"m", "month", "months", "mOnTHs"};
case YEAR:
return {"y", "year", "years", "yeArS"};
}
return {"abc"};
}
static std::string createFlag(FLAGS const& e) {
switch (e) {
case INVALID:
return "abc";
case MILLI:
return "f";
case SECOND:
return "s";
case MINUTE:
return "i";
case HOUR:
return "h";
case DAY:
return "d";
case WEEK:
return "w";
case MONTH:
return "m";
case YEAR:
return "y";
}
}
};
namespace is_datestring {
struct TestDate {
public:
TestDate(std::string const json, bool v) : _date(nullptr), _isValid(v) {
// Make sure to only insert valid JSON.
// We are not testing the parser here.
_date = arangodb::velocypack::Parser::fromJson(json);
}
std::string const testName() const {
return _date->toJson() + " => " + (_isValid ? "true" : "false");
}
void buildParams(VPackFunctionParameters& input) const {
input.emplace_back(_date->slice());
}
void validateResult(AqlValue const& result) const {
ASSERT_TRUE(result.isBoolean());
ASSERT_EQ(result.toBoolean(), _isValid);
}
private:
std::shared_ptr<arangodb::velocypack::Builder> _date;
bool _isValid;
};
TEST(DateFunctionsTest, IS_DATESTRING) {
fakeit::Mock<ExpressionContext> expressionContextMock;
ExpressionContext& expressionContext = expressionContextMock.get();
std::vector<TestDate> testees = {
#include "IS_DATESTRING.testcases"
};
arangodb::aql::Function fun("IS_DATESTRING", &Functions::IsDatestring);
arangodb::aql::AstNode node(NODE_TYPE_FCALL);
node.setData(static_cast<void const*>(&fun));
for (auto const& testee : testees) {
SmallVector<AqlValue>::allocator_type::arena_type arena;
SmallVector<AqlValue> params{arena};
testee.buildParams(params);
AqlValue res = Functions::IsDatestring(&expressionContext, node, params);
testee.validateResult(res);
// Free input parameters
for (auto& it : params) {
it.destroy();
}
}
}
} // namespace is_datestring
namespace date_compare {
struct TestDate {
public:
TestDate(std::vector<std::string> const args, bool v) : _isValid(v) {
_argBuilder.openArray();
for (auto const& it : args) {
_argBuilder.add(VPackValue(it));
}
_argBuilder.close();
}
std::string const testName() const {
return "Input: " + _argBuilder.toJson() + " => " + (_isValid ? "true" : "false");
}
void buildParams(VPackFunctionParameters& input) const {
for (VPackSlice it : VPackArrayIterator(_argBuilder.slice())) {
input.emplace_back(it);
}
}
void validateResult(AqlValue const& result) const {
ASSERT_TRUE(result.isBoolean());
ASSERT_EQ(result.toBoolean(), _isValid);
}
private:
arangodb::velocypack::Builder _argBuilder;
bool _isValid;
};
TEST(DateFunctionsTest, DATE_COMPARE) {
fakeit::Mock<ExpressionContext> expressionContextMock;
ExpressionContext& expressionContext = expressionContextMock.get();
std::vector<TestDate> testees = {
#include "DATE_COMPARE.testcases"
};
arangodb::aql::Function fun("DATE_COMPARE", &Functions::DateCompare);
arangodb::aql::AstNode node(NODE_TYPE_FCALL);
node.setData(static_cast<void const*>(&fun));
for (auto const& testee : testees) {
SmallVector<AqlValue>::allocator_type::arena_type arena;
SmallVector<AqlValue> params{arena};
testee.buildParams(params);
AqlValue res = Functions::DateCompare(&expressionContext, node, params);
testee.validateResult(res);
// Free input parameters
for (auto& it : params) {
it.destroy();
}
}
}
} // namespace date_compare
namespace date_diff {
class DateFunctionsTestDateDiff : public ::testing::Test {
protected:
fakeit::Mock<ExpressionContext> expressionContextMock;
ExpressionContext& expressionContext;
fakeit::Mock<transaction::Methods> trxMock;
transaction::Methods& trx;
// These dates differ by:
// 1 year
// 2 months
// 1 week
// 12 days
// 4 hours
// 5 minutes
// 6 seconds
// 123 milliseconds
std::string const earlierDate;
std::string const laterDate;
// Exact milisecond difference
double dateDiffMillis;
// Average number of days per month in the given dates
double avgDaysPerMonth;
SmallVector<AqlValue>::allocator_type::arena_type arena;
SmallVector<AqlValue> params;
VPackBuilder dateBuilder;
VPackBuilder flagBuilder;
VPackBuilder switchBuilder;
DateFunctionsTestDateDiff()
: expressionContext(expressionContextMock.get()),
trx(trxMock.get()),
earlierDate("2000-04-01T02:48:42.123"),
laterDate("2001-06-13T06:53:48.246"),
dateDiffMillis(37857906123),
avgDaysPerMonth(365.0/12.0),
params(arena) {
dateBuilder.openArray();
dateBuilder.add(VPackValue(earlierDate));
dateBuilder.add(VPackValue(laterDate));
dateBuilder.close();
}
void testCombinations(std::string const& f, double expected) {
arangodb::aql::Function fun("DATE_DIFF", &Functions::DateDiff);
arangodb::aql::AstNode node(NODE_TYPE_FCALL);
node.setData(static_cast<void const*>(&fun));
{
double eps = 0.05;
params.clear();
flagBuilder.clear();
flagBuilder.add(VPackValue(f));
params.emplace_back(dateBuilder.slice().at(0));
params.emplace_back(dateBuilder.slice().at(1));
params.emplace_back(flagBuilder.slice());
switchBuilder.add(VPackValue(true));
params.emplace_back(switchBuilder.slice());
AqlValue res = Functions::DateDiff(&expressionContext, node, params);
ASSERT_TRUE(res.isNumber());
double out = res.toDouble();
ASSERT_GE(out, expected - eps);
ASSERT_LE(out, expected + eps);
for (auto& it : params) {
it.destroy();
}
}
{
params.clear();
flagBuilder.clear();
flagBuilder.add(VPackValue(f));
params.emplace_back(dateBuilder.slice().at(0));
params.emplace_back(dateBuilder.slice().at(1));
params.emplace_back(flagBuilder.slice());
switchBuilder.add(VPackValue(false));
params.emplace_back(switchBuilder.slice());
AqlValue res = Functions::DateDiff(&expressionContext, node, params);
ASSERT_TRUE(res.isNumber());
ASSERT_EQ(std::round(res.toDouble()), std::round(expected));
for (auto& it : params) {
it.destroy();
}
}
{
double eps = 0.05;
params.clear();<|fim▁hole|> flagBuilder.add(VPackValue(f));
params.emplace_back(dateBuilder.slice().at(1));
params.emplace_back(dateBuilder.slice().at(0));
params.emplace_back(flagBuilder.slice());
switchBuilder.add(VPackValue(true));
params.emplace_back(switchBuilder.slice());
AqlValue res = Functions::DateDiff(&expressionContext, node, params);
ASSERT_TRUE(res.isNumber());
double out = res.toDouble();
ASSERT_GE(out, -(expected + eps));
ASSERT_LE(out, -(expected - eps));
for (auto& it : params) {
it.destroy();
}
}
{
params.clear();
flagBuilder.clear();
flagBuilder.add(VPackValue(f));
params.emplace_back(dateBuilder.slice().at(1));
params.emplace_back(dateBuilder.slice().at(0));
params.emplace_back(flagBuilder.slice());
switchBuilder.add(VPackValue(false));
params.emplace_back(switchBuilder.slice());
AqlValue res = Functions::DateDiff(&expressionContext, node, params);
ASSERT_TRUE(res.isNumber());
ASSERT_EQ(std::round(res.toDouble()), -std::round(expected));
for (auto& it : params) {
it.destroy();
}
}
}
};
TEST_F(DateFunctionsTestDateDiff, checking_millis) {
double expectedDiff = dateDiffMillis;
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::MILLI);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_seconds) {
double expectedDiff = dateDiffMillis / 1000;
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::SECOND);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_minutes) {
double expectedDiff = dateDiffMillis / (1000 * 60);
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::MINUTE);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_hours) {
double expectedDiff = dateDiffMillis / (1000 * 60 * 60);
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::HOUR);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_days) {
double expectedDiff = dateDiffMillis / (1000 * 60 * 60 * 24);
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::DAY);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_weeks) {
double expectedDiff = dateDiffMillis / (1000 * 60 * 60 * 24 * 7);
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::WEEK);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_months) {
double expectedDiff = dateDiffMillis / (1000 * 60 * 60 * 24) / avgDaysPerMonth;
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::MONTH);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_years) {
double expectedDiff = dateDiffMillis / (1000 * 60 * 60 * 24) / 365;
auto allFlags = TestDateModifierFlagFactory::createAllFlags(
TestDateModifierFlagFactory::FLAGS::YEAR);
for (auto const& f : allFlags) {
testCombinations(f, expectedDiff);
}
}
TEST_F(DateFunctionsTestDateDiff, checking_leap_days) {
// TODO!
}
} // namespace date_diff
namespace date_subtract {
struct TestDate {
public:
TestDate(std::string const& json, std::string const& v)
: _input(nullptr), _result(v) {
// Make sure to only insert valid JSON.
// We are not testing the parser here.
_input = arangodb::velocypack::Parser::fromJson(json);
}
std::string const testName() const {
return _input->toJson() + " => " + _result;
}
void buildParams(VPackFunctionParameters& input) const {
VPackSlice s = _input->slice();
for (VPackSlice it : VPackArrayIterator(s)) {
input.emplace_back(it);
}
}
void validateResult(AqlValue const& result) const {
ASSERT_TRUE(result.isString());
auto res = result.slice();
std::string ref = res.copyString(); // Readability in test Tool
ASSERT_EQ(ref, _result);
}
private:
std::shared_ptr<arangodb::velocypack::Builder> _input;
std::string const _result;
};
TEST(DateFunctionsTest, DATE_SUBTRACT) {
fakeit::Mock<ExpressionContext> expressionContextMock;
ExpressionContext& expressionContext = expressionContextMock.get();
std::vector<TestDate> testees = {
#include "DATE_SUBTRACT.testcases"
};
arangodb::aql::Function fun("DATE_SUBTRACT", &Functions::DateSubtract);
arangodb::aql::AstNode node(NODE_TYPE_FCALL);
node.setData(static_cast<void const*>(&fun));
for (auto const& testee : testees) {
SmallVector<AqlValue>::allocator_type::arena_type arena;
SmallVector<AqlValue> params{arena};
testee.buildParams(params);
AqlValue res = Functions::DateSubtract(&expressionContext, node, params);
testee.validateResult(res);
res.destroy();
// Free input parameters
for (auto& it : params) {
it.destroy();
}
}
}
} // namespace date_subtract
} // namespace date_functions_aql
} // namespace tests
} // namespace arangodb<|fim▁end|> | flagBuilder.clear(); |
<|file_name|>issue_922.rs<|end_file_name|><|fim▁begin|>use juniper::*;
struct Query;
#[juniper::graphql_object]
impl Query {
fn characters() -> Vec<CharacterValue> {
vec![
Into::into(Human {
id: 0,
name: "human-32".to_owned(),
}),
Into::into(Droid {
id: 1,
name: "R2-D2".to_owned(),
}),
]
}
}
#[juniper::graphql_interface(for = [Human, Droid])]
trait Character {
fn id(&self) -> i32;
fn name(&self) -> String;
}
<|fim▁hole|>struct Human {
pub id: i32,
pub name: String,
}
#[juniper::graphql_interface]
impl Character for Human {
fn id(&self) -> i32 {
self.id
}
fn name(&self) -> String {
self.name.clone()
}
}
#[derive(juniper::GraphQLObject)]
#[graphql(impl = CharacterValue)]
struct Droid {
pub id: i32,
pub name: String,
}
#[juniper::graphql_interface]
impl Character for Droid {
fn id(&self) -> i32 {
self.id
}
fn name(&self) -> String {
self.name.clone()
}
}
type Schema = juniper::RootNode<'static, Query, EmptyMutation<()>, EmptySubscription<()>>;
#[tokio::test]
async fn test_fragment_on_interface() {
let query = r#"
query Query {
characters {
...CharacterFragment
}
}
fragment CharacterFragment on Character {
__typename
... on Human {
id
name
}
... on Droid {
id
name
}
}
"#;
let (res, errors) = juniper::execute(
query,
None,
&Schema::new(Query, EmptyMutation::new(), EmptySubscription::new()),
&Variables::new(),
&(),
)
.await
.unwrap();
assert_eq!(errors.len(), 0);
assert_eq!(
res,
graphql_value!({
"characters": [
{"__typename": "Human", "id": 0, "name": "human-32"},
{"__typename": "Droid", "id": 1, "name": "R2-D2"},
],
}),
);
let (res, errors) = juniper::execute_sync(
query,
None,
&Schema::new(Query, EmptyMutation::new(), EmptySubscription::new()),
&Variables::new(),
&(),
)
.unwrap();
assert_eq!(errors.len(), 0);
assert_eq!(
res,
graphql_value!({
"characters": [
{"__typename": "Human", "id": 0, "name": "human-32"},
{"__typename": "Droid", "id": 1, "name": "R2-D2"},
],
}),
);
}<|fim▁end|> | #[derive(juniper::GraphQLObject)]
#[graphql(impl = CharacterValue)] |
<|file_name|>wiki2.js<|end_file_name|><|fim▁begin|>(function($)
{
$(document).ready(function()
{
$('.nsfw').on('click', function()
{
if($(this).hasClass('show'))
{
$(this).removeClass('show');
}
else
{
$(this).addClass('show');
}
});
$('.snip').on('click', function()<|fim▁hole|> {
$(this).removeClass('show');
$(this).find('.message a').text('Read More');
}
else
{
$(this).addClass('show');
$(this).find('.message a').text('Show Less');
}
});
});
})(basic);<|fim▁end|> | {
if($(this).hasClass('show')) |
<|file_name|>api.py<|end_file_name|><|fim▁begin|># Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
from django.urls import reverse
import logging
import requests
from ci.git_api import GitAPI, GitException, copydoc
import re
import json
try:
from urllib.parse import quote_plus, urljoin
except ImportError:
from urllib import quote_plus
from urlparse import urljoin
logger = logging.getLogger('ci')
class GitLabAPI(GitAPI):
STATUS = ((GitAPI.PENDING, "pending"),
(GitAPI.ERROR, "failed"),
(GitAPI.SUCCESS, "success"),
(GitAPI.FAILURE, "failed"),
(GitAPI.RUNNING, "running"),
(GitAPI.CANCELED, "canceled"),
)
def __init__(self, config, access_user=None, token=None):
super(GitLabAPI, self).__init__(config, access_user=access_user, token=token)
self._api_url = '%s/api/v4' % config.get("api_url", "")
self._hostname = config.get("hostname", "unknown_gitlab")
self._prefix = "%s_" % self._hostname
self._html_url = config.get("html_url", "")<|fim▁hole|> self._repos_key = "%s_repos" % self._prefix
self._org_repos_key = "%s_org_repos" % self._prefix
self._user_key= "%s_user" % self._prefix
if access_user is not None and access_user.token:
token = json.loads(access_user.token)
# For backwards compatability, users that haven't signed in
# with the new OAuth2 application, their current token
# is a private token which requires a different http header to be set.
# The OAuth2 token data has the "token_type" key
# while the private token data just has the "access_token" key
if "token_type" in token:
self._session = self._access_user.start_session()
else:
self._headers["PRIVATE-TOKEN"] = token.get("access_token")
self._session = requests
elif self._token is not None:
# We assume the token that is passed in is a personal access token
# or private token
self._headers["PRIVATE-TOKEN"] = self._token
self._session = requests
else:
self._session = requests
@copydoc(GitAPI.sign_in_url)
def sign_in_url(self):
return reverse('ci:gitlab:sign_in', args=[self._hostname])
def _gitlab_id(self, owner, repo):
name = '%s/%s' % (owner, repo)
return quote_plus(name)
def _repo_url(self, path_with_namespace):
return '%s/projects/%s' % (self._api_url, quote_plus(path_with_namespace))
def _project_url(self, project_id):
"""
Get the projects API URL based on project ID.
Input:
project_id[int]: Project ID
"""
return "%s/projects/%s" % (self._api_url, project_id)
def _branch_by_id_url(self, repo_id, branch_id):
"""
Get the branch API URL using IDs instead of owner/repo/branch.
Input:
repo_id[int]: ID of the repo
branch_id[int]: ID of the branch
"""
return "%s/projects/%s/repository/branches/%s" % (self._api_url, repo_id, quote_plus(str(branch_id)))
@copydoc(GitAPI.branch_html_url)
def branch_html_url(self, owner, repo, branch):
return "%s/tree/%s" % (self.repo_html_url(owner, repo), branch)
@copydoc(GitAPI.repo_html_url)
def repo_html_url(self, owner, repo):
return "%s/%s/%s" % (self._html_url, owner, repo)
def _comment_api_url(self, project_id, pr_iid):
"""
Get the API URL for a comment.
Input:
project_id[int]: ID of the project
pr_iid[int]: Repo internal MR ID
"""
return "%s/projects/%s/merge_requests/%s/notes" % (self._api_url, project_id, pr_iid)
@copydoc(GitAPI.commit_html_url)
def commit_html_url(self, owner, repo, sha):
return '%s/commit/%s' % (self.repo_html_url(owner, repo), sha)
def _pr_html_url(self, repo_path, pr_iid):
return '{}/{}/merge_requests/{}'.format(self._html_url, repo_path, pr_iid)
@copydoc(GitAPI.get_all_repos)
def get_all_repos(self, owner):
repos = self._get_user_repos(owner)
repos.extend(self._get_user_org_repos(owner))
return repos
def _get_user_repos(self, username):
"""
Gets a list of repos username owns or is a collaborator on.
"""
url = "%s/projects" % self._api_url
get_data = {"simple": True}
data = self.get_all_pages(url, params=get_data)
owner_repo = []
if not self._bad_response and data:
for repo in data:
r = repo["path_with_namespace"]
if r.startswith("%s/" % username):
owner_repo.append(r)
owner_repo.sort()
return owner_repo
@copydoc(GitAPI.get_repos)
def get_repos(self, session):
if self._repos_key in session:
return session[self._repos_key]
username = session.get(self._user_key, "")
if username:
owner_repo = self._get_user_repos(username)
session[self._repos_key] = owner_repo
return owner_repo
@copydoc(GitAPI.get_branches)
def get_branches(self, path_with_namespace):
url = "%s/repository/branches" % (self._repo_url(path_with_namespace))
data = self.get_all_pages(url)
branches = []
if not self._bad_response and data:
for branch in data:
branches.append(branch['name'])
branches.sort()
return branches
def _get_user_org_repos(self, username):
"""
Get a list of organizations that the user is a member of.
"""
url = "%s/projects" % self._api_url
get_data = {"simple": True}
data = self.get_all_pages(url, params=get_data)
org_repo = []
if not self._bad_response and data:
for repo in data:
org = repo['path_with_namespace']
if not org.startswith("%s/" % username):
org_repo.append(org)
org_repo.sort()
return org_repo
def _status_str(self, status):
"""
Used to convert a GitAPI status into a string that GitLab wants.
"""
for status_pair in self.STATUS:
if status == status_pair[0]:
return status_pair[1]
return None
@copydoc(GitAPI.update_pr_status)
def update_pr_status(self, base, head, state, event_url, description, context, job_stage):
"""
This updates the status of a paritcular commit associated with a PR.
"""
if not self._update_remote:
return
if job_stage in [self.STATUS_START_RUNNING, self.STATUS_CONTINUE_RUNNING]:
# GitLab doesn't like setting status to "running" multiple times
# and there is no point since we are only updating the description
# and that doesn't show up anywhere
return
path_with_namespace = "%s/%s" % (head.user().name, head.repo().name)
data = {
'id': quote_plus(path_with_namespace),
'sha': head.sha,
'ref': head.branch.name,
'state': self._status_str(state),
'target_url': event_url,
'description': description,
'name': context,
}
url = "%s/statuses/%s?state=%s" % (self._repo_url(path_with_namespace),
head.sha,
self._status_str(state))
response = self.post(url, data=data)
if not self._bad_response and response.status_code not in [200, 201, 202]:
logger.warning("Error setting pr status %s\nSent data:\n%s\nReply:\n%s" % \
(url, self._format_json(data), self._format_json(response.json())))
elif not self._bad_response:
logger.info("Set pr status %s:\nSent Data:\n%s" % (url, self._format_json(data)))
def _is_group_member(self, group_id, username):
"""
Returns whether the user is a member of the group_id
"""
url = "%s/groups/%s/members" % (self._api_url, group_id)
data = self.get_all_pages(url)
if not self._bad_response or data:
for member in data:
if member.get('username') == username:
return True
return False
@copydoc(GitAPI.is_collaborator)
def is_collaborator(self, user, repo):
if repo.user == user:
# the user is the owner
return True
path_with_namespace = '%s/%s' % (repo.user.name, repo.name)
url = "%s/users" % self._repo_url(path_with_namespace)
extra = {"search": user.name}
response = self.get(url, params=extra)
if not self._bad_response:
data = response.json()
for member in data:
if member.get('username') == user.name:
return True
return False
@copydoc(GitAPI.pr_comment)
def pr_comment(self, url, msg):
if not self._update_remote:
return
comment = {'body': msg}
self.post(url, data=comment)
if not self._bad_response:
logger.info("Posted comment to %s.\nComment: %s" %(url, msg))
else:
self._add_error("Failed to leave comment at %s.\nComment: %s" %(url, msg))
@copydoc(GitAPI.last_sha)
def last_sha(self, owner, repo, branch):
path_with_namespace = '%s/%s' % (owner, repo)
url = "%s/repository/branches/%s" % (self._repo_url(path_with_namespace), quote_plus(str(branch)))
response = self.get(url)
if not self._bad_response:
data = response.json()
return data['commit']['id']
@copydoc(GitAPI.install_webhooks)
def install_webhooks(self, user, repo):
"""
Updates the webhook for this server on GitHub.
Input:
user[models.GitUser]: The user trying to update the web hooks.
repo[models.Repository]: The repository to set the web hook on.
Raises:
GitException if there are any errors.
"""
if not self._install_webhook:
return
path_with_namespace = '%s/%s' % (repo.user.name, repo.name)
hook_url = '%s/hooks' % self._repo_url(path_with_namespace)
callback_url = urljoin(self._civet_url, reverse('ci:gitlab:webhook', args=[user.build_key]))
data = self.get_all_pages(hook_url)
have_hook = False
if not self._bad_response and data:
for hook in data:
if hook.get('merge_requests_events') and hook.get('push_events') and hook.get('url') == callback_url:
have_hook = True
break
if have_hook:
return
add_hook = {
'id': self._gitlab_id(repo.user.name, repo.name),
'url': callback_url,
'push_events': 'true',
'merge_requests_events': 'true',
'issues_events': 'false',
'tag_push_events': 'false',
'note_events': 'false',
'enable_ssl_verification': 'false',
}
response = self.post(hook_url, data=add_hook)
if self._bad_response:
raise GitException(self._format_json(response.json()))
logger.info('Added webhook to %s for user %s' % (repo, user.name))
def _get_pr_changed_files(self, owner, repo, pr_iid):
"""
Gets a list of changed files in this PR.
Input:
owner[str]: name of the owner of the repo
repo[str]: name of the repository
pr_num[int]: PR number
Return:
list[str]: Filenames that have changed in the PR
"""
url = "%s/projects/%s/merge_requests/%s/changes" % (self._api_url, self._gitlab_id(owner, repo), pr_iid)
data = self.get_all_pages(url)
filenames = []
if not self._bad_response and data:
filenames = [ f['new_path'] for f in data['changes'] ]
filenames.sort()
if not filenames and not self._bad_response:
self._add_error("Didn't read any PR changed files at URL: %s\nData:\n%s" % (url, self._format_json(data)))
return filenames
def _get_project_access_level(self, path_with_namespace):
"""
Gets the access level for a project for the current authorized user.
Input:
owner[str]: owner of the project
repo[str]: name of the repo
"""
access_level_map = {10: "Guest", 20: "Reporter", 30: "Developer", 40: "Master", 50: "Owner"}
url = "%s/user" % self._api_url
user_id = None
# This will get the info on the currently authorized user
response = self.get(url)
if self._bad_response:
return "Unknown"
data = response.json()
user_id = data.get("id")
if not user_id:
return "Unknown"
# /projects/<project>/users doesn't seem to give the access level, so use members
url = "%s/members/%s" % (self._repo_url(path_with_namespace), user_id)
response = self.get(url)
if not self._bad_response:
data = response.json()
access_level = data.get("access_level")
return access_level_map.get(access_level, "Unknown")
# If we get here then the signed in user is not in projects/members but could
# be in groups/members. GitLab API sucks. See https://gitlab.com/gitlab-org/gitlab-ce/issues/18672
url = self._repo_url(path_with_namespace)
response = self.get(url)
if self._bad_response:
return "Unknown"
data = response.json()
namespace = data.get("namespace")
group_id = namespace.get("id")
url = "%s/groups/%s/members/%s" % (self._api_url, group_id, user_id)
response = self.get(url)
if self._bad_response:
return "Unknown"
data = response.json()
access_level = data.get("access_level")
return access_level_map.get(access_level, "Unknown")
@copydoc(GitAPI.get_pr_comments)
def get_pr_comments(self, url, username, comment_re):
data = self.get_all_pages(url)
comments = []
if not self._bad_response and data:
for c in data:
if c["author"]["username"] != username:
continue
if re.search(comment_re, c["body"]):
c["url"] = "%s/%s" % (url, c["id"])
comments.append(c)
return comments
@copydoc(GitAPI.remove_pr_comment)
def remove_pr_comment(self, comment):
if not self._update_remote:
return
url = comment.get("url")
self.delete(url)
if not self._bad_response:
logger.info("Removed comment: %s" % url)
@copydoc(GitAPI.edit_pr_comment)
def edit_pr_comment(self, comment, msg):
if not self._update_remote:
return
url = comment.get("url")
self.put(url, data={"body": msg})
if not self._bad_response:
logger.info("Edited PR comment: %s" % url)
@copydoc(GitAPI.is_member)
def is_member(self, team, user):
if user.name == team:
return True
return self._is_group_member(team, user.name)
@copydoc(GitAPI.get_open_prs)
def get_open_prs(self, owner, repo):
path_with_namespace = '%s/%s' % (owner, repo)
url = "%s/merge_requests" % self._repo_url(path_with_namespace)
params = {"state": "opened"}
data = self.get_all_pages(url, params=params)
if not self._bad_response and data is not None:
open_prs = []
for pr in data:
open_prs.append({"number": pr["iid"], "title": pr["title"], "html_url": pr["web_url"]})
return open_prs
return None
def _get_issues(self, path_with_namespace, title):
"""
Get a list of open issues owned by the authenticated user that have the given title
"""
url = "%s/issues" % self._repo_url(path_with_namespace)
params = {"state": "opened", "scope": "created-by-me", "search": title}
data = self.get_all_pages(url, params=params)
matched_issues = []
if not self._bad_response and data:
for i in data:
if i["title"] == title:
matched_issues.append(i)
return matched_issues
def _create_issue(self, path_with_namespace, title, body):
"""
Create an issue on a repo with the given title and body
"""
url = "%s/issues" % self._repo_url(path_with_namespace)
post_data = {"title": title, "description": body}
data = self.post(url, data=post_data)
if not self._bad_response and data:
logger.info("Created issue '%s': %s" % (title, data.json().get("web_url")))
def _edit_issue(self, path_with_namespace, issue_id, title, body):
"""
Modify the given issue on a repo with the given title and body
"""
url = "%s/issues/%s" % (self._repo_url(path_with_namespace), issue_id)
post_data = {"title": title, "description": body}
data = self.put(url, data=post_data)
if not self._bad_response and data:
logger.info("Updated issue '%s': %s" % (title, data.json().get("web_url")))
@copydoc(GitAPI.create_or_update_issue)
def create_or_update_issue(self, owner, repo, title, body, new_comment):
path_with_namespace = '%s/%s' % (owner, repo)
if not self._update_remote:
return
existing_issues = self._get_issues(path_with_namespace, title)
if existing_issues:
issue_id = existing_issues[-1]["iid"]
if new_comment:
url = "%s/issues/%s/notes" % (self._repo_url(path_with_namespace), issue_id)
self.pr_comment(url, body)
else:
self._edit_issue(path_with_namespace, issue_id, title, body)
else:
self._create_issue(path_with_namespace, title, body)
@copydoc(GitAPI.pr_review_comment)
def pr_review_comment(self, url, sha, filepath, position, msg):
self._add_error("GitLab function not implemented: pr_review_comment")
@copydoc(GitAPI.add_pr_label)
def add_pr_label(self, repo, pr_num, label_name):
self._add_error("GitLab function not implemented: add_pr_label")
@copydoc(GitAPI.remove_pr_label)
def remove_pr_label(self, repo, pr_num, label_name):
self._add_error("GitLab function not implemented: remove_pr_label")
@copydoc(GitAPI.automerge)
def automerge(self, repo, pr_num):
return False<|fim▁end|> | self._ssl_cert = config.get("ssl_cert", False) |
<|file_name|>arith.py<|end_file_name|><|fim▁begin|><|fim▁hole|> """Add two numbers"""
return x + y<|fim▁end|> | def add(x, y): |
<|file_name|>tee.go<|end_file_name|><|fim▁begin|>package main
import (
"flag"
"io"
"log"
"os"
"os/signal"
"syscall"
)
func init() {
log.SetFlags(0)
log.SetOutput(os.Stderr)
}
func main() {
var (
flags int = (os.O_WRONLY | os.O_CREATE)
exitval int
files []io.Writer = []io.Writer{os.Stdout}
)
appendFlag := flag.Bool("a", false, "Append the output to the files rather than overwriting them.")
interruptFlag := flag.Bool("i", false, "Ignore the SIGINT signal.")
flag.Usage = usage
flag.Parse()
if *interruptFlag {
signal.Ignore(syscall.SIGINT)
}
if *appendFlag {
flags |= os.O_APPEND
} else {
flags |= os.O_TRUNC
}
for _, arg := range flag.Args() {
if arg == "-" {
continue
}
if f, err := os.OpenFile(arg, flags, os.ModePerm); err != nil {
log.Printf("%s - %v", arg, err)
exitval = 1
} else {
defer f.Close()
files = append(files, f)
}
}
if _, err := io.Copy(io.MultiWriter(files...), os.Stdin); err != nil {
log.Printf("%v", err)
exitval = 1
}
<|fim▁hole|>}
func usage() {
log.Fatalln("usage: tee [-ai] [file ...]")
}<|fim▁end|> | os.Exit(exitval) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.