prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>deleterepositoryresponse.cpp<|end_file_name|><|fim▁begin|>/*
Copyright 2013-2021 Paul Colby
This file is part of QtAws.
QtAws is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
QtAws is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with the QtAws. If not, see <http://www.gnu.org/licenses/>.
*/
#include "deleterepositoryresponse.h"
#include "deleterepositoryresponse_p.h"
#include <QDebug>
#include <QNetworkReply>
#include <QXmlStreamReader>
<|fim▁hole|> * \class QtAws::ECRPublic::DeleteRepositoryResponse
* \brief The DeleteRepositoryResponse class provides an interace for ECRPublic DeleteRepository responses.
*
* \inmodule QtAwsECRPublic
*
* <fullname>Amazon Elastic Container Registry Public</fullname>
*
* Amazon Elastic Container Registry (Amazon ECR) is a managed container image registry service. Amazon ECR provides both
* public and private registries to host your container images. You can use the familiar Docker CLI, or their preferred
* client, to push, pull, and manage images. Amazon ECR provides a secure, scalable, and reliable registry for your Docker
* or Open Container Initiative (OCI) images. Amazon ECR supports public repositories with this API. For information about
* the Amazon ECR API for private repositories, see <a
* href="https://docs.aws.amazon.com/AmazonECR/latest/APIReference/Welcome.html">Amazon Elastic Container Registry API
*
* \sa ECRPublicClient::deleteRepository
*/
/*!
* Constructs a DeleteRepositoryResponse object for \a reply to \a request, with parent \a parent.
*/
DeleteRepositoryResponse::DeleteRepositoryResponse(
const DeleteRepositoryRequest &request,
QNetworkReply * const reply,
QObject * const parent)
: ECRPublicResponse(new DeleteRepositoryResponsePrivate(this), parent)
{
setRequest(new DeleteRepositoryRequest(request));
setReply(reply);
}
/*!
* \reimp
*/
const DeleteRepositoryRequest * DeleteRepositoryResponse::request() const
{
Q_D(const DeleteRepositoryResponse);
return static_cast<const DeleteRepositoryRequest *>(d->request);
}
/*!
* \reimp
* Parses a successful ECRPublic DeleteRepository \a response.
*/
void DeleteRepositoryResponse::parseSuccess(QIODevice &response)
{
//Q_D(DeleteRepositoryResponse);
QXmlStreamReader xml(&response);
/// @todo
}
/*!
* \class QtAws::ECRPublic::DeleteRepositoryResponsePrivate
* \brief The DeleteRepositoryResponsePrivate class provides private implementation for DeleteRepositoryResponse.
* \internal
*
* \inmodule QtAwsECRPublic
*/
/*!
* Constructs a DeleteRepositoryResponsePrivate object with public implementation \a q.
*/
DeleteRepositoryResponsePrivate::DeleteRepositoryResponsePrivate(
DeleteRepositoryResponse * const q) : ECRPublicResponsePrivate(q)
{
}
/*!
* Parses a ECRPublic DeleteRepository response element from \a xml.
*/
void DeleteRepositoryResponsePrivate::parseDeleteRepositoryResponse(QXmlStreamReader &xml)
{
Q_ASSERT(xml.name() == QLatin1String("DeleteRepositoryResponse"));
Q_UNUSED(xml) ///< @todo
}
} // namespace ECRPublic
} // namespace QtAws<|fim▁end|> | namespace QtAws {
namespace ECRPublic {
/*! |
<|file_name|>octopus.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
/// Return a DAG with cross and octopus merges.
pub fn cross_octopus() -> Vec<Vec<usize>> {
let parents = drawdag::parse(
r#"
r17 r18 r19 r20 r21 r22 r23 r24 r25 r26 r27 r28 r29 r30 r31 r32
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\
r00 r01 r02 r03 r04 r05 r06 r07 r08 r09 r10 r11 r12 r13 r14 r15 r16
r17 r18 r19 r20 r21 r22 r23 r24 r25 r26 r27 r28 r29 r30 r31 r32
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\
r02 r03 r04 r05 r06 r07 r08 r09 r10 r11 r12 r13 r14 r15 r16 r00 r01
r17 r18 r19 r20 r21 r22 r23 r24 r25 r26 r27 r28 r29 r30 r31 r32
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\
r04 r05 r06 r07 r08 r09 r10 r11 r12 r13 r14 r15 r16 r00 r01 r02 r03
r17 r18 r19 r20 r21 r22 r23 r24 r25 r26 r27 r28 r29 r30 r31 r32
|\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\ |\
r06 r07 r08 r09 r10 r11 r12 r13 r14 r15 r16 r00 r01 r02 r03 r04 r05
"#,
);
(0..=32)<|fim▁hole|> .collect()
})
.collect()
}<|fim▁end|> | .map(|i| {
parents[&format!("r{:02}", i)]
.iter()
.map(|p| p.trim_start_matches('r').parse::<usize>().unwrap()) |
<|file_name|>commands.py<|end_file_name|><|fim▁begin|>###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2009, James Vega
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
"""
Includes wrappers for commands.
"""
import time
import types
import getopt
import inspect
import threading
import supybot.log as log
import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
import supybot.ircdb as ircdb
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
###
# Non-arg wrappers -- these just change the behavior of a command without
# changing the arguments given to it.
###
# Thread has to be a non-arg wrapper because by the time we're parsing and
# validating arguments, we're inside the function we'd want to thread.
def thread(f):
"""Makes sure a command spawns a thread when called."""
def newf(self, irc, msg, args, *L, **kwargs):
if world.isMainThread():
targetArgs = (self.callingCommand, irc, msg, args) + tuple(L)
t = callbacks.CommandThread(target=self._callCommand,
args=targetArgs, kwargs=kwargs)
t.start()
else:
f(self, irc, msg, args, *L, **kwargs)
return utils.python.changeFunctionName(newf, f.func_name, f.__doc__)
class UrlSnarfThread(world.SupyThread):
def __init__(self, *args, **kwargs):
assert 'url' in kwargs
kwargs['name'] = 'Thread #%s (for snarfing %s)' % \
(world.threadsSpawned, kwargs.pop('url'))
super(UrlSnarfThread, self).__init__(*args, **kwargs)
self.setDaemon(True)
def run(self):
try:
super(UrlSnarfThread, self).run()
except utils.web.Error, e:
log.debug('Exception in urlSnarfer: %s', utils.exnToString(e))
class SnarfQueue(ircutils.FloodQueue):
timeout = conf.supybot.snarfThrottle
def key(self, channel):
return channel
_snarfed = SnarfQueue()
class SnarfIrc(object):
def __init__(self, irc, channel, url):
self.irc = irc
self.url = url
self.channel = channel
def __getattr__(self, attr):
return getattr(self.irc, attr)
def reply(self, *args, **kwargs):
_snarfed.enqueue(self.channel, self.url)
return self.irc.reply(*args, **kwargs)
# This lock is used to serialize the calls to snarfers, so
# earlier snarfers are guaranteed to beat out later snarfers.
_snarfLock = threading.Lock()
def urlSnarfer(f):
"""Protects the snarfer from loops (with other bots) and whatnot."""
def newf(self, irc, msg, match, *L, **kwargs):
url = match.group(0)
channel = msg.args[0]
if not irc.isChannel(channel):
return
if ircdb.channels.getChannel(channel).lobotomized:
self.log.info('Not snarfing in %s: lobotomized.', channel)
return
if _snarfed.has(channel, url):
self.log.info('Throttling snarf of %s in %s.', url, channel)
return
irc = SnarfIrc(irc, channel, url)
def doSnarf():
_snarfLock.acquire()
try:
# This has to be *after* we've acquired the lock so we can be
# sure that all previous urlSnarfers have already run to
# completion.
if msg.repliedTo:
self.log.debug('Not snarfing, msg is already repliedTo.')
return
f(self, irc, msg, match, *L, **kwargs)
finally:
_snarfLock.release()
if threading.currentThread() is not world.mainThread:
doSnarf()
else:
L = list(L)
t = UrlSnarfThread(target=doSnarf, url=url)
t.start()
newf = utils.python.changeFunctionName(newf, f.func_name, f.__doc__)
return newf
###
# Converters, which take irc, msg, args, and a state object, and build up the
# validated and converted args for the method in state.args.
###
# This is just so we can centralize this, since it may change.
def _int(s):
base = 10
if s.startswith('0x'):
base = 16
s = s[2:]
elif s.startswith('0b'):
base = 2
s = s[2:]
elif s.startswith('0') and len(s) > 1:
base = 8
s = s[1:]
try:
return int(s, base)
except ValueError:
if base == 10:
return int(float(s))
else:
raise
def getInt(irc, msg, args, state, type='integer', p=None):
try:
i = _int(args[0])
if p is not None:
if not p(i):
state.errorInvalid(type, args[0])
state.args.append(i)
del args[0]
except ValueError:
state.errorInvalid(type, args[0])
def getNonInt(irc, msg, args, state, type='non-integer value'):
try:
i = _int(args[0])
state.errorInvalid(type, args[0])
except ValueError:
state.args.append(args.pop(0))
def getLong(irc, msg, args, state, type='long'):
getInt(irc, msg, args, state, type)
state.args[-1] = long(state.args[-1])
def getFloat(irc, msg, args, state, type='floating point number'):
try:
state.args.append(float(args[0]))
del args[0]
except ValueError:
state.errorInvalid(type, args[0])
def getPositiveInt(irc, msg, args, state, *L):
getInt(irc, msg, args, state,
p=lambda i: i>0, type='positive integer', *L)
def getNonNegativeInt(irc, msg, args, state, *L):
getInt(irc, msg, args, state,
p=lambda i: i>=0, type='non-negative integer', *L)
def getIndex(irc, msg, args, state):
getInt(irc, msg, args, state, type='index')
if state.args[-1] > 0:
state.args[-1] -= 1
def getId(irc, msg, args, state, kind=None):
type = 'id'
if kind is not None and not kind.endswith('id'):
type = kind + ' id'
original = args[0]
try:
args[0] = args[0].lstrip('#')
getInt(irc, msg, args, state, type=type)
except Exception, e:
args[0] = original
raise
def getExpiry(irc, msg, args, state):
now = int(time.time())
try:
expires = _int(args[0])
if expires:
expires += now
state.args.append(expires)
del args[0]
except ValueError:
state.errorInvalid('number of seconds', args[0])
def getBoolean(irc, msg, args, state):
try:
state.args.append(utils.str.toBool(args[0]))
del args[0]
except ValueError:
state.errorInvalid('boolean', args[0])
def getNetworkIrc(irc, msg, args, state, errorIfNoMatch=False):
if args:
for otherIrc in world.ircs:
if otherIrc.network.lower() == args[0].lower():
state.args.append(otherIrc)
del args[0]
return
if errorIfNoMatch:
raise callbacks.ArgumentError
else:
state.args.append(irc)
def getHaveOp(irc, msg, args, state, action='do that'):
if not state.channel:
getChannel(irc, msg, args, state)
if state.channel not in irc.state.channels:
state.error('I\'m not even in %s.' % state.channel, Raise=True)
if not irc.state.channels[state.channel].isOp(irc.nick):
state.error('I need to be opped to %s.' % action, Raise=True)
def validChannel(irc, msg, args, state):
if irc.isChannel(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('channel', args[0])
def getHostmask(irc, msg, args, state):
if ircutils.isUserHostmask(args[0]):
state.args.append(args.pop(0))
else:
try:
hostmask = irc.state.nickToHostmask(args[0])
state.args.append(hostmask)
del args[0]
except KeyError:
state.errorInvalid('nick or hostmask', args[0])
def getBanmask(irc, msg, args, state):
getHostmask(irc, msg, args, state)
if not state.channel:
getChannel(irc, msg, args, state)
banmaskstyle = conf.supybot.protocols.irc.banmask
state.args[-1] = banmaskstyle.makeBanmask(state.args[-1])
def getUser(irc, msg, args, state):
try:
state.args.append(ircdb.users.getUser(msg.prefix))
except KeyError:
state.errorNotRegistered(Raise=True)
def getOtherUser(irc, msg, args, state):
if ircutils.isUserHostmask(args[0]):
state.errorNoUser(args[0])
try:
state.args.append(ircdb.users.getUser(args[0]))
del args[0]
except KeyError:
try:
getHostmask(irc, msg, [args[0]], state)
hostmask = state.args.pop()
state.args.append(ircdb.users.getUser(hostmask))
del args[0]
except (KeyError, callbacks.Error):
state.errorNoUser(name=args[0])
def _getRe(f):
def get(irc, msg, args, state, convert=True):
original = args[:]
s = args.pop(0)
def isRe(s):
try:
_ = f(s)
return True
except ValueError:
return False
try:
while len(s) < 512 and not isRe(s):
s += ' ' + args.pop(0)
if len(s) < 512:
if convert:
state.args.append(f(s))
else:
state.args.append(s)
else:
state.errorInvalid('regular expression', s)
except IndexError:
args[:] = original
state.errorInvalid('regular expression', s)
return get
getMatcher = _getRe(utils.str.perlReToPythonRe)
getReplacer = _getRe(utils.str.perlReToReplacer)
def getNick(irc, msg, args, state):
if ircutils.isNick(args[0]):
if 'nicklen' in irc.state.supported:
if len(args[0]) > irc.state.supported['nicklen']:
state.errorInvalid('nick', args[0],
'That nick is too long for this server.')
state.args.append(args.pop(0))
else:
state.errorInvalid('nick', args[0])
def getSeenNick(irc, msg, args, state, errmsg=None):
try:
_ = irc.state.nickToHostmask(args[0])
state.args.append(args.pop(0))
except KeyError:
if errmsg is None:
errmsg = 'I haven\'t seen %s.' % args[0]
state.error(errmsg, Raise=True)
def getChannel(irc, msg, args, state):
if args and irc.isChannel(args[0]):
channel = args.pop(0)
elif irc.isChannel(msg.args[0]):
channel = msg.args[0]
else:
state.log.debug('Raising ArgumentError because there is no channel.')
raise callbacks.ArgumentError
state.channel = channel
state.args.append(channel)
def getChannelDb(irc, msg, args, state, **kwargs):
channelSpecific = conf.supybot.databases.plugins.channelSpecific
try:
getChannel(irc, msg, args, state, **kwargs)
channel = channelSpecific.getChannelLink(state.channel)
state.channel = channel
state.args[-1] = channel
except (callbacks.ArgumentError, IndexError):
if channelSpecific():
raise
channel = channelSpecific.link()
if not conf.get(channelSpecific.link.allow, channel):
log.warning('channelSpecific.link is globally set to %s, but '
'%s disallowed linking to its db.', channel, channel)
raise
else:
channel = channelSpecific.getChannelLink(channel)
state.channel = channel
state.args.append(channel)
def inChannel(irc, msg, args, state):
if not state.channel:
getChannel(irc, msg, args, state)
if state.channel not in irc.state.channels:
state.error('I\'m not in %s.' % state.channel, Raise=True)
def onlyInChannel(irc, msg, args, state):
if not (irc.isChannel(msg.args[0]) and msg.args[0] in irc.state.channels):
state.error('This command may only be given in a channel that I am in.',
Raise=True)
else:
state.channel = msg.args[0]
state.args.append(state.channel)
def callerInGivenChannel(irc, msg, args, state):
channel = args[0]
if irc.isChannel(channel):
if channel in irc.state.channels:
if msg.nick in irc.state.channels[channel].users:
state.args.append(args.pop(0))
else:
state.error('You must be in %s.' % channel, Raise=True)
else:
state.error('I\'m not in %s.' % channel, Raise=True)
else:
state.errorInvalid('channel', args[0])
def nickInChannel(irc, msg, args, state):
originalArgs = state.args[:]
inChannel(irc, msg, args, state)
state.args = originalArgs
if args[0] not in irc.state.channels[state.channel].users:
state.error('%s is not in %s.' % (args[0], state.channel), Raise=True)
state.args.append(args.pop(0))
def getChannelOrNone(irc, msg, args, state):
try:
getChannel(irc, msg, args, state)
except callbacks.ArgumentError:
state.args.append(None)
def checkChannelCapability(irc, msg, args, state, cap):
if not state.channel:
getChannel(irc, msg, args, state)
cap = ircdb.canonicalCapability(cap)
cap = ircdb.makeChannelCapability(state.channel, cap)
if not ircdb.checkCapability(msg.prefix, cap):
state.errorNoCapability(cap, Raise=True)
def getOp(irc, msg, args, state):
checkChannelCapability(irc, msg, args, state, 'op')
def getHalfop(irc, msg, args, state):
checkChannelCapability(irc, msg, args, state, 'halfop')
def getVoice(irc, msg, args, state):
checkChannelCapability(irc, msg, args, state, 'voice')
def getLowered(irc, msg, args, state):
state.args.append(ircutils.toLower(args.pop(0)))
def getSomething(irc, msg, args, state, errorMsg=None, p=None):
if p is None:
p = lambda _: True
if not args[0] or not p(args[0]):
if errorMsg is None:
errorMsg = 'You must not give the empty string as an argument.'
state.error(errorMsg, Raise=True)
else:
state.args.append(args.pop(0))
def getSomethingNoSpaces(irc, msg, args, state, *L):
def p(s):
return len(s.split(None, 1)) == 1
getSomething(irc, msg, args, state, p=p, *L)
def private(irc, msg, args, state):
if irc.isChannel(msg.args[0]):
state.errorRequiresPrivacy(Raise=True)
def public(irc, msg, args, state, errmsg=None):
if not irc.isChannel(msg.args[0]):
if errmsg is None:
errmsg = 'This message must be sent in a channel.'
state.error(errmsg, Raise=True)
def checkCapability(irc, msg, args, state, cap):
cap = ircdb.canonicalCapability(cap)
if not ircdb.checkCapability(msg.prefix, cap):
state.errorNoCapability(cap, Raise=True)
def owner(irc, msg, args, state):
checkCapability(irc, msg, args, state, 'owner')
def admin(irc, msg, args, state):
checkCapability(irc, msg, args, state, 'admin')
def anything(irc, msg, args, state):
state.args.append(args.pop(0))
def getGlob(irc, msg, args, state):
glob = args.pop(0)
if '*' not in glob and '?' not in glob:
glob = '*%s*' % glob
state.args.append(glob)
def getUrl(irc, msg, args, state):
if utils.web.urlRe.match(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('url', args[0])
def getEmail(irc, msg, args, state):
if utils.net.emailRe.match(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('email', args[0])
def getHttpUrl(irc, msg, args, state):
if utils.web.httpUrlRe.match(args[0]):
state.args.append(args.pop(0))
elif utils.web.httpUrlRe.match('http://' + args[0]):
state.args.append('http://' + args.pop(0))
else:
state.errorInvalid('http url', args[0])
def getNow(irc, msg, args, state):
state.args.append(int(time.time()))
def getCommandName(irc, msg, args, state):
if ' ' in args[0]:
state.errorInvalid('command name', args[0])
else:
state.args.append(callbacks.canonicalName(args.pop(0)))
def getIp(irc, msg, args, state):
if utils.net.isIP(args[0]):
state.args.append(args.pop(0))
else:
state.errorInvalid('ip', args[0])
def getLetter(irc, msg, args, state):
if len(args[0]) == 1:
state.args.append(args.pop(0))
else:
state.errorInvalid('letter', args[0])
def getMatch(irc, msg, args, state, regexp, errmsg):
m = regexp.search(args[0])
if m is not None:
state.args.append(m)
del args[0]
else:
state.error(errmsg, Raise=True)
def getLiteral(irc, msg, args, state, literals, errmsg=None):
# ??? Should we allow abbreviations?
if isinstance(literals, basestring):
literals = (literals,)
abbrevs = utils.abbrev(literals)
if args[0] in abbrevs:
state.args.append(abbrevs[args.pop(0)])
elif errmsg is not None:
state.error(errmsg, Raise=True)
else:
raise callbacks.ArgumentError
def getTo(irc, msg, args, state):
if args[0].lower() == 'to':
args.pop(0)
def getPlugin(irc, msg, args, state, require=True):
cb = irc.getCallback(args[0])
if cb is not None:
state.args.append(cb)
del args[0]
elif require:
state.errorInvalid('plugin', args[0])
else:
state.args.append(None)
def getIrcColor(irc, msg, args, state):
if args[0] in ircutils.mircColors:
state.args.append(ircutils.mircColors[args.pop(0)])
else:
state.errorInvalid('irc color')
def getText(irc, msg, args, state):
if args:
state.args.append(' '.join(args))
args[:] = []
else:
raise IndexError
wrappers = ircutils.IrcDict({
'id': getId,
'ip': getIp,
'int': getInt,
'index': getIndex,
'color': getIrcColor,
'now': getNow,
'url': getUrl,
'email': getEmail,
'httpUrl': getHttpUrl,
'long': getLong,
'float': getFloat,
'nonInt': getNonInt,
'positiveInt': getPositiveInt,
'nonNegativeInt': getNonNegativeInt,
'letter': getLetter,
'haveOp': getHaveOp,
'expiry': getExpiry,
'literal': getLiteral,
'to': getTo,
'nick': getNick,
'seenNick': getSeenNick,
'channel': getChannel,
'inChannel': inChannel,
'onlyInChannel': onlyInChannel,
'nickInChannel': nickInChannel,
'networkIrc': getNetworkIrc,
'callerInGivenChannel': callerInGivenChannel,
'plugin': getPlugin,
'boolean': getBoolean,
'lowered': getLowered,
'anything': anything,
'something': getSomething,
'filename': getSomething, # XXX Check for validity.
'commandName': getCommandName,
'text': getText,
'glob': getGlob,
'somethingWithoutSpaces': getSomethingNoSpaces,
'capability': getSomethingNoSpaces,
'channelDb': getChannelDb,
'hostmask': getHostmask,
'banmask': getBanmask,
'user': getUser,
'matches': getMatch,
'public': public,
'private': private,
'otherUser': getOtherUser,
'regexpMatcher': getMatcher,<|fim▁hole|> 'admin': admin,
'checkCapability': checkCapability,
'checkChannelCapability': checkChannelCapability,
'op': getOp,
'halfop': getHalfop,
'voice': getVoice,
})
def addConverter(name, wrapper):
wrappers[name] = wrapper
class UnknownConverter(KeyError):
pass
def getConverter(name):
try:
return wrappers[name]
except KeyError, e:
raise UnknownConverter, str(e)
def callConverter(name, irc, msg, args, state, *L):
getConverter(name)(irc, msg, args, state, *L)
###
# Contexts. These determine what the nature of conversions is; whether they're
# defaulted, or many of them are allowed, etc. Contexts should be reusable;
# i.e., they should not maintain state between calls.
###
def contextify(spec):
if not isinstance(spec, context):
spec = context(spec)
return spec
def setDefault(state, default):
if callable(default):
state.args.append(default())
else:
state.args.append(default)
class context(object):
def __init__(self, spec):
self.args = ()
self.spec = spec # for repr
if isinstance(spec, tuple):
assert spec, 'tuple spec must not be empty.'
self.args = spec[1:]
self.converter = getConverter(spec[0])
elif spec is None:
self.converter = getConverter('anything')
elif isinstance(spec, basestring):
self.args = ()
self.converter = getConverter(spec)
else:
assert isinstance(spec, context)
self.converter = spec
def __call__(self, irc, msg, args, state):
log.debug('args before %r: %r', self, args)
self.converter(irc, msg, args, state, *self.args)
log.debug('args after %r: %r', self, args)
def __repr__(self):
return '<%s for %s>' % (self.__class__.__name__, self.spec)
class rest(context):
def __call__(self, irc, msg, args, state):
if args:
original = args[:]
args[:] = [' '.join(args)]
try:
super(rest, self).__call__(irc, msg, args, state)
except Exception, e:
args[:] = original
else:
raise IndexError
# additional means: Look for this (and make sure it's of this type). If
# there are no arguments for us to check, then use our default.
class additional(context):
def __init__(self, spec, default=None):
self.__parent = super(additional, self)
self.__parent.__init__(spec)
self.default = default
def __call__(self, irc, msg, args, state):
try:
self.__parent.__call__(irc, msg, args, state)
except IndexError:
log.debug('Got IndexError, returning default.')
setDefault(state, self.default)
# optional means: Look for this, but if it's not the type I'm expecting or
# there are no arguments for us to check, then use the default value.
class optional(additional):
def __call__(self, irc, msg, args, state):
try:
super(optional, self).__call__(irc, msg, args, state)
except (callbacks.ArgumentError, callbacks.Error), e:
log.debug('Got %s, returning default.', utils.exnToString(e))
state.errored = False
setDefault(state, self.default)
class any(context):
def __init__(self, spec, continueOnError=False):
self.__parent = super(any, self)
self.__parent.__init__(spec)
self.continueOnError = continueOnError
def __call__(self, irc, msg, args, state):
st = state.essence()
try:
while args:
self.__parent.__call__(irc, msg, args, st)
except IndexError:
pass
except (callbacks.ArgumentError, callbacks.Error), e:
if not self.continueOnError:
raise
else:
log.debug('Got %s, returning default.', utils.exnToString(e))
pass
state.args.append(st.args)
class many(any):
def __call__(self, irc, msg, args, state):
super(many, self).__call__(irc, msg, args, state)
if not state.args[-1]:
state.args.pop()
raise callbacks.ArgumentError
class first(context):
def __init__(self, *specs, **kw):
if 'default' in kw:
self.default = kw.pop('default')
assert not kw, 'Bad kwargs for first.__init__'
self.spec = specs # for __repr__
self.specs = map(contextify, specs)
def __call__(self, irc, msg, args, state):
errored = False
for spec in self.specs:
try:
spec(irc, msg, args, state)
return
except Exception, e:
errored = state.errored
state.errored = False
continue
if hasattr(self, 'default'):
state.args.append(self.default)
else:
state.errored = errored
raise e
class reverse(context):
def __call__(self, irc, msg, args, state):
args[:] = args[::-1]
super(reverse, self).__call__(irc, msg, args, state)
args[:] = args[::-1]
class commalist(context):
def __call__(self, irc, msg, args, state):
original = args[:]
st = state.essence()
trailingComma = True
try:
while trailingComma:
arg = args.pop(0)
if not arg.endswith(','):
trailingComma = False
for part in arg.split(','):
if part: # trailing commas
super(commalist, self).__call__(irc, msg, [part], st)
state.args.append(st.args)
except Exception, e:
args[:] = original
raise
class getopts(context):
"""The empty string indicates that no argument is taken; None indicates
that there is no converter for the argument."""
def __init__(self, getopts):
self.spec = getopts # for repr
self.getopts = {}
self.getoptL = []
for (name, spec) in getopts.iteritems():
if spec == '':
self.getoptL.append(name)
self.getopts[name] = None
else:
self.getoptL.append(name + '=')
self.getopts[name] = contextify(spec)
log.debug('getopts: %r', self.getopts)
log.debug('getoptL: %r', self.getoptL)
def __call__(self, irc, msg, args, state):
log.debug('args before %r: %r', self, args)
(optlist, rest) = getopt.getopt(args, '', self.getoptL)
getopts = []
for (opt, arg) in optlist:
opt = opt[2:] # Strip --
log.debug('opt: %r, arg: %r', opt, arg)
context = self.getopts[opt]
if context is not None:
st = state.essence()
context(irc, msg, [arg], st)
assert len(st.args) == 1
getopts.append((opt, st.args[0]))
else:
getopts.append((opt, True))
state.args.append(getopts)
args[:] = rest
log.debug('args after %r: %r', self, args)
###
# This is our state object, passed to converters along with irc, msg, and args.
###
class State(object):
log = log
def __init__(self, types):
self.args = []
self.kwargs = {}
self.types = types
self.channel = None
self.errored = False
def __getattr__(self, attr):
if attr.startswith('error'):
self.errored = True
return getattr(dynamic.irc, attr)
else:
raise AttributeError, attr
def essence(self):
st = State(self.types)
for (attr, value) in self.__dict__.iteritems():
if attr not in ('args', 'kwargs'):
setattr(st, attr, value)
return st
def __repr__(self):
return '%s(args=%r, kwargs=%r, channel=%r)' % (self.__class__.__name__,
self.args, self.kwargs,
self.channel)
###
# This is a compiled Spec object.
###
class Spec(object):
def _state(self, types, attrs={}):
st = State(types)
st.__dict__.update(attrs)
st.allowExtra = self.allowExtra
return st
def __init__(self, types, allowExtra=False):
self.types = types
self.allowExtra = allowExtra
utils.seq.mapinto(contextify, self.types)
def __call__(self, irc, msg, args, stateAttrs={}):
state = self._state(self.types[:], stateAttrs)
while state.types:
context = state.types.pop(0)
try:
context(irc, msg, args, state)
except IndexError:
raise callbacks.ArgumentError
if args and not state.allowExtra:
log.debug('args and not self.allowExtra: %r', args)
raise callbacks.ArgumentError
return state
def wrap(f, specList=[], name=None, **kw):
name = name or f.func_name
spec = Spec(specList, **kw)
def newf(self, irc, msg, args, **kwargs):
state = spec(irc, msg, args, stateAttrs={'cb': self, 'log': self.log})
self.log.debug('State before call: %s', state)
if state.errored:
self.log.debug('Refusing to call %s due to state.errored.', f)
else:
try:
f(self, irc, msg, args, *state.args, **state.kwargs)
except TypeError:
self.log.error('Spec: %s', specList)
self.log.error('Received args: %s', args)
code = f.func_code
funcArgs = inspect.getargs(code)[0][len(self.commandArgs):]
self.log.error('Extra args: %s', funcArgs)
raise
return utils.python.changeFunctionName(newf, name, f.__doc__)
__all__ = [
# Contexts.
'any', 'many',
'optional', 'additional',
'rest', 'getopts',
'first', 'reverse',
'commalist',
# Converter helpers.
'getConverter', 'addConverter', 'callConverter',
# Decorators.
'urlSnarfer', 'thread',
# Functions.
'wrap',
# Stuff for testing.
'Spec',
]
# This doesn't work. Suck.
## if world.testing:
## __all__.append('Spec')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:<|fim▁end|> | 'validChannel': validChannel,
'regexpReplacer': getReplacer,
'owner': owner, |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .mtproto_plain_sender import MtProtoPlainSender
from .authenticator import do_authentication
from .mtproto_sender import MtProtoSender<|fim▁hole|><|fim▁end|> | from .connection import Connection, ConnectionMode |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import pygame
import src.sprite as game
pygame.init()
screen = pygame.display.set_mode((400,300))
done = False
GameUpdateList = []
GameRenderList = []
catapult = game.Sprite("data/img/catapult.png", 5)
boulder = None
catapultAnim = game.Animation(catapult, 96, 96, 5, 100)
GameUpdateList.append(catapultAnim)
GameRenderList.append(catapultAnim)
# Testes --------------------------------------
def shotBoulder(dt):
global boulder
if( catapultAnim.isReady() ):
catapultAnim.pause = True
catapultAnim.forceFrame()
if(boulder == None):
boulder = game.Sprite("data/img/boulder.png")
boulder.pos[0] = 46
boulder.pos[1] = 7
GameRenderList.append(boulder)
if(boulder != None):
dt *= 0.001
boulder.pos[0] += 300*dt
boulder.pos[1] += 15*dt
if(boulder.pos[0] > screen.get_width()):
GameRenderList.remove(boulder)
boulder = None
catapultAnim.forceFrame(0)
catapultAnim.pause = False
# Testes --------------------------------------
last_time = pygame.time.get_ticks()
while not done:
screen.fill((255,255,255))<|fim▁hole|> if event.type == pygame.QUIT:
done = True
# Atualiza tempo
dt = pygame.time.get_ticks() - last_time
last_time = pygame.time.get_ticks()
# Atualiza timer da catapulta em ms
for obj in GameUpdateList:
obj.update(dt)
#catapultAnim.update(dt)
shotBoulder(dt)
for obj in GameRenderList:
obj.render(screen)
#catapultAnim.render(screen)
# Mostra tela
pygame.display.flip()
pygame.quit()<|fim▁end|> | for event in pygame.event.get(): |
<|file_name|>default.rs<|end_file_name|><|fim▁begin|>use malachite_nz::natural::Natural;
#[test]
fn test_default() {
let default = Natural::default();
assert!(default.is_valid());<|fim▁hole|><|fim▁end|> | assert_eq!(default, 0);
assert_eq!(default.to_string(), "0");
} |
<|file_name|>dijkstra.rs<|end_file_name|><|fim▁begin|>use std::hash::Hash;
use collections::hashmap::HashMap;
use collections::hashmap::HashSet;
/// Build a Dijkstra map starting from the goal nodes and using the neighbors
/// function to define the graph to up to limit distance.
pub fn build_map<N: Hash + Eq + Clone>(
goals: ~[N], neighbors: |&N| -> ~[N], limit: uint) -> HashMap<N, uint> {
assert!(goals.len() > 0);
let mut ret = HashMap::new();<|fim▁hole|> for k in goals.iter() {
ret.insert(k.clone(), 0);
}
let mut edge = ~HashSet::new();
for k in goals.iter() {
for n in neighbors(k).iter() {
// XXX: Extra clone op here, should just shuffle references until
// things get cloned for the ret structure.
if !ret.contains_key(n) { edge.insert(n.clone()); }
}
}
for dist in range(1, limit) {
for k in edge.iter() {
ret.insert(k.clone(), dist);
}
let mut new_edge = ~HashSet::new();
for k in edge.iter() {
for n in neighbors(k).iter() {
if !ret.contains_key(n) { new_edge.insert(n.clone()); }
}
}
edge = new_edge;
}
ret
}<|fim▁end|> |
// Init goal nodes to zero score. |
<|file_name|>traits.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 Jason White
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
use std::fmt;
use std::hash::Hash;
use std::io;
use std::path::Path;
use crate::detect::Detected;
use crate::error::Error;
use serde::Serialize;
use crate::res;
/// A task is a routine to be executed that produces resources as outputs.
///
/// Most tasks will be of the `Command` type. That is, the execution of a
/// process with arguments.
///
/// Since a task is anything that can be executed, we can have other built-in
/// tasks to aid with cross-platform compatibility. For example:
/// * Copying a file or directory.
/// * Downloading a file.
/// * Creating a directory.
pub trait Task:
Serialize + Ord + PartialOrd + Eq + PartialEq + Hash + fmt::Display
{
/// Executes the task. The result of a task are the resources it used and
/// the resources it output. These are its *implicit* inputs and outputs.<|fim▁hole|> root: &Path,
log: &mut dyn io::Write,
) -> Result<Detected, Error>;
/// Inputs the task knows about *a priori*. It must calculate these by
/// *only* looking at the task parameters. It should not do anything fancy
/// like running an external process to determine these.
///
/// If the task would delete a resource, it should remove it from the set of
/// inputs. It may be the case that one task adds an input, but a later task
/// deletes it. In such a case, that file is effectively a temporary file
/// and can be ignored.
fn known_inputs(&self, _resources: &mut res::Set) {}
/// Outputs the task knows about *a priori*. It must calculate these by
/// *only* looking at the task parameters. It cannot do anything fancy like
/// running an external process to determine these.
fn known_outputs(&self, _resources: &mut res::Set) {}
}<|fim▁end|> | /// Ideally, the *explicit* inputs and outputs are a subset of the
/// *implicit* inputs and outputs.
fn execute(
&self, |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import codecs
import os
import re
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
here = os.path.abspath(os.path.dirname(__file__))
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import pytest
sys.exit(pytest.main(self.test_args))
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
return codecs.open(os.path.join(here, *parts), 'r').read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
long_description = read('README.rst')
tests_require = ['pytest', 'virtualenv>=1.10', 'scripttest>=1.3', 'mock']
setup(
name="pip",
version=find_version("pip", "__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",<|fim▁hole|> "Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: PyPy"
],
keywords='easy_install distutils setuptools egg virtualenv',
author='The pip developers',
author_email='[email protected]',
url='http://www.pip-installer.org',
license='MIT',
packages=find_packages(exclude=["contrib", "docs", "tests*", "tasks"]),
package_data={
'pip._vendor.requests': ['*.pem'],
'pip._vendor.distlib._backport': ['sysconfig.cfg'],
'pip._vendor.distlib': ['t32.exe', 't64.exe', 'w32.exe', 'w64.exe'],
},
entry_points={
"console_scripts": [
"pip=pip:main",
"pip%s=pip:main" % sys.version[:1],
"pip%s=pip:main" % sys.version[:3],
],
},
tests_require=tests_require,
zip_safe=False,
extras_require={
'testing': tests_require,
},
cmdclass={'test': PyTest},
)<|fim▁end|> | "Topic :: Software Development :: Build Tools", |
<|file_name|>test_hiddeninput.py<|end_file_name|><|fim▁begin|>from django.forms import HiddenInput
from .base import WidgetTest
class HiddenInputTest(WidgetTest):
widget = HiddenInput()<|fim▁hole|>
def test_use_required_attribute(self):
# Always False to avoid browser validation on inputs hidden from the
# user.
self.assertIs(self.widget.use_required_attribute(None), False)
self.assertIs(self.widget.use_required_attribute(''), False)
self.assertIs(self.widget.use_required_attribute('foo'), False)<|fim▁end|> |
def test_render(self):
self.check_html(self.widget, 'email', '', html='<input type="hidden" name="email" />') |
<|file_name|>sphere.rs<|end_file_name|><|fim▁begin|>use geometry::bbox::{BBox, PartialBoundingBox};
use geometry::prim::Prim;
use material::Material;
use mat4::{Mat4, Transform};
use raytracer::{Ray, Intersection};
use vec3::Vec3;
#[cfg(test)]
use material::materials::FlatMaterial;
#[allow(dead_code)]
pub struct Sphere {
pub center: Vec3,
pub radius: f64,
pub material: Box<Material+Send+Sync>
}
impl PartialBoundingBox for Sphere {
fn partial_bounding_box(&self) -> Option<BBox> {
Some(BBox {
min: Vec3 {
x: self.center.x - self.radius,
y: self.center.y - self.radius,
z: self.center.z - self.radius
},
max: Vec3 {
x: self.center.x + self.radius,
y: self.center.y + self.radius,
z: self.center.z + self.radius
}
})
}<|fim▁hole|>impl Prim for Sphere {
fn intersects<'a>(&'a self, ray: &Ray, t_min: f64, t_max: f64) -> Option<Intersection<'a>> {
let i = ray.origin - self.center;
let a = 1.0;
let b = 2.0 * ray.direction.dot(&i);
let c = i.dot(&i) - self.radius * self.radius;
let discriminant = b * b - 4.0 * a * c;
if discriminant <= 0.0 {
None
} else {
// Up to two intersections
let disc_sqrt = discriminant.sqrt();
let t1 = (-b + disc_sqrt) / 2.0 * a;
let t2 = (-b - disc_sqrt) / 2.0 * a;
if t1 >= t_min && t1 <= t_max ||
t2 >= t_min && t2 <= t_max {
// Valid intersection(s): get nearer intersection
let t = if t1.abs() < t2.abs() { t1 } else { t2 };
let intersection_point = ray.origin + ray.direction.scale(t);
let n = (intersection_point - self.center).unit();
let u = 0.5 + n.z.atan2(n.x) / (::std::f64::consts::PI * 2.0);
let v = 0.5 - n.y.asin() / ::std::f64::consts::PI;
Some(Intersection {
n: n,
t: t,
u: u,
v: v,
position: intersection_point,
material: &self.material
})
} else {
None
}
}
}
fn mut_transform(&mut self, transform: &Transform) {
let new_center = Mat4::mult_p(&transform.m, &self.center);
let new_radius = if transform.m.has_scale() {
self.radius * transform.m.scale()
} else {
self.radius
};
self.center = new_center;
self.radius = new_radius;
}
}
#[test]
fn it_intersects() {
let sphere = Sphere {
center: Vec3::zero(),
radius: 1.0,
material: Box::new(FlatMaterial { color: Vec3::one() })
};
// Tests actual intersection
let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 });
let intersection = sphere.intersects(&intersecting_ray, 0.0, 10.0).unwrap();
assert_eq!(intersection.position.x, 0.0);
assert_eq!(intersection.position.y, 0.0);
assert_eq!(intersection.position.z, -1.0);
assert_eq!(intersection.n.x, 0.0);
assert_eq!(intersection.n.y, 0.0);
assert_eq!(intersection.n.z, -1.0);
// Ray off to the sides
let mut non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: 100.0, y: 100.0, z: 0.1 });
let mut non_intersection = sphere.intersects(&non_intersecting_ray, 0.0, 10.0);
assert!(non_intersection.is_none());
non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: -100.0, y: -100.0, z: 0.1 });
non_intersection = sphere.intersects(&non_intersecting_ray, 0.0, 10.0);
assert!(non_intersection.is_none());
// Ray in opposite direction
non_intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 {x: 0.0, y: 0.0, z: -1.0 });
non_intersection = sphere.intersects(&non_intersecting_ray, 0.0, 10.0);
assert!(non_intersection.is_none());
}
#[test]
fn it_intersects_only_in_tmin_tmax() {
let sphere = Sphere {
center: Vec3::zero(),
radius: 1.0,
material: Box::new(FlatMaterial { color: Vec3::one() })
};
// Tests tmin
let intersecting_ray = Ray::new(Vec3 { x: 0.0, y: 0.0, z: -2.0 }, Vec3 { x: 0.0, y: 0.0, z: 1.0 });
let mut non_intersection = sphere.intersects(&intersecting_ray, 1000.0, 10000.0);
assert!(non_intersection.is_none());
// Tests tmax
non_intersection = sphere.intersects(&intersecting_ray, 0.0, 0.0001);
assert!(non_intersection.is_none());
}<|fim▁end|> | }
|
<|file_name|>ntp.rs<|end_file_name|><|fim▁begin|>/* Copyright (C) 2017-2020 Open Information Security Foundation
*
* You can copy, redistribute or modify this Program under the terms of
* the GNU General Public License version 2 as published by the Free
* Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* version 2 along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301, USA.
*/
// written by Pierre Chifflier <[email protected]>
extern crate ntp_parser;
use self::ntp_parser::*;
use crate::core;
use crate::core::{AppProto,Flow,ALPROTO_UNKNOWN,ALPROTO_FAILED};
use crate::applayer::{self, *};
use std;
use std::ffi::CString;
use nom;
#[derive(AppLayerEvent)]
pub enum NTPEvent {
UnsolicitedResponse ,
MalformedData,
NotRequest,
NotResponse,
}
pub struct NTPState {
/// List of transactions for this session
transactions: Vec<NTPTransaction>,
/// Events counter
events: u16,
/// tx counter for assigning incrementing id's to tx's
tx_id: u64,
}
#[derive(Debug)]
pub struct NTPTransaction {
/// The NTP reference ID
pub xid: u32,
/// The internal transaction id
id: u64,
tx_data: applayer::AppLayerTxData,
}
impl Transaction for NTPTransaction {
fn id(&self) -> u64 {
self.id
}
}
impl NTPState {
pub fn new() -> NTPState {
NTPState{
transactions: Vec::new(),
events: 0,
tx_id: 0,
}
}
}
impl State<NTPTransaction> for NTPState {
fn get_transactions(&self) -> &[NTPTransaction] {
&self.transactions
}
}
impl NTPState {
/// Parse an NTP request message
///
/// Returns 0 if successful, or -1 on error
fn parse(&mut self, i: &[u8], _direction: u8) -> i32 {
match parse_ntp(i) {
Ok((_,ref msg)) => {
// SCLogDebug!("parse_ntp: {:?}",msg);
if msg.mode == NtpMode::SymmetricActive || msg.mode == NtpMode::Client {
let mut tx = self.new_tx();
// use the reference id as identifier
tx.xid = msg.ref_id;
self.transactions.push(tx);
}
0
},
Err(nom::Err::Incomplete(_)) => {
SCLogDebug!("Insufficient data while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
Err(_) => {
SCLogDebug!("Error while parsing NTP data");
self.set_event(NTPEvent::MalformedData);
-1
},
}
}
fn free(&mut self) {
// All transactions are freed when the `transactions` object is freed.
// But let's be explicit
self.transactions.clear();
}
fn new_tx(&mut self) -> NTPTransaction {
self.tx_id += 1;
NTPTransaction::new(self.tx_id)
}
pub fn get_tx_by_id(&mut self, tx_id: u64) -> Option<&NTPTransaction> {
self.transactions.iter().find(|&tx| tx.id == tx_id + 1)<|fim▁hole|> debug_assert!(tx != None);
if let Some(idx) = tx {
let _ = self.transactions.remove(idx);
}
}
/// Set an event. The event is set on the most recent transaction.
pub fn set_event(&mut self, event: NTPEvent) {
if let Some(tx) = self.transactions.last_mut() {
tx.tx_data.set_event(event as u8);
self.events += 1;
}
}
}
impl NTPTransaction {
pub fn new(id: u64) -> NTPTransaction {
NTPTransaction {
xid: 0,
id: id,
tx_data: applayer::AppLayerTxData::new(),
}
}
}
/// Returns *mut NTPState
#[no_mangle]
pub extern "C" fn rs_ntp_state_new(_orig_state: *mut std::os::raw::c_void, _orig_proto: AppProto) -> *mut std::os::raw::c_void {
let state = NTPState::new();
let boxed = Box::new(state);
return Box::into_raw(boxed) as *mut _;
}
/// Params:
/// - state: *mut NTPState as void pointer
#[no_mangle]
pub extern "C" fn rs_ntp_state_free(state: *mut std::os::raw::c_void) {
let mut ntp_state = unsafe{ Box::from_raw(state as *mut NTPState) };
ntp_state.free();
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_request(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 0) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_parse_response(_flow: *const core::Flow,
state: *mut std::os::raw::c_void,
_pstate: *mut std::os::raw::c_void,
stream_slice: StreamSlice,
_data: *const std::os::raw::c_void,
) -> AppLayerResult {
let state = cast_pointer!(state,NTPState);
if state.parse(stream_slice.as_slice(), 1) < 0 {
return AppLayerResult::err();
}
AppLayerResult::ok()
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx(state: *mut std::os::raw::c_void,
tx_id: u64)
-> *mut std::os::raw::c_void
{
let state = cast_pointer!(state,NTPState);
match state.get_tx_by_id(tx_id) {
Some(tx) => tx as *const _ as *mut _,
None => std::ptr::null_mut(),
}
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_get_tx_count(state: *mut std::os::raw::c_void)
-> u64
{
let state = cast_pointer!(state,NTPState);
state.tx_id
}
#[no_mangle]
pub unsafe extern "C" fn rs_ntp_state_tx_free(state: *mut std::os::raw::c_void,
tx_id: u64)
{
let state = cast_pointer!(state,NTPState);
state.free_tx(tx_id);
}
#[no_mangle]
pub extern "C" fn rs_ntp_tx_get_alstate_progress(_tx: *mut std::os::raw::c_void,
_direction: u8)
-> std::os::raw::c_int
{
1
}
static mut ALPROTO_NTP : AppProto = ALPROTO_UNKNOWN;
#[no_mangle]
pub extern "C" fn ntp_probing_parser(_flow: *const Flow,
_direction: u8,
input:*const u8, input_len: u32,
_rdir: *mut u8) -> AppProto
{
let slice: &[u8] = unsafe { std::slice::from_raw_parts(input as *mut u8, input_len as usize) };
let alproto = unsafe{ ALPROTO_NTP };
match parse_ntp(slice) {
Ok((_, ref msg)) => {
if msg.version == 3 || msg.version == 4 {
return alproto;
} else {
return unsafe{ALPROTO_FAILED};
}
},
Err(nom::Err::Incomplete(_)) => {
return ALPROTO_UNKNOWN;
},
Err(_) => {
return unsafe{ALPROTO_FAILED};
},
}
}
export_tx_data_get!(rs_ntp_get_tx_data, NTPTransaction);
const PARSER_NAME : &'static [u8] = b"ntp\0";
#[no_mangle]
pub unsafe extern "C" fn rs_register_ntp_parser() {
let default_port = CString::new("123").unwrap();
let parser = RustParser {
name : PARSER_NAME.as_ptr() as *const std::os::raw::c_char,
default_port : default_port.as_ptr(),
ipproto : core::IPPROTO_UDP,
probe_ts : Some(ntp_probing_parser),
probe_tc : Some(ntp_probing_parser),
min_depth : 0,
max_depth : 16,
state_new : rs_ntp_state_new,
state_free : rs_ntp_state_free,
tx_free : rs_ntp_state_tx_free,
parse_ts : rs_ntp_parse_request,
parse_tc : rs_ntp_parse_response,
get_tx_count : rs_ntp_state_get_tx_count,
get_tx : rs_ntp_state_get_tx,
tx_comp_st_ts : 1,
tx_comp_st_tc : 1,
tx_get_progress : rs_ntp_tx_get_alstate_progress,
get_eventinfo : Some(NTPEvent::get_event_info),
get_eventinfo_byid : Some(NTPEvent::get_event_info_by_id),
localstorage_new : None,
localstorage_free : None,
get_files : None,
get_tx_iterator : Some(applayer::state_get_tx_iterator::<NTPState, NTPTransaction>),
get_tx_data : rs_ntp_get_tx_data,
apply_tx_config : None,
flags : APP_LAYER_PARSER_OPT_UNIDIR_TXS,
truncate : None,
get_frame_id_by_name: None,
get_frame_name_by_id: None,
};
let ip_proto_str = CString::new("udp").unwrap();
if AppLayerProtoDetectConfProtoDetectionEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let alproto = AppLayerRegisterProtocolDetection(&parser, 1);
// store the allocated ID for the probe function
ALPROTO_NTP = alproto;
if AppLayerParserConfParserEnabled(ip_proto_str.as_ptr(), parser.name) != 0 {
let _ = AppLayerRegisterParser(&parser, alproto);
}
} else {
SCLogDebug!("Protocol detector and parser disabled for NTP.");
}
}
#[cfg(test)]
mod tests {
use super::NTPState;
#[test]
fn test_ntp_parse_request_valid() {
// A UDP NTP v4 request, in client mode
const REQ : &[u8] = &[
0x23, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x18, 0x57, 0xab, 0xc3, 0x4a, 0x5f, 0x2c, 0xfe
];
let mut state = NTPState::new();
assert_eq!(0, state.parse(REQ, 0));
}
}<|fim▁end|> | }
fn free_tx(&mut self, tx_id: u64) {
let tx = self.transactions.iter().position(|tx| tx.id == tx_id + 1); |
<|file_name|>Endpoints.java<|end_file_name|><|fim▁begin|>package com.mozu.api.utils;
public class Endpoints {<|fim▁hole|> public static final String SITES_END_POINT = "api/platform/tenants/%s/sites";
public static final String ATTRIBUTE_END_POINT = "api/commerce/catalog/admin/attributedefinition/attributes";
public static final String VOCABULARY_END_POINT = "api/commerce/catalog/admin/attributedefinition/attributes/%s/VocabularyValues";
public static final String PRODUCTTYPE_END_POINT = "api/commerce/catalog/admin/attributedefinition/producttypes";
public static final String ORDER_END_POINT = "api/commerce/orders";
public static final String APPLICATIONSTATUS_END_POINT = "api/commerce/settings/applicationstatus";
public static final String MZDB_APP_DATA_ENDPOINT = "api/platform/appdata";
public static final String MZDB_SITE_DATA_ENDPOINT = "api/platform/sitedata";
public static final String MZDB_TENANT_DATA_ENDPOINT = "api/platform/tenantdata";
}<|fim▁end|> | public static final String AUTH_URL = "api/platform/applications/authtickets";
public static final String AUTH_REFRESH_URL = "api/platform/applications/authtickets/refresh-ticket/%s";
public static final String TENANT_END_POINT = "api/platform/tenants"; |
<|file_name|>my_json.py<|end_file_name|><|fim▁begin|>from typing import Dict, Any
from depccg.tree import Tree
from depccg.cat import Category
def _json_of_category(category: Category) -> Dict[str, Any]:
def rec(node):
if node.is_functor:
return {
'slash': node.slash,
'left': rec(node.left),
'right': rec(node.right)
}
else:
feature = node.features
return {
'base': node.base,
'feature': feature if len(feature) > 0 else None
}
return rec(category)
<|fim▁hole|>
def json_of(
tree: Tree,
full: bool = False
) -> Dict[str, Any]:
"""a tree in Python dict object.
Args:
tree (Tree): tree object
full (bool): whether to decomopose categories into its components, i.e.,
{
'slash': '/',
'left': {'base': 'S', 'feature': 'adj'},
'right': {'base': 'NP', 'feature': None},
},
or just as a string "S[adj]/NP".
Returns:
str: tree string in the CoNLL format
"""
def rec(node: Tree) -> Dict[str, Any]:
if node.is_leaf:
res = dict(node.token)
res['cat'] = _json_of_category(node.cat) if full else str(node.cat)
return res
else:
return {
'type': node.op_string,
'cat': _json_of_category(node.cat) if full else str(node.cat),
'children': [rec(child) for child in node.children]
}
return rec(tree)<|fim▁end|> | |
<|file_name|>make_confidence_report_bundle_examples.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
make_confidence_report_bundle_examples.py
Usage:
make_confidence_report_bundle_examples.py model.joblib a.npy
make_confidence_report_bundle_examples.py model.joblib a.npy b.npy c.npy
where model.joblib is a file created by cleverhans.serial.save containing
a picklable cleverhans.model.Model instance and each examples_i.npy is
a saved numpy array containing adversarial examples for a whole dataset.
Usually example_i.npy is the output of make_confidence_report.py or
make_confidence_report_bundled.py.
This script uses max-confidence attack bundling
( https://openreview.net/forum?id=H1g0piA9tQ )
to combine adversarial example datasets that were created earlier.
It will save a ConfidenceReport to to model_bundled_examples_report.joblib.
The report can be later loaded by another
script using cleverhans.serial.load.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import warnings
import numpy as np
import tensorflow as tf
from cleverhans.utils_tf import silence
# We need to disable pylint's complaints about import order because `silence`
# works only if it is called before the other imports.
# pylint: disable=C0413
silence()
from cleverhans.attack_bundling import bundle_examples_with_goal, MaxConfidence
from cleverhans import serial
from cleverhans.compat import flags
from cleverhans.confidence_report import BATCH_SIZE
from cleverhans.confidence_report import TRAIN_START, TRAIN_END
from cleverhans.confidence_report import TEST_START, TEST_END
from cleverhans.confidence_report import WHICH_SET
FLAGS = flags.FLAGS
def main(argv=None):
"""
Make a confidence report and save it to disk.
"""
assert len(argv) >= 3
_name_of_script = argv[0]
model_filepath = argv[1]
adv_x_filepaths = argv[2:]
sess = tf.Session()
with sess.as_default():
model = serial.load(model_filepath)
factory = model.dataset_factory
factory.kwargs['train_start'] = FLAGS.train_start
factory.kwargs['train_end'] = FLAGS.train_end<|fim▁hole|> dataset = factory()
adv_x_list = [np.load(filepath) for filepath in adv_x_filepaths]
x, y = dataset.get_set(FLAGS.which_set)
for adv_x in adv_x_list:
assert adv_x.shape == x.shape, (adv_x.shape, x.shape)
# Make sure these were made for the right dataset with right scaling
# arguments, etc.
assert adv_x.min() >= 0. - dataset.kwargs['center'] * dataset.max_val
assert adv_x.max() <= dataset.max_val
data_range = dataset.max_val * (1. + dataset.kwargs['center'])
if adv_x.max() - adv_x.min() <= .8 * data_range:
warnings.warn("Something is weird. Your adversarial examples use "
"less than 80% of the data range."
"This might mean you generated them for a model with "
"inputs in [0, 1] and are now using them for a model "
"with inputs in [0, 255] or something like that. "
"Or it could be OK if you're evaluating on a very small "
"batch.")
report_path = FLAGS.report_path
if report_path is None:
suffix = "_bundled_examples_report.joblib"
assert model_filepath.endswith('.joblib')
report_path = model_filepath[:-len('.joblib')] + suffix
goal = MaxConfidence()
bundle_examples_with_goal(sess, model, adv_x_list, y, goal,
report_path, batch_size=FLAGS.batch_size)
if __name__ == '__main__':
flags.DEFINE_string('report_path', None, 'Report path')
flags.DEFINE_integer('train_start', TRAIN_START, 'Starting point (inclusive)'
'of range of train examples to use')
flags.DEFINE_integer('train_end', TRAIN_END, 'Ending point (non-inclusive) '
'of range of train examples to use')
flags.DEFINE_integer('test_start', TEST_START, 'Starting point '
'(inclusive) of range of test examples to use')
flags.DEFINE_integer('test_end', TEST_END, 'End point (non-inclusive) of '
'range of test examples to use')
flags.DEFINE_string('which_set', WHICH_SET, '"train" or "test"')
flags.DEFINE_integer('batch_size', BATCH_SIZE, 'batch size')
tf.app.run()<|fim▁end|> | factory.kwargs['test_start'] = FLAGS.test_start
factory.kwargs['test_end'] = FLAGS.test_end |
<|file_name|>cover.py<|end_file_name|><|fim▁begin|>"""Support for MySensors covers."""
from homeassistant.components import mysensors
from homeassistant.components.cover import ATTR_POSITION, DOMAIN, CoverDevice
from homeassistant.const import STATE_OFF, STATE_ON
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Set up the mysensors platform for covers."""
mysensors.setup_mysensors_platform(
hass, DOMAIN, discovery_info, MySensorsCover,
async_add_entities=async_add_entities)
class MySensorsCover(mysensors.device.MySensorsEntity, CoverDevice):
"""Representation of the value of a MySensors Cover child node."""
@property
def assumed_state(self):
"""Return True if unable to access real state of entity."""
return self.gateway.optimistic
@property
def is_closed(self):<|fim▁hole|> return self._values.get(set_req.V_DIMMER) == 0
return self._values.get(set_req.V_LIGHT) == STATE_OFF
@property
def current_cover_position(self):
"""Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
set_req = self.gateway.const.SetReq
return self._values.get(set_req.V_DIMMER)
async def async_open_cover(self, **kwargs):
"""Move the cover up."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_UP, 1)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 100
else:
self._values[set_req.V_LIGHT] = STATE_ON
self.async_schedule_update_ha_state()
async def async_close_cover(self, **kwargs):
"""Move the cover down."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DOWN, 1)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
if set_req.V_DIMMER in self._values:
self._values[set_req.V_DIMMER] = 0
else:
self._values[set_req.V_LIGHT] = STATE_OFF
self.async_schedule_update_ha_state()
async def async_set_cover_position(self, **kwargs):
"""Move the cover to a specific position."""
position = kwargs.get(ATTR_POSITION)
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_DIMMER, position)
if self.gateway.optimistic:
# Optimistically assume that cover has changed state.
self._values[set_req.V_DIMMER] = position
self.async_schedule_update_ha_state()
async def async_stop_cover(self, **kwargs):
"""Stop the device."""
set_req = self.gateway.const.SetReq
self.gateway.set_child_value(
self.node_id, self.child_id, set_req.V_STOP, 1)<|fim▁end|> | """Return True if cover is closed."""
set_req = self.gateway.const.SetReq
if set_req.V_DIMMER in self._values: |
<|file_name|>saved_model_save_load_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for saving and loading using tf's saved_model APIs with DS."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.distribute import combinations
from tensorflow.python.distribute import saved_model_test_base as test_base
from tensorflow.python.eager import test
from tensorflow.python.saved_model import saved_model
class SavedModelSaveAndLoadTest(test_base.TestSavedModelBase):
def setUp(self):
self._root_dir = 'saved_model_save_load'<|fim▁hole|>
def _save_model(self, model, saved_dir):
saved_model.save(model, saved_dir)
def _load_and_run_model(self, distribution, saved_dir, predict_dataset,
output_name):
return test_base.load_and_run_with_saved_model_api(distribution, saved_dir,
predict_dataset,
output_name)
@combinations.generate(test_base.simple_models_with_strategies())
def test_save_no_strategy_restore_strategy(self, model_and_input,
distribution):
self.run_test_save_no_strategy_restore_strategy(model_and_input,
distribution)
@combinations.generate(
combinations.times(test_base.simple_models_with_strategies(),
combinations.combine(save_in_scope=[True, False])))
def test_save_strategy_restore_no_strategy(self, model_and_input,
distribution, save_in_scope):
if save_in_scope:
self.skipTest(('Saving model within tf.distribute.Strategy scope is not ',
'supported.'))
self.run_test_save_strategy_restore_no_strategy(model_and_input,
distribution, save_in_scope)
@combinations.generate(
combinations.times(test_base.simple_models_with_strategy_pairs(),
combinations.combine(save_in_scope=[True, False])))
def test_save_strategy_restore_strategy(self, model_and_input,
distribution_for_saving,
distribution_for_restoring,
save_in_scope):
if save_in_scope:
self.skipTest(('Saving model within tf.distribute.Strategy scope is not ',
'supported.'))
self.run_test_save_strategy_restore_strategy(model_and_input,
distribution_for_saving,
distribution_for_restoring,
save_in_scope)
if __name__ == '__main__':
test.main()<|fim▁end|> | super(SavedModelSaveAndLoadTest, self).setUp() |
<|file_name|>arrayprint.py<|end_file_name|><|fim▁begin|>"""Array printing function
$Id: arrayprint.py,v 1.9 2005/09/13 13:58:44 teoliphant Exp $
"""
__all__ = ["array2string", "array_str", "array_repr", "set_string_function",
"set_printoptions", "get_printoptions", "printoptions",
"format_float_positional", "format_float_scientific"]
__docformat__ = 'restructuredtext'
#
# Written by Konrad Hinsen <[email protected]>
# last revision: 1996-3-13
# modified by Jim Hugunin 1997-3-3 for repr's and str's (and other details)
# and by Perry Greenfield 2000-4-1 for numarray
# and by Travis Oliphant 2005-8-22 for numpy
# Note: Both scalartypes.c.src and arrayprint.py implement strs for numpy
# scalars but for different purposes. scalartypes.c.src has str/reprs for when
# the scalar is printed on its own, while arrayprint.py has strs for when
# scalars are printed inside an ndarray. Only the latter strs are currently
# user-customizable.
import functools
import numbers
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
import numpy as np
from . import numerictypes as _nt
from .umath import absolute, isinf, isfinite, isnat
from . import multiarray
from .multiarray import (array, dragon4_positional, dragon4_scientific,
datetime_as_string, datetime_data, ndarray,
set_legacy_print_mode)
from .fromnumeric import any
from .numeric import concatenate, asarray, errstate
from .numerictypes import (longlong, intc, int_, float_, complex_, bool_,
flexible)
from .overrides import array_function_dispatch, set_module
import operator
import warnings
import contextlib
# Module-level print-option state, mutated by set_printoptions() and copied
# out by get_printoptions().
_format_options = {
    'edgeitems': 3,  # repr N leading and trailing items of each dimension
    'threshold': 1000,  # total items > triggers array summarization
    'floatmode': 'maxprec',  # how 'precision' is interpreted (see set_printoptions)
    'precision': 8,  # precision of floating point representations
    'suppress': False,  # suppress printing small floating values in exp format
    'linewidth': 75,  # characters per line before inserting line breaks
    'nanstr': 'nan',  # string used for floating-point not-a-number
    'infstr': 'inf',  # string used for floating-point infinity
    'sign': '-',  # sign handling for positive floats: '-', '+' or ' '
    'formatter': None,  # optional dict of per-type formatting callables
    'legacy': False}  # '1.13' enables numpy-1.13 compatible printing
def _make_options_dict(precision=None, threshold=None, edgeitems=None,
linewidth=None, suppress=None, nanstr=None, infstr=None,
sign=None, formatter=None, floatmode=None, legacy=None):
""" make a dictionary out of the non-None arguments, plus sanity checks """
options = {k: v for k, v in locals().items() if v is not None}
if suppress is not None:
options['suppress'] = bool(suppress)
modes = ['fixed', 'unique', 'maxprec', 'maxprec_equal']
if floatmode not in modes + [None]:
raise ValueError("floatmode option must be one of " +
", ".join('"{}"'.format(m) for m in modes))
if sign not in [None, '-', '+', ' ']:
raise ValueError("sign option must be one of ' ', '+', or '-'")
if legacy not in [None, False, '1.13']:
warnings.warn("legacy printing option can currently only be '1.13' or "
"`False`", stacklevel=3)
if threshold is not None:
# forbid the bad threshold arg suggested by stack overflow, gh-12351
if not isinstance(threshold, numbers.Number):
raise TypeError("threshold must be numeric")
if np.isnan(threshold):
raise ValueError("threshold must be non-NAN, try "
"sys.maxsize for untruncated representation")
if precision is not None:
# forbid the bad precision arg as suggested by issue #18254
try:
options['precision'] = operator.index(precision)
except TypeError as e:
raise TypeError('precision must be an integer') from e
return options
@set_module('numpy')
def set_printoptions(precision=None, threshold=None, edgeitems=None,
                     linewidth=None, suppress=None, nanstr=None, infstr=None,
                     formatter=None, sign=None, floatmode=None, *, legacy=None):
    """
    Set printing options.

    These options determine the way floating point numbers, arrays and other
    NumPy objects are displayed.

    Parameters
    ----------
    precision : int or None, optional
        Number of digits of precision for floating point output (default 8).
        May be None if `floatmode` is not `fixed`, to print as many digits
        as necessary to uniquely specify the value.
    threshold : int, optional
        Total number of array elements which trigger summarization rather
        than full repr (default 1000). Pass `sys.maxsize` to always use the
        full repr.
    edgeitems : int, optional
        Number of array items in summary at beginning and end of each
        dimension (default 3).
    linewidth : int, optional
        Number of characters per line for inserting line breaks (default 75).
    suppress : bool, optional
        If True, always print floats in fixed-point notation. If False,
        scientific notation is used when the smallest absolute value is
        < 1e-4 or the max/min absolute-value ratio exceeds 1e3 (default
        False).
    nanstr : str, optional
        String representation of floating point not-a-number (default nan).
    infstr : str, optional
        String representation of floating point infinity (default inf).
    formatter : dict of callables, optional
        Per-type formatting functions returning strings. Individual keys:
        'bool', 'int', 'timedelta', 'datetime', 'float', 'longfloat',
        'complexfloat', 'longcomplexfloat', 'numpystr', 'object'. Group
        keys: 'all', 'int_kind', 'float_kind', 'complex_kind', 'str_kind'.
        Always reset by a call to this function.
    sign : string, either '-', '+', or ' ', optional
        Sign printing for floating-point types: '+' always prints the sign,
        ' ' prints a space for positive values, '-' omits it (default '-').
    floatmode : str, optional
        Interpretation of `precision`: one of 'fixed', 'unique', 'maxprec'
        or 'maxprec_equal' (default 'maxprec_equal'); see `array2string`.
    legacy : string or `False`, optional
        If `'1.13'`, approximate numpy 1.13 print output (space in the sign
        position of floats, different 0d behavior). `False` disables legacy
        mode. Unrecognized strings are ignored with a warning.

        .. versionadded:: 1.14.0

    See Also
    --------
    get_printoptions, printoptions, set_string_function, array2string

    Notes
    -----
    `formatter` is always reset with a call to `set_printoptions`. Use
    `printoptions` as a context manager to set the values temporarily.
    """
    new_opts = _make_options_dict(precision, threshold, edgeitems, linewidth,
                                  suppress, nanstr, infstr, sign, formatter,
                                  floatmode, legacy)
    # the formatter is deliberately reset on every call, even when None
    new_opts['formatter'] = formatter
    _format_options.update(new_opts)

    # propagate legacy mode down to the C printing code
    if _format_options['legacy'] == '1.13':
        set_legacy_print_mode(113)
        # reset the sign option in legacy mode to avoid confusion
        _format_options['sign'] = '-'
    elif _format_options['legacy'] is False:
        set_legacy_print_mode(0)
@set_module('numpy')
def get_printoptions():
    """
    Return the current print options.

    Returns
    -------
    print_opts : dict
        Dictionary of the current print options, with keys ``precision``,
        ``threshold``, ``edgeitems``, ``linewidth``, ``suppress``,
        ``nanstr``, ``infstr``, ``formatter``, ``sign``, ``floatmode`` and
        ``legacy``. For a full description see `set_printoptions`.

    See Also
    --------
    set_printoptions, printoptions, set_string_function
    """
    # hand back a copy so callers cannot mutate the module-level state
    return dict(_format_options)
@set_module('numpy')
@contextlib.contextmanager
def printoptions(*args, **kwargs):
    """Context manager that temporarily applies print options.

    All arguments are forwarded to `set_printoptions`; the previous options
    are restored when the ``with`` block exits (even on error). The
    ``as``-target of the ``with`` statement receives the options in effect
    inside the block.

    See Also
    --------
    set_printoptions, get_printoptions
    """
    saved = np.get_printoptions()
    try:
        np.set_printoptions(*args, **kwargs)
        yield np.get_printoptions()
    finally:
        # restore regardless of exceptions raised inside the block
        np.set_printoptions(**saved)
def _leading_trailing(a, edgeitems, index=()):
"""
Keep only the N-D corners (leading and trailing edges) of an array.
Should be passed a base-class ndarray, since it makes no guarantees about
preserving subclasses.
"""
axis = len(index)
if axis == a.ndim:
return a[index]
if a.shape[axis] > 2*edgeitems:
return concatenate((
_leading_trailing(a, edgeitems, index + np.index_exp[ :edgeitems]),
_leading_trailing(a, edgeitems, index + np.index_exp[-edgeitems:])
), axis=axis)
else:
return _leading_trailing(a, edgeitems, index + np.index_exp[:])
def _object_format(o):
""" Object arrays containing lists should be printed unambiguously """
if type(o) is list:
fmt = 'list({!r})'
else:
fmt = '{!r}'
return fmt.format(o)
def repr_format(x):
    # Element formatter using Python's repr (used for numpy string dtypes).
    return repr(x)
def str_format(x):
    # Element formatter using str (used for unstructured void dtypes).
    return str(x)
def _get_formatdict(data, *, precision, floatmode, suppress, sign, legacy,
formatter, **kwargs):
# note: extra arguments in kwargs are ignored
# wrapped in lambdas to avoid taking a code path with the wrong type of data
formatdict = {
'bool': lambda: BoolFormat(data),
'int': lambda: IntegerFormat(data),
'float': lambda: FloatingFormat(
data, precision, floatmode, suppress, sign, legacy=legacy),
'longfloat': lambda: FloatingFormat(
data, precision, floatmode, suppress, sign, legacy=legacy),
'complexfloat': lambda: ComplexFloatingFormat(
data, precision, floatmode, suppress, sign, legacy=legacy),
'longcomplexfloat': lambda: ComplexFloatingFormat(
data, precision, floatmode, suppress, sign, legacy=legacy),
'datetime': lambda: DatetimeFormat(data, legacy=legacy),
'timedelta': lambda: TimedeltaFormat(data),
'object': lambda: _object_format,
'void': lambda: str_format,
'numpystr': lambda: repr_format}
# we need to wrap values in `formatter` in a lambda, so that the interface
# is the same as the above values.
def indirect(x):
return lambda: x
if formatter is not None:
fkeys = [k for k in formatter.keys() if formatter[k] is not None]
if 'all' in fkeys:
for key in formatdict.keys():
formatdict[key] = indirect(formatter['all'])
if 'int_kind' in fkeys:
for key in ['int']:
formatdict[key] = indirect(formatter['int_kind'])
if 'float_kind' in fkeys:
for key in ['float', 'longfloat']:
formatdict[key] = indirect(formatter['float_kind'])
if 'complex_kind' in fkeys:
for key in ['complexfloat', 'longcomplexfloat']:
formatdict[key] = indirect(formatter['complex_kind'])
if 'str_kind' in fkeys:
formatdict['numpystr'] = indirect(formatter['str_kind'])
for key in formatdict.keys():
if key in fkeys:
formatdict[key] = indirect(formatter[key])
return formatdict
def _get_format_function(data, **options):
    """
    Find the right formatting function for the array's dtype.
    """
    dtype_ = data.dtype
    scalar_type = dtype_.type
    formatdict = _get_formatdict(data, **options)

    # structured void dtypes get a dedicated composite formatter
    if issubclass(scalar_type, _nt.void) and dtype_.names is not None:
        return StructuredVoidFormat.from_data(data, **options)

    if issubclass(scalar_type, _nt.bool_):
        key = 'bool'
    elif issubclass(scalar_type, _nt.integer):
        # timedelta64 is a subclass of integer, so it must be tested first
        key = ('timedelta' if issubclass(scalar_type, _nt.timedelta64)
               else 'int')
    elif issubclass(scalar_type, _nt.floating):
        key = ('longfloat' if issubclass(scalar_type, _nt.longfloat)
               else 'float')
    elif issubclass(scalar_type, _nt.complexfloating):
        key = ('longcomplexfloat' if issubclass(scalar_type, _nt.clongfloat)
               else 'complexfloat')
    elif issubclass(scalar_type, (_nt.unicode_, _nt.string_)):
        key = 'numpystr'
    elif issubclass(scalar_type, _nt.datetime64):
        key = 'datetime'
    elif issubclass(scalar_type, _nt.object_):
        key = 'object'
    elif issubclass(scalar_type, _nt.void):
        key = 'void'
    else:
        # fall back to repr-based formatting for unknown scalar types
        key = 'numpystr'
    return formatdict[key]()
def _recursive_guard(fillvalue='...'):
"""
Like the python 3.2 reprlib.recursive_repr, but forwards *args and **kwargs
Decorates a function such that if it calls itself with the same first
argument, it returns `fillvalue` instead of recursing.
Largely copied from reprlib.recursive_repr
"""
def decorating_function(f):
repr_running = set()
@functools.wraps(f)
def wrapper(self, *args, **kwargs):
key = id(self), get_ident()
if key in repr_running:
return fillvalue
repr_running.add(key)
try:
return f(self, *args, **kwargs)
finally:
repr_running.discard(key)
return wrapper
return decorating_function
# gracefully handle recursive calls, when object arrays contain themselves
@_recursive_guard()
def _array2string(a, options, separator=' ', prefix=""):
    """Core of array2string, guarded against self-referencing object arrays."""
    # Work on a base-class ndarray view: the formatter __init__s in
    # _get_format_function cannot deal with subclasses yet, and subclasses
    # returning 0d arrays in place of scalars would recurse in _formatArray.
    data = asarray(a)
    if a.shape == ():
        a = data

    if a.size > options['threshold']:
        summary_insert = "..."
        data = _leading_trailing(data, options['edgeitems'])
    else:
        summary_insert = ""

    # find the right formatting function for the array
    format_function = _get_format_function(data, **options)

    # one space to skip over "[", plus the width of the caller's prefix
    # (e.g. "array(")
    next_line_prefix = " " + " " * len(prefix)

    return _formatArray(a, format_function, options['linewidth'],
                        next_line_prefix, separator, options['edgeitems'],
                        summary_insert, options['legacy'])
def _array2string_dispatcher(
a, max_line_width=None, precision=None,
suppress_small=None, separator=None, prefix=None,
style=None, formatter=None, threshold=None,
edgeitems=None, sign=None, floatmode=None, suffix=None,
*, legacy=None):
return (a,)
@array_function_dispatch(_array2string_dispatcher, module='numpy')
def array2string(a, max_line_width=None, precision=None,
                 suppress_small=None, separator=' ', prefix="",
                 style=np._NoValue, formatter=None, threshold=None,
                 edgeitems=None, sign=None, floatmode=None, suffix="",
                 *, legacy=None):
    """
    Return a string representation of an array.

    Parameters
    ----------
    a : ndarray
        Input array.
    max_line_width : int, optional
        Inserts newlines if text is longer than `max_line_width`.
        Defaults to ``numpy.get_printoptions()['linewidth']``.
    precision : int or None, optional
        Floating point precision.
        Defaults to ``numpy.get_printoptions()['precision']``.
    suppress_small : bool, optional
        Represent numbers "very close" to zero as zero; default is False.
        Defaults to ``numpy.get_printoptions()['suppress']``.
    separator : str, optional
        Inserted between elements.
    prefix : str, optional
    suffix : str, optional
        Only the lengths of `prefix` and `suffix` are used: the output is
        left-padded by ``len(prefix)`` and wrapping is forced at the column
        ``max_line_width - len(suffix)``, matching
        ``prefix + array2string(a) + suffix``.
    style : _NoValue, optional
        Has no effect, do not use.

        .. deprecated:: 1.14.0
    formatter : dict of callables, optional
        Per-type formatting functions returning strings. Individual keys:
        'bool', 'int', 'timedelta', 'datetime', 'float', 'longfloat',
        'complexfloat', 'longcomplexfloat', 'void', 'numpystr'. Group keys:
        'all', 'int_kind', 'float_kind', 'complex_kind', 'str_kind'.
    threshold : int, optional
        Total number of array elements which trigger summarization rather
        than full repr.
        Defaults to ``numpy.get_printoptions()['threshold']``.
    edgeitems : int, optional
        Number of array items in summary at beginning and end of each
        dimension.
        Defaults to ``numpy.get_printoptions()['edgeitems']``.
    sign : string, either '-', '+', or ' ', optional
        Sign printing for floating-point types.
        Defaults to ``numpy.get_printoptions()['sign']``.
    floatmode : str, optional
        One of 'fixed', 'unique', 'maxprec' or 'maxprec_equal'; controls the
        interpretation of the `precision` option (see `set_printoptions`).
        Defaults to ``numpy.get_printoptions()['floatmode']``.
    legacy : string or `False`, optional
        If `'1.13'`, approximate numpy 1.13 print output. `False` disables
        legacy mode. Unrecognized strings are ignored with a warning.

        .. versionadded:: 1.14.0

    Returns
    -------
    array_str : str
        String representation of the array.

    Raises
    ------
    TypeError
        if a callable in `formatter` does not return a string.

    See Also
    --------
    array_str, array_repr, set_printoptions, get_printoptions

    Notes
    -----
    If a formatter is specified for a certain type, the `precision` keyword
    is ignored for that type. `array_repr` and `array_str` use
    `array2string` internally, so keywords with the same name work
    identically in all three functions.

    Examples
    --------
    >>> x = np.array([1e-16,1,2,3])
    >>> np.array2string(x, precision=2, separator=',',
    ...                 suppress_small=True)
    '[0.,1.,2.,3.]'
    """
    overrides = _make_options_dict(precision, threshold, edgeitems,
                                   max_line_width, suppress_small, None, None,
                                   sign, formatter, floatmode, legacy)
    # per-call overrides layered on top of the global options
    options = {**_format_options, **overrides}

    if options['legacy'] == '1.13':
        if style is np._NoValue:
            style = repr
        # legacy mode prints unstructured 0d arrays through `style`
        if a.shape == () and a.dtype.names is None:
            return style(a.item())
    elif style is not np._NoValue:
        # Deprecation 11-9-2017 v1.14
        warnings.warn("'style' argument is deprecated and no longer functional"
                      " except in 1.13 'legacy' mode",
                      DeprecationWarning, stacklevel=3)

    if options['legacy'] != '1.13':
        # reserve room for the suffix on the final line
        options['linewidth'] -= len(suffix)

    # treat as a null array if any of shape elements == 0
    if a.size == 0:
        return "[]"

    return _array2string(a, options, separator, prefix)
def _extendLine(s, line, word, line_width, next_line_prefix, legacy):
needs_wrap = len(line) + len(word) > line_width
if legacy != '1.13':
# don't wrap lines if it won't help
if len(line) <= len(next_line_prefix):
needs_wrap = False
if needs_wrap:
s += line.rstrip() + "\n"
line = next_line_prefix
line += word
return s, line
def _extendLine_pretty(s, line, word, line_width, next_line_prefix, legacy):
"""
Extends line with nicely formatted (possibly multi-line) string ``word``.
"""
words = word.splitlines()
if len(words) == 1 or legacy == '1.13':
return _extendLine(s, line, word, line_width, next_line_prefix, legacy)
max_word_length = max(len(word) for word in words)
if (len(line) + max_word_length > line_width and
len(line) > len(next_line_prefix)):
s += line.rstrip() + '\n'
line = next_line_prefix + words[0]
indent = next_line_prefix
else:
indent = len(line)*' '
line += words[0]
for word in words[1::]:
s += line.rstrip() + '\n'
line = indent + word
suffix_length = max_word_length - len(words[-1])
line += suffix_length*' '
return s, line
def _formatArray(a, format_function, line_width, next_line_prefix,
                 separator, edge_items, summary_insert, legacy):
    """Format ndarray ``a`` as a nested-bracket string.

    Designed for two modes of operation:
    1. Full output
    2. Summarized output, where ``summary_insert`` (e.g. "...") replaces the
       middle of any axis longer than ``2*edge_items``.
    Returns the formatted string without the leading ``next_line_prefix``.
    """
    def recurser(index, hanging_indent, curr_width):
        """
        By using this local function, we don't need to recurse with all the
        arguments. Since this function is not created recursively, the cost is
        not significant
        """
        axis = len(index)
        axes_left = a.ndim - axis
        # base case: all axes indexed -> format the single element
        if axes_left == 0:
            return format_function(a[index])
        # when recursing, add a space to align with the [ added, and reduce the
        # length of the line by 1
        next_hanging_indent = hanging_indent + ' '
        if legacy == '1.13':
            next_width = curr_width
        else:
            next_width = curr_width - len(']')
        a_len = a.shape[axis]
        show_summary = summary_insert and 2*edge_items < a_len
        if show_summary:
            leading_items = edge_items
            trailing_items = edge_items
        else:
            leading_items = 0
            trailing_items = a_len
        # stringify the array with the hanging indent on the first line too
        s = ''
        # last axis (rows) - wrap elements if they would not fit on one line
        if axes_left == 1:
            # the length up until the beginning of the separator / bracket
            if legacy == '1.13':
                elem_width = curr_width - len(separator.rstrip())
            else:
                elem_width = curr_width - max(len(separator.rstrip()), len(']'))
            line = hanging_indent
            for i in range(leading_items):
                word = recurser(index + (i,), next_hanging_indent, next_width)
                s, line = _extendLine_pretty(
                    s, line, word, elem_width, hanging_indent, legacy)
                line += separator
            if show_summary:
                s, line = _extendLine(
                    s, line, summary_insert, elem_width, hanging_indent, legacy)
                if legacy == '1.13':
                    line += ", "
                else:
                    line += separator
            # trailing_items..2 handled here; the final element is special-cased
            # below so no separator follows it
            for i in range(trailing_items, 1, -1):
                word = recurser(index + (-i,), next_hanging_indent, next_width)
                s, line = _extendLine_pretty(
                    s, line, word, elem_width, hanging_indent, legacy)
                line += separator
            if legacy == '1.13':
                # width of the separator is not considered on 1.13
                elem_width = curr_width
            word = recurser(index + (-1,), next_hanging_indent, next_width)
            s, line = _extendLine_pretty(
                s, line, word, elem_width, hanging_indent, legacy)
            s += line
        # other axes - insert newlines between rows
        else:
            s = ''
            line_sep = separator.rstrip() + '\n'*(axes_left - 1)
            for i in range(leading_items):
                nested = recurser(index + (i,), next_hanging_indent, next_width)
                s += hanging_indent + nested + line_sep
            if show_summary:
                if legacy == '1.13':
                    # trailing space, fixed nbr of newlines, and fixed separator
                    s += hanging_indent + summary_insert + ", \n"
                else:
                    s += hanging_indent + summary_insert + line_sep
            for i in range(trailing_items, 1, -1):
                nested = recurser(index + (-i,), next_hanging_indent,
                                  next_width)
                s += hanging_indent + nested + line_sep
            nested = recurser(index + (-1,), next_hanging_indent, next_width)
            s += hanging_indent + nested
        # remove the hanging indent, and wrap in []
        s = '[' + s[len(hanging_indent):] + ']'
        return s
    try:
        # invoke the recursive part with an initial index and prefix
        return recurser(index=(),
                        hanging_indent=next_line_prefix,
                        curr_width=line_width)
    finally:
        # recursive closures have a cyclic reference to themselves, which
        # requires gc to collect (gh-10620). To avoid this problem, for
        # performance and PyPy friendliness, we break the cycle:
        recurser = None
def _none_or_positive_arg(x, name):
if x is None:
return -1
if x < 0:
raise ValueError("{} must be >= 0".format(name))
return x
class FloatingFormat:
    """ Formatter for subtypes of np.floating

    Computes, from the finite values in ``data``, a common layout (padding,
    precision, fixed vs. exponential form) so that every element of the array
    prints with aligned columns; ``__call__`` then formats one scalar.
    """
    def __init__(self, data, precision, floatmode, suppress_small, sign=False,
                 *, legacy=None):
        # for backcompatibility, accept bools
        if isinstance(sign, bool):
            sign = '+' if sign else '-'
        self._legacy = legacy
        if self._legacy == '1.13':
            # when not 0d, legacy does not support '-'
            if data.shape != () and sign == '-':
                sign = ' '
        self.floatmode = floatmode
        if floatmode == 'unique':
            # 'unique' mode prints as many digits as needed; precision unused
            self.precision = None
        else:
            self.precision = precision
        self.precision = _none_or_positive_arg(self.precision, 'precision')
        self.suppress_small = suppress_small
        self.sign = sign
        self.exp_format = False
        self.large_exponent = False
        self.fillFormat(data)
    def fillFormat(self, data):
        # Compute pad_left/pad_right/precision/trim from a first formatting
        # pass over the data, so that later __call__s produce aligned output.
        # only the finite values are used to compute the number of digits
        finite_vals = data[isfinite(data)]
        # choose exponential mode based on the non-zero finite values:
        abs_non_zero = absolute(finite_vals[finite_vals != 0])
        if len(abs_non_zero) != 0:
            max_val = np.max(abs_non_zero)
            min_val = np.min(abs_non_zero)
            with errstate(over='ignore'):  # division can overflow
                if max_val >= 1.e8 or (not self.suppress_small and
                        (min_val < 0.0001 or max_val/min_val > 1000.)):
                    self.exp_format = True
        # do a first pass of printing all the numbers, to determine sizes
        if len(finite_vals) == 0:
            self.pad_left = 0
            self.pad_right = 0
            self.trim = '.'
            self.exp_size = -1
            self.unique = True
            self.min_digits = None
        elif self.exp_format:
            trim, unique = '.', True
            if self.floatmode == 'fixed' or self._legacy == '1.13':
                trim, unique = 'k', False
            strs = (dragon4_scientific(x, precision=self.precision,
                               unique=unique, trim=trim, sign=self.sign == '+')
                    for x in finite_vals)
            frac_strs, _, exp_strs = zip(*(s.partition('e') for s in strs))
            int_part, frac_part = zip(*(s.split('.') for s in frac_strs))
            self.exp_size = max(len(s) for s in exp_strs) - 1
            self.trim = 'k'
            self.precision = max(len(s) for s in frac_part)
            self.min_digits = self.precision
            self.unique = unique
            # for back-compat with np 1.13, use 2 spaces & sign and full prec
            if self._legacy == '1.13':
                self.pad_left = 3
            else:
                # this should be only 1 or 2. Can be calculated from sign.
                self.pad_left = max(len(s) for s in int_part)
            # pad_right is only needed for nan length calculation
            self.pad_right = self.exp_size + 2 + self.precision
        else:
            trim, unique = '.', True
            if self.floatmode == 'fixed':
                trim, unique = 'k', False
            strs = (dragon4_positional(x, precision=self.precision,
                                       fractional=True,
                                       unique=unique, trim=trim,
                                       sign=self.sign == '+')
                    for x in finite_vals)
            int_part, frac_part = zip(*(s.split('.') for s in strs))
            if self._legacy == '1.13':
                self.pad_left = 1 + max(len(s.lstrip('-+')) for s in int_part)
            else:
                self.pad_left = max(len(s) for s in int_part)
            self.pad_right = max(len(s) for s in frac_part)
            self.exp_size = -1
            self.unique = unique
            if self.floatmode in ['fixed', 'maxprec_equal']:
                self.precision = self.min_digits = self.pad_right
                self.trim = 'k'
            else:
                self.trim = '.'
                self.min_digits = 0
        if self._legacy != '1.13':
            # account for sign = ' ' by adding one to pad_left
            if self.sign == ' ' and not any(np.signbit(finite_vals)):
                self.pad_left += 1
        # if there are non-finite values, may need to increase pad_left
        if data.size != finite_vals.size:
            neginf = self.sign != '-' or any(data[isinf(data)] < 0)
            nanlen = len(_format_options['nanstr'])
            inflen = len(_format_options['infstr']) + neginf
            offset = self.pad_right + 1  # +1 for decimal pt
            self.pad_left = max(self.pad_left, nanlen - offset, inflen - offset)
    def __call__(self, x):
        # Format a single scalar using the layout computed by fillFormat.
        if not np.isfinite(x):
            with errstate(invalid='ignore'):
                if np.isnan(x):
                    sign = '+' if self.sign == '+' else ''
                    ret = sign + _format_options['nanstr']
                else:  # isinf
                    sign = '-' if x < 0 else '+' if self.sign == '+' else ''
                    ret = sign + _format_options['infstr']
                # right-align nan/inf within the common field width
                return ' '*(self.pad_left + self.pad_right + 1 - len(ret)) + ret
        if self.exp_format:
            return dragon4_scientific(x,
                                      precision=self.precision,
                                      min_digits=self.min_digits,
                                      unique=self.unique,
                                      trim=self.trim,
                                      sign=self.sign == '+',
                                      pad_left=self.pad_left,
                                      exp_digits=self.exp_size)
        else:
            return dragon4_positional(x,
                                      precision=self.precision,
                                      min_digits=self.min_digits,
                                      unique=self.unique,
                                      fractional=True,
                                      trim=self.trim,
                                      sign=self.sign == '+',
                                      pad_left=self.pad_left,
                                      pad_right=self.pad_right)
@set_module('numpy')
def format_float_scientific(x, precision=None, unique=True, trim='k',
                            sign=False, pad_left=None, exp_digits=None,
                            min_digits=None):
    """
    Format a floating-point scalar as a decimal string in scientific notation.
    Provides control over rounding, trimming and padding. Uses and assumes
    IEEE unbiased rounding. Uses the "Dragon4" algorithm.
    Parameters
    ----------
    x : python float or numpy floating scalar
        Value to format.
    precision : non-negative integer or None, optional
        Maximum number of digits to print. May be None if `unique` is
        `True`, but must be an integer if unique is `False`.
    unique : boolean, optional
        If `True`, use a digit-generation strategy which gives the shortest
        representation which uniquely identifies the floating-point number from
        other values of the same type, by judicious rounding. If `precision`
        is given fewer digits than necessary can be printed. If `min_digits`
        is given more can be printed, in which cases the last digit is rounded
        with unbiased rounding.
        If `False`, digits are generated as if printing an infinite-precision
        value and stopping after `precision` digits, rounding the remaining
        value with unbiased rounding
    trim : one of 'k', '.', '0', '-', optional
        Controls post-processing trimming of trailing digits, as follows:
        * 'k' : keep trailing zeros, keep decimal point (no trimming)
        * '.' : trim all trailing zeros, leave decimal point
        * '0' : trim all but the zero before the decimal point. Insert the
          zero if it is missing.
        * '-' : trim trailing zeros and any trailing decimal point
    sign : boolean, optional
        Whether to show the sign for positive values.
    pad_left : non-negative integer, optional
        Pad the left side of the string with whitespace until at least that
        many characters are to the left of the decimal point.
    exp_digits : non-negative integer, optional
        Pad the exponent with zeros until it contains at least this many digits.
        If omitted, the exponent will be at least 2 digits.
    min_digits : non-negative integer or None, optional
        Minimum number of digits to print. This only has an effect for
        `unique=True`. In that case more digits than necessary to uniquely
        identify the value may be printed and rounded unbiased.

        .. versionadded:: 1.21.0
    Returns
    -------
    rep : string
        The string representation of the floating point value
    See Also
    --------
    format_float_positional
    Examples
    --------
    >>> np.format_float_scientific(np.float32(np.pi))
    '3.1415927e+00'
    >>> s = np.float32(1.23e24)
    >>> np.format_float_scientific(s, unique=False, precision=15)
    '1.230000071797338e+24'
    >>> np.format_float_scientific(s, exp_digits=4)
    '1.23e+0024'
    """
    # normalize None -> -1 (dragon4's "unset" sentinel), rejecting negatives
    precision = _none_or_positive_arg(precision, 'precision')
    pad_left = _none_or_positive_arg(pad_left, 'pad_left')
    exp_digits = _none_or_positive_arg(exp_digits, 'exp_digits')
    min_digits = _none_or_positive_arg(min_digits, 'min_digits')
    if min_digits > 0 and precision > 0 and min_digits > precision:
        raise ValueError("min_digits must be less than or equal to precision")
    return dragon4_scientific(x, precision=precision, unique=unique,
                              trim=trim, sign=sign, pad_left=pad_left,
                              exp_digits=exp_digits, min_digits=min_digits)
@set_module('numpy')
def format_float_positional(x, precision=None, unique=True,
                            fractional=True, trim='k', sign=False,
                            pad_left=None, pad_right=None, min_digits=None):
    """
    Format a floating-point scalar as a decimal string in positional notation.

    Provides control over rounding, trimming and padding. Uses and assumes
    IEEE unbiased rounding. Uses the "Dragon4" algorithm.

    Parameters
    ----------
    x : python float or numpy floating scalar
        Value to format.
    precision : non-negative integer or None, optional
        Maximum number of digits to print. May be None if `unique` is
        `True`, but must be an integer if unique is `False`.
    unique : boolean, optional
        If `True`, use a digit-generation strategy which gives the shortest
        representation which uniquely identifies the floating-point number from
        other values of the same type, by judicious rounding. If `precision`
        is given fewer digits than necessary can be printed, or if `min_digits`
        is given more can be printed, in which cases the last digit is rounded
        with unbiased rounding.
        If `False`, digits are generated as if printing an infinite-precision
        value and stopping after `precision` digits, rounding the remaining
        value with unbiased rounding
    fractional : boolean, optional
        If `True`, the cutoffs of `precision` and `min_digits` refer to the
        total number of digits after the decimal point, including leading
        zeros.
        If `False`, `precision` and `min_digits` refer to the total number of
        significant digits, before or after the decimal point, ignoring leading
        zeros.
    trim : one of 'k', '.', '0', '-', optional
        Controls post-processing trimming of trailing digits, as follows:

        * 'k' : keep trailing zeros, keep decimal point (no trimming)
        * '.' : trim all trailing zeros, leave decimal point
        * '0' : trim all but the zero before the decimal point. Insert the
          zero if it is missing.
        * '-' : trim trailing zeros and any trailing decimal point
    sign : boolean, optional
        Whether to show the sign for positive values.
    pad_left : non-negative integer, optional
        Pad the left side of the string with whitespace until at least that
        many characters are to the left of the decimal point.
    pad_right : non-negative integer, optional
        Pad the right side of the string with whitespace until at least that
        many characters are to the right of the decimal point.
    min_digits : non-negative integer or None, optional
        Minimum number of digits to print. Only has an effect if `unique=True`
        in which case additional digits past those necessary to uniquely
        identify the value may be printed, rounding the last additional digit.

        .. versionadded:: 1.21.0

    Returns
    -------
    rep : string
        The string representation of the floating point value

    See Also
    --------
    format_float_scientific

    Examples
    --------
    >>> np.format_float_positional(np.float32(np.pi))
    '3.1415927'
    >>> np.format_float_positional(np.float16(np.pi))
    '3.14'
    >>> np.format_float_positional(np.float16(0.3))
    '0.3'
    >>> np.format_float_positional(np.float16(0.3), unique=False, precision=10)
    '0.3000488281'
    """
    # Validate that each cutoff/padding argument is None or a non-negative int.
    precision = _none_or_positive_arg(precision, 'precision')
    pad_left = _none_or_positive_arg(pad_left, 'pad_left')
    pad_right = _none_or_positive_arg(pad_right, 'pad_right')
    min_digits = _none_or_positive_arg(min_digits, 'min_digits')
    # A significant-digit cutoff of zero would be meaningless.
    if not fractional and precision == 0:
        raise ValueError("precision must be greater than 0 if "
                         "fractional=False")
    if min_digits > 0 and precision > 0 and min_digits > precision:
        raise ValueError("min_digits must be less than or equal to precision")
    return dragon4_positional(x, precision=precision, unique=unique,
                              fractional=fractional, trim=trim,
                              sign=sign, pad_left=pad_left,
                              pad_right=pad_right, min_digits=min_digits)
class IntegerFormat:
    """Formatter for integer scalars, right-aligned to a common width.

    The field width is chosen so that both the largest and the smallest
    (most negative, hence possibly widest) element of ``data`` fit.
    """
    def __init__(self, data):
        # Empty input needs no padding at all.
        width = 0
        if data.size > 0:
            extrema = (np.max(data), np.min(data))
            width = max(len(str(value)) for value in extrema)
        self.format = '%{}d'.format(width)

    def __call__(self, x):
        # Render one element with the precomputed field width.
        return self.format % x
class BoolFormat:
    """Formatter for boolean scalars.

    In non-0d arrays ``True`` is padded to " True" so it lines up with
    "False"; a 0d array prints the bare word.
    """
    def __init__(self, data, **kwargs):
        if data.shape == ():
            # Scalar (0d) arrays need no alignment padding.
            self.truestr = 'True'
        else:
            self.truestr = ' True'

    def __call__(self, x):
        if x:
            return self.truestr
        return "False"
class ComplexFloatingFormat:
    """ Formatter for subtypes of np.complexfloating """
    def __init__(self, x, precision, floatmode, suppress_small,
                 sign=False, *, legacy=None):
        # for backcompatibility, accept bools
        if isinstance(sign, bool):
            sign = '+' if sign else '-'
        # Both components normally share the requested floatmode; the legacy
        # 1.13 printing rules used different modes for real and imaginary.
        floatmode_real = floatmode_imag = floatmode
        if legacy == '1.13':
            floatmode_real = 'maxprec_equal'
            floatmode_imag = 'maxprec'
        self.real_format = FloatingFormat(
            x.real, precision, floatmode_real, suppress_small,
            sign=sign, legacy=legacy
        )
        # The imaginary part always shows its sign, so "a+bj"/"a-bj" render
        # correctly regardless of the caller's `sign` setting.
        self.imag_format = FloatingFormat(
            x.imag, precision, floatmode_imag, suppress_small,
            sign='+', legacy=legacy
        )
    def __call__(self, x):
        # Format the two components independently, then splice the 'j'
        # suffix into the imaginary part ahead of any padding whitespace.
        r = self.real_format(x.real)
        i = self.imag_format(x.imag)
        # add the 'j' before the terminal whitespace in i
        sp = len(i.rstrip())
        i = i[:sp] + 'j' + i[sp:]
        return r + i
class _TimelikeFormat:
def __init__(self, data):
non_nat = data[~isnat(data)]
if len(non_nat) > 0:
# Max str length of non-NaT elements
max_str_len = max(len(self._format_non_nat(np.max(non_nat))),
len(self._format_non_nat(np.min(non_nat))))
else:
max_str_len = 0
if len(non_nat) < data.size:
# data contains a NaT
max_str_len = max(max_str_len, 5)
self._format = '%{}s'.format(max_str_len)
self._nat = "'NaT'".rjust(max_str_len)
def _format_non_nat(self, x):
# override in subclass
raise NotImplementedError
def __call__(self, x):
if isnat(x):
return self._nat
else:
return self._format % self._format_non_nat(x)
class DatetimeFormat(_TimelikeFormat):
    """Formatter for datetime64 scalars; each element is rendered as a
    quoted string via `datetime_as_string`."""
    def __init__(self, x, unit=None, timezone=None, casting='same_kind',
                 legacy=False):
        # Get the unit from the dtype
        if unit is None:
            if x.dtype.kind == 'M':
                unit = datetime_data(x.dtype)[0]
            else:
                # Non-datetime input: fall back to seconds.
                unit = 's'
        if timezone is None:
            timezone = 'naive'
        self.timezone = timezone
        self.unit = unit
        self.casting = casting
        self.legacy = legacy
        # must be called after the above are configured, because the base
        # class __init__ immediately formats elements to measure the width
        super().__init__(x)
    def __call__(self, x):
        if self.legacy == '1.13':
            # Legacy 1.13 printing did not right-justify datetimes.
            return self._format_non_nat(x)
        return super().__call__(x)
    def _format_non_nat(self, x):
        return "'%s'" % datetime_as_string(x,
                                           unit=self.unit,
                                           timezone=self.timezone,
                                           casting=self.casting)
class TimedeltaFormat(_TimelikeFormat):
    """Formatter for timedelta64 scalars; non-NaT values print as their
    underlying integer count (the i8 view of the value)."""
    def _format_non_nat(self, x):
        return str(x.astype('i8'))
class SubArrayFormat:
    """Format a (possibly nested) sub-array by applying a scalar formatter
    to every element and wrapping each axis in square brackets."""
    def __init__(self, format_function):
        self.format_function = format_function

    def __call__(self, arr):
        if arr.ndim > 1:
            # Recurse along the first axis for multidimensional input.
            parts = [self(sub) for sub in arr]
        else:
            parts = [self.format_function(item) for item in arr]
        return "[{}]".format(", ".join(parts))
class StructuredVoidFormat:
    """
    Formatter for structured np.void objects.

    This does not work on structured alias types like
    np.dtype(('i4', 'i2,i2')), as alias scalars lose their field
    information, and the implementation relies upon np.void.__getitem__.
    """
    def __init__(self, format_functions):
        self.format_functions = format_functions

    @classmethod
    def from_data(cls, data, **options):
        """
        Alternative constructor that derives one formatter per field from
        the raw data. Added to avoid changing the signature of __init__.
        """
        format_functions = []
        for field_name in data.dtype.names:
            format_function = _get_format_function(data[field_name], **options)
            if data.dtype[field_name].shape != ():
                # Sub-array fields need the nested bracket formatter.
                format_function = SubArrayFormat(format_function)
            format_functions.append(format_function)
        return cls(format_functions)

    def __call__(self, x):
        str_fields = [fmt(field)
                      for field, fmt in zip(x, self.format_functions)]
        if len(str_fields) == 1:
            # Single-field records keep the trailing comma, tuple-style.
            return "({},)".format(str_fields[0])
        return "({})".format(", ".join(str_fields))
def _void_scalar_repr(x):
    """
    Implements the repr for structured-void scalars. It is called from the
    scalartypes.c.src code, and is placed here because it uses the elementwise
    formatters defined above.
    """
    # Wrap the scalar in a 0d array so per-field format functions can be
    # derived from its dtype, using the module-wide print options.
    return StructuredVoidFormat.from_data(array(x), **_format_options)(x)
# Scalar types whose dtype is implied by their printed values, allowing
# array_repr to omit the `dtype=` argument for arrays of these types.
_typelessdata = [int_, float_, complex_, bool_]
# Include the C integer types only where they are Python int subclasses.
if issubclass(intc, int):
    _typelessdata.append(intc)
if issubclass(longlong, int):
    _typelessdata.append(longlong)
def dtype_is_implied(dtype):
    """
    Determine if the given dtype is implied by the representation of its values.

    Parameters
    ----------
    dtype : dtype
        Data type

    Returns
    -------
    implied : bool
        True if the dtype is implied by the representation of its values.

    Examples
    --------
    >>> np.core.arrayprint.dtype_is_implied(int)
    True
    >>> np.array([1, 2, 3], int)
    array([1, 2, 3])
    >>> np.core.arrayprint.dtype_is_implied(np.int8)
    False
    >>> np.array([1, 2, 3], np.int8)
    array([1, 2, 3], dtype=int8)
    """
    dtype = np.dtype(dtype)
    if dtype.names is not None:
        # Structured dtypes are never implied: field names are not part of
        # the value repr, and not only void types can be structured.
        return False
    if _format_options['legacy'] == '1.13' and dtype.type == bool_:
        # The legacy 1.13 repr always spelled out bool dtypes.
        return False
    return dtype.type in _typelessdata
def dtype_short_repr(dtype):
    """
    Convert a dtype to a short form which evaluates to the same dtype.

    The intent is roughly that the following holds

    >>> from numpy import *
    >>> dt = np.int64([1, 2]).dtype
    >>> assert eval(dtype_short_repr(dt)) == dt
    """
    if dtype.names is not None:
        # Structured dtypes already repr as a list/tuple of fields.
        return str(dtype)
    if issubclass(dtype.type, flexible):
        # Flexible (str/bytes/void) types would otherwise render as
        # garbage like "str256"; quote the dtype string instead.
        return "'%s'" % str(dtype)
    typename = dtype.name
    # Quote names that are not Python-identifier-like (e.g.
    # "datetime64[s]") so the result still evaluates back to the dtype.
    needs_quoting = typename != "" and not (
        typename[0].isalpha() and typename.isalnum())
    if needs_quoting:
        typename = repr(typename)
    return typename
def _array_repr_implementation(
        arr, max_line_width=None, precision=None, suppress_small=None,
        array2string=array2string):
    """Internal version of array_repr() that allows overriding array2string."""
    if max_line_width is None:
        max_line_width = _format_options['linewidth']
    # Subclasses repr under their own class name (e.g. "MaskedArray(...)").
    if type(arr) is not ndarray:
        class_name = type(arr).__name__
    else:
        class_name = "array"
    # dtype may be omitted when implied by the values; empty arrays always
    # spell it out.
    skipdtype = dtype_is_implied(arr.dtype) and arr.size > 0
    prefix = class_name + "("
    suffix = ")" if skipdtype else ","
    if (_format_options['legacy'] == '1.13' and
            arr.shape == () and not arr.dtype.names):
        lst = repr(arr.item())
    elif arr.size > 0 or arr.shape == (0,):
        lst = array2string(arr, max_line_width, precision, suppress_small,
                           ', ', prefix, suffix=suffix)
    else:  # show zero-length shape unless it is (0,)
        lst = "[], shape=%s" % (repr(arr.shape),)
    arr_str = prefix + lst + suffix
    if skipdtype:
        return arr_str
    dtype_str = "dtype={})".format(dtype_short_repr(arr.dtype))
    # compute whether we should put dtype on a new line: Do so if adding the
    # dtype would extend the last line past max_line_width.
    # Note: This line gives the correct result even when rfind returns -1.
    last_line_len = len(arr_str) - (arr_str.rfind('\n') + 1)
    spacer = " "
    if _format_options['legacy'] == '1.13':
        if issubclass(arr.dtype.type, flexible):
            spacer = '\n' + ' '*len(class_name + "(")
    elif last_line_len + len(dtype_str) + 1 > max_line_width:
        # Align the dtype under the opening parenthesis on its own line.
        spacer = '\n' + ' '*len(class_name + "(")
    return arr_str + spacer + dtype_str
def _array_repr_dispatcher(
        arr, max_line_width=None, precision=None, suppress_small=None):
    # Relevant arguments for __array_function__ protocol dispatch.
    return (arr,)
@array_function_dispatch(_array_repr_dispatcher, module='numpy')
def array_repr(arr, max_line_width=None, precision=None, suppress_small=None):
    """
    Return the string representation of an array.

    Parameters
    ----------
    arr : ndarray
        Input array.
    max_line_width : int, optional
        Inserts newlines if text is longer than `max_line_width`.
        Defaults to ``numpy.get_printoptions()['linewidth']``.
    precision : int, optional
        Floating point precision.
        Defaults to ``numpy.get_printoptions()['precision']``.
    suppress_small : bool, optional
        Represent numbers "very close" to zero as zero; default is False.
        Very close is defined by precision: if the precision is 8, e.g.,
        numbers smaller (in absolute value) than 5e-9 are represented as
        zero.
        Defaults to ``numpy.get_printoptions()['suppress']``.

    Returns
    -------
    string : str
        The string representation of an array.

    See Also
    --------
    array_str, array2string, set_printoptions

    Examples
    --------
    >>> np.array_repr(np.array([1,2]))
    'array([1, 2])'
    >>> np.array_repr(np.ma.array([0.]))
    'MaskedArray([0.])'
    >>> np.array_repr(np.array([], np.int32))
    'array([], dtype=int32)'
    >>> x = np.array([1e-6, 4e-7, 2, 3])
    >>> np.array_repr(x, precision=6, suppress_small=True)
    'array([0.000001, 0.      , 2.      , 3.      ])'
    """
    # Delegate to the overridable implementation shared with the
    # __array_function__-bypassing default repr.
    return _array_repr_implementation(
        arr, max_line_width, precision, suppress_small)
@_recursive_guard()
def _guarded_repr_or_str(v):
    # str() for a 0d element, guarded (via the decorator) against
    # self-referencing object arrays recursing forever. Bytes keep their
    # repr so the quoting and b-prefix are preserved.
    if isinstance(v, bytes):
        return repr(v)
    return str(v)
def _array_str_implementation(
        a, max_line_width=None, precision=None, suppress_small=None,
        array2string=array2string):
    """Internal version of array_str() that allows overriding array2string."""
    if (_format_options['legacy'] == '1.13' and
            a.shape == () and not a.dtype.names):
        # Legacy 1.13 printing of unstructured 0d arrays: plain scalar str.
        return str(a.item())
    # the str of 0d arrays is a special case: It should appear like a scalar,
    # so floats are not truncated by `precision`, and strings are not wrapped
    # in quotes. So we return the str of the scalar value.
    if a.shape == ():
        # obtain a scalar and call str on it, avoiding problems for subclasses
        # for which indexing with () returns a 0d instead of a scalar by using
        # ndarray's getindex. Also guard against recursive 0d object arrays.
        return _guarded_repr_or_str(np.ndarray.__getitem__(a, ()))
    return array2string(a, max_line_width, precision, suppress_small, ' ', "")
def _array_str_dispatcher(
        a, max_line_width=None, precision=None, suppress_small=None):
    # Relevant arguments for __array_function__ protocol dispatch.
    return (a,)
@array_function_dispatch(_array_str_dispatcher, module='numpy')
def array_str(a, max_line_width=None, precision=None, suppress_small=None):
    """
    Return a string representation of the data in an array.

    The data in the array is returned as a single string. This function is
    similar to `array_repr`, the difference being that `array_repr` also
    returns information on the kind of array and its data type.

    Parameters
    ----------
    a : ndarray
        Input array.
    max_line_width : int, optional
        Inserts newlines if text is longer than `max_line_width`.
        Defaults to ``numpy.get_printoptions()['linewidth']``.
    precision : int, optional
        Floating point precision.
        Defaults to ``numpy.get_printoptions()['precision']``.
    suppress_small : bool, optional
        Represent numbers "very close" to zero as zero; default is False.
        Very close is defined by precision: if the precision is 8, e.g.,
        numbers smaller (in absolute value) than 5e-9 are represented as
        zero.
        Defaults to ``numpy.get_printoptions()['suppress']``.

    See Also
    --------
    array2string, array_repr, set_printoptions

    Examples
    --------
    >>> np.array_str(np.arange(3))
    '[0 1 2]'
    """
    # Delegate to the overridable implementation shared with the
    # __array_function__-bypassing default str.
    return _array_str_implementation(
        a, max_line_width, precision, suppress_small)
# needed if __array_function__ is disabled
_array2string_impl = getattr(array2string, '__wrapped__', array2string)
# Default str/repr implementations bound to the undispatched array2string;
# set_string_function(None) restores these as numpy's own printing.
_default_array_str = functools.partial(_array_str_implementation,
                                       array2string=_array2string_impl)
_default_array_repr = functools.partial(_array_repr_implementation,
                                        array2string=_array2string_impl)
def set_string_function(f, repr=True):
    """
    Set a Python function to be used when pretty printing arrays.

    Parameters
    ----------
    f : function or None
        Function to be used to pretty print arrays. The function should expect
        a single array argument and return a string of the representation of
        the array. If None, the function is reset to the default NumPy function
        to print arrays.
    repr : bool, optional
        If True (default), the function for pretty printing (``__repr__``)
        is set, if False the function that returns the default string
        representation (``__str__``) is set.

    See Also
    --------
    set_printoptions, get_printoptions

    Examples
    --------
    >>> def pprint(arr):
    ...     return 'HA! - What are you going to do now?'
    ...
    >>> np.set_string_function(pprint)
    >>> a = np.arange(10)
    >>> a
    HA! - What are you going to do now?
    >>> _ = a
    >>> # [0 1 2 3 4 5 6 7 8 9]

    We can reset the function to the default:

    >>> np.set_string_function(None)
    >>> a
    array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])

    `repr` affects either pretty printing or normal string representation.
    Note that ``__repr__`` is still affected by setting ``__str__``
    because the width of each array element in the returned string becomes
    equal to the length of the result of ``__str__()``.

    >>> x = np.arange(4)
    >>> np.set_string_function(lambda x:'random', repr=False)
    >>> x.__str__()
    'random'
    >>> x.__repr__()
    'array([0, 1, 2, 3])'
    """
    if f is None:
        # Restore numpy's built-in printing. The second argument of the
        # C-level setter selects repr (1) vs str (0).
        if repr:
            return multiarray.set_string_function(_default_array_repr, 1)
        else:
            return multiarray.set_string_function(_default_array_str, 0)
    else:
        return multiarray.set_string_function(f, repr)
* 'k' : keep trailing zeros, keep decimal point (no trimming) |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.conf.urls import re_path
from . import views
app_name = 'topic'
urlpatterns = [
re_path(r'^$', views.deleted, name='index'),
re_path(r'^deleted/$', views.deleted, name='deleted'),<|fim▁hole|> re_path(r'^closed/$', views.closed, name='closed'),
re_path(r'^pinned/$', views.pinned, name='pinned'),
]<|fim▁end|> | |
<|file_name|>time.go<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | // Package time extends the time package in the stdlib.
package time |
<|file_name|>api.py<|end_file_name|><|fim▁begin|># pylint: disable=unused-import
"""
Python APIs exposed by the bulk_email app to other in-process apps.
"""
# Public Bulk Email Functions
from __future__ import absolute_import
from bulk_email.models_api import (
is_bulk_email_enabled_for_course,
is_bulk_email_feature_enabled,
is_user_opted_out_for_course
)
def get_emails_enabled(user, course_id):
"""
Get whether or not emails are enabled in the context of a course.
Arguments:
user: the user object for which we want to check whether emails are enabled
course_id (string): the course id of the course
Returns:
(bool): True if emails are enabled for the course associated with course_id for the user;
False otherwise
"""
if is_bulk_email_feature_enabled(course_id=course_id):
return not is_user_opted_out_for_course(user=user, course_id=course_id)<|fim▁hole|><|fim▁end|> | return None |
<|file_name|>test_preview.py<|end_file_name|><|fim▁begin|>"""
Tests for contentstore.views.preview.py
"""
import re
from unittest import mock
import ddt
from django.test.client import Client, RequestFactory
from web_fragments.fragment import Fragment
from xblock.core import XBlock, XBlockAside
from cms.djangoapps.contentstore.utils import reverse_usage_url
from cms.djangoapps.xblock_config.models import StudioConfig
from common.djangoapps.student.tests.factories import UserFactory
from xmodule.modulestore import ModuleStoreEnum # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.django import modulestore # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory # lint-amnesty, pylint: disable=wrong-import-order
from xmodule.modulestore.tests.test_asides import AsideTestType # lint-amnesty, pylint: disable=wrong-import-order
from ..preview import _preview_module_system, get_preview_fragment
@ddt.ddt
class GetPreviewHtmlTestCase(ModuleStoreTestCase):
"""
Tests for get_preview_fragment.
Note that there are other existing test cases in test_contentstore that indirectly execute
get_preview_fragment via the xblock RESTful API.
"""
@XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
def test_preview_fragment(self):
"""
Test for calling get_preview_html. Ensures data-usage-id is correctly set and
asides are correctly included.
"""
course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
html = ItemFactory.create(
parent_location=course.location,
category="html",
data={'data': "<html>foobar</html>"}
)
config = StudioConfig.current()
config.enabled = True
config.save()
request = RequestFactory().get('/dummy-url')
request.user = UserFactory()
request.session = {}
# Call get_preview_fragment directly.
context = {
'reorderable_items': set(),
'read_only': True
}
html = get_preview_fragment(request, html, context).content
# Verify student view html is returned, and the usage ID is as expected.
html_pattern = re.escape(
str(course.id.make_usage_key('html', 'replaceme'))
).replace('replaceme', r'html_[0-9]*')
self.assertRegex(
html,
f'data-usage-id="{html_pattern}"'
)
self.assertRegex(html, '<html>foobar</html>')
self.assertRegex(html, r"data-block-type=[\"\']test_aside[\"\']")
self.assertRegex(html, "Aside rendered")
# Now ensure the acid_aside is not in the result
self.assertNotRegex(html, r"data-block-type=[\"\']acid_aside[\"\']")
# Ensure about pages don't have asides
about = modulestore().get_item(course.id.make_usage_key('about', 'overview'))
html = get_preview_fragment(request, about, context).content
self.assertNotRegex(html, r"data-block-type=[\"\']test_aside[\"\']")
self.assertNotRegex(html, "Aside rendered")
@XBlockAside.register_temp_plugin(AsideTestType, 'test_aside')
def test_preview_no_asides(self):
"""
Test for calling get_preview_html. Ensures data-usage-id is correctly set and
asides are correctly excluded because they are not enabled.
"""
course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
html = ItemFactory.create(
parent_location=course.location,
category="html",
data={'data': "<html>foobar</html>"}
)
config = StudioConfig.current()
config.enabled = False
config.save()
request = RequestFactory().get('/dummy-url')
request.user = UserFactory()
request.session = {}
# Call get_preview_fragment directly.
context = {
'reorderable_items': set(),
'read_only': True
}
html = get_preview_fragment(request, html, context).content
self.assertNotRegex(html, r"data-block-type=[\"\']test_aside[\"\']")
self.assertNotRegex(html, "Aside rendered")
@mock.patch('xmodule.conditional_module.ConditionalBlock.is_condition_satisfied')
def test_preview_conditional_module_children_context(self, mock_is_condition_satisfied):
"""
Tests that when empty context is pass to children of ConditionalBlock it will not raise KeyError.
"""
mock_is_condition_satisfied.return_value = True
client = Client()
client.login(username=self.user.username, password=self.user_password)
<|fim▁hole|> course = CourseFactory.create()
conditional_block = ItemFactory.create(
parent_location=course.location,
category="conditional"
)
# child conditional_block
ItemFactory.create(
parent_location=conditional_block.location,
category="conditional"
)
url = reverse_usage_url(
'preview_handler',
conditional_block.location,
kwargs={'handler': 'xmodule_handler/conditional_get'}
)
response = client.post(url)
self.assertEqual(response.status_code, 200)
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_block_branch_not_changed_by_preview_handler(self, default_store):
"""
Tests preview_handler should not update blocks being previewed
"""
client = Client()
client.login(username=self.user.username, password=self.user_password)
with self.store.default_store(default_store):
course = CourseFactory.create()
block = ItemFactory.create(
parent_location=course.location,
category="problem"
)
url = reverse_usage_url(
'preview_handler',
block.location,
kwargs={'handler': 'xmodule_handler/problem_check'}
)
response = client.post(url)
self.assertEqual(response.status_code, 200)
self.assertFalse(modulestore().has_changes(modulestore().get_item(block.location)))
@XBlock.needs("field-data")
@XBlock.needs("i18n")
@XBlock.needs("mako")
@XBlock.needs("user")
@XBlock.needs("teams_configuration")
class PureXBlock(XBlock):
    """
    Pure XBlock to use in tests.

    The class decorators declare the runtime services the tests expect the
    Studio preview runtime to provide.
    """
    def student_view(self, context):
        """
        Renders the output that a student will see.

        Renders ``edxmako.html`` through the runtime's 'mako' service so
        tests can assert the MakoService is wired up.
        """
        fragment = Fragment()
        fragment.add_content(self.runtime.service(self, 'mako').render_template('edxmako.html', context))
        return fragment
@ddt.ddt
class StudioXBlockServiceBindingTest(ModuleStoreTestCase):
    """
    Tests that the Studio Module System (XBlock Runtime) provides an expected set of services.
    """
    def setUp(self):
        """
        Set up the user and request that will be used.
        """
        super().setUp()
        self.user = UserFactory()
        self.course = CourseFactory.create()
        # NOTE(review): plain mocks appear sufficient here; the runtime only
        # holds onto the request/field_data objects -- confirm.
        self.request = mock.Mock()
        self.field_data = mock.Mock()

    @XBlock.register_temp_plugin(PureXBlock, identifier='pure')
    @ddt.data("user", "i18n", "field-data", "teams_configuration")
    def test_expected_services_exist(self, expected_service):
        """
        Tests that each of the services in the ddt data above is provided
        by the Studio runtime.
        """
        descriptor = ItemFactory(category="pure", parent=self.course)
        runtime = _preview_module_system(
            self.request,
            descriptor,
            self.field_data,
        )
        service = runtime.service(descriptor, expected_service)
        self.assertIsNotNone(service)
class CmsModuleSystemShimTest(ModuleStoreTestCase):
    """
    Tests that the deprecated attributes in the Module System (XBlock Runtime) return the expected values.
    """
    def setUp(self):
        """
        Set up the user and other fields that will be used to instantiate the runtime.
        """
        super().setUp()
        self.course = CourseFactory.create()
        self.user = UserFactory()
        self.request = RequestFactory().get('/dummy-url')
        self.request.user = self.user
        self.request.session = {}
        self.descriptor = ItemFactory(category="video", parent=self.course)
        self.field_data = mock.Mock()
        self.runtime = _preview_module_system(
            self.request,
            self.descriptor,
            self.field_data,
        )

    def test_get_user_role(self):
        """The preview runtime reports the 'staff' role for the test user."""
        assert self.runtime.get_user_role() == 'staff'

    @XBlock.register_temp_plugin(PureXBlock, identifier='pure')
    def test_render_template(self):
        """Rendering through the 'mako' service produces the expected markup."""
        descriptor = ItemFactory(category="pure", parent=self.course)
        html = get_preview_fragment(self.request, descriptor, {'element_id': 142}).content
        assert '<div id="142" ns="main">Testing the MakoService</div>' in html

    def test_xqueue_is_not_available_in_studio(self):
        """The Studio preview runtime exposes no xqueue: both the attribute
        and the service lookup return None."""
        descriptor = ItemFactory(category="problem", parent=self.course)
        runtime = _preview_module_system(
            self.request,
            descriptor=descriptor,
            field_data=mock.Mock(),
        )
        assert runtime.xqueue is None
        assert runtime.service(descriptor, 'xqueue') is None
<|file_name|>trans_sync.py<|end_file_name|><|fim▁begin|>import os
import os.path
from django.db import transaction
from bibliotik import manage_bibliotik
from bibliotik.models import BibliotikTransTorrent, BibliotikTorrent
from home.models import LogEntry, DownloadLocation
def sync_instance_db(instance):
b_torrents = instance.get_b_torrents_by_hash()
t_torrents = instance.get_t_torrents_by_hash(BibliotikTransTorrent.sync_t_arguments)
for c_hash, b_torrent in b_torrents.items():
if c_hash not in t_torrents:
b_torrent_path = b_torrent.path.encode('utf-8')
messages = []
with transaction.atomic():
b_torrent.delete()
del b_torrents[c_hash]
if instance.replica_set.is_master:
if os.path.exists(b_torrent_path):
files = os.listdir(b_torrent_path)<|fim▁hole|> messages.append(u'There are other files so leaving in place.')
else:
messages.append(u'No other files. Deleting directory.')
os.rmdir(b_torrent_path)
else:
messages.append(u'Path does not exist.')
LogEntry.add(None, u'action',
u'Bibliotik torrent {0} deleted from instance {1}. {2}'
.format(b_torrent, instance, ' '.join(messages)))
with transaction.atomic():
for c_hash, t_torrent in t_torrents.items():
if c_hash not in b_torrents:
LogEntry.add(None, u'error',
u'Bibliotik torrent {0} appeared in instance {1}.'
.format(t_torrent.name, instance))
break
else:
b_torrent = b_torrents[c_hash]
b_torrent.sync_t_torrent(t_torrent)
def sync_all_instances_db(replica_set):
    """Run sync_instance_db for every instance in the replica set."""
    for instance in replica_set.transinstance_set.all():
        sync_instance_db(instance)
def init_sync_instance_db(instance):
    """Initial sync for one instance: create DB rows for client torrents
    missing from the database, then refresh every row from client state.

    Raises Exception if a client torrent's hash has no matching
    BibliotikTorrent row.
    """
    b_torrents = instance.get_b_torrents_by_hash()
    t_torrents = instance.get_t_torrents_by_hash(BibliotikTransTorrent.sync_t_arguments)
    # All creations/updates commit atomically so a failure mid-sync leaves
    # the database unchanged.
    with transaction.atomic():
        for c_hash, t_torrent in t_torrents.items():
            if c_hash not in b_torrents:
                try:
                    bibliotik_torrent = BibliotikTorrent.objects.get(info_hash=c_hash)
                    d_location = DownloadLocation.get_by_full_path(t_torrent.downloadDir)
                    # Register in the DB only; the torrent is already present
                    # in the client, hence add_to_client=False.
                    b_torrent = manage_bibliotik.add_bibliotik_torrent(
                        bibliotik_torrent.id,
                        instance,
                        d_location,
                        add_to_client=False
                    )
                    b_torrents[b_torrent.info_hash] = b_torrent
                except BibliotikTorrent.DoesNotExist:
                    raise Exception(u'Could not find hash {0} for name {1} in '
                                    u'DB during initial sync.'
                                    .format(c_hash, t_torrent.name))
            b_torrent = b_torrents[c_hash]
            b_torrent.sync_t_torrent(t_torrent)
def init_sync_all_instances_db(replica_set):
    """Run init_sync_instance_db for every instance in the replica set."""
    for instance in replica_set.transinstance_set.all():
        init_sync_instance_db(instance)
<|file_name|>rtestcover.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Called from datakortet\dkcoverage.bat to record regression test
coverage data in dashboard.
"""
import re
import os
# import sys
# import time
import glob
# from datakortet.dkdash.status import send_status
# from datakortet.utils import root
from coverage import coverage, misc
from coverage.files import find_python_files
from coverage.parser import CodeParser
from coverage.config import CoverageConfig
from . import dkenv
def linecount(fname, excludes):
    """Count lines in ``fname`` the same way that coverage does.

    ``excludes`` is a list of coverage exclusion regex fragments.
    Returns an ``(included, excluded)`` pair of line counts.
    """
    cp = CodeParser(filename=fname,
                    exclude=re.compile(misc.join_regex(excludes)))
    lines, excluded = cp.parse_source()
    return len(lines), len(excluded)
def skiplist():
    """Return the set of files matched by the coverage config's omit globs.

    Each omit pattern from .coveragerc is rebased from the current working
    directory onto DKROOT before globbing.
    """
    cov = coverage(config_file=os.path.join(dkenv.DKROOT, '.coveragerc'))
    cwd = os.getcwd()
    skippatterns = [os.path.normpath(p.replace(cwd, dkenv.DKROOT)) for p in cov.omit]
    _skiplist = []
    for pat in skippatterns:
        _skiplist += glob.glob(pat)
    return set(_skiplist)
def abspath(fname):
    """Return ``fname`` as an absolute, normalized, case-normalized path --
    the canonical form used when comparing against the skip list."""
    absolute = os.path.abspath(fname)
    return os.path.normcase(os.path.normpath(absolute))
def valid_file(fname, _skiplist=None):
_skiplist = _skiplist or skiplist()
if fname.endswith('.py'):
absfname = abspath(fname)<|fim▁hole|> return False
def python_files(folder):
    """Yield canonical paths of coverage-relevant Python files under
    ``folder``, skipping anything on the omit skip list."""
    _skiplist = skiplist()
    for fname in find_python_files(folder):
        # valid_file returns the canonical path, or a falsy value to skip.
        f = valid_file(fname, _skiplist)
        if f:
            yield f
def pylinecount(rt=None, verbose=False):
    """Count Python lines the same way that coverage does.

    ``rt`` is the root folder to scan (defaults to DKROOT). With
    ``verbose=True`` per-file counts are printed (Python 2 print syntax).
    Returns the total included-line count over all non-skipped files.
    """
    res = 0
    cov = coverage(config_file=os.path.join(dkenv.DKROOT, '.coveragerc'))
    rt = rt or dkenv.DKROOT
    _skiplist = skiplist()
    exclude_lines = cov.get_exclude_list()
    for fname in python_files(rt):
        if os.path.normpath(fname) not in _skiplist:
            lcount, excount = linecount(fname, exclude_lines)
            if verbose:
                print '%5d %5d %s' % (lcount, excount, fname)
            res += lcount
        else:
            # Skipped files are reported but not counted.
            if verbose:
                print '-----', fname
    return res
# def report_test_coverage(reportline, dashboard=True):
# start = time.time()
# parts = reportline.split()
#
# stmts = int(parts[1])
# skipped = int(parts[2])
# covered = stmts - skipped
# print >> sys.stderr, "COVERED:", covered
#
# linecount = pylinecount()
# print >> sys.stderr, "TOTAL: ", linecount
#
# coverage = 100.0 * covered / linecount
# severity = 'green'
# if coverage < 85:
# severity = 'yellow'
# if coverage < 60:
# severity = 'red'
#
# sys.stdout.write("Coverage: " + str(coverage) + '\n')
#
# if dashboard:
# send_status(tag='code.testcov',
# value=coverage,
# duration=time.time() - start,
# server='appsrv')
# if __name__ == "__main__":
# intxt = sys.stdin.read()
# report_test_coverage(intxt)
# sys.exit(0)<|fim▁end|> | if absfname not in _skiplist:
fpath, name = os.path.split(fname)
if name != '__init__.py' or os.stat(absfname).st_size > 0:
return absfname |
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use libc;
// use std::mem;
// use std::ptr;
use std::ffi::CString;
use super::*;
#[test]
fn test_read_objid() {
    // Parse the dotted OID string "1.3.6" through the FFI binding and
    // check the resulting sub-identifier array.
    init(b"test");
    unsafe {
        let input = CString::new("1.3.6").unwrap();
        // Output buffer; the binding writes the parsed sub-identifiers and
        // updates `objid_size` in place to the number written.
        let mut objid: [oid; 256] = [0; 256];
        let mut objid_size: libc::size_t = objid.len() as libc::size_t;
        let res = read_objid(input.as_ptr(), &mut objid[0], &mut objid_size);
        let expected: &[oid] = &[1,3,6];
        // The binding is expected to signal success with 1.
        assert_eq!(res, 1);
        assert_eq!(expected, &objid[..objid_size]);
    }
}
// #[test]
// fn test_snmp_session() {
// init(b"test");
// let peer_name = CString::new("edgy.asdf.dk").unwrap().into_raw();
// let peer_community = b"st0vsuger";
// let peer_obj = CString::new("SNMPv2-MIB::sysDescr.0").unwrap().into_raw();
// unsafe {
// let mut sess: Struct_snmp_session = mem::uninitialized();
// let pdu: *mut Struct_snmp_pdu;
// let mut response_ptr: *mut Struct_snmp_pdu = mem::uninitialized();
// snmp_sess_init(&mut sess);
// sess.peername = peer_name;
// sess.community = &mut peer_community.clone()[0];
// sess.community_len = peer_community.len();
// sess.version = SNMP_VERSION_2c;
// let ss = snmp_open(&mut sess);
// pdu = snmp_pdu_create(SNMP_MSG_GET);
// let mut anOID: [oid; MAX_OID_LEN] = [0; MAX_OID_LEN];
// let mut anOID_len = MAX_OID_LEN;
// if snmp_parse_oid(peer_obj, &mut anOID[0], &mut anOID_len) == ptr::null_mut() {
// panic!();
// }
// snmp_add_null_var(pdu, &mut anOID[0], anOID_len);
// let status = snmp_synch_response(ss, pdu, &mut response_ptr);
// println!("snmp_synch_response() -> {}", status);
// if status == STAT_SUCCESS {
// let response = *response_ptr;<|fim▁hole|>// if response.errstat == SNMP_ERR_NOERROR {
// let mut vars = response.variables;
// while vars != ptr::null_mut() {
// print_variable((*vars).name, (*vars).name_length, vars);
// vars = (*vars).next_variable;
// }
// }
// } else {
// panic!("status={}", status);
// }
// }
// }<|fim▁end|> | |
<|file_name|>lakkucast.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#credits : https://gist.github.com/TheCrazyT/11263599
import socket
import ssl
import select
import time
import re
import sys
from thread import start_new_thread
from struct import pack
from random import randint
from subprocess import call
import os
import fnmatch
import argparse
import logging
class lakkucast:
    def __init__(self):
        # Last textual receiver status seen ("Ready To Cast", ...); None = unknown.
        self.status = None
        self.session_id = None
        # Cast v2 protocol version field (always 0 on the wire).
        self.protocolVersion = 0
        # Sender/receiver ids used in every CastMessage frame.
        self.source_id = "sender-0"
        self.destination_id = "receiver-0"
        self.chromecast_server = "192.168.1.23" #living room audio
        # TLS socket; 0 until init_status() connects.
        self.socket = 0
        # Protobuf wire-type codes used when hand-packing messages.
        self.type_enum = 0
        self.type_string = 2
        self.type_bytes = self.type_string
        # Receiver app session id scraped from GET_STATUS replies.
        self.session = 0
        # Last player state seen ("IDLE", "PLAYING", "BUFFERING", ...).
        self.play_state = None
        # Seconds to wait between status polls / media operations.
        self.sleep_between_media = 5
        self.content_id = None
        # Max socket reads before giving up on a status reply.
        self.socket_fail_count = 100
def clean(self,s):
return re.sub(r'[\x00-\x1F\x7F]', '?',s)
def getType(self, fieldId,t):
return (fieldId << 3) | t
def getLenOf(self, s):
x = ""
l = len(s)
while(l > 0x7F):
x += pack("B",l & 0x7F | 0x80)
l >>= 7
x += pack("B",l & 0x7F)
return x
    def init_status(self):
        # Perform the CONNECT + GET_STATUS + volume handshake over the
        # Cast v2 wire protocol (TLS on port 8009). Messages are
        # hand-packed protobuf CastMessage frames, each prefixed with a
        # big-endian uint32 length.
        self.socket = socket.socket()
        self.socket = ssl.wrap_socket(self.socket)
        #print "connecting ..."
        self.socket.connect((self.chromecast_server,8009))
        payloadType = 0 #0=string
        data = "{\"type\":\"CONNECT\",\"origin\":{}}"
        lnData = self.getLenOf(data)
        #print len(lnData),len(data),lnData.encode("hex")
        namespace = "urn:x-cast:com.google.cast.tp.connection"
        # CastMessage fields: 1=protocol version, 2=source_id,
        # 3=destination_id, 4=namespace, 5=payload type,
        # 6=payload (varint length followed by the UTF-8 data).
        msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" %
            (len(self.source_id),
            len(self.destination_id),
            len(namespace),
            len(lnData),
            len(data)),
            self.getType(1,self.type_enum),
            self.protocolVersion,
            self.getType(2,self.type_string),
            len(self.source_id),
            self.source_id,
            self.getType(3,self.type_string),
            len(self.destination_id),
            self.destination_id,
            self.getType(4,self.type_string),
            len(namespace),
            namespace,
            self.getType(5,self.type_enum),
            payloadType,
            self.getType(6,self.type_bytes),
            lnData,
            data)
        # Prepend the 4-byte big-endian frame length.
        msg = pack(">I%ds" % (len(msg)),len(msg),msg)
        #print msg.encode("hex")
        #print "Connecting ..."
        self.socket.write(msg)
        payloadType = 0 #0=string
        data = "{\"type\":\"GET_STATUS\",\"requestId\":46479000}"
        lnData = self.getLenOf(data)
        namespace = "urn:x-cast:com.google.cast.receiver"
        msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" % (len(self.source_id),
            len(self.destination_id),
            len(namespace),
            len(lnData),
            len(data)),
            self.getType(1,self.type_enum),
            self.protocolVersion,
            self.getType(2,self.type_string),
            len(self.source_id),
            self.source_id,
            self.getType(3,self.type_string),
            len(self.destination_id),
            self.destination_id,
            self.getType(4,self.type_string),
            len(namespace),
            namespace,
            self.getType(5,self.type_enum),
            payloadType,
            self.getType(6,self.type_bytes),
            lnData,
            data)
        msg = pack(">I%ds" % (len(msg)),len(msg),msg)
        #print "sending status request..."
        self.socket.write(msg)
        # Scrape the JSON replies with regexes rather than decoding the
        # protobuf frames; keep reading until a sessionId or playerState
        # shows up, bailing out after socket_fail_count reads.
        m1=None
        m3=None
        result=""
        count = 0
        while m1==None and m3==None:
            lastresult = self.socket.read(2048)
            result += lastresult
            #print "#"+lastresult.encode("hex")
            #if lastresult != "":
            #    print self.clean("\nH!"+lastresult)
            #print result
            m1 = re.search('"sessionId":"(?P<session>[^"]+)"', result)
            m2 = re.search('"statusText":"(?P<status>[^"]+)"', result)
            m3 = re.search('"playerState":"(?P<play_state>[^"]+)"', result)
            m4 = re.search('"contentId":"(?P<content_id>[^"]+)"', result)
            count = count + 1
            if count > self.socket_fail_count:
                # Receiver never answered usefully; report "unknown".
                self.status = None
                self.play_state = None
                self.status = None
                break
        #print "#%i" % (m==None)
        if m1 != None:
            #print "session:",m1.group("session")
            self.session = m1.group("session")
        if m2 != None:
            #print "status:",m2.group("status")
            self.status = m2.group("status")
        if m3 != None:
            #print "play_state:",m3.group("play_state")
            self.play_state = m3.group("play_state")
        if m4 != None:
            #print "contentid:",m4.group("content_id")
            self.content_id = m4.group("content_id")
        payloadType = 0 #0=string
        # NOTE(review): this payload is not well-formed Cast JSON (bare
        # MESSAGE_TYPE key, single quotes), so the receiver most likely
        # ignores the volume request — confirm before relying on it.
        data = "{MESSAGE_TYPE: 'SET_VOLUME','volume': {'level': 0.2}}"
        lnData = self.getLenOf(data)
        #print len(lnData),len(data),lnData.encode("hex")
        namespace = "urn:x-cast:com.google.cast.tp.connection"
        msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" %
            (len(self.source_id),
            len(self.destination_id),
            len(namespace),
            len(lnData),
            len(data)),
            self.getType(1,self.type_enum),
            self.protocolVersion,
            self.getType(2,self.type_string),
            len(self.source_id),
            self.source_id,
            self.getType(3,self.type_string),
            len(self.destination_id),
            self.destination_id,
            self.getType(4,self.type_string),
            len(namespace),
            namespace,
            self.getType(5,self.type_enum),
            payloadType,
            self.getType(6,self.type_bytes),
            lnData,
            data)
        msg = pack(">I%ds" % (len(msg)),len(msg),msg)
        #print msg.encode("hex")
        #print "Connecting ..."
        self.socket.write(msg)
def get_status(self):
return " ".join(["main_status:" , self.get_main_status() , "play_status:" , self.get_play_status()])
def get_main_status(self):
if self.status == None:
status_str = "None"
else:
status_str = self.status
return (status_str)
def get_play_status(self):
if self.play_state == None:
play_state_str = "None"
else:
play_state_str = self.play_state
return (play_state_str)
def ready_to_play(self):
if self.status == "Now Casting":
return False
if self.status == "Ready To Cast" or self.status == None or self.status == "Chromecast Home Screen":
if self.play_state == None:
return True
if self.play_state == "IDLE":
return True
if self.play_state == "PLAYING":
return False
if self.play_state == "BUFFERING":
return False
return True
else:
return False
    def close_connection(self):
        # Tear down the TLS socket; a fresh connection is opened for each
        # init_status()/play_url() cycle.
        self.socket.close()
def play_url(self, url):
payloadType = 0 #0=string
data = "{\"type\":\"LAUNCH\",\"requestId\":46479001,\"appId\":\"CC1AD845\"}"
lnData = self.getLenOf(data)
namespace = "urn:x-cast:com.google.cast.receiver"
msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" %
(len(self.source_id),
len(self.destination_id),
len(namespace),
len(lnData),
len(data)),
self.getType(1,self.type_enum),
self.protocolVersion,
self.getType(2,self.type_string),
len(self.source_id),
self.source_id,
self.getType(3,self.type_string),
len(self.destination_id),
self.destination_id,
self.getType(4,self.type_string),
len(namespace),
namespace,
self.getType(5,self.type_enum),
payloadType,
self.getType(6,self.type_bytes),
lnData,
data)
msg = pack(">I%ds" % (len(msg)),len(msg),msg)
#print msg.encode("hex")
#print "sending ..."
self.socket.write(msg)
m=None
result=""
while m==None:
lastresult = self.socket.read(2048)
result += lastresult
#print "#"+lastresult.encode("hex")
#print clean("!"+lastresult)
m = re.search('"transportId":"(?P<transportId>[^"]+)"', result)
self.destination_id = m.group("transportId")
payloadType = 0 #0=string
data = "{\"type\":\"CONNECT\",\"origin\":{}}"
lnData = self.getLenOf(data)
#print len(lnData),len(data),lnData.encode("hex")
namespace = "urn:x-cast:com.google.cast.tp.connection"
msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" %
(len(self.source_id),
len(self.destination_id),
len(namespace),
len(lnData),
len(data)),
self.getType(1,self.type_enum),
self.protocolVersion,
self.getType(2,self.type_string),
len(self.source_id),
self.source_id,
self.getType(3,self.type_string),
len(self.destination_id),
self.destination_id,
self.getType(4,self.type_string),
len(namespace),
namespace,
self.getType(5,self.type_enum),
payloadType,
self.getType(6,self.type_bytes),
lnData,
data)
msg = pack(">I%ds" % (len(msg)),len(msg),msg)
#print msg.encode("hex")
#print "sending ..."
self.socket.write(msg)
payloadType = 0 #0=string
data = "{\"type\":\"LOAD\",\"requestId\":46479002,\"sessionId\":\""+self.session+"\",\"media\":{\"contentId\":\""+url+"\",\"streamType\":\"buffered\",\"contentType\":\"video/mp4\"},\"autoplay\":true,\"currentTime\":0,\"customData\":{\"payload\":{\"title:\":\"\"}}}"
lnData = self.getLenOf(data)
namespace = "urn:x-cast:com.google.cast.media"
msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" %
(len(self.source_id),
len(self.destination_id),
len(namespace),
len(lnData),
len(data)),
self.getType(1,self.type_enum),
self.protocolVersion,
self.getType(2,self.type_string),
len(self.source_id),
self.source_id,
self.getType(3,self.type_string),
len(self.destination_id),
self.destination_id,
self.getType(4,self.type_string),
len(namespace),
namespace,
self.getType(5,self.type_enum),
payloadType,
self.getType(6,self.type_bytes),
lnData,
data)
msg = pack(">I%ds" % (len(msg)),len(msg),msg)
#print msg.encode("hex")
#print "sending ..."
#print "LOADING"
self.socket.write(msg)
payloadType = 0 #0=string
volume = min(max(0, round(0.1, 1)), 1)
data = "{MESSAGE_TYPE: 'SET_VOLUME','volume': {'level': volume}}"
lnData = self.getLenOf(data)
#print len(lnData),len(data),lnData.encode("hex")
namespace = "urn:x-cast:com.google.cast.tp.connection"
msg = pack(">BBBB%dsBB%dsBB%dsBBB%ds%ds" %
(len(self.source_id),
len(self.destination_id),
len(namespace),
len(lnData),
len(data)),
self.getType(1,self.type_enum),
self.protocolVersion,
self.getType(2,self.type_string),
len(self.source_id),
self.source_id,
self.getType(3,self.type_string),
len(self.destination_id),
self.destination_id,
self.getType(4,self.type_string),
len(namespace),
namespace,
self.getType(5,self.type_enum),
payloadType,<|fim▁hole|> self.getType(6,self.type_bytes),
lnData,
data)
msg = pack(">I%ds" % (len(msg)),len(msg),msg)
#print msg.encode("hex")
#print "Connecting ..."
self.socket.write(msg)
self.close_connection()
#try:
# while True:
# print "before lastresult"
# lastresult = self.socket.read(2048)
# if lastresult!="":
# #print "#"+lastresult.encode("hex")
# print self.clean("! In loop:"+lastresult)
# finally:
# print "final"
# socket.close()
# print "socket closed"
class manage_lightwave:
    """Drives the LightwaveRF-controlled projector screen by shelling out
    to the external ``lightwaverf`` command-line tool."""

    def __init__(self):
        self.room = "Main\ Bedroom"
        self.device = "Screen"
        self.lightwave_cmd = "/usr/local/bin/lightwaverf"

    def _run(self, action):
        # Build "<cmd> <room> <device> <on|off> >cmd.log 2>&1", execute it
        # via the shell, and return the command string for logging.
        pieces = [self.lightwave_cmd, self.room, self.device, action,
                  ">cmd.log", "2>&1"]
        cmd = " ".join(pieces)
        os.system(cmd)
        return cmd

    def start_screen(self):
        """Switch the screen device on; returns the shell command used."""
        return self._run("on")

    def stop_screen(self):
        """Switch the screen device off; returns the shell command used."""
        return self._run("off")
class lakkucast_media:
    # Picks random .mp4 files from the configured media directories and
    # tracks what has already been played in a flat history file.
    def __init__(self):
        # Filesystem root of the media library and the HTTP host that
        # serves the same tree (paths are translated between the two).
        self.top_dir = "/data"
        self.top_url = "http://192.168.1.98"
        #self.media_dirs = ["media/test/sample1", "media/test/sample2"]
        self.media_dirs = ["media/TV-Shows/English/Friends", "media/TV-Shows/English/That 70s Show", "media/TV-Shows/English/Big Bang Theory"]
        # Play-history file: one URL per line, appended after each pick.
        self.media_data = "/data/webapps/lakku/lakkucast/media.dat"
    def random_play(self, num_play):
        # Pick one random media directory, then collect num_play random,
        # not-yet-played .mp4 URLs from it. Returns the URL list, or
        # None when every directory attempt fails to fill the quota.
        count_dir = 0
        num_dirs = len(self.media_dirs)
        while count_dir < num_dirs:
            rand_main = randint(0, (len(self.media_dirs)-1))
            url_list = []
            sel_dir = os.path.join(self.top_dir, self.media_dirs[rand_main])
            if os.path.isdir(sel_dir):
                count_dir = count_dir + 1
                matches = []
                # Relative paths (top_dir stripped) so they can be glued
                # onto top_url below.
                for root, dirnames, filenames in os.walk(sel_dir):
                    for filename in fnmatch.filter(filenames, '*.mp4'):
                        matches.append(os.path.join(root, filename).replace(self.top_dir,''))
                count = 1
                loop_count = 1
                # Draw random files until the quota is met or we have made
                # nearly len(matches) attempts (loop_count guard).
                # NOTE(review): assumes matches is non-empty — randint
                # raises if the directory holds no .mp4 files; confirm.
                while count <= num_play:
                    file_rand = randint(0, (len(matches)-1))
                    file_name = "".join([self.top_url , matches[file_rand]])
                    if self.played_before(file_name) == False:
                        if file_name not in url_list:
                            url_list.append(file_name)
                            count = count + 1
                    loop_count = loop_count + 1
                    if loop_count == (len(matches)-1):
                        break
                if count < num_play:
                    # Quota not met in this directory: try another one.
                    continue
                else:
                    # Record the picks in the history file before returning.
                    fhand = open(self.media_data, 'a+')
                    for u in url_list:
                        fhand.write(u+'\n')
                    fhand.close()
                    return url_list
    def played_before(self, media_name):
        # Substring search of the history file; True when the URL was
        # already recorded by a previous random_play().
        if media_name in open(self.media_data).read():
            return True
        return False
    def reset_media_history(self):
        # Empty the play-history file so every clip is eligible again.
        fhand = open(self.media_data, 'w')
        fhand.truncate()
        fhand.close()
if __name__ == '__main__':
    # Command-line front end. One of --play / --stop / --reset /
    # --reset_media_history is expected per invocation.
    parser = argparse.ArgumentParser(description="lakkucast")
    parser.add_argument("--play", help="Play x videos ")
    parser.add_argument("--stop", help="Stop playing and shutdown", action='store_true')
    parser.add_argument("--reset", help="Stop playing", action='store_true')
    parser.add_argument("--reset_media_history", help="Reset media history", action='store_true')
    args = parser.parse_args()
    log_file = "/data/webapps/lakku/lakkucast/lakkucast.log"
    log_level = logging.INFO
    logging.basicConfig(filename=log_file, level=log_level,
        format='%(asctime)s [%(levelname)s] %(message)s')
    logging.info("Starting lakkucast.")
    if args.play:
        # The requested clip count is doubled before asking random_play().
        num_play = int(args.play) * 2
        logging.info("Play count: %s"
            % (args.play))
        lm = lakkucast_media()
        lwrf = manage_lightwave()
        logging.info("Sending start command to lwrf")
        logging.info(lwrf.start_screen())
        lwrf.start_screen()
        logging.info("Sleeping after lwrf start")
        url_list = lm.random_play(num_play)
        time.sleep(20)
        # BUGFIX: was "if url_list != None" — an empty list took the
        # playback branch, where `l` is only bound inside the for loop,
        # crashing with NameError at the trailing sleep.
        if url_list:
            logging.info("Got %d urls to play"
                % (len(url_list)))
            for u in url_list:
                logging.info("Trying URL: %s"
                    % (u))
                # Fresh player (and socket) per clip.
                l = lakkucast()
                logging.info("Sleeping before main init")
                time.sleep(l.sleep_between_media)
                l.init_status()
                logging.info(l.get_status())
                if l.ready_to_play():
                    logging.info("Playing URL: %s"
                        % (u))
                    l.play_url(u)
                    l.init_status()
                    logging.info(l.get_status())
                    # Poll until the receiver is idle again before the
                    # next clip.
                    while not l.ready_to_play():
                        time.sleep(l.sleep_between_media)
                        l.init_status()
                        logging.info(l.get_status())
            time.sleep(l.sleep_between_media)
            logging.info("Sending stop command to lwrf")
            logging.info(lwrf.stop_screen())
        else:
            logging.info("No urls returned by player")
            # BUGFIX: `l` was referenced here without ever being created
            # (NameError). Build and initialise a player before flushing
            # with the placeholder clip, mirroring the --stop branch.
            l = lakkucast()
            l.init_status()
            l.play_url("http://192.168.1.98/media/test/water.mp4")
            time.sleep(l.sleep_between_media)
            lwrf = manage_lightwave()
            logging.info("Sending stop command to lwrf")
            logging.info(lwrf.stop_screen())
    if args.stop:
        l = lakkucast()
        l.init_status()
        logging.info("Calling stop")
        logging.info(l.get_status())
        l.play_url("http://192.168.1.98/media/test/water.mp4")
        time.sleep(10)
        lwrf = manage_lightwave()
        logging.info("Sending stop command to lwrf")
        logging.info(lwrf.stop_screen())
    if args.reset:
        l = lakkucast()
        l.init_status()
        logging.info("Calling reset")
        logging.info(l.get_status())
        l.play_url("http://192.168.1.98/media/test/water.mp4")
    if args.reset_media_history:
        logging.info("Calling Reset media history")
        lm = lakkucast_media()
        lm.reset_media_history()
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import patterns # noqa<|fim▁hole|>from django.conf.urls.defaults import url # noqa
from openstack_dashboard.dashboards.fogbow.usage import views
from openstack_dashboard.dashboards.fogbow.usage.views import IndexView

# Route table for the fogbow "usage" dashboard panel:
#   ^$                   -> IndexView               (name="index")
#   ^<member_id>/usage$  -> getSpecificMemberUsage  (name="usage")
urlpatterns = patterns('',
    url(r'^$', IndexView.as_view(), name='index'),
    url(r'^(?P<member_id>.*)/usage$', views.getSpecificMemberUsage, name='usage'),
)
<|file_name|>Lang.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
MENU_FILE = "File"
FILE_NEW = "New"
FILE_OPEN = "Open"
FILE_EXIT = "Exit"<|fim▁hole|>
# Labels for the main window's tab pages and buttons.
TAB_DATA = "Data"
TAB_SQL = "SQL"
BUTTON_EXIT = "Exit"
<|file_name|>SetTablatureEnabledAction.java<|end_file_name|><|fim▁begin|>/*
* Created on 17-dic-2005
*
* TODO To change the template for this generated file go to
* Window - Preferences - Java - Code Style - Code Templates
*/
package org.herac.tuxguitar.app.actions.layout;
import org.herac.tuxguitar.app.actions.Action;
import org.herac.tuxguitar.app.actions.ActionData;
import org.herac.tuxguitar.graphics.control.TGLayout;
/**
* @author julian
*
* TODO To change the template for this generated type comment go to
* Window - Preferences - Java - Code Style - Code Templates
*/
public class SetTablatureEnabledAction extends Action{
public static final String NAME = "action.view.layout-set-tablature-enabled";
	/**
	 * Registers the action under {@link #NAME} with automatic
	 * lock/unlock/update handling and key-binding support.
	 */
	public SetTablatureEnabledAction() {
		super(NAME, AUTO_LOCK | AUTO_UNLOCK | AUTO_UPDATE | KEY_BINDING_AVAILABLE);
	}
protected int execute(ActionData actionData){
TGLayout layout = getEditor().getTablature().getViewLayout();
layout.setStyle( ( layout.getStyle() ^ TGLayout.DISPLAY_TABLATURE ) );
if((layout.getStyle() & TGLayout.DISPLAY_TABLATURE) == 0 && (layout.getStyle() & TGLayout.DISPLAY_SCORE) == 0 ){<|fim▁hole|> updateTablature();
return 0;
}
}<|fim▁end|> | layout.setStyle( ( layout.getStyle() ^ TGLayout.DISPLAY_SCORE ) );
} |
<|file_name|>ConfigurationFileAnalyser.java<|end_file_name|><|fim▁begin|>/*
* Unitex
*
* Copyright (C) 2001-2014 Université Paris-Est Marne-la-Vallée <[email protected]>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*
*/
package fr.umlv.unitex.cassys;
public class ConfigurationFileAnalyser {
	/** Graph file name from the line, without the surrounding quotes. */
	private String fileName;
	/** True when the output mode field was Merge. */
	private boolean mergeMode;
	/** True when the output mode field was Replace. */
	private boolean replaceMode;
	/** True when the line was flagged Disabled. */
	private boolean disabled;
	/** True when the repeat flag was "*" rather than "1". */
	private boolean star;
	/** True when the line carried a '#' comment. */
	private boolean commentFound;
	/**
	 * @return the graph file name extracted from the configuration line,
	 *         without the surrounding quotes
	 */
	public String getFileName() {
		return fileName;
	}
/**
* @return the mergeMode
*/
public boolean isMergeMode() {
return mergeMode;
}
/**
* @return the replaceMode
*/
public boolean isReplaceMode() {
return replaceMode;
}
/**
* @return the commentFound
*/
public boolean isCommentFound() {
return commentFound;<|fim▁hole|>
/**
*
* @return disabled
*/
public boolean isDisabled(){
return disabled;
}
public boolean isStar(){
return star;
}
public ConfigurationFileAnalyser(String line) throws EmptyLineException,
InvalidLineException {
// extract line comment
final String lineSubstring[] = line.split("#", 2);
if (lineSubstring.length > 1) {
commentFound = true;
}
if (lineSubstring[0] == null || lineSubstring[0].equals("")) {
throw new EmptyLineException();
}
final String lineCore[] = lineSubstring[0].split(" ");
if (!lineCore[0].startsWith("\"") || !lineCore[0].endsWith("\"")) {
throw new InvalidLineException(lineSubstring[0]
+ " --> FileName must start and end with quote\n");
}
lineCore[0] = lineCore[0].substring(1, lineCore[0].length() - 1);
if (lineCore.length > 1) {
fileName = lineCore[0];
if (lineCore[1].equals("M") || lineCore[1].equals("Merge")
|| lineCore[1].equals("merge")) {
mergeMode = true;
replaceMode = false;
} else if (lineCore[1].equals("R") || lineCore[1].equals("Replace")
|| lineCore[1].equals("replace")) {
mergeMode = false;
replaceMode = true;
} else {
throw new InvalidLineException(lineSubstring[0]
+ " --> Second argument should be Merge or Replace\n");
}
} else {
throw new InvalidLineException(
lineSubstring[0]
+ " --> FileName should be followed by a white space and Merge or Replace\n");
}
if(lineCore.length>2){
if(lineCore[2].equals("Disabled")||lineCore[2].equals("Disabled")){
disabled = true;
} else if(lineCore[2].equals("Enabled")){
disabled = false;
} else {
throw new InvalidLineException(lineSubstring[0]
+ " --> Third argument should be Disabled or Enabled\n");
}
if(lineCore.length>3){
if(lineCore[3].equals("*")){
star = true;
} else if(lineCore[3].equals("1")){
star = false;
} else {
throw new InvalidLineException(lineSubstring[0]
+ " --> Fourth argument should be 1 or *\n");
}
} else {
star = false;
}
} else {
disabled = false;
star = false;
}
}
public class InvalidLineException extends Exception {
public InvalidLineException(String s) {
super(s);
}
public InvalidLineException() {
/* NOP */
}
}
public class EmptyLineException extends Exception {
public EmptyLineException() {
/* NOP */
}
}
}<|fim▁end|> | }
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.views.generic import ListView, DetailView, CreateView, \
DeleteView, UpdateView, \
ArchiveIndexView, DateDetailView, \
DayArchiveView, MonthArchiveView, \
TodayArchiveView, WeekArchiveView, \
YearArchiveView, View
from schoolnew.models import *
from schoolnew.forms import *
from django.db.models import *
from django.shortcuts import render
from django.core.paginator import Paginator, PageNotAnInteger
from django.core.urlresolvers import reverse
from django.contrib import messages
from itertools import *
from datetime import datetime
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import redirect
import reportlab
import cStringIO as StringIO
from xhtml2pdf import pisa
from django.template.loader import get_template
from django.template import Context
from cgi import escape
from excel_response import ExcelResponse
import json
from django.utils import simplejson
import os
from django.conf import settings
from django.contrib.auth import authenticate, login
from django.core.cache import cache
class myview1(View):
    """Renders a school's complete data-entry profile as a downloadable
    PDF report (one query per section, handed to printpdfschool.html)."""

    def get(self, request, **kwargs):
        if request.user.is_authenticated():
            pk = self.kwargs.get('pk')
            basic = Basicinfo.objects.get(id=pk)
            # Kept for its permission-ish side effect: raises DoesNotExist
            # when the user has no associated school.
            school = School.objects.get(id=request.user.account.associated_with)
            # NOTE(review): 'admin' and 'academic' fetch the same row; both
            # names are referenced by the template context below.
            admin = Academicinfo.objects.get(school_key=basic.id)
            academic = Academicinfo.objects.get(school_key=basic.id)
            infra = Infradet.objects.get(school_key=basic.id)
            class_det = Class_section.objects.filter(school_key=basic.id)
            schgroup_det = Sch_groups.objects.filter(school_key=basic.id)
            post_det = Staff.objects.filter(Q(school_key=basic.id))
            parttime_det = Parttimestaff.objects.filter(school_key=basic.id)
            land_det = Land.objects.filter(school_key=basic.id)
            build_det = Building.objects.filter(school_key=basic.id)
            buildabs_det = Building_abs.objects.filter(school_key=basic.id)
            sports_det = Sports.objects.filter(school_key=basic.id)
            ict_det = Ictentry.objects.filter(school_key=basic.id)
            passper_det = Passpercent.objects.filter(school_key=basic.id)
            response = HttpResponse(content_type='application/pdf')
            filename = str(basic.udise_code)
            # BUGFIX: the header used to be set to the bare, misspelled
            # string 'attachement' and the filename clause was a discarded
            # expression after a stray semicolon.
            response['Content-Disposition'] = \
                'attachment; filename={0}.pdf'.format(filename)
            pdf = render_to_pdf(
                'printpdfschool.html',
                {
                    'basic': basic,
                    'admin': admin,
                    'academic': academic,
                    'infra': infra,
                    'class_det': class_det,
                    'schgroup_det': schgroup_det,
                    'post_det': post_det,
                    'parttime_det': parttime_det,
                    'land_det': land_det,
                    'build_det': build_det,
                    'buildabs_det': buildabs_det,
                    'sports_det': sports_det,
                    'ict_det': ict_det,
                    'passper_det': passper_det,
                    'pagesize': 'A4'
                }
            )
            response.write(pdf)
            return response
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
def render_to_pdf(template_src, context_dict):
    """Render a Django template to PDF with xhtml2pdf (pisa).

    Returns an HttpResponse carrying the PDF bytes on success, or an
    HTML error page echoing the rendered markup when pisa reports a
    failure.  (The unused ``infra_edit_chk`` local was removed.)
    """
    template = get_template(template_src)
    context = Context(context_dict)
    html = template.render(context)
    result = StringIO.StringIO()
    # pisa consumes a byte stream; the rendered template is UTF-8 text.
    pdf = pisa.pisaDocument(StringIO.StringIO(html.encode("UTF-8")), result)
    if not pdf.err:
        return HttpResponse(result.getvalue(), content_type='application/pdf')
    return HttpResponse('We had some errors<pre>%s</pre>' % escape(html))
class home_page(View):
    # Dashboard view: gathers every section of the logged-in school's
    # data entry (basic, academic, infra, staff, land, buildings, ...)
    # into locals() for home_edit1.html. The username doubles as the
    # school's UDISE code.
    def get(self, request, **kwargs):
        if request.user.is_authenticated():
            if (Basicinfo.objects.filter(udise_code=request.user.username).count())>0:
                # chk_ss=Basicinfo.objects.filter(udise_code=request.user.username)
                # slno=District.objects.filter(id__lt=15)
                basic_det=Basicinfo.objects.get(udise_code=request.user.username)
                basic_det = Basicinfo.objects.get(id=basic_det.id)
                sch_key = basic_det.id
                new_sch_id = basic_det.id
                govchk=basic_det.sch_management
                sch_dir=basic_det.sch_directorate
                sch_cat_chk=basic_det.sch_cate
                chk_user=Basicinfo.objects.get(udise_code=request.user.username)
                # Government/aided managements under directorates 001/002
                # get the extra "govaid" entry section.
                if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')):
                    if ((basic_det.sch_directorate.department_code=='001')|(basic_det.sch_directorate.department_code=='002')):
                        govaid_ent='Yes'
                    else:
                        govaid_ent=''
                else:
                    govaid_ent=''
                if (Academicinfo.objects.filter(school_key=basic_det.id).count())>0:
                    acade_det = Academicinfo.objects.get(school_key=basic_det.id)
                    acade_det = Academicinfo.objects.get(id=acade_det.id)
                if basic_det.sch_cate:
                    # NOTE(review): ('3,5,6,7,8,10,11') is a single string,
                    # not a tuple, so this is a substring test — '1' would
                    # also match '10'/'11'. Confirm intent before changing.
                    if (basic_det.sch_cate.category_code in ('3,5,6,7,8,10,11')):
                        pass_ent='Yes'
                    else:
                        pass_ent=''
                else:
                    pass_ent=''
                if (Infradet.objects.filter(school_key=basic_det.id).count())>0:
                    infra_det = Infradet.objects.get(school_key=basic_det.id)
                if (Class_section.objects.filter(school_key=basic_det.id).count())>0:
                    class_det = Class_section.objects.filter(school_key=basic_det.id)
                # NOTE(review): this compares a QuerySet to 0 with ">"
                # (no .count()); it only "works" under Python 2's lax
                # mixed-type ordering. Should be .count()>0 — confirm.
                if (Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=1)))>0:
                    teach_det = Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=1))
                if (Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=2)).count())>0:
                    nonteach_det = Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=2))
                if (Parttimestaff.objects.filter(school_key=basic_det.id).count())>0:
                    parttime_det = Parttimestaff.objects.filter(school_key=basic_det.id)
                if (Land.objects.filter(school_key=basic_det.id).count())>0:
                    land_det = Land.objects.filter(school_key=basic_det.id)
                if (Building.objects.filter(school_key=basic_det.id).count())>0:
                    building_det = Building.objects.filter(school_key=basic_det.id)
                if (Building_abs.objects.filter(school_key=basic_det.id).count())>0:
                    buildabs_det = Building_abs.objects.filter(school_key=basic_det.id)
                if (Sports.objects.filter(school_key=basic_det.id).count())>0:
                    sports_det = Sports.objects.filter(school_key=basic_det.id)
                if (Ictentry.objects.filter(school_key=basic_det.id).count())>0:
                    ict_det = Ictentry.objects.filter(school_key=basic_det.id)
                if (Sch_groups.objects.filter(school_key=basic_det.id).count())>0:
                    schgroup_det = Sch_groups.objects.filter(school_key=basic_det.id)
                basic_mdate=basic_det.modified_date.strftime('%d-%m-%Y -- %H:%M %p')
                grp=basic_det.sch_cate
                # Higher-secondary categories unlock the "group" section.
                if ((str(grp)=='Hr.Sec School (I-XII)')|(str(grp)=='Hr.Sec School (VI-XII)')|(str(grp)=='Hr.Sec School (IX-XII)')|(str(grp)=='Hr.Sec School (XI-XII)')|(str(grp)=='Matriculation Hr.Sec School (I-XII)')):
                    grp_chk='Yes'
                else:
                    grp_chk=''
                if (Academicinfo.objects.filter(school_key=basic_det.id).count())>0:
                    acade_mdate=Academicinfo.objects.get(school_key=basic_det.id)
                if (Infradet.objects.filter(school_key=basic_det.id).count())>0:
                    infra_mdate=Infradet.objects.get(school_key=basic_det.id)
                # NOTE(review): same QuerySet>0 comparison as above.
                if (Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=1)))>0:
                    teach_mdate=Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=1))
                return render (request,'home_edit1.html',locals())
            else:
                # No Basicinfo row yet: template renders the empty shell.
                return render (request,'home_edit1.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class basic_edit(UpdateView):
    def get(self, request, **kwargs):
        # Pre-fills the basic-info edit form. Every model field is copied
        # into a local variable so render(..., locals()) exposes it to
        # basicinfo_edit.html. The username is the school's UDISE code;
        # its first six digits identify the block for lat/long defaults.
        if request.user.is_authenticated():
            chk_user1=self.kwargs.get('pk')
            district_list = District.objects.all().order_by('district_name')
            chk_udise_block=int((str(request.user.username)[:6]))
            latlong_det= Gis_block_code.objects.get(udise_block_code=chk_udise_block)
            acadyr_lst=Acadyr_mas.objects.all()
            if Basicinfo.objects.filter(udise_code=int(request.user.username)).count()>0:
                # Existing record: bind the form to it and mirror fields.
                basic_det=Basicinfo.objects.get(udise_code=request.user.username)
                basic_det = Basicinfo.objects.get(id=basic_det.id)
                instance = Basicinfo.objects.get(udise_code=request.user.username)
                form=BasicForm(instance=instance)
                school_id=instance.school_id
                school_name = instance.school_name
                if instance.school_name_tamil:
                    word = instance.school_name_tamil
                else:
                    word=''
                udise_code = instance.udise_code
                district = instance.district
                block = instance.block
                local_body_type= instance.local_body_type
                village_panchayat =instance.village_panchayat
                vill_habitation = instance.vill_habitation
                town_panchayat = instance.town_panchayat
                town_panchayat_ward = instance.town_panchayat_ward
                municipality = instance.municipality
                municipal_ward = instance.municipal_ward
                cantonment = instance.cantonment
                cantonment_ward = instance.cantonment_ward
                township = instance.township
                township_ward = instance.township_ward
                corporation = instance.corporation
                corpn_zone = instance.corpn_zone
                corpn_ward = instance.corpn_ward
                edu_district = instance.edu_district
                address = instance.address
                stdcode = instance.stdcode
                landline = instance.landline
                landline2 = instance.landline2
                mobile = instance.mobile
                sch_email = instance.sch_email
                website = instance.website
                bank_dist=instance.bank_dist
                bank = instance.bank
                branch = instance.branch
                bankaccno = instance.bankaccno
                parliament = instance.parliament
                assembly = instance.assembly
                manage_cate = instance.manage_cate
                sch_management=instance.sch_management
                sch_cate = instance.sch_cate
                sch_directorate = instance.sch_directorate
                pta_esta = instance.pta_esta
                pta_no= instance.pta_no
                pta_sub_yr= instance.pta_sub_yr
                prekg=instance.prekg
                kgsec=instance.kgsec
                cluster=instance.cluster
                mgt_opn_year=instance.mgt_opn_year
                mgt_type=instance.mgt_type
                mgt_name=instance.mgt_name
                mgt_address=instance.mgt_address
                mgt_regis_no=instance.mgt_regis_no
                mgt_regis_dt=instance.mgt_regis_dt
                draw_off_code=instance.draw_off_code
                regis_by_office=instance.regis_by_office
                pincode=instance.pincode
                chk_dept=instance.chk_dept
                chk_manage=instance.chk_manage
                # latlong flags whether the school already has its own
                # coordinates; otherwise the block-level default is shown.
                if instance.latitude:
                    latitude = instance.latitude
                    longitude = instance.longitude
                    latlong='Yes'
                else:
                    latlong_det= Gis_block_code.objects.get(udise_block_code=chk_udise_block)
                    latlong=''
                return render (request,'basicinfo_edit.html',locals())
            else:
                # First visit: blank form plus block-level lat/long default.
                form=BasicForm()
                chk_udise_block=int((str(request.user.username)[:6]))
                latlong_det= Gis_block_code.objects.get(udise_block_code=chk_udise_block)
                latlong=''
                return render (request,'basicinfo_edit.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
def post(self, request, **kwargs):
if request.user.is_authenticated():
if Basicinfo.objects.filter(udise_code=request.user.username).count()>0:
instance = Basicinfo.objects.get(udise_code=request.user.username)
basic_editsave=Basicinfo.objects.get(udise_code=request.user.username)
form = BasicForm(request.POST,request.FILES)
if form.is_valid():
basic_editsave.school_id = form.cleaned_data['school_id']
basic_editsave.school_name = form.cleaned_data['school_name'].upper()
basic_editsave.school_name_tamil = request.POST['word']
basic_editsave.udise_code = form.cleaned_data['udise_code']
basic_editsave.district = form.cleaned_data['district']
basic_editsave.district1 = form.cleaned_data['district']
basic_editsave.block = form.cleaned_data['block']
basic_editsave.block1 = form.cleaned_data['block']
basic_editsave.local_body_type= form.cleaned_data['local_body_type']
chk_local_body=Local_body.objects.get(id=request.POST['local_body_type'])
if str(chk_local_body)=='Village Panchayat':
basic_editsave.village_panchayat =form.cleaned_data['village_panchayat']
basic_editsave.vill_habitation = form.cleaned_data['vill_habitation']
basic_editsave.town_panchayat = None
basic_editsave.town_panchayat_ward = None
basic_editsave.municipality = None
basic_editsave.municipal_ward = None
basic_editsave.cantonment = None
basic_editsave.cantonment_ward = None
basic_editsave.township = None
basic_editsave.township_ward = None
basic_editsave.corporation = None
basic_editsave.corpn_zone = None
basic_editsave.corpn_ward = None
elif str(chk_local_body)=="Town Panchayat":
basic_editsave.village_panchayat =None
basic_editsave.vill_habitation = None
basic_editsave.town_panchayat = form.cleaned_data['town_panchayat']
basic_editsave.town_panchayat_ward = form.cleaned_data['town_panchayat_ward']
basic_editsave.municipality = None
basic_editsave.municipal_ward = None
basic_editsave.cantonment = None
basic_editsave.cantonment_ward = None
basic_editsave.township = None
basic_editsave.township_ward = None
basic_editsave.corporation = None
basic_editsave.corpn_zone = None
basic_editsave.corpn_ward = None
elif str(chk_local_body)=="Municipality":
basic_editsave.village_panchayat =None
basic_editsave.vill_habitation = None
basic_editsave.town_panchayat = None
basic_editsave.town_panchayat_ward = None
basic_editsave.municipality = form.cleaned_data['municipality']
basic_editsave.municipal_ward = form.cleaned_data['municipal_ward']
basic_editsave.cantonment = None
basic_editsave.cantonment_ward = None
basic_editsave.township = None
basic_editsave.township_ward = None
basic_editsave.corporation = None
basic_editsave.corpn_zone = None
basic_editsave.corpn_ward = None
elif str(chk_local_body)=="cantonment":
basic_editsave.village_panchayat =None
basic_editsave.vill_habitation = None
basic_editsave.town_panchayat = None
basic_editsave.town_panchayat_ward = None
basic_editsave.municipality = None
basic_editsave.municipal_ward = None
basic_editsave.cantonment = form.cleaned_data['cantonment']
basic_editsave.cantonment_ward = form.cleaned_data['cantonment_ward']
basic_editsave.township = None
basic_editsave.township_ward = None
basic_editsave.corporation = None
basic_editsave.corpn_zone = None
basic_editsave.corpn_ward = None
elif str(chk_local_body)=="Township":
basic_editsave.village_panchayat =None
basic_editsave.vill_habitation = None
basic_editsave.town_panchayat = None
basic_editsave.town_panchayat_ward = None
basic_editsave.municipality = None
basic_editsave.municipal_ward = None
basic_editsave.cantonment = None
basic_editsave.cantonment_ward = None
basic_editsave.township = form.cleaned_data['township']
basic_editsave.township_ward = form.cleaned_data['township_ward']
basic_editsave.corporation = None
basic_editsave.corpn_zone = None
basic_editsave.corpn_ward = None
elif str(chk_local_body)=="Corporation":
basic_editsave.village_panchayat =None
basic_editsave.vill_habitation = None
basic_editsave.town_panchayat = None
basic_editsave.town_panchayat_ward = None
basic_editsave.municipality = None
basic_editsave.municipal_ward = None
basic_editsave.cantonment = None
basic_editsave.cantonment_ward = None
basic_editsave.township = None
basic_editsave.township_ward = None
basic_editsave.corporation = form.cleaned_data['corporation']
basic_editsave.corpn_zone = form.cleaned_data['corpn_zone']
basic_editsave.corpn_ward = form.cleaned_data['corpn_ward']
if request.POST['prekg']=='Yes':
basic_editsave.prekg = form.cleaned_data['prekg']
basic_editsave.kgsec = 'Yes'
else:
basic_editsave.prekg = 'No'
if request.POST['kgsec']=='Yes':
basic_editsave.kgsec = form.cleaned_data['kgsec']
else:
basic_editsave.kgsec = 'No'
basic_editsave.edu_district = form.cleaned_data['edu_district']
basic_editsave.address = form.cleaned_data['address']
basic_editsave.pincode = form.cleaned_data['pincode']
basic_editsave.stdcode = form.cleaned_data['stdcode']
basic_editsave.landline = form.cleaned_data['landline']
basic_editsave.landline2 = form.cleaned_data['landline2']
basic_editsave.mobile = form.cleaned_data['mobile']
basic_editsave.sch_email = form.cleaned_data['sch_email']
basic_editsave.website = form.cleaned_data['website']
basic_editsave.bank_dist=form.cleaned_data['bank_dist']
basic_editsave.bank = form.cleaned_data['bank']
basic_editsave.branch = form.cleaned_data['branch']
basic_editsave.bankaccno = form.cleaned_data['bankaccno']
basic_editsave.parliament = form.cleaned_data['parliament']
basic_editsave.assembly = form.cleaned_data['assembly']
basic_editsave.latitude = form.cleaned_data['latitude']
basic_editsave.longitude = form.cleaned_data['longitude']
basic_editsave.manage_cate = form.cleaned_data['manage_cate']
basic_editsave.sch_management=form.cleaned_data['sch_management']
basic_editsave.sch_directorate = form.cleaned_data['sch_directorate']
basic_editsave.sch_cate = form.cleaned_data['sch_cate']
if request.POST['pta_esta']=='Yes':
basic_editsave.pta_esta = form.cleaned_data['pta_esta']
basic_editsave.pta_no= form.cleaned_data['pta_no']
basic_editsave.pta_sub_yr= form.cleaned_data['pta_sub_yr']
else:
basic_editsave.pta_esta = form.cleaned_data['pta_esta']
basic_editsave.pta_no= None
basic_editsave.pta_sub_yr= None
basic_editsave.cluster=form.cleaned_data['cluster']
if basic_editsave.manage_cate_id==1:
basic_editsave.mgt_opn_year=None
basic_editsave.mgt_type=None
basic_editsave.mgt_name=None
basic_editsave.mgt_address=None
basic_editsave.mgt_regis_no=None
basic_editsave.mgt_regis_dt=None
basic_editsave.regis_by_office=None
basic_editsave.draw_off_code = form.cleaned_data['draw_off_code']
elif basic_editsave.manage_cate_id==2:
basic_editsave.mgt_opn_year=None
basic_editsave.mgt_type=None
basic_editsave.mgt_name=None
basic_editsave.mgt_address=None
basic_editsave.mgt_regis_no=None
basic_editsave.mgt_regis_dt=None
basic_editsave.regis_by_office=None
basic_editsave.draw_off_code = form.cleaned_data['draw_off_code']
else:
basic_editsave.mgt_opn_year=form.cleaned_data['mgt_opn_year']
basic_editsave.mgt_type=form.cleaned_data['mgt_type']
basic_editsave.mgt_name=form.cleaned_data['mgt_name']
basic_editsave.mgt_address=form.cleaned_data['mgt_address']
basic_editsave.mgt_regis_no=form.cleaned_data['mgt_regis_no']
basic_editsave.mgt_regis_dt=form.cleaned_data['mgt_regis_dt']
basic_editsave.regis_by_office=form.cleaned_data['regis_by_office']
basic_editsave.draw_off_code = None
if basic_editsave.manage_cate_id==1:
basic_editsave.chk_manage=1
elif basic_editsave.manage_cate_id==2:
basic_editsave.chk_manage=2
else:
basic_editsave.chk_manage=3
if basic_editsave.sch_directorate_id==28:
basic_editsave.chk_dept=3
elif basic_editsave.sch_directorate_id in (2,3,16,18,27,29,32,34,42):
basic_editsave.chk_dept=2
else:
if basic_editsave.sch_cate.category_code in ('1','2','4'):
basic_editsave.chk_dept=2
elif basic_editsave.sch_cate.category_code in ('10','11','5','7','8'):
basic_editsave.chk_dept=1
else:
basic_editsave.chk_dept=3
basic_editsave.save()
govchk=basic_editsave.sch_management
if (str(govchk)=='Un-Aided (Private) School - Other than State Board School'):
board_id = basic_editsave.sch_directorate
oth_board=School_department.objects.get(id=board_id.id)
board=oth_board.department
elif (str(govchk)=='Army Public School'):
board ='CBSE'
elif (str(govchk)=='Kendra Vidyalaya - Central Government School'):
board ='CBSE'
elif (str(govchk)=='Sainik School'):
board ='CBSE'
else:
board ='State Board'
if Academicinfo.objects.filter(school_key=basic_editsave.id).count()>0:
acade_det1 = Academicinfo.objects.get(school_key=basic_editsave.id)
acade_det1.board=board
acade_det1.save()
messages.success(request,'Basic Informations Updated Successfully')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
messages.warning(request,'Basic Informations Not Updated')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
form = BasicForm(request.POST,request.FILES)
if form.is_valid():
basicinfo = Basicinfo(
school_id=form.cleaned_data['school_id'],
school_name = form.cleaned_data['school_name'].upper(),
school_name_tamil = request.POST['word'],
udise_code = form.cleaned_data['udise_code'],
district = form.cleaned_data['district'],
block = form.cleaned_data['block'],
local_body_type= form.cleaned_data['local_body_type'],
village_panchayat =form.cleaned_data['village_panchayat'],
vill_habitation = form.cleaned_data['vill_habitation'],
town_panchayat = form.cleaned_data['town_panchayat'],
town_panchayat_ward = form.cleaned_data['town_panchayat_ward'],
municipality = form.cleaned_data['municipality'],
municipal_ward = form.cleaned_data['municipal_ward'],
cantonment = form.cleaned_data['cantonment'],
cantonment_ward = form.cleaned_data['cantonment_ward'],
township = form.cleaned_data['township'],
township_ward = form.cleaned_data['township_ward'],
corporation = form.cleaned_data['corporation'],
corpn_zone = form.cleaned_data['corpn_zone'],
corpn_ward = form.cleaned_data['corpn_ward'],
edu_district = form.cleaned_data['edu_district'],
address = form.cleaned_data['address'],
pincode = form.cleaned_data['pincode'],
stdcode = form.cleaned_data['stdcode'],
landline = form.cleaned_data['landline'],
landline2 = form.cleaned_data['landline2'],
mobile = form.cleaned_data['mobile'],
sch_email = form.cleaned_data['sch_email'],
website = form.cleaned_data['website'],
bank_dist=form.cleaned_data['bank_dist'],
bank = form.cleaned_data['bank'],
branch = form.cleaned_data['branch'],
bankaccno = form.cleaned_data['bankaccno'],
parliament = form.cleaned_data['parliament'],
assembly = form.cleaned_data['assembly'],
latitude = form.cleaned_data['latitude'],
longitude = form.cleaned_data['longitude'],
manage_cate = form.cleaned_data['manage_cate'],
sch_management=form.cleaned_data['sch_management'],
sch_cate = form.cleaned_data['sch_cate'],
sch_directorate = form.cleaned_data['sch_directorate'],
pta_esta = form.cleaned_data['pta_esta'],
pta_no= form.cleaned_data['pta_no'],
pta_sub_yr= form.cleaned_data['pta_sub_yr'],
prekg = form.cleaned_data['prekg'],
kgsec = form.cleaned_data['kgsec'],
cluster=form.cleaned_data['cluster'],
mgt_opn_year=form.cleaned_data['mgt_opn_year'],
mgt_type=form.cleaned_data['mgt_type'],
mgt_name=form.cleaned_data['mgt_name'],
mgt_address=form.cleaned_data['mgt_address'],
mgt_regis_no=form.cleaned_data['mgt_regis_no'],
mgt_regis_dt=form.cleaned_data['mgt_regis_dt'],
draw_off_code=form.cleaned_data['draw_off_code'],
regis_by_office=form.cleaned_data['regis_by_office'],
)
basicinfo.save()
messages.success(request,'Basic Informations Added Successfully')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
messages.warning(request,'Basic Informations Not Saved')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class admin_edit(View):
def get(self,request,**kwargs):
chk_user2=self.kwargs.get('code2')
if request.user.is_authenticated():
if (Basicinfo.objects.filter(udise_code=request.user.username).count())>0:
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
basic_det = Basicinfo.objects.get(id=basic_det.id)
sch_key = basic_det.id
new_sch_id = basic_det.id
govchk=basic_det.sch_management
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')|(str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
govaid_chk='Yes'
else:
govaid_chk=''
grp=basic_det.sch_cate
if ((str(grp)=='Hr.Sec School (I-XII)')|(str(grp)=='Hr.Sec School (VI-XII)')|(str(grp)=='Hr.Sec School (IX-XII)')|(str(grp)=='Hr.Sec School (XI-XII)')|(str(grp)=='Matriculation Hr.Sec School (I-XII)')):
grp_chk='Yes'
else:
grp_chk=''
if ((str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
aid_chk='Yes'
else:
aid_chk=''
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')):
gov_chk='Yes'
else:
gov_chk='No'
if basic_det.sch_cate.category_code=='1':
sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std']
low_class = 'I Std'
high_class = 'V Std'
elif basic_det.sch_cate.category_code=='2':
sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std']
low_class = 'I Std'
high_class = 'VIII Std'
elif basic_det.sch_cate.category_code=='3':
sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std','IX Std','X Std','XI Std','XII Std']
low_class = 'I Std'
high_class = 'XII Std'
elif basic_det.sch_cate.category_code=='4':
sch_cat_chk=['VI Std','VII Std','VIII Std']
low_class = 'VI Std'
high_class = 'VIII Std'
elif basic_det.sch_cate.category_code=='5':
sch_cat_chk=['VI Std','VII Std','VIII Std','IX Std','X Std','XI Std','XII Std',]
low_class = 'VI Std'
high_class = 'XII Std'
elif basic_det.sch_cate.category_code=='6':
sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std','IX Std','X Std']
low_class = 'I Std'
high_class = 'X Std'
elif basic_det.sch_cate.category_code=='7':
sch_cat_chk=['VI Std','VII Std','VIII Std','IX Std','X Std']
low_class = 'VI Std'
high_class = 'X Std'
elif basic_det.sch_cate.category_code=='8':
sch_cat_chk=['IX Std','X Std']
low_class = 'IX Std'
high_class = 'X Std'
elif basic_det.sch_cate.category_code=='10':
sch_cat_chk=['IX Std','X Std','XI Std','XII Std']
low_class = 'IX Std'
high_class = 'XII Std'
elif basic_det.sch_cate.category_code=='11':
sch_cat_chk=['XI Std','XII Std']
low_class = 'XI Std'
high_class = 'XII Std'
elif basic_det.sch_cate.category_code=='14':
sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std']
low_class = 'I Std'
high_class = 'V Std'
elif basic_det.sch_cate.category_code=='12':
sch_cat_chk=['VI Std','VII Std','VIII Std']
low_class = 'VI Std'
high_class = 'VIII Std'
else:
sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std','IX Std','X Std','XI Std','XII Std',]
low_class = 'I Std'
high_class = 'XII Std'
if Academicinfo.objects.filter(school_key=basic_det.id).count()>0:
acade_det = Academicinfo.objects.get(school_key=basic_det.id)
acade_det = Academicinfo.objects.get(id=acade_det.id)
instance = Academicinfo.objects.get(school_key=basic_det)
if Class_section.objects.filter(school_key=basic_det.id).count()>0:
class_det = Class_section.objects.filter(school_key=basic_det.id)
if Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=1)).count()>0:
teach_det = Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=1))
if Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=2)).count()>0:
nonteach_det = Staff.objects.filter(Q(school_key=basic_det.id) & Q(staff_cat=2))
if Parttimestaff.objects.filter(school_key=basic_det.id).count()>0:
ptime_det = Parttimestaff.objects.filter(school_key=basic_det.id)
if Sch_groups.objects.filter(school_key=basic_det.id)>0:
group_det = Sch_groups.objects.filter(school_key=basic_det.id)
if acade_det.recog_dt_fm!=None:
recog_dtfm=acade_det.recog_dt_fm.strftime('%Y-%m-%d')
if acade_det.recog_dt_to!=None:
recog_dtto=acade_det.recog_dt_to.strftime('%Y-%m-%d')
if acade_det.min_dt_iss!=None:
mino_dt=acade_det.min_dt_iss.strftime('%Y-%m-%d')
form=academicinfo_form(instance=instance)
school_key = basic_det.id
schooltype = instance.schooltype
board = instance.board
tamil_med = instance.tamil_med
eng_med = instance.eng_med
tel_med = instance.tel_med
mal_med = instance.mal_med
kan_med = instance.kan_med
urdu_med = instance.urdu_med
oth_med=instance.oth_med
other_med = instance.other_med
minority = instance.minority
rel_minority = instance.rel_minority
ling_minority = instance.ling_minority
min_ord_no = instance.min_ord_no
min_dt_iss = instance.min_dt_iss
recog_dt_fm = instance.recog_dt_fm
recog_dt_to = instance.recog_dt_to
min_dt_iss=instance.min_dt_iss
recog_dt_fm=instance.recog_dt_fm
recog_dt_to=instance.recog_dt_to
iss_auth = instance.iss_auth
start_order = instance.start_order
start_yr=instance.start_yr
recog_typ = instance.recog_typ
recog_ord = instance.recog_ord
hssstart_order = instance.hssstart_order
hssstart_yr=instance.hssstart_yr
hssrecog_typ = instance.hssrecog_typ
hssrecog_ord = instance.hssrecog_ord
hssrecog_dt_fm = instance.hssrecog_dt_fm
hssrecog_dt_to = instance.hssrecog_dt_to
upgr_det = instance.upgr_det
other_board_aff = instance.other_board_aff
spl_school = instance.spl_school
spl_type = instance.spl_type
boarding = instance.boarding
hostel_floor = instance.hostel_floor
hostel_rooms = instance.hostel_rooms
hostel_boys = instance.hostel_boys
hostel_girls = instance.hostel_girls
hostel_staff = instance.hostel_staff
extra_scout=instance.extra_scout
extra_jrc=instance.extra_jrc
extra_nss=instance.extra_nss
extra_ncc=instance.extra_ncc
extra_rrc=instance.extra_rrc
extra_ec=instance.extra_ec
extra_cub=instance.extra_cub
nrstc=instance.nrstc
hssboard=instance.hssboard
smc_smdc = instance.smc_smdc
noof_med=instance.noof_med
dge_no_ten= instance.dge_no_ten
dge_no_twelve= instance.dge_no_twelve
return render (request,'admin_edit_new.html',locals())
else:
form=academicinfo_form()
if (str(govchk)=='Un-Aided (Private) School - Other than State Board School'):
board = basic_det.sch_directorate
elif (str(govchk)=='Army Public School'):
board ='CBSE'
elif (str(govchk)=='Kendra Vidyalaya - Central Government School'):
board ='CBSE'
elif (str(govchk)=='Sainik School'):
board ='CBSE'
else:
board ='State Board'
groups_list=Groups.objects.all()
district_list = District.objects.all().order_by('district_name')
noof_med=0
return render (request,'admin_edit_new.html',locals())
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
def post(self,request,**kwargs):
if request.user.is_authenticated():
pk=self.kwargs.get('pk')
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
form = academicinfo_form(request.POST,request.FILES)
if Academicinfo.objects.filter(school_key=basic_det.id).count()>0:
academic_edit=Academicinfo.objects.get(school_key=basic_det.id)
if form.is_valid():
if form.cleaned_data['recog_dt_fm']:
chk_recfmdt=form.cleaned_data['recog_dt_fm']
else:
chk_recfmdt=None
if form.cleaned_data['recog_dt_to']:
chk_rectodt=form.cleaned_data['recog_dt_to']
else:
chk_rectodt=None
class_sec_del = Class_section.objects.filter(school_key=basic_det.id)
sch_key=form.cleaned_data['school_key']
academic_edit.schooltype = form.cleaned_data['schooltype']
academic_edit.board = form.cleaned_data['board']
academic_edit.tamil_med = form.cleaned_data['tamil_med']
academic_edit.eng_med = form.cleaned_data['eng_med']
academic_edit.tel_med = form.cleaned_data['tel_med']
academic_edit.mal_med = form.cleaned_data['mal_med']
academic_edit.kan_med = form.cleaned_data['kan_med']
academic_edit.urdu_med = form.cleaned_data['urdu_med']
if request.POST['oth_med']== 'Yes':
academic_edit.oth_med = True
academic_edit.other_med = form.cleaned_data['other_med']
else:
academic_edit.oth_med = False
academic_edit.other_med = ''
if form.cleaned_data['minority']==True:
academic_edit.minority = form.cleaned_data['minority']
academic_edit.min_ord_no = form.cleaned_data['min_ord_no']
academic_edit.min_dt_iss = form.cleaned_data['min_dt_iss']
academic_edit.iss_auth = form.cleaned_data['iss_auth']
if request.POST['mino_type']=='Religious Minority':
academic_edit.rel_minority = True
academic_edit.ling_minority = False
if request.POST['mino_type']=='Linguistic Minority':
academic_edit.ling_minority = True
academic_edit.rel_minority =False
else:
academic_edit.minority = False
academic_edit.rel_minority = False
academic_edit.ling_minority = False
academic_edit.min_ord_no=''
academic_edit.min_dt_iss =None
academic_edit.iss_auth =''
academic_edit.start_order = form.cleaned_data['start_order']
academic_edit.start_yr = form.cleaned_data['start_yr']
academic_edit.recog_typ = form.cleaned_data['recog_typ']
academic_edit.recog_ord = form.cleaned_data['recog_ord']
academic_edit.recog_dt_fm = chk_recfmdt
academic_edit.recog_dt_to = chk_rectodt
academic_edit.low_class = form.cleaned_data['low_class']
academic_edit.high_class = form.cleaned_data['high_class']
if (request.POST['high_class'] == 'XII Std'):
academic_edit.hssstart_order = form.cleaned_data['hssstart_order']
academic_edit.hssrecog_typ = form.cleaned_data['hssrecog_typ']
academic_edit.hssrecog_ord = form.cleaned_data['hssrecog_ord']
academic_edit.hssstart_yr = form.cleaned_data['hssstart_yr']
academic_edit.hssrecog_dt_fm = form.cleaned_data['hssrecog_dt_fm']
academic_edit.hssrecog_dt_to = form.cleaned_data['hssrecog_dt_to']
academic_edit.hssboard = form.cleaned_data['hssboard']
academic_edit.upgr_det = form.cleaned_data['upgr_det']
academic_edit.other_board_aff = form.cleaned_data['other_board_aff']
if request.POST['spl_school']== 'True':
academic_edit.spl_school = True
academic_edit.spl_type = form.cleaned_data['spl_type']
else:
academic_edit.spl_school = False
academic_edit.spl_type = ''
if request.POST['boarding']== 'True':
academic_edit.boarding = True
academic_edit.hostel_floor = form.cleaned_data['hostel_floor']
academic_edit.hostel_rooms = form.cleaned_data['hostel_rooms']
academic_edit.hostel_boys = form.cleaned_data['hostel_boys']
academic_edit.hostel_girls = form.cleaned_data['hostel_girls']
academic_edit.hostel_staff = form.cleaned_data['hostel_staff']
else:
academic_edit.boarding = False
academic_edit.hostel_floor = 0
academic_edit.hostel_rooms = 0
academic_edit.hostel_boys = 0
academic_edit.hostel_girls = 0
academic_edit.hostel_staff = 0
academic_edit.extra_scout=form.cleaned_data['extra_scout']
academic_edit.extra_jrc=form.cleaned_data['extra_jrc']
academic_edit.extra_nss=form.cleaned_data['extra_nss']
academic_edit.extra_ncc=form.cleaned_data['extra_ncc']
academic_edit.extra_rrc=form.cleaned_data['extra_rrc']
academic_edit.extra_ec=form.cleaned_data['extra_ec']
academic_edit.extra_cub=form.cleaned_data['extra_cub']
academic_edit.smc_smdc = form.cleaned_data['smc_smdc']
academic_edit.noof_med=form.cleaned_data['noof_med']
academic_edit.dge_no_ten=form.cleaned_data['dge_no_ten']
academic_edit.dge_no_twelve=form.cleaned_data['dge_no_twelve']
if form.cleaned_data['nrstc']== True:
academic_edit.nrstc = True
else:
academic_edit.nrstc = False
academic_edit.save()
messages.success(request,'Admininstration Details Updated successfully')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
print form.errors
messages.warning(request,'Admininstration Details Not Updated')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
if form.is_valid():
sch_key=form.cleaned_data['school_key']
if request.POST['min_dt_iss']:
chk_dtiss=form.cleaned_data['min_dt_iss']
else:
chk_dtiss=None
if request.POST['recog_dt_fm']:
chk_recfmdt=form.cleaned_data['recog_dt_fm']
else:
chk_recfmdt=None
if request.POST['recog_dt_to']:
chk_rectodt=form.cleaned_data['recog_dt_to']
else:
chk_rectodt=None
if request.POST['gov_chk']=='No':
if request.POST['hssrecog_dt_fm']:
chk_hssrecfmdt=form.cleaned_data['hssrecog_dt_fm']
else:
chk_hssrecfmdt=None
if request.POST['hssrecog_dt_to']:
chk_hssrectodt=form.cleaned_data['hssrecog_dt_to']
else:
chk_hssrectodt=None
else:
chk_hssrecfmdt=None
chk_hssrectodt=None
if request.POST['boarding'] == 'True':
boarding_chk=True
else:
boarding_chk=False
if request.POST['mino_type']=='Religious Minority':
rel_minority=True
else:
rel_minority=False
if request.POST['mino_type']=='Linguistic Minority':
ling_minority=True
else:
ling_minority=False
if request.POST['oth_med']=='Yes':
oth_med=True
else:
oth_med=False
if (request.POST['high_class'] == 'XII Std'):
thssstart_order = request.POST['hssstart_order'],
thssstart_yr = request.POST['hssstart_yr'],
thssrecog_typ = request.POST['hssrecog_typ'],
thssrecog_ord = request.POST['hssrecog_ord'],
thssboard = request.POST['hssboard'],
else:
thssstart_order = ''
thssstart_yr = ''
thssrecog_typ = ''
thssrecog_ord = ''
thssboard = ''
academicinfo = Academicinfo(
school_key = sch_key,
schooltype = form.cleaned_data['schooltype'],
board = form.cleaned_data['board'],
tamil_med = form.cleaned_data['tamil_med'],
eng_med = form.cleaned_data['eng_med'],
tel_med = form.cleaned_data['tel_med'],
mal_med = form.cleaned_data['mal_med'],
kan_med = form.cleaned_data['kan_med'],
urdu_med = form.cleaned_data['urdu_med'],
oth_med = oth_med,
other_med = form.cleaned_data['other_med'],
minority = form.cleaned_data['minority'],
rel_minority = rel_minority,
ling_minority = ling_minority,
min_ord_no = form.cleaned_data['min_ord_no'],
min_dt_iss = chk_dtiss,
iss_auth = form.cleaned_data['iss_auth'],
start_order = form.cleaned_data['start_order'],
start_yr = form.cleaned_data['start_yr'],
recog_typ = form.cleaned_data['recog_typ'],
recog_ord = form.cleaned_data['recog_ord'],
recog_dt_fm = chk_recfmdt,
recog_dt_to = chk_rectodt,
low_class = form.cleaned_data['low_class'],
high_class = form.cleaned_data['high_class'],
hssstart_order = thssstart_order,
hssstart_yr = thssstart_yr,
hssrecog_typ = thssrecog_typ,
hssrecog_ord = thssrecog_ord,
hssrecog_dt_fm = chk_hssrecfmdt,
hssrecog_dt_to = chk_hssrectodt,
hssboard = thssboard,
upgr_det = form.cleaned_data['upgr_det'],
other_board_aff = form.cleaned_data['other_board_aff'],
spl_school = form.cleaned_data['spl_school'],
spl_type = form.cleaned_data['spl_type'],
boarding = boarding_chk,
hostel_floor = form.cleaned_data['hostel_floor'],
hostel_rooms = form.cleaned_data['hostel_rooms'],
hostel_boys = form.cleaned_data['hostel_boys'],
hostel_girls = form.cleaned_data['hostel_girls'],
hostel_staff = form.cleaned_data['hostel_staff'],
extra_scout=form.cleaned_data['extra_scout'],
extra_jrc=form.cleaned_data['extra_jrc'],
extra_nss=form.cleaned_data['extra_nss'],
extra_ncc=form.cleaned_data['extra_ncc'],
extra_rrc=form.cleaned_data['extra_rrc'],
extra_ec=form.cleaned_data['extra_ec'],
extra_cub=form.cleaned_data['extra_cub'],
nrstc = form.cleaned_data['nrstc'],
smc_smdc = form.cleaned_data['smc_smdc'],
noof_med = form.cleaned_data['noof_med'],
dge_no_ten= form.cleaned_data['dge_no_ten'],
dge_no_twelve= form.cleaned_data['dge_no_twelve'],
)
academicinfo.save()
messages.success(request,'Admininstration Details Added successfully')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
print form.errors
messages.warning(request,'Admininstration Details Not Saved')
return HttpResponseRedirect('/schoolnew/school_registration')
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class infra_edit(View):
def get(self,request,**kwargs):
if request.user.is_authenticated():
district_list = District.objects.all().order_by('district_name')
concre_chk="Yes"
if Basicinfo.objects.filter(udise_code=request.user.username).count()>0:
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
new_sch_id = basic_det.id
if Academicinfo.objects.filter(school_key=basic_det.id).count()>0:
acade_det = Academicinfo.objects.get(school_key=basic_det.id)
if ((acade_det.high_class=='XII Std') | (acade_det.high_class=='X Std')):
lab_chk='Yes'
else:
lab_chk='No'
if Infradet.objects.filter(school_key=basic_det.id).count()>0:
infra_det = Infradet.objects.filter(school_key=basic_det.id)
infra_det = Infradet.objects.filter(id=infra_det)
instance = Infradet.objects.get(school_key=basic_det)
form=infradet_form(instance=instance)
school_key = basic_det.id
electricity = instance.electricity
tot_area= instance.tot_area
tot_type=instance.tot_type
cov= instance.cov
cov_type=instance.cov_type
opn= instance.opn
opn_type=instance.opn_type
play= instance.play
play_type=instance.play_type
tot_ft = instance.tot_ft
tot_mt = instance.tot_mt
covered_ft = instance.covered_ft
covered_mt = instance.covered_mt
open_ft = instance.open_ft
open_mt = instance.open_mt
play_ft = instance.play_ft
play_mt = instance.play_mt
cwall = instance.cwall
cwall_concre = instance.cwall_concre
cwall_fence = instance.cwall_fence
cwall_existbu = instance.cwall_existbu
cwall_nbarr = instance.cwall_nbarr
chk_cwall=cwall
cwall_concre_len = instance.cwall_concre_len
cwall_fence_len = instance.cwall_fence_len
cwall_existbu_len = instance.cwall_existbu_len
cwall_nbarr_len = instance.cwall_nbarr_len
cwall_notcon_len = instance.cwall_notcon_len
fireext= instance.fireext
fireext_no = instance.fireext_no
fireext_w = instance.fireext_w
firstaid_box=instance.firstaid_box
rainwater = instance.rainwater
kitchenshed= instance.kitchenshed
furn_desk_no = instance.furn_desk_no
furn_desk_use = instance.furn_desk_use
furn_bench_no = instance.furn_bench_no
furn_bench_use = instance.furn_bench_use
fans = instance.fans
fans_work = instance.fans_work
tubelights = instance.tubelights
tlights_work = instance.tlights_work
bu_no = instance.bu_no
bu_usable =instance.bu_usable
bu_minrep =instance.bu_minrep
bu_majrep =instance.bu_majrep
gu_no =instance.gu_no
gu_usable = instance.gu_usable
gu_minrep =instance.gu_minrep
gu_majrep = instance.gu_majrep
bl_no = instance.bl_no
bl_usable = instance.bl_usable
bl_minrep = instance.bl_minrep
bl_majrep = instance.bl_majrep
gl_no = instance.gl_no
gl_usable = instance.gl_usable
gl_minrep =instance.gl_minrep
gl_majrep = instance.gl_majrep
gentsu_no = instance.gentsu_no
gentsu_usable = instance.gentsu_usable
gentsu_minrep = instance.gentsu_minrep
gentsu_majrep = instance.gentsu_majrep
ladiesu_no = instance.ladiesu_no
ladiesu_usable = instance.ladiesu_usable
ladiesu_minrep = instance.ladiesu_minrep
ladiesu_majrep = instance.ladiesu_majrep
gentsl_no = instance.gentsl_no
gentsl_usable = instance.gentsl_usable
gentsl_minrep = instance.gentsl_minrep
gentsl_majrep = instance.gentsl_majrep
ladiesl_no = instance.ladiesl_no
ladiesl_usable = instance.ladiesl_usable
ladiesl_minrep = instance.ladiesl_minrep
ladiesl_majrep = instance.ladiesl_majrep
incinirator=instance.incinirator
water_toilet=instance.water_toilet
cwsn_toilet = instance.cwsn_toilet
cwsn_toilet_no = instance.cwsn_toilet_no
water_facility=instance.water_facility
water_source=instance.water_source
well_dia=instance.well_dia
well_close=instance.well_close
water_puri=instance.water_puri
water_access = instance.water_access
internet_yes = instance.internet_yes
lightning_arest= instance.lightning_arest
lib_tamil=instance.lib_tamil
lib_eng=instance.lib_eng
lib_others=instance.lib_others
lib_tamil_news =instance.lib_tamil_news
lib_eng_news =instance.lib_eng_news
lib_periodic =instance.lib_periodic
trans_faci=instance.trans_faci
trans_bus=instance.trans_bus
trans_van=instance.trans_van
trans_stud=instance.trans_stud
trans_rules=instance.trans_rules
award_recd=instance.award_recd
award_info =instance.award_info
phy_lab=instance.phy_lab
che_lab=instance.che_lab
bot_lab=instance.bot_lab
zoo_lab=instance.zoo_lab
gas_cylin=instance.gas_cylin
suffi_equip=instance.suffi_equip
eb_ht_line=instance.eb_ht_line
infra_edit_chk='Yes'
if lightning_arest=="Yes":
light_arest="Yes"
else:
light_arest=""
if water_facility=="Yes":
water_chk="Yes"
else:
water_chk=""
if water_source=="Well":
well_chk="Yes"
else:
well_chk=""
govchk=basic_det.sch_management
grp=basic_det.sch_cate
schtype=acade_det.schooltype
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')|(str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
govaid_chk='Yes'
else:
govaid_chk=''
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')):
gov_chk='Yes'
else:
gov_chk='No'
if ((str(grp)=='Hr.Sec School (I-XII)')|(str(grp)=='Hr.Sec School (VI-XII)')|(str(grp)=='Hr.Sec School (IX-XII)')|(str(grp)=='Hr.Sec School (XI-XII)')|(str(grp)=='Middle School (I-VIII)')|(str(grp)=='Middle School (VI-VIII)')|(str(grp)=='High Schools (I-X)')|(str(grp)=='High Schools (VI-X)')|(str(grp)=='High Schools (IX-X)')|(str(grp)=='KGBV')):
if acade_det.schooltype<>'Boys':
inci_chk='Yes'
else:
inci_chk=''
else:
inci_chk=''
return render (request,'infra_edit_new.html',locals())
else:
form=infradet_form()
govchk=basic_det.sch_management
grp=basic_det.sch_cate
hi_class=acade_det.high_class
schtype=acade_det.schooltype
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')|(str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
govaid_chk='Yes'
else:
govaid_chk=''
if ((str(grp)=='Hr.Sec School (I-XII)')|(str(grp)=='Hr.Sec School (VI-XII)')|(str(grp)=='Hr.Sec School (IX-XII)')|(str(grp)=='Hr.Sec School (XI-XII)')|(str(grp)=='Middle School (I-VIII)')|(str(grp)=='Middle School (VI-VIII)')|(str(grp)=='High Schools (I-X)')|(str(grp)=='High Schools (VI-X)')|(str(grp)=='High Schools (IX-X)')|(str(grp)=='KGBV')):
if acade_det.schooltype<>'Boys':
inci_chk='Yes'
else:
inci_chk=''
else:
inci_chk=''
infra_edit_chk=''
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')):
gov_chk='Yes'
else:
gov_chk='No'
return render (request,'infra_edit_new.html',locals())
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		"""Save the school infrastructure form.

		The logged-in school is identified by its UDISE code (used as the
		Django username).  If an Infradet row already exists for the school
		it is updated in place; otherwise a new row is created.  On success
		or failure the user is redirected back to the registration page
		with a flash message.
		"""
		if request.user.is_authenticated():
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = infradet_form(request.POST,request.FILES)
			# Update path: an Infradet row already exists for this school.
			if Infradet.objects.filter(school_key=basic_det.id).count()>0:
				infra_edit=Infradet.objects.get(school_key=basic_det.id)
				if form.is_valid():
					infra_edit.electricity = form.cleaned_data['electricity']
					infra_edit.tot_area= form.cleaned_data['tot_area']
					infra_edit.tot_type=form.cleaned_data['tot_type']
					infra_edit.cov= form.cleaned_data['cov']
					infra_edit.cov_type=form.cleaned_data['cov_type']
					infra_edit.opn= form.cleaned_data['opn']
					infra_edit.opn_type=form.cleaned_data['opn_type']
					infra_edit.play= form.cleaned_data['play']
					infra_edit.play_type=form.cleaned_data['play_type']
					infra_edit.tot_ft = form.cleaned_data['tot_ft']
					infra_edit.tot_mt = form.cleaned_data['tot_mt']
					infra_edit.covered_ft = form.cleaned_data['covered_ft']
					infra_edit.covered_mt = form.cleaned_data['covered_mt']
					infra_edit.open_ft = form.cleaned_data['open_ft']
					infra_edit.open_mt = form.cleaned_data['open_mt']
					infra_edit.play_ft = form.cleaned_data['play_ft']
					infra_edit.play_mt = form.cleaned_data['play_mt']
					infra_edit.cwall_notcon_len = form.cleaned_data['cwall_notcon_len']
					chk_cwal=request.POST['cwall']
					# Compound-wall sub-fields only apply when a wall exists;
					# each material length is zeroed when its flag is off.
					# NOTE(review): this branch reads request.POST['cwall'] while
					# the create path below reads form.cleaned_data['cwall'] --
					# confirm both carry the same 'Yes'/'No' values.
					if request.POST['cwall']=='Yes':
						infra_edit.cwall = True
						if form.cleaned_data['cwall_concre']==True:
							infra_edit.cwall_concre = form.cleaned_data['cwall_concre']
							infra_edit.cwall_concre_len = form.cleaned_data['cwall_concre_len']
						else:
							infra_edit.cwall_concre = form.cleaned_data['cwall_concre']
							infra_edit.cwall_concre_len = 0
						if form.cleaned_data['cwall_fence']==True:
							infra_edit.cwall_fence = form.cleaned_data['cwall_fence']
							infra_edit.cwall_fence_len = form.cleaned_data['cwall_fence_len']
						else:
							infra_edit.cwall_fence = form.cleaned_data['cwall_fence']
							infra_edit.cwall_fence_len = 0
						if form.cleaned_data['cwall_existbu']==True:
							infra_edit.cwall_existbu = form.cleaned_data['cwall_existbu']
							infra_edit.cwall_existbu_len = form.cleaned_data['cwall_existbu_len']
						else:
							infra_edit.cwall_existbu = form.cleaned_data['cwall_existbu']
							infra_edit.cwall_existbu_len = 0
						if form.cleaned_data['cwall_nbarr']==True:
							infra_edit.cwall_nbarr = form.cleaned_data['cwall_nbarr']
							infra_edit.cwall_nbarr_len = form.cleaned_data['cwall_nbarr_len']
						else:
							infra_edit.cwall_nbarr = form.cleaned_data['cwall_nbarr']
							infra_edit.cwall_nbarr_len = 0
					else:
						# No compound wall: clear every material flag and length.
						infra_edit.cwall = False
						infra_edit.cwall_concre = False
						infra_edit.cwall_fence = False
						infra_edit.cwall_existbu = False
						infra_edit.cwall_nbarr = False
						infra_edit.cwall_concre_len = 0
						infra_edit.cwall_fence_len = 0
						infra_edit.cwall_existbu_len = 0
						infra_edit.cwall_nbarr_len = 0
					# Fire-extinguisher counts are only kept when one exists.
					if form.cleaned_data['fireext']==True:
						infra_edit.fireext= True
						infra_edit.fireext_no = form.cleaned_data['fireext_no']
						infra_edit.fireext_w = form.cleaned_data['fireext_w']
					else:
						infra_edit.fireext= False
						infra_edit.fireext_no = 0
						infra_edit.fireext_w = 0
					infra_edit.firstaid_box=form.cleaned_data['firstaid_box']
					infra_edit.rainwater = form.cleaned_data['rainwater']
					infra_edit.kitchenshed= form.cleaned_data['kitchenshed']
					infra_edit.furn_desk_no = form.cleaned_data['furn_desk_no']
					infra_edit.furn_desk_use = form.cleaned_data['furn_desk_use']
					infra_edit.furn_bench_no = form.cleaned_data['furn_bench_no']
					infra_edit.furn_bench_use = form.cleaned_data['furn_bench_use']
					infra_edit.fans = form.cleaned_data['fans']
					infra_edit.fans_work = form.cleaned_data['fans_work']
					infra_edit.tubelights = form.cleaned_data['tubelights']
					infra_edit.tlights_work = form.cleaned_data['tlights_work']
					infra_edit.bu_no = form.cleaned_data['bu_no']
					infra_edit.bu_usable =form.cleaned_data['bu_usable']
					infra_edit.bu_minrep =form.cleaned_data['bu_minrep']
					infra_edit.bu_majrep =form.cleaned_data['bu_majrep']
					infra_edit.gu_no =form.cleaned_data['gu_no']
					infra_edit.gu_usable = form.cleaned_data['gu_usable']
					infra_edit.gu_minrep =form.cleaned_data['gu_minrep']
					infra_edit.gu_majrep = form.cleaned_data['gu_majrep']
					infra_edit.bl_no = form.cleaned_data['bl_no']
					infra_edit.bl_usable = form.cleaned_data['bl_usable']
					infra_edit.bl_minrep = form.cleaned_data['bl_minrep']
					infra_edit.bl_majrep = form.cleaned_data['bl_majrep']
					infra_edit.gl_no = form.cleaned_data['gl_no']
					infra_edit.gl_usable = form.cleaned_data['gl_usable']
					infra_edit.gl_minrep =form.cleaned_data['gl_minrep']
					infra_edit.gl_majrep = form.cleaned_data['gl_majrep']
					infra_edit.gentsu_no = form.cleaned_data['gentsu_no']
					infra_edit.gentsu_usable = form.cleaned_data['gentsu_usable']
					infra_edit.gentsu_minrep = form.cleaned_data['gentsu_minrep']
					infra_edit.gentsu_majrep = form.cleaned_data['gentsu_majrep']
					infra_edit.ladiesu_no = form.cleaned_data['ladiesu_no']
					infra_edit.ladiesu_usable = form.cleaned_data['ladiesu_usable']
					infra_edit.ladiesu_minrep = form.cleaned_data['ladiesu_minrep']
					infra_edit.ladiesu_majrep = form.cleaned_data['ladiesu_majrep']
					infra_edit.gentsl_no = form.cleaned_data['gentsl_no']
					infra_edit.gentsl_usable = form.cleaned_data['gentsl_usable']
					infra_edit.gentsl_minrep = form.cleaned_data['gentsl_minrep']
					infra_edit.gentsl_majrep = form.cleaned_data['gentsl_majrep']
					infra_edit.ladiesl_no = form.cleaned_data['ladiesl_no']
					infra_edit.ladiesl_usable = form.cleaned_data['ladiesl_usable']
					infra_edit.ladiesl_minrep = form.cleaned_data['ladiesl_minrep']
					infra_edit.ladiesl_majrep = form.cleaned_data['ladiesl_majrep']
					infra_edit.incinirator=form.cleaned_data['incinirator']
					infra_edit.water_toilet=form.cleaned_data['water_toilet']
					infra_edit.internet_yes=form.cleaned_data['internet_yes']
					# NOTE(review): request.POST values are strings, so
					# request.POST['cwsn_toilet']==True can never be true and the
					# else branch always runs -- confirm whether this should
					# compare against 'True'/'Yes' or use form.cleaned_data.
					if request.POST['cwsn_toilet']==True:
						infra_edit.cwsn_toilet = True
						infra_edit.cwsn_toilet_no = form.cleaned_data['cwsn_toilet_no']
					else:
						infra_edit.cwsn_toilet = False
						infra_edit.cwsn_toilet_no = 0
					infra_edit.water_facility=form.cleaned_data['water_facility']
					infra_edit.water_source=form.cleaned_data['water_source']
					infra_edit.well_dia=form.cleaned_data['well_dia']
					infra_edit.well_close=form.cleaned_data['well_close']
					infra_edit.water_puri=form.cleaned_data['water_puri']
					infra_edit.water_access = form.cleaned_data['water_access']
					infra_edit.lightning_arest= form.cleaned_data['lightning_arest']
					infra_edit.lib_tamil = form.cleaned_data['lib_tamil']
					infra_edit.lib_eng = form.cleaned_data['lib_eng']
					infra_edit.lib_others = form.cleaned_data['lib_others']
					infra_edit.lib_tamil_news = form.cleaned_data['lib_tamil_news']
					infra_edit.lib_eng_news = form.cleaned_data['lib_eng_news']
					infra_edit.lib_periodic = form.cleaned_data['lib_periodic']
					# Transport details are only recorded for non-government
					# schools (gov_chk comes from a hidden form field set in get).
					if request.POST['gov_chk']=='No':
						if request.POST['trans_faci']=='True':
							infra_edit.trans_faci = True
							infra_edit.trans_bus= form.cleaned_data['trans_bus']
							infra_edit.trans_van= form.cleaned_data['trans_van']
							infra_edit.trans_stud= form.cleaned_data['trans_stud']
							infra_edit.trans_rules= form.cleaned_data['trans_rules']
						else:
							infra_edit.trans_faci = False
							infra_edit.trans_bus= None
							infra_edit.trans_van= None
							infra_edit.trans_stud= None
							infra_edit.trans_rules= False
					else:
						infra_edit.trans_faci = False
						infra_edit.trans_bus= None
						infra_edit.trans_van= None
						infra_edit.trans_stud= None
						infra_edit.trans_rules= False
					if request.POST['award_recd']=='True':
						infra_edit.award_recd = True
						infra_edit.award_info = form.cleaned_data['award_info']
					else:
						infra_edit.award_recd = False
						infra_edit.award_info = ''
					infra_edit.phy_lab= form.cleaned_data['phy_lab']
					infra_edit.che_lab= form.cleaned_data['che_lab']
					infra_edit.bot_lab= form.cleaned_data['bot_lab']
					infra_edit.zoo_lab= form.cleaned_data['zoo_lab']
					infra_edit.gas_cylin= form.cleaned_data['gas_cylin']
					infra_edit.suffi_equip= form.cleaned_data['suffi_equip']
					infra_edit.eb_ht_line= form.cleaned_data['eb_ht_line']
					infra_edit.save()
					messages.success(request,'Infrastructure Details Updated successfully')
					return HttpResponseRedirect('/schoolnew/school_registration')
				else:
					# Invalid form: nothing is saved; form errors are discarded.
					messages.warning(request,'Infrastructure Details Not Updated')
					return HttpResponseRedirect('/schoolnew/school_registration')
			else:
				# Create path: first-time save of infrastructure details.
				if form.is_valid():
					# Same compound-wall normalisation as the update path,
					# but driven from cleaned_data instead of raw POST.
					if form.cleaned_data['cwall']=='Yes':
						post_cwall=True
						if form.cleaned_data['cwall_concre']==True:
							cwall_concre = form.cleaned_data['cwall_concre']
							cwall_concre_len = form.cleaned_data['cwall_concre_len']
						else:
							cwall_concre = form.cleaned_data['cwall_concre']
							cwall_concre_len = 0
						if form.cleaned_data['cwall_fence']==True:
							cwall_fence = form.cleaned_data['cwall_fence']
							cwall_fence_len = form.cleaned_data['cwall_fence_len']
						else:
							cwall_fence = form.cleaned_data['cwall_fence']
							cwall_fence_len = 0
						if form.cleaned_data['cwall_existbu']==True:
							cwall_existbu = form.cleaned_data['cwall_existbu']
							cwall_existbu_len = form.cleaned_data['cwall_existbu_len']
						else:
							cwall_existbu = form.cleaned_data['cwall_existbu']
							cwall_existbu_len = 0
						if form.cleaned_data['cwall_nbarr']==True:
							cwall_nbarr = form.cleaned_data['cwall_nbarr']
							cwall_nbarr_len = form.cleaned_data['cwall_nbarr_len']
						else:
							cwall_nbarr = form.cleaned_data['cwall_nbarr']
							cwall_nbarr_len = 0
					else:
						post_cwall=False
						cwall_concre = False
						cwall_fence = False
						cwall_existbu = False
						cwall_nbarr = False
						cwall_concre_len = 0
						cwall_fence_len = 0
						cwall_existbu_len = 0
						cwall_nbarr_len = 0
					sch_key=form.cleaned_data['school_key']
					# NOTE(review): ss and ss1 are assigned but never used below.
					ss=form.cleaned_data['open_ft']
					ss1=form.cleaned_data['open_mt']
					if request.POST['gov_chk']=='No':
						if request.POST['trans_faci']=='True':
							chktrans_faci = True
							chktrans_bus= form.cleaned_data['trans_bus']
							chktrans_van= form.cleaned_data['trans_van']
							chktrans_stud= form.cleaned_data['trans_stud']
							chktrans_rules= form.cleaned_data['trans_rules']
						else:
							chktrans_faci = False
							chktrans_bus= None
							chktrans_van= None
							chktrans_stud= None
							chktrans_rules= False
					else:
						chktrans_faci = False
						chktrans_bus= None
						chktrans_van= None
						chktrans_stud= None
						chktrans_rules= False
					infradet=Infradet(
						school_key = sch_key,
						electricity = form.cleaned_data['electricity'],
						tot_area= form.cleaned_data['tot_area'],
						tot_type=form.cleaned_data['tot_type'],
						cov= form.cleaned_data['cov'],
						cov_type=form.cleaned_data['cov_type'],
						opn= form.cleaned_data['opn'],
						opn_type=form.cleaned_data['opn_type'],
						play= form.cleaned_data['play'],
						play_type=form.cleaned_data['play_type'],
						tot_ft = form.cleaned_data['tot_ft'],
						tot_mt = form.cleaned_data['tot_mt'],
						covered_ft = form.cleaned_data['covered_ft'],
						covered_mt = form.cleaned_data['covered_mt'],
						open_ft = form.cleaned_data['open_ft'],
						open_mt = form.cleaned_data['open_mt'],
						play_ft = form.cleaned_data['play_ft'],
						play_mt = form.cleaned_data['play_mt'],
						cwall = post_cwall,
						cwall_concre = cwall_concre,
						cwall_fence = cwall_fence,
						cwall_existbu = cwall_existbu ,
						cwall_nbarr = cwall_nbarr,
						cwall_concre_len = cwall_concre_len,
						cwall_fence_len = cwall_fence_len,
						cwall_existbu_len = cwall_existbu_len,
						cwall_nbarr_len = cwall_nbarr_len,
						cwall_notcon_len = form.cleaned_data['cwall_notcon_len'],
						fireext= form.cleaned_data['fireext'],
						fireext_no = form.cleaned_data['fireext_no'],
						fireext_w = form.cleaned_data['fireext_w'],
						firstaid_box=form.cleaned_data['firstaid_box'],
						rainwater = form.cleaned_data['rainwater'],
						kitchenshed= form.cleaned_data['kitchenshed'],
						furn_desk_no = form.cleaned_data['furn_desk_no'],
						furn_desk_use = form.cleaned_data['furn_desk_use'],
						furn_bench_no = form.cleaned_data['furn_bench_no'],
						furn_bench_use = form.cleaned_data['furn_bench_use'],
						fans = form.cleaned_data['fans'],
						fans_work = form.cleaned_data['fans_work'],
						tubelights = form.cleaned_data['tubelights'],
						tlights_work = form.cleaned_data['tlights_work'],
						bu_no = form.cleaned_data['bu_no'],
						bu_usable =form.cleaned_data['bu_usable'],
						bu_minrep =form.cleaned_data['bu_minrep'],
						bu_majrep =form.cleaned_data['bu_majrep'],
						gu_no =form.cleaned_data['gu_no'],
						gu_usable = form.cleaned_data['gu_usable'],
						gu_minrep =form.cleaned_data['gu_minrep'],
						gu_majrep = form.cleaned_data['gu_majrep'],
						bl_no = form.cleaned_data['bl_no'],
						bl_usable = form.cleaned_data['bl_usable'],
						bl_minrep = form.cleaned_data['bl_minrep'],
						bl_majrep = form.cleaned_data['bl_majrep'],
						gl_no = form.cleaned_data['gl_no'],
						gl_usable = form.cleaned_data['gl_usable'],
						gl_minrep =form.cleaned_data['gl_minrep'],
						gl_majrep = form.cleaned_data['gl_majrep'],
						gentsu_no = form.cleaned_data['gentsu_no'],
						gentsu_usable = form.cleaned_data['gentsu_usable'],
						gentsu_minrep = form.cleaned_data['gentsu_minrep'],
						gentsu_majrep = form.cleaned_data['gentsu_majrep'],
						ladiesu_no = form.cleaned_data['ladiesu_no'],
						ladiesu_usable = form.cleaned_data['ladiesu_usable'],
						ladiesu_minrep = form.cleaned_data['ladiesu_minrep'],
						ladiesu_majrep = form.cleaned_data['ladiesu_majrep'],
						gentsl_no = form.cleaned_data['gentsl_no'],
						gentsl_usable = form.cleaned_data['gentsl_usable'],
						gentsl_minrep = form.cleaned_data['gentsl_minrep'],
						gentsl_majrep = form.cleaned_data['gentsl_majrep'],
						ladiesl_no = form.cleaned_data['ladiesl_no'],
						ladiesl_usable = form.cleaned_data['ladiesl_usable'],
						ladiesl_minrep = form.cleaned_data['ladiesl_minrep'],
						ladiesl_majrep = form.cleaned_data['ladiesl_majrep'],
						incinirator=form.cleaned_data['incinirator'],
						water_toilet = form.cleaned_data['water_toilet'],
						cwsn_toilet = form.cleaned_data['cwsn_toilet'],
						cwsn_toilet_no = form.cleaned_data['cwsn_toilet_no'],
						water_facility=form.cleaned_data['water_facility'],
						water_source=form.cleaned_data['water_source'],
						well_dia=form.cleaned_data['well_dia'],
						well_close=form.cleaned_data['well_close'],
						water_puri=form.cleaned_data['water_puri'],
						water_access = form.cleaned_data['water_access'],
						internet_yes = form.cleaned_data['internet_yes'],
						lightning_arest= form.cleaned_data['lightning_arest'],
						lib_tamil = form.cleaned_data['lib_tamil'],
						lib_eng = form.cleaned_data['lib_eng'],
						lib_others = form.cleaned_data['lib_others'],
						lib_tamil_news = form.cleaned_data['lib_tamil_news'],
						lib_eng_news = form.cleaned_data['lib_eng_news'],
						lib_periodic = form.cleaned_data['lib_periodic'],
						trans_faci= chktrans_faci,
						trans_bus= chktrans_bus,
						trans_van= chktrans_van,
						trans_stud= chktrans_stud,
						trans_rules= chktrans_rules,
						award_recd= form.cleaned_data['award_recd'],
						award_info = form.cleaned_data['award_info'],
						phy_lab= form.cleaned_data['phy_lab'],
						che_lab= form.cleaned_data['che_lab'],
						bot_lab= form.cleaned_data['bot_lab'],
						zoo_lab= form.cleaned_data['zoo_lab'],
						gas_cylin= form.cleaned_data['gas_cylin'],
						suffi_equip= form.cleaned_data['suffi_equip'],
						eb_ht_line= form.cleaned_data['eb_ht_line'],
						)
					infradet.save()
					messages.success(request,'Infrastructure Details Added successfully')
					return HttpResponseRedirect('/schoolnew/school_registration')
				else:
					messages.warning(request,'Infrastructure Details Not Saved')
					return HttpResponseRedirect('/schoolnew/school_registration')
		else:
			# Anonymous user: bounce to the login page, preserving destination.
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class class_section_edit(View):
	"""Edit screen for per-class section and strength details.

	get:  renders the class/section table, seeding one Class_section row
	      per class (derived from the school category code) on first visit.
	post: updates every seeded row from parallel POST lists, matching rows
	      to list entries by position.
	"""
	def get(self,request,**kwargs):
		if request.user.is_authenticated():
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			acade_det = Academicinfo.objects.get(school_key=basic_det.id)
			sch_key = basic_det.id
			sch_clas_exist=School_category.objects.get(id=basic_det.sch_cate_id)
			class_det = Class_section.objects.filter(school_key=sch_key)
			govchk=basic_det.sch_management
			# Aided schools get an extra "aided sections" column in the template.
			if ((str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
				aid_chk='Yes'
			else:
				aid_chk=''
			if basic_det.prekg=='Yes':
				prekg_chk='Yes'
			else:
				prekg_chk='No'
			if basic_det.kgsec=='Yes':
				kgsec_chk='Yes'
			else:
				kgsec_chk='No'
			# Map the school-category code to the list of classes it runs;
			# this list drives the first-time seeding of Class_section rows.
			if sch_clas_exist.category_code=='1':
				sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std']
			elif sch_clas_exist.category_code=='2':
				sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std']
			elif sch_clas_exist.category_code=='3':
				sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std','IX Std','X Std','XI Std','XII Std']
			elif sch_clas_exist.category_code=='4':
				sch_cat_chk=['VI Std','VII Std','VIII Std']
			elif sch_clas_exist.category_code=='5':
				sch_cat_chk=['VI Std','VII Std','VIII Std','IX Std','X Std','XI Std','XII Std',]
			elif sch_clas_exist.category_code=='6':
				sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std','IX Std','X Std']
			elif sch_clas_exist.category_code=='7':
				sch_cat_chk=['VI Std','VII Std','VIII Std','IX Std','X Std']
			elif sch_clas_exist.category_code=='8':
				sch_cat_chk=['IX Std','X Std']
			elif sch_clas_exist.category_code=='10':
				sch_cat_chk=['IX Std','X Std','XI Std','XII Std']
			elif sch_clas_exist.category_code=='11':
				sch_cat_chk=['XI Std','XII Std']
			elif sch_clas_exist.category_code=='14':
				sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std']
			elif sch_clas_exist.category_code=='12':
				sch_cat_chk=['VI Std','VII Std','VIII Std']
			else:
				# Unknown category: fall back to the full I-XII range.
				sch_cat_chk=['I Std','II Std','III Std','IV Std','V Std','VI Std','VII Std','VIII Std','IX Std','X Std','XI Std','XII Std',]
			form=class_section_form()
			if (Class_section.objects.filter(school_key=sch_key).count())>0:
				class_det = Class_section.objects.filter(school_key=sch_key).order_by('id')
			else:
				# First visit: create empty rows (Pre-KG / LKG / UKG first,
				# when the school offers them, then one row per class).
				if basic_det.prekg=='Yes':
					newclass = Class_section(
						school_key=basic_det,
						class_id = 'Pre-KG',
						sections = 0,
						no_sec_aided=0,
						no_stud=0,
						)
					newclass.save()
				if basic_det.kgsec=='Yes':
					newclass = Class_section(
						school_key=basic_det,
						class_id = 'LKG',
						sections = 0,
						no_sec_aided=0,
						no_stud=0,
						)
					newclass.save()
					newclass = Class_section(
						school_key=basic_det,
						class_id = 'UKG',
						sections = 0,
						no_sec_aided=0,
						no_stud=0,
						)
					newclass.save()
				for entry in range(len(sch_cat_chk)):
					newclass = Class_section(
						school_key=basic_det,
						class_id = sch_cat_chk[entry],
						sections = 0,
						no_sec_aided=0,
						no_stud=0,
						)
					newclass.save()
				class_det = Class_section.objects.filter(school_key=sch_key)
			# Show per-medium strength columns when any non-Tamil/English
			# medium is offered.
			if (acade_det.tel_med | acade_det.mal_med| acade_det.kan_med| acade_det.urdu_med | acade_det.oth_med):
				oth_med_stren=True
			else:
				oth_med_stren=False
			return render (request,'class_section_table_edit.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			form = class_section_form(request.POST,request.FILES)
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			academic_edit=Academicinfo.objects.get(school_key=basic_det.id)
			sch_key = basic_det.id
			# Ordered to line up positionally with the POSTed parallel lists;
			# the template must render rows in the same id order.
			class_det = Class_section.objects.filter(school_key=sch_key).order_by('id')
			try:
				if form.is_valid():
					sch_key=form.cleaned_data['school_key']
					schsec = request.POST.getlist('sections')
					schaid = request.POST.getlist('no_sec_aided')
					stud_coun = request.POST.getlist('no_stud')
					tamstu=request.POST.getlist('tam_stud')
					engstu=request.POST.getlist('eng_stud')
					othstu=request.POST.getlist('oth_stud')
					cwsnstu=request.POST.getlist('cwsn_stud_no')
					counter=0
					# Optional lists (aided/medium) are only applied when the
					# template actually posted them (len>0).
					for i in class_det:
						class_edit = Class_section.objects.get(id=i.id)
						class_edit.sections=schsec[counter]
						if len(schaid)>0:
							class_edit.no_sec_aided=schaid[counter]
						class_edit.no_stud=stud_coun[counter]
						if len(tamstu)>0:
							class_edit.tam_stud=tamstu[counter]
						if len(engstu)>0:
							class_edit.eng_stud=engstu[counter]
						if len(othstu)>0:
							class_edit.oth_stud=othstu[counter]
						class_edit.cwsn_stud_no=cwsnstu[counter]
						class_edit.save()
						counter+=1
					messages.success(request,'Class & Section Details Updated successfully')
					return HttpResponseRedirect('/schoolnew/school_registration')
				else:
					messages.warning(request,'No. sections allowed is Min. 1 Max.30 - Hence Not Saved')
					return HttpResponseRedirect('/schoolnew/class_section_edit')
			except Exception:
				# NOTE(review): swallows all errors (e.g. IndexError from
				# mismatched list lengths) and silently redirects -- rows saved
				# before the failure remain saved.
				pass
				return HttpResponseRedirect('/schoolnew/class_section_edit/')
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Teaching_edit(View):
	"""Teaching post-sanction list and add screen.

	get:  lists sanctioned teaching posts (staff_cat=1) and filters the
	      designation dropdown by the school's category code.
	post: adds a new sanctioned post; headmaster-type posts (ids
	      85/70/50/51) are limited to one per school.
	"""
	def get(self,request,**kwargs):
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			sch_key = basic_det.id
			post_det = Staff.objects.filter(Q(school_key=sch_key) & Q(staff_cat=1))
			form=staff_form()
			chk_catid=School_category.objects.get(id=basic_det.sch_cate_id)
			pg_head='Teaching'
			# Designation choices are narrowed by school level (PS/MS/HS/HR).
			# NOTE(review): (chk_catid=='11') compares the model instance to a
			# string and is always False -- probably meant
			# chk_catid.category_code=='11'; confirm before relying on it.
			if ((chk_catid.category_code=='1')|(chk_catid=='11')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='PS')).exclude(user_cate='SCHOOL&OFFICE')
			elif ((chk_catid.category_code=='2')|(chk_catid.category_code=='4')|(chk_catid.category_code=='12')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='MS')|Q(user_level='HRHSMS')).exclude(user_cate='SCHOOL&OFFICE')
			elif ((chk_catid.category_code=='6')|(chk_catid.category_code=='7')|(chk_catid.category_code=='8')) :
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='HS')|Q(user_level='HRHS')|Q(user_level='HRHSMS')).exclude(user_cate='SCHOOL&OFFICE')
			elif ((chk_catid.category_code=='3')|(chk_catid.category_code=='5')|(chk_catid.category_code=='9')|(chk_catid.category_code=='10')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='HR')|Q(user_level='HRHS')|Q(user_level='HRHSMS')).exclude(user_cate='SCHOOL&OFFICE')
			else:
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)).exclude(user_cate='SCHOOL&OFFICE')
			return render (request,'post_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		if request.user.is_authenticated():
			pk=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = staff_form(request.POST,request.FILES)
			if form.is_valid():
				sch_key=form.cleaned_data['school_key']
				chk_post=form.cleaned_data['post_name']
				if (Staff.objects.filter(school_key=sch_key).count())>0:
					# Headmaster-type designations: reject a second sanction
					# record for the same post at this school.
					if(chk_post.id in(85,70,50,51)):
						if Staff.objects.filter(school_key=basic_det.id).filter(post_name=form.cleaned_data['post_name']).exists():
							messages.warning(request,'Headmaster post santion details already entered, if you want to correct pl. use Update option')
							return HttpResponseRedirect('/schoolnew/teaching_edit/')
						else:
							# Temporary-post GO fields only apply to
							# non-permanent posts.
							if request.POST['post_mode']=='Permanent':
								tpost_GO_pd = ''
								ttemgofm_dt = None
								ttemgoto_dt = None
							else:
								tpost_GO_pd = form.cleaned_data['post_GO_pd']
								ttemgofm_dt = form.cleaned_data['temgofm_dt']
								ttemgoto_dt = form.cleaned_data['temgoto_dt']
							# New post starts unfilled: vacancies = sanctioned.
							newteachpost = Staff(
								school_key=sch_key,
								post_name = form.cleaned_data['post_name'],
								post_sub = form.cleaned_data['post_sub'],
								post_sanc = form.cleaned_data['post_sanc'],
								post_mode = form.cleaned_data['post_mode'],
								post_GO = form.cleaned_data['post_GO'],
								post_GO_dt = form.cleaned_data['post_GO_dt'],
								post_filled = 0,
								post_vac = form.cleaned_data['post_sanc'],
								post_GO_pd = tpost_GO_pd,
								temgofm_dt = ttemgofm_dt,
								temgoto_dt = ttemgoto_dt,
								staff_cat = 1,
								)
							newteachpost.save()
							messages.success(request,'Post Sanction Details addedded successfully')
							return HttpResponseRedirect('/schoolnew/teaching_edit/')
					else:
						# Non-headmaster post: no duplicate restriction.
						if request.POST['post_mode']=='Permanent':
							tpost_GO_pd = ''
							ttemgofm_dt = None
							ttemgoto_dt = None
						else:
							tpost_GO_pd = form.cleaned_data['post_GO_pd']
							ttemgofm_dt = form.cleaned_data['temgofm_dt']
							ttemgoto_dt = form.cleaned_data['temgoto_dt']
						newteachpost = Staff(
							school_key=sch_key,
							post_name = form.cleaned_data['post_name'],
							post_sub = form.cleaned_data['post_sub'],
							post_sanc = form.cleaned_data['post_sanc'],
							post_mode = form.cleaned_data['post_mode'],
							post_GO = form.cleaned_data['post_GO'],
							post_GO_dt = form.cleaned_data['post_GO_dt'],
							post_filled = 0,
							post_vac = form.cleaned_data['post_sanc'],
							post_GO_pd = tpost_GO_pd,
							temgofm_dt = ttemgofm_dt,
							temgoto_dt = ttemgoto_dt,
							staff_cat = 1,
							)
						newteachpost.save()
						messages.success(request,'Post Sanction Details addedded successfully')
						return HttpResponseRedirect('/schoolnew/teaching_edit/')
				else:
					# First-ever post for this school: same creation logic.
					if request.POST['post_mode']=='Permanent':
						tpost_GO_pd = ''
						ttemgofm_dt = None
						ttemgoto_dt = None
					else:
						tpost_GO_pd = form.cleaned_data['post_GO_pd']
						ttemgofm_dt = form.cleaned_data['temgofm_dt']
						ttemgoto_dt = form.cleaned_data['temgoto_dt']
					newteachpost = Staff(
						school_key=sch_key,
						post_name = form.cleaned_data['post_name'],
						post_sub = form.cleaned_data['post_sub'],
						post_sanc = form.cleaned_data['post_sanc'],
						post_mode = form.cleaned_data['post_mode'],
						post_GO = form.cleaned_data['post_GO'],
						post_GO_dt = form.cleaned_data['post_GO_dt'],
						post_filled = 0,
						post_vac = form.cleaned_data['post_sanc'],
						post_GO_pd = tpost_GO_pd,
						temgofm_dt = ttemgofm_dt,
						temgoto_dt = ttemgoto_dt,
						staff_cat = 1,
						)
					newteachpost.save()
					messages.success(request,'Post Sanction Details addedded successfully')
					return HttpResponseRedirect('/schoolnew/teaching_edit/')
			else:
				# Python 2 print statement: form errors go to the console only.
				print form.errors
				messages.warning(request,'Post Sanction Details Not Saved')
				return render (request,'post_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Teaching_delete(View):
	"""Deletes one sanctioned teaching post record (looked up by URL pk)
	and returns to the teaching-post list with a confirmation message."""
	def get(self, request, **kwargs):
		# Anonymous users are sent to login, preserving the destination.
		if not request.user.is_authenticated():
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
		staff_pk = self.kwargs.get('pk')
		staff_rec = Staff.objects.get(id=staff_pk)
		staff_rec.delete()
		notice = str(staff_rec.post_name) + " - Posts has been successfully removed "
		messages.success(request, notice)
		return HttpResponseRedirect('/schoolnew/teaching_edit/')
class Teaching_update(View):
def get(self,request,**kwargs):
if request.user.is_authenticated():
tid=self.kwargs.get('pk')
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
instance=Staff.objects.get(id=tid)
staff_det_dt=Staff.objects.get(id=tid)
form=staff_form(instance=instance)
post_name= instance.post_name
post_sub= instance.post_sub
id_tpost_sub=post_sub.id
post_sanc=instance.post_sanc
post_mode= instance.post_mode
post_GO= instance.post_GO
go_dt= instance.post_GO_dt
post_GO_dt= instance.post_GO_dt
post_GO_pd= instance.post_GO_pd
post_filled= instance.post_filled
post_vac= instance.post_vac
post_filled = instance.post_filled
post_vac = instance.post_vac
staff_cat = instance.staff_cat
temgofm_dt = instance.temgofm_dt
temgoto_dt = instance.temgoto_dt
pg_head='Teaching'
if staff_det_dt.post_GO_dt:
go_dt=staff_det_dt.post_GO_dt.strftime('%Y-%m-%d')
chk_catid=School_category.objects.get(id=basic_det.sch_cate_id)
if ((chk_catid.category_code=='1')|(chk_catid=='11')):
desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='PS')).exclude(user_cate='SCHOOL&OFFICE')
elif ((chk_catid.category_code=='2')|(chk_catid.category_code=='4')|(chk_catid.category_code=='12')):
desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='MS')|Q(user_level='HRHSMS')).exclude(user_cate='SCHOOL&OFFICE')
elif ((chk_catid.category_code=='6')|(chk_catid.category_code=='7')|(chk_catid.category_code=='8')) :
desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='HS')|Q(user_level='HRHS')|Q(user_level='HRHSMS')).exclude(user_cate='SCHOOL&OFFICE')
elif ((chk_catid.category_code=='3')|(chk_catid.category_code=='5')|(chk_catid.category_code=='9')|(chk_catid.category_code=='10')):
desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)|Q(user_level='HR')|Q(user_level='HRHS')|Q(user_level='HRHSMS')).exclude(user_cate='SCHOOL&OFFICE')
else:
desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL') & Q(user_level__isnull=True)).exclude(user_cate='SCHOOL&OFFICE')
sch_key = basic_det.id
staff_det = Staff.objects.filter(school_key=sch_key)
return render (request,'post_edit_upd.html',locals())
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
def post(self,request,**kwargs):
if request.user.is_authenticated():
tid=self.kwargs.get('pk')
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
form = staff_form(request.POST,request.FILES)
instance=Staff.objects.get(id=tid)
newteachpost = Staff.objects.get(id=tid)
if form.is_valid():
chk_post=form.cleaned_data['post_name']
if(chk_post.user_desig in("85","70","50","51")):
if Staff.objects.filter(school_key=basic_det.id).filter(post_name=form.cleaned_data['post_name']).exclude(id=tid).exists():
messages.warning(request,'Headmaster post santion details already entered, if you want to correct pl. use Update option')
return HttpResponseRedirect('/schoolnew/teaching_edit/')
else:
newteachpost.post_name = form.cleaned_data['post_name']
newteachpost.post_sub = form.cleaned_data['post_sub']
newteachpost.post_sanc = form.cleaned_data['post_sanc']
newteachpost.post_mode = form.cleaned_data['post_mode']
newteachpost.post_GO = form.cleaned_data['post_GO']
newteachpost.post_GO_dt = form.cleaned_data['post_GO_dt']
newteachpost.post_GO_pd = form.cleaned_data['post_GO_pd']
newteachpost.post_vac = (form.cleaned_data['post_sanc']-newteachpost.post_filled)
newteachpost.staff_cat = 1
if newteachpost.post_mode=='Permanent':
newteachpost.temgofm_dt = None
newteachpost.temgoto_dt = None
else:
newteachpost.ttemgofm_dt = form.cleaned_data['temgofm_dt']
newteachpost.ttemgoto_dt = form.cleaned_data['temgoto_dt']
newteachpost.save()
messages.success(request,'Teaching Post Sanction Details Updated successfully')
return HttpResponseRedirect('/schoolnew/teaching_edit/')
else:
newteachpost.post_name = form.cleaned_data['post_name']
newteachpost.post_sub = form.cleaned_data['post_sub']
newteachpost.post_sanc = form.cleaned_data['post_sanc']
newteachpost.post_mode = form.cleaned_data['post_mode']
newteachpost.post_GO = form.cleaned_data['post_GO']
newteachpost.post_GO_dt = form.cleaned_data['post_GO_dt']
newteachpost.post_GO_pd = form.cleaned_data['post_GO_pd']
newteachpost.post_vac = (form.cleaned_data['post_sanc']-newteachpost.post_filled)
newteachpost.staff_cat = 1
if newteachpost.post_mode=='Permanent':
newteachpost.temgofm_dt = None
newteachpost.temgoto_dt = None
else:
newteachpost.temgofm_dt = form.cleaned_data['temgofm_dt']
newteachpost.temgoto_dt = form.cleaned_data['temgoto_dt']
newteachpost.save()
messages.success(request,'Teaching Post Sanction Details Updated successfully')
return HttpResponseRedirect('/schoolnew/teaching_edit/')
else:
messages.warning(request,'Teaching Post Sanction Details Not Updated')
return render (request,'post_edit_upd.html',locals())
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Nonteaching_edit(View):
	"""List/add view for non-teaching sanctioned posts (Staff rows with
	staff_cat=2) of the logged-in school. The login username is the school's
	UDISE code; templates read the local variables directly via locals()."""
	def get(self,request,**kwargs):
		# Render the existing non-teaching posts plus an empty add form.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			sch_key = basic_det.id
			post_det = Staff.objects.filter(Q(school_key=sch_key) & Q(staff_cat=2))
			form=staff_form()
			chk_catid=School_category.objects.get(id=basic_det.sch_cate_id)
			pg_head='Non-Teaching'
			# Designation choices depend on the school-category code.
			# NOTE(review): the first three branches build identical querysets;
			# confirm whether they were intended to differ.
			if ((chk_catid.category_code=='1')|(chk_catid.category_code=='11')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') & Q(user_level__isnull=True))
			elif ((chk_catid.category_code=='2')|(chk_catid.category_code=='4')|(chk_catid.category_code=='12')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') & Q(user_level__isnull=True))
			elif ((chk_catid.category_code=='6')|(chk_catid.category_code=='7')|(chk_catid.category_code=='8')) :
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') & Q(user_level__isnull=True))
			elif ((chk_catid.category_code=='3')|(chk_catid.category_code=='5')|(chk_catid.category_code=='9')|(chk_catid.category_code=='10')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') )
			else:
				desig_det= User_desig.objects.filter((Q(user_cate='SCHOOL&OFFICE')|Q(user_cate='OFFICE')) & Q(user_level__isnull=True))
			return render (request,'post_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Create one new non-teaching Staff post from the submitted staff_form.
		if request.user.is_authenticated():
			pk=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = staff_form(request.POST,request.FILES)
			if form.is_valid():
				sch_key=form.cleaned_data['school_key']
				# Permanent posts carry no GO period or temporary-GO date range.
				if request.POST['post_mode']=='Permanent':
					tpost_GO_pd = ''
					ttemgofm_dt = None
					ttemgoto_dt = None
				else:
					tpost_GO_pd = form.cleaned_data['post_GO_pd']
					ttemgofm_dt = form.cleaned_data['temgofm_dt']
					ttemgoto_dt = form.cleaned_data['temgoto_dt']
				# New posts start with nothing filled, so vacancy == sanctioned.
				newnnonteachpost = Staff(
					school_key=sch_key,
					post_name = form.cleaned_data['post_name'],
					post_sub = form.cleaned_data['post_sub'],
					post_sanc = form.cleaned_data['post_sanc'],
					post_mode = form.cleaned_data['post_mode'],
					post_GO = form.cleaned_data['post_GO'],
					post_GO_dt = form.cleaned_data['post_GO_dt'],
					post_filled = 0,
					post_vac = form.cleaned_data['post_sanc'],
					post_GO_pd = tpost_GO_pd,
					temgofm_dt = ttemgofm_dt,
					temgoto_dt = ttemgoto_dt,
					staff_cat = 2,
					)
				newnnonteachpost.save()
				messages.success(request,'Non-Teaching Post Details Added successfully')
				return HttpResponseRedirect('/schoolnew/nonteaching_edit/')
			else:
				# Python 2 debug print of validation errors (stdout only).
				print form.errors
				messages.warning(request,'Non-Teaching Post Details Not Saved')
				return render (request,'post_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Nonteaching_update(View):
	"""Edit view for one non-teaching sanctioned post (Staff, staff_cat=2).
	The pk URL kwarg identifies the Staff row; templates consume the local
	variables via locals()."""
	def get(self,request,**kwargs):
		# Pre-populate the form and expose each field as a local for the template.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			instance=Staff.objects.get(id=tid)
			sch_key = basic_det.id
			nonteach_det_dt=Staff.objects.get(id=tid)
			form=staff_form(instance=instance)
			post_name= instance.post_name
			post_sub= instance.post_sub
			id_tpost_sub=post_sub.id
			post_sanc=instance.post_sanc
			post_mode= instance.post_mode
			post_GO= instance.post_GO
			go_dt= instance.post_GO_dt
			post_GO_dt= instance.post_GO_dt
			post_GO_pd= instance.post_GO_pd
			post_filled= instance.post_filled
			post_vac= instance.post_vac
			# NOTE(review): post_filled/post_vac are assigned twice; harmless.
			post_filled = instance.post_filled
			post_vac = instance.post_vac
			staff_cat = instance.staff_cat
			temgofm_dt = instance.temgofm_dt
			temgoto_dt = instance.temgoto_dt
			pg_head='Non-Teaching'
			# The template's date input expects ISO format (YYYY-MM-DD).
			if nonteach_det_dt.post_GO_dt:
				go_dt=nonteach_det_dt.post_GO_dt.strftime('%Y-%m-%d')
			chk_catid=School_category.objects.get(id=basic_det.sch_cate_id)
			# Designation choices depend on the school-category code.
			if ((chk_catid.category_code=='1')|(chk_catid.category_code=='11')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') & Q(user_level__isnull=True))
			elif ((chk_catid.category_code=='2')|(chk_catid.category_code=='4')|(chk_catid.category_code=='12')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') & Q(user_level__isnull=True))
			elif ((chk_catid.category_code=='6')|(chk_catid.category_code=='7')|(chk_catid.category_code=='8')) :
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') & Q(user_level__isnull=True))
			elif ((chk_catid.category_code=='3')|(chk_catid.category_code=='5')|(chk_catid.category_code=='9')|(chk_catid.category_code=='10')):
				desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') )
			else:
				desig_det= User_desig.objects.filter((Q(user_cate='SCHOOL&OFFICE')|Q(user_cate='OFFICE')) & Q(user_level__isnull=True))
			sch_key = basic_det.id
			return render (request,'post_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Apply the submitted edits to the existing Staff row.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = staff_form(request.POST,request.FILES)
			instance=Staff.objects.get(id=tid)
			newnonteachpost = Staff.objects.get(id=tid)
			if form.is_valid():
				newnonteachpost.post_name = form.cleaned_data['post_name']
				newnonteachpost.post_sanc = form.cleaned_data['post_sanc']
				newnonteachpost.post_mode = form.cleaned_data['post_mode']
				newnonteachpost.post_GO = form.cleaned_data['post_GO']
				newnonteachpost.post_GO_dt = form.cleaned_data['post_GO_dt']
				newnonteachpost.post_GO_pd = form.cleaned_data['post_GO_pd']
				newnonteachpost.post_sub = form.cleaned_data['post_sub']
				# Vacancy is derived: sanctioned strength minus posts filled.
				newnonteachpost.post_vac = (form.cleaned_data['post_sanc']-newnonteachpost.post_filled)
				newnonteachpost.staff_cat = 2
				if newnonteachpost.post_mode=='Permanent':
					# Permanent posts have no temporary-GO validity window.
					newnonteachpost.temgofm_dt = None
					newnonteachpost.temgoto_dt = None
				else:
					newnonteachpost.temgofm_dt = form.cleaned_data['temgofm_dt']
					newnonteachpost.temgoto_dt = form.cleaned_data['temgoto_dt']
				newnonteachpost.save()
				messages.success(request,'Non-Teaching Post Details Updated successfully')
				return HttpResponseRedirect('/schoolnew/nonteaching_edit/')
			else:
				messages.warning(request,'Non-Teaching Post Details Not Updated')
				return render (request,'post_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Nonteaching_delete(View):
	"""Delete one non-teaching Staff post and redirect back to the list.

	FIX: removed a leftover Python 2 debug statement (`print data.school_key`)
	that wrote to stdout on every delete in production.
	"""
	def get(self, request,**kwargs):
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			data=Staff.objects.get(id=tid)
			data.delete()
			# The in-memory instance still holds its fields after delete().
			msg= str(data.post_name)+" - Posts has been successfully removed "
			messages.success(request, msg )
			return HttpResponseRedirect('/schoolnew/nonteaching_edit/')
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Parttime_edit(View):
	"""List/add view for part-time instructors (Parttimestaff) of the school.

	FIX: the invalid-form path previously flashed `messages.success` with a
	"Not Saved" text; changed to `messages.warning` for consistency with every
	other view in this file (correct message level for a failure).
	"""
	def get(self,request,**kwargs):
		# Render existing part-time staff plus an empty add form.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			acade_det = Academicinfo.objects.filter(school_key=basic_det.id)
			sch_key = basic_det.id
			parttime_det = Parttimestaff.objects.filter(school_key=sch_key)
			part_time_sub=Part_time_Subjects.objects.all()
			form = parttimestaff_form()
			return render (request,'parttime_staff_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Create one Parttimestaff row from the submitted form.
		if request.user.is_authenticated():
			pk=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = parttimestaff_form(request.POST,request.FILES)
			if form.is_valid():
				sch_key=form.cleaned_data['school_key']
				newnpartime = Parttimestaff(
					school_key=sch_key,
					part_instr = form.cleaned_data['part_instr'],
					part_instr_sub = form.cleaned_data['part_instr_sub'],)
				newnpartime.save()
				messages.success(request,'Part-time Teacher Details Added successfully')
				return HttpResponseRedirect('/schoolnew/parttime_edit/')
			else:
				# Failure path: warn (was messages.success by mistake).
				messages.warning(request,'Part-time Teacher Details Not Saved')
				return render (request,'parttime_staff_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Parttime_update(View):
	"""Edit one Parttimestaff record of the logged-in school.

	FIX: the record was fetched twice in get() (two identical
	`Parttimestaff.objects.get(id=tid)` calls) — removed the redundant query.
	"""
	def get(self,request,**kwargs):
		# Pre-populate the form with the existing record.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			sch_key = basic_det.id
			parttime_det = Parttimestaff.objects.filter(school_key=sch_key)
			part_time_sub=Part_time_Subjects.objects.all()
			instance=Parttimestaff.objects.get(id=tid)
			form = parttimestaff_form(instance=instance)
			part_instr = instance.part_instr
			part_instr_sub = instance.part_instr_sub
			return render (request,'parttime_staff_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Apply the submitted edits to the existing record.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = parttimestaff_form(request.POST,request.FILES)
			instance=Parttimestaff.objects.get(id=tid)
			newparttimestaff = Parttimestaff.objects.get(id=tid)
			if form.is_valid():
				newparttimestaff.part_instr = form.cleaned_data['part_instr']
				newparttimestaff.part_instr_sub = form.cleaned_data['part_instr_sub']
				newparttimestaff.save()
				messages.success(request,'Part-time Teacher Details Updated successfully')
				return HttpResponseRedirect('/schoolnew/parttime_edit/')
			else:
				messages.warning(request,'Part-time Teacher Details Not Added')
				return render (request,'parttime_staff_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Parttime_delete(View):
	"""Remove one part-time teacher record, then return to the list page."""
	def get(self, request, **kwargs):
		# Guard clause: anonymous users are bounced to the login page.
		if not request.user.is_authenticated():
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
		record = Parttimestaff.objects.get(id=self.kwargs.get('pk'))
		record.delete()
		# The deleted instance still carries its field values in memory.
		messages.success(request, record.part_instr + " - Part Time Teacher has been successfully removed ")
		return HttpResponseRedirect('/schoolnew/parttime_edit/')
class Group_edit(View):
	"""List/add view for higher-secondary groups (Sch_groups) of the school.

	FIX: corrected the user-facing duplicate-group message typo
	("Pleae check" -> "Please check").
	"""
	def get(self,request,**kwargs):
		# Render existing groups plus an empty add form; aided schools get an
		# extra aided-sections column (aid_chk flag read by the template).
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			sch_key = basic_det.id
			group_det = Sch_groups.objects.filter(school_key=sch_key)
			gropu_mas= Groups.objects.all()
			form=sch_groups_form()
			govchk=basic_det.sch_management
			if ((str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
				aid_chk='Yes'
			else:
				aid_chk=''
			return render (request,'group_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Create one Sch_groups row; a group may appear only once per school.
		if request.user.is_authenticated():
			form = sch_groups_form(request.POST,request.FILES)
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			if form.is_valid():
				if Sch_groups.objects.filter(school_key=basic_det.id).filter(group_name=request.POST['group_name']).exists():
					messages.warning(request,'This Group already Exist. Please check & update the same if necessary')
					return HttpResponseRedirect('/schoolnew/group_edit')
				else:
					sch_key=form.cleaned_data['school_key']
					newgroup = Sch_groups(
						school_key =sch_key,
						group_name=form.cleaned_data['group_name'],
						sec_in_group=form.cleaned_data['sec_in_group'],
						sec_in_group_aid=form.cleaned_data['sec_in_group_aid'],
						permis_ordno=form.cleaned_data['permis_ordno'],
						permis_orddt=form.cleaned_data['permis_orddt'],
						)
					newgroup.save()
					messages.success(request,'Group Details Added successfully')
					return HttpResponseRedirect('/schoolnew/group_edit/')
			else:
				messages.warning(request,'Group Details Not Saved')
				return render (request,'group_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Group_delete(View):
	"""Remove one school group record, then return to the group list page."""
	def get(self, request, **kwargs):
		# Guard clause: anonymous users are bounced to the login page.
		if not request.user.is_authenticated():
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
		record = Sch_groups.objects.get(id=self.kwargs.get('pk'))
		record.delete()
		# The deleted instance still carries its field values in memory.
		messages.success(request, record.group_name + " - Group has been successfully removed ")
		return HttpResponseRedirect('/schoolnew/group_edit/')
class Group_update(View):
	"""Edit one Sch_groups record of the logged-in school.

	FIXES: (1) removed an unreachable `return HttpResponseRedirect(...)` that
	followed a `return render(...)` in post(); (2) the duplicate-group notice
	now uses `messages.warning` instead of `messages.success`, matching the
	level used for the same condition in Group_edit.
	"""
	def get(self, request,**kwargs):
		# Pre-populate the form; aid_chk tells the template whether to show
		# the aided-sections column for aided-management schools.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			instance=Sch_groups.objects.get(id=tid)
			sch_key = basic_det.id
			group_det = Sch_groups.objects.filter(school_key=sch_key)
			group_name=instance.group_name
			sec_in_group=instance.sec_in_group
			sec_in_group_aid=instance.sec_in_group_aid
			permis_ordno=instance.permis_ordno
			permis_orddt=instance.permis_orddt
			gropu_mas= Groups.objects.all()
			govchk=basic_det.sch_management
			if ((str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
				aid_chk='Yes'
			else:
				aid_chk=''
			return render(request,'group_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Apply the submitted edits; a group may appear only once per school.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = sch_groups_form(request.POST,request.FILES)
			instance=Sch_groups.objects.get(id=tid)
			group_edit = Sch_groups.objects.get(id=tid)
			if form.is_valid():
				if Sch_groups.objects.filter(school_key=basic_det.id).filter(group_name=request.POST['group_name']).exclude(id=tid).exists():
					messages.warning(request,'This Group already exist. Please update the same')
					return HttpResponseRedirect('/schoolnew/group_edit')
				else:
					group_edit.group_name=form.cleaned_data['group_name']
					group_edit.sec_in_group=form.cleaned_data['sec_in_group']
					group_edit.sec_in_group_aid=form.cleaned_data['sec_in_group_aid']
					group_edit.permis_ordno=form.cleaned_data['permis_ordno']
					group_edit.permis_orddt=form.cleaned_data['permis_orddt']
					group_edit.save()
					messages.success(request,'Group Details Updated successfully')
					return HttpResponseRedirect('/schoolnew/group_edit')
			else:
				messages.warning(request,'Group Details Not Updated')
				return render(request,'group_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Buildabs_edit(View):
	"""List/add view for building abstracts (Building_abs) of the school.
	Government/aided schools record only basic building facts; other
	managements must also supply stability/licence/sanitary/fire
	certificate details."""
	def get(self,request,**kwargs):
		# Render existing building abstracts plus an empty add form. The
		# govaid_chk/aid_chk/gov_chk flags drive template visibility.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			buildabs_det = Building_abs.objects.filter(school_key=basic_det.id)
			sch_key = basic_det.id
			Build_abs = Building_abs.objects.filter(school_key=sch_key)
			form=building_abs_form()
			govchk=basic_det.sch_management
			# Python 2 debug print of the management name (stdout only).
			print govchk
			# govaid_chk: government OR aided management.
			if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')|(str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
				govaid_chk='Yes'
			else:
				govaid_chk=''
			# aid_chk: aided managements only.
			if ((str(govchk)=='Fully Aided School')|(str(govchk)=='Partly Aided School')|(str(govchk)=='Anglo Indian (Fully Aided) School')|(str(govchk)=='Anglo Indian (Partly Aided) School')|(str(govchk)=='Oriental (Fully Aided) Sanskrit School')|(str(govchk)=='Oriental (Partly Aided) Sanskrit School')|(str(govchk)=='Oriental (Fully Aided) Arabic School')|(str(govchk)=='Oriental (Partly Aided) Arabic School')|(str(govchk)=='Differently Abled Department Aided School')):
				aid_chk='Yes'
			else:
				aid_chk=''
			# gov_chk: purely government managements.
			if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')):
				gov_chk='Yes'
			else:
				gov_chk=''
			return render (request,'buildabs_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Create one Building_abs row from the submitted building_abs_form.
		if request.user.is_authenticated():
			pk=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form=building_abs_form(request.POST,request.FILES)
			if form.is_valid():
				sch_key=form.cleaned_data['school_key']
				# manage_cate_id==1 presumably means government management —
				# TODO confirm against the Basicinfo model.
				if basic_det.manage_cate_id==1:
					# Government schools: basic building facts only.
					newbuildabs = Building_abs(
						school_key=sch_key,
						building_name = form.cleaned_data['building_name'],
						no_of_floors = form.cleaned_data['no_of_floors'],
						stair_case_no = form.cleaned_data['stair_case_no'],
						stair_case_width = form.cleaned_data['stair_case_width'],
						building_funded = form.cleaned_data['building_funded'],
						build_pres_cond = form.cleaned_data['build_pres_cond'],
						build_cons_yr = form.cleaned_data['build_cons_yr'],
						)
					newbuildabs.save()
				else:
					# Other managements must also supply certificate details.
					newbuildabs = Building_abs(
						school_key=sch_key,
						building_name = form.cleaned_data['building_name'],
						no_of_floors = form.cleaned_data['no_of_floors'],
						stair_case_no = form.cleaned_data['stair_case_no'],
						stair_case_width = form.cleaned_data['stair_case_width'],
						building_funded = form.cleaned_data['building_funded'],
						stab_cer_no = form.cleaned_data['stab_cer_no'],
						stab_cer_date = form.cleaned_data['stab_cer_date'],
						stab_fm_dt = form.cleaned_data['stab_fm_dt'],
						stab_to_dt = form.cleaned_data['stab_to_dt'],
						stab_iss_auth = form.cleaned_data['stab_iss_auth'],
						no_stud = form.cleaned_data['no_stud'],
						lic_cer_no = form.cleaned_data['lic_cer_no'],
						lic_cer_dt = form.cleaned_data['lic_cer_dt'],
						lic_iss_auth = form.cleaned_data['lic_iss_auth'],
						san_cer_no = form.cleaned_data['san_cer_no'],
						san_cer_dt = form.cleaned_data['san_cer_dt'],
						san_iss_auth = form.cleaned_data['san_iss_auth'],
						fire_cer_no = form.cleaned_data['fire_cer_no'],
						fire_cer_dt = form.cleaned_data['fire_cer_dt'],
						fire_iss_auth = form.cleaned_data['fire_iss_auth'],
						build_pres_cond = form.cleaned_data['build_pres_cond'],
						build_cons_yr = form.cleaned_data['build_cons_yr'],
						)
					newbuildabs.save()
				messages.success(request,'Building abstract Details Added successfully')
				return HttpResponseRedirect('/schoolnew/buildabs_edit/')
			else:
				# Python 2 debug print of validation errors (stdout only).
				print form.errors
				messages.warning(request,'Building abstract Details Not Saved')
				return render (request,'buildabs_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Buildabs_update(View):
	"""Edit one Building_abs record of the logged-in school.

	FIXES: (1) in post(), build_pres_cond and build_cons_yr were assigned to
	throw-away local variables in both branches, so those two fields were
	never persisted — they are now set on the model instance; (2) in get(),
	`stability_cer_date` did not match the field name `stab_cer_date` used
	throughout this class, so the ISO-date formatting raised AttributeError.
	"""
	def get(self,request,**kwargs):
		# Pre-populate the form and expose each field as a local for the template.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			instance=Building_abs.objects.get(id=tid)
			buildabs_det = Building_abs.objects.filter(school_key=basic_det.id)
			sch_key = basic_det.id
			Build_abs_det= Building_abs.objects.filter(school_key=sch_key)
			Build_sta_dt=Building_abs.objects.get(id=tid)
			form=building_abs_form(instance=instance)
			building_name = instance.building_name
			no_of_floors = instance.no_of_floors
			stair_case_no = instance.stair_case_no
			stair_case_width = instance.stair_case_width
			building_funded = instance.building_funded
			stab_cer_no = instance.stab_cer_no
			stab_cer_date= instance.stab_cer_date
			stab_fm_dt=instance.stab_fm_dt
			stab_to_dt=instance.stab_to_dt
			stab_iss_auth=instance.stab_iss_auth
			no_stud=instance.no_stud
			lic_cer_no=instance.lic_cer_no
			lic_cer_dt=instance.lic_cer_dt
			lic_iss_auth=instance.lic_iss_auth
			san_cer_no=instance.san_cer_no
			san_cer_dt=instance.san_cer_dt
			san_iss_auth=instance.san_iss_auth
			fire_cer_no=instance.fire_cer_no
			fire_cer_dt=instance.fire_cer_dt
			fire_iss_auth=instance.fire_iss_auth
			build_pres_cond=instance.build_pres_cond
			build_cons_yr=instance.build_cons_yr
			# The template's date input expects ISO format (YYYY-MM-DD).
			# BUG FIX: was Build_sta_dt.stability_cer_date (no such field).
			if Build_sta_dt.stab_cer_date:
				stab_dt=Build_sta_dt.stab_cer_date.strftime('%Y-%m-%d')
			return render (request,'buildabs_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Apply the submitted edits to the existing Building_abs row.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = building_abs_form(request.POST,request.FILES)
			instance=Building_abs.objects.get(id=tid)
			newbuildabs = Building_abs.objects.get(id=tid)
			if form.is_valid():
				# manage_cate_id==1 presumably means government management —
				# those schools record only basic building facts.
				if basic_det.manage_cate_id==1:
					newbuildabs.building_name = form.cleaned_data['building_name']
					newbuildabs.no_of_floors = form.cleaned_data['no_of_floors']
					newbuildabs.stair_case_no = form.cleaned_data['stair_case_no']
					newbuildabs.stair_case_width = form.cleaned_data['stair_case_width']
					newbuildabs.building_funded = form.cleaned_data['building_funded']
					# BUG FIX: these two were assigned to locals and lost.
					newbuildabs.build_pres_cond = form.cleaned_data['build_pres_cond']
					newbuildabs.build_cons_yr = form.cleaned_data['build_cons_yr']
					newbuildabs.save()
				else:
					newbuildabs.building_name = form.cleaned_data['building_name']
					newbuildabs.no_of_floors = form.cleaned_data['no_of_floors']
					newbuildabs.stair_case_no = form.cleaned_data['stair_case_no']
					newbuildabs.stair_case_width = form.cleaned_data['stair_case_width']
					newbuildabs.building_funded = form.cleaned_data['building_funded']
					newbuildabs.stab_cer_no = form.cleaned_data['stab_cer_no']
					newbuildabs.stab_cer_date= form.cleaned_data['stab_cer_date']
					newbuildabs.stab_fm_dt= form.cleaned_data['stab_fm_dt']
					newbuildabs.stab_to_dt= form.cleaned_data['stab_to_dt']
					newbuildabs.stab_iss_auth= form.cleaned_data['stab_iss_auth']
					newbuildabs.no_stud= form.cleaned_data['no_stud']
					newbuildabs.lic_cer_no= form.cleaned_data['lic_cer_no']
					newbuildabs.lic_cer_dt= form.cleaned_data['lic_cer_dt']
					newbuildabs.lic_iss_auth= form.cleaned_data['lic_iss_auth']
					newbuildabs.san_cer_no= form.cleaned_data['san_cer_no']
					newbuildabs.san_cer_dt= form.cleaned_data['san_cer_dt']
					newbuildabs.san_iss_auth= form.cleaned_data['san_iss_auth']
					newbuildabs.fire_cer_no= form.cleaned_data['fire_cer_no']
					newbuildabs.fire_cer_dt= form.cleaned_data['fire_cer_dt']
					newbuildabs.fire_iss_auth= form.cleaned_data['fire_iss_auth']
					# BUG FIX: these two were assigned to locals and lost.
					newbuildabs.build_pres_cond = form.cleaned_data['build_pres_cond']
					newbuildabs.build_cons_yr = form.cleaned_data['build_cons_yr']
					newbuildabs.save()
				messages.success(request,'Building abstract Details Updated successfully')
				return HttpResponseRedirect('/schoolnew/buildabs_edit/')
			else:
				messages.warning(request,'Building abstract Details Not Updated')
				return render (request,'buildabs_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Buildabs_delete(View):
	"""Remove one building-abstract record, then return to the list page."""
	def get(self, request, **kwargs):
		# Guard clause: anonymous users are bounced to the login page.
		if not request.user.is_authenticated():
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
		record = Building_abs.objects.get(id=self.kwargs.get('pk'))
		record.delete()
		# The deleted instance still carries its field values in memory.
		messages.success(request, record.building_name + " - Named building has been successfully removed ")
		return HttpResponseRedirect('/schoolnew/buildabs_edit/')
class Land_edit(View):
def get(self,request,**kwargs):
if request.user.is_authenticated():
tid=self.kwargs.get('pk')
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
land_det = Land.objects.filter(school_key=basic_det.id)
form=land_form()
govchk=basic_det.sch_management
if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')):
gov_chk='Yes'
else:
gov_chk=''
return render (request,'land_edit_upd.html',locals())
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
def post(self,request,**kwargs):
if request.user.is_authenticated():
pk=self.kwargs.get('pk')
basic_det=Basicinfo.objects.get(udise_code=request.user.username)
form=land_form(request.POST,request.FILES)
if form.is_valid():
sch_key=form.cleaned_data['school_key']
newland = Land(
school_key=sch_key,
name = form.cleaned_data['name'],
own_type =form.cleaned_data['own_type'],
lease_yrs=form.cleaned_data['lease_yrs'],
lease_name=form.cleaned_data['lease_name'],
tot_area=form.cleaned_data['tot_area'],
area_mes_type=form.cleaned_data['area_mes_type'],
area_cent = form.cleaned_data['area_cent'],
area_ground = form.cleaned_data['area_ground'],
patta_no = form.cleaned_data['patta_no'],
survey_no = form.cleaned_data['survey_no'],
subdiv_no=form.cleaned_data['subdiv_no'],
land_type=form.cleaned_data['land_type'],
doc_no=form.cleaned_data['doc_no'],
doc_regn_dt=form.cleaned_data['doc_regn_dt'],
place_regn=form.cleaned_data['place_regn'],
ec_cer_no=form.cleaned_data['ec_cer_no'],
ec_cer_dt=form.cleaned_data['ec_cer_dt'],
ec_cer_fm=form.cleaned_data['ec_cer_fm'],
ec_cer_to=form.cleaned_data['ec_cer_to'],
ec_period=form.cleaned_data['ec_period'],
)
newland.save()
messages.success(request,'Land Details Added successfully')
return HttpResponseRedirect('/schoolnew/land_edit/')
else:
print form.errors
messages.warning(request,'Land Details Not Saved')
return render (request,'land_edit_upd.html',locals())
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Land_update(View):
	"""Edit one Land record of the logged-in school.

	FIXES: (1) in post(), `lease_name` was assigned to `instance` while only
	`newland` (a second fetch of the same row) is saved, so lease-name edits
	were silently lost — it is now set on `newland`; (2) the invalid-form
	branch rendered 'land_upd.html' while every other path in this class uses
	'land_edit_upd.html' — template name made consistent.
	"""
	def get(self,request,**kwargs):
		# Pre-populate the form and expose each field as a local for the template.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			land_det = Land.objects.filter(school_key=basic_det.id)
			instance=Land.objects.get(id=tid)
			sch_key = basic_det.id
			land_det= Land.objects.filter(school_key=sch_key)
			form=land_form(instance=instance)
			name = instance.name
			own_type = instance.own_type
			lease_yrs = instance.lease_yrs
			lease_name = instance.lease_name
			tot_area= instance.tot_area
			area_mes_type = instance.area_mes_type
			area_cent = instance.area_cent
			area_ground = instance.area_ground
			patta_no = instance.patta_no
			survey_no = instance.survey_no
			subdiv_no = instance.subdiv_no
			land_type = instance.land_type
			doc_no=instance.doc_no
			doc_regn_dt=instance.doc_regn_dt
			place_regn=instance.place_regn
			ec_cer_no=instance.ec_cer_no
			ec_cer_dt=instance.ec_cer_dt
			ec_cer_fm=instance.ec_cer_fm
			ec_cer_to=instance.ec_cer_to
			ec_period=instance.ec_period
			govchk=basic_det.sch_management
			# gov_chk: purely government managements (template visibility flag).
			if ((str(govchk)=='School Education Department School')|(str(govchk)=='Corporation School')|(str(govchk)=='Municipal School')|(str(govchk)=='Adi-Dravida Welfare School')|(str(govchk)=='Forest Department School')|(str(govchk)=='Differently Abled Department School')|(str(govchk)=='Kallar BC/MBC Department School')|(str(govchk)=='Rubber Board School')|(str(govchk)=='Tribal Welfare School')|(str(govchk)=='Aranilayam HR&C Department School')):
				gov_chk='Yes'
			else:
				gov_chk=''
			return render (request,'land_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
	def post(self,request,**kwargs):
		# Apply the submitted edits to the existing Land row.
		if request.user.is_authenticated():
			tid=self.kwargs.get('pk')
			basic_det=Basicinfo.objects.get(udise_code=request.user.username)
			form = land_form(request.POST,request.FILES)
			instance=Land.objects.get(id=tid)
			newland = Land.objects.get(id=tid)
			if form.is_valid():
				newland.name = form.cleaned_data['name']
				newland.own_type = form.cleaned_data['own_type']
				newland.lease_yrs = form.cleaned_data['lease_yrs']
				# BUG FIX: was assigned to `instance`, which is never saved.
				newland.lease_name = form.cleaned_data['lease_name']
				newland.tot_area = form.cleaned_data['tot_area']
				newland.area_mes_type = form.cleaned_data['area_mes_type']
				newland.area_cent = form.cleaned_data['area_cent']
				newland.area_ground = form.cleaned_data['area_ground']
				newland.patta_no = form.cleaned_data['patta_no']
				newland.survey_no = form.cleaned_data['survey_no']
				newland.subdiv_no = form.cleaned_data['subdiv_no']
				newland.land_type = form.cleaned_data['land_type']
				newland.doc_no=form.cleaned_data['doc_no']
				newland.doc_regn_dt=form.cleaned_data['doc_regn_dt']
				newland.place_regn=form.cleaned_data['place_regn']
				newland.ec_cer_no=form.cleaned_data['ec_cer_no']
				newland.ec_cer_dt=form.cleaned_data['ec_cer_dt']
				newland.ec_cer_fm=form.cleaned_data['ec_cer_fm']
				newland.ec_cer_to=form.cleaned_data['ec_cer_to']
				newland.ec_period=form.cleaned_data['ec_period']
				newland.save()
				messages.success(request,'Land Details Updated successfully')
				return HttpResponseRedirect('/schoolnew/land_edit/')
			else:
				messages.warning(request,'Land Details Not Updated')
				# Template name made consistent with get() ('land_edit_upd.html').
				return render (request,'land_edit_upd.html',locals())
		else:
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Land_delete(View):
    """Delete one Land row (pk from the URL) and return to the land list."""
    def get(self, request,**kwargs):
        # Anonymous users go to the login page with a ?next= return hop.
        if not request.user.is_authenticated():
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
        land_rec = Land.objects.get(id=self.kwargs.get('pk'))
        land_rec.delete()
        # The in-memory instance keeps its field values after delete(),
        # so the confirmation text can still be built from it.
        notice = land_rec.name+' land with patta No.'+str(land_rec.patta_no)+" - has been successfully removed "
        messages.success(request, notice)
        return HttpResponseRedirect('/schoolnew/land_edit/')
class Build_edit(View):
    """List existing building rows for the logged-in school and accept new ones.

    The username is used as the school's UDISE code to find its Basicinfo row.
    """
    def get(self,request,**kwargs):
        # Render the building add/list page; template context is locals(),
        # so every local name below is part of the template contract.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            build_det = Building.objects.filter(school_key=basic_det.id)
            room_cat_chk=Room_cate.objects.all()
            sch_key = basic_det.id
            form=building_form()
            return render (request,'build_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Validate the submitted building_form and create a new Building row.
        if request.user.is_authenticated():
            pk=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form=building_form(request.POST,request.FILES)
            if form.is_valid():
                sch_key=form.cleaned_data['school_key']
                newbuild = Building(
                    school_key=sch_key,
                    room_cat = form.cleaned_data['room_cat'],
                    room_count = form.cleaned_data['room_count'],
                    roof_type = form.cleaned_data['roof_type'],
                    builtup_area = form.cleaned_data['builtup_area'],
                    )
                newbuild.save()
                messages.success(request,'Building Details Added successfully')
                return HttpResponseRedirect('/schoolnew/build_edit/')
            else:
                # Python 2 print statement: dumps validation errors to stdout
                # for debugging before re-rendering the form.
                print form.errors
                return render (request,'build_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Build_update(View):
    """Edit an existing Building row identified by the ``pk`` URL kwarg."""
    def get(self,request,**kwargs):
        # Show the edit form pre-filled from the selected Building instance.
        # Template context is locals(): the field copies below exist so the
        # template can read them by name.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            build_det = Building.objects.filter(school_key=basic_det.id)
            instance=Building.objects.get(id=tid)
            sch_key = basic_det.id
            room_cat_chk=Room_cate.objects.all()
            form=building_form(instance=instance)
            room_cat = instance.room_cat
            room_count = instance.room_count
            roof_type = instance.roof_type
            builtup_area = instance.builtup_area
            return render (request,'build_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Apply validated form values onto the existing Building row and save.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form = building_form(request.POST,request.FILES)
            # ``instance`` is kept for the error-branch template (locals());
            # the actual update is applied to ``newbuild``.
            instance=Building.objects.get(id=tid)
            newbuild = Building.objects.get(id=tid)
            if form.is_valid():
                newbuild.room_cat = form.cleaned_data['room_cat']
                newbuild.room_count = form.cleaned_data['room_count']
                newbuild.roof_type = form.cleaned_data['roof_type']
                newbuild.builtup_area = form.cleaned_data['builtup_area']
                newbuild.save()
                messages.success(request,'Building Details Updated successfully')
                return HttpResponseRedirect('/schoolnew/build_edit/')
            else:
                messages.warning(request,'Building Details Not Updated')
                return render (request,'build_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Build_delete(View):
    """Delete one Building row (pk from the URL) and return to the building list."""
    def get(self, request,**kwargs):
        # Unauthenticated users are redirected to login with a return URL.
        if not request.user.is_authenticated():
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
        bld = Building.objects.get(id=self.kwargs.get('pk'))
        bld.delete()
        # Instance attributes survive delete(), so the message can use them.
        notice = bld.room_cat+" - "+str(bld.room_count)+" - has been successfully removed "
        messages.success(request, notice)
        return HttpResponseRedirect('/schoolnew/build_edit/')
class Sports_edit(View):
    """List existing Sports rows for the logged-in school and accept new ones."""
    def get(self,request,**kwargs):
        # Render the sports add/list page; context is locals(), so local
        # names below are read directly by the template.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            sports_det = Sports.objects.filter(school_key=basic_det.id)
            sch_key = basic_det.id
            sport_lst=Sport_list.objects.all()
            form=sports_form()
            return render (request,'sports_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Validate the submitted sports_form and create a new Sports row.
        if request.user.is_authenticated():
            pk=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form=sports_form(request.POST,request.FILES)
            if form.is_valid():
                sch_key=form.cleaned_data['school_key']
                newsports = Sports(
                    school_key=sch_key,
                    sports_name = form.cleaned_data['sports_name'],
                    play_ground = form.cleaned_data['play_ground'],
                    sports_equip = form.cleaned_data['sports_equip'],
                    sports_no_sets = form.cleaned_data['sports_no_sets'],
                    )
                newsports.save()
                messages.success(request,'Sports Details Added successfully')
                return HttpResponseRedirect('/schoolnew/sports_edit/')
            else:
                messages.warning(request,'Sports Details Not Saved')
                return render (request,'sports_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Sports_update(View):
    """Edit an existing Sports row identified by the ``pk`` URL kwarg."""
    def get(self,request,**kwargs):
        # Pre-fill the edit form; field copies below exist for the template,
        # which receives locals() as its context.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            sports_det = Sports.objects.filter(school_key=basic_det.id)
            instance=Sports.objects.get(id=tid)
            sch_key = basic_det.id
            form=sports_form(instance=instance)
            sport_lst=Sport_list.objects.all()
            sports_name = instance.sports_name
            play_ground = instance.play_ground
            sports_equip = instance.sports_equip
            sports_no_sets = instance.sports_no_sets
            return render (request,'sports_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Apply validated form values to the existing Sports row and save.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form = sports_form(request.POST,request.FILES)
            # ``instance`` feeds the error-branch template via locals();
            # the update itself is applied to ``newsports``.
            instance=Sports.objects.get(id=tid)
            newsports = Sports.objects.get(id=tid)
            if form.is_valid():
                newsports.sports_name = form.cleaned_data['sports_name']
                newsports.play_ground = form.cleaned_data['play_ground']
                newsports.sports_equip = form.cleaned_data['sports_equip']
                newsports.sports_no_sets = form.cleaned_data['sports_no_sets']
                newsports.save()
                messages.success(request,'Sports Details Updated successfully')
                return HttpResponseRedirect('/schoolnew/sports_edit/')
            else:
                messages.warning(request,'Sports Details Not Updated')
                return render (request,'sports_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Sports_delete(View):
    """Delete one Sports row (pk from the URL) and return to the sports list."""
    def get(self, request,**kwargs):
        # Guard clause: unauthenticated users go to login with ?next=.
        if not request.user.is_authenticated():
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
        sport_rec = Sports.objects.get(id=self.kwargs.get('pk'))
        sport_rec.delete()
        # Field values remain on the Python object after delete().
        notice = sport_rec.sports_name+" - has been successfully removed "
        messages.success(request, notice)
        return HttpResponseRedirect('/schoolnew/sports_edit/')
class Ict_edit(View):
    """List existing ICT equipment rows and accept new ones.

    Works for both office users (alphabetic username, matched against
    ``office_code``) and school users (numeric username, matched against
    ``udise_code``).
    """
    def get(self,request,**kwargs):
        # Render the ICT add/list page; context is locals().
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            ss=request.user.username
            # Alphabetic usernames belong to offices; numeric ones to schools.
            if ss.isalpha():
                basic_det=Basicinfo.objects.get(office_code=request.user.username)
                office_chk = 'Yes'
            else:
                office_chk = 'No'
                basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            ict_det = Ictentry.objects.filter(school_key=basic_det.id)
            ict_lst=Ict_list.objects.all()
            ict_suply=Ict_suppliers.objects.all()
            sch_key = basic_det.id
            form=ictentry_form()
            return render (request,'ict_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Validate the submitted ictentry_form and create a new Ictentry row.
        if request.user.is_authenticated():
            pk=self.kwargs.get('pk')
            ss=request.user.username
            # Same office/school username discrimination as in get().
            if ss.isalpha():
                basic_det=Basicinfo.objects.get(office_code=request.user.username)
            else:
                basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form=ictentry_form(request.POST,request.FILES)
            if form.is_valid():
                sch_key=form.cleaned_data['school_key']
                newict = Ictentry(
                    school_key=sch_key,
                    ict_type = form.cleaned_data['ict_type'],
                    working_no = form.cleaned_data['working_no'],
                    not_working_no = form.cleaned_data['not_working_no'],
                    supplied_by = form.cleaned_data['supplied_by'],
                    donor_ict = form.cleaned_data['donor_ict'],
                    )
                newict.save()
                messages.success(request,'ICT Details Added successfully')
                return HttpResponseRedirect('/schoolnew/ict_edit/')
            else:
                messages.warning(request,'ICT Details Not Saved')
                # Python 2 print statement: dump validation errors for debugging.
                print form.errors
                return render (request,'ict_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Ict_update(View):
    """Edit an existing Ictentry row identified by the ``pk`` URL kwarg.

    Like Ict_edit, supports office users (alphabetic username ->
    ``office_code`` lookup) and school users (``udise_code`` lookup).
    """
    def get(self,request,**kwargs):
        # Pre-fill the edit form; field copies exist for the template,
        # which receives locals() as its context.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            ss=request.user.username
            if ss.isalpha():
                basic_det=Basicinfo.objects.get(office_code=request.user.username)
                office_chk = 'Yes'
            else:
                office_chk = 'No'
                basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            ict_det = Ictentry.objects.filter(school_key=basic_det.id)
            ict_lst=Ict_list.objects.all()
            ict_suply=Ict_suppliers.objects.all()
            instance=Ictentry.objects.get(id=tid)
            sch_key = basic_det.id
            form=ictentry_form(instance=instance)
            ict_type = instance.ict_type
            working_no = instance.working_no
            not_working_no = instance.not_working_no
            supplied_by = instance.supplied_by
            donor_ict = instance.donor_ict
            return render (request,'ict_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Apply validated form values to the existing Ictentry row and save.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            ss=request.user.username
            if ss.isalpha():
                basic_det=Basicinfo.objects.get(office_code=request.user.username)
            else:
                basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form = ictentry_form(request.POST,request.FILES)
            # ``instance`` feeds the error-branch template via locals();
            # the update itself is applied to ``newict``.
            instance=Ictentry.objects.get(id=tid)
            newict = Ictentry.objects.get(id=tid)
            if form.is_valid():
                newict.ict_type = form.cleaned_data['ict_type']
                newict.working_no = form.cleaned_data['working_no']
                newict.not_working_no = form.cleaned_data['not_working_no']
                newict.supplied_by = form.cleaned_data['supplied_by']
                newict.donor_ict = form.cleaned_data['donor_ict']
                newict.save()
                messages.success(request,'ICT Details Updated successfully')
                return HttpResponseRedirect('/schoolnew/ict_edit/')
            else:
                messages.warning(request,'ICT Details Not Updated')
                return render (request,'ict_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Ict_delete(View):
    """Delete one Ictentry row (pk from the URL) and return to the ICT list."""
    def get(self, request,**kwargs):
        # Guard clause: unauthenticated users go to login with ?next=.
        if not request.user.is_authenticated():
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
        ict_rec = Ictentry.objects.get(id=self.kwargs.get('pk'))
        ict_rec.delete()
        # Field values remain on the Python object after delete().
        notice = ict_rec.ict_type+" - has been successfully removed "
        messages.success(request, notice)
        return HttpResponseRedirect('/schoolnew/ict_edit/')
class Passpercent_edit(View):
    """List 10th/12th pass-percentage rows per academic year and accept new ones.

    Rejects a second submission for an academic year that already has a row
    for this school.
    """
    def get(self,request,**kwargs):
        # Render the add/list page; context is locals().
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            acadyr_lst=Acadyr_mas.objects.all()
            sch_key = basic_det.id
            passper_det=Passpercent.objects.filter(school_key=sch_key)
            sch_key = basic_det.id
            form=pass_form()
            acade_det = Academicinfo.objects.get(school_key=basic_det.id)
            return render (request,'pass_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Validate and create a Passpercent row, unless one already exists
        # for the same school + academic year.
        if request.user.is_authenticated():
            pk=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form=pass_form(request.POST,request.FILES)
            if form.is_valid():
                sch_key=form.cleaned_data['school_key']
                chk_ayr=form.cleaned_data['acad_yr']
                # Duplicate-year guard: one row per academic year per school.
                if Passpercent.objects.filter(school_key=basic_det.id).filter(acad_yr=form.cleaned_data['acad_yr']).exists():
                    messages.warning(request,'This academic year information already fed. If you want to correct pl. use Update option')
                    return HttpResponseRedirect('/schoolnew/pass_edit/')
                else:
                    newpass = Passpercent(
                        school_key=basic_det,
                        acad_yr = form.cleaned_data['acad_yr'],
                        ten_b_app = form.cleaned_data['ten_b_app'],
                        ten_b_pass = form.cleaned_data['ten_b_pass'],
                        ten_g_app = form.cleaned_data['ten_g_app'],
                        ten_g_pass = form.cleaned_data['ten_g_pass'],
                        ten_app = form.cleaned_data['ten_app'],
                        ten_pass = form.cleaned_data['ten_pass'],
                        twelve_b_app = form.cleaned_data['twelve_b_app'],
                        twelve_b_pass = form.cleaned_data['twelve_b_pass'],
                        twelve_g_app = form.cleaned_data['twelve_g_app'],
                        twelve_g_pass = form.cleaned_data['twelve_g_pass'],
                        twelve_app = form.cleaned_data['twelve_app'],
                        twelve_pass = form.cleaned_data['twelve_pass'],
                        ten_b_per= form.cleaned_data['ten_b_per'],
                        ten_g_per= form.cleaned_data['ten_g_per'],
                        ten_a_per= form.cleaned_data['ten_a_per'],
                        twelve_b_per= form.cleaned_data['twelve_b_per'],
                        twelve_g_per= form.cleaned_data['twelve_g_per'],
                        twelve_a_per= form.cleaned_data['twelve_a_per'],
                        )
                    newpass.save()
                    messages.success(request,'Pass Percent Details Added successfully')
                    return HttpResponseRedirect('/schoolnew/pass_edit/')
            else:
                messages.warning(request,'Pass Percent Details Not Saved')
                return render (request,'pass_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Passpercent_update(View):
    """Edit an existing Passpercent row identified by the ``pk`` URL kwarg."""
    def get(self,request,**kwargs):
        # Pre-fill the edit form; the many field copies below exist so the
        # template (rendered with locals()) can read them by name.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            passper_det=Passpercent.objects.filter(school_key=basic_det.id)
            instance=Passpercent.objects.get(id=tid)
            sch_key = basic_det.id
            acadyr_lst=Acadyr_mas.objects.all()
            form=pass_form(instance=instance)
            acad_yr = instance.acad_yr
            ten_b_app = instance.ten_b_app
            ten_b_pass = instance.ten_b_pass
            ten_g_app = instance.ten_g_app
            ten_g_pass = instance.ten_g_pass
            ten_app = instance.ten_app
            ten_pass = instance.ten_pass
            twelve_b_app = instance.twelve_b_app
            twelve_b_pass = instance.twelve_b_pass
            twelve_g_app = instance.twelve_g_app
            twelve_g_pass = instance.twelve_g_pass
            twelve_app = instance.twelve_app
            twelve_pass = instance.twelve_pass
            ten_b_per= instance.ten_b_per
            ten_g_per= instance.ten_g_per
            ten_a_per= instance.ten_a_per
            twelve_b_per= instance.twelve_b_per
            twelve_g_per= instance.twelve_g_per
            twelve_a_per= instance.twelve_a_per
            acade_det = Academicinfo.objects.get(school_key=basic_det.id)
            return render (request,'pass_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Apply validated form values to the existing row, refusing a change
        # that would collide with another row for the same academic year.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
            form = pass_form(request.POST,request.FILES)
            # ``instance`` feeds the error-branch template via locals();
            # the update itself is applied to ``newpassper``.
            instance=Passpercent.objects.get(id=tid)
            newpassper = Passpercent.objects.get(id=tid)
            if form.is_valid():
                chk_ayr=form.cleaned_data['acad_yr']
                # Duplicate guard excludes the row being edited, so keeping
                # the same year is allowed but stealing another row's year is not.
                if Passpercent.objects.filter(school_key=basic_det.id).filter(acad_yr=form.cleaned_data['acad_yr']).exclude(id=tid).exists():
                    messages.warning(request,'This academic year information already fed. If you want to correct pl. use Update option')
                    return HttpResponseRedirect('/schoolnew/pass_edit/')
                else:
                    newpassper.acad_yr = form.cleaned_data['acad_yr']
                    newpassper.ten_b_app = form.cleaned_data['ten_b_app']
                    newpassper.ten_b_pass = form.cleaned_data['ten_b_pass']
                    newpassper.ten_g_app = form.cleaned_data['ten_g_app']
                    newpassper.ten_g_pass = form.cleaned_data['ten_g_pass']
                    newpassper.ten_app = form.cleaned_data['ten_app']
                    newpassper.ten_pass = form.cleaned_data['ten_pass']
                    newpassper.twelve_b_app = form.cleaned_data['twelve_b_app']
                    newpassper.twelve_b_pass = form.cleaned_data['twelve_b_pass']
                    newpassper.twelve_g_app = form.cleaned_data['twelve_g_app']
                    newpassper.twelve_g_pass = form.cleaned_data['twelve_g_pass']
                    newpassper.twelve_app = form.cleaned_data['twelve_app']
                    newpassper.twelve_pass = form.cleaned_data['twelve_pass']
                    newpassper.ten_b_per= form.cleaned_data['ten_b_per']
                    newpassper.ten_g_per= form.cleaned_data['ten_g_per']
                    newpassper.ten_a_per= form.cleaned_data['ten_a_per']
                    newpassper.twelve_b_per= form.cleaned_data['twelve_b_per']
                    newpassper.twelve_g_per= form.cleaned_data['twelve_g_per']
                    newpassper.twelve_a_per= form.cleaned_data['twelve_a_per']
                    newpassper.save()
                    messages.success(request,'Pass Percent Updated successfully')
                    return HttpResponseRedirect('/schoolnew/pass_edit/')
            else:
                messages.warning(request,'Pass Percent Not Updated')
                return render (request,'pass_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Passpercent_delete(View):
    """Delete one Passpercent row (pk from the URL) and return to the list."""
    def get(self, request,**kwargs):
        # Guard clause: unauthenticated users go to login with ?next=.
        if not request.user.is_authenticated():
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
        pp_rec = Passpercent.objects.get(id=self.kwargs.get('pk'))
        pp_rec.delete()
        # Field values remain on the Python object after delete().
        notice = pp_rec.acad_yr+" - Pass percent has been successfully removed "
        messages.success(request, notice)
        return HttpResponseRedirect('/schoolnew/pass_edit/')
class Off_home_page(View):
    """Office user's landing page.

    Looks up Basicinfo by ``request.user.account.id`` (office accounts), and
    exposes staff/ICT querysets to the template only when rows exist —
    locals() carries whatever was actually assigned.
    """
    def get(self,request,**kwargs):
        if request.user.is_authenticated():
            if (Basicinfo.objects.filter(udise_code=request.user.account.id).count())>0:
                basic_det=Basicinfo.objects.get(udise_code=request.user.account.id)
                if (Staff.objects.filter(school_key=basic_det.id).count())>0:
                    offnonteach_det = Staff.objects.filter(school_key=basic_det.id)
                if (Ictentry.objects.filter(school_key=basic_det.id).count())>0:
                    off_ict_det = Ictentry.objects.filter(school_key=basic_det.id)
                return render (request,'home_edit2.html',locals())
            else:
                # No Basicinfo yet: render the same page with a bare context.
                return render (request,'home_edit2.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Office_basic_info(UpdateView):
    def get(self,request,**kwargs):
        """Show the office basic-information form.

        If a Basicinfo row already exists for this account, pre-fill the
        form and copy every field into a local (the template is rendered
        with locals() and reads these names); otherwise present a blank
        form seeded with the account's id/category/office code.
        """
        if request.user.is_authenticated():
            pk=self.kwargs.get('pk')
            district_list = District.objects.all().order_by('district_name')
            if (Basicinfo.objects.filter(udise_code=request.user.account.id).count())>0:
                basic_det=Basicinfo.objects.get(udise_code=request.user.account.id)
                if (Staff.objects.filter(school_key=basic_det.id).count())>0:
                    offnonteach_det = Staff.objects.filter(school_key=basic_det.id)
                instance = Basicinfo.objects.get(udise_code=request.user.account.id)
                form=BasicForm(instance=instance)
                udise_code=instance.udise_code
                office_code = instance.office_code
                school_id = instance.school_id
                school_name = instance.school_name
                school_name_tamil = instance.school_name_tamil
                # ``word`` backs the Tamil-name widget; fall back to empty
                # string when no Tamil name has been stored.
                if instance.school_name_tamil:
                    word = instance.school_name_tamil
                else:
                    word=''
                district = instance.district
                block = instance.block
                local_body_type= instance.local_body_type
                village_panchayat =instance.village_panchayat
                vill_habitation = instance.vill_habitation
                town_panchayat = instance.town_panchayat
                town_panchayat_ward = instance.town_panchayat_ward
                municipality = instance.municipality
                municipal_ward = instance.municipal_ward
                cantonment = instance.cantonment
                cantonment_ward = instance.cantonment_ward
                township = instance.township
                township_ward = instance.township_ward
                corporation = instance.corporation
                corpn_zone = instance.corpn_zone
                corpn_ward = instance.corpn_ward
                address = instance.address
                pincode = instance.pincode
                stdcode = instance.stdcode
                landline = instance.landline
                mobile = instance.mobile
                office_email1 = instance.office_email1
                office_email2 = instance.office_email2
                sch_directorate=instance.sch_directorate
                build_status=instance.build_status
                new_build=instance.new_build
                website = instance.website
                bank_dist=instance.bank_dist
                bank = instance.bank
                branch = instance.branch
                bankaccno = instance.bankaccno
                parliament = instance.parliament
                assembly = instance.assembly
                offcat_id=instance.offcat_id
                draw_off_code=instance.draw_off_code
                # NOTE(review): offcat_id is overwritten with the account's
                # category id, shadowing the stored value above — confirm
                # this override is intentional.
                offcat_id=request.user.account.user_category_id
                return render (request,'office_basic_info.html',locals())
            else:
                # First visit: no Basicinfo row yet, show an empty form.
                form=BasicForm()
                udise_code=request.user.account.id
                offcat_id=request.user.account.user_category_id
                office_code=request.user.username
                return render (request,'office_basic_info.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
def post(self, request, **kwargs):
if request.user.is_authenticated():
pk=self.kwargs.get('pk')
if (Basicinfo.objects.filter(udise_code=request.user.account.id).count())>0:
basic_det=Basicinfo.objects.filter(udise_code=request.user.account.id)
if (Basicinfo.objects.filter(udise_code=request.user.account.id).count())>0:
instance = Basicinfo.objects.get(udise_code=request.user.account.id)
office_editsave=Basicinfo.objects.get(udise_code=request.user.account.id)
form=BasicForm(request.POST,request.FILES)
if form.is_valid():
office_editsave.school_name = form.cleaned_data['school_name'].upper()
office_editsave.school_name_tamil = request.POST['word']
office_editsave.udise_code = form.cleaned_data['udise_code']
office_editsave.school_id = form.cleaned_data['udise_code']
office_editsave.office_code = form.cleaned_data['office_code']
office_editsave.offcat_id = form.cleaned_data['offcat_id']
office_editsave.draw_off_code = form.cleaned_data['draw_off_code']
office_editsave.district = form.cleaned_data['district']
office_editsave.block = form.cleaned_data['block']
office_editsave.local_body_type= form.cleaned_data['local_body_type']
chk_local_body=Local_body.objects.get(id=request.POST['local_body_type'])
if str(chk_local_body)=='Village Panchayat':
office_editsave.village_panchayat =form.cleaned_data['village_panchayat']
office_editsave.vill_habitation = form.cleaned_data['vill_habitation']
office_editsave.town_panchayat = None
office_editsave.town_panchayat_ward = None
office_editsave.municipality = None
office_editsave.municipal_ward = None
office_editsave.cantonment = None
office_editsave.cantonment_ward = None
office_editsave.township = None
office_editsave.township_ward = None
office_editsave.corporation = None
office_editsave.corpn_zone = None
office_editsave.corpn_ward = None
elif str(chk_local_body)=="Town Panchayat":
office_editsave.village_panchayat =None
office_editsave.vill_habitation = None
office_editsave.town_panchayat = form.cleaned_data['town_panchayat']
office_editsave.town_panchayat_ward = form.cleaned_data['town_panchayat_ward']
office_editsave.municipality = None
office_editsave.municipal_ward = None
office_editsave.cantonment = None
office_editsave.cantonment_ward = None
office_editsave.township = None
office_editsave.township_ward = None
office_editsave.corporation = None
office_editsave.corpn_zone = None
office_editsave.corpn_ward = None
elif str(chk_local_body)=="Municipality":
office_editsave.village_panchayat =None
office_editsave.vill_habitation = None
office_editsave.town_panchayat = None
office_editsave.town_panchayat_ward = None
office_editsave.municipality = form.cleaned_data['municipality']
office_editsave.municipal_ward = form.cleaned_data['municipal_ward']
office_editsave.cantonment = None
office_editsave.cantonment_ward = None
office_editsave.township = None
office_editsave.township_ward = None
office_editsave.corporation = None
office_editsave.corpn_zone = None
office_editsave.corpn_ward = None
elif str(chk_local_body)=="cantonment":
office_editsave.village_panchayat =None
office_editsave.vill_habitation = None
office_editsave.town_panchayat = None
office_editsave.town_panchayat_ward = None
office_editsave.municipality = None
office_editsave.municipal_ward = None
office_editsave.cantonment = form.cleaned_data['cantonment']
office_editsave.cantonment_ward = form.cleaned_data['cantonment_ward']
office_editsave.township = None
office_editsave.township_ward = None
office_editsave.corporation = None
office_editsave.corpn_zone = None
office_editsave.corpn_ward = None
elif str(chk_local_body)=="Township":
office_editsave.village_panchayat =None
office_editsave.vill_habitation = None
office_editsave.town_panchayat = None
office_editsave.town_panchayat_ward = None
office_editsave.municipality = None
office_editsave.municipal_ward = None
office_editsave.cantonment = None
office_editsave.cantonment_ward = None
office_editsave.township = form.cleaned_data['township']
office_editsave.township_ward = form.cleaned_data['township_ward']
office_editsave.corporation = None
office_editsave.corpn_zone = None
office_editsave.corpn_ward = None
elif str(chk_local_body)=="Corporation":
office_editsave.village_panchayat =None
office_editsave.vill_habitation = None
office_editsave.town_panchayat = None
office_editsave.town_panchayat_ward = None
office_editsave.municipality = None
office_editsave.municipal_ward = None
office_editsave.cantonment = None
office_editsave.cantonment_ward = None
office_editsave.township = None
office_editsave.township_ward = None
office_editsave.corporation = form.cleaned_data['corporation']
office_editsave.corpn_zone = form.cleaned_data['corpn_zone']
office_editsave.corpn_ward = form.cleaned_data['corpn_ward']
office_editsave.address = form.cleaned_data['address']
office_editsave.pincode = form.cleaned_data['pincode']
office_editsave.stdcode = form.cleaned_data['stdcode']
office_editsave.landline = form.cleaned_data['landline']<|fim▁hole|> office_editsave.website = form.cleaned_data['website']
office_editsave.build_status = form.cleaned_data['build_status']
office_editsave.new_build = form.cleaned_data['new_build']
office_editsave.bank_dist=form.cleaned_data['bank_dist']
office_editsave.bank = form.cleaned_data['bank']
office_editsave.branch = form.cleaned_data['branch']
office_editsave.bankaccno = form.cleaned_data['bankaccno']
office_editsave.parliament = form.cleaned_data['parliament']
office_editsave.assembly = form.cleaned_data['assembly']
office_editsave.save()
messages.success(request,'Office Basic Information Updated successfully')
return HttpResponseRedirect('/schoolnew/office_registration')
else:
messages.warning(request,'Office Basic Information Not Updated')
return render (request,'basic2.html')
else:
form = BasicForm(request.POST,request.FILES)
if form.is_valid():
officeinfo = Basicinfo(
school_id=form.cleaned_data['udise_code'],
school_name = form.cleaned_data['school_name'],
school_name_tamil = request.POST['word'],
udise_code = form.cleaned_data['udise_code'],
office_code = form.cleaned_data['office_code'],
district = form.cleaned_data['district'],
block = form.cleaned_data['block'],
local_body_type= form.cleaned_data['local_body_type'],
village_panchayat =form.cleaned_data['village_panchayat'],
vill_habitation = form.cleaned_data['vill_habitation'],
town_panchayat = form.cleaned_data['town_panchayat'],
town_panchayat_ward = form.cleaned_data['town_panchayat_ward'],
municipality = form.cleaned_data['municipality'],
municipal_ward = form.cleaned_data['municipal_ward'],
cantonment = form.cleaned_data['cantonment'],
cantonment_ward = form.cleaned_data['cantonment_ward'],
township = form.cleaned_data['township'],
township_ward = form.cleaned_data['township_ward'],
corporation = form.cleaned_data['corporation'],
corpn_zone = form.cleaned_data['corpn_zone'],
corpn_ward = form.cleaned_data['corpn_ward'],
address = form.cleaned_data['address'],
pincode = form.cleaned_data['pincode'],
stdcode = form.cleaned_data['stdcode'],
landline = form.cleaned_data['landline'],
mobile = form.cleaned_data['mobile'],
office_email1 = form.cleaned_data['office_email1'],
office_email2 = form.cleaned_data['office_email2'],
sch_directorate = form.cleaned_data['sch_directorate'],
website = form.cleaned_data['website'],
build_status = form.cleaned_data['build_status'],
new_build=form.cleaned_data['new_build'],
bank_dist=form.cleaned_data['bank_dist'],
bank = form.cleaned_data['bank'],
branch = form.cleaned_data['branch'],
bankaccno = form.cleaned_data['bankaccno'],
parliament = form.cleaned_data['parliament'],
assembly = form.cleaned_data['assembly'],
offcat_id = form.cleaned_data['offcat_id'],
draw_off_code = form.cleaned_data['draw_off_code'],
)
officeinfo.save()
return HttpResponseRedirect('/schoolnew/office_registration')
else:
return render (request,'basic2.html')
else:
return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Offnonteaching_edit(View):
    """List/add sanctioned non-teaching posts for an office (staff_cat=2)."""
    def get(self,request,**kwargs):
        # Render the post add/list page; context is locals().
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            if (Basicinfo.objects.filter(udise_code=request.user.account.id).count())>0:
                basic_det=Basicinfo.objects.get(udise_code=request.user.account.id)
                sch_key = basic_det.id
                if (Staff.objects.filter(school_key=basic_det.id).count())>0:
                    post_det = Staff.objects.filter(Q(school_key=sch_key) & Q(staff_cat=2))
                form=staff_form()
                # Designation choices depend on the office category id: one
                # group of categories excludes HOD-level designations
                # (user_level must be null), the other sees all of them.
                if ((basic_det.offcat_id==2)|(basic_det.offcat_id==3)|(basic_det.offcat_id==5)|(basic_det.offcat_id==7)|(basic_det.offcat_id==6)|(basic_det.offcat_id==8)|(basic_det.offcat_id==18)|(basic_det.offcat_id==20)|(basic_det.offcat_id==21)|(basic_det.offcat_id==22)|(basic_det.offcat_id==23)|(basic_det.offcat_id==24)|(basic_det.offcat_id==25)|(basic_det.offcat_id==26)|(basic_det.offcat_id==27)):
                    desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') | Q(user_cate='OFFICE') & Q(user_level__isnull=True))
                elif ((basic_det.offcat_id==4)|(basic_det.offcat_id==9)|(basic_det.offcat_id==10)|(basic_det.offcat_id==11)|(basic_det.offcat_id==19)):
                    desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') | Q(user_cate='OFFICE'))
                else:
                    desig_det=''
                pg_head='Office Non-Teaching'
            return render (request,'post_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Create a new sanctioned-post row. Temporary-post GO details are
        # only kept when the post mode is not 'Permanent'.
        if request.user.is_authenticated():
            pk=self.kwargs.get('pk')
            form=staff_form(request.POST,request.FILES)
            if form.is_valid():
                if request.POST['post_mode']=='Permanent':
                    tpost_GO_pd = ''
                    ttemgofm_dt = None
                    ttemgoto_dt = None
                else:
                    tpost_GO_pd = form.cleaned_data['post_GO_pd']
                    ttemgofm_dt = form.cleaned_data['temgofm_dt']
                    ttemgoto_dt = form.cleaned_data['temgoto_dt']
                offntpost = Staff(
                    school_key = form.cleaned_data['school_key'],
                    post_name = form.cleaned_data['post_name'],
                    post_sub = form.cleaned_data['post_sub'],
                    post_sanc = form.cleaned_data['post_sanc'],
                    post_mode = form.cleaned_data['post_mode'],
                    post_GO = form.cleaned_data['post_GO'],
                    post_GO_dt = form.cleaned_data['post_GO_dt'],
                    # New posts start with nobody in place: filled=0 and
                    # vacancies equal to the sanctioned strength.
                    post_filled = 0,
                    post_vac = form.cleaned_data['post_sanc'],
                    post_GO_pd = tpost_GO_pd,
                    temgofm_dt = ttemgofm_dt,
                    temgoto_dt = ttemgoto_dt,
                    staff_cat = 2,
                    )
                offntpost.save()
                messages.success(request,'Office Non-Teaching Staff details Added successfully')
                return HttpResponseRedirect('/schoolnew/offnonteaching_edit/')
            else:
                messages.warning(request,'Office Non-Teaching Staff details Not Updated')
                return render (request,'post_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Offntpost_update(View):
    """Edit an existing office non-teaching post row (pk from the URL)."""
    def get(self,request,**kwargs):
        # Pre-fill the edit form; field copies below are read by the
        # template, which receives locals() as its context.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.account.id)
            instance=Staff.objects.get(id=tid)
            sch_key = basic_det.id
            nonteach_post = Staff.objects.filter(school_key=sch_key)
            nonteach_det_dt=Staff.objects.get(id=tid)
            form=staff_form(instance=instance)
            post_name= instance.post_name
            post_sub= instance.post_sub
            post_sanc=instance.post_sanc
            post_mode= instance.post_mode
            post_GO= instance.post_GO
            go_dt= instance.post_GO_dt
            post_GO_dt= instance.post_GO_dt
            post_GO_pd= instance.post_GO_pd
            post_filled = instance.post_filled
            post_vac = instance.post_vac
            pg_head='Office Non-Teaching'
            # Re-format the GO date as YYYY-MM-DD for the date input widget.
            if nonteach_det_dt.post_GO_dt:
                go_dt=nonteach_det_dt.post_GO_dt.strftime('%Y-%m-%d')
            # Designation choices depend on the office category id; the first
            # group excludes HOD-level designations.
            if ((basic_det.offcat_id==2)|(basic_det.offcat_id==3)|(basic_det.offcat_id==5)|(basic_det.offcat_id==7)|(basic_det.offcat_id==6)|(basic_det.offcat_id==8)|(basic_det.offcat_id==18)|(basic_det.offcat_id==20)|(basic_det.offcat_id==21)|(basic_det.offcat_id==22)|(basic_det.offcat_id==23)|(basic_det.offcat_id==24)|(basic_det.offcat_id==25)|(basic_det.offcat_id==26)|(basic_det.offcat_id==27)):
                desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') | Q(user_cate='OFFICE')).exclude(user_level='HOD')
            elif ((basic_det.offcat_id==4)|(basic_det.offcat_id==9)|(basic_det.offcat_id==10)|(basic_det.offcat_id==11)|(basic_det.offcat_id==11)):
                desig_det= User_desig.objects.filter(Q(user_cate='SCHOOL&OFFICE') | Q(user_cate='OFFICE'))
            else:
                desig_det=''
            sch_key = basic_det.id
            return render (request,'post_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
    def post(self,request,**kwargs):
        # Apply validated form values to the existing Staff row and save.
        if request.user.is_authenticated():
            tid=self.kwargs.get('pk')
            basic_det=Basicinfo.objects.get(udise_code=request.user.account.id)
            form = staff_form(request.POST,request.FILES)
            # ``instance`` feeds the error-branch template via locals();
            # the update itself is applied to ``newnonteachpost``.
            instance=Staff.objects.get(id=tid)
            newnonteachpost = Staff.objects.get(id=tid)
            if form.is_valid():
                newnonteachpost.post_name = form.cleaned_data['post_name']
                newnonteachpost.post_sanc = form.cleaned_data['post_sanc']
                newnonteachpost.post_mode = form.cleaned_data['post_mode']
                newnonteachpost.post_GO = form.cleaned_data['post_GO']
                newnonteachpost.post_GO_dt = form.cleaned_data['post_GO_dt']
                newnonteachpost.post_GO_pd = form.cleaned_data['post_GO_pd']
                newnonteachpost.post_sub = form.cleaned_data['post_sub']
                newnonteachpost.staff_cat = 2
                # Vacancies are recomputed from the new sanctioned strength
                # minus the count already filled.
                newnonteachpost.post_vac = (form.cleaned_data['post_sanc']-newnonteachpost.post_filled)
                # Permanent posts carry no temporary-GO validity window.
                if newnonteachpost.post_mode=='Permanent':
                    newnonteachpost.temgofm_dt = None
                    newnonteachpost.temgoto_dt = None
                else:
                    newnonteachpost.temgofm_dt = form.cleaned_data['temgofm_dt']
                    newnonteachpost.temgoto_dt = form.cleaned_data['temgoto_dt']
                newnonteachpost.save()
                messages.success(request,'Non-Teaching Post Details Updated successfully')
                return HttpResponseRedirect('/schoolnew/offnonteaching_edit/')
            else:
                messages.warning(request,'Office Non-Teaching Staff details Not Updated')
                # Python 2 print statement: dump validation errors for debugging.
                print form.errors
                # NOTE(review): this error branch renders
                # 'off_staff_edit_upd.html' while get() uses
                # 'post_edit_upd.html' — confirm the template name is intended.
                return render (request,'off_staff_edit_upd.html',locals())
        else:
            return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
class Offnonteaching_delete(View):
	"""Delete one office non-teaching post (``Staff`` row) and redirect
	back to the listing page with a success message.  Anonymous users are
	redirected to the login page instead."""
	def get(self, request,**kwargs):
		# Guard clause: only authenticated users may delete posts.
		if not request.user.is_authenticated():
			return redirect('%s?next=%s' % (settings.LOGIN_URL, request.path))
		staff_post = Staff.objects.get(id=self.kwargs.get('pk'))
		staff_post.delete()
		# The in-memory instance keeps its attributes after delete(), so the
		# post name is still available for the confirmation message.
		messages.success(request, staff_post.post_name + " - Posts has been successfully removed ")
		return HttpResponseRedirect('/schoolnew/offnonteaching_edit/')
class Sch_Blk_abs(View):
	"""Block-level abstract report.

	Builds per-management / per-department completion counts for every data
	entry module (basic info, academic, infrastructure, class/section,
	teaching and non-teaching posts) restricted to one block, and renders
	them with locals() — so every local name below is part of the template
	contract for 'block_abs.html' and must not be renamed.
	chk_dept values 1/2/3 and chk_manage / manage_cate_id values 1/2/3 are
	department and management categories — presumably DSE/DEE/DMS and three
	management types; TODO confirm against the Manage_cate table.
	"""
	def get(self,request,**kwargs):
		blk_id=self.kwargs['pk']
		deptlst=Manage_cate.objects.all().order_by('id')
		# blkid=Basicinfo.objects.get(udise_code=int(request.user.username))
		# Total registered schools per (management, department), plus
		# department subtotals and a grand total.
		# NOTE(review): these three filter on `block1` while everything
		# below filters on `block_id` — confirm both fields identify the
		# same block.
		totsch=Basicinfo.objects.filter(block1=blk_id,chk_dept__in=[1,2,3],chk_manage__in=[1,2,3]).values('chk_manage','chk_dept').annotate(mang_schtot=Count('chk_dept')).order_by('chk_dept','chk_manage')
		totschst=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3],block1=blk_id).values('chk_dept').annotate(schmantot=Count('chk_dept'))
		totschgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3],block1=blk_id).count()
		# bi* — basic-info completion counts.
		bitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],block_id=blk_id).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept')).order_by('chk_dept','chk_manage')
		bischst=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],block_id=blk_id).values('chk_dept').annotate(bimantot=Count('chk_dept'))
		bigrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],block_id=blk_id).count()
		# ai* — schools that have entered academic info (counted through the
		# academicinfo reverse relation).
		aitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],block_id=blk_id).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept'),acad_schcoun=Count('academicinfo')).order_by('chk_dept','chk_manage')
		aistot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],block_id=blk_id).values('chk_dept').annotate(acad_schtot=Count('academicinfo')).order_by('chk_dept')
		aigrtot=Basicinfo.objects.filter(block_id=blk_id).values('academicinfo__school_key').count()
		# ii* — schools that have entered infrastructure details (infradet).
		iitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],block_id=blk_id).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept'),infra_schcoun=Count('infradet')).order_by('chk_dept','chk_manage')
		iistot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],block_id=blk_id).values('chk_dept').annotate(infra_schtot=Count('infradet')).order_by('chk_dept')
		iigrtot=Basicinfo.objects.filter(block_id=blk_id).values('infradet__school_key').count()
		# cs* — schools with class/section entries; distinct school keys so a
		# school with many sections is counted once.
		cstotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],block_id=blk_id).values('chk_manage','chk_dept').annotate(cs_schtot=Count('chk_dept'),cs_schcoun=Count('class_section__school_key',distinct = True)).order_by('chk_dept','chk_manage')
		csstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],block_id=blk_id).values('chk_dept').annotate(cs_subtot=Count('class_section__school_key',distinct = True)).order_by('chk_dept')
		csgrtot=Basicinfo.objects.filter(block_id=blk_id).values('class_section__school_key').distinct().count()
		# tp*/tpf* — teaching posts (staff_cat='1'): sanctioned and filled
		# totals via Sum over the staff relation.
		tptotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',block_id=blk_id).values('chk_manage','chk_dept').annotate(tp_schtot=Count('chk_dept'),tp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		tpstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1',block_id=blk_id).values('chk_dept').annotate(tp_schtot=Sum('staff__post_sanc')).order_by('chk_dept')
		tpgrtot=Basicinfo.objects.filter(staff__staff_cat='1',block_id=blk_id).values('staff__post_sanc').aggregate(Sum('staff__post_sanc'))
		tpftotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0',block_id=blk_id).values('chk_manage','chk_dept').annotate(tpf_schtot=Count('chk_dept'),tpf_schcoun=Sum('staff__post_filled')).order_by('chk_dept','chk_manage')
		tpfstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0',block_id=blk_id).values('chk_dept').annotate(tpf_schtot=Sum('staff__post_filled')).order_by('chk_dept')
		tpfgrtot=Basicinfo.objects.filter(block_id=blk_id,staff__staff_cat='1',staff__post_filled__gt='0').values('staff__post_filled').aggregate(Sum('staff__post_filled'))
		# ntp*/ntpf* — non-teaching posts (staff_cat='2'); note these are NOT
		# restricted by chk_dept/manage_cate in the detail rows.
		# ntptotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3,manage_cate_id__in=[1,2,3],staff__staff_cat='2').values('chk_manage','chk_dept').annotate(ntp_schtot=Count('chk_dept'),ntp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		ntptotsch=Basicinfo.objects.filter(staff__staff_cat='2',block_id=blk_id).values('chk_manage','chk_dept').annotate(ntp_schtot=Count('chk_dept'),ntp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		ntpstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2',block_id=blk_id).values('chk_dept').annotate(ntp_schtot=Sum('staff__post_sanc')).order_by('chk_dept')
		# ntpgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2').values('staff__post_sanc').annotate(Sum('staff__post_sanc'))
		# ntpgrtot=Staff.objects.filter(staff_cat='2',block_id=blk_id).aggregate(Sum('post_sanc'))
		ntpgrtot=Basicinfo.objects.filter(block_id=blk_id,staff__staff_cat='2').aggregate(Sum('staff__post_sanc'))
		# ntpftotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('chk_manage','chk_dept').annotate(ntpf_schtot=Count('chk_dept'),ntpf_schcoun=Count('staff')).order_by('chk_dept','chk_manage')
		ntpftotsch=Basicinfo.objects.filter(staff__staff_cat='2',staff__post_filled__gt='0',block_id=blk_id).values('chk_manage','chk_dept').annotate(ntpf_schtot=Count('chk_dept'),ntpf_schcoun=Sum('staff__post_filled')).order_by('chk_dept','chk_manage')
		ntpfstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0',block_id=blk_id).values('chk_dept').annotate(ntpf_schtot=Sum('staff__post_filled')).order_by('chk_dept')
		# ntpfgrtot=Staff.objects.filter(staff_cat='2',post_filled__gt='0',block_id=blk_id).aggregate(Sum('post_filled'))
		ntpfgrtot=Basicinfo.objects.filter(block_id=blk_id,staff__staff_cat='2',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
		return render(request,'block_abs.html',locals())
class Sch_Dist_abs(View):
	"""District-level abstract report.

	Same structure as the block-level report but filtered by district:
	per-management / per-department completion counts for each data entry
	module, rendered with locals() into 'dist_abs.html' — so every local
	name below is part of the template contract and must not be renamed.
	"""
	def get(self,request,**kwargs):
		d_id=self.kwargs['pk']
		deptlst=Manage_cate.objects.all().order_by('id')
		# NOTE(review): assumes the login username is a numeric UDISE code —
		# int() raises ValueError otherwise; confirm all callers guarantee
		# this.  `blkid` is only used by the template (via locals()), if at
		# all.
		blkid=Basicinfo.objects.get(udise_code=int(request.user.username))
		# Total registered schools per (management, department).
		# NOTE(review): these three filter on `district1` while everything
		# below filters on `district_id` — confirm both fields identify the
		# same district.
		totsch=Basicinfo.objects.filter(district1=d_id,chk_dept__in=[1,2,3],chk_manage__in=[1,2,3]).values('chk_manage','chk_dept').annotate(mang_schtot=Count('chk_dept')).order_by('chk_dept','chk_manage')
		totschst=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3],district1=d_id).values('chk_dept').annotate(schmantot=Count('chk_dept'))
		totschgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3],district1=d_id).count()
		# bi* — basic-info completion counts.
		bitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],district_id=d_id).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept')).order_by('chk_dept','chk_manage')
		bischst=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],district_id=d_id).values('chk_dept').annotate(bimantot=Count('chk_dept'))
		bigrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district_id=d_id).count()
		# ai* — academic-info completion counts (academicinfo relation).
		aitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],district_id=d_id).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept'),acad_schcoun=Count('academicinfo')).order_by('chk_dept','chk_manage')
		aistot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district_id=d_id).values('chk_dept').annotate(acad_schtot=Count('academicinfo')).order_by('chk_dept')
		aigrtot=Basicinfo.objects.filter(district_id=d_id).values('academicinfo__school_key').count()
		# ii* — infrastructure-details completion counts (infradet relation).
		iitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],district_id=d_id).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept'),infra_schcoun=Count('infradet')).order_by('chk_dept','chk_manage')
		iistot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district_id=d_id).values('chk_dept').annotate(infra_schtot=Count('infradet')).order_by('chk_dept')
		iigrtot=Basicinfo.objects.filter(district_id=d_id).values('infradet__school_key').count()
		# cs* — class/section entry counts; distinct school keys so a school
		# with many sections counts once.
		cstotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],district_id=d_id).values('chk_manage','chk_dept').annotate(cs_schtot=Count('chk_dept'),cs_schcoun=Count('class_section__school_key',distinct = True)).order_by('chk_dept','chk_manage')
		csstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district_id=d_id).values('chk_dept').annotate(cs_subtot=Count('class_section__school_key',distinct = True)).order_by('chk_dept')
		csgrtot=Basicinfo.objects.filter(district_id=d_id).values('class_section__school_key').distinct().count()
		# tp*/tpf* — teaching posts (staff_cat='1'): sanctioned and filled.
		tptotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',district_id=d_id).values('chk_manage','chk_dept').annotate(tp_schtot=Count('chk_dept'),tp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		tpstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1',district_id=d_id).values('chk_dept').annotate(tp_schtot=Sum('staff__post_sanc')).order_by('chk_dept')
		tpgrtot=Basicinfo.objects.filter(staff__staff_cat='1',district_id=d_id).values('staff__post_sanc').aggregate(Sum('staff__post_sanc'))
		tpftotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0',district_id=d_id).values('chk_manage','chk_dept').annotate(tpf_schtot=Count('chk_dept'),tpf_schcoun=Sum('staff__post_filled')).order_by('chk_dept','chk_manage')
		tpfstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0',district_id=d_id).values('chk_dept').annotate(tpf_schtot=Sum('staff__post_filled')).order_by('chk_dept')
		tpfgrtot=Basicinfo.objects.filter(district_id=d_id,staff__staff_cat='1',staff__post_filled__gt='0').values('staff__post_filled').aggregate(Sum('staff__post_filled'))
		# ntp*/ntpf* — non-teaching posts (staff_cat='2'); detail rows are not
		# restricted by chk_dept/manage_cate, mirroring the block report.
		# ntptotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3,manage_cate_id__in=[1,2,3],staff__staff_cat='2').values('chk_manage','chk_dept').annotate(ntp_schtot=Count('chk_dept'),ntp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		ntptotsch=Basicinfo.objects.filter(staff__staff_cat='2',district_id=d_id).values('chk_manage','chk_dept').annotate(ntp_schtot=Count('chk_dept'),ntp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		ntpstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2',district_id=d_id).values('chk_dept').annotate(ntp_schtot=Sum('staff__post_sanc')).order_by('chk_dept')
		# ntpgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2').values('staff__post_sanc').annotate(Sum('staff__post_sanc'))
		# ntpgrtot=Staff.objects.filter(staff_cat='2',district_id=d_id).aggregate(Sum('post_sanc'))
		ntpgrtot=Basicinfo.objects.filter(district_id=d_id,staff__staff_cat='2').aggregate(Sum('staff__post_sanc'))
		# ntpftotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('chk_manage','chk_dept').annotate(ntpf_schtot=Count('chk_dept'),ntpf_schcoun=Count('staff')).order_by('chk_dept','chk_manage')
		ntpftotsch=Basicinfo.objects.filter(staff__staff_cat='2',staff__post_filled__gt='0',district_id=d_id).values('chk_manage','chk_dept').annotate(ntpf_schtot=Count('chk_dept'),ntpf_schcoun=Sum('staff__post_filled')).order_by('chk_dept','chk_manage')
		ntpfstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0',district_id=d_id).values('chk_dept').annotate(ntpf_schtot=Sum('staff__post_filled')).order_by('chk_dept')
		# ntpfgrtot=Staff.objects.filter(staff_cat='2',post_filled__gt='0',district_id=d_id).aggregate(Sum('post_filled'))
		ntpfgrtot=Basicinfo.objects.filter(district_id=d_id,staff__staff_cat='2',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
		return render(request,'dist_abs.html',locals())
class Sch_State_abs(View):
	"""State-level abstract report.

	Same structure as the block/district reports but with no geographic
	filter: per-management / per-department completion counts for each data
	entry module, rendered with locals() into 'state_abs.html' — so every
	local name below is part of the template contract and must not be
	renamed.  Grand totals here query the child tables (Academicinfo,
	Infradet, Class_section, Staff) directly instead of going through
	Basicinfo.
	"""
	def get(self,request,**kwargs):
		deptlst=Manage_cate.objects.all().order_by('id')
		# Total registered schools per (management, department), department
		# subtotals, and grand total.
		totsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3]).values('chk_manage','chk_dept').annotate(mang_schtot=Count('chk_dept')).order_by('chk_dept','chk_manage')
		totschst=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3]).values('chk_dept').annotate(schmantot=Count('chk_dept'))
		totschgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],chk_manage__in=[1,2,3]).count()
		# bi* — basic-info completion counts.
		bitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3]).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept')).order_by('chk_dept','chk_manage')
		bischst=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3]).values('chk_dept').annotate(bimantot=Count('chk_dept'))
		bigrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).count()
		# ai* — academic-info completion counts.
		aitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3]).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept'),acad_schcoun=Count('academicinfo')).order_by('chk_dept','chk_manage')
		aistot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(acad_schtot=Count('academicinfo')).order_by('chk_dept')
		aigrtot=Academicinfo.objects.all().count()
		# ii* — infrastructure-details completion counts.
		iitotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3]).values('chk_manage','chk_dept').annotate(bi_schtot=Count('chk_dept'),infra_schcoun=Count('infradet')).order_by('chk_dept','chk_manage')
		iistot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(infra_schtot=Count('infradet')).order_by('chk_dept')
		iigrtot=Infradet.objects.all().count()
		# cs* — class/section entry counts (distinct schools).
		cstotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3]).values('chk_manage','chk_dept').annotate(cs_schtot=Count('chk_dept'),cs_schcoun=Count('class_section__school_key',distinct = True)).order_by('chk_dept','chk_manage')
		csstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(cs_subtot=Count('class_section__school_key',distinct = True)).order_by('chk_dept')
		csgrtot=Class_section.objects.all().values('school_key').distinct().count()
		# tp*/tpf* — teaching posts (staff_cat='1'): sanctioned and filled.
		tptotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('chk_manage','chk_dept').annotate(tp_schtot=Count('chk_dept'),tp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		tpstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1').values('chk_dept').annotate(tp_schtot=Sum('staff__post_sanc')).order_by('chk_dept')
		tpgrtot=Staff.objects.filter(staff_cat='1').aggregate(Sum('post_sanc'))
		tpftotsch=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('chk_manage','chk_dept').annotate(tpf_schtot=Count('chk_dept'),tpf_schcoun=Sum('staff__post_filled')).order_by('chk_dept','chk_manage')
		tpfstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('chk_dept').annotate(tpf_schtot=Sum('staff__post_filled')).order_by('chk_dept')
		tpfgrtot=Staff.objects.filter(staff_cat='1',post_filled__gt='0').aggregate(Sum('post_filled'))
		# ntp*/ntpf* — non-teaching posts (staff_cat='2'); detail rows are not
		# restricted by chk_dept/manage_cate, matching the other reports.
		ntptotsch=Basicinfo.objects.filter(staff__staff_cat='2').values('chk_manage','chk_dept').annotate(ntp_schtot=Count('chk_dept'),ntp_schcoun=Sum('staff__post_sanc')).order_by('chk_dept','chk_manage')
		ntpstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2').values('chk_dept').annotate(ntp_schtot=Sum('staff__post_sanc')).order_by('chk_dept')
		ntpgrtot=Staff.objects.filter(staff_cat='2').aggregate(Sum('post_sanc'))
		ntpftotsch=Basicinfo.objects.filter(staff__staff_cat='2',staff__post_filled__gt='0').values('chk_manage','chk_dept').annotate(ntpf_schtot=Count('chk_dept'),ntpf_schcoun=Sum('staff__post_filled')).order_by('chk_dept','chk_manage')
		ntpfstot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('chk_dept').annotate(ntpf_schtot=Sum('staff__post_filled')).order_by('chk_dept')
		ntpfgrtot=Staff.objects.filter(staff_cat='2',post_filled__gt='0').aggregate(Sum('post_filled'))
		return render(request,'state_abs.html',locals())
class Sch_sr_bi(View):
	"""District-wise status report: basic-info registration.

	Counts registered schools per district, broken down by department
	(chk_dept) and by management category (manage_cate_id), then repeats
	the management breakdown for each department individually.  Rendered
	with locals() into 'drep_bi.html' — every local name below is part of
	the template contract and must not be renamed.
	Prefixes: dse/dee/dms correspond to chk_dept 1/2/3 — presumably the
	three directorates (School / Elementary / Matriculation education);
	TODO confirm against the department table.
	"""
	def get(self,request,**kwargs):
		dl=District.objects.all().order_by('id')
		# Per-district school counts by department, district subtotals,
		# department grand totals, and overall total.
		schlst=Basicinfo.objects.all().values('chk_dept','district').annotate(disttot=Count('district')).order_by('district')
		disttot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('district').annotate(schsubtot=Count('chk_dept'))
		schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(schgtot=Count('chk_dept'))
		schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).count()
		# Same breakdown by management category (all departments combined).
		mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('manage_cate_id','district').annotate(mdet=Count('district')).order_by('district')
		mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('district').annotate(mantot=Count('district'))
		mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(mangtot=Count('manage_cate'))
		mantotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).count()
		# Management breakdown restricted to department 1 (dse*).
		dsemandet=Basicinfo.objects.filter(chk_dept__in=[1]).values('manage_cate_id','district').annotate(dsemdet=Count('district')).order_by('district')
		dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('district').annotate(dsemantot=Count('district'))
		dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dsemangtot=Count('manage_cate'))
		dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).count()
		# Management breakdown restricted to department 2 (dee*).
		deemandet=Basicinfo.objects.filter(chk_dept__in=[2]).values('manage_cate_id','district').annotate(deemdet=Count('district')).order_by('district')
		deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('district').annotate(deemantot=Count('district'))
		deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(deemangtot=Count('manage_cate'))
		deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).count()
		# Management breakdown restricted to department 3 (dms*).
		dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3]).values('manage_cate_id','district').annotate(dmsmdet=Count('district')).order_by('district')
		dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('district').annotate(dmsmantot=Count('district'))
		dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dmsmangtot=Count('manage_cate'))
		dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).count()
		return render(request,'drep_bi.html',locals())
class Sch_sr_ai(View):
	"""District-wise status report: academic-info completion.

	Same layout as Sch_sr_bi, but the per-cell metric counts schools that
	have an Academicinfo record (via the academicinfo reverse relation)
	rather than all registered schools.  Rendered with locals() into
	'drep_ai.html' — every local name below is part of the template
	contract and must not be renamed.
	"""
	def get(self,request,**kwargs):
		dl=District.objects.all().order_by('id')
		# Registered-school baseline, identical to Sch_sr_bi, used as the
		# denominator columns of the report.
		schlst=Basicinfo.objects.all().values('chk_dept','district').annotate(disttot=Count('district')).order_by('district')
		disttot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('district').annotate(schsubtot=Count('chk_dept'))
		schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(schgtot=Count('chk_dept'))
		schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).count()
		# Academic-info completion by management category (all departments).
		mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('manage_cate_id','district').annotate(mdet=Count('academicinfo__school_key')).order_by('district')
		mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('district').annotate(mantot=Count('academicinfo__school_key'))
		mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(mangtot=Count('academicinfo__school_key'))
		mantotal=Academicinfo.objects.all().count()
		# Completion restricted to department 1 (dse*).
		dsemandet=Basicinfo.objects.filter(chk_dept__in=[1]).values('manage_cate_id','district').annotate(dsemdet=Count('academicinfo__school_key')).order_by('district')
		dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('district').annotate(dsemantot=Count('academicinfo__school_key'))
		dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dsemangtot=Count('academicinfo__school_key'))
		dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('academicinfo__school_key').count()
		# Completion restricted to department 2 (dee*).
		deemandet=Basicinfo.objects.filter(chk_dept__in=[2]).values('manage_cate_id','district').annotate(deemdet=Count('academicinfo__school_key')).order_by('district')
		deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('district').annotate(deemantot=Count('academicinfo__school_key'))
		deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(deemangtot=Count('academicinfo__school_key'))
		deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('academicinfo__school_key').count()
		# Completion restricted to department 3 (dms*).
		dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3]).values('manage_cate_id','district').annotate(dmsmdet=Count('academicinfo__school_key')).order_by('district')
		dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('district').annotate(dmsmantot=Count('academicinfo__school_key'))
		dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dmsmangtot=Count('academicinfo__school_key'))
		dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('academicinfo__school_key').count()
		return render(request,'drep_ai.html',locals())
class Sch_sr_ii(View):
	"""District-wise status report: infrastructure-details completion.

	Same layout as Sch_sr_ai but counts Infradet records (via the infradet
	reverse relation) instead of Academicinfo.  Rendered with locals() into
	'drep_ii.html' — every local name below is part of the template
	contract and must not be renamed.
	"""
	def get(self,request,**kwargs):
		dl=District.objects.all().order_by('id')
		# Registered-school baseline (denominator columns).
		schlst=Basicinfo.objects.all().values('chk_dept','district').annotate(disttot=Count('district')).order_by('district')
		disttot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('district').annotate(schsubtot=Count('chk_dept'))
		schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(schgtot=Count('chk_dept'))
		schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).count()
		# Infrastructure completion by management category (all departments).
		mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('manage_cate_id','district').annotate(mdet=Count('infradet__school_key')).order_by('district')
		mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('district').annotate(mantot=Count('infradet__school_key'))
		mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(mangtot=Count('infradet__school_key'))
		mantotal=Infradet.objects.all().count()
		# Completion restricted to department 1 (dse*).
		dsemandet=Basicinfo.objects.filter(chk_dept__in=[1]).values('manage_cate_id','district').annotate(dsemdet=Count('infradet__school_key')).order_by('district')
		dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('district').annotate(dsemantot=Count('infradet__school_key'))
		dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dsemangtot=Count('infradet__school_key'))
		dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('infradet__school_key').count()
		# Completion restricted to department 2 (dee*).
		deemandet=Basicinfo.objects.filter(chk_dept__in=[2]).values('manage_cate_id','district').annotate(deemdet=Count('infradet__school_key')).order_by('district')
		deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('district').annotate(deemantot=Count('infradet__school_key'))
		deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(deemangtot=Count('infradet__school_key'))
		deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('infradet__school_key').count()
		# Completion restricted to department 3 (dms*).
		dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3]).values('manage_cate_id','district').annotate(dmsmdet=Count('infradet__school_key')).order_by('district')
		dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('district').annotate(dmsmantot=Count('infradet__school_key'))
		dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dmsmangtot=Count('infradet__school_key'))
		dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('infradet__school_key').count()
		return render(request,'drep_ii.html',locals())
class Sch_sr_cs(View):
	"""District-wise status report: class/section entry completion.

	Same layout as the other drep_* reports but counts DISTINCT schools
	with at least one Class_section row (distinct=True because one school
	has many sections).  Rendered with locals() into 'drep_cs.html' —
	every local name below is part of the template contract and must not
	be renamed.
	"""
	def get(self,request,**kwargs):
		dl=District.objects.all().order_by('id')
		# Registered-school baseline (denominator columns).
		schlst=Basicinfo.objects.all().values('chk_dept','district').annotate(disttot=Count('district')).order_by('district')
		disttot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('district').annotate(schsubtot=Count('chk_dept'))
		schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(schgtot=Count('chk_dept'))
		schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).count()
		# Class/section completion by management category (all departments).
		mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('manage_cate_id','district').annotate(mdet=Count('class_section__school_key',distinct = True)).order_by('district')
		mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('district').annotate(mantot=Count('class_section__school_key',distinct = True))
		mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(mangtot=Count('class_section__school_key',distinct = True))
		mantotal=Class_section.objects.all().values('school_key').distinct().count()
		# Completion restricted to department 1 (dse*).
		dsemandet=Basicinfo.objects.filter(chk_dept__in=[1]).values('manage_cate_id','district').annotate(dsemdet=Count('class_section__school_key',distinct = True)).order_by('district')
		dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('district').annotate(dsemantot=Count('class_section__school_key',distinct = True))
		dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dsemangtot=Count('class_section__school_key',distinct = True))
		dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3]).values('class_section__school_key').distinct().count()
		# Completion restricted to department 2 (dee*).
		deemandet=Basicinfo.objects.filter(chk_dept__in=[2]).values('manage_cate_id','district').annotate(deemdet=Count('class_section__school_key',distinct = True)).order_by('district')
		deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('district').annotate(deemantot=Count('class_section__school_key',distinct = True))
		deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(deemangtot=Count('class_section__school_key',distinct = True))
		deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3]).values('class_section__school_key').distinct().count()
		# Completion restricted to department 3 (dms*).
		dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3]).values('manage_cate_id','district').annotate(dmsmdet=Count('class_section__school_key',distinct = True)).order_by('district')
		dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('district').annotate(dmsmantot=Count('class_section__school_key',distinct = True))
		dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('manage_cate').annotate(dmsmangtot=Count('class_section__school_key',distinct = True))
		dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3]).values('class_section__school_key').distinct().count()
		return render(request,'drep_cs.html',locals())
class Sch_sr_ti(View):
	"""District-wise status report: teaching posts (staff_cat='1').

	Reports sanctioned post totals (Sum of staff.post_sanc) and filled post
	totals (Sum of staff.post_filled) per district and management category,
	with department-restricted variants for departments 1 (dse*) and 2
	(dee*).  Rendered with locals() into 'drep_ti.html' — every local name
	below is part of the template contract and must not be renamed.
	NOTE(review): unlike the other drep_* reports there is no dms* (dept 3)
	section here — confirm whether that is intentional.
	"""
	def get(self,request,**kwargs):
		dl=District.objects.all().order_by('id')
		# Registered-school baseline (denominator columns).
		schlst=Basicinfo.objects.all().values('chk_dept','district').annotate(disttot=Count('district')).order_by('district')
		disttot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('district').annotate(schsubtot=Count('chk_dept'))
		schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(schgtot=Count('chk_dept'))
		schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).count()
		# Sanctioned teaching posts by management category.
		mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1').values('manage_cate_id','district').annotate(mdet=Sum('staff__post_sanc')).order_by('district')
		mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('district').annotate(mantot=Sum('staff__post_sanc'))
		mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('manage_cate').annotate(mangtot=Sum('staff__post_sanc'))
		mantotal=Staff.objects.filter(staff_cat='1').aggregate(Sum('post_sanc'))
		# Sanctioned posts restricted to department 1 (dse*).
		dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],staff__staff_cat='1').values('manage_cate_id','district').annotate(dsemdet=Sum('staff__post_sanc')).order_by('district')
		dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('district').annotate(dsemantot=Sum('staff__post_sanc'))
		dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('manage_cate').annotate(dsemangtot=Sum('staff__post_sanc'))
		dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='1').aggregate(Sum('staff__post_sanc'))
		# Sanctioned posts restricted to department 2 (dee*).
		deemandet=Basicinfo.objects.filter(chk_dept__in=[2],staff__staff_cat='1').values('manage_cate_id','district').annotate(deemdet=Sum('staff__post_sanc')).order_by('district')
		deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('district').annotate(deemantot=Sum('staff__post_sanc'))
		deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='1').values('manage_cate').annotate(deemangtot=Sum('staff__post_sanc'))
		deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='1').aggregate(Sum('staff__post_sanc'))
		# Filled teaching posts (post_filled > 0) by management category.
		manfdet=Basicinfo.objects.filter(chk_dept__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate_id','district').annotate(mfdet=Sum('staff__post_filled')).order_by('district')
		manfsubtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('district').annotate(manftot=Sum('staff__post_filled'))
		manfgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate').annotate(manfgtot=Sum('staff__post_filled'))
		manftotal=Staff.objects.filter(staff_cat='1',post_filled__gt='0').aggregate(Sum('post_filled'))
		# Filled posts restricted to department 1 (dse*).
		dsemanfdet=Basicinfo.objects.filter(chk_dept__in=[1],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate_id','district').annotate(dsemfdet=Sum('staff__post_filled')).order_by('district')
		dsemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('district').annotate(dsemanftot=Sum('staff__post_filled'))
		dsemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate').annotate(dsemanfgtot=Sum('staff__post_filled'))
		dsemanftotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
		# Filled posts restricted to department 2 (dee*).
		# Fixed: previously annotated dsemfdet-style detail with
		# Count('staff__school_key'), which counted staff rows instead of
		# summing filled posts — inconsistent with every other filled-post
		# aggregation in this report.
		deemanfdet=Basicinfo.objects.filter(chk_dept__in=[2],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate_id','district').annotate(deemfdet=Sum('staff__post_filled')).order_by('district')
		deemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('district').annotate(deemanftot=Sum('staff__post_filled'))
		deemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate').annotate(deemangftot=Sum('staff__post_filled'))
		deemanftotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
		return render(request,'drep_ti.html',locals())
class Sch_sr_nti(View):
    """State-level, district-wise report on non-teaching staff (staff_cat='2').

    Builds school counts plus sanctioned and filled post aggregates - overall,
    for DSE schools (chk_dept=1) and for DEE schools (chk_dept=2) - and renders
    'drep_nti.html' with locals(), so every local variable name below is a
    template context key and must not be renamed.
    """

    def get(self,request,**kwargs):
        # Districts in id order - the report's row headers.
        dl=District.objects.all().order_by('id')
        # School counts per (department, district); NOTE(review): unlike the
        # subtotal/grand-total queries below this one is not restricted to
        # chk_dept 1-3 - confirm the template only reads those departments.
        schlst=Basicinfo.objects.all().values('chk_dept','district').annotate(disttot=Count('district')).order_by('district')
        disttot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('district').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3]).count()
        # Sanctioned non-teaching posts: per (management category, district),
        # per district, per management category.
        mandet=Basicinfo.objects.filter(staff__staff_cat='2').values('manage_cate_id','district').annotate(mdet=Sum('staff__post_sanc')).order_by('district')
        mansubtot=Basicinfo.objects.filter(staff__staff_cat='2').values('district').annotate(mantot=Sum('staff__post_sanc'))
        mangrtot=Basicinfo.objects.filter(staff__staff_cat='2').values('manage_cate').annotate(mangtot=Sum('staff__post_sanc'))
        # NOTE(review): the grand total queries Staff directly, without the
        # chk_dept / manage_cate restrictions applied to the per-department
        # totals below - confirm the figures are meant to be comparable.
        mantotal=Staff.objects.filter(staff_cat='2').aggregate(Sum('post_sanc'))
        # DSE (chk_dept=1): sanctioned non-teaching posts.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],staff__staff_cat='2').values('manage_cate_id','district').annotate(dsemdet=Sum('staff__post_sanc')).order_by('district')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='2').values('district').annotate(dsemantot=Sum('staff__post_sanc'))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='2').values('manage_cate').annotate(dsemangtot=Sum('staff__post_sanc'))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='2').aggregate(Sum('staff__post_sanc'))
        # DEE (chk_dept=2): sanctioned non-teaching posts.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],staff__staff_cat='2').values('manage_cate_id','district').annotate(deemdet=Sum('staff__post_sanc')).order_by('district')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='2').values('district').annotate(deemantot=Sum('staff__post_sanc'))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='2').values('manage_cate').annotate(deemangtot=Sum('staff__post_sanc'))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='2').aggregate(Sum('staff__post_sanc'))
        # Filled non-teaching posts (same breakdowns as above).
        # NOTE(review): post_filled__gt='0' compares against the string '0';
        # if post_filled is an integer column Django coerces it, but confirm.
        manfdet=Basicinfo.objects.filter(staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate_id','district').annotate(mfdet=Sum('staff__post_filled')).order_by('district')
        manfsubtot=Basicinfo.objects.filter(staff__staff_cat='2',staff__post_filled__gt='0').values('district').annotate(manftot=Sum('staff__post_filled'))
        manfgrtot=Basicinfo.objects.filter(staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate').annotate(manfgtot=Sum('staff__post_filled'))
        manftotal=Staff.objects.filter(staff_cat='2',post_filled__gt='0').aggregate(Sum('post_filled'))
        # DSE (chk_dept=1): filled non-teaching posts.
        dsemanfdet=Basicinfo.objects.filter(chk_dept__in=[1],staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate_id','district').annotate(dsemfdet=Sum('staff__post_filled')).order_by('district')
        dsemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('district').annotate(dsemanftot=Sum('staff__post_filled'))
        dsemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate').annotate(dsemanfgtot=Sum('staff__post_filled'))
        dsemanftotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
        # DEE (chk_dept=2): filled non-teaching posts.
        # NOTE(review): the alias 'deemangftot' does not mirror the DSE alias
        # 'dsemanfgtot' - likely a typo, but the template may rely on it.
        deemanfdet=Basicinfo.objects.filter(chk_dept__in=[2],staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate_id','district').annotate(deemfdet=Sum('staff__post_filled')).order_by('district')
        deemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('district').annotate(deemanftot=Sum('staff__post_filled'))
        deemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate').annotate(deemangftot=Sum('staff__post_filled'))
        deemanftotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
        return render(request,'drep_nti.html',locals())
class Sch_blkr_bi(View):
    """Block-wise basic-information report for one district.

    URL kwargs: 'blk' carries the district id; optional 'code' selects a
    department tab. Aggregates school counts per block - overall, DSE
    (chk_dept=1), DEE (chk_dept=2) and DMS (chk_dept=3) - and renders
    'blkrep_bi.html' with locals(), so every local variable name below is a
    template context key and must not be renamed.
    """

    def get(self,request,**kwargs):
        d_id=self.kwargs['blk']
        if (self.kwargs.get('code')):
            dept_opt=int(self.kwargs.get('code'))
        # Blocks of the selected district - the report's row headers.
        bl=Block.objects.filter(district=d_id).order_by('block_name')
        # Best-effort lookup of the logged-in school's record (school logins
        # use the UDISE code as username); the report renders without it.
        # Replaces a try/except/else/finally that ran the identical query
        # twice and swallowed every exception with a bare except.
        try:
            basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        except Exception:
            pass
        # School counts per (department, block).
        # NOTE(review): unlike every other query here this one is not limited
        # to district d_id - confirm the template only reads rows for bl.
        schlst=Basicinfo.objects.all().values('chk_dept','block').annotate(schblktot=Count('block')).order_by('block')
        blktot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('block').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).count()
        # School counts per (management category, block) and the subtotals.
        mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('manage_cate_id','block').annotate(mdet=Count('block')).order_by('block')
        mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(mantot=Count('block'))
        mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(mangtot=Count('manage_cate'))
        mantotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).count()
        # DSE (chk_dept=1) breakdown.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id).values('manage_cate_id','block').annotate(dsemdet=Count('block')).order_by('block')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dsemantot=Count('block'))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dsemangtot=Count('manage_cate'))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).count()
        # DEE (chk_dept=2) breakdown.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id).values('manage_cate_id','block').annotate(deemdet=Count('block')).order_by('block')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(deemantot=Count('block'))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(deemangtot=Count('manage_cate'))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).count()
        # DMS (chk_dept=3) breakdown.
        dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3],district=d_id).values('manage_cate_id','block').annotate(dmsmdet=Count('block')).order_by('block')
        dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dmsmantot=Count('block'))
        dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dmsmangtot=Count('manage_cate'))
        dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).count()
        return render(request,'blkrep_bi.html',locals())
class Sch_blkr_ai(View):
    """Block-wise academic-information submission report for one district.

    Counts schools that have submitted Academicinfo (via the
    academicinfo__school_key reverse join) per block - overall, DSE
    (chk_dept=1), DEE (chk_dept=2) and DMS (chk_dept=3) - and renders
    'blkrep_ai.html' with locals(), so every local variable name below is a
    template context key and must not be renamed.
    """

    def get(self,request,**kwargs):
        # 'blk' carries the district id; its blocks are the report rows.
        d_id=self.kwargs['blk']
        if (self.kwargs.get('code')):
            dept_opt=int(self.kwargs.get('code'))
        bl=Block.objects.filter(district=d_id).order_by('block_name')
        # NOTE(review): unguarded .get() - raises DoesNotExist (or
        # MultipleObjectsReturned) when the username is not a single UDISE
        # code; the sibling view Sch_blkr_bi wraps this in try/except.
        basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        # School counts per (department, block); not limited to d_id - see
        # the same pattern (and caveat) in Sch_blkr_bi.
        schlst=Basicinfo.objects.all().values('chk_dept','block').annotate(schblktot=Count('block')).order_by('block')
        blktot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('block').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).count()
        # Academicinfo submission counts per (management category, block).
        # NOTE(review): Count() here is not distinct - if a school can have
        # several Academicinfo rows it is counted once per row (contrast with
        # Sch_blkr_cs, which uses distinct=True) - confirm cardinality.
        mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('manage_cate_id','block').annotate(mdet=Count('academicinfo__school_key')).order_by('block')
        mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(mantot=Count('academicinfo__school_key'))
        mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(mangtot=Count('academicinfo__school_key'))
        mantotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('academicinfo__school_key').count()
        # DSE (chk_dept=1) breakdown.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id).values('manage_cate_id','block').annotate(dsemdet=Count('academicinfo__school_key')).order_by('block')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dsemantot=Count('academicinfo__school_key'))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dsemangtot=Count('academicinfo__school_key'))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('academicinfo__school_key').count()
        # DEE (chk_dept=2) breakdown.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id).values('manage_cate_id','block').annotate(deemdet=Count('academicinfo__school_key')).order_by('block')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(deemantot=Count('academicinfo__school_key'))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(deemangtot=Count('academicinfo__school_key'))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('academicinfo__school_key').count()
        # DMS (chk_dept=3) breakdown.
        dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3],district=d_id).values('manage_cate_id','block').annotate(dmsmdet=Count('academicinfo__school_key')).order_by('block')
        dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dmsmantot=Count('academicinfo__school_key'))
        dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dmsmangtot=Count('academicinfo__school_key'))
        dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('academicinfo__school_key').count()
        return render(request,'blkrep_ai.html',locals())
class Sch_blkr_ii(View):
    """Block-wise infrastructure-details submission report for one district.

    Identical in shape to Sch_blkr_ai but counts Infradet submissions
    (infradet__school_key) instead of Academicinfo. Renders 'blkrep_ii.html'
    with locals(), so every local variable name below is a template context
    key and must not be renamed.
    """

    def get(self,request,**kwargs):
        # 'blk' carries the district id; its blocks are the report rows.
        d_id=self.kwargs['blk']
        if (self.kwargs.get('code')):
            dept_opt=int(self.kwargs.get('code'))
        bl=Block.objects.filter(district=d_id).order_by('block_name')
        # NOTE(review): unguarded .get() - raises if the username is not a
        # single UDISE code (Sch_blkr_bi guards the same lookup).
        basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        # School counts per (department, block); not limited to d_id.
        schlst=Basicinfo.objects.all().values('chk_dept','block').annotate(schblktot=Count('block')).order_by('block')
        blktot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('block').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).count()
        # Infradet submission counts per (management category, block); Count()
        # is not distinct here - see the cardinality caveat in Sch_blkr_ai.
        mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('manage_cate_id','block').annotate(mdet=Count('infradet__school_key')).order_by('block')
        mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(mantot=Count('infradet__school_key'))
        mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(mangtot=Count('infradet__school_key'))
        mantotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('infradet__school_key').count()
        # DSE (chk_dept=1) breakdown.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id).values('manage_cate_id','block').annotate(dsemdet=Count('infradet__school_key')).order_by('block')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dsemantot=Count('infradet__school_key'))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dsemangtot=Count('infradet__school_key'))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('infradet__school_key').count()
        # DEE (chk_dept=2) breakdown.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id).values('manage_cate_id','block').annotate(deemdet=Count('infradet__school_key')).order_by('block')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(deemantot=Count('infradet__school_key'))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(deemangtot=Count('infradet__school_key'))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('infradet__school_key').count()
        # DMS (chk_dept=3) breakdown.
        dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3],district=d_id).values('manage_cate_id','block').annotate(dmsmdet=Count('infradet__school_key')).order_by('block')
        dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dmsmantot=Count('infradet__school_key'))
        dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dmsmangtot=Count('infradet__school_key'))
        dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('infradet__school_key').count()
        return render(request,'blkrep_ii.html',locals())
class Sch_blkr_cs(View):
    """Block-wise class/section submission report for one district.

    Counts distinct schools with Class_section rows (class_section__school_key,
    distinct=True, so each school is counted once regardless of how many
    sections it reported). Renders 'blkrep_cs.html' with locals(), so every
    local variable name below is a template context key and must not be
    renamed.
    """

    def get(self,request,**kwargs):
        # 'blk' carries the district id; its blocks are the report rows.
        d_id=self.kwargs['blk']
        if (self.kwargs.get('code')):
            dept_opt=int(self.kwargs.get('code'))
        bl=Block.objects.filter(district=d_id).order_by('block_name')
        # NOTE(review): unguarded .get() - raises if the username is not a
        # single UDISE code (Sch_blkr_bi guards the same lookup).
        basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        # School counts per (department, block); not limited to d_id.
        schlst=Basicinfo.objects.all().values('chk_dept','block').annotate(schblktot=Count('block')).order_by('block')
        blktot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('block').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).count()
        # Distinct schools with class/section data per (management category,
        # block), per block and per management category.
        mandet=Basicinfo.objects.filter(chk_dept__in=[1,2,3],district=d_id).values('manage_cate_id','block').annotate(mdet=Count('class_section__school_key',distinct = True)).order_by('block')
        mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(mantot=Count('class_section__school_key',distinct = True))
        mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(mangtot=Count('class_section__school_key',distinct = True))
        mantotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],district=d_id).values('class_section__school_key').distinct().count()
        # DSE (chk_dept=1) breakdown.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id).values('manage_cate_id','block').annotate(dsemdet=Count('class_section__school_key',distinct = True)).order_by('block')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dsemantot=Count('class_section__school_key',distinct = True))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dsemangtot=Count('class_section__school_key',distinct = True))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2,3],district=d_id).values('class_section__school_key').distinct().count()
        # DEE (chk_dept=2) breakdown.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id).values('manage_cate_id','block').annotate(deemdet=Count('class_section__school_key',distinct = True)).order_by('block')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(deemantot=Count('class_section__school_key',distinct = True))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(deemangtot=Count('class_section__school_key',distinct = True))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2,3],district=d_id).values('class_section__school_key').distinct().count()
        # DMS (chk_dept=3) breakdown.
        dmsmandet=Basicinfo.objects.filter(chk_dept__in=[3],district=d_id).values('manage_cate_id','block').annotate(dmsmdet=Count('class_section__school_key',distinct = True)).order_by('block')
        dmsmansubtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('block').annotate(dmsmantot=Count('class_section__school_key',distinct = True))
        dmsmangrtot=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('manage_cate').annotate(dmsmangtot=Count('class_section__school_key',distinct = True))
        dmsmantotal=Basicinfo.objects.filter(chk_dept__in=[3],manage_cate_id__in=[1,2,3],district=d_id).values('class_section__school_key').distinct().count()
        return render(request,'blkrep_cs.html',locals())
class Sch_blkr_ti(View):
    """Block-wise teaching-staff (staff_cat='1') report for one district.

    Aggregates sanctioned and filled teaching posts per block - overall, DSE
    (chk_dept=1) and DEE (chk_dept=2) - and renders 'blkrep_ti.html' with
    locals(), so every local variable name below is a template context key and
    must not be renamed.
    """

    def get(self,request,**kwargs):
        # 'blk' carries the district id; its blocks are the report rows.
        d_id=self.kwargs['blk']
        if (self.kwargs.get('code')):
            dept_opt=int(self.kwargs.get('code'))
        bl=Block.objects.filter(district=d_id).order_by('block_name')
        # NOTE(review): unguarded .get() - raises if the username is not a
        # single UDISE code (Sch_blkr_bi guards the same lookup).
        basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        # School counts per (department, block) within the district.
        schlst=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).values('chk_dept','block').annotate(schblktot=Count('block')).order_by('block')
        blktot=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).values('block').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).count()
        # Sanctioned teaching posts per (management category, block).
        mandet=Basicinfo.objects.filter(chk_dept__in=[1,2],staff__staff_cat='1',district=d_id).values('manage_cate_id','block').annotate(mdet=Sum('staff__post_sanc')).order_by('block')
        mansubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1',district=d_id).values('block').annotate(mantot=Sum('staff__post_sanc'))
        mangrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1',district=d_id).values('manage_cate').annotate(mangtot=Sum('staff__post_sanc'))
        mantotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1',district=d_id).aggregate(manatot=Sum('staff__post_sanc'))
        # DSE (chk_dept=1): sanctioned teaching posts.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],staff__staff_cat='1',district=d_id).values('manage_cate_id','block').annotate(dsemdet=Sum('staff__post_sanc')).order_by('block')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='1').values('block').annotate(dsemantot=Sum('staff__post_sanc'))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='1').values('manage_cate').annotate(dsemangtot=Sum('staff__post_sanc'))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='1').aggregate(Sum('staff__post_sanc'))
        # DEE (chk_dept=2): sanctioned teaching posts.
        # Bug fix: deemandet previously omitted district=d_id, so this
        # block-level report mixed in blocks from every district; every
        # sibling query here (incl. dsemandet and the DEE subtotals) filters
        # on the district.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],staff__staff_cat='1',district=d_id).values('manage_cate_id','block').annotate(deemdet=Sum('staff__post_sanc')).order_by('block')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='1').values('block').annotate(deemantot=Sum('staff__post_sanc'))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='1').values('manage_cate').annotate(deemangtot=Sum('staff__post_sanc'))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='1').aggregate(Sum('staff__post_sanc'))
        # Filled teaching posts per (management category, block).
        manfdet=Basicinfo.objects.filter(chk_dept__in=[1,2],staff__staff_cat='1',staff__post_filled__gt='0',district=d_id).values('manage_cate_id','block').annotate(mfdet=Sum('staff__post_filled')).order_by('block')
        manfsubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0',district=d_id).values('block').annotate(manftot=Sum('staff__post_filled'))
        manfgrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0',district=d_id).values('manage_cate').annotate(manfgtot=Sum('staff__post_filled'))
        # manftotal was assigned twice; the first assignment (with a
        # post_filled__gt filter) was dead code immediately overwritten by
        # this one, which is the value the template has always seen.
        manftotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='1',district=d_id).aggregate(manftot=Sum('staff__post_filled'))
        # DSE (chk_dept=1): filled teaching posts.
        dsemanfdet=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate_id','block').annotate(dsemfdet=Sum('staff__post_filled')).order_by('block')
        dsemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('block').annotate(dsemanftot=Sum('staff__post_filled'))
        dsemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate').annotate(dsemanfgtot=Sum('staff__post_filled'))
        dsemanftotal=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
        # DEE (chk_dept=2): filled teaching posts.
        deemanfdet=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate_id','block').annotate(deemfdet=Sum('staff__post_filled')).order_by('block')
        deemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('block').annotate(deemanftot=Sum('staff__post_filled'))
        deemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').values('manage_cate').annotate(deemangftot=Sum('staff__post_filled'))
        deemanftotal=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='1',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
        return render(request,'blkrep_ti.html',locals())
class Sch_blkr_nti(View):
    """Block-wise non-teaching-staff (staff_cat='2') report for one district.

    Mirrors Sch_blkr_ti for non-teaching posts: sanctioned and filled counts
    per block - overall, DSE (chk_dept=1) and DEE (chk_dept=2) - rendered via
    'blkrep_nti.html' with locals(), so every local variable name below is a
    template context key and must not be renamed.
    """

    def get(self,request,**kwargs):
        # 'blk' carries the district id; its blocks are the report rows.
        d_id=self.kwargs['blk']
        if (self.kwargs.get('code')):
            dept_opt=int(self.kwargs.get('code'))
        bl=Block.objects.filter(district=d_id).order_by('block_name')
        # NOTE(review): unguarded .get() - raises if the username is not a
        # single UDISE code (Sch_blkr_bi guards the same lookup).
        basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        # School counts per (department, block) within the district.
        schlst=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).values('chk_dept','block').annotate(schblktot=Count('block')).order_by('block')
        blktot=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).values('block').annotate(schsubtot=Count('chk_dept'))
        schgrtot=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).values('chk_dept').annotate(schgtot=Count('chk_dept'))
        schtotal=Basicinfo.objects.filter(chk_dept__in=[1,2],district=d_id).count()
        # Sanctioned non-teaching posts per (management category, block).
        mandet=Basicinfo.objects.filter(staff__staff_cat='2',district=d_id).values('manage_cate_id','block').annotate(mdet=Sum('staff__post_sanc')).order_by('block')
        mansubtot=Basicinfo.objects.filter(staff__staff_cat='2',district=d_id).values('block').annotate(mantot=Sum('staff__post_sanc'))
        mangrtot=Basicinfo.objects.filter(staff__staff_cat='2',district=d_id).values('manage_cate').annotate(mangtot=Sum('staff__post_sanc'))
        mantotal=Basicinfo.objects.filter(staff__staff_cat='2',district=d_id).aggregate(manatot=Sum('staff__post_sanc'))
        # DSE (chk_dept=1): sanctioned non-teaching posts.
        dsemandet=Basicinfo.objects.filter(chk_dept__in=[1],staff__staff_cat='2',district=d_id).values('manage_cate_id','block').annotate(dsemdet=Sum('staff__post_sanc')).order_by('block')
        dsemansubtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='2').values('block').annotate(dsemantot=Sum('staff__post_sanc'))
        dsemangrtot=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='2').values('manage_cate').annotate(dsemangtot=Sum('staff__post_sanc'))
        dsemantotal=Basicinfo.objects.filter(chk_dept__in=[1],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='2').aggregate(Sum('staff__post_sanc'))
        # DEE (chk_dept=2): sanctioned non-teaching posts.
        # Bug fix: deemandet previously omitted district=d_id, so this
        # block-level report mixed in blocks from every district; every
        # sibling query here (incl. dsemandet and the DEE subtotals) filters
        # on the district.
        deemandet=Basicinfo.objects.filter(chk_dept__in=[2],staff__staff_cat='2',district=d_id).values('manage_cate_id','block').annotate(deemdet=Sum('staff__post_sanc')).order_by('block')
        deemansubtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='2').values('block').annotate(deemantot=Sum('staff__post_sanc'))
        deemangrtot=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='2').values('manage_cate').annotate(deemangtot=Sum('staff__post_sanc'))
        deemantotal=Basicinfo.objects.filter(chk_dept__in=[2],manage_cate_id__in=[1,2],district=d_id,staff__staff_cat='2').aggregate(Sum('staff__post_sanc'))
        # Filled non-teaching posts per (management category, block).
        manfdet=Basicinfo.objects.filter(chk_dept__in=[1,2],staff__staff_cat='2',staff__post_filled__gt='0',district=d_id).values('manage_cate_id','block').annotate(mfdet=Sum('staff__post_filled')).order_by('block')
        manfsubtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0',district=d_id).values('block').annotate(manftot=Sum('staff__post_filled'))
        manfgrtot=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0',district=d_id).values('manage_cate').annotate(manfgtot=Sum('staff__post_filled'))
        # manftotal was assigned twice; the first assignment (with a
        # post_filled__gt filter) was dead code immediately overwritten by
        # this one, which is the value the template has always seen.
        manftotal=Basicinfo.objects.filter(manage_cate_id__in=[1,2,3],staff__staff_cat='2',district=d_id).aggregate(manftot=Sum('staff__post_filled'))
        # DSE (chk_dept=1): filled non-teaching posts.
        dsemanfdet=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate_id','block').annotate(dsemfdet=Sum('staff__post_filled')).order_by('block')
        dsemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('block').annotate(dsemanftot=Sum('staff__post_filled'))
        dsemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate').annotate(dsemanfgtot=Sum('staff__post_filled'))
        dsemanftotal=Basicinfo.objects.filter(chk_dept__in=[1],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
        # DEE (chk_dept=2): filled non-teaching posts.
        deemanfdet=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate_id','block').annotate(deemfdet=Sum('staff__post_filled')).order_by('block')
        deemanfsubtot=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('block').annotate(deemanftot=Sum('staff__post_filled'))
        deemanfgrtot=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').values('manage_cate').annotate(deemangftot=Sum('staff__post_filled'))
        deemanftotal=Basicinfo.objects.filter(chk_dept__in=[2],district=d_id,manage_cate_id__in=[1,2,3],staff__staff_cat='2',staff__post_filled__gt='0').aggregate(Sum('staff__post_filled'))
        return render(request,'blkrep_nti.html',locals())
class Sch_srep(View):
    """School-level report listing for one block.

    URL kwargs: 'blk' carries the block id; optional 'code' selects a
    department tab. Collects per-school basic/academic/infra/staff querysets
    and renders 'schrep.html' with locals(), so every local variable name
    below is a template context key and must not be renamed.
    """

    def get(self,request,**kwargs):
        # 'blk' carries the block id here (unlike the Sch_blkr_* views,
        # where the same kwarg carries a district id).
        b_id=self.kwargs['blk']
        try:
            dept_opt=int(self.kwargs.get('code'))
        except Exception:
            pass
        # All schools of the block, ordered for display.
        allsl=Basicinfo.objects.filter(block=b_id).order_by('school_name')
        # NOTE(review): blkid and basic_det are the same lookup (one with an
        # int() cast, one without) - raises if the username is not a single
        # UDISE code; confirm both names are needed by the template.
        blkid=Basicinfo.objects.get(udise_code=int(request.user.username))
        basic_det=Basicinfo.objects.get(udise_code=request.user.username)
        # Per-department school lists for the block.
        dsesl=Basicinfo.objects.filter(chk_dept__in=[1],block=b_id).order_by('school_name')
        deesl=Basicinfo.objects.filter(chk_dept__in=[2],block=b_id).order_by('school_name')
        dmssl=Basicinfo.objects.filter(chk_dept__in=[3],block=b_id).order_by('school_name')
        schbi=Basicinfo.objects.filter(block=b_id,manage_cate_id__gt=0).order_by('school_name')
        # NOTE(review): these pass the whole 'allsl' queryset to an exact
        # (=) lookup; current Django raises unless the queryset is sliced to
        # one result - presumably 'school_key_id__in=allsl' was intended.
        schai=Academicinfo.objects.filter(school_key_id=allsl)
        schii=Infradet.objects.filter(school_key_id=allsl)
        schsi=Staff.objects.filter(school_key_id=allsl)
        # Per-school teaching (staff_cat='1') and non-teaching (staff_cat='2')
        # sanctioned/filled totals; same queryset-as-exact-lookup caveat.
        schtsis=Basicinfo.objects.filter(staff__school_key_id=allsl,staff__staff_cat='1').values('staff__school_key_id').annotate(tptstot=Sum('staff__post_sanc'))
        schtsif=Basicinfo.objects.filter(staff__school_key_id=allsl,staff__staff_cat='1').values('staff__school_key_id').annotate(tptftot=Sum('staff__post_filled'))
        schntsis=Basicinfo.objects.filter(staff__school_key_id=allsl,staff__staff_cat='2').values('staff__school_key_id').annotate(tpntstot=Sum('staff__post_sanc'))
        schntsif=Basicinfo.objects.filter(staff__school_key_id=allsl,staff__staff_cat='2').values('staff__school_key_id').annotate(tpntftot=Sum('staff__post_filled'))
        return render(request,'schrep.html',locals())
office_editsave.office_email1 = form.cleaned_data['office_email1']
office_editsave.office_email2 = form.cleaned_data['office_email2']
office_editsave.sch_directorate = form.cleaned_data['sch_directorate'] |
<|file_name|>settingsmenu.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
from gui.dwidgets import DMenu
class SettingsMenu(DMenu):
"""docstring for SettingsMenu"""
def __init__(self, parent=None):
super(SettingsMenu, self).__init__(parent)
self.parent = parent
self.menuItems = [
{
'name': self.tr('Login'),
'icon': u'',
'shortcut': u'',
'trigger': 'Login',
},
{
'name': self.tr('Show suspension window'),
'icon': u'',
'shortcut': u'',
'trigger': 'Suspension',
},
{
'name': self.tr('Show float window'),
'icon': u'',
'shortcut': u'',
'trigger': 'Float',
},
{
'name': self.tr('Show Dock window'),
'icon': u'',
'shortcut': u'',
'trigger': 'Dock',
},
{
'name': self.tr('Language'),
'trigger': 'Language',
'type': 'submenu',
'actions': [
{
'name': 'English',
'icon': u'',
'shortcut': u'',
'trigger': 'English',
"checkable": True
},
{
'name': 'Chinese',
'icon': u'',
'shortcut': u'',
'trigger': 'Chinese',
"checkable": True
},
]
},
{
'name': self.tr('Document'),
'trigger': 'Document',
'type': 'submenu',
'actions': [
{
'name': 'Android developer guide',
'icon': u'',
'shortcut': u'',
'trigger': 'AndroidDeveloper',
"checkable": False
},
{
'name': 'iOS developer guide',
'icon': u'',
'shortcut': u'',
'trigger': 'IOSDeveloper',
"checkable": False
},
{
'name': 'Ford developer center',
'icon': u'',
'shortcut': u'',
'trigger': 'FordDeveloper',
"checkable": False
},
]
},
{
'name': self.tr('ObjectView'),
'icon': u'',
'shortcut': u'',
'trigger': 'ObjectView',
},
{
'name': self.tr('About'),
'icon': u'',
'shortcut': u'Qt.Key_F12',
'trigger': 'About',
},<|fim▁hole|> {
'name': self.tr('Exit'),
'icon': u'',
'shortcut': u'',
'trigger': 'Exit',
},
]
self.creatMenus(self.menuItems)
self.initConnect()
getattr(self, '%sAction' % 'English').setChecked(True)
def initConnect(self):
for item in ['English', 'Chinese']:
getattr(self, '%sAction' % item).triggered.connect(self.updateChecked)
def updateChecked(self):
for item in ['English', 'Chinese']:
action = getattr(self, '%sAction' % item)
if self.sender() is action:
action.setChecked(True)
else:
action.setChecked(False)<|fim▁end|> | |
<|file_name|>linktest_rsp_header.py<|end_file_name|><|fim▁begin|>#####################################################################
# linktest_rsp_header.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.<|fim▁hole|># but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""Header for the hsms linktest response."""
from .header import HsmsHeader
class HsmsLinktestRspHeader(HsmsHeader):
"""
Header for Linktest Response.
Header for message with SType 6.
"""
def __init__(self, system):
"""
Initialize a hsms linktest response.
:param system: message ID
:type system: integer
**Example**::
>>> import secsgem.hsms
>>>
>>> secsgem.hsms.HsmsLinktestRspHeader(10)
HsmsLinktestRspHeader({sessionID:0xffff, stream:00, function:00, pType:0x00, sType:0x06, \
system:0x0000000a, requireResponse:False})
"""
HsmsHeader.__init__(self, system, 0xFFFF)
self.requireResponse = False
self.stream = 0x00
self.function = 0x00
self.pType = 0x00
self.sType = 0x06<|fim▁end|> | #
# This software is distributed in the hope that it will be useful, |
<|file_name|>skpicture_printer_unittest.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import shutil
import tempfile
from telemetry import decorators
from telemetry.testing import options_for_unittests<|fim▁hole|>from telemetry.testing import page_test_test_case
from measurements import skpicture_printer
class SkpicturePrinterUnitTest(page_test_test_case.PageTestTestCase):
  def setUp(self):
    # Fresh copy of the shared telemetry options so each test can mutate
    # them safely, plus a scratch directory to receive generated .skp files.
    self._options = options_for_unittests.GetCopy()
    self._skp_outdir = tempfile.mkdtemp('_skp_test')
  def tearDown(self):
    # Remove the scratch .skp output directory created in setUp.
    shutil.rmtree(self._skp_outdir)
@decorators.Disabled('android')
def testSkpicturePrinter(self):
ps = self.CreateStorySetFromFileInUnittestDataDir('blank.html')
measurement = skpicture_printer.SkpicturePrinter(self._skp_outdir)
results = self.RunMeasurement(measurement, ps, options=self._options)
# Picture printing is not supported on all platforms.
if results.failures:
assert 'not supported' in results.failures[0].exc_info[1].message
return
saved_picture_count = results.FindAllPageSpecificValuesNamed(
'saved_picture_count')
self.assertEquals(len(saved_picture_count), 1)
self.assertGreater(saved_picture_count[0].GetRepresentativeNumber(), 0)<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Xyrosource Team.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate rustyline;
/// A command parsed from console input and handed to the caller.
#[derive(Debug)]
pub enum Command {
    /// Placeholder example command (entered as "dummy").
    Dummy,
}
/// Interactive line-based console; iterating over it yields `Command`s.
#[derive(Default)]
pub struct Console {
    /// rustyline editor providing the prompt, line input and history.
    editor: rustyline::Editor<()>,
    /// True when the previous input was invalid; switches the prompt to "! ".
    error: bool,
}
impl Console {
    /// Build a console with a fresh rustyline editor and the error flag off.
    pub fn new() -> Self {
        let editor = rustyline::Editor::<()>::new();
        Self {
            editor,
            error: false,
        }
    }
}
impl Iterator for Console {
type Item = Command;
fn next(&mut self) -> Option<Self::Item> {
loop {
let readline = self.editor.readline(if self.error { "! " } else { "λ " });
use self::rustyline::error::ReadlineError;
match readline {
Ok(ref line) => {<|fim▁hole|> self.editor.add_history_entry(line);
continue;
}
InternalCommand::Entry(cmd) => {
self.error = false;
self.editor.add_history_entry(line);
return Some(cmd);
}
InternalCommand::Quit => return None,
InternalCommand::Help => {
self.error = false;
self.editor.add_history_entry(line);
display_help();
continue;
}
}
}
Err(ReadlineError::Eof) => return None,
Err(_) => continue,
};
}
}
}
/// Result of parsing one raw console input line (see the `From` impl below).
enum InternalCommand {
    /// A recognized command to surface to the caller.
    Entry(Command),
    /// Blank (or whitespace-only) input line.
    Empty,
    /// Unrecognized input word.
    Invalid,
    /// "quit" / "exit": stop the console.
    Quit,
    /// "help": handled locally by printing usage text.
    Help,
}
impl<T: AsRef<str>> From<T> for InternalCommand {
    /// Interpret one raw input line. Leading/trailing whitespace is ignored
    /// and anything unrecognized maps to `InternalCommand::Invalid`.
    fn from(raw: T) -> Self {
        let keyword = raw.as_ref().trim();
        if keyword.is_empty() {
            return InternalCommand::Empty;
        }
        match keyword {
            "help" => InternalCommand::Help,
            "dummy" => InternalCommand::Entry(Command::Dummy),
            "quit" | "exit" => InternalCommand::Quit,
            _ => InternalCommand::Invalid,
        }
    }
}
fn display_help() {
println!("\
help - Display help
quit - Exit the server
exit - Exit the server
dummy - \
Example command
");
}<|fim▁end|> | match line.into() {
InternalCommand::Empty => continue,
InternalCommand::Invalid => {
self.error = true; |
<|file_name|>dead_code.rs<|end_file_name|><|fim▁begin|>use super::merge_block::merge_linear_blocks;
use super::conditional_jump_conversion::convert_jumps;
use super::dead_store::remove_dead_stores;
use super::super::cfg::{CFG};
use crate::common::tac_code::{Function, Operand, Statement};
use std::collections::HashMap;
use std::collections::HashSet;
use std::rc::Rc;
/// Entry point for the dead-code elimination passes.
///
/// For every function, fetches its control flow graph and runs the full
/// optimization pipeline over it: conditional-jump conversion, dead block
/// removal, trivial phi-function removal, dead store elimination, linear
/// block merging, and dead/trivial jump removal. The interleaved `println!`
/// + `print_cfg` calls are temporary debugging output (see `print_cfg`).
pub fn remove_dead_code(
    functions: &mut Vec<Function>,
    function_cfgs: &mut HashMap<Rc<String>, CFG>) {
    for f in functions.iter_mut() {
        // Every function is expected to have a CFG entry; panics otherwise.
        let cfg = &mut function_cfgs.get_mut(&f.function_info.name).unwrap();
        println!("\n\nBefore dead code elimination pass trivial conditional jump removal\n\n");
        print_cfg(f, cfg);
        convert_jumps(f, cfg);
        println!("\n\nAfter trivial conditional jump removal\n\n");
        print_cfg(f, cfg);
        remove_dead_blocks(f, cfg);
        println!("\n\nAfter dead block removal\n\n");
        print_cfg(f, cfg);
        remove_trivial_phi_functions(f, cfg);
        remove_dead_stores(f, cfg);
        println!("\n\nAfter dead store elimination\n\n");
        print_cfg(f, cfg);
        merge_linear_blocks(f, cfg);
        println!("\n\nAfter merging linear blocks\n\n");
        print_cfg(f, cfg);
        remove_dead_jumps(f, cfg);
        println!("\n\nAfter removing dead jumps\n\n");
        print_cfg(f, cfg);
        remove_trivial_jumps(f, cfg);
        // right now the optimizations do not update these values
        // clear them instead, so that the old values aren't accidentally used
        // later on.
        cfg.immediate_dominators.clear();
        cfg.dominance_frontier.clear();
    }
}
fn remove_dead_blocks(
function: &mut Function,
cfg: &mut CFG) {
let mut unconnected_blocks = cfg.blocks_not_connected_to_entry();
for i in 0..unconnected_blocks.len() {
let id = unconnected_blocks[i];
// grab any writes this basic block contains
let mut writes = HashSet::new();
for i in cfg.basic_blocks[id].start..cfg.basic_blocks[id].end {
match function.statements[i] {
Statement::Assignment{
destination: Some(Operand::SSAVariable(_, ref var_id, ref ssa_id)), .. }
=> { writes.insert((*var_id, *ssa_id)); },
Statement::PhiFunction(
Operand::SSAVariable(_, ref var_id, ref ssa_id),
_) => { writes.insert((*var_id, *ssa_id)); },
_ => {},
}
}
// and erase the above writes from any phi functions
for s in function.statements.iter_mut() {
match *s {
Statement::PhiFunction(
_,
ref mut operands) => {
operands.retain(|v| {
if let Operand::SSAVariable(_, ref var_id, ref ssa_id) = *v {
!writes.contains(&(*var_id, *ssa_id))
} else {
<|fim▁hole|> ice!("Invalid operand in phi-function: {}", v);
}
});
},
_ => {},
}
}
cfg.remove_block(function, id);
// decrement block number if block above was removed
for j in i..unconnected_blocks.len() {
if unconnected_blocks[j] > id {
unconnected_blocks[j] -= 1;
}
}
}
}
// remove phi-functions that only have one operand
/// Removes phi-functions that have exactly one operand and rewrites every
/// use of their destination SSA variable to refer to that operand instead.
///
/// Pass 1 collects the trivial phi-functions, records a rename
/// (destination -> source) for each, and deletes the statements. Chains of
/// trivial phis are flattened so a rename always points at the final source.
/// Pass 2 applies the rename table to every SSA-variable use site:
/// assignment operands, conditional jumps, returns, and the operands of the
/// remaining phi-functions.
fn remove_trivial_phi_functions(
    function: &mut Function,
    cfg: &mut CFG) {
    // Maps the (var_id, ssa_id) of a removed phi destination to the
    // (var_id, ssa_id) that should be used in its place.
    let mut renames : HashMap<(u32, u32), (u32, u32)> = HashMap::new();
    let mut remove_list = vec![];
    // Pass 1: find single-operand phi-functions and build the rename table.
    for (i, s) in function.statements.iter().enumerate() {
        match *s {
            Statement::PhiFunction(
                Operand::SSAVariable(_, dst_var_id, dst_ssa_id),
                ref operands) => {
                if operands.len() == 1 {
                    if let Operand::SSAVariable(_, src_var_id, src_ssa_id) = operands[0] {
                        remove_list.push(i);
                        // ensure trivial phi-functions that refer to other trivial
                        // phi functions are handled correctly
                        if renames.contains_key(&(src_var_id, src_ssa_id)) {
                            let prev = renames[&(src_var_id, src_ssa_id)].clone();
                            renames.insert((dst_var_id, dst_ssa_id), prev);
                        } else {
                            renames.insert((dst_var_id, dst_ssa_id), (src_var_id, src_ssa_id));
                        }
                    } else {
                        ice!("Invalid operand for phi-function: {:?}", operands);
                    }
                }
            },
            _ => {},
        }
    }
    cfg.remove_statements(function, remove_list);
    // Pass 2: rewrite all remaining uses of the renamed SSA variables.
    for s in function.statements.iter_mut() {
        match *s {
            // Both operands of a binary assignment may need renaming.
            Statement::Assignment{
                left_operand: Some(ref mut val),
                right_operand: Some(ref mut val2), .. } => {
                match *val {
                    Operand::SSAVariable(_, ref mut var_id, ref mut ssa_id) => {
                        if renames.contains_key(&(*var_id, *ssa_id)) {
                            let (new_var_id, new_ssa_id) = renames[&(*var_id, *ssa_id)];
                            *var_id = new_var_id;
                            *ssa_id = new_ssa_id;
                        }
                    },
                    _ => {},
                }
                match *val2 {
                    Operand::SSAVariable(_, ref mut var_id, ref mut ssa_id) => {
                        if renames.contains_key(&(*var_id, *ssa_id)) {
                            let (new_var_id, new_ssa_id) = renames[&(*var_id, *ssa_id)];
                            *var_id = new_var_id;
                            *ssa_id = new_ssa_id;
                        }
                    },
                    _ => {},
                }
            },
            // Condition operands of conditional jumps.
            Statement::JumpIfTrue(
                Operand::SSAVariable(_, ref mut var_id, ref mut ssa_id),
                _) |
            Statement::JumpIfFalse(
                Operand::SSAVariable(_, ref mut var_id, ref mut ssa_id),
                _) => {
                if renames.contains_key(&(*var_id, *ssa_id)) {
                    let (new_var_id, new_ssa_id) = renames[&(*var_id, *ssa_id)];
                    *var_id = new_var_id;
                    *ssa_id = new_ssa_id;
                }
            },
            // Returned values.
            Statement::Return(
                Some(Operand::SSAVariable(_, ref mut var_id, ref mut ssa_id))) => {
                if renames.contains_key(&(*var_id, *ssa_id)) {
                    let (new_var_id, new_ssa_id) = renames[&(*var_id, *ssa_id)];
                    *var_id = new_var_id;
                    *ssa_id = new_ssa_id;
                }
            },
            // Operands of the surviving (non-trivial) phi-functions.
            Statement::PhiFunction(
                _,
                ref mut operands) => {
                for o in operands.iter_mut() {
                    match *o {
                        Operand::SSAVariable(_, ref mut var_id, ref mut ssa_id) => {
                            if renames.contains_key(&(*var_id, *ssa_id)) {
                                let (new_var_id, new_ssa_id) = renames[&(*var_id, *ssa_id)];
                                *var_id = new_var_id;
                                *ssa_id = new_ssa_id;
                            }
                        },
                        _ => ice!("Invalid operand present in phi-function: {}", o),
                    }
                }
            },
            _ => {},
        }
    }
}
/// Deletes unconditional jumps whose target label no longer exists in the
/// function (e.g. because the labeled block was removed earlier).
fn remove_dead_jumps(
    function: &mut Function,
    cfg: &mut CFG) {
    // Gather the ids of every label still present in the statement list.
    let mut existing_labels = HashSet::new();
    for stmt in function.statements.iter() {
        if let Statement::Label(ref id) = *stmt {
            existing_labels.insert(*id);
        }
    }
    // Collect the indices of jumps pointing at a label that is gone.
    let mut doomed = vec![];
    for (idx, stmt) in function.statements.iter().enumerate() {
        if let Statement::Jump(ref target) = *stmt {
            if !existing_labels.contains(target) {
                doomed.push(idx);
            }
        }
    }
    cfg.remove_statements(function, doomed);
}
// remove unconditional jumps that jump to a label that immediately follows the
// jump
fn remove_trivial_jumps(
    function: &mut Function,
    cfg: &mut CFG) {
    // Scan for a Jump immediately followed by its own target Label and drop
    // the jump; start at 1 so statements[idx - 1] is always valid.
    let mut idx = 1;
    while idx < function.statements.len() {
        if let Statement::Label(label_id) = function.statements[idx] {
            if let Statement::Jump(jump_id) = function.statements[idx - 1] {
                if jump_id == label_id {
                    // Step back onto the jump, remove it, then the final
                    // increment leaves us on the label for the next round.
                    idx -= 1;
                    cfg.remove_statements(function, vec![idx]);
                }
            }
        }
        idx += 1;
    }
}
// Temporary debug code, can be removed
fn print_cfg(f: &Function, cfg: &CFG) {
let mut counter = 1;
println!("Function {}", f.function_info.name);
for bb in cfg.basic_blocks.iter() {
println!("<BB {}>", counter);
for i in bb.start..bb.end {
println!(" {}", f.statements[i])
}
counter += 1;
}
println!("adjacency:\n");
for i in 0..cfg.basic_blocks.len() {
let mut adj_str = cfg.adjacency_list[i].
iter().
fold(String::new(), |acc, ref val| format!("{}, {}", val, acc));
adj_str.pop(); adj_str.pop(); // remove last comma + space
if adj_str.is_empty() {
adj_str = "<None>".to_string();
}
println!("{}: {}", i+1, adj_str);
}
println!("\n");
}<|fim▁end|> | |
<|file_name|>1139f0b4c9e3_order_name_not_unique.py<|end_file_name|><|fim▁begin|>"""order name not unique
Revision ID: 1139f0b4c9e3
Revises: 220436d6dcdc
Create Date: 2016-05-31 08:59:21.225314
"""
# revision identifiers, used by Alembic.
revision = '1139f0b4c9e3'
down_revision = '220436d6dcdc'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('orders_name_key', 'orders', type_='unique')<|fim▁hole|> ### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_unique_constraint('orders_name_key', 'orders', ['name'])
### end Alembic commands ###<|fim▁end|> | |
<|file_name|>El-Arreglo-de-Nieves.cpp<|end_file_name|><|fim▁begin|>//https://omegaup.com/arena/problem/El-Arreglo-de-Nieves
#include <cstdio>
#include <iostream>
#include <stack>
using namespace std;
struct inf
{
int p, v;
};
int nums, totes, mint, stval;
int nlist[1000005], tol[1000005], tor[1000005], seen[1000005];
inf lastnp[1000005];
int main()
{
scanf("%d", &nums);
for(int i=1; i<=nums; i++)
scanf("%d", &nlist[i]);
for(int i=1; i<=nums; i++)
{
while(stval && lastnp[stval].v%nlist[i]==0)
stval--;
if(stval)
tol[i]=lastnp[stval].p+1;
else
tol[i]=1;
stval++;
lastnp[stval]={i, nlist[i]};
}
stval=0;
for(int i=nums; i; i--)
{
while(stval && lastnp[stval].v%nlist[i]==0)
stval--;
if(stval)
tor[i]=lastnp[stval].p-1;
else
tor[i]=nums;
stval++;
lastnp[stval]={i, nlist[i]};
}<|fim▁hole|> }
for(int i=1; i<=nums; i++)
{
if(tor[i]-tol[i]+1==tor[totes]-tol[totes]+1 && !seen[tol[i]])
seen[tol[i]]=1, mint++;
}
printf("%d %d\n", mint, tor[totes]-tol[totes]);
for(int i=1; i<=nums; i++)
{
if(seen[i])
printf("%d ", i);
}
printf("\n");
return 0;
}<|fim▁end|> | for(int i=1; i<=nums; i++)
{
if(tor[i]-tol[i]+1>tor[totes]-tol[totes]+1)
totes=i; |
<|file_name|>RestClient.py<|end_file_name|><|fim▁begin|>"""
Liana REST API client
Copyright Liana Technologies Ltd 2018
"""
import json
import hashlib
import hmac
import requests
import time<|fim▁hole|>
class APIException(Exception):
pass
class RestClient:
def __init__(self, user_id, api_secret, api_url, api_version, api_realm):
self._response = None
self._user_id = user_id
self._api_secret = api_secret
self._api_url = api_url
self._api_realm = api_realm
self._api_version = api_version
self._content_type = 'application/json'
def call(self, path, params=[], method='POST'):
""" Perform API request and return the API result"""
request_function = getattr(requests, method.lower())
self._set_request_data(path, params, method)
self._response = request_function(
self._api_url + self._full_path,
headers=self._get_headers(),
data=self._json_string
)
self._response_body = self._response.text;
if self._response.status_code >= 400:
raise APIException('API response with status code ' +str(self._response.status_code))
try:
data = json.loads(self._response_body);
except ValueError: # Python 2.x
raise APIException('API did not return a valid json string')
except json.decoder.JSONDecodeError: # Python 3.5+
raise APIException('API did not return a valid json string')
if 'succeed' in data.keys() and not data['succeed']:
raise APIException(data['message'])
if 'result' in data.keys():
return data['result']
return data
def get_http_response(self):
""" Returns the raw response object of last performed API request """
return self._response
""" INTERNAL METHODS FOLLOW """
def _get_new_timestamp(self):
""" Returns a fresh timestamp in proper format """
return time.strftime('%Y-%m-%dT%H:%M:%S%z')
def _get_hash(self):
""" Form and return the parameters hash for the API request """
md5 = hashlib.md5()
md5.update(self._json_string.encode('utf-8'))
return md5.hexdigest()
def _get_message(self):
""" Return the message in the format which is used to create signature of the request """
message = "\n".join([
self._method,
self._get_hash(),
self._content_type,
self._timestamp,
self._json_string,
self._full_path
])
return message.encode('utf-8')
def _get_signature(self):
""" Get signature for the API request """
return hmac.new(
self._api_secret.encode('utf-8'),
self._get_message(),
hashlib.sha256
).hexdigest()
def _get_headers(self):
""" Get headers for the API HTTP request """
return {
'Content-Type': self._content_type,
'Content-MD5': self._get_hash(),
'Date': self._timestamp,
'Authorization': self._api_realm + ' ' + str(self._user_id) + ':' + self._get_signature(),
}
def _set_request_data(self, path, params, method):
""" Set API request data """
self._full_path = '/api/v' + str(self._api_version) + '/' + path
self._json_string = json.dumps(params)
if method == 'GET':
self._json_string = ''
self._timestamp = self._get_new_timestamp()
self._method = method<|fim▁end|> | |
<|file_name|>element_spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Injector, RenderComponentType, RootRenderer, Sanitizer, SecurityContext, ViewEncapsulation, WrappedValue, getDebugNode} from '@angular/core';
import {getDebugContext} from '@angular/core/src/errors';
import {ArgumentType, BindingType, DebugContext, NodeDef, NodeFlags, OutputType, RootData, Services, ViewData, ViewDefinition, ViewFlags, ViewHandleEventFn, ViewUpdateFn, anchorDef, asElementData, elementDef, rootRenderNodes, textDef, viewDef} from '@angular/core/src/view/index';
import {getDOM} from '@angular/platform-browser/src/dom/dom_adapter';
import {ARG_TYPE_VALUES, checkNodeInlineOrDynamic, createRootView, isBrowser, removeNodes} from './helper';
export function main() {
describe(`View Elements`, () => {
    // Builds a component ViewDefinition from the given node definitions,
    // defaulting to no view flags; thin wrapper over viewDef() that cuts
    // test boilerplate.
    function compViewDef(
        nodes: NodeDef[], updateDirectives?: ViewUpdateFn, updateRenderer?: ViewUpdateFn,
        viewFlags: ViewFlags = ViewFlags.None): ViewDefinition {
      return viewDef(viewFlags, nodes, updateDirectives, updateRenderer);
    }
    // Instantiates a root view for the given definition (with an optional
    // component context) and returns both the view and its root render nodes.
    function createAndGetRootNodes(
        viewDef: ViewDefinition, context?: any): {rootNodes: any[], view: ViewData} {
      const view = createRootView(viewDef, context);
      const rootNodes = rootRenderNodes(view);
      return {rootNodes, view};
    }
describe('create', () => {
it('should create elements without parents', () => {
const rootNodes = createAndGetRootNodes(compViewDef([
elementDef(NodeFlags.None, null, null, 0, 'span')
])).rootNodes;
expect(rootNodes.length).toBe(1);
expect(getDOM().nodeName(rootNodes[0]).toLowerCase()).toBe('span');
});
it('should create views with multiple root elements', () => {
const rootNodes = createAndGetRootNodes(compViewDef([
elementDef(NodeFlags.None, null, null, 0, 'span'),
elementDef(NodeFlags.None, null, null, 0, 'span')
])).rootNodes;
expect(rootNodes.length).toBe(2);
});
it('should create elements with parents', () => {
const rootNodes = createAndGetRootNodes(compViewDef([
elementDef(NodeFlags.None, null, null, 1, 'div'),
elementDef(NodeFlags.None, null, null, 0, 'span'),
])).rootNodes;
expect(rootNodes.length).toBe(1);
const spanEl = getDOM().childNodes(rootNodes[0])[0];
expect(getDOM().nodeName(spanEl).toLowerCase()).toBe('span');
});
it('should set fixed attributes', () => {
const rootNodes = createAndGetRootNodes(compViewDef([
elementDef(NodeFlags.None, null, null, 0, 'div', [['title', 'a']]),
])).rootNodes;
expect(rootNodes.length).toBe(1);
expect(getDOM().getAttribute(rootNodes[0], 'title')).toBe('a');
});
it('should add debug information to the renderer', () => {
const someContext = new Object();
const {view, rootNodes} = createAndGetRootNodes(
compViewDef([elementDef(NodeFlags.None, null, null, 0, 'div')]), someContext);
expect(getDebugNode(rootNodes[0]).nativeNode).toBe(asElementData(view, 0).renderElement);
});
});
describe('change properties', () => {
ARG_TYPE_VALUES.forEach((inlineDynamic) => {
it(`should update via strategy ${inlineDynamic}`, () => {
const {view, rootNodes} = createAndGetRootNodes(compViewDef(
[
elementDef(
NodeFlags.None, null, null, 0, 'input', null,
[
[BindingType.ElementProperty, 'title', SecurityContext.NONE],
[BindingType.ElementProperty, 'value', SecurityContext.NONE]
]),
],
null, (check, view) => {
checkNodeInlineOrDynamic(check, view, 0, inlineDynamic, ['v1', 'v2']);
}));
Services.checkAndUpdateView(view);
const el = rootNodes[0];
expect(getDOM().getProperty(el, 'title')).toBe('v1');
expect(getDOM().getProperty(el, 'value')).toBe('v2');
});
});
});
describe('change attributes', () => {
ARG_TYPE_VALUES.forEach((inlineDynamic) => {
it(`should update via strategy ${inlineDynamic}`, () => {
const {view, rootNodes} = createAndGetRootNodes(compViewDef(
[
elementDef(
NodeFlags.None, null, null, 0, 'div', null,
[
[BindingType.ElementAttribute, 'a1', SecurityContext.NONE],
[BindingType.ElementAttribute, 'a2', SecurityContext.NONE]
]),
],
null, (check, view) => {
checkNodeInlineOrDynamic(check, view, 0, inlineDynamic, ['v1', 'v2']);
}));
Services.checkAndUpdateView(view);
const el = rootNodes[0];
expect(getDOM().getAttribute(el, 'a1')).toBe('v1');
expect(getDOM().getAttribute(el, 'a2')).toBe('v2');
});
});
});
describe('change classes', () => {
ARG_TYPE_VALUES.forEach((inlineDynamic) => {
it(`should update via strategy ${inlineDynamic}`, () => {
const {view, rootNodes} = createAndGetRootNodes(compViewDef(
[
elementDef(
NodeFlags.None, null, null, 0, 'div', null,<|fim▁hole|> ],
(check, view) => {
checkNodeInlineOrDynamic(check, view, 0, inlineDynamic, [true, true]);
}));
Services.checkAndUpdateView(view);
const el = rootNodes[0];
expect(getDOM().hasClass(el, 'c1')).toBeTruthy();
expect(getDOM().hasClass(el, 'c2')).toBeTruthy();
});
});
});
describe('change styles', () => {
ARG_TYPE_VALUES.forEach((inlineDynamic) => {
it(`should update via strategy ${inlineDynamic}`, () => {
const {view, rootNodes} = createAndGetRootNodes(compViewDef(
[
elementDef(
NodeFlags.None, null, null, 0, 'div', null,
[
[BindingType.ElementStyle, 'width', 'px'],
[BindingType.ElementStyle, 'color', null]
]),
],
null, (check, view) => {
checkNodeInlineOrDynamic(check, view, 0, inlineDynamic, [10, 'red']);
}));
Services.checkAndUpdateView(view);
const el = rootNodes[0];
expect(getDOM().getStyle(el, 'width')).toBe('10px');
expect(getDOM().getStyle(el, 'color')).toBe('red');
});
});
});
if (isBrowser()) {
describe('listen to DOM events', () => {
function createAndAttachAndGetRootNodes(viewDef: ViewDefinition):
{rootNodes: any[], view: ViewData} {
const result = createAndGetRootNodes(viewDef);
// Note: We need to append the node to the document.body, otherwise `click` events
// won't work in IE.
result.rootNodes.forEach((node) => {
document.body.appendChild(node);
removeNodes.push(node);
});
return result;
}
it('should listen to DOM events', () => {
const handleEventSpy = jasmine.createSpy('handleEvent');
const removeListenerSpy =
spyOn(HTMLElement.prototype, 'removeEventListener').and.callThrough();
const {view, rootNodes} = createAndAttachAndGetRootNodes(compViewDef([elementDef(
NodeFlags.None, null, null, 0, 'button', null, null, [[null, 'click']],
handleEventSpy)]));
rootNodes[0].click();
expect(handleEventSpy).toHaveBeenCalled();
let handleEventArgs = handleEventSpy.calls.mostRecent().args;
expect(handleEventArgs[0]).toBe(view);
expect(handleEventArgs[1]).toBe('click');
expect(handleEventArgs[2]).toBeTruthy();
Services.destroyView(view);
expect(removeListenerSpy).toHaveBeenCalled();
});
it('should listen to window events', () => {
const handleEventSpy = jasmine.createSpy('handleEvent');
const addListenerSpy = spyOn(window, 'addEventListener');
const removeListenerSpy = spyOn(window, 'removeEventListener');
const {view, rootNodes} = createAndAttachAndGetRootNodes(compViewDef([elementDef(
NodeFlags.None, null, null, 0, 'button', null, null, [['window', 'windowClick']],
handleEventSpy)]));
expect(addListenerSpy).toHaveBeenCalled();
expect(addListenerSpy.calls.mostRecent().args[0]).toBe('windowClick');
addListenerSpy.calls.mostRecent().args[1]({name: 'windowClick'});
expect(handleEventSpy).toHaveBeenCalled();
const handleEventArgs = handleEventSpy.calls.mostRecent().args;
expect(handleEventArgs[0]).toBe(view);
expect(handleEventArgs[1]).toBe('window:windowClick');
expect(handleEventArgs[2]).toBeTruthy();
Services.destroyView(view);
expect(removeListenerSpy).toHaveBeenCalled();
});
it('should listen to document events', () => {
const handleEventSpy = jasmine.createSpy('handleEvent');
const addListenerSpy = spyOn(document, 'addEventListener');
const removeListenerSpy = spyOn(document, 'removeEventListener');
const {view, rootNodes} = createAndAttachAndGetRootNodes(compViewDef([elementDef(
NodeFlags.None, null, null, 0, 'button', null, null, [['document', 'documentClick']],
handleEventSpy)]));
expect(addListenerSpy).toHaveBeenCalled();
expect(addListenerSpy.calls.mostRecent().args[0]).toBe('documentClick');
addListenerSpy.calls.mostRecent().args[1]({name: 'documentClick'});
expect(handleEventSpy).toHaveBeenCalled();
const handleEventArgs = handleEventSpy.calls.mostRecent().args;
expect(handleEventArgs[0]).toBe(view);
expect(handleEventArgs[1]).toBe('document:documentClick');
expect(handleEventArgs[2]).toBeTruthy();
Services.destroyView(view);
expect(removeListenerSpy).toHaveBeenCalled();
});
it('should preventDefault only if the handler returns false', () => {
let eventHandlerResult: any;
let preventDefaultSpy: jasmine.Spy;
const {view, rootNodes} = createAndAttachAndGetRootNodes(compViewDef([elementDef(
NodeFlags.None, null, null, 0, 'button', null, null, [[null, 'click']],
(view, eventName, event) => {
preventDefaultSpy = spyOn(event, 'preventDefault').and.callThrough();
return eventHandlerResult;
})]));
eventHandlerResult = undefined;
rootNodes[0].click();
expect(preventDefaultSpy).not.toHaveBeenCalled();
eventHandlerResult = true;
rootNodes[0].click();
expect(preventDefaultSpy).not.toHaveBeenCalled();
eventHandlerResult = 'someString';
rootNodes[0].click();
expect(preventDefaultSpy).not.toHaveBeenCalled();
eventHandlerResult = false;
rootNodes[0].click();
expect(preventDefaultSpy).toHaveBeenCalled();
});
it('should report debug info on event errors', () => {
const addListenerSpy = spyOn(HTMLElement.prototype, 'addEventListener').and.callThrough();
const {view, rootNodes} = createAndAttachAndGetRootNodes(compViewDef([elementDef(
NodeFlags.None, null, null, 0, 'button', null, null, [[null, 'click']],
() => { throw new Error('Test'); })]));
let err: any;
try {
addListenerSpy.calls.mostRecent().args[1]('SomeEvent');
} catch (e) {
err = e;
}
expect(err).toBeTruthy();
expect(err.message).toBe('Test');
const debugCtx = getDebugContext(err);
expect(debugCtx.view).toBe(view);
expect(debugCtx.nodeIndex).toBe(0);
});
});
}
});
}<|fim▁end|> | [[BindingType.ElementClass, 'c1'], [BindingType.ElementClass, 'c2']]), |
<|file_name|>jquery.tree.metadata.js<|end_file_name|><|fim▁begin|>(function ($) {
if(typeof $.metadata == "undefined") throw "jsTree metadata: jQuery metadata plugin not included.";
$.extend($.tree.plugins, {<|fim▁hole|> callbacks : {
check : function(rule, obj, value, tree) {
var opts = $.extend(true, {}, $.tree.plugins.metadata.defaults, this.settings.plugins.metadata);
if(typeof $(obj).metadata({ type : "attr", name : opts.attribute })[rule] != "undefined") return $(obj).metadata()[rule];
}
}
}
});
})(jQuery);<|fim▁end|> | "metadata" : {
defaults : {
attribute : "data"
}, |
<|file_name|>requirements.py<|end_file_name|><|fim▁begin|>Flask==0.10.1
Jinja2==2.7.2
MarkupSafe==0.18
Werkzeug==0.9.4
distribute==0.6.31
itsdangerous==0.23
lxml==3.3.1
pygal==1.3.1<|fim▁hole|>wsgiref==0.1.2<|fim▁end|> | |
<|file_name|>hello_text.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import pygame
pygame.display.init()
pygame.font.init()
modes_list = pygame.display.list_modes()
#screen = pygame.display.set_mode(modes_list[0], pygame.FULLSCREEN) # the highest resolution with fullscreen
screen = pygame.display.set_mode(modes_list[-1]) # the lowest resolution
background_color = (255, 255, 255)
screen.fill(background_color)
font = pygame.font.Font(pygame.font.get_default_font(), 22)
text_surface = font.render("Hello world!", True, (0,0,0))
screen.blit(text_surface, (0,0)) # paste the text at the top left corner of the window
pygame.display.flip() # display the image
<|fim▁hole|> event = pygame.event.wait()
if(event.type == pygame.QUIT or (event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE)):
break<|fim▁end|> | while True: # main loop (event loop) |
<|file_name|>solve.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | def xo(s):
s = s.lower()
return s.count('x') == s.count('o') |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Q
from core.models import TimeStampedModel
from accounts.models import Account
class Board(models.Model):
    """A forum board; posts reference a board via Post.board."""
    def __str__(self):
        return 'Board Name: ' + self.name
    def get_absolute_url(self):
        # Canonical URL: this board's post list, resolved by slug.
        return reverse('board:post_list', args=[self.slug])
    # URL-safe unique identifier used in routes (see get_absolute_url).
    slug = models.CharField(default='', unique=True, max_length=100)
    # Human-readable board title.
    name = models.CharField(default='', max_length=100)
    # Chunk sizes — presumably items per page and page links per nav block;
    # confirm against the views that read them.
    posts_chunk_size = models.IntegerField(default=10)
    post_pages_nav_chunk_size = models.IntegerField(default=10)
    comments_chunk_size = models.IntegerField(default=5)
    comment_pages_nav_chunk_size = models.IntegerField(default=10)
class PostQuerySet(models.QuerySet):
    """Chainable queryset helpers for Post (search and common filters)."""

    def search(self, search_flag, query):
        """Filter by substring in title, content, or both; any other flag
        returns the unfiltered queryset."""
        if search_flag == 'BOTH':
            return self.filter(
                Q(title__contains=query) | Q(content__contains=query))
        if search_flag == 'TITLE':
            return self.filter(title__contains=query)
        if search_flag == 'CONTENT':
            return self.filter(content__contains=query)
        return self.all()

    def remain(self):
        """Only posts that have not been soft-deleted."""
        return self.filter(is_deleted=False)

    def board(self, board):
        """Only posts belonging to the given board."""
        return self.filter(board=board)
class PostManager(models.Manager):
    """Manager exposing the PostQuerySet helpers at the manager level.

    Each method delegates to the custom queryset so callers can write
    e.g. Post.objects.search(...) without touching get_queryset().
    """
    def get_queryset(self):
        # Route all manager-level queries through the custom queryset class.
        return PostQuerySet(self.model, using=self._db)
    def search(self, search_flag, query):
        return self.get_queryset().search(search_flag, query)
    def remain(self):
        return self.get_queryset().remain()
    def board(self, board):
        return self.get_queryset().board(board)
class Post(TimeStampedModel):
def __str__(self):<|fim▁hole|> return 'Post Title: ' + self.title
SEARCH_FLAG = [
('TITLE', '제목'),
('CONTENT', '내용'),
('BOTH', '제목+내용')
]
objects = PostManager()
title = models.CharField(blank=False, max_length=100)
content = models.TextField(default='')
board = models.ForeignKey(Board, null=True)
is_deleted = models.BooleanField(default=False)
page_view_count = models.IntegerField(default=0)
like_count = models.IntegerField(default=0)
account = models.ForeignKey(Account, null=True)
ip = models.GenericIPAddressField(null=True, default='')
def get_absolute_url(self):
return reverse('board:view_post', args=[self.id])
class EditedPostHistory(TimeStampedModel):
    """Archived copy of a post's title/content associated with an edit.

    NOTE(review): the "pre-edit snapshot" semantics are inferred from the
    class name — confirm against the view that creates these rows.
    """
    post = models.ForeignKey(Post, null=False, default=None)
    # Archived title and content.
    title = models.CharField(default='', max_length=100)
    content = models.TextField(default='')
    # IP address of the editor.
    ip = models.GenericIPAddressField(null=True, default='')
class Attachment(models.Model):
    """Uploaded file linked to a post and/or an archived post edit."""
    post = models.ForeignKey(Post, null=True)
    # Presumably set when the attachment belongs to an archived edit
    # (EditedPostHistory) rather than the live post — confirm with callers.
    editedPostHistory = models.ForeignKey(EditedPostHistory, null=True, default=None)
    attachment = models.FileField(blank=True, null=True)
class Comment(TimeStampedModel):
content = models.TextField(default='')
post = models.ForeignKey(Post, null=True)
is_deleted = models.BooleanField(default=False)
account = models.ForeignKey(Account, null=True)
ip = models.GenericIPAddressField(null=True, default='')<|fim▁end|> | |
<|file_name|>font_chooser_dialog.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0)
// DO NOT EDIT
use Bin;
use Container;
use Dialog;
use FontChooser;
use Widget;
use Window;
use ffi;
use glib::object::Downcast;<|fim▁hole|> pub struct FontChooserDialog(Object<ffi::GtkFontChooserDialog>): Dialog, Window, Bin, Container, Widget, FontChooser;
match fn {
get_type => || ffi::gtk_font_chooser_dialog_get_type(),
}
}
impl FontChooserDialog {
pub fn new<'a, 'b, P: Into<Option<&'a str>>, Q: IsA<Window> + 'b, R: Into<Option<&'b Q>>>(title: P, parent: R) -> FontChooserDialog {
assert_initialized_main_thread!();
let title = title.into();
let title = title.to_glib_none();
let parent = parent.into();
let parent = parent.to_glib_none();
unsafe {
Widget::from_glib_none(ffi::gtk_font_chooser_dialog_new(title.0, parent.0)).downcast_unchecked()
}
}
}<|fim▁end|> | use glib::object::IsA;
use glib::translate::*;
glib_wrapper! { |
<|file_name|>jaxb_gen.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import re
# just running it on code in our repositories, not on externally acquired data.
from xml.dom.minidom import parse
from pants.backend.codegen.targets.jaxb_library import JaxbLibrary
from pants.backend.codegen.tasks.code_gen import CodeGen
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.tasks.nailgun_task import NailgunTask
from pants.base.address import SyntheticAddress
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.java.distribution.distribution import Distribution
from pants.util.dirutil import safe_mkdir
# python documentation says xml parsing is insecure, but this should be safe usage because we're
class JaxbGen(CodeGen, NailgunTask):
"""Generates java source files from jaxb schema (.xsd)."""
_CONFIG_SECTION = 'jaxb-gen'
def __init__(self, *args, **kwargs):
"""
:param context: inherited parameter from Task
:param workdir: inherited parameter from Task
"""
super(JaxbGen, self).__init__(*args, **kwargs)
self.gen_langs = set()
lang = 'java'
if self.context.products.isrequired(lang):
self.gen_langs.add(lang)
self.jar_location = os.path.join(Distribution.cached().home, '..', 'lib', 'tools.jar')
@property
def config_section(self):
return self._CONFIG_SECTION
def _compile_schema(self, args):
classpath = [self.jar_location]
java_main = 'com.sun.tools.internal.xjc.Driver'
return self.runjava(classpath=classpath, main=java_main, args=args, workunit_name='xjc')
def is_forced(self, lang):
return lang in self.gen_langs
def is_gentarget(self, target):
return isinstance(target, JaxbLibrary)
def prepare_gen(self, target):
pass
def genlang(self, lang, targets):
if lang != 'java':
raise TaskError('Unrecognized jaxb language: %s' % lang)
output_dir = os.path.join(self.workdir, 'gen-java')
safe_mkdir(output_dir)
cache = []
for target in targets:
if not isinstance(target, JaxbLibrary):
raise TaskError('Invalid target type "{class_type}" (expected JaxbLibrary)'
.format(class_type=type(target).__name__))
target_files = []
for source in target.sources_relative_to_buildroot():
path_to_xsd = source
output_package = target.package
if output_package is None:
output_package = self._guess_package(source)
output_package = self._correct_package(output_package)
output_directory = output_dir
safe_mkdir(output_directory)
args = ['-p', output_package, '-d', output_directory, path_to_xsd]
result = self._compile_schema(args)
if result != 0:
raise TaskError('xjc ... exited non-zero ({code})'.format(code=result))
target_files.append(self._sources_to_be_generated(target.package, path_to_xsd))
cache.append((target, target_files))
return cache
def genlangs(self):
return {'java': lambda t: t.is_jvm}
def createtarget(self, lang, gentarget, dependees):
predicates = self.genlangs()
languages = predicates.keys()
if not (lang in languages) or not (predicates[lang](gentarget)):
raise TaskError('Invalid language "{lang}" for task {task}'
.format(lang=lang, task=type(self).__name__))
to_generate = []
for source in gentarget.sources_relative_to_buildroot():
to_generate.extend(self._sources_to_be_generated(gentarget.package, source))
spec_path = os.path.join(os.path.relpath(self.workdir, get_buildroot()), 'gen-java')
address = SyntheticAddress(spec_path=spec_path, target_name=gentarget.id)
target = self.context.add_new_target(
address,
JavaLibrary,
derived_from=gentarget,
sources=to_generate,
provides=gentarget.provides,<|fim▁hole|> )
for dependee in dependees:
dependee.inject_dependency(target.address)
return target
@classmethod
def _guess_package(self, path):
"""Used in genlang to actually invoke the compiler with the proper arguments, and in
createtarget (via _sources_to_be_generated) to declare what the generated files will be.
"""
package = ''
slash = path.rfind(os.path.sep)
com = path.rfind(os.path.join('', 'com', ''))
if com < 0 and path.find(os.path.join('com', '')) == 0:
package = path[:slash]
elif com >= 0:
package = path[com:slash]
package = package.replace(os.path.sep, ' ')
package = package.strip().replace(' ', '.')
return package
@classmethod
def _correct_package(self, package):
package = package.replace('/', '.')
package = re.sub(r'^\.+', '', package)
package = re.sub(r'\.+$', '', package)
if re.search(r'\.{2,}', package) is not None:
raise ValueError('Package name cannot have consecutive periods! (%s)' % package)
return package
@classmethod
def _sources_to_be_generated(self, package, path):
"""This method (or some variation of it) seems to be common amongst all implementations of
code-generating tasks.
As far as I can tell, its purpose is to peek into the relevant schema files and figure out what
the final output files will be. This is typically implemented with a variety of hacks,
accompanied by TODO's saying to do it properly in the future (see apache_thrift_gen.py and
protobuf_gen.py). The implementation in this file does it 'properly' using python's xml parser,
though I am making some assumptions about how .xsd's are supposed to be formatted, as that is
not a subject I am particularly informed about.
"""
doc = parse(path)
if package is None:
package = self._guess_package(path)
package = self._correct_package(package)
names = []
for root in doc.childNodes:
if re.match('.*?:schema$', root.nodeName, re.I) is not None:
for element in root.childNodes:
if element.nodeName != '#text' and element.attributes.has_key('name'):
name = element.attributes['name'].nodeValue
if len(name) == 0: continue
# enforce pascal-case class names
name = name[0:1].upper() + name[1:]
names.append(name)
names.append('ObjectFactory')
outdir = package.replace('.', '/')
return [os.path.join(outdir, '%s.java' % name) for name in names]<|fim▁end|> | dependencies=[],
excludes=gentarget.payload.excludes |
<|file_name|>Form_NewProject.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'Form_NewProject.ui'
#
# Created: Mon Sep 9 21:29:21 2013
# by: PyQt4 UI code generator 4.8.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_NewProject(object):
def setupUi(self, NewProject):
NewProject.setObjectName(_fromUtf8("NewProject"))
NewProject.resize(572, 232)
NewProject.setWindowTitle(QtGui.QApplication.translate("NewProject", "New Project", None, QtGui.QApplication.UnicodeUTF8))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/images/logo_icon.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
NewProject.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(NewProject)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.groupBox = QtGui.QGroupBox(NewProject)
self.groupBox.setTitle(QtGui.QApplication.translate("NewProject", "Project settings", None, QtGui.QApplication.UnicodeUTF8))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout = QtGui.QGridLayout(self.groupBox)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.label_2 = QtGui.QLabel(self.groupBox)
self.label_2.setText(QtGui.QApplication.translate("NewProject", "Project name:", None, QtGui.QApplication.UnicodeUTF8))
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout.addWidget(self.label_2, 0, 0, 1, 1)
self.ProjectName = QtGui.QLineEdit(self.groupBox)
self.ProjectName.setText(_fromUtf8(""))
self.ProjectName.setObjectName(_fromUtf8("ProjectName"))
self.gridLayout.addWidget(self.ProjectName, 0, 1, 1, 2)
self.label = QtGui.QLabel(self.groupBox)
self.label.setText(QtGui.QApplication.translate("NewProject", "Project directory:", None, QtGui.QApplication.UnicodeUTF8))
self.label.setObjectName(_fromUtf8("label"))
self.gridLayout.addWidget(self.label, 1, 0, 1, 1)
self.ProjectPath = QtGui.QLineEdit(self.groupBox)
self.ProjectPath.setObjectName(_fromUtf8("ProjectPath"))
self.gridLayout.addWidget(self.ProjectPath, 1, 1, 1, 1)
self.NewProject_browser = QtGui.QToolButton(self.groupBox)
self.NewProject_browser.setText(QtGui.QApplication.translate("NewProject", "...", None, QtGui.QApplication.UnicodeUTF8))
self.NewProject_browser.setToolButtonStyle(QtCore.Qt.ToolButtonTextOnly)
self.NewProject_browser.setObjectName(_fromUtf8("NewProject_browser"))<|fim▁hole|> self.checkBox_WorkdirFiles.setObjectName(_fromUtf8("checkBox_WorkdirFiles"))
self.gridLayout.addWidget(self.checkBox_WorkdirFiles, 2, 0, 1, 3)
self.unbaseImages = QtGui.QCheckBox(self.groupBox)
self.unbaseImages.setText(QtGui.QApplication.translate("NewProject", "Unbase images when saving (required to share a project that uses Qemu)", None, QtGui.QApplication.UnicodeUTF8))
self.unbaseImages.setObjectName(_fromUtf8("unbaseImages"))
self.gridLayout.addWidget(self.unbaseImages, 3, 0, 1, 2)
self.checkBox_SaveCaptures = QtGui.QCheckBox(self.groupBox)
self.checkBox_SaveCaptures.setText(QtGui.QApplication.translate("NewProject", "Save traffic captures", None, QtGui.QApplication.UnicodeUTF8))
self.checkBox_SaveCaptures.setObjectName(_fromUtf8("checkBox_SaveCaptures"))
self.gridLayout.addWidget(self.checkBox_SaveCaptures, 4, 0, 1, 2)
self.verticalLayout.addWidget(self.groupBox)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.pushButtonOpenProject = QtGui.QPushButton(NewProject)
self.pushButtonOpenProject.setText(QtGui.QApplication.translate("NewProject", "&Open a Project", None, QtGui.QApplication.UnicodeUTF8))
self.pushButtonOpenProject.setObjectName(_fromUtf8("pushButtonOpenProject"))
self.horizontalLayout.addWidget(self.pushButtonOpenProject)
self.pushButtonRecentFiles = QtGui.QPushButton(NewProject)
self.pushButtonRecentFiles.setText(QtGui.QApplication.translate("NewProject", "&Recent Files", None, QtGui.QApplication.UnicodeUTF8))
self.pushButtonRecentFiles.setObjectName(_fromUtf8("pushButtonRecentFiles"))
self.horizontalLayout.addWidget(self.pushButtonRecentFiles)
spacerItem = QtGui.QSpacerItem(168, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.buttonBox = QtGui.QDialogButtonBox(NewProject)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.horizontalLayout.addWidget(self.buttonBox)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(NewProject)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), NewProject.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), NewProject.reject)
QtCore.QMetaObject.connectSlotsByName(NewProject)
def retranslateUi(self, NewProject):
pass
import svg_resources_rc<|fim▁end|> | self.gridLayout.addWidget(self.NewProject_browser, 1, 2, 1, 1)
self.checkBox_WorkdirFiles = QtGui.QCheckBox(self.groupBox)
self.checkBox_WorkdirFiles.setText(QtGui.QApplication.translate("NewProject", "Save nvrams including EtherSwitch VLANs and crypto keys", None, QtGui.QApplication.UnicodeUTF8))
self.checkBox_WorkdirFiles.setChecked(False) |
<|file_name|>app.e2e-spec.ts<|end_file_name|><|fim▁begin|>import { browser, element, by } from 'protractor';
describe('QuickStart E2E Tests', function () {
let expectedMsg = 'Hello Ungular';
beforeEach(function () {
browser.get('');
});<|fim▁hole|>
});<|fim▁end|> |
it('should display: ' + expectedMsg, function () {
expect(element(by.css('h1')).getText()).toEqual(expectedMsg);
}); |
<|file_name|>session.go<|end_file_name|><|fim▁begin|>package registry
import (
"bytes"
"crypto/sha256"
// this is required for some certificates
_ "crypto/sha512"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/cookiejar"
"net/url"
"strconv"
"strings"
"time"
"github.com/Sirupsen/logrus"
"github.com/docker/docker/pkg/httputils"
"github.com/docker/docker/pkg/requestdecorator"
"github.com/docker/docker/pkg/tarsum"
)
type Session struct {
authConfig *AuthConfig
reqFactory *requestdecorator.RequestFactory
indexEndpoint *Endpoint
jar *cookiejar.Jar
timeout TimeoutType
}
func NewSession(authConfig *AuthConfig, factory *requestdecorator.RequestFactory, endpoint *Endpoint, timeout bool) (r *Session, err error) {
r = &Session{
authConfig: authConfig,
indexEndpoint: endpoint,
}
if timeout {
r.timeout = ReceiveTimeout
}
r.jar, err = cookiejar.New(nil)
if err != nil {
return nil, err
}
// If we're working with a standalone private registry over HTTPS, send Basic Auth headers
// alongside our requests.
if r.indexEndpoint.VersionString(1) != IndexServerAddress() && r.indexEndpoint.URL.Scheme == "https" {
info, err := r.indexEndpoint.Ping()
if err != nil {
return nil, err
}
if info.Standalone {
logrus.Debugf("Endpoint %s is eligible for private registry. Enabling decorator.", r.indexEndpoint.String())
dec := requestdecorator.NewAuthDecorator(authConfig.Username, authConfig.Password)
factory.AddDecorator(dec)
}
}
r.reqFactory = factory
return r, nil
}
func (r *Session) doRequest(req *http.Request) (*http.Response, *http.Client, error) {
return doRequest(req, r.jar, r.timeout, r.indexEndpoint.IsSecure)
}
// Retrieve the history of a given image from the Registry.
// Return a list of the parent's json (requested image included)
func (r *Session) GetRemoteHistory(imgID, registry string, token []string) ([]string, error) {
req, err := r.reqFactory.NewRequest("GET", registry+"images/"+imgID+"/ancestry", nil)
if err != nil {
return nil, err
}
setTokenAuth(req, token)
res, _, err := r.doRequest(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != 200 {
if res.StatusCode == 401 {
return nil, errLoginRequired
}
return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Server error: %d trying to fetch remote history for %s", res.StatusCode, imgID), res)
}
jsonString, err := ioutil.ReadAll(res.Body)
if err != nil {
return nil, fmt.Errorf("Error while reading the http response: %s", err)
}
logrus.Debugf("Ancestry: %s", jsonString)
history := new([]string)
if err := json.Unmarshal(jsonString, history); err != nil {
return nil, err
}
return *history, nil
}
// Check if an image exists in the Registry
func (r *Session) LookupRemoteImage(imgID, registry string, token []string) error {
req, err := r.reqFactory.NewRequest("GET", registry+"images/"+imgID+"/json", nil)
if err != nil {
return err
}
setTokenAuth(req, token)
res, _, err := r.doRequest(req)
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode != 200 {
return httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d", res.StatusCode), res)
}
return nil
}
// Retrieve an image from the Registry.
func (r *Session) GetRemoteImageJSON(imgID, registry string, token []string) ([]byte, int, error) {
// Get the JSON
req, err := r.reqFactory.NewRequest("GET", registry+"images/"+imgID+"/json", nil)
if err != nil {
return nil, -1, fmt.Errorf("Failed to download json: %s", err)
}
setTokenAuth(req, token)
res, _, err := r.doRequest(req)
if err != nil {
return nil, -1, fmt.Errorf("Failed to download json: %s", err)
}
defer res.Body.Close()
if res.StatusCode != 200 {
return nil, -1, httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d", res.StatusCode), res)
}
// if the size header is not present, then set it to '-1'
imageSize := -1
if hdr := res.Header.Get("X-Docker-Size"); hdr != "" {
imageSize, err = strconv.Atoi(hdr)
if err != nil {
return nil, -1, err
}
}
jsonString, err := ioutil.ReadAll(res.Body)
if err != nil {
return nil, -1, fmt.Errorf("Failed to parse downloaded json: %s (%s)", err, jsonString)
}
return jsonString, imageSize, nil
}
func (r *Session) GetRemoteImageLayer(imgID, registry string, token []string, imgSize int64) (io.ReadCloser, error) {
var (
retries = 5
statusCode = 0
client *http.Client
res *http.Response
imageURL = fmt.Sprintf("%simages/%s/layer", registry, imgID)
)
req, err := r.reqFactory.NewRequest("GET", imageURL, nil)
if err != nil {
return nil, fmt.Errorf("Error while getting from the server: %s\n", err)
}
setTokenAuth(req, token)
for i := 1; i <= retries; i++ {
statusCode = 0
res, client, err = r.doRequest(req)
if err != nil {
logrus.Debugf("Error contacting registry: %s", err)
if res != nil {
if res.Body != nil {
res.Body.Close()
}
statusCode = res.StatusCode
}
if i == retries {
return nil, fmt.Errorf("Server error: Status %d while fetching image layer (%s)",
statusCode, imgID)
}
time.Sleep(time.Duration(i) * 5 * time.Second)
continue
}
break
}
if res.StatusCode != 200 {
res.Body.Close()
return nil, fmt.Errorf("Server error: Status %d while fetching image layer (%s)",
res.StatusCode, imgID)
}
if res.Header.Get("Accept-Ranges") == "bytes" && imgSize > 0 {
logrus.Debugf("server supports resume")
return httputils.ResumableRequestReaderWithInitialResponse(client, req, 5, imgSize, res), nil
}
logrus.Debugf("server doesn't support resume")
return res.Body, nil
}
func (r *Session) GetRemoteTags(registries []string, repository string, token []string) (map[string]string, error) {
if strings.Count(repository, "/") == 0 {
// This will be removed once the Registry supports auto-resolution on
// the "library" namespace
repository = "library/" + repository
}
for _, host := range registries {
endpoint := fmt.Sprintf("%srepositories/%s/tags", host, repository)
req, err := r.reqFactory.NewRequest("GET", endpoint, nil)
if err != nil {
return nil, err
}
setTokenAuth(req, token)
res, _, err := r.doRequest(req)
if err != nil {
return nil, err
}
logrus.Debugf("Got status code %d from %s", res.StatusCode, endpoint)
defer res.Body.Close()
if res.StatusCode == 404 {
return nil, fmt.Errorf("Repository not found")
}
if res.StatusCode != 200 {
continue
}
result := make(map[string]string)
if err := json.NewDecoder(res.Body).Decode(&result); err != nil {
return nil, err
}
return result, nil
}
return nil, fmt.Errorf("Could not reach any registry endpoint")
}
func buildEndpointsList(headers []string, indexEp string) ([]string, error) {
var endpoints []string
parsedURL, err := url.Parse(indexEp)
if err != nil {
return nil, err
}
var urlScheme = parsedURL.Scheme
// The Registry's URL scheme has to match the Index'
for _, ep := range headers {
epList := strings.Split(ep, ",")
for _, epListElement := range epList {
endpoints = append(
endpoints,
fmt.Sprintf("%s://%s/v1/", urlScheme, strings.TrimSpace(epListElement)))
}
}
return endpoints, nil
}
func (r *Session) GetRepositoryData(remote string) (*RepositoryData, error) {
repositoryTarget := fmt.Sprintf("%srepositories/%s/images", r.indexEndpoint.VersionString(1), remote)
logrus.Debugf("[registry] Calling GET %s", repositoryTarget)
req, err := r.reqFactory.NewRequest("GET", repositoryTarget, nil)
if err != nil {
return nil, err
}
if r.authConfig != nil && len(r.authConfig.Username) > 0 {
req.SetBasicAuth(r.authConfig.Username, r.authConfig.Password)
}
req.Header.Set("X-Docker-Token", "true")
res, _, err := r.doRequest(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode == 401 {
return nil, errLoginRequired
}
// TODO: Right now we're ignoring checksums in the response body.
// In the future, we need to use them to check image validity.
if res.StatusCode == 404 {
return nil, httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code: %d", res.StatusCode), res)
} else if res.StatusCode != 200 {
errBody, err := ioutil.ReadAll(res.Body)
if err != nil {
logrus.Debugf("Error reading response body: %s", err)
}
return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to pull repository %s: %q", res.StatusCode, remote, errBody), res)
}
var tokens []string
if res.Header.Get("X-Docker-Token") != "" {
tokens = res.Header["X-Docker-Token"]
}
var endpoints []string
if res.Header.Get("X-Docker-Endpoints") != "" {
endpoints, err = buildEndpointsList(res.Header["X-Docker-Endpoints"], r.indexEndpoint.VersionString(1))
if err != nil {
return nil, err
}
} else {
// Assume the endpoint is on the same host
endpoints = append(endpoints, fmt.Sprintf("%s://%s/v1/", r.indexEndpoint.URL.Scheme, req.URL.Host))
}
remoteChecksums := []*ImgData{}
if err := json.NewDecoder(res.Body).Decode(&remoteChecksums); err != nil {
return nil, err
}
// Forge a better object from the retrieved data
imgsData := make(map[string]*ImgData)
for _, elem := range remoteChecksums {
imgsData[elem.ID] = elem
}
return &RepositoryData{
ImgList: imgsData,
Endpoints: endpoints,
Tokens: tokens,
}, nil
}
func (r *Session) PushImageChecksumRegistry(imgData *ImgData, registry string, token []string) error {
logrus.Debugf("[registry] Calling PUT %s", registry+"images/"+imgData.ID+"/checksum")
req, err := r.reqFactory.NewRequest("PUT", registry+"images/"+imgData.ID+"/checksum", nil)
if err != nil {
return err
}
setTokenAuth(req, token)
req.Header.Set("X-Docker-Checksum", imgData.Checksum)
req.Header.Set("X-Docker-Checksum-Payload", imgData.ChecksumPayload)
res, _, err := r.doRequest(req)
if err != nil {
return fmt.Errorf("Failed to upload metadata: %s", err)
}
defer res.Body.Close()
if len(res.Cookies()) > 0 {
r.jar.SetCookies(req.URL, res.Cookies())
}
if res.StatusCode != 200 {
errBody, err := ioutil.ReadAll(res.Body)
if err != nil {
return fmt.Errorf("HTTP code %d while uploading metadata and error when trying to parse response body: %s", res.StatusCode, err)
}
var jsonBody map[string]string
if err := json.Unmarshal(errBody, &jsonBody); err != nil {
errBody = []byte(err.Error())
} else if jsonBody["error"] == "Image already exists" {
return ErrAlreadyExists
}
return fmt.Errorf("HTTP code %d while uploading metadata: %q", res.StatusCode, errBody)
}
return nil
}
// Push a local image to the registry
func (r *Session) PushImageJSONRegistry(imgData *ImgData, jsonRaw []byte, registry string, token []string) error {
logrus.Debugf("[registry] Calling PUT %s", registry+"images/"+imgData.ID+"/json")
req, err := r.reqFactory.NewRequest("PUT", registry+"images/"+imgData.ID+"/json", bytes.NewReader(jsonRaw))
if err != nil {
return err
}
req.Header.Add("Content-type", "application/json")
setTokenAuth(req, token)
res, _, err := r.doRequest(req)
if err != nil {
return fmt.Errorf("Failed to upload metadata: %s", err)
}
defer res.Body.Close()
if res.StatusCode == 401 && strings.HasPrefix(registry, "http://") {
return httputils.NewHTTPRequestError("HTTP code 401, Docker will not send auth headers over HTTP.", res)
}
if res.StatusCode != 200 {
errBody, err := ioutil.ReadAll(res.Body)
if err != nil {
return httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d while uploading metadata and error when trying to parse response body: %s", res.StatusCode, err), res)
}
var jsonBody map[string]string
if err := json.Unmarshal(errBody, &jsonBody); err != nil {
errBody = []byte(err.Error())
} else if jsonBody["error"] == "Image already exists" {
return ErrAlreadyExists
}
return httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d while uploading metadata: %q", res.StatusCode, errBody), res)
}
return nil
}
func (r *Session) PushImageLayerRegistry(imgID string, layer io.Reader, registry string, token []string, jsonRaw []byte) (checksum string, checksumPayload string, err error) {
logrus.Debugf("[registry] Calling PUT %s", registry+"images/"+imgID+"/layer")
tarsumLayer, err := tarsum.NewTarSum(layer, false, tarsum.Version0)
if err != nil {
return "", "", err
}
h := sha256.New()
h.Write(jsonRaw)
h.Write([]byte{'\n'})
checksumLayer := io.TeeReader(tarsumLayer, h)
req, err := r.reqFactory.NewRequest("PUT", registry+"images/"+imgID+"/layer", checksumLayer)
if err != nil {
return "", "", err
}
req.Header.Add("Content-Type", "application/octet-stream")
req.ContentLength = -1
req.TransferEncoding = []string{"chunked"}
setTokenAuth(req, token)
res, _, err := r.doRequest(req)
if err != nil {
return "", "", fmt.Errorf("Failed to upload layer: %s", err)
}
if rc, ok := layer.(io.Closer); ok {
if err := rc.Close(); err != nil {
return "", "", err
}
}
defer res.Body.Close()
if res.StatusCode != 200 {
errBody, err := ioutil.ReadAll(res.Body)
if err != nil {
return "", "", httputils.NewHTTPRequestError(fmt.Sprintf("HTTP code %d while uploading metadata and error when trying to parse response body: %s", res.StatusCode, err), res)
}
return "", "", httputils.NewHTTPRequestError(fmt.Sprintf("Received HTTP code %d while uploading layer: %q", res.StatusCode, errBody), res)
}
checksumPayload = "sha256:" + hex.EncodeToString(h.Sum(nil))
return tarsumLayer.Sum(jsonRaw), checksumPayload, nil
}
// push a tag on the registry.
// Remote has the format '<user>/<repo>
func (r *Session) PushRegistryTag(remote, revision, tag, registry string, token []string) error {
// "jsonify" the string
revision = "\"" + revision + "\""
path := fmt.Sprintf("repositories/%s/tags/%s", remote, tag)
req, err := r.reqFactory.NewRequest("PUT", registry+path, strings.NewReader(revision))
if err != nil {
return err
}
req.Header.Add("Content-type", "application/json")
setTokenAuth(req, token)
req.ContentLength = int64(len(revision))
res, _, err := r.doRequest(req)
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode != 200 && res.StatusCode != 201 {
return httputils.NewHTTPRequestError(fmt.Sprintf("Internal server error: %d trying to push tag %s on %s", res.StatusCode, tag, remote), res)
}
return nil
}
func (r *Session) PushImageJSONIndex(remote string, imgList []*ImgData, validate bool, regs []string) (*RepositoryData, error) {
cleanImgList := []*ImgData{}
if validate {
for _, elem := range imgList {
if elem.Checksum != "" {
cleanImgList = append(cleanImgList, elem)
}
}
} else {
cleanImgList = imgList
}
imgListJSON, err := json.Marshal(cleanImgList)
if err != nil {
return nil, err
}
var suffix string
if validate {
suffix = "images"
}
u := fmt.Sprintf("%srepositories/%s/%s", r.indexEndpoint.VersionString(1), remote, suffix)
logrus.Debugf("[registry] PUT %s", u)
logrus.Debugf("Image list pushed to index:\n%s", imgListJSON)
headers := map[string][]string{
"Content-type": {"application/json"},
"X-Docker-Token": {"true"},
}
if validate {
headers["X-Docker-Endpoints"] = regs
}
// Redirect if necessary
var res *http.Response
for {
if res, err = r.putImageRequest(u, headers, imgListJSON); err != nil {
return nil, err
}
if !shouldRedirect(res) {<|fim▁hole|> u = res.Header.Get("Location")
logrus.Debugf("Redirected to %s", u)
}
defer res.Body.Close()
if res.StatusCode == 401 {
return nil, errLoginRequired
}
var tokens, endpoints []string
if !validate {
if res.StatusCode != 200 && res.StatusCode != 201 {
errBody, err := ioutil.ReadAll(res.Body)
if err != nil {
logrus.Debugf("Error reading response body: %s", err)
}
return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to push repository %s: %q", res.StatusCode, remote, errBody), res)
}
if res.Header.Get("X-Docker-Token") == "" {
return nil, fmt.Errorf("Index response didn't contain an access token")
}
tokens = res.Header["X-Docker-Token"]
logrus.Debugf("Auth token: %v", tokens)
if res.Header.Get("X-Docker-Endpoints") == "" {
return nil, fmt.Errorf("Index response didn't contain any endpoints")
}
endpoints, err = buildEndpointsList(res.Header["X-Docker-Endpoints"], r.indexEndpoint.VersionString(1))
if err != nil {
return nil, err
}
}
if validate {
if res.StatusCode != 204 {
errBody, err := ioutil.ReadAll(res.Body)
if err != nil {
logrus.Debugf("Error reading response body: %s", err)
}
return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Error: Status %d trying to push checksums %s: %q", res.StatusCode, remote, errBody), res)
}
}
return &RepositoryData{
Tokens: tokens,
Endpoints: endpoints,
}, nil
}
func (r *Session) putImageRequest(u string, headers map[string][]string, body []byte) (*http.Response, error) {
req, err := r.reqFactory.NewRequest("PUT", u, bytes.NewReader(body))
if err != nil {
return nil, err
}
req.SetBasicAuth(r.authConfig.Username, r.authConfig.Password)
req.ContentLength = int64(len(body))
for k, v := range headers {
req.Header[k] = v
}
response, _, err := r.doRequest(req)
if err != nil {
return nil, err
}
return response, nil
}
func shouldRedirect(response *http.Response) bool {
return response.StatusCode >= 300 && response.StatusCode < 400
}
func (r *Session) SearchRepositories(term string) (*SearchResults, error) {
logrus.Debugf("Index server: %s", r.indexEndpoint)
u := r.indexEndpoint.VersionString(1) + "search?q=" + url.QueryEscape(term)
req, err := r.reqFactory.NewRequest("GET", u, nil)
if err != nil {
return nil, err
}
if r.authConfig != nil && len(r.authConfig.Username) > 0 {
req.SetBasicAuth(r.authConfig.Username, r.authConfig.Password)
}
req.Header.Set("X-Docker-Token", "true")
res, _, err := r.doRequest(req)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode != 200 {
return nil, httputils.NewHTTPRequestError(fmt.Sprintf("Unexpected status code %d", res.StatusCode), res)
}
result := new(SearchResults)
err = json.NewDecoder(res.Body).Decode(result)
return result, err
}
func (r *Session) GetAuthConfig(withPasswd bool) *AuthConfig {
password := ""
if withPasswd {
password = r.authConfig.Password
}
return &AuthConfig{
Username: r.authConfig.Username,
Password: password,
Email: r.authConfig.Email,
}
}
func setTokenAuth(req *http.Request, token []string) {
if req.Header.Get("Authorization") == "" { // Don't override
req.Header.Set("Authorization", "Token "+strings.Join(token, ","))
}
}<|fim▁end|> | break
}
res.Body.Close() |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import patterns, url
from snippets.base import views
urlpatterns = patterns('',
url(r'^$', views.index, name='base.index'),
url(r'^(?P<startpage_version>[^/]+)/(?P<name>[^/]+)/(?P<version>[^/]+)/'
'(?P<appbuildid>[^/]+)/(?P<build_target>[^/]+)/(?P<locale>[^/]+)/'<|fim▁hole|> name='view_snippets'),
url(r'^admin/base/snippet/preview/', views.preview_empty,
name='base.admin.preview_empty'),
url(r'^admin/base/snippet/(\d+)/preview/', views.preview_snippet,
name='base.admin.preview_snippet'),
url(r'^admin/base/snippettemplate/(\d+)/variables/',
views.admin_template_json, name='base.admin.template_json'),
)<|fim▁end|> | '(?P<channel>[^/]+)/(?P<os_version>[^/]+)/(?P<distribution>[^/]+)/'
'(?P<distribution_version>[^/]+)/$', views.fetch_snippets, |
<|file_name|>atari_wrappers.py<|end_file_name|><|fim▁begin|>from collections import deque
import gym
from gym import spaces
import numpy as np
from ray.rllib.utils.images import rgb2gray, resize
def is_atari(env):
if (
hasattr(env.observation_space, "shape")
and env.observation_space.shape is not None
and len(env.observation_space.shape) <= 2
):
return False
return hasattr(env, "unwrapped") and hasattr(env.unwrapped, "ale")
def get_wrapper_by_cls(env, cls):
"""Returns the gym env wrapper of the given class, or None."""
currentenv = env
while True:
if isinstance(currentenv, cls):
return currentenv
elif isinstance(currentenv, gym.Wrapper):
currentenv = currentenv.env
else:
return None
class MonitorEnv(gym.Wrapper):
def __init__(self, env=None):
"""Record episodes stats prior to EpisodicLifeEnv, etc."""
gym.Wrapper.__init__(self, env)
self._current_reward = None
self._num_steps = None
self._total_steps = None
self._episode_rewards = []
self._episode_lengths = []
self._num_episodes = 0
self._num_returned = 0
def reset(self, **kwargs):
obs = self.env.reset(**kwargs)
if self._total_steps is None:
self._total_steps = sum(self._episode_lengths)
if self._current_reward is not None:
self._episode_rewards.append(self._current_reward)
self._episode_lengths.append(self._num_steps)
self._num_episodes += 1
self._current_reward = 0
self._num_steps = 0
return obs
def step(self, action):
obs, rew, done, info = self.env.step(action)
self._current_reward += rew
self._num_steps += 1
self._total_steps += 1
return (obs, rew, done, info)
def get_episode_rewards(self):
return self._episode_rewards
def get_episode_lengths(self):
return self._episode_lengths
def get_total_steps(self):
return self._total_steps
def next_episode_results(self):
for i in range(self._num_returned, len(self._episode_rewards)):
yield (self._episode_rewards[i], self._episode_lengths[i])
self._num_returned = len(self._episode_rewards)
class NoopResetEnv(gym.Wrapper):
    """Randomize initial state by taking a random number of no-op actions
    (assumed to be action 0) right after every reset."""

    def __init__(self, env, noop_max=30):
        gym.Wrapper.__init__(self, env)
        self.noop_max = noop_max
        self.override_num_noops = None
        self.noop_action = 0
        assert env.unwrapped.get_action_meanings()[0] == "NOOP"

    def reset(self, **kwargs):
        """Reset, then step with no-ops for a count drawn from [1, noop_max]."""
        self.env.reset(**kwargs)
        noops = self.override_num_noops
        if noops is None:
            noops = self.unwrapped.np_random.randint(1, self.noop_max + 1)
        assert noops > 0
        obs = None
        for _ in range(noops):
            obs, _, done, _ = self.env.step(self.noop_action)
            if done:
                # Episode ended during the no-op burn-in: start over.
                obs = self.env.reset(**kwargs)
        return obs

    def step(self, ac):
        return self.env.step(ac)
class ClipRewardEnv(gym.RewardWrapper):
    """Clip every reward to its sign: {+1, 0, -1}."""

    def __init__(self, env):
        gym.RewardWrapper.__init__(self, env)

    def reward(self, reward):
        """Bin the raw reward by sign."""
        return np.sign(reward)
class FireResetEnv(gym.Wrapper):
    """Press FIRE (plus one follow-up action) after every reset.

    Some games remain idle until the FIRE action has been taken.
    """

    def __init__(self, env):
        gym.Wrapper.__init__(self, env)
        meanings = env.unwrapped.get_action_meanings()
        assert meanings[1] == "FIRE"
        assert len(meanings) >= 3

    def reset(self, **kwargs):
        self.env.reset(**kwargs)
        for action in (1, 2):
            obs, _, done, _ = self.env.step(action)
            if done:
                # Startup action ended the episode; reset and carry on.
                self.env.reset(**kwargs)
        # Return the observation from the last startup action, matching the
        # original control flow.
        return obs

    def step(self, ac):
        return self.env.step(ac)
class EpisodicLifeEnv(gym.Wrapper):
    def __init__(self, env):
        """Make end-of-life == end-of-episode, but only reset on true game over.
        Done by DeepMind for the DQN and co. since it helps value estimation.
        """
        gym.Wrapper.__init__(self, env)
        # Life count observed after the previous step.
        self.lives = 0
        # True when the last `done` came from a real game over (vs. lost life).
        self.was_real_done = True

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.was_real_done = done
        # check current lives, make loss of life terminal,
        # then update lives to handle bonus lives
        lives = self.env.unwrapped.ale.lives()
        if lives < self.lives and lives > 0:
            # for Qbert sometimes we stay in lives == 0 condtion for a few fr
            # so its important to keep lives > 0, so that we only reset once
            # the environment advertises done.
            done = True
        self.lives = lives
        return obs, reward, done, info

    def reset(self, **kwargs):
        """Reset only when lives are exhausted.
        This way all states are still reachable even though lives are episodic,
        and the learner need not know about any of this behind-the-scenes.
        """
        if self.was_real_done:
            obs = self.env.reset(**kwargs)
        else:
            # no-op step to advance from terminal/lost life state
            obs, _, _, _ = self.env.step(0)
        self.lives = self.env.unwrapped.ale.lives()
        return obs
class MaxAndSkipEnv(gym.Wrapper):
    def __init__(self, env, skip=4):
        """Return only every `skip`-th frame"""
        gym.Wrapper.__init__(self, env)
        # most recent raw observations (for max pooling across time steps)
        self._obs_buffer = np.zeros((2,) + env.observation_space.shape, dtype=np.uint8)
        self._skip = skip

    def step(self, action):
        """Repeat action, sum reward, and max over last observations."""
        total_reward = 0.0
        done = None
        for i in range(self._skip):
            obs, reward, done, info = self.env.step(action)
            # Keep only the last two raw frames; max-pooling them removes
            # the sprite flicker Atari games exhibit on alternating frames.
            if i == self._skip - 2:
                self._obs_buffer[0] = obs
            if i == self._skip - 1:
                self._obs_buffer[1] = obs
            total_reward += reward
            if done:
                break
        # Note that the observation on the done=True frame
        # doesn't matter
        max_frame = self._obs_buffer.max(axis=0)
        return max_frame, total_reward, done, info

    def reset(self, **kwargs):
        return self.env.reset(**kwargs)
class WarpFrame(gym.ObservationWrapper):
    """Grayscale and resize incoming frames to shape (dim, dim, 1)."""

    def __init__(self, env, dim):
        gym.ObservationWrapper.__init__(self, env)
        self.width = dim
        self.height = dim
        self.observation_space = spaces.Box(
            low=0, high=255, shape=(dim, dim, 1), dtype=np.uint8
        )

    def observation(self, frame):
        gray = rgb2gray(frame)
        warped = resize(gray, height=self.height, width=self.width)
        # Re-attach a singleton channel axis.
        return warped[:, :, None]
# TODO: (sven) Deprecated class. Remove once traj. view is the norm.
class FrameStack(gym.Wrapper):
    """Present the ``k`` most recent frames stacked along the channel axis."""

    def __init__(self, env, k):
        gym.Wrapper.__init__(self, env)
        self.k = k
        self.frames = deque([], maxlen=k)
        h, w, c = env.observation_space.shape
        self.observation_space = spaces.Box(
            low=0,
            high=255,
            shape=(h, w, c * k),
            dtype=env.observation_space.dtype,
        )

    def reset(self):
        obs = self.env.reset()
        # Seed the buffer by repeating the first observation k times.
        for _ in range(self.k):
            self.frames.append(obs)
        return self._get_ob()

    def step(self, action):
        obs, reward, done, info = self.env.step(action)
        self.frames.append(obs)
        return self._get_ob(), reward, done, info

    def _get_ob(self):
        assert len(self.frames) == self.k
        return np.concatenate(self.frames, axis=2)
class FrameStackTrajectoryView(gym.ObservationWrapper):
    """Drop the trailing singleton channel axis; frame stacking itself is
    left to the Trajectory View API."""

    def __init__(self, env):
        # NOTE: mirrors the original code, which initializes gym.Wrapper
        # directly rather than the ObservationWrapper base.
        gym.Wrapper.__init__(self, env)
        h, w, c = env.observation_space.shape
        assert c == 1
        self.observation_space = spaces.Box(
            low=0, high=255, shape=(h, w), dtype=env.observation_space.dtype
        )

    def observation(self, observation):
        return np.squeeze(observation, axis=-1)
class ScaledFloatFrame(gym.ObservationWrapper):
    """Rescale uint8 observations to float32 values in [0, 1].

    Warning: this defeats the uint8 memory optimization; only use it
    together with small replay buffers.
    """

    def __init__(self, env):
        gym.ObservationWrapper.__init__(self, env)
        self.observation_space = gym.spaces.Box(
            low=0, high=1, shape=env.observation_space.shape, dtype=np.float32
        )

    def observation(self, observation):
        return np.array(observation).astype(np.float32) / 255.0
def wrap_deepmind(env, dim=84, framestack=True):
    """Configure environment for DeepMind-style Atari.

    Note that we assume reward clipping is done outside the wrapper.

    Args:
        env (EnvType): The env object to wrap.
        dim (int): Dimension to resize observations to (dim x dim).
        framestack (bool): Whether to framestack observations.

    Returns:
        The fully wrapped env.
    """
    # Stats first, so MonitorEnv sees true episode boundaries.
    env = MonitorEnv(env)
    env = NoopResetEnv(env, noop_max=30)
    if env.spec is not None and "NoFrameskip" in env.spec.id:
        env = MaxAndSkipEnv(env, skip=4)
    env = EpisodicLifeEnv(env)
    if "FIRE" in env.unwrapped.get_action_meanings():
        env = FireResetEnv(env)
    env = WarpFrame(env, dim)
    # env = ScaledFloatFrame(env)  # TODO: use for dqn?
    # env = ClipRewardEnv(env)  # reward clipping is handled by policy eval
    # 4x image framestacking.
    if framestack is True:
        env = FrameStack(env, 4)
    return env
// Emits the pthread synchronization-primitive types whose required
// alignment depends on the target; each target module expands this macro
// after defining the matching `__SIZEOF_PTHREAD_*` constants.
macro_rules! expand_align {
    () => {
        s! {
            // Mutex attributes: 4-byte aligned on 32-bit targets and on
            // x86_64, 8-byte aligned elsewhere.
            #[cfg_attr(
                any(
                    target_pointer_width = "32",
                    target_arch = "x86_64"
                ),
                repr(align(4)))]
            #[cfg_attr(
                not(any(
                    target_pointer_width = "32",
                    target_arch = "x86_64"
                )),
                repr(align(8)))]
            pub struct pthread_mutexattr_t {
                size: [u8; ::__SIZEOF_PTHREAD_MUTEXATTR_T],
            }

            // Rwlock attributes: word-aligned by pointer width.
            #[cfg_attr(target_pointer_width = "32",
                       repr(align(4)))]
            #[cfg_attr(target_pointer_width = "64",
                       repr(align(8)))]
            pub struct pthread_rwlockattr_t {
                size: [u8; ::__SIZEOF_PTHREAD_RWLOCKATTR_T],
            }

            // Condvar attributes are always 4-byte aligned.
            #[repr(align(4))]
            pub struct pthread_condattr_t {
                size: [u8; ::__SIZEOF_PTHREAD_CONDATTR_T],
            }
        }

        // These types get manual trait impls below (see the cfg_if block)
        // instead of derived ones.
        s_no_extra_traits! {
            #[cfg_attr(all(target_pointer_width = "32",
                           any(target_arch = "arm",
                               target_arch = "x86_64")),
                       repr(align(4)))]
            #[cfg_attr(any(target_pointer_width = "64",
                           not(any(target_arch = "arm",
                                   target_arch = "x86_64"))),
                       repr(align(8)))]
            pub struct pthread_mutex_t {
                size: [u8; ::__SIZEOF_PTHREAD_MUTEX_T],
            }

            #[cfg_attr(all(target_pointer_width = "32",
                           any(target_arch = "arm",
                               target_arch = "x86_64")),
                       repr(align(4)))]
            #[cfg_attr(any(target_pointer_width = "64",
                           not(any(target_arch = "arm",
                                   target_arch = "x86_64"))),
                       repr(align(8)))]
            pub struct pthread_rwlock_t {
                size: [u8; ::__SIZEOF_PTHREAD_RWLOCK_T],
            }

            // NOTE(review): both the pointer-width pair and the x86 pair of
            // repr(align) attributes are present; rustc honors the largest
            // requested alignment — confirm this matches the C ABI on every
            // supported target.
            #[cfg_attr(target_pointer_width = "32",
                       repr(align(4)))]
            #[cfg_attr(target_pointer_width = "64",
                       repr(align(8)))]
            #[cfg_attr(target_arch = "x86",
                       repr(align(4)))]
            #[cfg_attr(not(target_arch = "x86"),
                       repr(align(8)))]
            pub struct pthread_cond_t {
                size: [u8; ::__SIZEOF_PTHREAD_COND_T],
            }
        }

        cfg_if! {
            if #[cfg(feature = "extra_traits")] {
                // Manual element-wise equality: the byte array is the whole
                // state, and derived impls were unavailable for large arrays.
                impl PartialEq for pthread_cond_t {
                    fn eq(&self, other: &pthread_cond_t) -> bool {
                        self.size
                            .iter()
                            .zip(other.size.iter())
                            .all(|(a,b)| a == b)
                    }
                }
                impl Eq for pthread_cond_t {}
                impl ::fmt::Debug for pthread_cond_t {
                    fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
                        f.debug_struct("pthread_cond_t")
                            // FIXME: .field("size", &self.size)
                            .finish()
                    }
                }
                impl ::hash::Hash for pthread_cond_t {
                    fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
                        self.size.hash(state);
                    }
                }

                impl PartialEq for pthread_mutex_t {
                    fn eq(&self, other: &pthread_mutex_t) -> bool {
                        self.size
                            .iter()
                            .zip(other.size.iter())
                            .all(|(a,b)| a == b)
                    }
                }
                impl Eq for pthread_mutex_t {}
                impl ::fmt::Debug for pthread_mutex_t {
                    fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
                        f.debug_struct("pthread_mutex_t")
                            // FIXME: .field("size", &self.size)
                            .finish()
                    }
                }
                impl ::hash::Hash for pthread_mutex_t {
                    fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
                        self.size.hash(state);
                    }
                }

                impl PartialEq for pthread_rwlock_t {
                    fn eq(&self, other: &pthread_rwlock_t) -> bool {
                        self.size
                            .iter()
                            .zip(other.size.iter())
                            .all(|(a,b)| a == b)
                    }
                }
                impl Eq for pthread_rwlock_t {}
                impl ::fmt::Debug for pthread_rwlock_t {
                    fn fmt(&self, f: &mut ::fmt::Formatter) -> ::fmt::Result {
                        f.debug_struct("pthread_rwlock_t")
                            // FIXME: .field("size", &self.size)
                            .finish()
                    }
                }
                impl ::hash::Hash for pthread_rwlock_t {
                    fn hash<H: ::hash::Hasher>(&self, state: &mut H) {
                        self.size.hash(state);
                    }
                }
            }
        }
    }
}
from numpy import *
from matplotlib.pyplot import *
import scipy.constants as sc
import copy
import scipy.integrate as integ
import numpy as np  # required: np.linalg is referenced below
# test sun/earth with hw5(1.989e30,5.972e24,149.6e6,0.0167,1000)
def hw5(m1, m2, a, e, tmax, tstep=0.001, tplot=0.025, method='leapfrog'):
    """Integrate and plot a two-body orbit in the center-of-mass frame.

    Args:
        m1, m2: masses of the two bodies (kg).
        a: semi-major axis of the relative orbit (m).
        e: orbital eccentricity.
        tmax: integration length, in units of the orbital period.
        tstep: integrator step, as a fraction of the period.
        tplot: plotting interval, as a fraction of the period.
        method: 'leapfrog' (hand-rolled drift/kick) or 'odeint' (scipy).

    Fixes over the original: `np` is now actually imported at module level
    (the code referenced np.linalg without it), and the plot stride is an
    int (a float stride raises TypeError when slicing).
    """
    if method != 'leapfrog' and method != 'odeint':
        print("That's not a method")
        return
    # Orbital period from Kepler's third law.
    period = sqrt((4*(pi**2)*(a**3)) / (sc.G*(m1 + m2)))
    dt = period*tstep
    # Initial conditions: both bodies on the x-axis, velocities
    # perpendicular to it, momenta balanced (center-of-mass frame).
    # State vector layout: [x1, y1, vx1, vy1, x2, y2, vx2, vy2].
    q = m1 / m2
    r0 = (1-e)*a/(1+q)
    v0 = (1/(1+q))*sqrt((1+e)/(1-e))*sqrt(sc.G*(m1+m2)/a)
    rv = array([r0, 0, 0, v0, -q*r0, 0, 0, -q*v0])
    # Set up an equal-aspect figure spanning +/- 2a.
    figure(1)
    gca().set_aspect('equal')
    xlim([-2*a, 2*a])
    ylim([-2*a, 2*a])
    rv_list = []
    if method == 'leapfrog':
        timeCounter = 0
        frameCounter = 0
        while timeCounter < tmax:
            # Record a snapshot every tplot units of scaled time.
            if frameCounter >= tplot:
                frameCounter = 0
                rv_list.append(copy.deepcopy(rv))
            # Drift: advance positions with current velocities.
            rv[0] = rv[0] + rv[2]*dt
            rv[1] = rv[1] + rv[3]*dt
            rv[4] = rv[4] + rv[6]*dt
            rv[5] = rv[5] + rv[7]*dt
            # Gravitational force on body 1 from body 2.
            r = array([rv[0] - rv[4], rv[1] - rv[5]])
            force = ((sc.G*m1*m2)/(np.linalg.norm(r)**2))*(r/np.linalg.norm(r))
            # Kick: update velocities (equal and opposite).
            rv[2] = rv[2] - (force[0]/m1)*dt
            rv[3] = rv[3] - (force[1]/m1)*dt
            rv[6] = rv[6] + (force[0]/m2)*dt
            rv[7] = rv[7] + (force[1]/m2)*dt
            timeCounter += tstep
            frameCounter += tstep
        # Always record the final state.
        rv_list.append(copy.deepcopy(rv))
        rv_list_plot = rv_list
    else:
        # scipy integrates the same derivative on a fixed time grid.
        rv_list = integ.odeint(deriv, rv, arange(0, tmax*period, dt), (m1, m2))
        # Computed at tstep resolution but plotted every tplot; the slice
        # stride must be an int (was a float -> TypeError).
        t_interval = int(round(tplot / tstep))
        rv_list_plot = rv_list[::t_interval]
    # Plot body 1 in blue, body 2 in green.
    for i in range(len(rv_list_plot)):
        plot(rv_list_plot[i][0], rv_list_plot[i][1], 'bo')
        plot(rv_list_plot[i][4], rv_list_plot[i][5], 'go')
    draw()
def deriv(rv, dt, m1, m2):
    """Time derivative of the two-body state vector, for scipy's odeint.

    Args:
        rv: state [x1, y1, vx1, vy1, x2, y2, vx2, vy2].
        dt: current time (unused, but required by odeint's signature).
        m1, m2: body masses (kg).

    Returns:
        d(rv)/dt as a length-8 array.

    Note: relies on `import numpy as np` being present at module level
    (the original file referenced np.linalg without importing it).
    """
    rv_copy = zeros(8)
    # Position derivatives are simply the current velocities.
    rv_copy[0] = rv[2]
    rv_copy[1] = rv[3]
    rv_copy[4] = rv[6]
    rv_copy[5] = rv[7]
    # Velocity derivatives from mutual gravity (Newton's third-law pair).
    r = array([rv[0] - rv[4], rv[1] - rv[5]])
    force = ((sc.G*m1*m2)/(np.linalg.norm(r)**2))*(r/np.linalg.norm(r))
    rv_copy[2] = - (force[0]/m1)
    rv_copy[3] = - (force[1]/m1)
    rv_copy[6] = + (force[0]/m2)
    rv_copy[7] = + (force[1]/m2)
    return rv_copy
frameCounter = 0
while timeCounter < tmax:
# plot positions if tplot time has passed |
if __name__ == "__main__":
    # Standard Django management entry point: point the framework at this
    # project's settings module before any Django import happens.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ownmusicweb.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        # Django itself imports fine, so the original failure came from
        # somewhere else: re-raise it unchanged.
        raise
    # Dispatch to the requested management command (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
import os
import sys
|
<|file_name|>allowSyntheticDefaultImports7.js<|end_file_name|><|fim▁begin|>//// [tests/cases/compiler/allowSyntheticDefaultImports7.ts] ////
//// [b.d.ts]
export function foo();
export function bar();
//// [a.ts]
import { default as Foo } from "./b";<|fim▁hole|>Foo.foo();
//// [a.js]
System.register(["./b"], function (exports_1, context_1) {
    "use strict";
    var __moduleName = context_1 && context_1.id;
    var b_1;
    return {
        setters: [
            // Invoked when the "./b" dependency resolves; stores its namespace.
            function (b_1_1) {
                b_1 = b_1_1;
            }
        ],
        execute: function () {
            // Synthetic default import: member access is emitted against the
            // module's "default" property.
            b_1["default"].bar();
            b_1["default"].foo();
        }
    };
});
/// Returns the Japanese greeting "konnichiwa" as an owned `String`.
pub fn hello() -> String {
    String::from("こんにちは")
}
<|file_name|>supplier_feature_gen.py<|end_file_name|><|fim▁begin|>import pandas as pd
from datetime import date, timedelta
import time
import numpy as np
import re
import psycopg2
import ConfigParser
import argparse
from sqlalchemy import create_engine
import random
import sql
# Command-line interface. Fixes: '-dist' and '-dom' previously registered
# the same short flag twice (no long form, inconsistent with every other
# option), and numeric options arrived as strings; they are now ints.
parser = argparse.ArgumentParser()
parser.add_argument('-cf', '--contract_file', help='Contract data file')
parser.add_argument('-if', '--invest_file', help='Labelled data file')
parser.add_argument('-a', '--amounts', action='store_true', default=False,
                    help='Calculate aggregated amount features')
parser.add_argument('-dist', '--dist', action='store_true', default=True,
                    help='Calculate distribution features')
parser.add_argument('-dom', '--dom', action='store_true', default=False,
                    help='Calculate dominance features')
parser.add_argument('-y', '--num_years', type=int, default=0,
                    help='Time periods in years')
parser.add_argument('-cat', '--categ', default=['major_sector'], nargs='*',
                    help='Categoricals to use')
parser.add_argument('-id', '--table_id', default=time.strftime("%Y%m%d"),
                    help='ID for SQL tables')
parser.add_argument('-lim', '--contract_num_lim', type=int, default=5000,
                    help='Maximum number of rows to use')
args = parser.parse_args()
def connect():
    """Open a psycopg2 connection to the world_bank database.

    The password is read from the [SQL] section of the local ``config``
    file. (The original read the config file twice verbatim; the duplicate
    block is removed.)
    """
    config = ConfigParser.RawConfigParser()
    config.read('config')
    password = config.get('SQL', 'password')
    # Open connection with the database.
    con = psycopg2.connect(host="localhost", user='dssg', password=password,
                           dbname="world_bank")
    return con
def snake_case(name):
    """Normalize an entity name to snake_case.

    Lowercases, strips punctuation, removes common legal-entity suffixes
    (llc, ltd, co, ...), and joins the remaining words with underscores.
    Non-string input (e.g. NaN supplier names) yields the empty string.

    Fixes: duplicate entries in the suffix list removed; the bare
    ``except:`` is narrowed to AttributeError so real errors propagate.
    """
    remove_list = ['llc', 'ltd', 'co', 'corporation', 'srl', 'nv', 'limited', 'pvtltd']
    remove = '|'.join(remove_list)
    regex = re.compile(r'\b(' + remove + r')\b', flags=re.IGNORECASE)
    try:
        s1 = name.lower()
        s1 = s1.replace('.', '')
        s1 = regex.sub("", s1)
        s1 = s1.strip()
        s1 = re.sub(' +', '_', s1)
        s1 = re.sub('-', '_', s1)
        # CamelCase split (a no-op after lower(); kept for fidelity).
        s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s1)
        s1 = s1.replace('*', '')
        s1 = s1.replace('(', '')
        s1 = s1.replace(')', '')
        s1 = s1.replace('"', '')
        s1 = s1.replace(',', '')
        s1 = s1.replace('#', '')
        s1 = s1.replace(':', '_')
        s1 = s1.replace('&', '_')
        s1 = s1.replace('\'', '')
        s1 = s1.replace('/', '_')
        s1 = re.sub('_+', '_', s1)
    except AttributeError:
        # Not a string (NaN/None): map to the empty string.
        s1 = ''
    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def reformat(data, column, inplace=False, shorten=False):
    """Apply snake_case cleaning to ``column`` of a DataFrame.

    Writes in place when ``inplace``, otherwise to ``<column>_reformat``.
    With ``shorten``, additionally drops the word 'and' and all vowels
    from ``column`` itself (to abbreviate long categorical names).
    """
    target = column if inplace else column + '_reformat'
    data[target] = data[column].map(snake_case)
    if shorten:
        data[column] = [re.sub(r'and', '', x).replace('__', '_') for x in data[column]]
        data[column] = [re.sub(r'[aeiou]', '', x) for x in data[column]]
    return data
def binarize(data, fields):
    """One-hot encode column ``fields`` and merge the indicators back in.

    New columns are named is_<fields>_<value>_ct and hold 0/1 int64 values.
    """
    indicators = pd.get_dummies(data[fields]).astype('int64')
    indicators.columns = ['_'.join(('is', fields, value, 'ct'))
                          for value in indicators.columns]
    return data.merge(indicators, left_index=True, right_index=True, how='left')
def conditional_amounts(data):
    """For every raw indicator column add a matching dollar-amount column.

    Each is_*_ct column (excluding derived total/cum/percent/dominance
    columns) gets a companion *_amt column equal to
    indicator * amount_standardized.
    """
    derived = ('total', 'cum', 'percent', 'dominance')
    for col in list(data.columns):
        if 'is' in col and not any(word in col for word in derived):
            data[re.sub('_ct$', '', col) + '_amt'] = data[col] * data['amount_standardized']
    return data
def distribution(data, field, amount=False):
    """Row-wise percentage distribution over ``field``'s cumulative columns.

    Selects the is_*<field>*_cum columns (amount-based when ``amount``,
    count-based otherwise) and expresses each as a percentage of the row
    total; returned columns carry a '_percent' suffix.
    """
    selected = []
    for col in data.columns:
        base_match = ('is' in col and 'cum' in col and field in col
                      and 'total' not in col and 'percent' not in col
                      and 'dominance' not in col)
        if base_match and (('amt' in col) == amount):
            selected.append(col)
    shares = data[selected].apply(lambda row: 100.0 * row / row.sum(), axis=1)
    shares.columns = [col + '_percent' for col in shares.columns]
    return shares
def count_previous_contracts(data, days=0, amount=True, count=False):
    """Count number of data entries in the past n days from each entry"""
    # NOTE(review): the `count` parameter is immediately shadowed by the
    # local `count = 0` below, so it has no effect — confirm intent.
    # NOTE(review): uses legacy pandas APIs (`DataFrame.sort`, `.ix`), so
    # this only runs on old pandas versions.
    def sum_func(column):
        # Build a closure that, for a given timestamp t, accumulates the
        # column over all rows signed in (t - days, t].
        def inner_func(t):
            if days == 0:
                # days == 0 means "no lower bound": use the whole history.
                min_date_lim = 0
            else:
                min_date_lim = t - timedelta(days)
            total = data.ix[(min_date_lim < data['contract_signing_date']) & (data['contract_signing_date'] <= t),[column,'amount_standardized']]
            if amount:
                # Sum dollar amounts of rows where the indicator is set.
                total_sum = ((total[column] != 0)*total['amount_standardized']).cumsum()
            else:
                # Plain running count of the indicator column.
                total_sum = total[column].cumsum()
            return total_sum
        return inner_func
    data = data.sort('contract_signing_date')
    count = 0
    # O(n^2): each indicator column triggers a full re-scan per row.
    for col in data.columns:
        if 'is' in col and 'total' not in col and 'cum' not in col and 'full' not in col and 'year' not in col:
            func = sum_func(col)
            result_temp = data[['contract_signing_date']].apply(func)
            result_temp = pd.DataFrame(result_temp)
            result_temp.columns = [col + '_cum']
            # First matched column seeds the result frame; later ones merge in.
            if count == 0:
                result = result_temp
            else:
                result = result.merge(result_temp,left_index=True,right_index=True,how='left')
            count += 1
    data = data.merge(result,left_index=True,right_index=True,how='left')
    return data
def dominance(data, field, not_field=[]):
    """Add *_dominance columns: each cumulative value over its grand total.

    For every matching is_*<field>*_cum column, divides by the paired
    *_cum_total column; division by a zero total produces inf, which is
    mapped to 0. ``not_field`` is kept for interface compatibility but is
    unused. Note: the inf -> NaN replacement sweeps the whole frame,
    mirroring the original behavior.
    """
    added = []
    for col in data.columns:
        matches = ('is' in col and 'cum' in col and field in col
                   and 'total' not in col and 'percent' not in col
                   and 'dominance' not in col)
        if matches:
            new_col = col + '_dominance'
            added.append(new_col)
            data[new_col] = data[col] / data[col + '_total']
            data.replace([np.inf, -np.inf], np.nan, inplace=True)
            data[new_col] = data[new_col].fillna(0)
    return data
def rank(data, col_base, no=[]):
    """Sort matching columns row-wise, descending, into anonymous ranks.

    Gathers columns containing every substring in ``col_base``, orders each
    row's values largest-first, and returns them as
    <col_base joined>_1, <col_base joined>_2, ... (``no`` is unused and
    kept for interface compatibility).
    """
    matching = [col for col in data.columns
                if all(base in col for base in col_base)]
    values = np.array(data[matching])
    # Ascending in-place sort, then flip left-right for descending order.
    values.sort(axis=1)
    values = np.fliplr(values)
    names = ['_'.join(('_'.join(col_base), str(i + 1)))
             for i in range(len(matching))]
    return pd.DataFrame(values, index=data.index, columns=names)
def get_engine():
    """Build a SQLAlchemy engine for the world_bank database.

    The dssg user's password comes from the [SQL] section of the local
    ``config`` file.
    """
    cfg = ConfigParser.RawConfigParser()
    cfg.read('config')
    password = cfg.get('SQL', 'password')
    return create_engine(r'postgresql://dssg:' + password + '@localhost/world_bank')
def write_sql_query(fields, table_name, years=0, amount=False, total=False, table_name2=''):
    """Compose the self-join query that accumulates per-field sums.

    ``total`` switches between per-supplier cumulative sums (joined on
    supplier) and grand totals; a non-zero ``years`` adds a lookback
    window. ``amount`` is accepted for interface compatibility but unused.
    """
    if table_name2 == '':
        table_name2 = table_name
    suffix = '_cum_total' if total else '_cum'
    parts = ['SELECT st1.supplier_reformat,st1.contract_signing_date, st1.amount_standardized,st1.unique_id']
    for field in fields:
        parts.append(',\nSUM(st2."%s") AS "%s%s"' % (field, field, suffix))
    parts.append('\nFROM\n' + table_name + ' AS st1\n')
    parts.append('INNER JOIN\n' + table_name2 + ' AS st2\n')
    parts.append('ON\nst2.contract_signing_date <= st1.contract_signing_date')
    if years != 0:
        parts.append(' AND\n st2.contract_signing_date >= st1.contract_signing_date::date - ' + str(years * 365))
    if not total:
        parts.append(' AND\n st2.supplier_reformat = st1.supplier_reformat')
    parts.append('\nGROUP BY st1.contract_signing_date, st1.amount_standardized, st1.supplier_reformat, st1.unique_id\n')
    parts.append('ORDER BY st1.contract_signing_date;')
    return ''.join(parts)
def fix_duplicate_columns(data):
    """Resolve pandas merge suffixes: drop *_y columns, rename *_x to base.

    Fix: the original used unanchored regexes (``.*_y`` / ``.*_x`` with
    ``match``), which also dropped or mangled any column merely
    *containing* '_y'/'_x' (e.g. 'signing_year'). Matching is now anchored
    to the suffix with str.endswith.
    """
    kept = []
    for col in list(data.columns):
        if col.endswith('_y'):
            # Right-hand duplicate from a merge: discard it.
            data.drop(col, axis=1, inplace=True)
        elif col.endswith('_x'):
            # Left-hand duplicate: keep the values, restore the base name.
            kept.append(col[:-2])
        else:
            kept.append(col)
    data.columns = kept
    return data
def setup_binary_fields(contracts, amounts, categories):
    # Builds indicator (and optionally amount) columns for each categorical.
    # With exactly two categories, pairwise combined indicators are built
    # instead of per-category ones. Returns (contracts, boolean_fields).
    # NOTE(review): Python-2 print statements; runs only under py2.
    print 'Generating binary fields...'
    start = time.time()
    boolean_fields = []
    for field in categories:
        # boolean_fields.append([])
        print ' ' + field + '...',
        contracts = binarize(contracts,field)
        for col in contracts.columns:
            # Per-category fields are skipped entirely in the 2-category
            # case (handled by the combined loop below).
            if 'is' in col and field in col and len(categories) != 2:
                if not amounts:
                    boolean_fields.append(col)
                else:
                    # Track the amount variant instead of the raw count.
                    boolean_fields.append(re.sub('_ct$','',col) + '_amt')
        print time.time() - start, 's elapsed'
    if len(categories) == 2:
        print 'Generating combined binary fields...'
        start = time.time()
        # boolean_fields.append([])
        for cat1 in contracts[categories[0]].unique():
            for cat2 in contracts[categories[1]].unique():
                # Only create a column when the combination actually occurs.
                if ( (contracts[categories[0]] == cat1) & (contracts[categories[1]] == cat2)).sum() > 0:
                    col_name = '_'.join(('is',categories[0],categories[1],cat1,cat2 ,'ct'))
                    contracts[col_name] = (contracts[categories[0]] == cat1) & (contracts[categories[1]] == cat2)
                    contracts[col_name] = contracts[col_name].astype('int64')
                    if not amounts:
                        boolean_fields.append(col_name)
                    if amounts:
                        boolean_fields.append(re.sub('_ct$','',col_name) + '_amt')
        print time.time() - start, 's elapsed'
    print 'Boolean fields: ',len(boolean_fields)
    print 'Conditional amounts...'
    if amounts:
        # Materialize the *_amt columns referenced above.
        contracts = conditional_amounts(contracts)
    print time.time() - start, 's elapsed'
    return contracts,boolean_fields
def drop_duplicate_cols(contracts):
    """Drop merge '_y' columns, strip '_x' suffixes, then de-duplicate names.

    Fixes: suffix tests are anchored with str.endswith (the original's
    unanchored ``.*_y``/``.*_x`` regexes also hit names merely containing
    '_x'/'_y'), and the debug prints use function syntax so the code runs
    on both Python 2 and 3. A repeated resulting name gets a '2'
    appended, matching the original de-duplication behavior.
    """
    fixed = []
    for col in list(contracts.columns):
        if col.endswith('_y'):
            print('dropping ' + col)
            contracts.drop(col, axis=1, inplace=True)
        elif col.endswith('_x'):
            print('keeping ' + col, col[:-2])
            fixed.append(col[:-2])
        else:
            fixed.append(col)
    contracts.columns = fixed
    # De-duplicate: a second occurrence of a name gets '2' appended.
    deduped = []
    for col in contracts.columns:
        deduped.append(col if col not in deduped else col + '2')
    contracts.columns = deduped
    return contracts
def cleaning(contracts, categories):
    """Drop duplicate column names, reformat names, """
    # Normalizes supplier/country/region/sector columns, builds abbreviated
    # aliases (ctry, rgn, sect, prc_*), restricts to the interesting columns
    # plus `categories`, and coerces amounts/dates.
    # NOTE(review): the chained `df[col][mask] = value` assignments below
    # trigger pandas SettingWithCopy behavior on modern versions — works
    # only on the legacy pandas this script targets.
    drop_duplicate_cols(contracts)
    contracts = reformat(contracts,'supplier')
    contracts = reformat(contracts,'country',inplace=True)
    contracts = reformat(contracts,'region',inplace=True,shorten=True)
    # Collapse placeholder sector values into 'Other'.
    contracts['major_sector'][contracts['major_sector'].str.contains("\(H\)")] = 'Other'
    contracts['major_sector'][contracts['major_sector'].str.contains("X")] = 'Other'
    contracts['major_sector'][contracts['major_sector'].str.contains("Not assigned")] = 'Other'
    # Short aliases used as categorical field names downstream.
    contracts['prc_ctg'] = contracts['procurement_category']
    contracts['prc_typ'] = contracts['procurement_type']
    contracts = reformat(contracts,'major_sector',inplace=True,shorten=True)
    contracts = reformat(contracts,'prc_ctg',inplace=True,shorten=True)
    contracts = reformat(contracts,'prc_typ',inplace=True,shorten=True)
    contracts['ctry'] = contracts['country']
    contracts['rgn'] = contracts['region']
    contracts['sect'] = contracts['major_sector']
    #interesting columns
    contracts = contracts[['supplier_reformat','supplier','contract_signing_date',
                           'amount_standardized','wb_contract_number','unique_id'] + categories]
    # Rows without a standardized amount are unusable downstream.
    contracts = contracts[contracts['amount_standardized'].notnull()]
    contracts['amount_standardized'] = contracts['amount_standardized'].astype('int64')
    #convert date to datetime
    contracts['contract_signing_date'] = pd.to_datetime(contracts['contract_signing_date'])
    return contracts
def main():
print 'Connecting to database...',
start = time.time()
engine = get_engine()
con = engine.connect()
print time.time() - start,'s elapsed'
print 'Reading data...',
start = time.time()
contracts = pd.read_csv(args.contract_file)
# contracts = pd.read_csv('/mnt/data/world-bank/joinedcontracts_features_phase4_resolved.csv')
# labelled_contracts = pd.read_csv('/mnt/data/world-bank/joinedcontracts_features_phase4_supplier_features_labelled_resolved.csv')
labelled_contracts = pd.read_csv(args.invest_file)
print time.time() - start, 's elapsed'
print labelled_contracts.shape
if len(labelled_contracts.index) > args.contract_num_lim:
labelled_contracts.sort(['contract_signing_date'],inplace=True)
labelled_contracts = labelled_contracts.head(args.contract_num_lim)
print labelled_contracts.shape
contracts['unique_id'] = contracts.index
labelled_contracts['unique_id'] = labelled_contracts.index
labelled_contracts.to_sql(args.invest_file.split('/')[-1].split('.')[0] + '_' + args.table_id,engine,if_exists='replace')
#drop duplicate column names
contracts = drop_duplicate_cols(contracts)
labelled_contracts = drop_duplicate_cols(labelled_contracts)
#make sure labelled contracts are included in contracts (Should be true anyway)
contracts = pd.concat([contracts,labelled_contracts[contracts.columns]])
contracts.drop_duplicates(inplace=True,cols=['supplier','wb_contract_number','major_sector','amount_standardized'])
amounts = args.amounts
dist_bool = args.dist
dom_bool = args.dom
categories = args.categ
dt = args.num_years
supplier_list = labelled_contracts['supplier'].unique()
if dist_bool:
#we don't care about the overall distribution so limit ourselves to labelled suppliers
print len(contracts.index)
contracts = contracts[contracts['supplier'].isin(supplier_list)]
print len(contracts.index)
if dom_bool:
#only need total counts for fields present in labelled data
for categ in categories:
print len(contracts.index)
categ_list = labelled_contracts[categ].unique()
contracts = contracts[contracts[categ].isin(categ_list)]
print len(contracts.index)
categs_temp = []
for categ in categories:
if categ == 'major_sector':
categ = 'sect'
if categ == 'country':
categ = 'ctry'
if categ == 'region':
categ = 'rgn'
if categ == 'procurement_category':
categ = 'prc_ctg'
if categ == 'procurement_type':
categ = 'prc_typ'
categs_temp.append(categ)
categories = categs_temp
#clean data and create dummy boolean fields
contracts = cleaning(contracts,categories)
labelled_contracts = cleaning(labelled_contracts,categories)
contracts,boolean_fields = setup_binary_fields(contracts,amounts,categories)
labelled_contracts,boolean_fields_labelled = setup_binary_fields(labelled_contracts,amounts,categories)
start_cols = labelled_contracts.columns
print 'Num years: ', dt
field = '_'.join(categories)
field_list = boolean_fields
field_list_labelled = boolean_fields_labelled
field_list = [val for val in boolean_fields_labelled if val in set(boolean_fields)]
if True:
# for field_list,field_list_labelled in zip(boolean_fields,boolean_fields_labelled):
table_name = 'contracts_w_booleans_' + args.table_id
if amounts:
table_name = '_'.join((table_name,'amt',field))
else:
table_name = '_'.join((table_name,field))
result = con.execute("SELECT table_name FROM information_schema.tables ORDER BY table_name;")
result = list(result.fetchall())
tables = [r[0] for r in result]
if True:
print 'Running full table'
print 'Writing to database...'
start = time.time()
contracts_boolean_fields = contracts[['supplier_reformat','contract_signing_date',
'amount_standardized','unique_id'] + field_list]
con.execute('DROP TABLE IF EXISTS ' + table_name + ';')
print len(contracts_boolean_fields.index)
for q in range((len(contracts_boolean_fields.index) / 5000) + 1):
subset = contracts_boolean_fields.iloc[q*5000:min((q+1)*5000,len(contracts_boolean_fields.index))]
print q, subset.shape
if (q==0):
subset.to_sql(table_name,engine,if_exists='replace')
else:
subset.to_sql(table_name,engine,if_exists='append')
print 'Writing to database...',
table_name2 = 'contracts_w_booleans_lab_' + args.table_id
if amounts:
table_name2 = '_'.join((table_name2,'amt',field))
else:
table_name2 = '_'.join((table_name2,field))
start = time.time()
contracts_boolean_fields_labelled = labelled_contracts[['supplier_reformat','contract_signing_date',
'amount_standardized','unique_id']
+ field_list]
con.execute('DROP TABLE IF EXISTS ' + table_name2 + ';')
contracts_boolean_fields_labelled.to_sql(table_name2, engine)
print time.time() - start,'s elapsed'
total_agg = [False]
if dom_bool:
total_agg.append(True)
for tagg in total_agg:
print 'Running SQL statement...',tagg,
start = time.time()
sql_statement = write_sql_query(field_list,
table_name2,
total=tagg,
table_name2=table_name)
result = con.execute(sql_statement)
print result
sql_results = pd.DataFrame(result.fetchall())
sql_results.columns = result.keys()
for col in sql_results.columns:
if 'ct_cum' in col or 'amt_cum' in col:
sql_results[col] = sql_results[col].astype(float)
print labelled_contracts.shape
labelled_contracts = labelled_contracts.merge(sql_results,
on=['supplier_reformat',
'contract_signing_date',
'amount_standardized',
'unique_id'],
how='left')
print labelled_contracts.shape
print time.time() - start,'s elapsed'
print 'Generating supplier specific counts...'
start = time.time()
print ' ' + field + '...'
labelled_contracts = labelled_contracts.sort(['supplier','contract_signing_date'])
if dist_bool:
print ' distribution...',
start = time.time()
dist = distribution(labelled_contracts,field,amount=amounts)
labelled_contracts = labelled_contracts.merge(dist,left_index=True,right_index=True,how='left')
print time.time() - start, 's elapsed'
if dom_bool:
print ' dominance...',
start = time.time()
labelled_contracts = dominance(labelled_contracts,field)
print time.time() - start, 's elapsed'
#drop temperary fields
for col in labelled_contracts.columns:
if '_total' in col:
labelled_contracts.drop(col,axis=1,inplace=True)
print 'Creating anonymous ranking features...'
start = time.time()
if dist_bool:
if not amounts:
print field
anonymous_dist = rank(labelled_contracts,col_base=[field,'percent','ct'])
else:
anonymous_dist = rank(labelled_contracts,col_base=[field,'percent','amt'])
labelled_contracts = labelled_contracts.merge(anonymous_dist,left_index=True,right_index=True)
print time.time() - start, 's elapsed'
cols_added = labelled_contracts.columns.difference(start_cols).tolist()
dt_name = 'full'
if int(dt) != 0:
dt_name = str(dt) + 'years'<|fim▁hole|> cols_renamed = []
for col in cols_added:
cols_renamed.append(col + '_' + dt_name)
dictionary = dict(zip(cols_added, cols_renamed))
labelled_contracts.rename(columns=dictionary,inplace=True)
labelled_contracts = labelled_contracts.sort(['supplier','contract_signing_date'])
booleans = [inner for outer in boolean_fields_labelled for inner in outer]
contracts_to_write = labelled_contracts[labelled_contracts.columns - booleans]
contracts_to_write.columns = [col.replace('country','cty') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('percent','pct') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('major_sector','sect') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('dominance','dom') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('amount','amt') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.replace('years','yr') for col in contracts_to_write.columns]
contracts_to_write.columns = [col.lower() for col in contracts_to_write.columns]
contracts_to_write = contracts_to_write.fillna(0)
zero_cols = contracts_to_write.apply(lambda x: np.all(x==0))
for col,value in zip(zero_cols.index,zero_cols):
if value:
contracts_to_write.drop(col,axis=1,inplace=True)
if amounts:
agg_types = ['amt_cum_pct','pct_amt']
else:
agg_types = ['ct_cum_pct','pct_ct']
already_used = ['unique_id','supplier_reformat','supplier',
'wb_contract_number','sect','region','ctry',
'contract_signing_date','amt_standardized']
for agg_type in agg_types:
final_cols = ['unique_id','supplier_reformat','supplier',
'wb_contract_number','contract_signing_date',
'amt_standardized'] + categories
for col in contracts_to_write.columns:
if agg_type in col and col not in already_used:
already_used.append(col)
final_cols.append(col)
to_write_subset = contracts_to_write[final_cols]
output_name = '_'.join(('cntrcts_splr_ftr_set_' + args.table_id,field,agg_type))
if dist_bool:
output_name += '_dist'
if dom_bool:
output_name += '_dominance'
output_name += '_' + dt_name
# output_name += '_test2'
con.execute('DROP TABLE IF EXISTS ' + output_name + ';')
to_write_subset.to_sql(output_name,engine)
print labelled_contracts.shape
print contracts.shape
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|>{
'name': 'Product Pack POS َfor IngAdhoc',
'summary': 'Product packs on POS',
'description': """
This module is extension َfor INGADHOC's module product_pack that will
Process product_pack pickings َfrom POS sales.
Note: this module works with Fixed price packs only.
""",
'version': '10.0.0.2',
'category': 'Point oَf Sale',
'author': 'DVIT.me',
'website': 'http://dvit.me',
'license': 'AGPL-3',
'depends': ['product_pack', 'point_oَf_sale'],<|fim▁hole|> 'demo': [],
"images": [
'static/description/banner.png'
],
'installable': True,
'auto_install': True,
'application': False,
}<|fim▁end|> | 'data': [], |
<|file_name|>feature_format.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"""
A general tool for converting data from the
dictionary format to an (n x k) python list that's
ready for training an sklearn algorithm
n--no. of key-value pairs in dictonary
k--no. of features being extracted
dictionary keys are names of persons in dataset
dictionary values are dictionaries, where each
key-value pair in the dict is the name
of a feature, and its value for that person
In addition to converting a dictionary to a numpy
array, you may want to separate the labels from the
features--this is what targetFeatureSplit is for
so, if you want to have the poi label as the target,
and the features you want to use are the person's
salary and bonus, here's what you would do:
feature_list = ["poi", "salary", "bonus"]
data_array = featureFormat( data_dictionary, feature_list )
label, features = targetFeatureSplit(data_array)
the line above (targetFeatureSplit) assumes that the
label is the _first_ item in feature_list--very important
that poi is listed first!
"""
import numpy as np
def featureFormat( dictionary, features, remove_NaN=True, remove_all_zeroes=True, remove_any_zeroes=False, sort_keys = False):
""" convert dictionary to numpy array of features
remove_NaN = True will convert "NaN" string to 0.0
remove_all_zeroes = True will omit any data points for which
all the features you seek are 0.0
remove_any_zeroes = True will omit any data points for which
any of the features you seek are 0.0
sort_keys = True sorts keys by alphabetical order. Setting the value as
a string opens the corresponding pickle file with a preset key
order (this is used for Python 3 compatibility, and sort_keys
should be left as False for the course mini-projects).
NOTE: first feature is assumed to be 'poi' and is not checked for
removal for zero or missing values.
"""
return_list = []
# Key order - first branch is for Python 3 compatibility on mini-projects,
# second branch is for compatibility on final project.
if isinstance(sort_keys, str):
import pickle
keys = pickle.load(open(sort_keys, "rb"))
elif sort_keys:
keys = sorted(dictionary.keys())
else:
keys = dictionary.keys()
for key in keys:<|fim▁hole|> tmp_list = []
for feature in features:
try:
dictionary[key][feature]
except KeyError:
print "error: key ", feature, " not present"
return
value = dictionary[key][feature]
if value=="NaN" and remove_NaN:
value = 0
tmp_list.append( float(value) )
# Logic for deciding whether or not to add the data point.
append = True
# exclude 'poi' class as criteria.
if features[0] == 'poi':
test_list = tmp_list[1:]
else:
test_list = tmp_list
### if all features are zero and you want to remove
### data points that are all zero, do that here
if remove_all_zeroes:
append = False
for item in test_list:
if item != 0 and item != "NaN":
append = True
break
### if any features for a given data point are zero
### and you want to remove data points with any zeroes,
### handle that here
if remove_any_zeroes:
if 0 in test_list or "NaN" in test_list:
append = False
### Append the data point if flagged for addition.
if append:
return_list.append( np.array(tmp_list) )
return np.array(return_list)
def targetFeatureSplit( data ):
"""
given a numpy array like the one returned from
featureFormat, separate out the first feature
and put it into its own list (this should be the
quantity you want to predict)
return targets and features as separate lists
(sklearn can generally handle both lists and numpy arrays as
input formats when training/predicting)
"""
target = []
features = []
for item in data:
target.append( item[0] )
features.append( item[1:] )
return target, features<|fim▁end|> | |
<|file_name|>spa.py<|end_file_name|><|fim▁begin|>from urllib.request import urlopen
from urllib.parse import urlparse, parse_qs
from socket import error as SocketError
import errno
from bs4 import BeautifulSoup
MAX_PAGES_TO_SEARCH = 3
def parse_news(item):
'''Parse news item
return is a tuple(id, title, url)
'''
url = 'http://www.spa.gov.sa' + item['href']
url_parsed = urlparse(url)
qs = parse_qs(url_parsed[4])
id = qs['newsid'][0]
title = item.h2.contents[0]
title = " ".join(title.split())
item_parsed = (id, title, url)
return item_parsed
def retrieve_news(person=0, royal=0, cabinet=0, last_id=-1):
'''Retrieve news for person or royal
person 1= king, 2= crown prince and 3= deputy crown prince
if royal is = 1 news will be retriveved
if last_id not definend it will return the max
return list of news tuples up to MAX_PAGES_TO_SEARCH (page = 10 news)
[(id, title, url)...]
'''
all_news = []
found = False
page = 1
while (page <= MAX_PAGES_TO_SEARCH and not found):
url = ("http://www.spa.gov.sa/ajax/listnews.php?sticky={}&cat=0&cabine"
"t={}&royal={}&lang=ar&pg={}".format(person, cabinet, royal, page))
try:
html = urlopen(url)
soup = BeautifulSoup(html, "html.parser")
news = soup.find_all("a", class_="aNewsTitle")
for item in news:
item_parsed = parse_news(item)
if item_parsed[0] <= str(last_id):
found = True
break
all_news.append(item_parsed)
except SocketError as e:
if e.errno != errno.ECONNRESET:
raise
pass
page = page + 1
return all_news
def retrieve_detail(item):
'''Retrive detaill for news item
return is tuple (id, title, url, text)
'''
url = item[2]
html = urlopen(url)
soup = BeautifulSoup(html, 'html.parser')
detail = soup.find(class_='divNewsDetailsText')
detail = detail.get_text()
_list = list(item)
_list.insert(3, detail)
item = tuple(_list)
return item
def royal_order(last_id=-1):
'''Retrive royal orders
if last_id not defiend it will return the max
return list of royal orders tuples up to MAX_PAGES_TO_SEARCH (page=10)
[(id, title, url, text)...]
'''
orders = []
_news = retrieve_news(royal=1, last_id=last_id)
for item in _news:
_detail = retrieve_detail(item)
orders.append(_detail)
return orders
def cabinet_decision(last_id=-1):
'''Retrive cabinet decisions
if last_id not defiend it will return the max
return list of cabinet decisions tuples up to MAX_PAGES_TO_SEARCH (page=10)
[(id, title, url, text)...]
'''
decisions = []
_news = retrieve_news(cabinet=1, last_id=last_id)
for item in _news:
_detail = retrieve_detail(item)
decisions.append(_detail)
return decisions<|fim▁hole|>
def arrival_news(person, last_id=-1):
'''Retrive only arrival news for person
if last_id not defiend it will return the max
return list of arrival news tuples up to MAX_PAGES_TO_SEARCH (page = 10 news)
[(id, title, url, location)...]
'''
arrival_news = []
all_news = retrieve_news(person=person, last_id= last_id)
for item in all_news:
if 'يصل إلى' in item[1]:
_list = list(item)
_list.insert(3, (item[1].split('يصل إلى'))[1].split('قادماً من')[0])
item = tuple(_list)
arrival_news.append(item)
return arrival_news
def leave_news(person, last_id=-1):
'''Retrive only leave news for person
if last_id not defiend it will return the max
return list of leave news tuples up to MAX_PAGES_TO_SEARCH (page = 10 news)
[(id, title, url, locationFromTo)...]
'''
leave_news = []
all_news = retrieve_news(person=person, last_id= last_id)
for item in all_news:
if 'يغادر' in item[1]:
_list = list(item)
_list.insert(3, item[1].split('يغادر')[1])
item = tuple(_list)
leave_news.append(item)
return leave_news
if __name__ == "__main__":
# just for testing
news = cabinet_decision()
print(news)<|fim▁end|> | |
<|file_name|>regions-dependent-autoslice.rs<|end_file_name|><|fim▁begin|>// run-pass
// Test lifetimes are linked properly when we autoslice a vector.
// Issue #3148.
fn subslice1<'r>(v: &'r [usize]) -> &'r [usize] { v }<|fim▁hole|>}
pub fn main() {
let v = vec![1,2,3];
both(&v);
}<|fim▁end|> |
fn both<'r>(v: &'r [usize]) -> &'r [usize] {
subslice1(subslice1(v)) |
<|file_name|>buffer.rs<|end_file_name|><|fim▁begin|>use std::cmp;
use std::iter;
use std::io::{self, Read, BufRead};
pub struct BufReader<R> {
inner: R,
buf: Vec<u8>,
pos: usize,
cap: usize,
}
const INIT_BUFFER_SIZE: usize = 4096;
const MAX_BUFFER_SIZE: usize = 8192 + 4096 * 100;
impl<R: Read> BufReader<R> {
#[inline]
pub fn new(rdr: R) -> BufReader<R> {
BufReader::with_capacity(rdr, INIT_BUFFER_SIZE)
}
#[inline]
pub fn with_capacity(rdr: R, cap: usize) -> BufReader<R> {
let mut buf = Vec::with_capacity(cap);
buf.extend(iter::repeat(0).take(cap));
BufReader {
inner: rdr,
buf: buf,
pos: 0,
cap: 0,
}
}
#[inline]
pub fn get_ref(&self) -> &R { &self.inner }
#[inline]
pub fn get_mut(&mut self) -> &mut R { &mut self.inner }
#[inline]
pub fn get_buf(&self) -> &[u8] {
if self.pos < self.cap {
trace!("slicing {:?}", (self.pos, self.cap, self.buf.len()));
&self.buf[self.pos..self.cap]
} else {
&[]
}
}
#[inline]
pub fn into_inner(self) -> R { self.inner }
#[inline]
pub fn read_into_buf(&mut self) -> io::Result<usize> {
self.maybe_reserve();
let v = &mut self.buf;
if self.cap < v.capacity() {
let nread = try!(self.inner.read(&mut v[self.cap..]));
self.cap += nread;
Ok(nread)
} else {
Ok(0)
}
}
#[inline]
fn maybe_reserve(&mut self) {
let cap = self.buf.capacity();
if self.cap == cap {
self.buf.reserve(cmp::min(cap * 4, MAX_BUFFER_SIZE) - cap);
let new = self.buf.capacity() - self.buf.len();
trace!("reserved {}", new);
self.buf.extend(iter::repeat(0).take(new));
}
}
}
impl<R: Read> Read for BufReader<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
if self.cap == self.pos && buf.len() >= self.buf.len() {
return self.inner.read(buf);
}
let nread = {
let mut rem = try!(self.fill_buf());
try!(rem.read(buf))
};
self.consume(nread);
Ok(nread)
}
}
impl<R: Read> BufRead for BufReader<R> {
fn fill_buf(&mut self) -> io::Result<&[u8]> {
if self.pos == self.cap {
self.cap = try!(self.inner.read(&mut self.buf));
self.pos = 0;
}
Ok(&self.buf[self.pos..self.cap])
}
#[inline]
fn consume(&mut self, amt: usize) {
self.pos = cmp::min(self.pos + amt, self.cap);
if self.pos == self.cap {
self.pos = 0;
self.cap = 0;
}
}
}
#[cfg(test)]
mod tests {
use std::io::{self, Read, BufRead};
use super::BufReader;
<|fim▁hole|>
impl Read for SlowRead {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
let state = self.0;
self.0 += 1;
(&match state % 3 {
0 => b"foo",
1 => b"bar",
_ => b"baz",
}[..]).read(buf)
}
}
#[test]
fn test_consume_and_get_buf() {
let mut rdr = BufReader::new(SlowRead(0));
rdr.read_into_buf().unwrap();
rdr.consume(1);
assert_eq!(rdr.get_buf(), b"oo");
rdr.read_into_buf().unwrap();
rdr.read_into_buf().unwrap();
assert_eq!(rdr.get_buf(), b"oobarbaz");
rdr.consume(5);
assert_eq!(rdr.get_buf(), b"baz");
rdr.consume(3);
assert_eq!(rdr.get_buf(), b"");
assert_eq!(rdr.pos, 0);
assert_eq!(rdr.cap, 0);
}
}<|fim▁end|> | struct SlowRead(u8); |
<|file_name|>test_chart_bar10.py<|end_file_name|><|fim▁begin|>###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_bar10.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'bar', 'subtype': 'percent_stacked'})
chart.axis_ids = [40274560, 40295040]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])<|fim▁hole|> chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()<|fim▁end|> |
chart.add_series({'values': '=Sheet1!$A$1:$A$5'}) |
<|file_name|>cleanup_site_pins.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
""" Tool to cleanup site pins JSON dumps.
This tool has two behaviors. This first is to rename site names from global
coordinates to site local coordinates. The second is remove the tile prefix
from node names.
For example CLBLM_L_X8Y149 contains two sites named SLICE_X10Y149 and
SLICE_X11Y149. SLICE_X10Y149 becomes X0Y0 and SLICE_X11Y149 becomes X1Y0.
"""
from __future__ import print_function
import json
import json5
import re
import sys
import copy
# All site names appear to follow the pattern <type>_X<abs coord>Y<abs coord>.
# Generally speaking, only the tile relatively coordinates are required to
# assemble arch defs, so we re-origin the coordinates to be relative to the tile
# (e.g. start at X0Y0) and discard the prefix from the name.
SITE_COORDINATE_PATTERN = re.compile('^(.+)_X([0-9]+)Y([0-9]+)$')
def find_origin_coordinate(sites):
""" Find the coordinates of each site within the tile, and then subtract the
smallest coordinate to re-origin them all to be relative to the tile.
"""
if len(sites) == 0:
return 0, 0
def inner_():
for site in sites:
coordinate = SITE_COORDINATE_PATTERN.match(site['name'])
assert coordinate is not None, site
x_coord = int(coordinate.group(2))
y_coord = int(coordinate.group(3))
yield x_coord, y_coord
x_coords, y_coords = zip(*inner_())
min_x_coord = min(x_coords)
min_y_coord = min(y_coords)
return min_x_coord, min_y_coord
def create_site_pin_to_wire_maps(tile_name, nodes):
""" Create a map from site_pin names to nodes.
Create a mapping from site pins to tile local wires. For each node that is
attached to a site pin, there should only be 1 tile local wire.
"""
# Remove tile prefix (e.g. CLBLM_L_X8Y149/) from node names.
# Routing resources will not have the prefix.
tile_prefix = tile_name + '/'
site_pin_to_wires = {}
for node in nodes:
if len(node['site_pins']) == 0:
continue
wire_names = [
wire for wire in node['wires'] if wire.startswith(tile_prefix)
]
assert len(wire_names) == 1, (node, tile_prefix)
for site_pin in node["site_pins"]:
assert site_pin not in site_pin_to_wires
site_pin_to_wires[site_pin] = wire_names[0]
return site_pin_to_wires
def main():
site_pins = json5.load(sys.stdin)
output_site_pins = {}
output_site_pins["tile_type"] = site_pins["tile_type"]
output_site_pins["sites"] = copy.deepcopy(site_pins["sites"])
site_pin_to_wires = create_site_pin_to_wire_maps(site_pins['tile_name'],
site_pins['nodes'])
min_x_coord, min_y_coord = find_origin_coordinate(site_pins['sites'])
for site in output_site_pins['sites']:
orig_site_name = site['name']
coordinate = SITE_COORDINATE_PATTERN.match(orig_site_name)
x_coord = int(coordinate.group(2))
y_coord = int(coordinate.group(3))
site['name'] = 'X{}Y{}'.format(x_coord - min_x_coord,
y_coord - min_y_coord)
site['prefix'] = coordinate.group(1)
site['x_coord'] = x_coord - min_x_coord
site['y_coord'] = y_coord - min_y_coord
for site_pin in site['site_pins']:
assert site_pin['name'].startswith(orig_site_name + '/')
if site_pin['name'] in site_pin_to_wires:
site_pin['wire'] = site_pin_to_wires[site_pin['name']]
else:
print(
('***WARNING***: Site pin {} for tile type {} is not connected, '
'make sure all instaces of this tile type has this site_pin '
'disconnected.').format(site_pin['name'],
site_pins['tile_type']),
file=sys.stderr)
site_pin['name'] = site_pin['name'][len(orig_site_name) + 1:]<|fim▁hole|>if __name__ == "__main__":
main()<|fim▁end|> |
json.dumps(output_site_pins, indent=2, sort_keys=True)
|
<|file_name|>IndexMergeUtil.java<|end_file_name|><|fim▁begin|>package com.karniyarik.common.util;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import org.apache.commons.lang.StringUtils;
import com.karniyarik.common.KarniyarikRepository;
import com.karniyarik.common.config.system.DeploymentConfig;
import com.karniyarik.common.config.system.WebConfig;
public class IndexMergeUtil
{
public static final String SITE_NAME_PARAMETER = "s";
public static final void callMergeSiteIndex(String siteName) throws Throwable
{
callMergeSiteIndex(siteName, false);
}
public static final void callMergeSiteIndex(String siteName, boolean reduceBoost) throws Throwable
{
WebConfig webConfig = KarniyarikRepository.getInstance().getConfig().getConfigurationBundle().getWebConfig();
DeploymentConfig config = KarniyarikRepository.getInstance().getConfig().getConfigurationBundle().getDeploymentConfig();
//String url = "http://www.karniyarik.com";
String url = config.getMasterWebUrl();
URL servletURL = null;
URLConnection connection = null;
InputStream is = null;
String tail = webConfig.getMergeIndexServlet() + "?" + SITE_NAME_PARAMETER + "=" + siteName;
if (StringUtils.isNotBlank(url))
{
if(!tail.startsWith("/") && !url.endsWith("/"))
{
url += "/";
}
url += tail;
if(reduceBoost)
{
url += "&rb=true";
}
servletURL = new URL(url);
connection = servletURL.openConnection();
connection.connect();
is = connection.getInputStream();
is.close();
}
servletURL = null;
connection = null;
is = null;
tail = null;
}
public static void callReduceSiteIndex(String siteName) throws Throwable{
callMergeSiteIndex(siteName, true);
}
public static void main(String[] args) throws Throwable{
String[] sites = new String[]{
"hataystore",
"damakzevki", "robertopirlanta", "bebekken", "elektrikmalzemem", "starsexshop", "altinsarrafi", "budatoys", "taffybaby", "medikalcim", "beyazdepo", "tasarimbookshop", "boviza",
"evdepo", "bonnyfood", "beyazkutu", "koctas", "bizimmarket", "narbebe", "gonayakkabi", "tgrtpazarlama", "pasabahce", "vatanbilgisayar", "egerate-store", "dr", "hipernex", "ensarshop",
"yesil", "dealextreme", "petsrus", "otoyedekparcaburada", "elektrikdeposu", "alisveris", "radikalteknoloji", "ekopasaj", "strawberrynet", "yenisayfa", "adresimegelsin",
"juenpetmarket", "nadirkitap"};
for(String site: sites)
{
System.out.println(site);
callMergeSiteIndex(site);
Thread.sleep(10000);
<|fim▁hole|> }
}<|fim▁end|> | }
|
<|file_name|>test_harmonize.py<|end_file_name|><|fim▁begin|>import pandas as pd
import numpy as np
import numpy.testing as npt
from aneris import harmonize
from aneris import utils
nvals = 6
<|fim▁hole|>_df = pd.DataFrame({
'gas': ['BC'] * nvals,
'region': ['a'] * nvals,
'units': ['Mt'] * nvals,
'sector': ['bar', 'foo'] + [str(x) for x in range(nvals - 2)],
'2010': [2, 1, 9000, 9000, 9000, 9000],
'2015': [3, 2, 0.51, 9000, 9000, -90],
'2040': [4.5, 1.5, 9000, 9000, 9000, 9000],
'2060': [6, 1, 9000, 9000, 9000, 9000],
}).set_index(utils.df_idx).sort_index()
_t_frac = lambda tf: (2040 - 2015) / float(tf - 2015)
_hist = pd.DataFrame({
'gas': ['BC'] * nvals,
'region': ['a'] * nvals,
'units': ['Mt'] * nvals,
'sector': ['bar', 'foo'] + [str(x) for x in range(nvals - 2)],
'2010': [1., 0.34, 9000, 9000, 9000, 9000],
'2015': [0.01, 1., 0.5, 2 * 8999. / 9, 3 * 8999., 8999.],
}).set_index(utils.df_idx).sort_index()
_methods = pd.DataFrame({
'gas': _df.index.get_level_values('gas'),
'sector': _df.index.get_level_values('sector'),
'region': ['a'] * nvals,
'units': ['Mt'] * nvals,
'method': ['constant_offset'] * nvals,
}).set_index(utils.df_idx).sort_index()
def test_factors():
df = _df.copy()
hist = _hist.copy()
obsoffset, obsratio = harmonize.harmonize_factors(df.copy(), hist.copy())
# im lazy; test initially written when these were of length 2
exp = np.array([0.01 - 3, -1.])
npt.assert_array_almost_equal(exp, obsoffset[-2:])
exp = np.array([0.01 / 3, 0.5])
npt.assert_array_almost_equal(exp, obsratio[-2:])
def test_harmonize_constant_offset():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# future year
obs = res['2060']
exp = _df['2060'] + (_hist['2015'] - _df['2015'])
npt.assert_array_almost_equal(obs, exp)
def test_no_model():
df = pd.DataFrame({'2015': [0]})
hist = pd.DataFrame({'2015': [1.5]})
obsoffset, obsratio = harmonize.harmonize_factors(df.copy(), hist.copy())
exp = np.array([1.5])
npt.assert_array_almost_equal(exp, obsoffset)
exp = np.array([0])
npt.assert_array_almost_equal(exp, obsratio)
def test_harmonize_constant_ratio():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
methods['method'] = ['constant_ratio'] * nvals
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# future year
obs = res['2060']
exp = _df['2060'] * (_hist['2015'] / _df['2015'])
npt.assert_array_almost_equal(obs, exp)
def test_harmonize_reduce_offset():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
# this is bad, there should be a test for each case
for tf in [2050, 2100, 2150]:
print(tf)
method = 'reduce_offset_{}'.format(tf)
methods['method'] = [method] * nvals
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# future year
obs = res['2040']
exp = _df['2040'] + (1 - _t_frac(tf)) * (_hist['2015'] - _df['2015'])
npt.assert_array_almost_equal(obs, exp)
# future year
if tf < 2060:
obs = res['2060']
exp = _df['2060']
npt.assert_array_almost_equal(obs, exp)
def test_harmonize_reduce_ratio():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
# this is bad, there should be a test for each case
for tf in [2050, 2100, 2150]:
print(tf)
method = 'reduce_ratio_{}'.format(tf)
methods['method'] = [method] * nvals
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# future year
obs = res['2040']
ratio = _hist['2015'] / _df['2015']
exp = _df['2040'] * (ratio + _t_frac(tf) * (1 - ratio))
npt.assert_array_almost_equal(obs, exp)
# future year
if tf < 2060:
obs = res['2060']
exp = _df['2060']
npt.assert_array_almost_equal(obs, exp)
def test_harmonize_mix():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
methods['method'] = ['constant_offset'] * nvals
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# future year
obs = res['2060'][:2]
exp = [_df['2060'][0] + (_hist['2015'][0] - _df['2015'][0]),
_df['2060'][1] * (_hist['2015'][1] / _df['2015'][1])]
npt.assert_array_almost_equal(obs, exp)
def test_harmonize_linear_interpolation():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
methods['method'] = ['linear_interpolate_2060'] * nvals
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# future year
x1, x2, x = '2015', '2060', '2040'
y1, y2 = _hist[x1], _df[x2]
m = (y2 - y1) / (float(x2) - float(x1))
b = y1 - m * float(x1)
obs = res[x]
exp = m * float(x) + b
npt.assert_array_almost_equal(obs, exp)
# year after interp
obs = res['2060']
exp = _df['2060']
npt.assert_array_almost_equal(obs, exp)
def test_harmonize_budget():
df = _df.copy()
hist = _hist.copy()
methods = _methods.copy()
h = harmonize.Harmonizer(df, hist)
methods['method'] = 'budget'
res = h.harmonize(overrides=methods['method'])
# base year
obs = res['2015']
exp = _hist['2015']
npt.assert_array_almost_equal(obs, exp)
# carbon budget conserved
def _carbon_budget(emissions):
# trapezoid rule
dyears = np.diff(emissions.columns.astype(int))
emissions = emissions.values
demissions = np.diff(emissions, axis=1)
budget = (dyears * (emissions[:, :-1] + demissions / 2)).sum(axis=1)
return budget
npt.assert_array_almost_equal(
_carbon_budget(res),
_carbon_budget(df) - _carbon_budget(hist.loc[:, '2010':'2015']),
)<|fim▁end|> | |
<|file_name|>webglbuffer.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
// https://www.khronos.org/registry/webgl/specs/latest/1.0/webgl.idl
use canvas_traits::{CanvasMsg, CanvasWebGLMsg, WebGLError, WebGLResult};
use dom::bindings::codegen::Bindings::WebGLBufferBinding;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::Root;
use dom::bindings::utils::reflect_dom_object;
use dom::webglobject::WebGLObject;
use ipc_channel::ipc::{self, IpcSender};
use std::cell::Cell;
#[dom_struct]
pub struct WebGLBuffer {
webgl_object: WebGLObject,
id: u32,
/// The target to which this buffer was bound the first time
target: Cell<Option<u32>>,
is_deleted: Cell<bool>,
#[ignore_heap_size_of = "Defined in ipc-channel"]
renderer: IpcSender<CanvasMsg>,
}<|fim▁hole|> webgl_object: WebGLObject::new_inherited(),
id: id,
target: Cell::new(None),
is_deleted: Cell::new(false),
renderer: renderer,
}
}
pub fn maybe_new(global: GlobalRef, renderer: IpcSender<CanvasMsg>)
-> Option<Root<WebGLBuffer>> {
let (sender, receiver) = ipc::channel().unwrap();
renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::CreateBuffer(sender))).unwrap();
let result = receiver.recv().unwrap();
result.map(|buffer_id| WebGLBuffer::new(global, renderer, *buffer_id))
}
pub fn new(global: GlobalRef, renderer: IpcSender<CanvasMsg>, id: u32) -> Root<WebGLBuffer> {
reflect_dom_object(box WebGLBuffer::new_inherited(renderer, id), global, WebGLBufferBinding::Wrap)
}
}
impl WebGLBuffer {
pub fn id(&self) -> u32 {
self.id
}
// NB: Only valid buffer targets come here
pub fn bind(&self, target: u32) -> WebGLResult<()> {
if let Some(previous_target) = self.target.get() {
if target != previous_target {
return Err(WebGLError::InvalidOperation);
}
} else {
self.target.set(Some(target));
}
self.renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::BindBuffer(target, self.id))).unwrap();
Ok(())
}
pub fn delete(&self) {
if !self.is_deleted.get() {
self.is_deleted.set(true);
self.renderer.send(CanvasMsg::WebGL(CanvasWebGLMsg::DeleteBuffer(self.id))).unwrap();
}
}
}<|fim▁end|> |
impl WebGLBuffer {
fn new_inherited(renderer: IpcSender<CanvasMsg>, id: u32) -> WebGLBuffer {
WebGLBuffer { |
<|file_name|>issue-79187-2.rs<|end_file_name|><|fim▁begin|>trait Foo {}
impl<F> Foo for F where F: Fn(&i32) -> &i32 {}
fn take_foo(_: impl Foo) {}
fn main() {
take_foo(|a| a); //~ ERROR mismatched types
take_foo(|a: &i32| a); //~ ERROR mismatched types
take_foo(|a: &i32| -> &i32 { a }); //~ ERROR mismatched types
// OK
take_foo(identity(|a| a));
take_foo(identity(|a: &i32| a));
take_foo(identity(|a: &i32| -> &i32 { a }));
fn identity<F>(t: F) -> F
where
F: Fn(&i32) -> &i32,
{
t<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>results.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.utils import lrange
ALL_RESULTS = lrange(7)
SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, CANCELLED = ALL_RESULTS
# Display names indexed by the numeric status codes above.
Results = ["success", "warnings", "failure", "skipped", "exception", "retry", "cancelled"]


def statusToString(status):
    """Translate a numeric status code into its display name.

    ``None`` means the build has not finished yet; codes outside the known
    range yield ``"Invalid status"``.
    """
    if status is None:
        return "not finished"
    if 0 <= status < len(Results):
        return Results[status]
    return "Invalid status"
def worst_status(a, b):
    """Return the more severe of two status codes.

    Severity order, worst first:
    CANCELLED > RETRY > EXCEPTION > FAILURE > WARNINGS > SUCCESS > SKIPPED
    """
    for candidate in (CANCELLED, RETRY, EXCEPTION, FAILURE, WARNINGS, SUCCESS, SKIPPED):
        if candidate == a or candidate == b:
            return candidate
def computeResultAndTermination(obj, result, previousResult):
    """Combine a step ``result`` with ``previousResult`` according to the
    flunk/warn/halt flags on ``obj`` (a ResultComputingConfigMixin).

    Returns a ``(result, terminate)`` tuple: the new overall result and
    whether the build should stop at this step.
    """
    possible_overall_result = result
    # Default: keep going.  Without this initialisation the SUCCESS and
    # SKIPPED paths reach the final ``return`` with ``terminate`` unbound
    # and raise UnboundLocalError.
    terminate = False
    if result == FAILURE:
        if not obj.flunkOnFailure:
            possible_overall_result = SUCCESS
        if obj.warnOnFailure:
            possible_overall_result = WARNINGS
        # flunkOnFailure takes precedence over warnOnFailure when both are set.
        if obj.flunkOnFailure:
            possible_overall_result = FAILURE
        if obj.haltOnFailure:
            terminate = True
    elif result == WARNINGS:
        if not obj.warnOnWarnings:
            possible_overall_result = SUCCESS
        else:
            possible_overall_result = WARNINGS
        if obj.flunkOnWarnings:
            possible_overall_result = FAILURE
    elif result in (EXCEPTION, RETRY, CANCELLED):
        # These are always fatal to the build.
        terminate = True
    result = worst_status(previousResult, possible_overall_result)
    return result, terminate
class ResultComputingConfigMixin(object):
    # Default flags consumed by computeResultAndTermination(); build steps
    # override these to tune how their result affects the overall build.
    haltOnFailure = False      # stop the build when this step fails
    flunkOnWarnings = False    # treat warnings here as an overall FAILURE
    flunkOnFailure = True      # a failure here fails the whole build
    warnOnWarnings = False     # surface warnings as overall WARNINGS
    warnOnFailure = False      # downgrade a failure to overall WARNINGS
    # Names of the configurable flags, e.g. for config validation/rendering.
    resultConfig = [
        "haltOnFailure",
        "flunkOnWarnings",
        "flunkOnFailure",
        "warnOnWarnings",
        "warnOnFailure",
    ]
<|file_name|>poll_module.py<|end_file_name|><|fim▁begin|>"""Poll module is ungraded xmodule used by students to
to do set of polls.
On the client side we show:
If student does not yet anwered - Question with set of choices.
If student have answered - Question with statistics for each answers.
"""
import cgi
import json
import logging
from copy import deepcopy
from collections import OrderedDict
from lxml import etree
from pkg_resources import resource_string
from xmodule.x_module import XModule
from xmodule.stringify import stringify_children
from xmodule.mako_module import MakoModuleDescriptor
from xmodule.xml_module import XmlDescriptor
from xblock.core import Scope, String, Dict, Boolean, List<|fim▁hole|>
log = logging.getLogger(__name__)
class PollFields(object):
    """XBlock field declarations shared by PollModule and PollDescriptor."""
    # Name of poll to use in links to this poll
    display_name = String(help="Display name for this module", scope=Scope.settings)
    # Per-student state: whether this student has already voted.
    voted = Boolean(help="Whether this student has voted on the poll", scope=Scope.user_state, default=False)
    # Per-student state: id of the chosen answer ('' until the student votes).
    poll_answer = String(help="Student answer", scope=Scope.user_state, default='')
    # Aggregate vote counts keyed by answer id, shared across students
    # (help-text typo "fro" corrected to "for").
    poll_answers = Dict(help="All possible answers for the poll for other students", scope=Scope.content)
    # Answer definitions ({'id', 'text'} dicts) parsed from the XML.
    answers = List(help="Poll answers from xml", scope=Scope.content, default=[])
    question = String(help="Poll question", scope=Scope.content, default='')
class PollModule(PollFields, XModule):
    """Poll Module.

    Client-facing side of the poll: serves the question/choices, records a
    single vote per student, and returns aggregate statistics.
    """
    # Static assets bundled into the client-side "Poll" JS module.
    js = {
        'coffee': [resource_string(__name__, 'js/src/javascript_loader.coffee')],
        'js': [resource_string(__name__, 'js/src/poll/logme.js'),
               resource_string(__name__, 'js/src/poll/poll.js'),
               resource_string(__name__, 'js/src/poll/poll_main.js')]
    }
    css = {'scss': [resource_string(__name__, 'css/poll/display.scss')]}
    js_module_name = "Poll"

    def handle_ajax(self, dispatch, data):
        """Ajax handler.

        Args:
            dispatch: string request slug — either an answer id (a vote),
                'get_state', or 'reset_poll'.
            data: dict request data parameters

        Returns:
            json string
        """
        # A vote: dispatch is a known answer id and the student hasn't voted.
        if dispatch in self.poll_answers and not self.voted:
            # FIXME: fix this, when xblock will support mutable types.
            # Now we use this hack.
            temp_poll_answers = self.poll_answers
            temp_poll_answers[dispatch] += 1
            self.poll_answers = temp_poll_answers
            self.voted = True
            self.poll_answer = dispatch
            return json.dumps({'poll_answers': self.poll_answers,
                               'total': sum(self.poll_answers.values()),
                               'callback': {'objectName': 'Conditional'}
                               })
        elif dispatch == 'get_state':
            return json.dumps({'poll_answer': self.poll_answer,
                               'poll_answers': self.poll_answers,
                               'total': sum(self.poll_answers.values())
                               })
        # Undo this student's vote; only allowed unless reset="false" in XML.
        elif dispatch == 'reset_poll' and self.voted and \
                self.descriptor.xml_attributes.get('reset', 'True').lower() != 'false':
            self.voted = False
            # FIXME: fix this, when xblock will support mutable types.
            # Now we use this hack.
            temp_poll_answers = self.poll_answers
            temp_poll_answers[self.poll_answer] -= 1
            self.poll_answers = temp_poll_answers
            self.poll_answer = ''
            return json.dumps({'status': 'success'})
        else:  # return error message
            return json.dumps({'error': 'Unknown Command!'})

    def get_html(self):
        """Renders parameters to template."""
        params = {
            'element_id': self.location.html_id(),
            'element_class': self.location.category,
            'ajax_url': self.system.ajax_url,
            'configuration_json': self.dump_poll(),
        }
        self.content = self.system.render_template('poll.html', params)
        return self.content

    def dump_poll(self):
        """Dump poll information.

        Returns:
            string - Serialize json.
        """
        # FIXME: hack for resolving caching `default={}` during definition
        # poll_answers field
        if self.poll_answers is None:
            self.poll_answers = {}
        answers_to_json = OrderedDict()
        # FIXME: fix this, when xblock support mutable types.
        # Now we use this hack.
        temp_poll_answers = self.poll_answers
        # Fill self.poll_answers, prepare data for template context.
        for answer in self.answers:
            # Set default count for answer = 0.
            if answer['id'] not in temp_poll_answers:
                temp_poll_answers[answer['id']] = 0
            answers_to_json[answer['id']] = cgi.escape(answer['text'])
        self.poll_answers = temp_poll_answers
        # Statistics are only revealed once this student has voted.
        return json.dumps({'answers': answers_to_json,
                           'question': cgi.escape(self.question),
                           # to show answered poll after reload:
                           'poll_answer': self.poll_answer,
                           'poll_answers': self.poll_answers if self.voted else {},
                           'total': sum(self.poll_answers.values()) if self.voted else 0,
                           'reset': str(self.descriptor.xml_attributes.get('reset', 'true')).lower()})
class PollDescriptor(PollFields, MakoModuleDescriptor, XmlDescriptor):
    """Studio-side descriptor: (de)serializes polls to/from XML."""
    # XML tag names for the poll and its answer children.
    _tag_name = 'poll_question'
    _child_tag_name = 'answer'

    module_class = PollModule

    @classmethod
    def definition_from_xml(cls, xml_object, system):
        """Pull out the data into dictionary.

        Args:
            xml_object: xml from file.
            system: `system` object.

        Returns:
            (definition, children) - tuple
            definition - dict:
                {
                    'answers': <List of answers>,
                    'question': <Question string>
                }
        """
        # Check for presense of required tags in xml.
        if len(xml_object.xpath(cls._child_tag_name)) == 0:
            raise ValueError("Poll_question definition must include \
                at least one 'answer' tag")
        # Work on a copy so answers can be stripped out, leaving only the
        # question markup behind.
        xml_object_copy = deepcopy(xml_object)
        answers = []
        for element_answer in xml_object_copy.findall(cls._child_tag_name):
            answer_id = element_answer.get('id', None)
            # Answers without an id attribute are silently dropped.
            if answer_id:
                answers.append({
                    'id': answer_id,
                    'text': stringify_children(element_answer)
                })
            xml_object_copy.remove(element_answer)
        definition = {
            'answers': answers,
            'question': stringify_children(xml_object_copy)
        }
        children = []
        return (definition, children)

    def definition_to_xml(self, resource_fs):
        """Return an xml element representing to this definition."""
        poll_str = '<{tag_name}>{text}</{tag_name}>'.format(
            tag_name=self._tag_name, text=self.question)
        xml_object = etree.fromstring(poll_str)
        xml_object.set('display_name', self.display_name)

        # Append one <answer id="..."> child per stored answer.
        def add_child(xml_obj, answer):
            child_str = '<{tag_name} id="{id}">{text}</{tag_name}>'.format(
                tag_name=self._child_tag_name, id=answer['id'],
                text=answer['text'])
            child_node = etree.fromstring(child_str)
            xml_object.append(child_node)

        for answer in self.answers:
            add_child(xml_object, answer)
        return xml_object
<|file_name|>ImageManager.py<|end_file_name|><|fim▁begin|># for localized messages
from boxbranding import getBoxType, getImageType, getImageDistro, getImageVersion, getImageBuild, getImageFolder, getImageFileSystem, getBrandOEM, getMachineBrand, getMachineName, getMachineBuild, getMachineMake, getMachineMtdRoot, getMachineRootFile, getMachineMtdKernel, getMachineKernelFile, getMachineMKUBIFS, getMachineUBINIZE
from os import path, system, mkdir, makedirs, listdir, remove, statvfs, chmod, walk, symlink, unlink
from shutil import rmtree, move, copy
from time import localtime, time, strftime, mktime
from enigma import eTimer
from . import _
import Components.Task
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Button import Button
from Components.MenuList import MenuList
from Components.Sources.StaticText import StaticText
from Components.SystemInfo import SystemInfo
from Components.config import config, ConfigSubsection, ConfigYesNo, ConfigSelection, ConfigText, ConfigNumber, NoSave, ConfigClock
from Components.Harddisk import harddiskmanager, getProcMounts
from Screens.Screen import Screen
from Screens.Setup import Setup
from Components.Console import Console
from Screens.Console import Console as ScreenConsole
from Screens.MessageBox import MessageBox
from Screens.Standby import TryQuitMainloop
from Tools.Notifications import AddPopupWithCallback
import urllib
from os import rename, path, remove
# Notification id reused by the free-RAM / free-space failure popups.
RAMCHEKFAILEDID = 'RamCheckFailedNotification'

# Candidate backup locations: every mounted partition except the rootfs.
hddchoises = []
for p in harddiskmanager.getMountedPartitions():
    if path.exists(p.mountpoint):
        d = path.normpath(p.mountpoint)
        if p.mountpoint != '/':
            hddchoises.append((p.mountpoint, d))

# Persistent plugin configuration (backup naming, location and schedule).
config.imagemanager = ConfigSubsection()
defaultprefix = getImageDistro() + '-' + getBoxType()
config.imagemanager.folderprefix = ConfigText(default=defaultprefix, fixed_size=False)
config.imagemanager.backuplocation = ConfigSelection(choices=hddchoises)
config.imagemanager.schedule = ConfigYesNo(default=False)
config.imagemanager.scheduletime = ConfigClock(default=0)  # 1:00
config.imagemanager.repeattype = ConfigSelection(default="daily", choices=[("daily", _("Daily")), ("weekly", _("Weekly")), ("monthly", _("30 Days"))])
config.imagemanager.backupretry = ConfigNumber(default=30)
config.imagemanager.backupretrycount = NoSave(ConfigNumber(default=0))
config.imagemanager.nextscheduletime = NoSave(ConfigNumber(default=0))
config.imagemanager.restoreimage = NoSave(ConfigText(default=getBoxType(), fixed_size=False))

# Singleton scheduler, created by ImageManagerautostart().
autoImageManagerTimer = None

# Clean up any half-finished restore staging area from a previous run.
if path.exists(config.imagemanager.backuplocation.value + 'imagebackups/imagerestore'):
    try:
        rmtree(config.imagemanager.backuplocation.value + 'imagebackups/imagerestore')
    except:
        pass
def ImageManagerautostart(reason, session=None, **kwargs):
"""called with reason=1 to during /sbin/shutdown.sysvinit, with reason=0 at startup?"""
global autoImageManagerTimer
global _session
now = int(time())
if reason == 0:
print "[ImageManager] AutoStart Enabled"
if session is not None:
_session = session
if autoImageManagerTimer is None:
autoImageManagerTimer = AutoImageManagerTimer(session)
else:
if autoImageManagerTimer is not None:
print "[ImageManager] Stop"
autoImageManagerTimer.stop()
class VIXImageManager(Screen):
    def __init__(self, session):
        """Build the Image Manager screen: labels, colour buttons, backup
        list, activity timer and the 'next backup' status line."""
        Screen.__init__(self, session)
        Screen.setTitle(self, _("Image Manager"))
        self['lab1'] = Label()
        self["backupstatus"] = Label()
        # Restore is not offered on multi-boot receivers.
        if SystemInfo["HaveMultiBoot"]:
            self["key_blue"] = Button(_("Unavailable"))
        else:
            self["key_blue"] = Button(_("Restore"))
        self["key_green"] = Button()
        self["key_yellow"] = Button(_("Downloads"))
        self["key_red"] = Button(_("Delete"))
        self.BackupRunning = False
        self.onChangedEntry = []
        self.oldlist = None
        self.emlist = []
        self['list'] = MenuList(self.emlist)
        self.populate_List()
        # Poll pending jobs shortly after the screen opens (see backupRunning).
        self.activityTimer = eTimer()
        self.activityTimer.timeout.get().append(self.backupRunning)
        self.activityTimer.start(10)
        self.Console = Console()
        # BackupTime is the module-level next-run timestamp (0 = none).
        if BackupTime > 0:
            t = localtime(BackupTime)
            backuptext = _("Next Backup: ") + strftime(_("%a %e %b %-H:%M"), t)
        else:
            backuptext = _("Next Backup: ")
        self["backupstatus"].setText(str(backuptext))
        if not self.selectionChanged in self["list"].onSelectionChanged:
            self["list"].onSelectionChanged.append(self.selectionChanged)
    def createSummary(self):
        """Return the LCD/summary screen class used for this screen."""
        from Screens.PluginBrowser import PluginBrowserSummary
        return PluginBrowserSummary
def selectionChanged(self):
item = self["list"].getCurrent()
desc = self["backupstatus"].text
if item:
name = item
else:
name = ""
for cb in self.onChangedEntry:
cb(name, desc)
    def backupRunning(self):
        """activityTimer tick: refresh the list and green-button label
        according to whether an Image Manager job is pending."""
        self.populate_List()
        self.BackupRunning = False
        for job in Components.Task.job_manager.getPendingJobs():
            if job.name.startswith(_("Image Manager")):
                # NOTE(review): assigning False here is a no-op; by analogy
                # with GreenPressed() this looks like it should set True —
                # confirm against upstream before changing.
                self.BackupRunning = False
        # Backup creation is not supported on these models.
        if getMachineBuild() in ('vusolo4k','hd51','hd52'):
            self["key_green"].setText(_("Unavailable"))
        else:
            self["key_green"].setText(_("New Backup"))
        # Re-arm so the label keeps tracking job state.
        self.activityTimer.startLongTimer(5)
def refreshUp(self):
self.refreshList()
if self['list'].getCurrent():
self["list"].instance.moveSelection(self["list"].instance.moveUp)
def refreshDown(self):
self.refreshList()
if self['list'].getCurrent():
self["list"].instance.moveSelection(self["list"].instance.moveDown)
def refreshList(self):
images = listdir(self.BackupDirectory)
self.oldlist = images
del self.emlist[:]
for fil in images:
if fil.endswith('.zip') or path.isdir(path.join(self.BackupDirectory, fil)):
self.emlist.append(fil)
self.emlist.sort()
self.emlist.reverse()
self["list"].setList(self.emlist)
self["list"].show()
def getJobName(self, job):
return "%s: %s (%d%%)" % (job.getStatustext(), job.name, int(100 * job.progress / float(job.end)))
    def showJobView(self, job):
        """Bring the given task's progress screen to the foreground
        (modal: not cancelable or backgroundable from the view)."""
        from Screens.TaskView import JobView
        Components.Task.job_manager.in_background = False
        self.session.openWithCallback(self.JobViewCB, JobView, job, cancelable=False, backgroundable=False, afterEventChangeable=False, afterEvent="close")

    def JobViewCB(self, in_background):
        """JobView close callback: record whether the job went background."""
        Components.Task.job_manager.in_background = in_background
    def populate_List(self):
        """Validate the configured backup location, install the matching
        key map, and (re)load the backup list.

        Three cases: configured location is valid; it is invalid but
        /media/hdd is available (fall back to it); no usable device at all
        (only cancel/menu remain active).
        """
        # Refresh the selectable locations from currently mounted partitions.
        imparts = []
        for p in harddiskmanager.getMountedPartitions():
            if path.exists(p.mountpoint):
                d = path.normpath(p.mountpoint)
                if p.mountpoint != '/':
                    imparts.append((p.mountpoint, d))
        config.imagemanager.backuplocation.setChoices(imparts)
        # Normalise the stored value into the (with-slash, without-slash)
        # tuple shape used by the choices list.
        if config.imagemanager.backuplocation.value.endswith('/'):
            mount = config.imagemanager.backuplocation.value, config.imagemanager.backuplocation.value[:-1]
        else:
            mount = config.imagemanager.backuplocation.value + '/', config.imagemanager.backuplocation.value
        hdd = '/media/hdd/', '/media/hdd'
        if mount not in config.imagemanager.backuplocation.choices.choices:
            if hdd in config.imagemanager.backuplocation.choices.choices:
                # Configured device gone — fall back to /media/hdd.
                self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions', "MenuActions", "HelpActions"],
                    {
                        "ok": self.keyResstore,
                        'cancel': self.close,
                        'red': self.keyDelete,
                        'green': self.GreenPressed,
                        'yellow': self.doDownload,
                        'blue': self.keyResstore,
                        "menu": self.createSetup,
                        "up": self.refreshUp,
                        "down": self.refreshDown,
                        "displayHelp": self.doDownload,
                    }, -1)
                self.BackupDirectory = '/media/hdd/imagebackups/'
                config.imagemanager.backuplocation.value = '/media/hdd/'
                config.imagemanager.backuplocation.save()
                self['lab1'].setText(_("The chosen location does not exist, using /media/hdd") + "\n" + _("Select an image to restore:"))
            else:
                # No usable device: only allow leaving or opening setup.
                self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions', "MenuActions"],
                    {
                        'cancel': self.close,
                        "menu": self.createSetup,
                    }, -1)
                self['lab1'].setText(_("Device: None available") + "\n" + _("Select an image to restore:"))
        else:
            # Configured location is valid: full key map plus free-space info.
            self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions', "MenuActions", "HelpActions"],
                {
                    'cancel': self.close,
                    'red': self.keyDelete,
                    'green': self.GreenPressed,
                    'yellow': self.doDownload,
                    'blue': self.keyResstore,
                    "menu": self.createSetup,
                    "up": self.refreshUp,
                    "down": self.refreshDown,
                    "displayHelp": self.doDownload,
                    "ok": self.keyResstore,
                }, -1)
            self.BackupDirectory = config.imagemanager.backuplocation.value + 'imagebackups/'
            s = statvfs(config.imagemanager.backuplocation.value)
            free = (s.f_bsize * s.f_bavail) / (1024 * 1024)
            self['lab1'].setText(_("Device: ") + config.imagemanager.backuplocation.value + ' ' + _('Free space:') + ' ' + str(free) + _('MB') + "\n" + _("Select an image to restore:"))
        try:
            if not path.exists(self.BackupDirectory):
                mkdir(self.BackupDirectory, 0755)
            # Remove a stale swapfile left behind by an interrupted backup.
            if path.exists(self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-swapfile_backup'):
                system('swapoff ' + self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-swapfile_backup')
                remove(self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-swapfile_backup')
            self.refreshList()
        except:
            self['lab1'].setText(_("Device: ") + config.imagemanager.backuplocation.value + "\n" + _("there is a problem with this device, please reformat and try again."))
    def createSetup(self):
        """Menu key: open the plugin's setup screen."""
        self.session.openWithCallback(self.setupDone, Setup, 'viximagemanager', 'SystemPlugins/ViX')

    def doDownload(self):
        """Yellow key / help key: open the image download screen."""
        self.session.openWithCallback(self.populate_List, ImageManagerDownload, self.BackupDirectory)

    def setupDone(self, test=None):
        """Setup-closed callback: restore the default prefix if cleared,
        then refresh the list and re-apply the schedule."""
        if config.imagemanager.folderprefix.value == '':
            config.imagemanager.folderprefix.value = defaultprefix
            config.imagemanager.folderprefix.save()
        self.populate_List()
        self.doneConfiguring()
def doneConfiguring(self):
now = int(time())
if config.imagemanager.schedule.value:
if autoImageManagerTimer is not None:
print "[ImageManager] Backup Schedule Enabled at", strftime("%c", localtime(now))<|fim▁hole|> if autoImageManagerTimer is not None:
global BackupTime
BackupTime = 0
print "[ImageManager] Backup Schedule Disabled at", strftime("%c", localtime(now))
autoImageManagerTimer.backupstop()
if BackupTime > 0:
t = localtime(BackupTime)
backuptext = _("Next Backup: ") + strftime(_("%a %e %b %-H:%M"), t)
else:
backuptext = _("Next Backup: ")
self["backupstatus"].setText(str(backuptext))
    def keyDelete(self):
        """Red key: ask for confirmation before deleting the selected backup."""
        self.sel = self['list'].getCurrent()
        if self.sel:
            message = _("Are you sure you want to delete this backup:\n ") + self.sel
            ybox = self.session.openWithCallback(self.doDelete, MessageBox, message, MessageBox.TYPE_YESNO, default=False)
            ybox.setTitle(_("Remove Confirmation"))
        else:
            self.session.open(MessageBox, _("You have no image to delete."), MessageBox.TYPE_INFO, timeout=10)

    def doDelete(self, answer):
        """Confirmation callback: remove the selected zip or backup folder."""
        if answer is True:
            self.sel = self['list'].getCurrent()
            self["list"].instance.moveSelectionTo(0)
            if self.sel.endswith('.zip'):
                remove(self.BackupDirectory + self.sel)
            else:
                rmtree(self.BackupDirectory + self.sel)
            self.populate_List()
    def GreenPressed(self):
        """Green key: show progress if an Image Manager job is already
        pending, otherwise start a new backup."""
        backup = None
        self.BackupRunning = False
        for job in Components.Task.job_manager.getPendingJobs():
            if job.name.startswith(_("Image Manager")):
                backup = job
                self.BackupRunning = True
        if self.BackupRunning and backup:
            self.showJobView(backup)
        else:
            self.keyBackup()
    def keyBackup(self):
        """Ask for confirmation before creating a new backup image
        (unsupported on some models)."""
        self.sel = self['list'].getCurrent()
        if getMachineBuild() in ('vusolo4k','hd51','hd52'):
            self.session.open(MessageBox, _("Sorry function not yet supported in this model"), MessageBox.TYPE_INFO, timeout=10)
        elif self.sel:
            message = _("Are you ready to create a backup image ?")
            ybox = self.session.openWithCallback(self.doBackup, MessageBox, message, MessageBox.TYPE_YESNO)
            ybox.setTitle(_("Backup Confirmation"))

    def doBackup(self, answer):
        """Confirmation callback: queue the ImageBackup job and open its
        progress view."""
        backup = None
        if answer is True:
            self.ImageBackup = ImageBackup(self.session)
            Components.Task.job_manager.AddJob(self.ImageBackup.createBackupJob())
            self.BackupRunning = True
            self["key_green"].setText(_("View Progress"))
            self["key_green"].show()
            # Find the job we just queued so we can show its JobView.
            for job in Components.Task.job_manager.getPendingJobs():
                if job.name.startswith(_("Image Manager")):
                    backup = job
            if backup:
                self.showJobView(backup)
    def keyResstore(self):
        """Blue key / OK: confirm restoring the selected image."""
        self.sel = self['list'].getCurrent()
        if getMachineBuild() in ('vusolo4k','hd51','hd52'):
            self.session.open(MessageBox, _("Sorry function not yet supported in this model - Try Image Flasher"), MessageBox.TYPE_INFO, timeout=10)
        elif self.sel:
            message = _("Are you sure you want to restore this image:\n ") + self.sel
            ybox = self.session.openWithCallback(self.keyResstore2, MessageBox, message, MessageBox.TYPE_YESNO)
            ybox.setTitle(_("Restore Confirmation"))
        else:
            self.session.open(MessageBox, _("You have no image to restore."), MessageBox.TYPE_INFO, timeout=10)

    def keyResstore2(self, answer):
        """Stage the selected backup under /tmp/imagerestore, unzipping
        archives first; folders are symlinked directly."""
        if path.islink('/tmp/imagerestore'):
            unlink('/tmp/imagerestore')
        if answer:
            self.session.open(MessageBox, _("Please wait while the restore prepares"), MessageBox.TYPE_INFO, timeout=60, enable_input=False)
            TEMPDESTROOT = self.BackupDirectory + 'imagerestore'
            if self.sel.endswith('.zip'):
                if not path.exists(TEMPDESTROOT):
                    mkdir(TEMPDESTROOT, 0755)
                # keyResstore3 runs when unzip finishes.
                self.Console.ePopen('unzip -o ' + self.BackupDirectory + self.sel + ' -d ' + TEMPDESTROOT, self.keyResstore3)
                symlink(TEMPDESTROOT, '/tmp/imagerestore')
            else:
                symlink(self.BackupDirectory + self.sel, '/tmp/imagerestore')
                self.keyResstore3(0, 0)

    def keyResstore3(self, result, retval, extra_args=None):
        """Flash the staged image with ofgwrite once staging succeeded."""
        if retval == 0:
            kernelMTD = getMachineMtdKernel()
            kernelFILE = getMachineKernelFile()
            rootMTD = getMachineMtdRoot()
            rootFILE = getMachineRootFile()
            MAINDEST = '/tmp/imagerestore/' + getImageFolder() + '/'
            config.imagemanager.restoreimage.setValue(self.sel)
            # NOTE(review): the command repeats '-r'/'-k' both bare and with a
            # partition suffix ('-r -k -r<mtd> -k<mtd>') — confirm against
            # ofgwrite's expected flag syntax.
            self.Console.ePopen('ofgwrite -r -k -r' + rootMTD + ' -k' + kernelMTD + ' ' + MAINDEST)
class AutoImageManagerTimer:
    """Drives scheduled backups: arms an eTimer for the next configured
    run, prompts the user (or runs silently in standby), and re-arms
    itself according to the daily/weekly/monthly repeat type."""
    def __init__(self, session):
        self.session = session
        self.backuptimer = eTimer()
        self.backuptimer.callback.append(self.BackuponTimer)
        self.backupactivityTimer = eTimer()
        self.backupactivityTimer.timeout.get().append(self.backupupdatedelay)
        now = int(time())
        global BackupTime
        if config.imagemanager.schedule.value:
            print "[ImageManager] Backup Schedule Enabled at ", strftime("%c", localtime(now))
            # Sanity check: only schedule once the system clock looks real
            # (1262304000 = 2010-01-01); otherwise retry via activity timer.
            if now > 1262304000:
                self.backupupdate()
            else:
                print "[ImageManager] Backup Time not yet set."
                BackupTime = 0
                self.backupactivityTimer.start(36000)
        else:
            BackupTime = 0
            print "[ImageManager] Backup Schedule Disabled at", strftime("(now=%c)", localtime(now))
            self.backupactivityTimer.stop()

    def backupupdatedelay(self):
        # Clock-not-set retry path: try to compute the schedule again.
        self.backupactivityTimer.stop()
        self.backupupdate()

    def getBackupTime(self):
        """Return today's configured backup time as an epoch timestamp."""
        backupclock = config.imagemanager.scheduletime.value
        nowt = time()
        now = localtime(nowt)
        return int(mktime((now.tm_year, now.tm_mon, now.tm_mday, backupclock[0], backupclock[1], 0, now.tm_wday, now.tm_yday, now.tm_isdst)))

    def backupupdate(self, atLeast=0):
        """Compute the next run (today's slot, pushed forward by the repeat
        interval until it is at least `atLeast` seconds away) and arm the
        timer.  Returns the chosen BackupTime."""
        self.backuptimer.stop()
        global BackupTime
        BackupTime = self.getBackupTime()
        now = int(time())
        if BackupTime > 0:
            if BackupTime < now + atLeast:
                if config.imagemanager.repeattype.value == "daily":
                    BackupTime += 24 * 3600
                    while (int(BackupTime) - 30) < now:
                        BackupTime += 24 * 3600
                elif config.imagemanager.repeattype.value == "weekly":
                    BackupTime += 7 * 24 * 3600
                    while (int(BackupTime) - 30) < now:
                        BackupTime += 7 * 24 * 3600
                elif config.imagemanager.repeattype.value == "monthly":
                    BackupTime += 30 * 24 * 3600
                    while (int(BackupTime) - 30) < now:
                        BackupTime += 30 * 24 * 3600
            next = BackupTime - now
            self.backuptimer.startLongTimer(next)
        else:
            BackupTime = -1
        print "[ImageManager] Backup Time set to", strftime("%c", localtime(BackupTime)), strftime("(now=%c)", localtime(now))
        return BackupTime

    def backupstop(self):
        self.backuptimer.stop()

    def BackuponTimer(self):
        """Timer fired: if we woke within a minute of the scheduled slot,
        run (prompting unless in standby); otherwise reschedule."""
        self.backuptimer.stop()
        now = int(time())
        wake = self.getBackupTime()
        # If we're close enough, we're okay...
        atLeast = 0
        if wake - now < 60:
            print "[ImageManager] Backup onTimer occured at", strftime("%c", localtime(now))
            from Screens.Standby import inStandby
            if not inStandby:
                message = _("Your %s %s is about to run a full image backup, this can take about 6 minutes to complete,\ndo you want to allow this?") % (getMachineBrand(), getMachineName())
                ybox = self.session.openWithCallback(self.doBackup, MessageBox, message, MessageBox.TYPE_YESNO, timeout=30)
                ybox.setTitle('Scheduled Backup.')
            else:
                print "[ImageManager] in Standby, so just running backup", strftime("%c", localtime(now))
                self.doBackup(True)
        else:
            print '[ImageManager] Where are not close enough', strftime("%c", localtime(now))
            self.backupupdate(60)

    def doBackup(self, answer):
        """Prompt callback: on refusal retry later (up to 2 times, then wait
        for the next schedule); on acceptance queue the backup job."""
        now = int(time())
        if answer is False:
            if config.imagemanager.backupretrycount.value < 2:
                print '[ImageManager] Number of retries', config.imagemanager.backupretrycount.value
                print "[ImageManager] Backup delayed."
                repeat = config.imagemanager.backupretrycount.value
                repeat += 1
                config.imagemanager.backupretrycount.setValue(repeat)
                # NOTE(review): this assigns a function-local BackupTime (no
                # `global` here), so only the log line sees it — confirm
                # whether the module-level BackupTime should be updated too.
                BackupTime = now + (int(config.imagemanager.backupretry.value) * 60)
                print "[ImageManager] Backup Time now set to", strftime("%c", localtime(BackupTime)), strftime("(now=%c)", localtime(now))
                self.backuptimer.startLongTimer(int(config.imagemanager.backupretry.value) * 60)
            else:
                atLeast = 60
                print "[ImageManager] Enough Retries, delaying till next schedule.", strftime("%c", localtime(now))
                self.session.open(MessageBox, _("Enough Retries, delaying till next schedule."), MessageBox.TYPE_INFO, timeout=10)
                config.imagemanager.backupretrycount.setValue(0)
                self.backupupdate(atLeast)
        else:
            print "[ImageManager] Running Backup", strftime("%c", localtime(now))
            self.ImageBackup = ImageBackup(self.session)
            Components.Task.job_manager.AddJob(self.ImageBackup.createBackupJob())
        #self.close()
class ImageBackup(Screen):
    def __init__(self, session, updatebackup=False):
        """Precompute all paths, MTD names and stage flags for the backup
        job.  `updatebackup` tags the output folder as a SoftwareUpdate
        backup instead of a plain one."""
        Screen.__init__(self, session)
        self.Console = Console()
        self.BackupDevice = config.imagemanager.backuplocation.value
        print "[ImageManager] Device: " + self.BackupDevice
        self.BackupDirectory = config.imagemanager.backuplocation.value + 'imagebackups/'
        print "[ImageManager] Directory: " + self.BackupDirectory
        self.BackupDate = strftime('%Y%m%d_%H%M%S', localtime())
        # Scratch dirs: WORKDIR holds generated images, TMPDIR the bind mount.
        self.WORKDIR = self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-temp'
        self.TMPDIR = self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-mount'
        if updatebackup:
            self.MAINDESTROOT = self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-SoftwareUpdate-' + getImageVersion() + '.' + getImageBuild() + '-' + self.BackupDate
        else:
            self.MAINDESTROOT = self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + '-' + getImageVersion() + '.' + getImageBuild() + '-' + self.BackupDate
        self.kernelMTD = getMachineMtdKernel()
        self.kernelFILE = getMachineKernelFile()
        self.rootMTD = getMachineMtdRoot()
        self.rootFILE = getMachineRootFile()
        self.MAINDEST = self.MAINDESTROOT + '/' + getImageFolder() + '/'
        print 'MTD: Kernel:',self.kernelMTD
        print 'MTD: Root:',self.rootMTD
        # Root image format follows the box's flash filesystem.
        if getImageFileSystem() == 'ubi':
            self.ROOTFSTYPE = 'ubifs'
        else:
            self.ROOTFSTYPE = 'jffs2'
        self.swapdevice = ""
        # Stage flags polled by the ConditionTasks in createBackupJob().
        self.RamChecked = False
        self.SwapCreated = False
        self.Stage1Completed = False
        self.Stage2Completed = False
        self.Stage3Completed = False
        self.Stage4Completed = False
        self.Stage5Completed = False
    def createBackupJob(self):
        """Assemble the backup as a task chain: each PythonTask starts an
        async stage and the following ConditionTask polls the matching
        StageNCompleted flag (with a timeout) before moving on."""
        job = Components.Task.Job(_("Image Manager"))
        task = Components.Task.PythonTask(job, _("Setting Up..."))
        task.work = self.JobStart
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Checking Free RAM.."), timeoutCount=10)
        task.check = lambda: self.RamChecked
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Creating Swap.."), timeoutCount=120)
        task.check = lambda: self.SwapCreated
        task.weighting = 5
        task = Components.Task.PythonTask(job, _("Backing up Root file system..."))
        task.work = self.doBackup1
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Backing up Root file system..."), timeoutCount=900)
        task.check = lambda: self.Stage1Completed
        task.weighting = 35
        task = Components.Task.PythonTask(job, _("Backing up Kernel..."))
        task.work = self.doBackup2
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Backing up Kernel..."), timeoutCount=900)
        task.check = lambda: self.Stage2Completed
        task.weighting = 15
        task = Components.Task.PythonTask(job, _("Removing temp mounts..."))
        task.work = self.doBackup3
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Removing temp mounts..."), timeoutCount=900)
        task.check = lambda: self.Stage3Completed
        task.weighting = 5
        task = Components.Task.PythonTask(job, _("Moving to Backup Location..."))
        task.work = self.doBackup4
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Moving to Backup Location..."), timeoutCount=900)
        task.check = lambda: self.Stage4Completed
        task.weighting = 5
        task = Components.Task.PythonTask(job, _("Creating zip..."))
        task.work = self.doBackup5
        task.weighting = 5
        task = Components.Task.ConditionTask(job, _("Creating zip..."), timeoutCount=900)
        task.check = lambda: self.Stage5Completed
        task.weighting = 5
        task = Components.Task.PythonTask(job, _("Backup Complete..."))
        task.work = self.BackupComplete
        task.weighting = 5
        return job
    def JobStart(self):
        """First stage: prepare the backup directory, drop any stale
        swapfile, then abort if less than 200 MB is free, otherwise start
        the RAM check."""
        try:
            if not path.exists(self.BackupDirectory):
                mkdir(self.BackupDirectory, 0755)
            # Leftover swapfile from an interrupted run: disable and delete.
            if path.exists(self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup"):
                system('swapoff ' + self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup")
                remove(self.BackupDirectory + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup")
        except Exception, e:
            print str(e)
            print "Device: " + config.imagemanager.backuplocation.value + ", i don't seem to have write access to this device."
        s = statvfs(self.BackupDevice)
        free = (s.f_bsize * s.f_bavail) / (1024 * 1024)
        if int(free) < 200:
            # NOTE(review): the message is built by concatenation inside _()
            # (untranslatable) and lacks a space before "only has" — confirm
            # intended wording before changing this runtime string.
            AddPopupWithCallback(self.BackupComplete,
                _("The backup location does not have enough free space." + "\n" + self.BackupDevice + "only has " + str(free) + "MB free."),
                MessageBox.TYPE_INFO,
                10,
                'RamCheckFailedNotification'
            )
        else:
            self.MemCheck()
    def MemCheck(self):
        """Check free RAM+swap; if under ~3 MB, find an ext2/3/4 partition
        to host a temporary swapfile, else proceed without one."""
        memfree = 0
        swapfree = 0
        f = open('/proc/meminfo', 'r')
        for line in f.readlines():
            if line.find('MemFree') != -1:
                parts = line.strip().split()
                memfree = int(parts[1])
            elif line.find('SwapFree') != -1:
                parts = line.strip().split()
                swapfree = int(parts[1])
        f.close()
        TotalFree = memfree + swapfree
        print '[ImageManager] Stage1: Free Mem', TotalFree
        if int(TotalFree) < 3000:
            # Swapfiles need a real (non-FAT, non-network) filesystem.
            supported_filesystems = frozenset(('ext4', 'ext3', 'ext2'))
            candidates = []
            mounts = getProcMounts()
            for partition in harddiskmanager.getMountedPartitions(False, mounts):
                if partition.filesystem(mounts) in supported_filesystems:
                    candidates.append((partition.description, partition.mountpoint))
            # Picks the LAST candidate's mountpoint (loop keeps overwriting).
            for swapdevice in candidates:
                self.swapdevice = swapdevice[1]
            if self.swapdevice:
                print '[ImageManager] Stage1: Creating Swapfile.'
                self.RamChecked = True
                self.MemCheck2()
            else:
                print '[ImageManager] Sorry, not enough free ram found, and no physical devices that supports SWAP attached'
                AddPopupWithCallback(self.BackupComplete,
                    _("Sorry, not enough free ram found, and no physical devices that supports SWAP attached. Can't create Swapfile on network or fat32 filesystems, unable to make backup"),
                    MessageBox.TYPE_INFO,
                    10,
                    'RamCheckFailedNotification'
                )
        else:
            print '[ImageManager] Stage1: Found Enough Ram'
            self.RamChecked = True
            self.SwapCreated = True
    def MemCheck2(self):
        # Allocate a 60 MB swapfile on the chosen device, then mkswap/swapon
        # in the chained callbacks below.
        self.Console.ePopen("dd if=/dev/zero of=" + self.swapdevice + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup bs=1024 count=61440", self.MemCheck3)

    def MemCheck3(self, result, retval, extra_args=None):
        # dd succeeded: format the file as swap.
        if retval == 0:
            self.Console.ePopen("mkswap " + self.swapdevice + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup", self.MemCheck4)

    def MemCheck4(self, result, retval, extra_args=None):
        # mkswap succeeded: enable it.
        if retval == 0:
            self.Console.ePopen("swapon " + self.swapdevice + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup", self.MemCheck5)

    def MemCheck5(self, result, retval, extra_args=None):
        # Swap is active (or swapon failed — flag is set regardless).
        self.SwapCreated = True
def doBackup1(self):
	# Stage 1: prepare working/destination folders and queue the shell
	# commands that build the root filesystem image (jffs2 or ubifs).
	print '[ImageManager] Stage1: Creating tmp folders.', self.BackupDirectory
	print '[ImageManager] Stage1: Creating backup Folders.'
	# Start from a clean work directory.
	if path.exists(self.WORKDIR):
		rmtree(self.WORKDIR)
	mkdir(self.WORKDIR, 0644)
	# Release a stale bind mount from a previous aborted run before
	# deleting the temporary tree.
	if path.exists(self.TMPDIR + '/root') and path.ismount(self.TMPDIR + '/root'):
		system('umount ' + self.TMPDIR + '/root')
	elif path.exists(self.TMPDIR + '/root'):
		rmtree(self.TMPDIR + '/root')
	if path.exists(self.TMPDIR):
		rmtree(self.TMPDIR)
	# NOTE(review): 0644 on directories lacks the execute bit needed to
	# traverse them — 0755 looks intended; confirm before changing.
	makedirs(self.TMPDIR + '/root', 0644)
	makedirs(self.MAINDESTROOT, 0644)
	self.commands = []
	print '[ImageManager] Stage1: Making Root Image.'
	makedirs(self.MAINDEST, 0644)
	if self.ROOTFSTYPE == 'jffs2':
		print '[ImageManager] Stage1: JFFS2 Detected.'
		# gb800solo needs fixed erase-block/pad sizes; all other boxes use
		# the generic 128 KiB erase-block options.
		if getMachineBuild() == 'gb800solo':
			JFFS2OPTIONS = " --disable-compressor=lzo -e131072 -l -p125829120"
		else:
			JFFS2OPTIONS = " --disable-compressor=lzo --eraseblock=0x20000 -n -l"
		# Bind-mount / so the image is built from the live root filesystem.
		self.commands.append('mount --bind / ' + self.TMPDIR + '/root')
		self.commands.append('mkfs.jffs2 --root=' + self.TMPDIR + '/root --faketime --output=' + self.WORKDIR + '/root.jffs2' + JFFS2OPTIONS)
	else:
		print '[ImageManager] Stage1: UBIFS Detected.'
		# NOTE(review): UBINIZE is assigned but the command below uses the
		# literal 'ubinize' — the local is dead.
		UBINIZE = 'ubinize'
		UBINIZE_ARGS = getMachineUBINIZE()
		MKUBIFS_ARGS = getMachineMKUBIFS()
		# ubinize needs a config file describing the single rootfs volume.
		output = open(self.WORKDIR + '/ubinize.cfg', 'w')
		output.write('[ubifs]\n')
		output.write('mode=ubi\n')
		output.write('image=' + self.WORKDIR + '/root.ubi\n')
		output.write('vol_id=0\n')
		output.write('vol_type=dynamic\n')
		output.write('vol_name=rootfs\n')
		output.write('vol_flags=autoresize\n')
		output.close()
		self.commands.append('mount --bind / ' + self.TMPDIR + '/root')
		self.commands.append('touch ' + self.WORKDIR + '/root.ubi')
		self.commands.append('mkfs.ubifs -r ' + self.TMPDIR + '/root -o ' + self.WORKDIR + '/root.ubi ' + MKUBIFS_ARGS)
		self.commands.append('ubinize -o ' + self.WORKDIR + '/root.ubifs ' + UBINIZE_ARGS + ' ' + self.WORKDIR + '/ubinize.cfg')
	# Run the queued commands sequentially; Stage1Complete fires per command.
	self.Console.eBatch(self.commands, self.Stage1Complete, debug=False)
def Stage1Complete(self, extra_args=None):
	# eBatch callback: only mark stage 1 finished once every spawned
	# console container has terminated.
	if len(self.Console.appContainers) == 0:
		self.Stage1Completed = True
		print '[ImageManager] Stage1: Complete.'
def doBackup2(self):
print '[ImageManager] Stage2: Making Kernel Image.'
self.command = 'nanddump /dev/' + self.kernelMTD + ' -f ' + self.WORKDIR + '/vmlinux.gz'
self.Console.ePopen(self.command, self.Stage2Complete)
def Stage2Complete(self, result, retval, extra_args=None):
	# ePopen callback: retval 0 means nanddump exited successfully.
	if retval == 0:
		self.Stage2Completed = True
		print '[ImageManager] Stage2: Complete.'
def doBackup3(self):
	# Stage 3: release the bind mount created in stage 1 and delete the
	# temporary tree in one shell command (&& skips rm if umount fails).
	print '[ImageManager] Stage3: Unmounting and removing tmp system'
	if path.exists(self.TMPDIR + '/root'):
		self.command = 'umount ' + self.TMPDIR + '/root && rm -rf ' + self.TMPDIR
		self.Console.ePopen(self.command, self.Stage3Complete)
def Stage3Complete(self, result, retval, extra_args=None):
	# umount/rm callback: mark stage 3 done on clean exit only.
	if retval == 0:
		self.Stage3Completed = True
		print '[ImageManager] Stage3: Complete.'
def doBackup4(self):
	# Stage 4: move the built images into the backup destination, write the
	# vendor-specific flasher marker files, clean up swap/work dirs and
	# verify the result.
	print '[ImageManager] Stage4: Moving from work to backup folders'
	move(self.WORKDIR + '/root.' + self.ROOTFSTYPE, self.MAINDEST + '/' + self.rootFILE)
	move(self.WORKDIR + '/vmlinux.gz', self.MAINDEST + '/' + self.kernelFILE)
	# Record which image/build produced this backup.
	fileout = open(self.MAINDEST + '/imageversion', 'w')
	line = defaultprefix + '-' + getImageType() + '-backup-' + getImageVersion() + '.' + getImageBuild() + '-' + self.BackupDate
	fileout.write(line)
	fileout.close()
	# Marker files interpreted by the boxes' flash loaders.
	if getBrandOEM() == 'vuplus':
		if getMachineBuild() == 'vuzero':
			fileout = open(self.MAINDEST + '/force.update', 'w')
			line = "This file forces the update."
			fileout.write(line)
			fileout.close()
		else:
			fileout = open(self.MAINDEST + '/reboot.update', 'w')
			line = "This file forces a reboot after the update."
			fileout.write(line)
			fileout.close()
		# NOTE(review): `imagecreated` is a local that is never read — dead.
		imagecreated = True
	elif getBrandOEM() in ('zgemma', 'xtrend', 'gigablue', 'odin', 'xp', 'ini', 'skylake', 'ixuss', 'blackbox', 'tripledot', 'entwopia'):
		if getBrandOEM() in ('zgemma', 'xtrend', 'odin', 'ini', 'skylake', 'ixuss', 'blackbox', 'tripledot', 'entwopia'):
			fileout = open(self.MAINDEST + '/noforce', 'w')
			line = "rename this file to 'force' to force an update without confirmation"
			fileout.write(line)
			fileout.close()
	if path.exists('/usr/lib/enigma2/python/Plugins/SystemPlugins/ViX/burn.bat'):
		copy('/usr/lib/enigma2/python/Plugins/SystemPlugins/ViX/burn.bat', self.MAINDESTROOT + '/burn.bat')
	print '[ImageManager] Stage4: Removing Swap.'
	# Deactivate and delete the temporary swap file created in MemCheck2-5.
	if path.exists(self.swapdevice + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup"):
		system('swapoff ' + self.swapdevice + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup")
		remove(self.swapdevice + config.imagemanager.folderprefix.value + '-' + getImageType() + "-swapfile_backup")
	if path.exists(self.WORKDIR):
		rmtree(self.WORKDIR)
	# Success check: both image files must have arrived at the destination.
	if path.exists(self.MAINDEST + '/' + self.rootFILE) and path.exists(self.MAINDEST + '/' + self.kernelFILE):
		for root, dirs, files in walk(self.MAINDEST):
			for momo in dirs:
				# NOTE(review): 0644 strips the execute bit from directories,
				# making them untraversable on POSIX filesystems — confirm
				# whether 0755 was intended (may be FAT-targeted on purpose).
				chmod(path.join(root, momo), 0644)
			for momo in files:
				chmod(path.join(root, momo), 0644)
		print '[ImageManager] Stage4: Image created in ' + self.MAINDESTROOT
		self.Stage4Complete()
	else:
		print "[ImageManager] Stage4: Image creation failed - e. g. wrong backup destination or no space left on backup device"
		self.BackupComplete()
def Stage4Complete(self):
	# Unconditional: doBackup4 only calls this after verifying the files.
	self.Stage4Completed = True
	print '[ImageManager] Stage4: Complete.'
def doBackup5(self):
	"""Stage 5: zip the finished backup folder and delete the unpacked tree.

	Both commands run as one sequential console batch; Stage5Complete is
	invoked when the batch finishes.
	"""
	# Removed: `zipfolder = path.split(self.MAINDESTROOT)` — the result was
	# never used.
	self.commands = []
	# zip from inside the folder so archive paths are relative.
	self.commands.append('cd ' + self.MAINDESTROOT + ' && zip -r ' + self.MAINDESTROOT + '.zip *')
	self.commands.append('rm -rf ' + self.MAINDESTROOT)
	self.Console.eBatch(self.commands, self.Stage5Complete, debug=True)
def Stage5Complete(self, anwser=None):
	# eBatch callback. NOTE(review): parameter name 'anwser' is a typo kept
	# for signature compatibility with existing callers.
	self.Stage5Completed = True
	print '[ImageManager] Stage5: Complete.'
def BackupComplete(self, anwser=None):
	# Final callback: reschedule the next automatic backup (at least 60
	# units ahead) when the scheduler is enabled, otherwise stop the timer.
	if config.imagemanager.schedule.value:
		atLeast = 60
		autoImageManagerTimer.backupupdate(atLeast)
	else:
		autoImageManagerTimer.backupstop()
class ImageManagerDownload(Screen):
def __init__(self, session, BackupDirectory):
Screen.__init__(self, session)
Screen.setTitle(self, _("Image Manager"))
self.BackupDirectory = BackupDirectory
self['lab1'] = Label(_("Select an image to Download:"))
self["key_red"] = Button(_("Close"))
self["key_green"] = Button(_("Download"))
self.onChangedEntry = []
self.emlist = []
self['list'] = MenuList(self.emlist)
self.populate_List()
if not self.selectionChanged in self["list"].onSelectionChanged:
self["list"].onSelectionChanged.append(self.selectionChanged)
def selectionChanged(self):
for x in self.onChangedEntry:
x()
def populate_List(self):
try:
self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions'],
{
'cancel': self.close,
'red': self.close,
'green': self.keyDownload,
'ok': self.keyDownload,
}, -1)
if not path.exists(self.BackupDirectory):
mkdir(self.BackupDirectory, 0755)
from ftplib import FTP
import urllib, zipfile, base64
vix4sh4_user = '[email protected]'
vix4sh4_pwd = base64.b64decode('dml4aW1hZ2VzMjI=').replace('\n', '')
ftp = FTP('vix4.com')
ftp.login(vix4sh4_user, vix4sh4_pwd)
if getMachineMake() == 'amiko8900':
self.boxtype = 'amiko8900'
if getMachineMake() == 'amikomini':
self.boxtype = 'amikomini'
if getMachineMake() == 'amikoalien':
self.boxtype = 'amikoalien'
if getMachineMake() == 'enfinity':
self.boxtype = 'enfinity'
if getMachineMake() == 'axodin':
self.boxtype = 'axodin'
if getMachineMake() == 'x2plus':
self.boxtype = 'x2plus'
if getMachineMake() == 'gbquadplus':
self.boxtype = 'gbquadplus'
if getMachineMake() == 'gb800ueplus':
self.boxtype = 'gb800ueplus'
if getMachineMake() == 'gb800seplus':
self.boxtype = 'gb800seplus'
if getMachineMake() == 'gbipbox':
self.boxtype = 'gbipbox'
if getMachineMake() == 'gbultraue':
self.boxtype = 'gbultraue'
if getMachineMake() == 'gbx1':
self.boxtype = 'gbx1'
if getMachineMake() == 'gbx3':
self.boxtype = 'gbx3'
if getMachineMake() == 'vusolo':
self.boxtype = 'vusolo'
if getMachineMake() == 'vuduo':
self.boxtype = 'vuduo'
if getMachineMake() == 'vuuno':
self.boxtype = 'vuuno'
if getMachineMake() == 'vuultimo':
self.boxtype = 'vuultimo'
if getMachineMake() == 'vuzero':
self.boxtype = 'vuzero'
if getMachineMake() == 'vusolo2':
self.boxtype = 'vusolo2'
if getMachineMake() == 'vusolose':
self.boxtype = 'vusolose'
if getMachineMake() == 'vuduo2':
self.boxtype = 'vuduo2'
if getMachineMake() == 'ixussone':
self.boxtype = 'ixussone'
if getMachineMake() == 'quadbox2400':
self.boxtype = 'quadbox2400'
if getMachineMake() == 'x1plus':
self.boxtype = 'x1plus'
if getMachineMake() == 'xcombo':
self.boxtype = 'xcombo'
if getMachineMake() == 'evomini':
self.boxtype = 'evomini'
if getMachineMake() == 't2cable':
self.boxtype = 't2cable'
if getMachineMake() == 'uniboxhde':
self.boxtype = 'uniboxhde'
if getMachineMake() == 'mutant2400':
self.boxtype = 'mutant2400'
if getMachineMake() == 'mutant1100':
self.boxtype = 'mutant1100'
if getMachineMake() == 'mutant500c':
self.boxtype = 'mutant500c'
if getMachineMake() == 'mutant1200':
self.boxtype = 'mutant1200'
if getMachineMake() == 'tmnanoseplus':
self.boxtype = 'tmnanoseplus'
if getMachineMake() == 'tmnanosem2plus':
self.boxtype = 'tmnanosem2plus'
if getMachineMake() == 'purehd':
self.boxtype = 'purehd'
if getMachineMake() == 'fusionhd':
self.boxtype = 'fusionhd'
if getMachineMake() == 'fusionhdse':
self.boxtype = 'fusionhdse'
if getMachineMake() == 'zgemmah2s':
self.boxtype = 'zgemmah2s'
if getMachineMake() == 'novaip':
self.boxtype = 'novaip'
if getMachineMake() == 'novacombo':
self.boxtype = 'novacombo'
if getMachineMake() == 'novatwin':
self.boxtype = 'novatwin'
if getMachineMake() == 'wetekplay':
self.boxtype = 'wetekplay'
if getMachineMake() == 'mutant51':
self.boxtype = 'mutant51'
if getMachineMake() == 'mutant1500':
self.boxtype = 'mutant1500'
ftp.cwd(self.boxtype)
del self.emlist[:]
for fil in ftp.nlst():
if not fil.endswith('.') and fil.find(getMachineMake()) != -1:
self.emlist.append(fil)
self.emlist.sort()
self.emlist.reverse()
ftp.quit()
ftp.close()
except:
self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions'],
{
'cancel': self.close,
'red': self.close,
}, -1)
self.emlist.append(" ")
self["list"].setList(self.emlist)
self["list"].show()
def keyDownload(self):
self.sel = self['list'].getCurrent()
if self.sel:
message = _("Are you sure you want to download this image:\n ") + self.sel
ybox = self.session.openWithCallback(self.doDownload, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Download Confirmation"))
else:
self.session.open(MessageBox, _("You have no image to download."), MessageBox.TYPE_INFO, timeout=10)
def doDownload(self, answer):
if answer is True:
self.selectedimage = self['list'].getCurrent()
file = self.BackupDirectory + self.selectedimage
mycmd1 = _("echo 'Downloading Image.'")
mycmd2 = "wget -q http://www.vix4.com/viximages/2.2/" + self.boxtype + "/" + self.selectedimage + " -O " + self.BackupDirectory + "image.zip"
mycmd3 = "mv " + self.BackupDirectory + "image.zip " + file
self.session.open(ScreenConsole, title=_('Downloading Image...'), cmdlist=[mycmd1, mycmd2, mycmd3], closeOnSuccess=True)
def myclose(self, result, retval, extra_args):
remove(self.BackupDirectory + self.selectedimage)
self.close()<|fim▁end|> | autoImageManagerTimer.backupupdate()
else: |
<|file_name|>view_shed.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////
// //
// SAGA //
// //
// System for Automated Geoscientific Analyses //
// //
// Module Library: //
// ta_lighting //
// //
//-------------------------------------------------------//
// //
// View_Shed.cpp //
// //
// Copyright (C) 2008 by //
// Olaf Conrad //
// //
//-------------------------------------------------------//
// //
// This file is part of 'SAGA - System for Automated //
// Geoscientific Analyses'. SAGA is free software; you //
// can redistribute it and/or modify it under the terms //
// of the GNU General Public License as published by the //
// Free Software Foundation; version 2 of the License. //
// //
// SAGA is distributed in the hope that it will be //
// useful, but WITHOUT ANY WARRANTY; without even the //
// implied warranty of MERCHANTABILITY or FITNESS FOR A //
// PARTICULAR PURPOSE. See the GNU General Public //
// License for more details. //
// //
// You should have received a copy of the GNU General //
// Public License along with this program; if not, //
// write to the Free Software Foundation, Inc., //
// 59 Temple Place - Suite 330, Boston, MA 02111-1307, //
// USA. //
// //
//-------------------------------------------------------//
// //
// e-mail: [email protected] //
// //
// contact: Olaf Conrad //
// Institute of Geography //
// University of Hamburg //
// Bundesstr. 55 //
// 20146 Hamburg //
// Germany //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
#include "view_shed.h"
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
//---------------------------------------------------------
// Constructor: registers the tool's metadata and its input/output
// parameters with the SAGA framework.
CView_Shed::CView_Shed(void)
{
	//-----------------------------------------------------
	Set_Name		(_TL("Sky View Factor"));

	Set_Author		(SG_T("(c) 2008 by O.Conrad"));

	// Fixed typo in the cited title: "Land-suface" -> "Land-surface".
	Set_Description	(_TW(
		"\n"
		"\n"
		"References:\n"
		"Boehner, J., Antonic, O. (2008): "
		"'Land-surface parameters specific to topo-climatology'. "
		"in: Hengl, T., Reuter, H. (Eds.): 'Geomorphometry - Concepts, Software, Applications', in press\n"
		"\n"
		"Hantzschel, J., Goldberg, V., Bernhofer, C. (2005): "
		"'GIS-based regionalisation of radiation, temperature and coupling measures in complex terrain for low mountain ranges'. "
		"Meteorological Applications, V.12:01, p.3342, doi:10.1017/S1350482705001489\n"
		"\n"
		"Oke, T.R. (2000): "
		"'Boundary Layer Climates'. "
		"Taylor & Francis, New York. 435pp.\n"
	));

	//-----------------------------------------------------
	// Input elevation grid.
	Parameters.Add_Grid(
		NULL	, "DEM"			, _TL("Elevation"),
		_TL(""),
		PARAMETER_INPUT
	);

	// Mandatory outputs.
	Parameters.Add_Grid(
		NULL	, "VISIBLE"		, _TL("Visible Sky"),
		_TL("The unobstructed hemisphere given as percentage."),
		PARAMETER_OUTPUT
	);

	Parameters.Add_Grid(
		NULL	, "SVF"			, _TL("Sky View Factor"),
		_TL(""),
		PARAMETER_OUTPUT
	);

	// Optional outputs.
	Parameters.Add_Grid(
		NULL	, "SIMPLE"		, _TL("Sky View Factor (Simplified)"),
		_TL(""),
		PARAMETER_OUTPUT_OPTIONAL
	);

	Parameters.Add_Grid(
		NULL	, "TERRAIN"		, _TL("Terrain View Factor"),
		_TL(""),
		PARAMETER_OUTPUT_OPTIONAL
	);

	// Search options.
	Parameters.Add_Value(
		NULL	, "MAXRADIUS"	, _TL("Maximum Search Radius"),
		_TL("This value is ignored if set to zero."),
		PARAMETER_TYPE_Double	, 10000.0, 0.0, true
	);

	Parameters.Add_Choice(
		NULL	, "METHOD"		, _TL("Method"),
		_TL(""),
		CSG_String::Format(SG_T("%s|%s|"),
			_TL("multi scale"),
			_TL("sectors")
		), 0
	);

	// Cell-size growth factor between pyramid levels (multi-scale method).
	Parameters.Add_Value(
		NULL	, "LEVEL_INC"	, _TL("Multi Scale Factor"),
		_TL(""),
		PARAMETER_TYPE_Double	, 3.0, 1.25, true
	);

	// Number of scan directions (sectors method).
	Parameters.Add_Value(
		NULL	, "NDIRS"		, _TL("Number of Sectors"),
		_TL(""),
		PARAMETER_TYPE_Int		, 8.0, 3, true
	);
}
//---------------------------------------------------------
//---------------------------------------------------------
// Destructor: nothing to release; members clean up themselves.
CView_Shed::~CView_Shed(void)
{}
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
bool CView_Shed::On_Execute(void)
{
bool bResult = false;
int nDirections;
double Visible, SVF, Simple, Terrain, Level_Inc;
CSG_Grid *pVisible, *pSVF, *pSimple, *pTerrain;
m_pDEM = Parameters("DEM") ->asGrid();
pVisible = Parameters("VISIBLE") ->asGrid();
pSVF = Parameters("SVF") ->asGrid();
pSimple = Parameters("SIMPLE") ->asGrid();
pTerrain = Parameters("TERRAIN") ->asGrid();
m_MaxRadius = Parameters("MAXRADIUS") ->asDouble();
m_Method = Parameters("METHOD") ->asInt();
Level_Inc = Parameters("LEVEL_INC") ->asDouble();
nDirections = Parameters("NDIRS") ->asInt();
DataObject_Set_Colors(pVisible , 100, SG_COLORS_BLACK_WHITE);
DataObject_Set_Colors(pSVF , 100, SG_COLORS_BLACK_WHITE);
DataObject_Set_Colors(pSimple , 100, SG_COLORS_BLACK_WHITE);
DataObject_Set_Colors(pTerrain , 100, SG_COLORS_BLACK_WHITE, true);
//-----------------------------------------------------
switch( m_Method )
{
case 0: // multi scale
if( m_Pyramid.Create(m_pDEM, Level_Inc, GRID_PYRAMID_Mean) )
{
m_MaxLevel = m_Pyramid.Get_Count();
if( m_MaxRadius > 0.0 )
{
while( m_MaxLevel > 0 && m_Pyramid.Get_Grid(m_MaxLevel - 1)->Get_Cellsize() > m_MaxRadius )
{
m_MaxLevel--;
}
}
bResult = Initialise(8);
}
break;
case 1: // sectors
bResult = Initialise(nDirections);
break;
}
<|fim▁hole|> m_MaxRadius = Get_Cellsize() * M_GET_LENGTH(Get_NX(), Get_NY());
}
//-----------------------------------------------------
if( bResult )
{
for(int y=0; y<Get_NY() && Set_Progress(y); y++)
{
for(int x=0; x<Get_NX(); x++)
{
if( Get_View_Shed(x, y, Visible, SVF, Simple, Terrain) )
{
if( pVisible ) pVisible->Set_Value (x, y, Visible);
if( pSVF ) pSVF ->Set_Value (x, y, SVF);
if( pSimple ) pSimple ->Set_Value (x, y, Simple);
if( pTerrain ) pTerrain->Set_Value (x, y, Terrain);
}
else
{
if( pVisible ) pVisible->Set_NoData(x, y);
if( pSVF ) pSVF ->Set_NoData(x, y);
if( pSimple ) pSimple ->Set_NoData(x, y);
if( pTerrain ) pTerrain->Set_NoData(x, y);
}
}
}
}
//-----------------------------------------------------
m_Pyramid .Destroy();
m_Angles .Destroy();
m_Direction .Clear();
return( bResult );
}
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
//---------------------------------------------------------
// Prepares the per-direction state: one horizon-angle slot and one unit
// direction vector (x = sin, y = cos, z = azimuth) per sector, evenly
// spaced over the full circle.
bool CView_Shed::Initialise(int nDirections)
{
	m_Angles	.Create		(nDirections);
	m_Direction	.Set_Count	(nDirections);

	for(int i=0; i<nDirections; i++)
	{
		double	Azimuth	= (M_PI_360 * i) / nDirections;

		m_Direction[i].z	= Azimuth;
		m_Direction[i].x	= sin(Azimuth);
		m_Direction[i].y	= cos(Azimuth);
	}

	return( true );
}
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
//---------------------------------------------------------
// Computes the four view measures for cell (x, y):
//   Sky_Visible - unobstructed hemisphere in percent,
//   Sky_Factor  - sky view factor on the sloped surface,
//   Sky_Simple  - slope-only approximation (1 + cos(slope)) / 2,
//   Sky_Terrain - terrain view factor, Sky_Simple - Sky_Factor.
// Returns false where the DEM has no gradient (no-data).
bool CView_Shed::Get_View_Shed(int x, int y, double &Sky_Visible, double &Sky_Factor, double &Sky_Simple, double &Sky_Terrain)
{
	double	slope, aspect;

	if( m_pDEM->Get_Gradient(x, y, slope, aspect) )
	{
		bool	bResult;

		// Fill m_Angles with the maximum horizon angle per direction,
		// using the method chosen in On_Execute.
		switch( m_Method )
		{
		case 0:		bResult	= Get_Angles_Multi_Scale(x, y);	break;
		default:	bResult	= Get_Angles_Sectoral   (x, y);	break;
		}

		if( bResult )
		{
			double	sinSlope, cosSlope, Phi, sinPhi, cosPhi;

			Sky_Visible	= 0.0;
			Sky_Factor	= 0.0;

			sinSlope	= sin(slope);
			cosSlope	= cos(slope);

			// Accumulate both measures over all directions; Phi is the
			// horizon elevation angle obtained from the stored tangent.
			for(int iDirection=0; iDirection<m_Angles.Get_N(); iDirection++)
			{
				Phi		= atan(m_Angles[iDirection]);
				cosPhi	= cos(Phi);
				sinPhi	= sin(Phi);

				// Fraction of the quarter-circle above the horizon, in %.
				Sky_Visible	+= (M_PI_090 - Phi) * 100.0 / M_PI_090;
				// SVF term for an inclined surface; the second summand
				// accounts for the slope facing towards/away (aspect).
				Sky_Factor	+= cosSlope * cosPhi*cosPhi + sinSlope * cos(m_Direction[iDirection].z - aspect) * ((M_PI_090 - Phi) - sinPhi * cosPhi);
			}

			// Average over the directions.
			Sky_Visible	/= m_Angles.Get_N();
			Sky_Factor	/= m_Angles.Get_N();

			Sky_Simple	= (1.0 + cosSlope) / 2.0;
			Sky_Terrain	= Sky_Simple - Sky_Factor;

			return( true );
		}
	}

	return( false );
}
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------
//---------------------------------------------------------
// Multi-scale horizon search: for each of the 8 fixed directions, samples
// the grid pyramid at increasing cell sizes (iGrid == -1 addresses the
// base grid) and keeps the steepest elevation-difference-per-distance
// ratio, i.e. the tangent of the horizon angle.
bool CView_Shed::Get_Angles_Multi_Scale(int x, int y)
{
	if( !m_pDEM->is_NoData(x, y) )
	{
		double		z, d;
		TSG_Point	p, q;

		z	= m_pDEM->asDouble(x, y);
		p	= Get_System()->Get_Grid_to_World(x, y);

		m_Angles.Assign(0.0);

		//-------------------------------------------------
		for(int iGrid=-1; iGrid<m_MaxLevel; iGrid++)
		{
			CSG_Grid	*pGrid	= m_Pyramid.Get_Grid(iGrid);

			for(int iDirection=0; iDirection<8; iDirection++)
			{
				// Sample one cell size away in this direction on the
				// current pyramid level.
				q.x	= p.x + pGrid->Get_Cellsize() * m_Direction[iDirection].x;
				q.y	= p.y + pGrid->Get_Cellsize() * m_Direction[iDirection].y;

				// d is reused: first the interpolated height, then the
				// tangent (rise over run) compared against the best so far.
				if( pGrid->Get_Value(q, d) && (d = (d - z) / pGrid->Get_Cellsize()) > m_Angles[iDirection] )
				{
					m_Angles[iDirection]	= d;
				}
			}
		}

		return( true );
	}

	return( false );
}
//---------------------------------------------------------
bool CView_Shed::Get_Angles_Sectoral(int x, int y)
{
if( !m_pDEM->is_NoData(x, y) )
{
m_Angles.Assign(0.0);
//-------------------------------------------------
for(int iDirection=0; iDirection<m_Angles.Get_N(); iDirection++)
{
m_Angles[iDirection] = Get_Angle_Sectoral(x, y, m_Direction[iDirection].x, m_Direction[iDirection].y);
}
return( true );
}
return( false );
}
//---------------------------------------------------------
//---------------------------------------------------------
// Walks from (x, y) in direction (dx, dy), one step per iteration, up to
// m_MaxRadius, and returns the largest (height difference / distance)
// ratio encountered, i.e. the tangent of the horizon angle; 0.0 if the
// horizon is never above the start cell.
double CView_Shed::Get_Angle_Sectoral(int x, int y, double dx, double dy)
{
	double	Angle, Distance, dDistance, ix, iy, d, z;

	z			= m_pDEM->asDouble(x, y);
	// ix/iy track the exact (fractional) position; x/y are the rounded
	// grid coordinates actually sampled.
	ix			= x;
	iy			= y;
	Angle		= 0.0;
	Distance	= 0.0;
	// Step length in world units for one (dx, dy) increment.
	dDistance	= Get_Cellsize() * M_GET_LENGTH(dx, dy);

	while( is_InGrid(x, y) && Distance <= m_MaxRadius )
	{
		ix	+= dx;	x	= (int)(0.5 + ix);
		iy	+= dy;	y	= (int)(0.5 + iy);
		Distance	+= dDistance;

		// d is assigned inside the condition: tangent towards the sample.
		if( m_pDEM->is_InGrid(x, y) && (d = (m_pDEM->asDouble(x, y) - z) / Distance) > Angle )
		{
			Angle	= d;
		}
	}

	return( Angle );
}
///////////////////////////////////////////////////////////
// //
// //
// //
///////////////////////////////////////////////////////////
//---------------------------------------------------------<|fim▁end|> | if( m_Method != 0 && m_MaxRadius <= 0.0 )
{
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod backend;
mod builds;
mod renders;<|fim▁hole|><|fim▁end|> |
pub use self::backend::Backend;
pub use self::builds::Builds;
pub use self::renders::Renders; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod button;<|fim▁end|> | pub mod text_box; |
<|file_name|>parser.js<|end_file_name|><|fim▁begin|>var expect = require('expect.js'),
parse = require('../lib/parser.js');
describe('parser:', function() {
// Zero-argument call: a single function node with an empty args list.
it('should parse function with no arguments.', function() {
    expect(parse('(someFunc)')).to.eql([{
        type: 'function',
        name: 'someFunc',
        args: []
    }]);
});
// Numeric literals are kept as strings in the node's `value`.
it('should parse function with value arguments.', function() {
    expect(parse('(+ 1 22)')).to.eql([{
        type: 'function',
        name: '+',
        args: [{
            type: 'number',
            value: '1'
        }, {
            type: 'number',
            value: '22'
        }]
    }]);
});
// A call in argument position becomes a nested `function` node.
// Fixed typo in the test description: "funciton" -> "function".
it('should parse function with nested function.', function() {
    expect(parse('(+ 1 (- 2 3))')).to.eql([{
        type: 'function',
        name: '+',
        args: [{
            type: 'number',
            value: '1',
        }, {
            type: 'function',
            name: '-',
            args: [{
                type: 'number',
                value: '2',
            }, {
                type: 'number',
                value: '3',
            }]
        }]
    }]);
});
// Two sibling calls in argument position each become their own child node.
// Fixed typo in the test description ("funcitons" -> "functions") and added
// the missing trailing semicolon for consistency with the other tests.
it('should parse function with adjacent functions as arguments', function() {
    expect(parse('(+ (* 4 5) (- 2 3))')).to.eql([{
        type: 'function',
        name: '+',
        args: [{
            type: 'function',
            name: '*',
            args: [{
                type: 'number',
                value: '4',
            }, {
                type: 'number',
                value: '5',
            }]
        }, {
            type: 'function',
            name: '-',
            args: [{
                type: 'number',
                value: '2',
            }, {
                type: 'number',
                value: '3',
            }]
        }]
    }]);
});
// Top-level sibling expressions produce one AST node per expression.
it('should parse multiple functions.', function() {
    expect(parse('(func 1) (func2 2)')).to.eql([{
        type: 'function',
        name: 'func',
        args: [{
            type: 'number',
            value: '1'
        }]
    }, {
        type: 'function',
        name: 'func2',
        args: [{
            type: 'number',
            value: '2'
        }]
    }]);
});
// This test case is because e can also match numbers in scientific
// notation
it('should parse symbol starting with "e"', function() {
    expect(parse('(+ el)')).to.eql([{
        type: 'function',
        name: '+',
        args: [{
            type: 'symbol',
            value: 'el'
        }]
    }]);
});
// Leading/trailing whitespace around the expression must be ignored.
it('should handle whitespace', function() {
    expect(parse(' (+ el) ')).to.eql([{
        type: 'function',
        name: '+',
        args: [{
            type: 'symbol',
            value: 'el'
        }]
    }]);
});
describe('comments', function() {
// `// ...` to end of line must not appear in the AST.
it('should ignore inline comments', function() {
    expect(parse('(+ el) // this is a comment')).to.eql([{
        type: 'function',
        name: '+',
        args: [{
            type: 'symbol',
            value: 'el'
        }]
    }]);
});
<|fim▁hole|> expect(parse('(+ el /* some \n multi-line \ncomment */7) // this is a comment')).to.eql([{
type: 'function',
name: '+',
args: [{
type: 'symbol',
value: 'el'
}, {
type: 'number',
value: '7'
}]
}]);
})
});
describe('booleans and symbols', function() {
    // Bare identifiers become `symbol` nodes.
    it ('should parse a symbol', function() {
        expect(parse('(test thing)')).to.eql([{
            type: 'function',
            name: 'test',
            args: [{
                type: 'symbol',
                value: 'thing',
            }]
        }]);
    });
    // `true`/`false` are distinguished from symbols as `boolean` nodes;
    // the value stays a string.
    it ('should parse a boolean', function() {
        expect(parse('(test true)')).to.eql([{
            type: 'function',
            name: 'test',
            args: [{
                type: 'boolean',
                value: 'true',
            }]
        }]);
    });
});
describe('lambdas', function() {
    // `(lambda [params] body)` yields a function node with the parameter
    // symbols in `argNames` and the body expression in `args`.
    it('should parse a lambda', function() {
        expect(parse('(lambda [x y] (+ x y))')).to.eql([{
            type: 'function',
            name: 'lambda',
            argNames: [{
                type: 'symbol',
                value: 'x'
            }, {
                type: 'symbol',
                value: 'y'
            }],
            args: [{
                type: 'function',
                name: '+',
                args: [{
                    type: 'symbol',
                    value: 'x'
                }, {
                    type: 'symbol',
                    value: 'y'
                }]
            }]
        }]);
    });
});
describe('strings', function() {
    // Double-quoted literals become `string` nodes without the quotes.
    it('should parse a string', function() {
        expect(parse('(func "thing")')).to.eql([{
            type: 'function',
            name: 'func',
            args: [{
                type: 'string',
                value: 'thing'
            }]
        }]);
    });
    // An escaped quote must not terminate the string; the backslash is
    // preserved in the node value.
    it('should parse escaped quotes', function() {
        expect(parse('(func "thing\\"")')).to.eql([{
            type: 'function',
            name: 'func',
            args: [{
                type: 'string',
                value: 'thing\\"'
            }]
        }]);
    });
});
});<|fim▁end|> | it('should ignore multi-line comments', function() { |
<|file_name|>Strong.java<|end_file_name|><|fim▁begin|>/*
* #%L
* Diana UI Core
* %%
* Copyright (C) 2014 Diana UI
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.dianaui.universal.core.client.ui.html;
import com.dianaui.universal.core.client.ui.base.AbstractTextWidget;
import com.dianaui.universal.core.client.ui.constants.ElementTags;
import com.google.gwt.dom.client.Document;
/**
* Simple {@code <strong>} tag to emphasize words
*
* @author Joshua Godi<|fim▁hole|> public Strong() {
super(Document.get().createElement(ElementTags.STRONG));
}
public Strong(final String text) {
this();
setHTML(text);
}
}<|fim▁end|> | */
public class Strong extends AbstractTextWidget {
|
<|file_name|>test_phonon.py<|end_file_name|><|fim▁begin|>from cstool.parse_input import (read_input, check_settings, cstool_model)
from cstool.phonon import (phonon_cs_fn)
from cslib import (units)
import numpy as np
def test_phonon_cs_fn_single():
    """Tests that the phonon subroutine returns a function that
    can handle arrays and returns correct units."""
    # Force the single-branch phonon model instead of the file's default.
    settings = read_input('data/materials/pmma.yaml')
    settings.phonon.model = 'single'
    if not check_settings(settings, cstool_model):
        raise ValueError("Parsed settings do not conform the model.")
    fn = phonon_cs_fn(settings)
    # 100 log-spaced energies (0.01-1000 eV) x 100 angles (0-pi rad);
    # W[:, None] broadcasts against theta to a (100, 100) grid.
    W = np.logspace(-2, 3, 100) * units.eV
    theta = np.linspace(0, np.pi, 100) * units.rad
    cs = fn(theta, W[:, None])
    # Differential cross-section: area per steradian.
    assert cs.shape == (100, 100)
    assert cs.dimensionality == units('m²/sr').dimensionality
def test_phonon_cs_fn_dual():
"""Tests that the phonon subroutine returns a function that
can handle arrays and returns correct units."""
settings = read_input('data/materials/pmma.yaml')
if not check_settings(settings, cstool_model):
raise ValueError("Parsed settings do not conform the model.")<|fim▁hole|> W = np.logspace(-2, 3, 100) * units.eV
theta = np.linspace(0, np.pi, 100) * units.rad
cs = fn(theta, W[:, None])
assert cs.shape == (100, 100)
assert cs.dimensionality == units('m²/sr').dimensionality<|fim▁end|> |
fn = phonon_cs_fn(settings) |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>import subprocess, sys
def run_doxygen(folder):
"""Run the doxygen make command in the designated folder"""
try:
retcode = subprocess.call("cd %s; make" % folder, shell=True)
if retcode < 0:
sys.stderr.write("doxygen terminated by signal %s" % (-retcode))
except OSError as e:<|fim▁hole|> """Run the doxygen make commands if we're on the ReadTheDocs server"""
read_the_docs_build = os.environ.get('READTHEDOCS', None) == 'True'
if read_the_docs_build:
run_doxygen("..")
def setup(app):
    # Sphinx extension entry point: rebuild the doxygen XML before Sphinx
    # starts reading sources ("builder-inited" fires once per build).
    # NOTE(review): generate_doxygen_xml reads os.environ, but this module
    # only imports subprocess and sys — confirm `os` is imported elsewhere.
    app.connect("builder-inited", generate_doxygen_xml)
def generate_doxygen_xml(app): |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>/*
Nihonium Engine
This file is licensed under the terms of the MIT License.
See LICENCE.txt for the full licence.
*/<|fim▁hole|>#include "../windows/windows_input.h"
#include "../windows/windows_audio.h"
// Engine entry point: installs the Windows platform backends behind the
// platform-neutral singletons, starts background music, loads the initial
// scene and runs the frame loop until the screen requests shutdown.
int main(int argc, char** argv) {
	filesystem::instance = new filesystem::WindowsFilesystem();
	input::instance = new input::WindowsInput();
	audio::instance = new audio::WindowsAudio();
	audio::instance->init(nullptr);
	audio::BaseMusic* music = audio::instance->new_music("resources/music/music.ogg"); // TODO: This is a temporary testing feature.
	audio::instance->play_music(music);
	screen::init();
	object::load_scene("resources/scenes/main.lua");
	// Main loop: advance the scene graph by the measured frame time, then
	// present the frame.
	while (!screen::should_close) {
		double frame_time = screen::get_frame_time();
		object::update(frame_time, object::root);
		screen::end_frame();
	}
	screen::uninit();
	// NOTE(review): the backend singletons and `music` are never freed —
	// presumably reclaimed at process exit, but confirm ownership.
	return 0;
}
#include "../common/object.h"
#include "../common/screen.h"
#include "../windows/windows_filesystem.h" |
<|file_name|>DA.rs<|end_file_name|><|fim▁begin|>//#![feature(macro_rules)]
//use std::any::{Any, TypeId};
use std::cmp::{Ordering, PartialOrd};
use std::collections::{HashMap, HashSet};
use std::ops::{Add, Deref, Sub};
/*
{start: {to: {distance, path}}}
*/
// (distance, path-as-node-names) recorded for one start→to pair.
type Detail<Value> = (Value, Vec<&'static str>);
// {start: {to: (distance, path)}} — the full shortest-path table.
type Matrix<Value> = HashMap<&'static str, HashMap<&'static str, Detail<Value>>>;
/*
impl<T> Deref for Detail<T> {
type Target = (T, Vec<&'static str>);
fn deref(&self) -> &Self {
&(*self.0, *self.1)
}
}*/
/// Adjacency/shortest-path table plus the node count it was built from.
struct DistanceMatrix<T> {
    // Number of start nodes present at construction time.
    length: usize,
    // {start: {to: (distance, path)}}; updated in place by Dijkstra.
    inner_max: Matrix<T>,
}
impl<T> DistanceMatrix<T>
where
T: Add<Output = T> + Sub + Ord + Copy,
{
fn new(input: Matrix<T>) -> Self {
DistanceMatrix {
length: input.len(),
inner_max: input,
}
}
fn path(&mut self, start: &'static str, to: &'static str) -> Option<Vec<&'static str>> {
match self.inner_max.get(start) {
Some(data) => match data.get(to) {
Some((_, p)) => return Some(p.clone()),
None => return None,
},
None => return None,
}
}
fn val_between(&mut self, start: &'static str, to: &'static str) -> Option<T> {
match self.inner_max.get(start) {
Some(data) => match data.get(to) {
Some((v, _)) => return Some(*v),
None => return None,
},
None => return None,
}
}
fn new_dijkstra_on(&mut self, start: &'static str) {
// all nodes set
let mut all_notes = self
.inner_max
.keys()
.map(|x| *x)
.collect::<HashSet<&'static str>>();
all_notes.remove(start);
let mut smallest_nodes_connected: &'static str;
{
let this_nodes_data = match self.inner_max.get(start) {
Some(data) => data.clone(), // need clone here
None => {
println!("{}", "do not have this node");
return;
}
};
// find smallest node
let (a, _) = this_nodes_data
.iter()
.map(|x| x)
.filter(|x| all_notes.contains(x.0)) // must nodes in all nodes.
.min_by_key(|(_, v)| v.0)
.unwrap();
smallest_nodes_connected = *a;
}
//all_notes.remove(smallest_nodes_connected);
//println!("{:?}", smallest_nodes_connected);
while (!all_notes.is_empty()) {
let distance_to_now: T;
let path_to_now: Vec<&'static str>;
all_notes.remove(smallest_nodes_connected);
{
distance_to_now = self.val_between(start, smallest_nodes_connected).unwrap();
path_to_now = self.path(start, smallest_nodes_connected).unwrap();
}
let this_nodes_data = match { self.inner_max.get_mut(smallest_nodes_connected) } {
Some(data) => data.clone(),<|fim▁hole|> println!("{}", "do not have this node");
return;
}
};
//update start node
let mut this_round_result: Vec<(&'static str, T)> = vec![];
for (k, v) in this_nodes_data.iter() {
// cannot be start node
if *k == start {
continue;
}
if let Some(v_old) = self.val_between(start, k) {
if (distance_to_now + v.0) < v_old {
let mut new_path = path_to_now.clone();
new_path.push(k);
self.inner_max
.get_mut(start)
.unwrap()
.insert(k, (distance_to_now + v.0, new_path));
this_round_result.push((*k, distance_to_now + v.0));
} else {
// if old value is smaller one, dont change path
// but push result in this_round_result
this_round_result.push((*k, v_old));
}
} else {
let mut new_path = path_to_now.clone();
new_path.push(k);
self.inner_max
.get_mut(start)
.unwrap()
.insert(k, (distance_to_now + v.0, new_path));
this_round_result.push((*k, distance_to_now + v.0));
}
}
// next smallest node
smallest_nodes_connected = if let Some(a) = this_round_result
.iter()
.filter(|x| all_notes.contains(x.0))
.min_by_key(|(_, v)| v)
{
a.0
} else {
""
};
//println!("{:?}", smallest_nodes_connected);
}
}
}
//struct nil;
// type check function template
/*
fn is_nil<T: ?Sized + Any>(o: &T) -> bool {
TypeId::of::<i32>() == TypeId::of::<T>()
}*/
// Cannot check types inside a macro body, so this macro is not needed.
/*
(
["a",("b",6 ["a","b"]),("c",3,["a","c"])],
["b",...),
)
*/
/// Builds a `Matrix<u32>` literal from a compact bracket syntax:
/// `new_distance_matrix!(["a", ("b", 6, ["a", "b"]), ...], ...)`.
macro_rules! new_distance_matrix {
    // Each `[$x, ...]` group is one node `$x` with its adjacency list; every
    // `($y, $z, [$($p),*])` entry maps neighbor `$y` to weight `$z` and the
    // initial path `[$($p),*]`.
    ($([$x:expr,$(($y:expr,$z:expr,[$($p:expr),*])),*]),*) => {{
        let mut temp: Matrix<u32> = HashMap::new();
        $(
            temp.insert(
                $x,
                [$(($y,($z as u32,vec![$($p),*]))),*].iter().cloned().collect(),
            );
        )*;
        temp
    }};
}
fn main() {
//let test0: Matrix<u32>;
//println!("{:?}", test0);
//println!("{:?}", new_distance_matrix![[1, 2, 3], [4, 5, 6], [7, 8]]);
let mut test_caseM1: Matrix<u32> = HashMap::new();
test_caseM1.insert(
"a",
[
("b", (6 as u32, vec!["a", "b"])),
("c", (3 as u32, vec!["a", "c"])),
]
.iter()
.cloned()
.collect(),
);
test_caseM1.insert(
"b",
[
("a", (6 as u32, vec!["b", "a"])),
("c", (2 as u32, vec!["b", "c"])),
("d", (5 as u32, vec!["b", "d"])),
]
.iter()
.cloned()
.collect(),
);
test_caseM1.insert(
"c",
[
("a", (3 as u32, vec!["c", "a"])),
("d", (3 as u32, vec!["c", "d"])),
("b", (2 as u32, vec!["c", "b"])),
("e", (4 as u32, vec!["c", "e"])),
]
.iter()
.cloned()
.collect(),
);
test_caseM1.insert(
"d",
[
("b", (5 as u32, vec!["d", "b"])),
("e", (2 as u32, vec!["d", "e"])),
("c", (3 as u32, vec!["d", "c"])),
("f", (3 as u32, vec!["d", "f"])),
]
.iter()
.cloned()
.collect(),
);
test_caseM1.insert(
"e",
[
("c", (4 as u32, vec!["e", "c"])),
("d", (2 as u32, vec!["e", "d"])),
("f", (5 as u32, vec!["e", "f"])),
]
.iter()
.cloned()
.collect(),
);
test_caseM1.insert(
"f",
[
("e", (5 as u32, vec!["f", "e"])),
("d", (3 as u32, vec!["f", "d"])),
]
.iter()
.cloned()
.collect(),
);
println!("{:?}", test_caseM1);
let mut test_case1: DistanceMatrix<u32>;
test_case1 = DistanceMatrix {
length: 4,
inner_max: test_caseM1,
};
//Option<(&&str, &(u32, std::vec::Vec<u32>))>
//println!("{:?}", test_case1.new_dijkstra_on("a"));
//println!("{:?}", test_case1.inner_max.get("a"));
// test macro
let macro_test_marix = new_distance_matrix!(
["a", ("b", 6, ["a", "b"]), ("c", 3, ["a", "c"])],
[
"b",
("a", 6, ["b", "a"]),
("c", 2, ["b", "c"]),
("d", 5, ["b", "d"])
],
[
"c",
("a", 3, ["c", "a"]),
("d", 3, ["c", "d"]),
("b", 2, ["c", "b"]),
("e", 4, ["c", "e"])
],
[
"d",
("b", 5, ["d", "b"]),
("e", 2, ["d", "e"]),
("c", 3, ["d", "c"]),
("f", 3, ["d", "f"])
],
[
"e",
("c", 4, ["e", "c"]),
("d", 2, ["e", "d"]),
("f", 5, ["e", "f"])
],
["f", ("e", 5, ["f", "e"]), ("d", 3, ["f", "d"])]
);
println!("{:?}", macro_test_marix);
}<|fim▁end|> | None => { |
<|file_name|>iter.rs<|end_file_name|><|fim▁begin|>use std::sync::atomic::Ordering;
use ::key::{hashkey_to_string};
use ::table::*;
use ::state::SlotState;
/// Iterator over the live `(key, value)` pairs of a `VectorTable`,
/// yielding each key as an owned `String` (see the `Iterator` impl below).
pub struct CounterIter<'a> {
    // Backing table whose slots are scanned in index order.
    pub slots: &'a VectorTable,
    // Index of the next slot to examine.
    pub index: usize,
}
impl<'a> Iterator for CounterIter<'a> {
type Item = (String, usize);
fn next(&mut self) -> Option<Self::Item> {
let ret;
loop {
let slot_opt = self.slots.get_index(self.index);
match slot_opt {
Some(slot) => match slot.state.get() {
SlotState::Alive | SlotState::Copying | SlotState::Copied => {
let key_ptr = slot.key.load(Ordering::Acquire);
let val = slot.value.load(Ordering::Relaxed);<|fim▁hole|> if key_ptr.is_null() {
panic!("Iterator found an active slot with a null key");
}
let key = unsafe { hashkey_to_string(&(*key_ptr)) };
self.index += 1;
ret = Some((key, val));
break;
},
_ => {
self.index += 1;
}
},
None => {
self.slots.remove_thread();
ret = None;
break;
},
}
}
ret
}
}<|fim▁end|> | |
<|file_name|>checkWindow.js<|end_file_name|><|fim▁begin|>window.ww = window.innerWidth ? window.innerWidth: $(window).width();
window.wh = window.innerHeight ? window.innerHeight: $(window).height();
$(window).on('resize', function(){<|fim▁hole|>});<|fim▁end|> | window.ww = window.innerWidth ? window.innerWidth: $(window).width();
window.wh = window.innerHeight ? window.innerHeight: $(window).height(); |
<|file_name|>template.ts<|end_file_name|><|fim▁begin|>export type TemplateToken = string | TemplatePlaceholder;
export interface TemplatePlaceholder {
before: string;
after: string;
name: string;
}
interface TokenScanner {
text: string;
pos: number;
}
const enum TemplateChars {
/** `[` character */
Start = 91,
/** `]` character */
End = 93,
/* `_` character */
Underscore = 95,
/* `-` character */
Dash = 45,
}
/**
* Splits given string into template tokens.
* Template is a string which contains placeholders which are uppercase names
* between `[` and `]`, for example: `[PLACEHOLDER]`.
* Unlike other templates, a placeholder may contain extra characters before and
* after name: `[%PLACEHOLDER.]`. If data for `PLACEHOLDER` is defined, it will
* be outputted with with these extra character, otherwise will be completely omitted.
*/
export default function template(text: string): TemplateToken[] {
const tokens: TemplateToken[] = [];
const scanner: TokenScanner = { pos: 0, text };
let placeholder: TemplatePlaceholder | undefined;
let offset = scanner.pos;
let pos = scanner.pos;
while (scanner.pos < scanner.text.length) {
pos = scanner.pos;
if (placeholder = consumePlaceholder(scanner)) {
if (offset !== scanner.pos) {
tokens.push(text.slice(offset, pos));
}
tokens.push(placeholder);
offset = scanner.pos;
} else {
scanner.pos++;
}
}
if (offset !== scanner.pos) {
tokens.push(text.slice(offset));
}
return tokens;
}
/**
* Consumes placeholder like `[#ID]` from given scanner
*/
function consumePlaceholder(scanner: TokenScanner): TemplatePlaceholder | undefined {
if (peek(scanner) === TemplateChars.Start) {
const start = ++scanner.pos;
let namePos = start;
let afterPos = start;
let stack = 1;
while (scanner.pos < scanner.text.length) {
const code = peek(scanner);
if (isTokenStart(code)) {
namePos = scanner.pos;
while (isToken(peek(scanner))) {
scanner.pos++;
}
afterPos = scanner.pos;
} else {
if (code === TemplateChars.Start) {
stack++;
} else if (code === TemplateChars.End) {
if (--stack === 0) {
return {
before: scanner.text.slice(start, namePos),
after: scanner.text.slice(afterPos, scanner.pos++),
name: scanner.text.slice(namePos, afterPos)
};
}
}
<|fim▁hole|>}
/** Char code at `pos`, defaulting to the scanner's current cursor. */
function peek(scanner: TokenScanner, pos = scanner.pos): number {
    const { text } = scanner;
    return text.charCodeAt(pos);
}
/** True for ASCII upper-case letters (`A`–`Z`), which start a placeholder name. */
function isTokenStart(code: number): boolean {
    return code >= 0x41 && code <= 0x5A; // 'A'..'Z'
}
function isToken(code: number): boolean {
return isTokenStart(code)
|| (code > 47 && code < 58) /* 0-9 */
|| code === TemplateChars.Underscore
|| code === TemplateChars.Dash;
}<|fim▁end|> | scanner.pos++;
}
}
} |
<|file_name|>Message.java<|end_file_name|><|fim▁begin|>/* CoAP on Moterunner Demonstration
* Copyright (c) 2013-2014, SAP AG
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* - Neither the name of the SAP AG nor the names of its contributors may be
* used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL SAP BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Contributors:
* Matthias Thoma
* Martin Zabel
* Theofilos Kakantousis
*
* The following things need to be added before any public release:
* 3. Consider to add TTL / Resend stuff (see hellosensor.java)
* 4. Have a look at rules for message id and consider to move it either here or to some generic class
*/
package com.sap.coap;
import com.ibm.saguaro.system.*;
import com.ibm.iris.*;
import com.ibm.saguaro.mrv6.*;
//##if LOGGING
import com.ibm.saguaro.logger.*;
//##endif
public class Message {
public byte[] header = new byte[4];
public byte[] token = null;
public byte[] payload = null;
public int payloadLength = 0;
public byte[] options;
public int optionArraySize = 0;
public int roundCounter = 0;
@Immutable public static final byte CON = 0x00;
@Immutable public static final byte NON = 0x01;
@Immutable public static final byte ACK = 0x02;
@Immutable public static final byte RST = 0x03;
@Immutable public static final byte EMPTY = 0x00;
@Immutable public static final byte GET = 0x01;
@Immutable public static final byte POST = 0x02;
@Immutable public static final byte PUT = 0x03;
@Immutable public static final byte DELETE = 0x04;
    /** Stores {@code mypayload} as the message payload (no copy) and records its length. */
    public final void setPayload(byte[] mypayload){
        this.payload = mypayload;
        this.payloadLength = mypayload.length;
    }
    /** Creates an empty message with only the CoAP version bits (01) set. */
    public Message() {
        header[0] = 0x40; // set the version number
    }
/// <summary>
/// Constructor.
/// </summary>
/// <param name="type"> something </param>
/// <param name="tokenLen"> token legnth </param>
/// <param name="code"> CoAP message code </param>
/// <param name="msgid"> CoAP message id </param>
public Message(byte type, byte tokenLen, byte code, int msgid) {
setMessageHeader(type, tokenLen, code, msgid);
}
    /**
     * Packs the 4-byte CoAP header: version 1 (0x40), 2-bit {@code type},
     * 4-bit {@code tokenLen}, 8-bit {@code code} and 16-bit big-endian
     * {@code msgid}.
     */
    public final void setMessageHeader(byte type, byte tokenLen, byte code, int msgid) {
        header[0] = (byte) ((0x40 | (type << 4)) | tokenLen);
        header[1] = code;
        Util.set16be(header,2,msgid);
    }
public static byte createResponseCode(final byte cl, final byte cc) {
return (byte) ((cl << 5) | cc);
}
    /** Returns the 2-bit CoAP version field (bits 7-6 of the first header byte). */
    public final byte getVersion() {
        return (byte) ((header[0] >>> 6) & 0x03);
    }
    /** Returns the 2-bit message type (CON/NON/ACK/RST; bits 5-4 of the first header byte). */
    public final byte getType() {
        return (byte) ((header[0] & 0x30) >> 4);
    }
    /** Rewrites the message type bits while preserving the token length and forcing version 1. */
    public final void setType(final byte type) {
        byte tl = (byte) (header [0] & 0x0F); // keep the existing token-length nibble
        header[0] = (byte) ((0x40 | (type << 4)) | tl); // set the version number
    }
    /** Returns the 4-bit token length (low nibble of the first header byte). */
    public final byte getTokenLength() {
        return (byte) (header[0] & 0x0F);
    }
    /** Returns the raw CoAP code byte (request method or response code). */
    public final byte getCode() {
        return header[1];
    }
    /** Returns the 16-bit big-endian message id from header bytes 2-3. */
    public final int getMessageId() {
        return Util.get16be(header,2);
    }
    /** Drops the entire encoded option block. */
    public final void clearOptions() {
        options=null;
        optionArraySize=0;
    }
    /** Drops the payload and resets its recorded length. */
    public final void clearPayload() {
        payloadLength = 0;
        payload = null;
    }
    /** Returns the payload buffer (may be {@code null}; not a copy). */
    public final byte[] getPayload() {
        return this.payload;
    }
    /** Returns the payload length in bytes (0 when no payload is set). */
    public final int getPayloadSize() {
        return this.payloadLength;
    }
    /**
     * Reassembles the request URI path from all option-11 (Uri-Path) values,
     * joining the segments with '/'. Returns {@code null} when no Uri-Path
     * option is present. Two passes: first sizes the buffer, then fills it.
     */
    public byte[] getURIfromOptionArray() {
        int partNo = 0;
        int bufferOffset = 0;
        int offset = getOffsetOfOptionWithId(11, partNo);
        if (offset<0)
            return null;
        int bufferSize = 0;
        // Calculate buffer size
        int firstOffset = offset; // NOTE(review): never read afterwards — dead local
        while (offset>=0) {
            if (partNo>0)
                bufferSize++; // one '/' separator between consecutive segments
            int valueSize = getValueSizeOfOptionWithOffset(offset);
            bufferSize += valueSize;
            partNo++;
            offset = getOffsetOfOptionWithId(11, partNo);
        }
        byte[] buffer = new byte[bufferSize];
        // Second pass: copy each segment, '/'-separated, into the buffer.
        partNo=0;
        offset = getOffsetOfOptionWithId(11, partNo);
        int valueSize = getValueSizeOfOptionWithOffset(offset);
        byte[] data = getValueOfOptionWithOffset(offset);
        while (data != null) {
            if (partNo>0) {
                buffer[bufferOffset]='/';
                bufferOffset++;
            }
            partNo++;
            Util.copyData(data, 0, buffer, bufferOffset, valueSize);
            bufferOffset += valueSize;
            offset = getOffsetOfOptionWithId(11, partNo);
            data = null;
            if (offset>=0) {
                valueSize = getValueSizeOfOptionWithOffset(offset);
                data = getValueOfOptionWithOffset(offset);
            }
        }
        return buffer;
    }
    /** Returns true when at least one option with the given id is present. */
    public boolean hasOption(int id) {
        return (getOffsetOfOptionWithId(id,0) != -1);
    }
public void insertOption(int id, byte[] value, int valueSize) {
//1. find position
// an ascending order of the options has to be kept
// find start offsets of the 'enclosing' options left and right of the one to insert
// special case: inserting at the beginning: offsetRightOption = 0
// special case: inserting at the end: offsetRightOption = optionArraySize (i.e. points behind the array)
int offsetRightOption = 0;
int idRightOption = 0;
int idLeftOption = 0;
while(offsetRightOption < optionArraySize) { //if the loop is not left by a break, the option has to be inserted at the end
idRightOption = idOfOptionWithOffset(options, offsetRightOption, idRightOption);
if(idRightOption > id) { //insertion point found
break;
}
idLeftOption = idRightOption;
offsetRightOption = findOffsetOfNextOption(options, offsetRightOption);
}
//2. calculate value length field size for this option
int optionExtendedLengthFieldSize = getExtendedOptionFieldSizeFor(valueSize);
//3. calculate delta value for this option.
// depends on the previous id (being 0 when no previous option exists)
int delta = id - idLeftOption;
//4. calculate delta field size for this option
int optionExtendedDeltaFieldSize = getExtendedOptionFieldSizeFor(delta);
//5. recalculate the delta field size for the next option
// the delta value for the next option decreases due to the insertion
// this may result in less bytes being used for the size field
int deltaFieldSizeRightOption = 0;
int deltaFieldSizeRightOptionNew = 0;
int deltaRightOptionNew = 0;
int extendedDeltaFieldSizeDifferenceRightOption = 0;
//only if a next option exists
if(offsetRightOption != optionArraySize) {
//get the old field size for the next option
deltaFieldSizeRightOption = optionExtendedDeltaFieldSize(options, offsetRightOption);
//recalculate delta field size for next option
deltaRightOptionNew = idRightOption - id;
deltaFieldSizeRightOptionNew = getExtendedOptionFieldSizeFor(deltaRightOptionNew);
//determine the size difference between the new and the old field
extendedDeltaFieldSizeDifferenceRightOption = deltaFieldSizeRightOption - deltaFieldSizeRightOptionNew;
}
//7. calculate total size of new option array
int optionArraySizeNew = optionArraySize
+ 1
+ optionExtendedLengthFieldSize
+ optionExtendedDeltaFieldSize
+ valueSize
- extendedDeltaFieldSizeDifferenceRightOption;
//8. allocate mem for new option array
byte[] optionsNew = new byte[optionArraySizeNew];
//9. copy options until insertion point to new array
if(offsetRightOption>0) {
Util.copyData(options, 0, optionsNew, 0, offsetRightOption);
}
int currentOffset = offsetRightOption; //next position to read from the old options where no additional option is present. points now to the header byte of the next option
int offsetFirstByte = offsetRightOption; //points to the header byte of the option to insert
int currentOffsetNew = offsetFirstByte+1; //next position to write in the new array (after the header byte of the option to insert)
//10. write delta
if(optionExtendedDeltaFieldSize == 1) {
optionsNew[offsetFirstByte] += 13 << 4;
optionsNew[currentOffsetNew] = (byte)(delta-13);
}
else if(optionExtendedDeltaFieldSize == 2) {
optionsNew[offsetFirstByte] += 14 << 4;
Util.set16(optionsNew, currentOffsetNew, delta-269);
}
else { //optionExtendedDeltaFieldSize == 0
optionsNew[offsetFirstByte] += delta << 4;
}
currentOffsetNew += optionExtendedDeltaFieldSize;
//11. write value length
if(optionExtendedLengthFieldSize == 1) {
optionsNew[offsetFirstByte] += 13;
optionsNew[currentOffsetNew] = (byte)(valueSize-13);
}
else if(optionExtendedLengthFieldSize == 2) {
optionsNew[offsetFirstByte] += 14;
Util.set16(optionsNew, currentOffsetNew, valueSize-269);
}
else { //optionExtendedLengthFieldSize == 0
optionsNew[offsetFirstByte] += valueSize;
}
currentOffsetNew += optionExtendedLengthFieldSize;
//12. copy value
if(valueSize>0) {
Util.copyData(value, 0, optionsNew, currentOffsetNew, valueSize);
}
currentOffsetNew += valueSize;
//only if a next option exists
if(offsetRightOption != optionArraySize) {
//13. write header of next option with adjusted delta
//length stays constant, delta is erased
optionsNew[currentOffsetNew] = (byte) (options[currentOffset] & 0x0F);
//write recalculated delta to the next option
if(deltaFieldSizeRightOptionNew == 1) {
optionsNew[currentOffsetNew] += 13 << 4;
optionsNew[currentOffsetNew+1] = (byte) (deltaRightOptionNew-13);
}
else if(deltaFieldSizeRightOptionNew == 2){
optionsNew[currentOffsetNew] += 14 << 4;
Util.set16(optionsNew, currentOffsetNew+1, deltaRightOptionNew-269);
}
else { //deltaFieldSizeRightOptionNew == 0
optionsNew[currentOffsetNew] += deltaRightOptionNew << 4;
}
//jump behind the next option's extended delta field delta in the new array
currentOffsetNew += 1+deltaFieldSizeRightOptionNew;
//jump behind the next option's extended delta field in the old array
currentOffset += 1+deltaFieldSizeRightOption;
//14. copy rest of array (= next option's extended value length field, next option's value, all subsequent options)
int restLength = optionArraySize - currentOffset;
Util.copyData(options, currentOffset, optionsNew, currentOffsetNew, restLength);
}
//15. replace old options by new
options = optionsNew;
optionArraySize = optionArraySizeNew;
}
    /**
     * Returns the byte offset (within {@code options}) of the
     * {@code matchNumber}-th occurrence (0-based) of the option with id
     * {@code wantedOptionId}, or -1 when no such occurrence exists.
     * Options are stored delta-encoded in ascending id order.
     */
    public int getOffsetOfOptionWithId(int wantedOptionId, int matchNumber) {
        int currentOptionOffset = 0;
        int currentDelta = 0;
        while(currentOptionOffset < optionArraySize) {
            int currentOptionId = idOfOptionWithOffset(options, currentOptionOffset, currentDelta);
            if(currentOptionId == wantedOptionId) { //first of the options has been found. iterate them until the right match number is found
                for(int i = 0; i<matchNumber; i++) {
                    currentOptionOffset = findOffsetOfNextOption(options, currentOptionOffset);
                    // A non-zero delta nibble means the id changed, so further matches cannot exist.
                    if(currentOptionOffset == optionArraySize || (options[currentOptionOffset] & 0xF0) != 0x00) {
                        return -1; //array length has been exceeded or the delta is not 0, i.e. an option with an higher id was found
                    }
                }
                return currentOptionOffset;
            }
            if(currentOptionId > wantedOptionId) {
                return -1; // options are sorted; having passed the wanted id, it is absent
            }
            currentDelta = currentOptionId;
            currentOptionOffset = findOffsetOfNextOption(options, currentOptionOffset);
        }
        return -1;
    }
public byte[] getValueOfOptionWithOffset(int offset) {
int valueSize = getValueSizeOfOptionWithOffset(options, offset);
int headerSize = headerSizeOfOptionWithOffset(options, offset);
offset += headerSize;
byte[] value = new byte[valueSize];
if(valueSize>0) {
Util.copyData(options, offset, value, 0, valueSize);<|fim▁hole|> }
    /** Value length of the option starting at {@code offset} in this message's option array. */
    public int getValueSizeOfOptionWithOffset(int offset) {
        return getValueSizeOfOptionWithOffset(options, offset);
    }
public void removeOptionWithOffset(int offset) {
//1. get delta of this option
int delta = idOfOptionWithOffset(options, offset, 0); //this method with 0 as previous delta gives just the delta of this option
//2. get length of the block to remove
int optionSize = headerSizeOfOptionWithOffset(options, offset)
+ getValueSizeOfOptionWithOffset(options, offset);
//3. recalculate next option's new delta value
int offsetRightOption = offset + optionSize; //same as findOffsetOfNextOption(options, offset);
int deltaRightOption;
int deltaFieldSizeRightOption = 0;
int deltaRightOptionNew = 0;
int deltaFieldSizeRightOptionNew = 0;
int deltaFieldSizeDifferenceRightOption = 0;
if(offsetRightOption != optionArraySize) {
//get the old field size for the next option
deltaRightOption = idOfOptionWithOffset(options, offsetRightOption, 0); //this method with 0 as previous delta gives just the delta of this option
deltaFieldSizeRightOption = optionExtendedDeltaFieldSize(options, offsetRightOption);
//recalculate delta field size for next option
deltaRightOptionNew = delta + deltaRightOption;
deltaFieldSizeRightOptionNew = getExtendedOptionFieldSizeFor(deltaRightOptionNew);
//determine the size difference between the new and the old field
deltaFieldSizeDifferenceRightOption = deltaFieldSizeRightOptionNew - deltaFieldSizeRightOption;
}
//6. calculate new array size
int optionArraySizeNew = optionArraySize
- optionSize
+ deltaFieldSizeDifferenceRightOption;
//7. allocate mem for new option array
byte[] optionsNew = new byte[optionArraySizeNew];
//8. copy old option array to the start of the option to remove
if (offset>0)
Util.copyData(options, 0, optionsNew, 0, offset);
int offsetNew = offset;
offset += optionSize;
//only if a next option exists
if(offsetRightOption != optionArraySize) {
//9. write new delta for next option
//length stays constant, delta is erased
optionsNew[offsetNew] = (byte) (options[offset] & 0x0F);
//write recalculated delta to the next option
if(deltaFieldSizeRightOptionNew == 1) {
optionsNew[offsetNew] += 13 << 4;
optionsNew[offsetNew+1] = (byte) (deltaRightOptionNew-13);
}
else if(deltaFieldSizeRightOptionNew == 2){
optionsNew[offsetNew] += 14 << 4;
Util.set16(optionsNew, offsetNew+1, deltaRightOptionNew-269);
}
else { //deltaFieldSizeRightOptionNew == 0
optionsNew[offsetNew] += deltaRightOptionNew << 4;
}
//jump behind the next option's extended delta field delta in the new array
offsetNew += 1+deltaFieldSizeRightOptionNew;
//jump behind the next option's extended delta field in the old array
offset += 1+deltaFieldSizeRightOption;
//10. copy rest of the array
int restLength = optionArraySizeNew - offsetNew;
Util.copyData(options, offset, optionsNew, offsetNew, restLength);
}
options = optionsNew;
optionArraySize = optionArraySizeNew;
}
public void removeOptionWithId(int id, int matchNumber) {
int offset = getOffsetOfOptionWithId(id, matchNumber);
removeOptionWithOffset(offset);
}
public byte[] valueOfOptionWithId(int id, int no) {
int partNo = 0;
int offset = getOffsetOfOptionWithId(id, no);
if (offset>=0) {
int valueSize = getValueSizeOfOptionWithOffset(offset);
byte[] data = getValueOfOptionWithOffset(offset);
return data;
}
return null;
}
    /** Offset just past the option starting at {@code offset} in this message's option array. */
    public int findOffsetOfNextOption(int offset) {
        return findOffsetOfNextOption(this.options, offset);
    }
    /** Decodes the option id at {@code offset}, given the accumulated delta of all previous options. */
    public int idOfOptionWithOffset(int offset, int currentDelta) {
        return idOfOptionWithOffset(this.options, offset, currentDelta);
    }
    /**
     * Serializes this message into {@code buffer} starting at {@code offset}:
     * 4-byte header, then token, then the raw option block, then (only when a
     * payload exists) the 0xFF payload marker followed by the payload bytes.
     * The buffer must hold at least {@link #getMessageLength()} bytes.
     */
    public void encodeTo(byte[] buffer, int offset) {
        int iOffset = 0;
        Util.copyData(header, 0, buffer, offset, 4);
        iOffset+=4;
        byte tokenLength = this.getTokenLength();
        if (tokenLength > 0) {
            Util.copyData(token,0, buffer, offset+iOffset, tokenLength);
            iOffset += tokenLength;
        }
        if (optionArraySize>0) {
            Util.copyData(options, 0, buffer, offset+iOffset, optionArraySize);
            iOffset += optionArraySize;
        }
        if (this.payloadLength!=0) {
            buffer[offset+iOffset] = (byte) 0xFF; // payload marker
            iOffset++;
            Util.copyData(this.payload,0, buffer, offset+iOffset, this.payloadLength);
        }
    }
//
// Return:
// 0: OK
// -1: Protocol error
// -2: Currently unsupported feature
public byte decode(byte[] inBuffer, int offset, int len) {
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: ENTER DECODE"));
Logger.flush(Mote.INFO);
//##endif
int inOffset = offset;
int endLen = offset+len;
payloadLength = 0;
Util.copyData(inBuffer, offset, header, 0, 4);
inOffset += 4;
// Read token
byte tokenLength = getTokenLength();
if(tokenLength > 8) {
return -1;
}
if(inOffset == -1) {
return -1;
}
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: token len"));
Logger.appendInt(tokenLength);
Logger.flush(Mote.INFO);
//##endif
if (tokenLength>0) {
token = new byte[tokenLength];
Util.copyData(inBuffer, inOffset, token, 0, tokenLength);
}
inOffset += tokenLength;
// Check if end of Message
if (inOffset >= endLen) // Zero length Message, zero options
return 0;
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: start reading options "));
Logger.flush(Mote.INFO);
//##endif
// Check if payload marker or options
int optionOffset = inOffset;
inOffset = jumpOverOptions(inBuffer, inOffset, endLen);
if(inOffset == -1) {
return -1;
}
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: new offset"));
Logger.appendInt(inOffset);
Logger.appendString(csr.s2b("CoAPDecode :: endlen"));
Logger.appendInt(endLen);
Logger.flush(Mote.INFO);
//##endif
optionArraySize = inOffset - optionOffset; //may be 0 if no options are given
options = new byte[optionArraySize];
if(optionArraySize > 0) {
Util.copyData(inBuffer, optionOffset, options, 0, optionArraySize);
}
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: end reading options "));
Logger.flush(Mote.INFO);
//##endif
if (inOffset == endLen) { // Zero length Message
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: no payload "));
Logger.flush(Mote.INFO);
//##endif
return 0;
}
if (inBuffer[inOffset] == (byte) 0xFF) {
inOffset++;
if(inOffset == endLen) { //protocol error: there is no payload though the marker indicates
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: protocol error "));
Logger.flush(Mote.INFO);
//##endif
return -1;
}
// Payload
payloadLength = endLen-inOffset;
payload = new byte[payloadLength];
Util.copyData(inBuffer, inOffset, payload, 0, payloadLength);
}
else {
inOffset++;
if(inOffset < endLen) { //protocol error: there is payload though there is no marker
//##if LOGGING
Logger.appendString(csr.s2b("CoAPDecode :: protocol error "));
Logger.flush(Mote.INFO);
//##endif
return -1;
}
}
return 0;
}
public final int getMessageLength() {
int len=4+getTokenLength();
if (this.payloadLength!=0)
len += 1+payloadLength;
len += optionArraySize;
return len;
}
    /**
     * Allocates a MAC packet, fills in the given addresses/ports and encodes
     * this message into its payload. NOTE(review): the dst/src arguments are
     * copied as given — presumably the caller already swapped the request's
     * source and destination; confirm at call sites.
     */
    public final Packet prepareResponseForSourcePacket(int inDstPort, byte[] inDstAddr, byte[] inSrcAddr, int srcPort) {
        int dstport = inDstPort;
        int lenp = getMessageLength();
        Packet tempPacket = Mac.getPacket();
        tempPacket.release();
        Address.copyAddress(inDstAddr, 0, tempPacket.dstaddr, 0);
        Address.copyAddress(inSrcAddr, 0, tempPacket.srcaddr, 0);
        tempPacket.create(dstport, srcPort, lenp);
        encodeTo(tempPacket.payloadBuf, tempPacket.payloadOff);
        return tempPacket;
    }
private static int jumpOverOptions(byte[] inBuffer, int offset, int len) {
int nextOptionOffset = offset;
while(nextOptionOffset < len && inBuffer[nextOptionOffset] != (byte) 0xFF) {
// checking for protocol violation -- one of the nibbles is F but it's not the payload marker
// check belongs only here since the first time parsing of a received message happens here
if( (inBuffer[offset] & 0x0F) == 0x0F || (inBuffer[offset] & 0xF0) == 0xF0 ) {
return -1;
}
nextOptionOffset = findOffsetOfNextOption(inBuffer, nextOptionOffset);
}
return nextOptionOffset;
}
private static int findOffsetOfNextOption(byte[] inBuffer, int offset) {
int headerSize = headerSizeOfOptionWithOffset(inBuffer, offset);
int valueSize = getValueSizeOfOptionWithOffset(inBuffer, offset);
int currentOptionSize = headerSize + valueSize;
return offset + currentOptionSize;
}
private static int headerSizeOfOptionWithOffset(byte[] inBuffer, int offset) {
int size = 1;
size += optionExtendedDeltaFieldSize(inBuffer, offset);
size += optionExtendedLengthFieldSize(inBuffer, offset);
return size;
}
private static int optionExtendedDeltaFieldSize(byte[] inBuffer, int offset) {
byte optionDelta = (byte) (((inBuffer[offset] & 0xF0) >> 4));
if(optionDelta < 13)
return 0;
if(optionDelta == 13)
return 1;
return 2; //optionDelta == 14
}
private static int optionExtendedLengthFieldSize(byte[] inBuffer, int offset) {
byte optionLength = (byte) (inBuffer[offset] & 0x0F);
if(optionLength < 13)
return 0;
if(optionLength == 13)
return 1;
return 2; //optionLength == 14
}
private static int getValueSizeOfOptionWithOffset(byte[] inBuffer, int offset) {
byte optionLength = (byte) (inBuffer[offset] & 0x0F);
if(optionLength < 13)
return optionLength;
else {
offset += 1 + optionExtendedDeltaFieldSize(inBuffer, offset);
if(optionLength == 13) {
return inBuffer[offset] + 13;
}
return Util.get16(inBuffer, offset) + 269; //optionLength == 14
}
}
private static int idOfOptionWithOffset(byte[] inBuffer, int offset, int currentDelta) {
byte optionDelta = (byte) (((inBuffer[offset] & 0xF0) >> 4));
if(optionDelta < 13)
return currentDelta + optionDelta;
else {
offset += 1;
if(optionDelta == 13) {
return currentDelta + inBuffer[offset] + 13;
}
return currentDelta + Util.get16(inBuffer, offset) + 269; //optionDelta == 14
}
}
private static int getExtendedOptionFieldSizeFor(int input) {
if(input<13)
return 0;
else if(input >= 13 && input < 269)
return 1;
return 2; //input >= 269
}
}<|fim▁end|> | }
return value; |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import models
from django.template.defaultfilters import slugify
from mptt.models import MPTTModel, TreeForeignKey
class ForumCategory(MPTTModel):
parent = TreeForeignKey(
'self', blank=True, null=True, related_name='children'
)
name = models.CharField(max_length=255)
slug = models.SlugField(max_length=255)
description = models.CharField(max_length=255, blank=True)
order = models.PositiveIntegerField(blank=True, null=True)
def __unicode__(self):
return self.name
@property
def last_post(self):
if self.parent is None:
return None
response = None
for thread in self.forumthread_set.all():
if response is None:
response = thread.last_post
else:
if thread.last_post.created > response.created:
response = thread.last_post
return response
@property
def post_count(self):
count = 0
for thread in self.forumthread_set.all():
count += thread.forumpost_set.count()
return count
class Meta:
verbose_name_plural = 'Forum categories'
class ForumThread(models.Model):
category = models.ForeignKey(ForumCategory)
title = models.CharField(max_length=255)
slug = models.SlugField(max_length=255)
author = models.ForeignKey(settings.AUTH_USER_MODEL)
created = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('thread_home', kwargs={'slug': self.slug})
@property
def last_post(self):
return self.forumpost_set.order_by('-created').first()
@property
def num_replies(self):
return self.forumpost_set.filter(is_thread_starter=False).count()
@property
def thread_starter(self):
return self.forumpost_set.get(thread=self, is_thread_starter=True)
def save(self, *args, **kwargs):
if self.slug == '':
self.slug = slugify(self.title)
return super(ForumThread, self).save(*args, **kwargs)
class ForumPost(models.Model):
thread = models.ForeignKey(ForumThread)
post = models.TextField()
author = models.ForeignKey(settings.AUTH_USER_MODEL)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
reply_to = models.ForeignKey('self', blank=True, null=True)
is_thread_starter = models.BooleanField(default=False)
def __unicode__(self):
return '%(thread)s - %(pk)s' % {
'thread': self.thread.title,
'pk': self.pk
}
def get_breadcrumb(self):
breadcrumb = [
(
self.thread.title,
reverse(
'thread_home',
kwargs={'slug': self.thread.slug}
)
),
]
category = self.thread.category
while True:
breadcrumb_item = (
category.name,
reverse(
'category_home',
kwargs={'slug': category.slug}
),
)
breadcrumb.insert(0, breadcrumb_item)<|fim▁hole|>
return breadcrumb<|fim▁end|> | if category.parent is None:
break
category = category.parent |
<|file_name|>gba.rs<|end_file_name|><|fim▁begin|>use super::mmu;
use super::cpu;
#[derive(Debug)]
pub struct GameBoyAdvance {
pub cpu: cpu::Cpu,
mmu: mmu::Mmu
// audio
// graphics
// network<|fim▁hole|> pub fn new(biosROM: Box<[u8]>, gameROM: Box<[u8]>) -> GameBoyAdvance {
GameBoyAdvance {
cpu: cpu::Cpu::new(),
mmu: mmu::Mmu::new(biosROM, gameROM)
}
}
pub fn emulate(&mut self) {
self.cpu.run(&mut self.mmu)
}
}<|fim▁end|> | }
impl GameBoyAdvance { |
<|file_name|>quiz.py<|end_file_name|><|fim▁begin|>import sys # this allows you to read the user input from keyboard also called "stdin"
import classOne # This imports all the classOne functions
import classTwo # This imports all the classTwo functions
import classThree # This imports all the classThree functions
import classFour # This imports all the classFour functions
TIMEOUT=10 # this is the amount of time you will wait for an answer in Seconds. 10 means 10 seconds
MAX_CLASS=5
QUIZ_INSTRUCTIONS = """
Get ready for the quiz. You will have 10 questions out of which you
will need 8 right to win the prize. You will have """ + str(TIMEOUT) + """ seconds
to answer each question.Press Enter to start."""
def getUsersClass(): #main
''' This function will get the user's class. It will compare the class with MAX_CLASS and
will return False if it is more than the MAX_CLASS. Class also has to be a natural number '''
print("Please tell me which Class you are in? ")<|fim▁hole|> usersClass = int(sys.stdin.readline().strip())
if (usersClass < 1 or usersClass > MAX_CLASS) :
print("No Quiz available for Class " + str(usersClass))
return False
else :
return usersClass
except :
print("Exception")
return False
if __name__ == '__main__':
    # Keep prompting until the user enters a valid class number
    # (getUsersClass returns False for invalid input).
    while(True) :
        usersClass = getUsersClass()
        if (usersClass != False) :
            break
    # Show the rules and wait for Enter before starting the quiz.
    print(QUIZ_INSTRUCTIONS)
    sys.stdin.readline()
    # Dispatch to the quiz module that matches the user's class.
    # NOTE(review): MAX_CLASS is 5 but there is no branch for class 5 —
    # confirm whether a classFive quiz is missing here.
    if (usersClass == 1) :
        classOne.classOneQuiz()
    elif (usersClass == 2) :
        classTwo.classTwoQuiz()
    elif(usersClass == 3):
        classThree.classThreeQuiz()
    elif(usersClass == 4):
        classFour.classFourQuiz()
<|file_name|>accordionDirectives.js<|end_file_name|><|fim▁begin|>/* Accordion directive */
app.directive('vmfAccordionContainer', ['$compile', function($compile) {
return {
restrict: 'EA',
scope: {
type: '@',
headers: '=',
accData: '=',
selAcc: '=',
customClass:'='
},
link: function(scope, elem, attrs) {
var template;
if(scope.type === '1') {
template = '<table class="vmf-accordion-table1"><thead class="vmf-accordion-table-header"><tr><td class="col1"></td>';
var count = 1;
angular.forEach(scope.headers, function(item) {
// if(count === 1) {
// template += '<td colspan="2">' + item + '</td>';
// }
// else {
template += '<td class="col' + (count + 1) +'">' + item + '</td>';
// }
count += 1;
});
template += '</tr></thead><tbody class="vmf-accordion-table-body">';
scope.accordionIndex = 0;
angular.forEach(scope.accData, function(item) {
template += '<tr class="vmf-accordion-header" ng-click="toggleAccordion(' + scope.accordionIndex + ')"><td ><span class="vmf-arrow"></span></td><td colspan="3">' + item.header + '</td></tr>';
angular.forEach(item.contents, function(content) {
template += '<tr class="vmf-accordion-row" ng-show="activeIndex =='+ scope.accordionIndex + '"><td colspan="1"></td>';
angular.forEach(content, function(cellData) {
template += '<td colspan="1">' + cellData + '</td>';
});
template += '</tr>';
});
scope.accordionIndex += 1;
});
template += '</tbody></table>';
elem.append(template);
// console.log(template);
$compile(elem.contents())(scope);
// for IE7
elem.find('.vmf-accordion-row').hide();
}
else if(scope.type === '2') {
template = '<table class="vmf-accordion-table2"><thead class="vmf-accordion-table2-header" style="background-color: lightgray;"><tr><td class="col1"></td>';
var headerCount = 0;
angular.forEach(scope.headers, function(item) {
if(headerCount !== scope.headers.length - 1) {
template += '<td class="col' + (headerCount + 1 + 1)+ '">' + item + '</td>';
}
else {
template += '<td colspan="2" class="col' + (headerCount + 1 + 1)+ '">' + item + '</td>';
}
headerCount += 1;
});
template += '</tr></thead><tbody class="vmf-accordion-table2-body">';
scope.accordionIndex = 0;
angular.forEach(scope.accData, function(item) {
template += '<tr class="vmf-accordion-header2" ng-click="toggleAccordion(' + scope.accordionIndex + ')"><td><span class="vmf-arrow"></span></td>';
var accHeadersCount = 1;
angular.forEach(item.headers, function(header) {
if(accHeadersCount !== item.headers.length) {
template += '<td>' + header + '</td>';
}
else {
template += '<td class="vmf-acc-header-last-child">' + header + '</td>';
}
accHeadersCount += 1;
});
template += '</tr><tr class="vmf-accordion-row2"><td></td><td class="vmf-acc-header-last-child" colspan="' + item.headers.length + '"><table class="vmf-accordion-sub-table" ng-show="activeIndex =='+ scope.accordionIndex + '">';
var count = 0;
angular.forEach(item.contents, function(content) {
if(count !== 0) {
template += '<tr class="vmf-accordion-sub-table-row">';
angular.forEach(content, function(cellData) {
template += '<td>' + cellData + '</td>';
});
template += '</tr>';
}
else {
template += '<thead class="vmf-accordion-sub-table-header"><tr>';
var subHeaderCount = 1;
angular.forEach(content, function(cellData) {
template += '<td class="col' + subHeaderCount + '">' + cellData + '</td>';
subHeaderCount += 1;
});
template += '</tr></thead><tbody class="vmf-accordion-sub-table-body">';
}
count += 1;
});
template += '</tbody></table></td></tr>';
scope.accordionIndex += 1;
});
template += '</tbody></table>';
elem.append(template);
// console.log(template);
if(scope.customClass){
angular.forEach(scope.customClass, function(item) {
elem.find(item.selector).addClass(item.cusclass);
});
}
$compile(elem.contents())(scope);
// for IE7
elem.find('.vmf-accordion-row2').hide();
// elem.find('.vmf-accordion-row2').hide();
}
scope.toggleAccordion = function(index) {
scope.activeIndex = scope.activeIndex === index ? -1 : index;
var accordions, accordionRows;
if(scope.type === '1') {
elem.find('.vmf-accordion-row').hide();
accordions = elem.find('.vmf-accordion-header');
accordions.removeClass('vmf-active-row');
// for IE7
if(scope.activeIndex !== -1) {
// accordions = elem.find('.vmf-accordion-header');
// console.log(accordions[index]);
$(accordions[index]).addClass('vmf-active-row');
accordionRows = $(accordions[index]).nextUntil('.vmf-accordion-header');
<|fim▁hole|> }
else if(scope.type === '2') {
elem.find('.vmf-accordion-row2').hide();
accordions = elem.find('.vmf-accordion-header2');
accordions.removeClass('vmf-active-row');
// for IE7
if(scope.activeIndex !== -1) {
$(accordions[index]).addClass('vmf-active-row');
accordionRows = $(accordions[index]).nextUntil('.vmf-accordion-header2');
$(accordionRows).show();
}
}
};
scope.buttonClick = function($event, index) {
$event.stopPropagation();
scope.selAcc = index;
};
}
};
}]);<|fim▁end|> | $(accordionRows).show();
} |
<|file_name|>functions_dir.py<|end_file_name|><|fim▁begin|>"""Modulo que contiene la clase directorio de funciones
-----------------------------------------------------------------
Compilers Design Project
Tec de Monterrey
Julio Cesar Aguilar Villanueva A01152537
Jose Fernando Davila Orta A00999281
-----------------------------------------------------------------
DOCUMENTATION: For complete Documentation see UserManual.pdf"""
from stack import Stack
from function import Function
from variable import Variable
def get_var_type(var_type):
    '''Return the one-letter tag for a variable type
    (int/double/string/bool); unknown types yield None.'''
    prefixes = {'int': 'i', 'double': 'd', 'string': 's', 'bool': 'b'}
    return prefixes.get(var_type)
def get_var_scope(scope):
    '''Return the one-letter tag for a scope name: 'global' and 'main'
    have fixed tags, any other (function) scope maps to 't'.'''
    return {'global': 'g', 'main': 'l'}.get(scope, 't')
def get_var_name(var_type, scope, var_name):
    '''Build the internal address/name of a variable by concatenating its
    type tag, scope tag and identifier (same encoding as get_var_type and
    get_var_scope, inlined here).'''
    type_tag = {'int': 'i', 'double': 'd', 'string': 's', 'bool': 'b'}.get(var_type)
    if scope == 'global':
        scope_tag = 'g'
    elif scope == 'main':
        scope_tag = 'l'
    else:
        scope_tag = 't'
    return type_tag + scope_tag + var_name
class FunctionsDir(object):
'''Las funciones son entradas en el diccionario functions.
Las funciones son objetos con diccionarios de variables.
Scope global del programa se inicia con una funcion global
sin variables.
Scope es el function_id de cada funcion.'''
def __init__(self):
'''Metodo de inicializacion'''
self.functions = {}
self.functions['global'] = Function()
self.scope = 'global'
# Define si se esta evaluando la existencia de variables o se estan agregando al directorio<|fim▁hole|> # Indica si es necesario acutlaizar la lista de prametros de una funcion
self.updating_params = False
# Indica si se va a leer variable con funcion read
self.reading = False
# Ultimo token ID, usado para el read
self.last_id = Stack()
# Ultimo token de tipo que fue leido por el directorio de funciones
self.last_type = None
'''Funciones que estan siendo llamadas.
Se utiliza una pila para llamadas nesteadas a funciones'''
self.call_function = Stack()
'''Cantidad de argumentos que estan siendo utilizados al llamar a una funcion.
Se utiliza una pilla para llamadas nesteadas'''
self.call_arguments = Stack()
self.last_read = Stack()
def add_function(self, function_id):
'''Add function to fuctions directory. Verify if function already exists'''
if self.functions.get(function_id, None) is not None:
raise NameError('Error: 1001 Function already declared! Function: ' + str(function_id))
else:
self.functions[function_id] = Function()
def validate_function(self, function_id):
'''Validate function exists'''
if self.functions.get(function_id, None) is None:
raise NameError('Error: 1002 Function not declared! Name: ' + str(function_id))
def increase_expected_arguments(self):
'''Manda llamar el metodo increase expected arguments de la clase Function'''
self.functions[self.scope].increase_expected_arguments()
def update_function_params(self, var_id, var_type):
'''Manda llamar metodo update params de la clase Funcion'''
self.functions[self.scope].update_params(var_id, var_type)
def set_return_type(self, function_return_type):
'''Manda llamar el metodo set return type de la clase Function'''
self.functions[self.scope].set_return_type(function_return_type)
def set_func_quad(self, func_quad):
'''Manda llamar el metodo set_func_quad de la clase Function'''
self.functions[self.scope].set_func_quad(func_quad)
def set_scope(self, scope):
'''Cambia el scope actual del directorio de funciones al scope que recibe'''
self.scope = scope
def reset_scope(self):
'''Reset del scope a global scope'''
self.scope = 'global'
# Add variable to current function scope
def add_var(self, variable_id, var_type, value=0, size=1):
'''Agrega variable a el diccionario de variables de una Funcion'''
if self.functions[self.scope].variables_dict.get(variable_id, None) is None:
var_name = get_var_name(var_type, self.scope, variable_id)
self.functions[self.scope].variables_dict[variable_id] = Variable(var_name, value, var_type, self.scope, size)
else:
variable_type = self.functions[self.scope].variables_dict[variable_id].get_type()
msg = 'Error 2001: Variable already declared! ' + str(variable_id) + '. TYPE: ' + variable_type
raise NameError(msg)
def add_for_var(self, variable_id, var_type):
'''Agrega variable al diccionario del current scope, si ya existe sobreescribe valor
Marca error si existe y no es tipo int'''
if self.functions[self.scope].variables_dict.get(variable_id, None) is None:
var_name = get_var_name(var_type, self.scope, variable_id)
self.functions[self.scope].variables_dict[variable_id] = Variable(var_name, -1, var_type, self.scope, 1)
else:
variable_type = self.functions[self.scope].variables_dict[variable_id].get_type()
if variable_type != 'int':
msg = 'Error 2001: Variable already declared! ' + str(variable_id) + '. TYPE: ' + variable_type
raise NameError(msg)
else:
self.functions[self.scope].variables_dict[variable_id].value = -1
def validate_variable(self, variable_id):
'''Busca variable en el scope actual'''
if self.functions[self.scope].variables_dict.get(variable_id, None) is None:
# Busca variable en el scope global
if self.functions['global'].variables_dict.get(variable_id, None) is None:
raise NameError('Error 2002: Variable not declared! VAR: ' + variable_id)
def start_evaluating(self):
'''Indica que el directorio de funciones esta evaluando la existencia de variables'''
self.evaluating = True
def finish_evaluating(self):
'''Indica que el directorio de funciones deja de evaluar funciones'''
self.evaluating = False
def set_type(self, last_type):
'''Set del ultimo token de tipo que fue leido'''
self.last_type = last_type
def get_func_dir(self):
'''Obtiene el diccionario de funciones'''
return self.functions
def get_var(self, variable_id):
'''Obtiene la lista con los datos de la variable del
diccionario de funciones en el scope actual o el global'''
if variable_id in self.functions[self.scope].variables_dict:
return self.functions[self.scope].variables_dict.get(variable_id)
elif variable_id in self.functions['global'].variables_dict:
return self.functions['global'].variables_dict.get(variable_id)
return None
def set_call_function(self, function_id):
'''Set del id de la funcion que esta siendo llamada
una vez que se valido su existencia en el diccionario de funciones'''
self.call_function.push(function_id)
self.call_arguments.push(0)
def increase_call_arguments(self):
'''# Incrementa la cantidad de argumentos que estan siendo usados para llamar una funcion.
Obtiene el tope de la pila, aumenta y vuelve a insertar en la pila'''
curr = self.call_arguments.pop()
curr += 1
self.call_arguments.push(curr)
def update_var_size(self, size):
'''Actualiza el size de una variable en caso de ser dimensionada'''
if size <= 0:
raise ValueError('Error 7005: Array size must be a positive integer')
else:
self.functions[self.scope].variables_dict[self.last_id.top].size = size
self.functions[self.scope].variables_dict[self.last_id.top].is_dim = True
def validate_call_arguments(self):
'''Funcion que valida que la cantidad de argumentos utilizados en una llamada a funcion
sea igual a los parametros que espera recibir'''
if self.functions[self.call_function.top].expected_arguments != self.call_arguments.top:
if self.functions[self.call_function.top].expected_arguments > self.call_arguments.top:
msg = 'Error 3001: Missing arguments in function call for function: ' + str(self.call_function)
elif self.functions[self.call_function.top].expected_arguments < self.call_arguments.top:
msg = 'Error 3002: Too many arguments in function call for function: ' + str(self.call_function)
msg += '. Expected arguments: ' + str(self.functions[self.call_function.top].expected_arguments) + '. Got: ' + str(self.call_arguments.top)
self.call_arguments.pop()
self.call_function.pop()
raise ValueError(msg)
else:
self.call_arguments.pop()
return self.call_function.pop()
def validate_arg_type(self, var_type):
'''Funcion que valida que el tipo de argumento que se manda sea del tipo esperado'''
expected_type = self.functions[self.call_function.top].params[self.call_arguments.top - 1][1]
if var_type != expected_type:
msg = 'Error 3003: Expected type in function call ' + str(self.scope) + ': ' + expected_type
msg += '. Got: ' + var_type
raise ValueError(msg)
return self.functions[self.call_function.top].params[self.call_arguments.top - 1]
def verify_var_dim(self):
'''Verifica que el id de una variable sea dimensionada'''
var = self.get_var(self.last_id.top)
if not var.is_dim:
raise ValueError('Error 7003: Variable is not array')
@property
def current_scope(self):
'''Propiedad del directorio de funciones para obtener el scope actual'''
return self.scope
def printeame(self):
'''Funcion auxiliar para imprimir el contenido del directorio de funciones'''
print('************ Functions Directory ************\n')
for key, val in self.functions.iteritems():
print(str(val.return_type) + ' ' + str(key) + '('),
for var in val.params:
print(str(var[1]) + ' ' + str(var[0]) + ', '),
print('): quad_num ' + str(val.get_function_quad()))
for k, vals in val.variables_dict.iteritems():
print('\t' + vals.get_type() + ' ' + k + ' = ' + str(vals.get_value()) + ' size: ' + str(vals.get_size()))
print('')
print('*********************************************')<|fim▁end|> | self.evaluating = True
|
<|file_name|>BhTree.java<|end_file_name|><|fim▁begin|>package com.cocos2dj.module.btree;
import java.util.HashMap;
import com.badlogic.gdx.ai.btree.BehaviorTree;
import com.badlogic.gdx.ai.btree.Task;
import com.badlogic.gdx.ai.btree.branch.Parallel;
import com.badlogic.gdx.ai.btree.branch.Parallel.Policy;
import com.badlogic.gdx.ai.btree.branch.Selector;
import com.badlogic.gdx.ai.btree.branch.Sequence;
import com.cocos2dj.macros.CCLog;
import com.cocos2dj.module.btree.BhTreeModel.StructBHTNode;
import com.cocos2dj.module.btree.BhLeafTask.DebugTask;
/**
* RcBHT.java
*
* 不用管类型
*
* @author xujun
*
*/
public class BhTree<T> extends BehaviorTree<T> {
static final String TAG = "RcBHT";
//fields>>
HashMap<String, BhLeafTask<T>> tasksMap = new HashMap<>();
private boolean pauseFlag;
//fields<<
    /** Suspend tree stepping; step() becomes a no-op until resume(). */
    public void pause() {
        pauseFlag = true;
    }
    /** Resume tree stepping after a pause(). */
    public void resume() {
        pauseFlag = false;
    }
//func>>
@SuppressWarnings("unchecked")
Task<T> createTask(String type, String name, String args) {<|fim▁hole|> switch(type) {
case "leaf":
BhLeafTask<T> leaf;// = new RcLeafTask<T>();
//相同的name会缓存
if(name != null) {
leaf = tasksMap.get(name);
if(leaf != null) {
CCLog.debug(TAG, "find same leaf task : " + name);
return leaf;
}
if("debug".equals(args)) {
leaf = new DebugTask(name);
} else {
leaf = new BhLeafTask<T>();
}
tasksMap.put(name, leaf);
return leaf;
} else {
if("debug".equals(args)) {
leaf = new DebugTask(name);
} else {
leaf = new BhLeafTask<T>();
}
}
return leaf;
case "parallel":
if(args == null) {
return new Parallel<T>();
} else {
switch(args) {
case "selector":
return new Parallel<T>(Policy.Selector);
case "sequence":
return new Parallel<T>(Policy.Sequence);
}
CCLog.error(TAG, "pattern fail args = " + args + " need selector or sequence");
return null;
}
case "selector":
return new Selector<T>();
case "sequence":
return new Sequence<T>();
}
CCLog.error(TAG, "not found type : " + type);
return null;
}
final Task<T> createTask(StructBHTNode node) {
Task<T> ret = createTask(node.type, node.key, node.args);
if(ret == null) {
CCLog.error(TAG, "createTask fail ");
}
if(node.children != null) {
int len = node.children.length;
for(int i = 0; i < len; ++i) {
Task<T> child = createTask(node.children[i]);
ret.addChild(child);
}
}
return ret;
}
//func<<
//methods>>
    /**
     * Build the task tree from the parsed model and attach its root to
     * this behavior tree.
     * NOTE(review): createTask can return null for a malformed model,
     * which would add a null child here — confirm models are pre-validated.
     */
    public void setup(BhTreeModel model) {
        Task<T> root = createTask(model.root);
        addChild(root);
    }
    /** Advance the behavior tree by one tick unless paused. */
    public void step() {
        if(!pauseFlag) {
            super.step();
        }
    }
    /**
     * Look up a named leaf task registered while the tree was built.
     *
     * @param key the leaf's name as given in the model
     * @return the leaf task, or null (after logging an error) when no
     *         leaf with that name exists
     */
    public BhLeafTask<T> getLeaf(String key) {
        BhLeafTask<T> ret = tasksMap.get(key);
        // System.out.println("map = " + tasksMap.toString());
        if(ret == null) {
            CCLog.error(TAG, "task not found : " + key);
        }
        return ret;
    }
//methods<<
}<|fim▁end|> | |
<|file_name|>user_repository.py<|end_file_name|><|fim▁begin|>import sys
from starstoloves.models import User as UserModel
from starstoloves import model_repository
from starstoloves.lib.track import lastfm_track_repository
from .user import User
def from_session_key(session_key):
user_model, created = UserModel.objects.get_or_create(session_key=session_key)
return User(
session_key=session_key,
repository=sys.modules[__name__],
);
def delete(user):
try:<|fim▁hole|> user_model.delete()
except UserModel.DoesNotExist:
pass;<|fim▁end|> | user_model = model_repository.from_user(user) |
<|file_name|>ui_methods.py<|end_file_name|><|fim▁begin|>def authorized_to_manage_request(_, request, current_user, pushmaster=False):
if pushmaster or \
request['user'] == current_user or \
(request['watchers'] and current_user in request['watchers'].split(',')):
return True
return False<|fim▁hole|>
def sort_pickmes(_, requests, tags_order):
"""Sort pickmes based on tags_order
Args:
- - request handler object
requests - a list of requests
tags_order - order to sort requests
Return: sorted requests list
"""
def compare_requests(request1, request2):
tags1_list = request1['tags'].split(',')
tags2_list = request2['tags'].split(',')
for tag in tags_order:
tag_in_tags1 = tag in tags1_list
tag_in_tags2 = tag in tags2_list
if tag_in_tags1 == tag_in_tags2:
continue
elif tag_in_tags1:
return -1
else:
return 1
return cmp(request1['user'], request2['user'])
sorted_requests = sorted(requests, cmp=compare_requests)
return sorted_requests<|fim▁end|> | |
<|file_name|>WebBackend.cpp<|end_file_name|><|fim▁begin|>// ===============================
// PC-BSD REST API Server
// Available under the 3-clause BSD License
// Written by: Ken Moore <[email protected]> DEC 2015
// =================================
// Note: Don't forget to run "AUTHSYSTEM->hasFullAccess(SockAuthToken)"
// To restrict user access to some systems as needed!
// =================================
#include <WebSocket.h>
//sysadm library interface classes
#include "library/sysadm-beadm.h"
#include "library/sysadm-general.h"
#include "library/sysadm-filesystem.h"
#include "library/sysadm-iocage.h"
#include "library/sysadm-iohyve.h"
#include "library/sysadm-lifepreserver.h"
#include "library/sysadm-network.h"
#include "library/sysadm-systemmanager.h"
#include "library/sysadm-update.h"
#include "library/sysadm-zfs.h"
#include "library/sysadm-pkg.h"
#include "library/sysadm-users.h"
#include "library/sysadm-servicemanager.h"
#include "library/sysadm-firewall.h"
#define DEBUG 0
//#define SCLISTDELIM QString("::::") //SysCache List Delimiter
RestOutputStruct::ExitCode WebSocket::AvailableSubsystems(bool allaccess, QJsonObject *out){
//Probe the various subsystems to see what is available through this server
//Output format:
/*<out>{
<namespace1/name1> : <read/write/other>,
<namespace2/name2> : <read/write/other>,
}
*/
// - server settings (always available)
out->insert("rpc/settings","read/write");
out->insert("rpc/logs", allaccess ? "read/write" : "read");
// - beadm
if(QFile::exists("/usr/local/sbin/beadm")){
out->insert("sysadm/beadm", "read/write");
}
// - dispatcher (Internal to server - always available)
//"read" is the event notifications, "write" is the ability to queue up jobs
out->insert("rpc/dispatcher", allaccess ? "read/write" : "read");
// - filesystem
out->insert("sysadm/fs","read/write");
// - network
out->insert("sysadm/network","read/write");
// - lifepreserver
if(QFile::exists("/usr/local/bin/lpreserver")){
out->insert("sysadm/lifepreserver", "read/write");
}
// - iocage
if(QFile::exists("/usr/local/sbin/iocage")){
out->insert("sysadm/iocage", "read/write");
}
// - iohyve
if(QFile::exists("/usr/local/sbin/iohyve")){
out->insert("sysadm/iohyve", "read/write");
}
// - zfs
if(QFile::exists("/sbin/zfs") && QFile::exists("/sbin/zpool")){
out->insert("sysadm/zfs", allaccess ? "read/write" : "read");
}
// - pkg
if(QFile::exists("/usr/local/sbin/pkg")){
out->insert("sysadm/pkg", "read/write");
}
// - Generic system information
out->insert("sysadm/systemmanager","read/write");
// - PC-BSD Updater
if(QFile::exists("/usr/local/bin/pc-updatemanager")){
out->insert("sysadm/update", "read/write");
}
// - User Manager
out->insert("sysadm/users","read/write");
//- Service Manager
out->insert("sysadm/services","read/write");
// - Firewall Manager
out->insert("sysadm/firewall","read/write");
return RestOutputStruct::OK;
}
RestOutputStruct::ExitCode WebSocket::EvaluateBackendRequest(const RestInputStruct &IN, QJsonObject *out){
/*Inputs:
"namesp" - namespace for the request
"name" - name of the request
"args" - JSON input arguments structure
"out" - JSON output arguments structure
*/
//qDebug() << "Evaluate Backend Request:" << IN.namesp << IN.name << IN.id << IN.args << IN.fullaccess;
QString namesp = IN.namesp.toLower(); QString name = IN.name.toLower();
//Get/Verify subsystems
if(namesp=="rpc" && name=="query"){
return AvailableSubsystems(IN.fullaccess, out);
}else{
QJsonObject avail;
AvailableSubsystems(IN.fullaccess, &avail);
if(!avail.contains(namesp+"/"+name)){ return RestOutputStruct::NOTFOUND; }
}
//qDebug() << "Evaluate Backend Request:" << namesp << name;
//Go through and forward this request to the appropriate sub-system
if(namesp=="rpc" && name=="settings"){
return EvaluateSysadmSettingsRequest(IN.args, out);
}else if(namesp=="rpc" && name=="logs"){
return EvaluateSysadmLogsRequest(IN.fullaccess, IN.args, out);
}else if(namesp=="rpc" && name=="dispatcher"){
return EvaluateDispatcherRequest(IN.fullaccess, IN.args, out);
}else if(namesp=="sysadm" && name=="beadm"){
return EvaluateSysadmBEADMRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="fs"){
return EvaluateSysadmFSRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="iocage"){
return EvaluateSysadmIocageRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="iohyve"){
return EvaluateSysadmIohyveRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="lifepreserver"){
return EvaluateSysadmLifePreserverRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="network"){
return EvaluateSysadmNetworkRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="systemmanager"){
return EvaluateSysadmSystemMgmtRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="update"){
return EvaluateSysadmUpdateRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="zfs"){
return EvaluateSysadmZfsRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="pkg"){
return EvaluateSysadmPkgRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="users"){
return EvaluateSysadmUserRequest(IN.fullaccess, AUTHSYSTEM->userForToken(SockAuthToken), IN.args, out);
}else if(namesp=="sysadm" && name=="services"){
return EvaluateSysadmServiceRequest(IN.args, out);
}else if(namesp=="sysadm" && name=="firewall"){
return EvaluateSysadmFirewallRequest(IN.args, out);
}else{
return RestOutputStruct::BADREQUEST;
}
}
// === SYSADM SSL SETTINGS ===
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmSettingsRequest(const QJsonValue in_args, QJsonObject *out){
//qDebug() << "sysadm/settings Request:" << in_args;
if(!in_args.isObject()){ return RestOutputStruct::BADREQUEST; }
QJsonObject argsO = in_args.toObject();
QStringList keys = argsO.keys();
//qDebug() << " - keys:" << keys;
if(!keys.contains("action")){ return RestOutputStruct::BADREQUEST; }
QString act = argsO.value("action").toString();
bool ok = false;
if(act=="register_ssl_cert" && keys.contains("pub_key")){
//Required arguments: "pub_key" (String)
//Optional arguments: "nickname" (String), "email" (String)
QString pub_key, nickname, email;
pub_key = argsO.value("pub_key").toString();
if(keys.contains("nickname")){ nickname = argsO.value("nickname").toString(); }
if(keys.contains("email")){ email = argsO.value("email").toString(); }
if(!pub_key.isEmpty()){
ok = AUTHSYSTEM->RegisterCertificate(SockAuthToken, pub_key, nickname, email);
if(!ok){ return RestOutputStruct::FORBIDDEN; }
}
}else if(act=="list_ssl_certs"){
AUTHSYSTEM->ListCertificates(SockAuthToken, out);
ok = true; //always works for current user (even if nothing found)
}else if(act=="list_ssl_checksums"){
AUTHSYSTEM->ListCertificateChecksums(out);
ok = true;
}else if(act=="revoke_ssl_cert" && keys.contains("pub_key") ){
//Additional arguments: "user" (optional), "pub_key" (String)
QString user; if(keys.contains("user")){ user = argsO.value("user").toString(); }
ok = AUTHSYSTEM->RevokeCertificate(SockAuthToken,argsO.value("pub_key").toString(), user);
}
if(ok){ return RestOutputStruct::OK; }
else{ return RestOutputStruct::BADREQUEST; }
}
// === sysadm/logs ===
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmLogsRequest(bool allaccess, const QJsonValue in_args, QJsonObject *out){
if(!in_args.isObject() || !in_args.toObject().contains("action") ){ return RestOutputStruct::BADREQUEST; }
QString act = in_args.toObject().value("action").toString().toLower();
QJsonObject obj = in_args.toObject();
//Determine the type of action to perform
if(act=="read_logs"){
// OPTIONAL ARGUMENTS:
// "logs" : <string or array of strings> Possible Values: "hostinfo", "dispatcher", "events-dispatcher", "events-lifepreserver", "events-state";
// "time_format" : "<format>" Possible Values: "time_t_seconds", "epoch_mseconds", "relative_[day/month/second]", "<QDateTime String format>"
// See (http://doc.qt.io/qt-5/qdatetime.html#fromString) for details on the QDateTime String format codes
// "start_time" : "<number>" (according to format specified)
// "end_time" : "<number>" (according to format specified)
//First figure out which logs to read
QStringList logs;
if(obj.contains("logs")){
if(obj.value("logs").isString()){ logs << obj.value("logs").toString(); }
else if(obj.value("logs").isArray()){ logs = JsonArrayToStringList(obj.value("logs").toArray()); }
}
if(logs.isEmpty()){
//Use all logs if no particular one(s) are specified
logs << "hostinfo" << "dispatcher" << "events-dispatcher" << "events-lifepreserver" << "events-state";
}
//Get the time range for the logs
QString format = obj.value("time_format").toString();
QDateTime endtime = QDateTime::currentDateTime();
QDateTime starttime = endtime.addSecs( -3600*12); //12 hours back by default
if(!format.isEmpty()){
QString str_endtime = obj.value("end_time").toString();
QString str_starttime = obj.value("start_time").toString();
if(!str_endtime.isEmpty()){
if(format=="time_t_seconds"){ endtime = QDateTime::fromTime_t(str_endtime.toInt()); }
else if(format=="epoch_mseconds"){ endtime = QDateTime::fromMSecsSinceEpoch(str_endtime.toInt()); }
else if(format=="relative_day"){ endtime = endtime.addDays( 0-qAbs(str_endtime.toInt()) ); }
else if(format=="relative_month"){ endtime = endtime.addMonths( 0-qAbs(str_endtime.toInt()) ); }
else if(format=="relative_second"){ endtime = endtime.addSecs( 0-qAbs(str_endtime.toInt()) ); }
else{ endtime = QDateTime::fromString(str_endtime, format); }
}
if(!str_starttime.isEmpty()){
if(format=="time_t_seconds"){ starttime = QDateTime::fromTime_t(str_starttime.toInt()); }
else if(format=="epoch_mseconds"){ starttime = QDateTime::fromMSecsSinceEpoch(str_starttime.toInt()); }
else if(format=="relative_day"){ starttime = endtime.addDays( 0-qAbs(str_starttime.toInt()) ); }
else if(format=="relative_month"){ starttime = endtime.addMonths( 0-qAbs(str_starttime.toInt()) ); }
else if(format=="relative_second"){ starttime = endtime.addSecs( 0-qAbs(str_starttime.toInt()) ); }
else{ starttime = QDateTime::fromString(str_starttime, format); }
}
}
//Now read/return the logs
for(int i=0; i<logs.length(); i++){
int log = -1; //this needs to correspond to the LogManager::LOG_FILE enumeration
if(logs[i]=="hostinfo"){ log = 0; }
else if(logs[i]=="dispatcher"){ log = 1; }
else if(logs[i]=="events-dispatcher"){ log = 2; }
else if(logs[i]=="events-lifepreserver"){ log = 3; }
else if(logs[i]=="events-state"){ log = 4; }
if(log>=0){
QStringList info = LogManager::readLog( (LogManager::LOG_FILE)(log), starttime, endtime);
//REMINDER of format: "[datetime]<message>"
if(info.isEmpty()){ continue; } //nothing here
QJsonObject lobj;
for(int j=0; j<info.length(); j++){
if(log>=2){
//event logs - message is JSON data
lobj.insert(info[j].section("]",0,0).section("[",1,1), QJsonDocument::fromJson( info[j].section("]",1,-1).toLocal8Bit() ).object() );
}else{
//Simple text log
lobj.insert(info[j].section("]",0,0).section("[",1,1), info[j].section("]",1,-1));
}
}//end loop over log info
out->insert( logs[i], lobj);
}
}//end loop over log types
}else{
return RestOutputStruct::BADREQUEST;
}
//Return Success
return RestOutputStruct::OK;
}
//==== DISPATCHER ====
// Handle "dispatcher" requests: queue ("run"), list ("list"), or kill ("kill")
// externally-managed processes.
//   allaccess - whether the requesting user has full-access permissions
//   in_args   - JSON object; REQUIRED key "action" (case-insensitive)
//   out       - output object populated with the results
// Returns OK on success, FORBIDDEN for restricted users on run/kill,
// BADREQUEST for malformed input or unknown actions.
RestOutputStruct::ExitCode WebSocket::EvaluateDispatcherRequest(bool allaccess, const QJsonValue in_args, QJsonObject *out){
  //dispatcher only needs a list of sub-commands at the moment (might change later)
  if(!in_args.isObject() || !in_args.toObject().contains("action") ){ return RestOutputStruct::BADREQUEST; }
  QString act = in_args.toObject().value("action").toString().toLower();
  //Determine the type of action to perform
  if(act=="run"){
    if(!allaccess){ return RestOutputStruct::FORBIDDEN; } //this user does not have permission to queue jobs
    //Every other top-level key is treated as a job ID whose value holds its command(s)
    QStringList ids = in_args.toObject().keys();
    ids.removeAll("action"); //already handled the action
    for(int i=0; i<ids.length(); i++){
      //Get the list of commands for this id (single string or array of strings)
      QStringList cmds;
      QJsonValue val = in_args.toObject().value(ids[i]);
      if(val.isArray()){ cmds = JsonArrayToStringList(val.toArray()); }
      else if(val.isString()){ cmds << val.toString(); }
      else{
        //Invalid value type: drop this ID and re-check the same index (list shifted left)
        ids.removeAt(i);
        i--;
        continue;
      }
      //queue up this process
      DISPATCHER->queueProcess(ids[i], cmds);
    }
    //Return the PENDING result (only the IDs that were actually queued remain in "ids")
    LogManager::log(LogManager::HOST, "Client Launched Processes["+SockPeerIP+"]: "+ids.join(",") );
    out->insert("started", QJsonArray::fromStringList(ids));
  }else if(act=="list"){
    QJsonObject info = DISPATCHER->listJobs();
    out->insert("jobs", info);
  }else if(act=="kill" && in_args.toObject().contains("job_id") ){
    if(!allaccess){ return RestOutputStruct::FORBIDDEN; } //this user does not have permission to modify jobs
    //"job_id" may be a single string or an array of job IDs
    QStringList ids;
    QJsonValue val = in_args.toObject().value("job_id");
    if(val.isArray()){ ids = JsonArrayToStringList(val.toArray()); }
    else if(val.isString()){ ids << val.toString(); }
    else{ return RestOutputStruct::BADREQUEST; }
    out->insert("killed", DISPATCHER->killJobs(ids));
  }else{
    return RestOutputStruct::BADREQUEST;
  }
  //Return Success
  return RestOutputStruct::OK;
}
//==== SYSADM -- BEADM ====
// Handle "sysadm/beadm" requests (ZFS Boot Environment management).
// REQUIRED input key: "action" (case-insensitive), one of:
//   listbes / renamebe / activatebe / createbe / destroybe / mountbe / umountbe
// The result of the matching sysadm::BEADM call is inserted into "out" under
// a key equal to the action name. Unknown/missing actions return BADREQUEST.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmBEADMRequest(const QJsonValue in_args, QJsonObject *out){
  if(in_args.isObject()){
    QStringList keys = in_args.toObject().keys();
    bool ok = false; //set to true as soon as a recognized action is handled
    if(keys.contains("action")){
      QString act = JsonValueToString(in_args.toObject().value("action")).toLower();
      if(act=="listbes"){
        ok = true;
        out->insert("listbes", sysadm::BEADM::listBEs());
      }else if(act=="renamebe"){
        ok = true;
        out->insert("renamebe", sysadm::BEADM::renameBE(in_args.toObject()));
      }else if(act=="activatebe"){
        ok = true;
        out->insert("activatebe", sysadm::BEADM::activateBE(in_args.toObject()));
      }else if(act=="createbe"){
        ok = true;
        out->insert("createbe", sysadm::BEADM::createBE(in_args.toObject()));
      }else if(act=="destroybe"){
        ok = true;
        out->insert("destroybe", sysadm::BEADM::destroyBE(in_args.toObject()));
      }else if(act=="mountbe"){
        ok = true;
        out->insert("mountbe", sysadm::BEADM::mountBE(in_args.toObject()));
      }else if(act=="umountbe"){
        ok = true;
        out->insert("umountbe", sysadm::BEADM::umountBE(in_args.toObject()));
      }
    } //end of "action" key usage
    //If nothing done - return the proper code
    if(!ok){
      return RestOutputStruct::BADREQUEST;
    }
  }else{ // if(in_args.isArray()){
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
//==== SYSADM -- FS ====
// Handle "sysadm/fs" requests.
// REQUIRED input key: "action" (case-insensitive); only "dirlist" is supported,
// which stores the result of sysadm::FS::list_dir() under the "dirlist" key.
// Any other input (non-object, missing/unknown action) returns BADREQUEST.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmFSRequest(const QJsonValue in_args, QJsonObject *out){
  //Guard: only JSON objects are valid input for this API
  if(!in_args.isObject()){ return RestOutputStruct::BADREQUEST; }
  QJsonObject reqObj = in_args.toObject();
  if(!reqObj.keys().contains("action")){ return RestOutputStruct::BADREQUEST; }
  QString act = JsonValueToString(reqObj.value("action")).toLower();
  if(act=="dirlist"){
    //Directory listing request
    out->insert("dirlist", sysadm::FS::list_dir(reqObj));
    return RestOutputStruct::OK;
  }
  //Unrecognized action
  return RestOutputStruct::BADREQUEST;
}
//==== SYSADM -- Network ====
// Handle "sysadm/network" requests.
// REQUIRED input key: "action" - NOTE this match is case-SENSITIVE (no toLower),
// unlike some of the other handlers in this file.
// Supported action: "list-devices" - inserts one object per network interface
// into "out" (keyed by device name) with address/status info from sysadm::NetDevice.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmNetworkRequest(const QJsonValue in_args, QJsonObject *out){
  if(in_args.isObject()){
    QStringList keys = in_args.toObject().keys();
    bool ok = false;
    if(keys.contains("action")){
      QString act = JsonValueToString(in_args.toObject().value("action"));
      if(act=="list-devices"){
        ok = true;
        QStringList devs = sysadm::NetDevice::listNetDevices();
        for(int i=0; i<devs.length(); i++){
          sysadm::NetDevice D(devs[i]);
          QJsonObject obj;
          //assemble the information about this device into an output object
          obj.insert("ipv4", D.ipAsString());
          obj.insert("ipv6", D.ipv6AsString());
          obj.insert("netmask", D.netmaskAsString());
          obj.insert("description", D.desc());
          obj.insert("MAC", D.macAsString());
          obj.insert("status", D.mediaStatusAsString());
          //boolean flags are serialized as the strings "true"/"false"
          obj.insert("is_active", D.isUp() ? "true" : "false" );
          obj.insert("is_dhcp", D.usesDHCP() ? "true" : "false" );
          obj.insert("is_wireless", D.isWireless() ? "true" : "false" );
          //Add this device info to the main output structure
          out->insert(devs[i], obj);
        }
      }
    } //end of "action" key usage
    //If nothing done - return the proper code
    if(!ok){
      return RestOutputStruct::BADREQUEST;
    }
  }else{ // if(in_args.isArray()){
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
//==== SYSADM -- LifePreserver ====
// Handle "sysadm/lifepreserver" requests (snapshot & replication management).
// REQUIRED input key: "action" - case-SENSITIVE match. Each recognized action
// calls the matching sysadm::LifePreserver routine and stores its result in
// "out" under a key equal to the action name.
// NOTE: the branches below are independent "if" blocks rather than an
// else-if chain; since "act" can only equal one string per request the
// observable behavior is the same (at most one branch runs).
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmLifePreserverRequest(const QJsonValue in_args, QJsonObject *out){
  if(in_args.isObject()){
    QStringList keys = in_args.toObject().keys();
    bool ok = false;
    if(keys.contains("action")){
      QString act = JsonValueToString(in_args.toObject().value("action"));
      if(act=="addreplication"){
        ok = true;
        out->insert("addreplication", sysadm::LifePreserver::addReplication(in_args.toObject()));
      }
      if(act=="createsnap"){
        ok = true;
        out->insert("createsnap", sysadm::LifePreserver::createSnapshot(in_args.toObject()));
      }
      if(act=="cronscrub"){
        ok = true;
        out->insert("cronscrub", sysadm::LifePreserver::scheduleScrub(in_args.toObject()));
      }
      if(act=="cronsnap"){
        ok = true;
        out->insert("cronsnap", sysadm::LifePreserver::scheduleSnapshot(in_args.toObject()));
      }
      if(act=="initreplication"){
        ok = true;
        out->insert("initreplication", sysadm::LifePreserver::initReplication(in_args.toObject()));
      }
      if(act=="listcron"){
        ok = true;
        out->insert("listcron", sysadm::LifePreserver::listCron());
      }
      if(act=="listreplication"){
        ok = true;
        out->insert("listreplication", sysadm::LifePreserver::listReplication());
      }
      if(act=="listsnap"){
        ok = true;
        out->insert("listsnap", sysadm::LifePreserver::listSnap(in_args.toObject()));
      }
      if(act=="removereplication"){
        ok = true;
        out->insert("removereplication", sysadm::LifePreserver::removeReplication(in_args.toObject()));
      }
      if(act=="removesnap"){
        ok = true;
        out->insert("removesnap", sysadm::LifePreserver::removeSnapshot(in_args.toObject()));
      }
      if(act=="revertsnap"){
        ok = true;
        out->insert("revertsnap", sysadm::LifePreserver::revertSnapshot(in_args.toObject()));
      }
      if(act=="runreplication"){
        ok = true;
        out->insert("runreplication", sysadm::LifePreserver::runReplication(in_args.toObject()));
      }
      if(act=="savesettings"){
        ok = true;
        out->insert("savesettings", sysadm::LifePreserver::saveSettings(in_args.toObject()));
      }
      if(act=="settings"){
        ok = true;
        out->insert("settings", sysadm::LifePreserver::settings());
      }
    } //end of "action" key usage
    //If nothing done - return the proper code
    if(!ok){
      return RestOutputStruct::BADREQUEST;
    }
  }else{ // if(in_args.isArray()){
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
//==== SYSADM -- SysMgmt ====
// Handle "sysadm/systemmanager" requests (general system info/control).
// REQUIRED input key: "action" - case-SENSITIVE match. Each recognized action
// calls the matching sysadm::SysMgmt routine and stores the result in "out"
// under a key equal to the action name.
// WARNING: "halt" and "reboot" actually shut down / restart the host.
// NOTE: independent "if" blocks (not else-if); only one can match per request.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmSystemMgmtRequest(const QJsonValue in_args, QJsonObject *out){
  if(in_args.isObject()){
    QStringList keys = in_args.toObject().keys();
    bool ok = false;
    if(keys.contains("action")){
      QString act = JsonValueToString(in_args.toObject().value("action"));
      if(act=="batteryinfo"){
        ok = true;
        out->insert("batteryinfo", sysadm::SysMgmt::batteryInfo());
      }
      if(act=="cpupercentage"){
        ok = true;
        out->insert("cpupercentage", sysadm::SysMgmt::cpuPercentage());
      }
      if(act=="cputemps"){
        ok = true;
        out->insert("cputemps", sysadm::SysMgmt::cpuTemps());
      }
      if(act=="externalmounts"){
        ok = true;
        out->insert("externalmounts", sysadm::SysMgmt::externalDevicePaths());
      }
      if(act=="halt"){
        ok = true;
        out->insert("halt", sysadm::SysMgmt::systemHalt());
      }
      if(act=="killproc"){
        ok = true;
        out->insert("killproc", sysadm::SysMgmt::killProc(in_args.toObject()));
      }
      if(act=="memorystats"){
        ok = true;
        out->insert("memorystats", sysadm::SysMgmt::memoryStats());
      }
      if(act=="procinfo"){
        ok = true;
        out->insert("procinfo", sysadm::SysMgmt::procInfo());
      }
      if(act=="reboot"){
        ok = true;
        out->insert("reboot", sysadm::SysMgmt::systemReboot());
      }
      if(act=="setsysctl"){
        ok = true;
        out->insert("setsysctl", sysadm::SysMgmt::setSysctl(in_args.toObject()));
      }
      if(act=="sysctllist"){
        ok = true;
        out->insert("sysctllist", sysadm::SysMgmt::sysctlList());
      }
      if(act=="systeminfo"){
        ok = true;
        out->insert("systeminfo", sysadm::SysMgmt::systemInfo());
      }
    } //end of "action" key usage
    //If nothing done - return the proper code
    if(!ok){
      return RestOutputStruct::BADREQUEST;
    }
  }else{ // if(in_args.isArray()){
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
//==== SYSADM -- Update ====
// Handle "sysadm/update" requests (system update management).
// REQUIRED input key: "action" - case-SENSITIVE match; one of:
//   checkupdates / listbranches / startupdate / stopupdate / listsettings / changesettings
// For "checkupdates" the OPTIONAL key "force"=="true" disables the fast
// (cached) check and forces a full re-check.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmUpdateRequest(const QJsonValue in_args, QJsonObject *out){
  if(in_args.isObject()){
    QStringList keys = in_args.toObject().keys();
    bool ok = false;
    if(keys.contains("action")){
      QString act = JsonValueToString(in_args.toObject().value("action"));
      if(act=="checkupdates"){
        ok = true;
        bool fastcheck = true;
        //fastcheck stays true unless the client explicitly sent force=="true"
        fastcheck = in_args.toObject().value("force").toString().toLower()!="true";
        out->insert("checkupdates", sysadm::Update::checkUpdates(fastcheck));
      }else if(act=="listbranches"){
        ok = true;
        out->insert("listbranches", sysadm::Update::listBranches());
      }else if(act=="startupdate"){
        ok = true;
        out->insert("startupdate", sysadm::Update::startUpdate(in_args.toObject()) );
      }else if(act=="stopupdate"){
        ok = true;
        out->insert("stopupdate", sysadm::Update::stopUpdate() );
      }else if(act=="listsettings"){
        ok = true;
        out->insert("listsettings", sysadm::Update::readSettings() );
      }else if(act=="changesettings"){
        ok = true;
        out->insert("changesettings", sysadm::Update::writeSettings(in_args.toObject()) );
      }
    } //end of "action" key usage
    //If nothing done - return the proper code
    if(!ok){
      return RestOutputStruct::BADREQUEST;
    }
  }else{ // if(in_args.isArray()){
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
//==== SYSADM -- iocage ====
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmIocageRequest(const QJsonValue in_args, QJsonObject *out){
if(in_args.isObject()){
QStringList keys = in_args.toObject().keys();
bool ok = false;
if(keys.contains("action")){
QString act = JsonValueToString(in_args.toObject().value("action"));
if(act=="execjail"){
ok = true;
out->insert("execjail", sysadm::Iocage::execJail(in_args.toObject()));
}
if(act=="df"){
ok = true;
out->insert("df", sysadm::Iocage::df());
}
if(act=="destroyjail"){
ok = true;
out->insert("destroyjail", sysadm::Iocage::destroyJail(in_args.toObject()));
}
if(act=="createjail"){
ok = true;
out->insert("createjail", sysadm::Iocage::createJail(in_args.toObject()));
}
if(act=="clonejail"){
ok = true;
out->insert("clonejail", sysadm::Iocage::cloneJail(in_args.toObject()));
}
if(act=="cleanall"){
ok = true;
out->insert("cleanall", sysadm::Iocage::cleanAll());
}
if(act=="cleantemplates"){
ok = true;
out->insert("cleantemplates", sysadm::Iocage::cleanTemplates());
}<|fim▁hole|> out->insert("cleanreleases", sysadm::Iocage::cleanReleases());
}
if(act=="cleanjails"){
ok = true;
out->insert("cleanjails", sysadm::Iocage::cleanJails());
}
if(act=="capjail"){
ok = true;
out->insert("capjail", sysadm::Iocage::capJail(in_args.toObject()));
}
if(act=="deactivatepool"){
ok = true;
out->insert("deactivatepool", sysadm::Iocage::deactivatePool(in_args.toObject()));
}
if(act=="activatepool"){
ok = true;
out->insert("activatepool", sysadm::Iocage::activatePool(in_args.toObject()));
}
if(act=="stopjail"){
ok = true;
out->insert("stopjail", sysadm::Iocage::stopJail(in_args.toObject()));
}
if(act=="startjail"){
ok = true;
out->insert("startjail", sysadm::Iocage::startJail(in_args.toObject()));
}
if(act=="getdefaultsettings"){
ok = true;
out->insert("getdefaultsettings", sysadm::Iocage::getDefaultSettings());
}
if(act=="getjailsettings"){
ok = true;
out->insert("getjailsettings", sysadm::Iocage::getJailSettings(in_args.toObject()));
}
if(act=="listjails"){
ok = true;
out->insert("listjails", sysadm::Iocage::listJails());
}
} //end of "action" key usage
//If nothing done - return the proper code
if(!ok){
return RestOutputStruct::BADREQUEST;
}
}else{ // if(in_args.isArray()){
return RestOutputStruct::BADREQUEST;
}
return RestOutputStruct::OK;
}
//==== SYSADM -- iohyve ====
// Handle "sysadm/iohyve" requests (bhyve VM management via iohyve).
// REQUIRED input key: "action" - case-SENSITIVE match. Each recognized action
// calls the matching sysadm::Iohyve routine and stores the result in "out"
// under a key equal to the action name.
// NOTE: the first four branches are independent "if" blocks while the rest are
// an else-if chain hanging off "deletedisk"; since "act" can only equal one
// string per request, at most one branch ever runs either way.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmIohyveRequest(const QJsonValue in_args, QJsonObject *out){
  if(in_args.isObject()){
    QStringList keys = in_args.toObject().keys();
    bool ok = false;
    if(keys.contains("action")){
      QString act = JsonValueToString(in_args.toObject().value("action"));
      //qDebug() << " - iohyve action:" << act;
      if(act=="adddisk"){
        ok = true;
        out->insert("adddisk", sysadm::Iohyve::addDisk(in_args.toObject()));
      }
      if(act=="create"){
        ok = true;
        out->insert("create", sysadm::Iohyve::createGuest(in_args.toObject()));
      }
      if(act=="delete"){
        ok = true;
        out->insert("delete", sysadm::Iohyve::deleteGuest(in_args.toObject()));
      }
      if(act=="deletedisk"){
        ok = true;
        out->insert("deletedisk", sysadm::Iohyve::deleteDisk(in_args.toObject()));
      }
      else if(act=="listdisks"){
        ok = true;
        out->insert("listdisks", sysadm::Iohyve::listDisks(in_args.toObject()));
      }
      else if(act=="listvms"){
        ok = true;
        out->insert("listvms", sysadm::Iohyve::listVMs());
      }
      else if(act=="listisos"){
        ok = true;
        out->insert("listisos", sysadm::Iohyve::listISOs());
      }
      else if(act=="fetchiso"){
        ok = true;
        //DProcess fetchproc;
        out->insert("fetchiso", sysadm::Iohyve::fetchISO(in_args.toObject()));
      }
      else if(act=="install"){
        ok = true;
        out->insert("install", sysadm::Iohyve::installGuest(in_args.toObject()));
      }
      else if(act=="issetup"){
        ok = true;
        out->insert("issetup", sysadm::Iohyve::isSetup());
      }
      else if(act=="renameiso"){
        ok = true;
        out->insert("renameiso", sysadm::Iohyve::renameISO(in_args.toObject()));
      }
      else if(act=="rmiso"){
        ok = true;
        out->insert("rmiso", sysadm::Iohyve::rmISO(in_args.toObject()));
      }
      else if(act=="resizedisk"){
        ok = true;
        out->insert("resizedisk", sysadm::Iohyve::resizeDisk(in_args.toObject()));
      }
      else if(act=="setup"){
        ok = true;
        out->insert("setup", sysadm::Iohyve::setupIohyve(in_args.toObject()));
      }
      else if(act=="start"){
        ok = true;
        out->insert("start", sysadm::Iohyve::startGuest(in_args.toObject()));
      }
      else if(act=="stop"){
        ok = true;
        out->insert("stop", sysadm::Iohyve::stopGuest(in_args.toObject()));
      }
      else if(act=="version"){
        ok = true;
        out->insert("version", sysadm::Iohyve::version());
      }
      //qDebug() << " - iohyve action finished:" << act << ok;
    } //end of "action" key usage
    //If nothing done - return the proper code
    if(!ok){
      return RestOutputStruct::BADREQUEST;
    }
  }else{ // if(in_args.isArray()){
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
// ==== SYSADM ZFS API ====
// Handle "sysadm/zfs" requests.
// REQUIRED input key: "action" - NOTE: case-SENSITIVE match (no toLower),
// matching the original API behavior. Supported actions:
//   "list_pools" - zpool listing (result only attached when non-empty)
//   "datasets"   - zfs dataset listing for the supplied arguments
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmZfsRequest(const QJsonValue in_args, QJsonObject *out){
  //Guard: only JSON objects are valid input for this API
  if(!in_args.isObject()){ return RestOutputStruct::BADREQUEST; }
  QJsonObject reqObj = in_args.toObject();
  if(!reqObj.keys().contains("action")){ return RestOutputStruct::BADREQUEST; }
  QString act = JsonValueToString(reqObj.value("action"));
  if(act=="list_pools"){
    QJsonObject pools = sysadm::ZFS::zpool_list();
    //Only attach the result when there is pool information to report
    if(!pools.isEmpty()){ out->insert("list_pools",pools); }
    return RestOutputStruct::OK;
  }
  if(act=="datasets"){
    out->insert("datasets", sysadm::ZFS::zfs_list(reqObj));
    return RestOutputStruct::OK;
  }
  //Unrecognized action
  return RestOutputStruct::BADREQUEST;
}
// ==== SYSADM PKG API ====
// Handle "sysadm/pkg" requests (package management).
// REQUIRED input key: "action" - case-SENSITIVE match.
// Common OPTIONAL keys (parsed up front, used by several actions):
//   "repo"        - repository name (defaults to "local")
//   "category"    - package category filter
//   "pkg_origins" - single origin string or array of origins
// Each action stores its result in "out" under a key equal to the action name;
// listing actions return NOCONTENT when nothing matches.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmPkgRequest(const QJsonValue in_args, QJsonObject *out){
  if(!in_args.isObject() || !in_args.toObject().contains("action") ){ return RestOutputStruct::BADREQUEST; }
  //REQUIRED: "action"
  QString act = in_args.toObject().value("action").toString();
  //OPTIONAL: "repo" (uses local repo database by default)
  QString repo = "local";
  if(in_args.toObject().contains("repo")){ repo = in_args.toObject().value("repo").toString(); }
  //OPTIONAL: "category" (only used if "pkg_origins" is not specified)
  QString cat;
  if(in_args.toObject().contains("category")){ cat = in_args.toObject().value("category").toString(); }
  //OPTIONAL: "pkg_origins" (defaults to everything for listing functions)
  QStringList pkgs;
  if(in_args.toObject().contains("pkg_origins")){
    if(in_args.toObject().value("pkg_origins").isString()){ pkgs << in_args.toObject().value("pkg_origins").toString(); }
    else if(in_args.toObject().value("pkg_origins").isArray()){ pkgs = JsonArrayToStringList(in_args.toObject().value("pkg_origins").toArray()); }
  }
  //Parse the action and perform accordingly
  if(act=="pkg_info"){
    //OPTIONAL: "pkg_origins" OR "category"
    //OPTIONAL: "repo"
    //OPTIONAL: "result" = "full" or "simple" (Default: "simple")
    bool fullresults = false;
    if(in_args.toObject().contains("result")){ fullresults = (in_args.toObject().value("result").toString()=="full"); }
    //Now run the info fetch routine
    QJsonObject info = sysadm::PKG::pkg_info(pkgs, repo, cat, fullresults);
    if(!info.isEmpty()){ out->insert("pkg_info",info); }
    else{ return RestOutputStruct::NOCONTENT; }
  }else if(act=="pkg_search" && in_args.toObject().contains("search_term")){
    //REQUIRED: "search_term" (string to search for)
    //OPTIONAL: "repo"
    //OPTIONAL: "category"
    //OPTIONAL: "search_excludes" (array of string or single string);
    QString srch = in_args.toObject().value("search_term").toString();
    if(srch.isEmpty()){ return RestOutputStruct::BADREQUEST; }
    QStringList exclude;
    if(in_args.toObject().contains("search_excludes")){
      if(in_args.toObject().value("search_excludes").isString()){ exclude << in_args.toObject().value("search_excludes").toString(); }
      else if(in_args.toObject().value("search_excludes").isArray()){ exclude = JsonArrayToStringList( in_args.toObject().value("search_excludes").toArray() ); }
    }
    //NOTE: this local "pkgs" intentionally shadows the outer pkg_origins list
    QStringList pkgs = sysadm::PKG::pkg_search(repo, srch, exclude, cat);
    if(!pkgs.isEmpty()){
      QJsonObject info = sysadm::PKG::pkg_info(pkgs, repo, cat, false); //always do simple results for a search
      info.insert("results_order", QJsonArray::fromStringList(pkgs));
      if(!info.isEmpty()){ out->insert("pkg_search",info); }
    }else{
      return RestOutputStruct::NOCONTENT;
    }
  }else if(act=="list_categories"){
    //OPTIONAL: "repo"
    QJsonArray cats = sysadm::PKG::list_categories(repo);
    if(!cats.isEmpty()){ out->insert("list_categories", cats); }
    else{ return RestOutputStruct::NOCONTENT; }
  }else if(act=="list_repos"){
    QJsonArray repos = sysadm::PKG::list_repos();
    if(!repos.isEmpty()){ out->insert("list_repos", repos); }
    else{ return RestOutputStruct::NOCONTENT; }
  }else if(act=="pkg_install" && !pkgs.isEmpty() ){
    //REQUIRED: "pkg_origins"
    //OPTIONAL: "repo" (pkg will determine the best repo to use if not supplied)
    out->insert("pkg_install", sysadm::PKG::pkg_install(pkgs,repo));
  }else if(act=="pkg_remove" && !pkgs.isEmpty() ){
    //REQUIRED: "pkg_origins"
    //OPTIONAL: "recursive"="true" or "false" (default: "true")
    bool recursive = true;
    if(in_args.toObject().contains("recursive")){ recursive = in_args.toObject().value("recursive").toString()!="false"; }
    out->insert("pkg_remove", sysadm::PKG::pkg_remove(pkgs, recursive));
  }else if(act=="pkg_lock" && !pkgs.isEmpty() ){
    //REQUIRED: "pkg_origins"
    out->insert("pkg_lock", sysadm::PKG::pkg_lock(pkgs));
  }else if(act=="pkg_unlock" && !pkgs.isEmpty() ){
    //REQUIRED: "pkg_origins"
    out->insert("pkg_unlock", sysadm::PKG::pkg_unlock(pkgs));
  }else if(act=="pkg_update"){
    //OPTIONAL: "force" = ["true"/"false"] (default: "false")
    bool force = false;
    if(in_args.toObject().contains("force")){ force = in_args.toObject().value("force").toString()=="true"; }
    out->insert("pkg_update", sysadm::PKG::pkg_update(force));
  }else if(act=="pkg_check_upgrade"){
    out->insert("pkg_check_upgrade", sysadm::PKG::pkg_check_upgrade());
  }else if(act=="pkg_upgrade"){
    out->insert("pkg_upgrade", sysadm::PKG::pkg_upgrade());
  }else if(act=="pkg_audit"){
    out->insert("pkg_audit", sysadm::PKG::pkg_audit());
  }else if(act=="pkg_autoremove"){
    out->insert("pkg_autoremove", sysadm::PKG::pkg_autoremove());
  }else{
    //unknown action
    return RestOutputStruct::BADREQUEST;
  }
  return RestOutputStruct::OK;
}
// ==== SYSADM USER API ====
// Handle "sysadm/users" requests (user/group management + PersonaCrypt).
//   allaccess - whether the requesting user has full-access permissions;
//               required for add/delete/mod of other users and all group edits
//   user      - the name of the currently-authenticated user
//   in_args   - JSON object; REQUIRED key "action" (case-insensitive)
//   out       - output object populated with results or error messages
// Returns OK when the requested operation succeeded, BADREQUEST otherwise.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmUserRequest(bool allaccess, QString user, const QJsonValue in_args, QJsonObject *out){
  bool ok = false;
  //REQUIRED: "action"
  QString action = in_args.toObject().value("action").toString().toLower();
  if(action=="usershow"){
    ok = sysadm::UserManager::listUsers(out, allaccess, user);
  }else if(action=="useradd" && allaccess){ //requires all access to create new users
    ok = sysadm::UserManager::addUser(out, in_args.toObject());
  }else if(action=="userdelete" && allaccess){ //requires all access to remove users
    //REQUIRED: "name"
    //OPTIONAL: "clean_home"="false" (true by default)
    QString deluser = in_args.toObject().value("name").toString();
    if(deluser != user){ //cannot delete the currently-used user
      bool clean = true;
      if(in_args.toObject().contains("clean_home")){ clean = (in_args.toObject().value("clean_home").toString().toLower() != "false"); }
      ok = sysadm::UserManager::removeUser(deluser, clean);
      if(ok){ out->insert("result","success"); }
      else{ out->insert("error","Could not delete user"); }
    }else{
      out->insert("error","Cannot delete the current user");
    }
  }else if(action=="usermod"){
    bool go = true;
    if(!allaccess){
      //ensure that the user being acted on is the current user - otherwise deny access
      go = (in_args.toObject().value("name").toString() == user);
    }
    if(go){ ok = sysadm::UserManager::modifyUser(out, in_args.toObject() ); }
  }else if(action=="groupshow"){
    //Restricted users only see their own group memberships
    ok = sysadm::UserManager::listGroups(out, (allaccess ? "" : user) );
  }else if(action=="groupadd" && allaccess){
    ok = sysadm::UserManager::addGroup(out, in_args.toObject() );
  }else if(action=="groupdelete" && allaccess){
    //REQUIRED: "name"
    QString name = in_args.toObject().value("name").toString();
    if(!name.isEmpty()){
      ok = sysadm::UserManager::removeGroup(name);
    }
    if(ok){ out->insert("result","success"); }
    else{ out->insert("error","Could not delete group"); }
  }else if(action=="groupmod" && allaccess){
    ok = sysadm::UserManager::modifyGroup(out, in_args.toObject() );
  }else if(action=="personacrypt_init"){
    // SECURITY FIX: do NOT log "in_args" here - for this action it contains the
    // user's plaintext "password" field, which must never reach the debug log.
    // Only the (non-sensitive) target user name and permission flags are logged.
    qDebug() << "got PC init request for user:" << in_args.toObject().value("name").toString() << allaccess << user;
    bool go = true;
    if(!allaccess){
      //ensure that the user being acted on is the current user - otherwise deny access
      go = (in_args.toObject().value("name").toString() == user);
    }
    if(go){
      //REQUIRED: "name", "password","device"
      QJsonObject obj = in_args.toObject();
      if(obj.contains("name") && obj.contains("password") && obj.contains("device") ){
        ok = sysadm::UserManager::InitializePersonaCryptDevice(obj.value("name").toString(), obj.value("password").toString(), obj.value("device").toString() );
        if(ok){ out->insert("result","success"); }
        else{ out->insert("error","Could not initialize Personacrypt device"); }
      }
    }
  }else if(action=="personacrypt_listdevs"){
    QStringList devs = sysadm::UserManager::getAvailablePersonaCryptDevices();
    for(int i=0; i<devs.length(); i++){
      out->insert(devs[i].section(":",0,0), devs[i].section(":",1,-1).simplified()); //<device>:<info>
    }
    ok = true;
  }
  return (ok ? RestOutputStruct::OK : RestOutputStruct::BADREQUEST);
}
// SERVICE MANAGER (sysadm/services)
// SERVICE MANAGER handler ("sysadm/services").
// REQUIRED input key: "action" - case-SENSITIVE; one of:
//   list_services / start / stop / restart / enable / disable
// All actions except "list_services" also require a "services" key holding a
// single service name (string) or an array of names; the names that were
// successfully acted on are returned as "services_<action>ed".
// NOTE(review): the five action branches below repeat the identical
// "services" argument parsing; a shared helper would remove the duplication.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmServiceRequest(const QJsonValue in_args, QJsonObject *out){
  bool ok = false;
  QString action = in_args.toObject().value("action").toString();
  sysadm::ServiceManager SMGR;
  if(action=="list_services"){
    QList<sysadm::Service> list = SMGR.GetServices();
    QList<bool> listEnabled = SMGR.isEnabled(list);
    QJsonObject services;
    for(int i=0; i<list.length(); i++){
      QJsonObject S;
      S.insert("name", list[i].Name);
      S.insert("tag", list[i].Tag);
      S.insert("path", list[i].Path);
      S.insert("description", list[i].Description);
      //boolean status flags are serialized as the strings "true"/"false"
      S.insert("is_enabled", listEnabled[i] ? "true" : "false" );
      S.insert("is_running",SMGR.isRunning(list[i]) ? "true" : "false" );
      //S.insert("filename", list[i].Directory);
      //Need to add status info as well (isRunning, isEnabled);
      services.insert(list[i].Name, S);
    }
    ok = true;
    out->insert("services",services);
  }else if(action=="start" && in_args.toObject().contains("services") ){
    QJsonValue sval = in_args.toObject().value("services");
    QStringList services;
    if(sval.isString()){ services << sval.toString(); }
    else if(sval.isArray()){ services = JsonArrayToStringList(sval.toArray()); }
    if(!services.isEmpty()){
      QStringList success;
      ok = true;
      for(int i=0; i<services.length(); i++){
        if( SMGR.Start( SMGR.GetService(services[i]) ) ){ success << services[i]; }
      }
      out->insert("services_started", QJsonArray::fromStringList(success));
    }
  }else if(action=="stop" && in_args.toObject().contains("services") ){
    QJsonValue sval = in_args.toObject().value("services");
    QStringList services;
    if(sval.isString()){ services << sval.toString(); }
    else if(sval.isArray()){ services = JsonArrayToStringList(sval.toArray()); }
    if(!services.isEmpty()){
      QStringList success;
      ok = true;
      for(int i=0; i<services.length(); i++){
        if( SMGR.Stop( SMGR.GetService(services[i]) ) ){ success << services[i]; }
      }
      out->insert("services_stopped", QJsonArray::fromStringList(success));
    }
  }else if(action=="restart" && in_args.toObject().contains("services") ){
    QJsonValue sval = in_args.toObject().value("services");
    QStringList services;
    if(sval.isString()){ services << sval.toString(); }
    else if(sval.isArray()){ services = JsonArrayToStringList(sval.toArray()); }
    if(!services.isEmpty()){
      QStringList success;
      ok = true;
      for(int i=0; i<services.length(); i++){
        if( SMGR.Restart( SMGR.GetService(services[i]) ) ){ success << services[i]; }
      }
      out->insert("services_restarted", QJsonArray::fromStringList(success));
    }
  }else if(action=="enable" && in_args.toObject().contains("services") ){
    QJsonValue sval = in_args.toObject().value("services");
    QStringList services;
    if(sval.isString()){ services << sval.toString(); }
    else if(sval.isArray()){ services = JsonArrayToStringList(sval.toArray()); }
    if(!services.isEmpty()){
      QStringList success;
      ok = true;
      for(int i=0; i<services.length(); i++){
        if( SMGR.Enable( SMGR.GetService(services[i]) ) ){ success << services[i]; }
      }
      out->insert("services_enabled", QJsonArray::fromStringList(success));
    }
  }else if(action=="disable" && in_args.toObject().contains("services") ){
    QJsonValue sval = in_args.toObject().value("services");
    QStringList services;
    if(sval.isString()){ services << sval.toString(); }
    else if(sval.isArray()){ services = JsonArrayToStringList(sval.toArray()); }
    if(!services.isEmpty()){
      QStringList success;
      ok = true;
      for(int i=0; i<services.length(); i++){
        if( SMGR.Disable( SMGR.GetService(services[i]) ) ){ success << services[i]; }
      }
      out->insert("services_disabled", QJsonArray::fromStringList(success));
    }
  }
  //Ensure the reply always carries at least a generic result/error marker
  if(out->keys().isEmpty()){
    if(ok){ out->insert("result","success"); }
    else{ out->insert("error","error"); }
  }
  return (ok ? RestOutputStruct::OK : RestOutputStruct::BADREQUEST);
}
// FIREWALL MANAGER (sysadm/firewall)
// FIREWALL MANAGER handler ("sysadm/firewall").
// REQUIRED input key: "action" - case-SENSITIVE; one of:
//   known_ports / list_open / status / open / close / start / stop /
//   restart / enable / disable / reset-defaults
// "open"/"close" also require a "ports" key: single "<num>/<type>" string or
// an array of them (type must be tcp or udp).
// NOTE(review): the "open" and "close" branches repeat the same port-parsing
// loop; a shared helper would remove the duplication.
RestOutputStruct::ExitCode WebSocket::EvaluateSysadmFirewallRequest(const QJsonValue in_args, QJsonObject *out){
  bool ok = false;
  QString action = in_args.toObject().value("action").toString();
  sysadm::Firewall FMGR;
  //Now perform actions as needed
  if(action=="known_ports"){
    ok = true;
    QList<sysadm::PortInfo> all = FMGR.allPorts(); //this is all known ports (number/type, name, description) - it does not know about open/closed
    for(int i=0; i<all.length(); i++){
      QJsonObject obj;
      obj.insert("name",all[i].Keyword);
      obj.insert("port", QString::number(all[i].Port)+"/"+all[i].Type);
      //Entries that share a keyword with the previous entry may omit the
      //description; fall back to the previous entry's description in that case
      if(all[i].Description.isEmpty() && i>0 && (all[i-1].Keyword == all[i].Keyword) ){
        obj.insert("description", all[i-1].Description);
      }else{
        obj.insert("description", all[i].Description);
      }
      out->insert(obj.value("port").toString(), obj); //use the port number/type as the unique identifier
    }
  }else if(action=="list_open"){
    ok = true;
    QList<sysadm::PortInfo> all = FMGR.OpenPorts(); //this is all ports currently opened
    QStringList oports;
    for(int i=0; i<all.length(); i++){
      oports << QString::number(all[i].Port)+"/"+all[i].Type;
    }
    out->insert("openports", QJsonArray::fromStringList(oports));
  }else if(action=="status"){
    ok = true;
    out->insert("is_running", FMGR.IsRunning() ? "true" : "false" );
    out->insert("is_enabled", FMGR.IsEnabled() ? "true" : "false" );
  }else if(action=="open" && in_args.toObject().contains("ports")){
    //REQUIRED: "ports" = [<num>/<type>, <num2>/<type2>, etc..]
    QJsonValue val = in_args.toObject().value("ports");
    QStringList ports;
    QList<sysadm::PortInfo> P;
    if(val.isString()){ ports << val.toString(); }
    else if(val.isArray()){ ports = JsonArrayToStringList(val.toArray()); }
    for(int i=0; i<ports.length(); i++){
      sysadm::PortInfo info = FMGR.LookUpPort(ports[i].section("/",0,0).toInt(), ports[i].section("/",1,1));
      //Silently skip invalid/unknown port specifications
      if(info.Port<0 || (info.Type!="tcp" && info.Type!="udp") ){ continue; }
      P << info;
    }
    if(!P.isEmpty()){
      ok = true;
      FMGR.OpenPort(P);
    }
  }else if(action=="close" && in_args.toObject().contains("ports")){
    //REQUIRED: "ports" = [<num>/<type>, <num2>/<type2>, etc..]
    QJsonValue val = in_args.toObject().value("ports");
    QStringList ports;
    QList<sysadm::PortInfo> P;
    if(val.isString()){ ports << val.toString(); }
    else if(val.isArray()){ ports = JsonArrayToStringList(val.toArray()); }
    for(int i=0; i<ports.length(); i++){
      sysadm::PortInfo info = FMGR.LookUpPort(ports[i].section("/",0,0).toInt(), ports[i].section("/",1,1));
      //Silently skip invalid/unknown port specifications
      if(info.Port<0 || (info.Type!="tcp" && info.Type!="udp") ){ continue; }
      P << info;
    }
    if(!P.isEmpty()){
      ok = true;
      FMGR.ClosePort(P);
    }
  }else if(action=="start"){
    ok = true;
    FMGR.Start();
  }else if(action=="stop"){
    ok = true;
    FMGR.Stop();
  }else if(action=="restart"){
    ok = true;
    FMGR.Restart();
  }else if(action=="enable"){
    ok = true;
    FMGR.Enable();
  }else if(action=="disable"){
    ok = true;
    FMGR.Disable();
  }else if(action=="reset-defaults"){
    //Unlike the other toggles, this only reports success if the reset worked
    ok = FMGR.RestoreDefaults();
  }
  //Evaluate outputs: always return at least a generic result/error marker
  if(out->keys().isEmpty()){
    if(ok){ out->insert("result","success"); }
    else{ out->insert("error","error"); }
  }
  return (ok ? RestOutputStruct::OK : RestOutputStruct::BADREQUEST);
}
ok = true; |
<|file_name|>Class_5188.java<|end_file_name|><|fim▁begin|>package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
<|fim▁hole|>@Annotation_001
public class Class_5188 {
}<|fim▁end|> | |
<|file_name|>test_common.py<|end_file_name|><|fim▁begin|>##########################################################################
# Copyright (C) 2009 - 2014 Huygens ING & Gerbrandy S.R.L.
#
# This file is part of bioport.
#
# bioport is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/gpl-3.0.html>.
##########################################################################
import os
#import sys
import unittest
import shutil
import datetime
from bioport_repository.common import to_date, format_date
class CommonTestCase(unittest.TestCase):
    """Unit tests for the ``to_date``/``format_date`` helpers."""

    def test_to_date(self):
        # Default rounding: missing components resolve "down" to the
        # earliest possible moment (Jan 1st / first of the month).
        round_down_cases = [
            ('2000', datetime.datetime(2000, 1, 1, 0, 0)),
            ('2000-02', datetime.datetime(2000, 2, 1, 0, 0)),
            ('2000-02-03', datetime.datetime(2000, 2, 3, 0, 0)),
            ('1200', datetime.datetime(1200, 1, 1, 0, 0)),
        ]
        for text, expected in round_down_cases:
            self.assertEqual(to_date(text), expected)
        # round='up' resolves missing components to the latest moment;
        # note 2000 is a leap year (Feb 29) while 2001 is not (Feb 28).
        round_up_cases = [
            ('2001-02', datetime.datetime(2001, 2, 28, 0, 0)),
            ('2000-02', datetime.datetime(2000, 2, 29, 0, 0)),
            ('2000-12', datetime.datetime(2000, 12, 31, 0, 0)),
            ('2000', datetime.datetime(2000, 12, 31, 0, 0)),
            ('0200', datetime.datetime(200, 12, 31, 0, 0)),
        ]
        for text, expected in round_up_cases:
            self.assertEqual(to_date(text, round='up'), expected)

    def test_format_date(self):
        # Years are always rendered zero-padded to four digits.
        format_cases = [
            (datetime.datetime(1700, 3, 2), '1700-03-02 00:00'),
            (datetime.datetime(1, 3, 2), '0001-03-02 00:00'),
        ]
        for value, expected in format_cases:
            self.assertEqual(format_date(value), expected)
def test_suite():
    """Collect every ``test*`` method of CommonTestCase into a suite."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(CommonTestCase, 'test'))
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
<|file_name|>vrrp_common.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import random
<|fim▁hole|>from ryu.base import app_manager
from ryu.lib import hub
from ryu.lib import mac as lib_mac
from ryu.lib.packet import vrrp
from ryu.services.protocols.vrrp import api as vrrp_api
from ryu.services.protocols.vrrp import event as vrrp_event
# Virtual Router ID reserved for the "special" instance pair that is
# configured with the explicitly requested priority; every other VRID
# gets a priority derived from the VRID value itself.
_VRID = 7
# Primary IP addresses assigned to the two interfaces under test.
_PRIMARY_IP_ADDRESS0 = '10.0.0.2'
_PRIMARY_IP_ADDRESS1 = '10.0.0.3'
class VRRPCommon(app_manager.RyuApp):
    """Shared driver for VRRP integration tests (Python 2 / Ryu).

    Concrete subclasses supply the two interface names; this class then
    configures pairs of VRRP router instances on them for many VRIDs and
    drives them through master/backup transitions via ``vrrp_api``.
    """

    # Interface names; to be overridden by concrete test subclasses.
    _IFNAME0 = None
    _IFNAME1 = None

    def __init__(self, *args, **kwargs):
        super(VRRPCommon, self).__init__(*args, **kwargs)

    def _main(self):
        # Exercise both protocol versions, then report completion.
        self._main_version(vrrp.VRRP_VERSION_V3)
        self._main_version(vrrp.VRRP_VERSION_V2)
        print "done!"

    def _main_version(self, vrrp_version):
        # Run the scenario for each interesting priority value:
        # address owner, then max / default / min backup priorities.
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_ADDRESS_OWNER)
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_BACKUP_MAX)
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_BACKUP_DEFAULT)
        self._main_version_priority(vrrp_version,
                                    vrrp.VRRP_PRIORITY_BACKUP_MIN)

    def _main_version_priority(self, vrrp_version, priority):
        # Run once without and once with the state-settling checks/sleeps.
        self._main_version_priority_sleep(vrrp_version, priority, False)
        self._main_version_priority_sleep(vrrp_version, priority, True)

    def _check(self, vrrp_api, instances):
        """Poll until every instance pair has settled into exactly one
        MASTER and one BACKUP, retrying while the state is transient.

        ``instances`` maps vrid -> {0: reply0, 1: reply1} for the two
        members of each virtual-router pair.
        """
        while True:
            # Wait until all instances are registered and none is still
            # in the INITIALIZE state.
            while True:
                rep = vrrp_api.vrrp_list(self)
                if len(rep.instance_list) >= len(instances) * 2:
                    if any(i.state == vrrp_event.VRRP_STATE_INITIALIZE
                           for i in rep.instance_list):
                        continue
                    break
                print len(rep.instance_list), '/', len(instances) * 2
                time.sleep(1)
            # for i in rep.instance_list:
            #     print i.instance_name, i.monitor_name, i.config, \
            #         i.interface, i.state
            assert len(rep.instance_list) == len(instances) * 2
            num_of_master = 0
            # Index replies by instance name for cross-pair lookups below.
            d = dict(((i.instance_name, i) for i in rep.instance_list))
            bad = 0
            for i in rep.instance_list:
                assert i.state in (vrrp_event.VRRP_STATE_MASTER,
                                   vrrp_event.VRRP_STATE_BACKUP)
                if i.state == vrrp_event.VRRP_STATE_MASTER:
                    num_of_master += 1
                vr = instances[i.config.vrid]
                # The lower-priority member of a pair must not be master;
                # if it is, count it as "bad" and retry (may be transient).
                if (vr[0].config.priority > vr[1].config.priority and
                        i.instance_name == vr[1].instance_name) or \
                   (vr[0].config.priority < vr[1].config.priority and
                        i.instance_name == vr[0].instance_name):
                    if i.state == vrrp_event.VRRP_STATE_MASTER:
                        print "bad master:"
                        print d[vr[0].instance_name].state, \
                            d[vr[0].instance_name].config.priority
                        print d[vr[1].instance_name].state, \
                            d[vr[1].instance_name].config.priority
                        bad += 1
                        # assert i.state != vrrp_event.VRRP_STATE_MASTER
            if bad > 0:
                # this could be a transient state
                print bad, "bad masters"
                time.sleep(1)
                continue
            if num_of_master >= len(instances):
                # Exactly one master per pair -> converged; stop polling.
                assert num_of_master == len(instances)
                break
            print num_of_master, '/', len(instances)
            time.sleep(1)
            continue

    def _main_version_priority_sleep(self, vrrp_version, priority, do_sleep):
        """Configure many VRRP pairs, shuffle priorities, then shut all
        instances down, optionally verifying convergence at each stage
        (``do_sleep=True``)."""
        app_mgr = app_manager.AppManager.get_instance()
        self.logger.debug('%s', app_mgr.applications)
        vrrp_mgr = app_mgr.applications['VRRPManager']

        step = 5
        instances = {}
        # Create a pair of instances for every sampled VRID except _VRID;
        # priorities are derived from the VRID (clamped to the valid
        # backup range) so each pair has a deterministic master.
        for vrid in xrange(1, 256, step):
            if vrid == _VRID:
                continue
            print "vrid", vrid
            l = {}
            prio = max(vrrp.VRRP_PRIORITY_BACKUP_MIN,
                       min(vrrp.VRRP_PRIORITY_BACKUP_MAX, vrid))
            rep0 = self._configure_vrrp_router(vrrp_version,
                                               prio,
                                               _PRIMARY_IP_ADDRESS0,
                                               self._IFNAME0,
                                               vrid)
            assert not rep0.instance_name is None
            l[0] = rep0
            prio = max(vrrp.VRRP_PRIORITY_BACKUP_MIN,
                       min(vrrp.VRRP_PRIORITY_BACKUP_MAX, 256 - vrid))
            rep1 = self._configure_vrrp_router(vrrp_version,
                                               prio,
                                               _PRIMARY_IP_ADDRESS1,
                                               self._IFNAME1,
                                               vrid)
            assert not rep1.instance_name is None
            l[1] = rep1
            instances[vrid] = l

        # The _VRID pair uses the explicitly requested priority on side 0
        # and the default backup priority on side 1.
        print "vrid", _VRID
        l = {}
        rep0 = self._configure_vrrp_router(vrrp_version, priority,
                                           _PRIMARY_IP_ADDRESS0,
                                           self._IFNAME0, _VRID)
        assert not rep0.instance_name is None
        l[0] = rep0
        rep1 = self._configure_vrrp_router(
            vrrp_version, vrrp.VRRP_PRIORITY_BACKUP_DEFAULT,
            _PRIMARY_IP_ADDRESS1, self._IFNAME1, _VRID)
        assert not rep1.instance_name is None
        l[1] = rep1
        instances[_VRID] = l

        self.logger.debug('%s', vrrp_mgr._instances)

        if do_sleep:
            print "priority", priority
            print "waiting for instances starting"
            self._check(vrrp_api, instances)

        # Randomly re-assign the priority of one member of each pair and
        # mirror the change into our local bookkeeping.
        for vrid in instances.keys():
            if vrid == _VRID:
                continue
            which = vrid & 1
            new_priority = int(random.uniform(vrrp.VRRP_PRIORITY_BACKUP_MIN,
                                              vrrp.VRRP_PRIORITY_BACKUP_MAX))
            i = instances[vrid][which]
            vrrp_api.vrrp_config_change(self, i.instance_name,
                                        priority=new_priority)
            i.config.priority = new_priority

        if do_sleep:
            print "priority shuffled"
            self._check(vrrp_api, instances)

        # Shut down one member of each pair (plus side 0 of _VRID) ...
        for vrid in instances.keys():
            if vrid == _VRID:
                continue
            which = vrid & 1
            vrrp_api.vrrp_shutdown(self, instances[vrid][which].instance_name)

        vrrp_api.vrrp_shutdown(self, instances[_VRID][0].instance_name)

        if do_sleep:
            print "shutting down instances"
            # ... wait until only one instance per VRID remains ...
            while True:
                rep = vrrp_api.vrrp_list(self)
                if len(rep.instance_list) <= len(instances):
                    break
                print "left", len(rep.instance_list)
                time.sleep(1)
            assert len(rep.instance_list) == len(instances)
            print "waiting for the rest becoming master"
            # ... and every survivor must take over as MASTER.
            while True:
                rep = vrrp_api.vrrp_list(self)
                if all(i.state == vrrp_event.VRRP_STATE_MASTER
                       for i in rep.instance_list):
                    break
                time.sleep(1)

        # Shut down the remaining member of every pair.
        vrrp_api.vrrp_shutdown(self, instances[_VRID][1].instance_name)
        for vrid in instances.keys():
            if vrid == _VRID:
                continue
            which = 1 - (vrid & 1)
            vrrp_api.vrrp_shutdown(self, instances[vrid][which].instance_name)

        print "waiting for instances shutting down"
        while True:
            rep = vrrp_api.vrrp_list(self)
            if not rep.instance_list:
                break
            print "left", len(rep.instance_list)
            time.sleep(1)
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Allows to configure a switch using RPi GPIO."""<|fim▁hole|>import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.components import rpi_gpio
from homeassistant.const import DEVICE_DEFAULT_NAME
from homeassistant.helpers.entity import ToggleEntity
import homeassistant.helpers.config_validation as cv
# Module-level logger for this platform.
_LOGGER = logging.getLogger(__name__)

# Configuration keys.
CONF_PULL_MODE = 'pull_mode'  # NOTE(review): declared but not referenced in this module — confirm
CONF_PORTS = 'ports'
CONF_INVERT_LOGIC = 'invert_logic'

# By default the output is driven HIGH for "on" (no inversion).
DEFAULT_INVERT_LOGIC = False

# Mapping of GPIO port number -> friendly switch name.
_SWITCHES_SCHEMA = vol.Schema({
    cv.positive_int: cv.string,
})

# Platform schema: the port mapping is required, logic inversion optional.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_PORTS): _SWITCHES_SCHEMA,
    vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the Raspberry PI GPIO devices."""
    invert_logic = config.get(CONF_INVERT_LOGIC)
    # One switch entity per configured (port -> name) entry.
    add_entities([
        RPiGPIOSwitch(name, port, invert_logic)
        for port, name in config.get(CONF_PORTS).items()
    ])
class RPiGPIOSwitch(ToggleEntity):
    """Representation of a Raspberry Pi GPIO."""

    def __init__(self, name, port, invert_logic):
        """Initialize the pin."""
        self._name = name or DEVICE_DEFAULT_NAME
        self._port = port
        self._invert_logic = invert_logic
        self._state = False
        rpi_gpio.setup_output(self._port)
        # Drive the pin to its "off" level at startup.
        self._write(False)

    def _write(self, active):
        """Drive the GPIO to the level representing *active*, honoring
        the invert_logic option."""
        rpi_gpio.write_output(self._port, int(active != self._invert_logic))

    @property
    def name(self):
        """Return the name of the switch."""
        return self._name

    @property
    def should_poll(self):
        """No polling needed."""
        return False

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    def turn_on(self, **kwargs):
        """Turn the device on."""
        self._write(True)
        self._state = True
        self.schedule_update_ha_state()

    def turn_off(self, **kwargs):
        """Turn the device off."""
        self._write(False)
        self._state = False
        self.schedule_update_ha_state()
<|file_name|>normalization.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Author
------
Bo Zhang
Email
-----
[email protected]
Created on
----------
- Sat Sep 03 12:00:00 2016
Modifications
-------------
- Sat Sep 03 12:00:00 2016
Aims
----
- normalization
Notes
-----
This is migrated from **SLAM** package
"""
from __future__ import division

import warnings

import numpy as np
from joblib import Parallel, delayed

from .extern.interpolate import SmoothSpline
def normalize_spectrum(wave, flux, norm_range, dwave,
                       p=(1E-6, 1E-6), q=0.5, ivar=None, eps=1e-10,
                       rsv_frac=1.):
    """ A double smooth normalization of a spectrum

    Converted from Chao Liu's normSpectrum.m
    Updated by Bo Zhang

    Parameters
    ----------
    wave: ndarray (n_pix, )
        wavelength array
    flux: ndarray (n_pix, )
        flux array
    norm_range: tuple
        a tuple consisting (wave_start, wave_stop)
    dwave: float
        binning width
    p: tuple of 2 ps
        smoothing parameter between 0 and 1:
        0 -> LS-straight line
        1 -> cubic spline interpolant
    q: float in range of (0, 1)
        percentile of the residuals around which continuum pixels are kept
    ivar: ndarray (n_pix, ) | None
        ivar array, default is None (even weights)
    eps: float
        the ivar threshold (floor value, avoids division by zero)
    rsv_frac: float
        the fraction of pixels reserved in terms of std. default is 1.

    Returns
    -------
    flux_norm: ndarray
        normalized flux
    flux_cont: ndarray
        continuum flux

    Example
    -------
    >>> flux_norm, flux_cont = normalize_spectrum(
    >>>     wave, flux, (4000., 8000.), 100., p=(1E-8, 1E-7), q=0.5,
    >>>     rsv_frac=2.0)
    """
    if ivar is not None:
        # zero out ivar outside the normalization range, then floor it at
        # *eps* so the variance computed below never divides by zero
        ivar = np.where(np.logical_or(wave < norm_range[0],
                                      wave > norm_range[1]), 0, ivar)
        ivar = np.where(ivar <= eps, eps, ivar)
        var = 1. / ivar
    else:
        # default config is even weight
        var = np.ones_like(flux)

    # check q region
    assert 0. < q < 1.

    # number of wavelength bins used to pick continuum pixels
    # (builtin int() replaces the np.int alias removed in numpy >= 1.24)
    n_bin = int(np.fix(np.diff(norm_range) / dwave) + 1)
    wave1 = norm_range[0]

    # SMOOTH 1: first spline through all initially-good pixels.
    # Restored if/else: with ivar given, a good pixel needs positive ivar
    # AND positive flux; without ivar only positive flux is required.
    if ivar is not None:
        ind_good_init = 1. * (ivar > 0.) * (flux > 0.)
    else:
        ind_good_init = 1. * (flux > 0.)
    ind_good_init = ind_good_init.astype(bool)
    flux_smoothed1 = SmoothSpline(wave[ind_good_init], flux[ind_good_init],
                                  p=p[0], var=var[ind_good_init])(wave)
    # residuals against the first smooth
    dflux = flux - flux_smoothed1

    # collecting continuum pixels --> ITERATION 1
    ind_good = np.zeros(wave.shape, dtype=bool)
    for i_bin in range(n_bin):
        ind_bin = np.logical_and(wave > wave1 + (i_bin - 0.5) * dwave,
                                 wave <= wave1 + (i_bin + 0.5) * dwave)
        if np.sum(ind_bin) > 0:
            # median & robust (MAD-like) scatter of the residuals
            bin_median = np.median(dflux[ind_bin])
            # NOTE(review): the scatter is computed over *all* pixels, not
            # just this bin; kept as-is to preserve behavior — confirm.
            bin_std = np.median(np.abs(dflux - bin_median))
            # keep pixels within rsv_frac*std of the bin's q-percentile
            ind_good_ = ind_bin * (
                np.abs(dflux - np.nanpercentile(dflux[ind_bin], q * 100.)) < (
                    rsv_frac * bin_std))
            ind_good = np.logical_or(ind_good, ind_good_)

    ind_good = np.logical_and(ind_good, ind_good_init)

    # if no continuum pixel survived, warn and fall back to all pixels
    # (fixed: the original constructed a Warning instance without raising
    # or emitting it, so the message was silently discarded)
    if np.sum(ind_good) == 0:
        warnings.warn("@Keenan.normalize_spectrum(): unable to find continuum! ")
        ind_good = np.ones(wave.shape, dtype=bool)

    # SMOOTH 2: continuum spline through the surviving pixels only
    flux_smoothed2 = SmoothSpline(
        wave[ind_good], flux[ind_good], p=p[1], var=var[ind_good])(wave)

    # normalized flux
    flux_norm = flux / flux_smoothed2

    return flux_norm, flux_smoothed2
def normalize_spectra_block(wave, flux_block, norm_range, dwave,
                            p=(1E-6, 1E-6), q=0.5, ivar_block=None, eps=1e-10,
                            rsv_frac=3., n_jobs=1, verbose=10):
    """ normalize multiple spectra using the same configuration

    This is specially designed for TheKeenan

    Parameters
    ----------
    wave: ndarray (n_pix, )
        wavelength array shared by all spectra
    flux_block: ndarray (n_obs, n_pix)
        flux array (a single 1-D spectrum is promoted to a 1-row block)
    norm_range: tuple
        a tuple consisting (wave_start, wave_stop)
    dwave: float
        binning width
    p: tuple of 2 ps
        smoothing parameter between 0 and 1:
        0 -> LS-straight line
        1 -> cubic spline interpolant
    q: float in range of (0, 1)
        percentile passed through to normalize_spectrum
    ivar_block: ndarray (n_obs, n_pix) | None
        ivar array, default is None (even weights)
    eps: float
        the ivar threshold
    rsv_frac: float
        the fraction of pixels reserved in terms of std. default is 3.
    n_jobs: int
        number of processes launched by joblib
    verbose: int / bool
        verbose level

    Returns
    -------
    flux_norm_block: ndarray (n_obs, n_pix)
        normalized flux
    flux_cont_block: ndarray (n_obs, n_pix)
        continuum flux
    """
    # promote a single 1-D spectrum to a 1-row block
    # (fixed: the original called reshape() but discarded the result,
    # so 1-D input was never actually promoted)
    if flux_block.ndim == 1:
        flux_block = flux_block.reshape(1, -1)
    if ivar_block is None:
        ivar_block = np.ones_like(flux_block)
    elif ivar_block.ndim == 1:
        # keep ivar aligned with the promoted flux block
        ivar_block = ivar_block.reshape(1, -1)

    n_spec = flux_block.shape[0]
    # normalize each spectrum independently, optionally in parallel
    results = Parallel(n_jobs=n_jobs, verbose=verbose)(
        delayed(normalize_spectrum)(
            wave, flux_block[i], norm_range, dwave, p=p, q=q,
            ivar=ivar_block[i], eps=eps, rsv_frac=rsv_frac)
        for i in range(n_spec))

    # unpack (flux_norm, flux_cont) pairs into two stacked arrays
    flux_norm_block = np.array([result[0] for result in results])
    flux_cont_block = np.array([result[1] for result in results])
    return flux_norm_block, flux_cont_block
def get_stable_pixels(pixel_disp, wave_arm=100, frac=0.20):
    """Flag pixels whose dispersion is low compared to their neighborhood.

    Parameters
    ----------
    pixel_disp: np.ndarray
        dispersion array
    wave_arm: int
        the arm length (half-window) in terms of pixels
    frac: float
        the reserved fraction, between 0.00 and 1.00; a pixel is kept if
        its dispersion is within the lowest *frac* fraction of its window

    Returns
    -------
    ind_stable: np.ndarray of bool
        True for pixels considered stable
    """
    # dtype=bool replaces the np.bool alias removed in numpy >= 1.24
    ind_stable = np.zeros_like(pixel_disp, dtype=bool)
    n_pix = len(ind_stable)
    for i in range(n_pix):
        # clamp the window to the array bounds
        edge_l = max(i - wave_arm, 0)
        edge_r = min(i + wave_arm, n_pix)
        # stable if the pixel's dispersion does not exceed the local
        # frac-percentile of its neighborhood
        if pixel_disp[i] <= \
                np.percentile(pixel_disp[edge_l:edge_r], frac * 100.):
            ind_stable[i] = True
    return ind_stable
# TODO: this is a generalized version
def normalize_spectra(wave_flux_tuple_list, norm_range, dwave,
                      p=(1E-6, 1E-6), q=50, n_jobs=1, verbose=False):
    """ normalize multiple spectra using the same configuration

    Parameters
    ----------
    wave_flux_tuple_list: list[n_obs]
        a list of (wave, flux) tuples, one per spectrum
    norm_range: tuple
        a tuple consisting (wave_start, wave_stop)
    dwave: float
        binning width
    p: tuple of 2 ps
        smoothing parameter between 0 and 1:
        0 -> LS-straight line
        1 -> cubic spline interpolant
    q: float in range of [0, 100]
        percentile
    n_jobs: int
        number of processes launched by joblib
    verbose: int / bool
        verbose level

    Returns
    -------
    flux_norm: ndarray
        normalized flux
    """
    # TODO: generalized normalization over per-spectrum wavelength grids
    # is not implemented yet; this placeholder intentionally does nothing
    # (and therefore returns None).
    pass
def test_normaliza_spectra_block():
    """Smoke test: normalize a block of APOGEE spectra and plot continua.

    NOTE(review): relies on hard-coded local data paths, so it can only
    run on the original author's machine.
    """
    import os
    os.chdir('/pool/projects/TheKeenan/data/TheCannonData')

    from TheCannon import apogee
    import matplotlib.pyplot as plt

    # load the training spectra and (for the I/O check) their labels
    tr_ID, wl, tr_flux, tr_ivar = apogee.load_spectra("example_DR10/Data")
    tr_label = apogee.load_labels("example_DR10/reference_labels.csv")

    # normalize the whole block in parallel
    flux_norm, flux_cont = normalize_spectra_block(
        wl, tr_flux, (15200., 16900.), 30., q=0.9, rsv_frac=0.5,
        p=(1E-10, 1E-10), ivar_block=tr_ivar, n_jobs=10, verbose=10)

    # plot ten spectra with their fitted continua, vertically offset
    fig = plt.figure()
    ax = fig.add_subplot(111)
    for i in range(10, 20):
        offset = i * 0.5
        ax.plot(wl, tr_flux[i] + offset, 'b')
        ax.plot(wl, flux_cont[i] + offset, 'r')
    fig.tight_layout()
    fig.savefig(
        '/pool/projects/TheKeenan/data/TheCannonData/test_norm_spec_1.pdf')
if __name__ == '__main__':
    # run the plotting smoke test when invoked as a script
    test_normaliza_spectra_block()
else: |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from .binary_search_tree import BinarySearchTree |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.