prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from setuptools import setup, find_packages
<|fim▁hole|> os.path.dirname(__file__), filename)).read()
entry_points = """
[zc.buildout]
default = svetlyak40wt.recipe.symlinks:Symlinks
[zc.buildout.uninstall]
default = svetlyak40wt.recipe.symlinks:uninstall_symlinks
"""
setup(
version = '0.2.1',
name = 'svetlyak40wt.recipe.symlinks',
description = 'Simple recipe to collect symbolic links in one directory.',
long_description = read('README.md'),
classifiers = [
'License :: OSI Approved :: BSD License',
'Framework :: Buildout',
'Programming Language :: Python',
],
keywords = 'buildout recipe',
author = 'Alexander Artemenko',
author_email = '[email protected]',
url = 'http://github.com/svetlyak40wt/svetlyak40wt.recipe.symlinks',
license = 'New BSD License',
packages = find_packages(),
namespace_packages = ['svetlyak40wt', 'svetlyak40wt.recipe'],
include_package_data = True,
install_requires = [
'zc.buildout',
'zc.recipe.egg',
'setuptools',
],
zip_safe = False,
entry_points = entry_points,
)<|fim▁end|> | def read(filename):
return open(os.path.join( |
<|file_name|>create.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
// requires
var webcredits = require('../lib/webcredits.js');
var program = require('commander');
/**
* version as a command
*/
function bin(argv) {
// setup config
var config = require('../config/dbconfig.js');
program
.option('-c, --currency <currency>', 'Currency')
.option('-d, --database <database>', 'Database')
.option('-w, --wallet <wallet>', 'Wallet')
.parse(argv);
var defaultCurrency = 'https://w3id.org/cc#bit';
var defaultDatabase = 'webcredits';<|fim▁hole|> config.wallet = program.wallet || config.wallet || defaultWallet;
webcredits.createDB(config, function(err, ret) {
if (err) {
console.error(err);
} else {
console.log(ret);
}
});
}
// If one import this file, this is a module, otherwise a library
if (require.main === module) {
bin(process.argv);
}
module.exports = bin;<|fim▁end|> | var defaultWallet = 'https://localhost/wallet/test#this';
config.currency = program.currency || config.currency || defaultCurrency;
config.database = program.database || config.database || defaultDatabase; |
<|file_name|>game.js<|end_file_name|><|fim▁begin|>import React from 'react'
import { useSelector } from 'react-redux'
import Container from 'react-bootstrap/Container'
import Row from 'react-bootstrap/Row'
import Col from 'react-bootstrap/Col'
import MoveSelector from '../containers/move-selector'
import Footer from '../containers/footer'
import Player from '../containers/player'
import WelcomeDlg from '../containers/welcome-dlg'
import History from '../features/history'
import { GlobalState } from '../reducers/consts'<|fim▁hole|>const Game = () => {
const showWelcome = useSelector(state => state.globalState === GlobalState.New)
return (
<>
<WelcomeDlg show={showWelcome} />
<Container>
<Row>
<Col>
<Player color='white' />
</Col>
<Col>
<Player color='black' right />
</Col>
</Row>
<Row>
<Col className='px-0'>
<MoveSelector />
</Col>
<Col sm='3' className='pr-0 pl-1'>
<History />
</Col>
</Row>
<Row>
<Col className='px-0'>
<Footer />
</Col>
</Row>
</Container>
</>
)
}
export default Game<|fim▁end|> | |
<|file_name|>validator.schema.test.ts<|end_file_name|><|fim▁begin|>import * as fs from 'fs';
import * as mocha from 'mocha'
import {expect} from 'chai'
import {Validator} from '../src/validator/validator'
describe('Validate Schemas', () => {
var validator: Validator;
var _self = this;
before((done) => {
_self.validator = new Validator();
_self.validator.Initialize()
.then(() => done())<|fim▁hole|> .catch((err: any) => done(err));
})
it('BasicValidation', (done) => {
var fileContent = fs.readFileSync('./test/baseFiles/validParam.json', 'utf8').toString();
let paramSchema: any = JSON.parse(fileContent);
_self.validator.validateSchema('./test/baseFiles/validTemplate.json', paramSchema.parameters)
.then((result: Error[]) => {
if(result) expect(result).to.be.empty;
done();
})
.catch((err: any) => done(err));
});
it('Invalid Json', () => {
var errors = _self.validator.validateJson('./test/baseFiles/invalidJson.json');
expect(errors).to.be.not.undefined;
});
});<|fim▁end|> | |
<|file_name|>InputAdornment.js<|end_file_name|><|fim▁begin|>import _extends from "@babel/runtime/helpers/esm/extends";
import _objectWithoutProperties from "@babel/runtime/helpers/esm/objectWithoutProperties";
import * as React from 'react';
import PropTypes from 'prop-types';
import clsx from 'clsx';
import Typography from '../Typography';
import withStyles from '../styles/withStyles';
import FormControlContext, { useFormControl } from '../FormControl/FormControlContext';
export var styles = {
/* Styles applied to the root element. */
root: {
display: 'flex',
height: '0.01em',
// Fix IE 11 flexbox alignment. To remove at some point.
maxHeight: '2em',
alignItems: 'center',
whiteSpace: 'nowrap'
},
/* Styles applied to the root element if `variant="filled"`. */
filled: {
'&$positionStart:not($hiddenLabel)': {
marginTop: 16
}
},
/* Styles applied to the root element if `position="start"`. */
positionStart: {
marginRight: 8
},
/* Styles applied to the root element if `position="end"`. */
positionEnd: {
marginLeft: 8
},
/* Styles applied to the root element if `disablePointerEvents=true`. */
disablePointerEvents: {
pointerEvents: 'none'
},
/* Styles applied if the adornment is used inside <FormControl hiddenLabel />. */
hiddenLabel: {},
/* Styles applied if the adornment is used inside <FormControl margin="dense" />. */
marginDense: {}
};
var InputAdornment = /*#__PURE__*/React.forwardRef(function InputAdornment(props, ref) {
var children = props.children,
classes = props.classes,
className = props.className,
_props$component = props.component,
Component = _props$component === void 0 ? 'div' : _props$component,
_props$disablePointer = props.disablePointerEvents,<|fim▁hole|> disableTypography = _props$disableTypogra === void 0 ? false : _props$disableTypogra,
position = props.position,
variantProp = props.variant,
other = _objectWithoutProperties(props, ["children", "classes", "className", "component", "disablePointerEvents", "disableTypography", "position", "variant"]);
var muiFormControl = useFormControl() || {};
var variant = variantProp;
if (variantProp && muiFormControl.variant) {
if (process.env.NODE_ENV !== 'production') {
if (variantProp === muiFormControl.variant) {
console.error('Material-UI: The `InputAdornment` variant infers the variant prop ' + 'you do not have to provide one.');
}
}
}
if (muiFormControl && !variant) {
variant = muiFormControl.variant;
}
return /*#__PURE__*/React.createElement(FormControlContext.Provider, {
value: null
}, /*#__PURE__*/React.createElement(Component, _extends({
className: clsx(classes.root, className, disablePointerEvents && classes.disablePointerEvents, muiFormControl.hiddenLabel && classes.hiddenLabel, variant === 'filled' && classes.filled, {
'start': classes.positionStart,
'end': classes.positionEnd
}[position], muiFormControl.margin === 'dense' && classes.marginDense),
ref: ref
}, other), typeof children === 'string' && !disableTypography ? /*#__PURE__*/React.createElement(Typography, {
color: "textSecondary"
}, children) : children));
});
process.env.NODE_ENV !== "production" ? InputAdornment.propTypes = {
// ----------------------------- Warning --------------------------------
// | These PropTypes are generated from the TypeScript type definitions |
// | To update them edit the d.ts file and run "yarn proptypes" |
// ----------------------------------------------------------------------
/**
* The content of the component, normally an `IconButton` or string.
*/
children: PropTypes.node,
/**
* Override or extend the styles applied to the component.
*/
classes: PropTypes.object,
/**
* @ignore
*/
className: PropTypes.string,
/**
* The component used for the root node.
* Either a string to use a HTML element or a component.
*/
component: PropTypes.elementType,
/**
* Disable pointer events on the root.
* This allows for the content of the adornment to focus the input on click.
* @default false
*/
disablePointerEvents: PropTypes.bool,
/**
* If children is a string then disable wrapping in a Typography component.
* @default false
*/
disableTypography: PropTypes.bool,
/**
* The position this adornment should appear relative to the `Input`.
*/
position: PropTypes.oneOf(['end', 'start']),
/**
* The variant to use.
* Note: If you are using the `TextField` component or the `FormControl` component
* you do not have to set this manually.
*/
variant: PropTypes.oneOf(['filled', 'outlined', 'standard'])
} : void 0;
export default withStyles(styles, {
name: 'MuiInputAdornment'
})(InputAdornment);<|fim▁end|> | disablePointerEvents = _props$disablePointer === void 0 ? false : _props$disablePointer,
_props$disableTypogra = props.disableTypography, |
<|file_name|>metering_agent.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Author: Sylvain Afchain <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import eventlet
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent import rpc as agent_rpc
from neutron.common import constants as constants
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron import manager
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import loopingcall
from neutron.openstack.common.notifier import api as notifier_api
from neutron.openstack.common import periodic_task
from neutron.openstack.common.rpc import proxy
from neutron.openstack.common import service
from neutron import service as neutron_service
LOG = logging.getLogger(__name__)
class MeteringPluginRpc(proxy.RpcProxy):
BASE_RPC_API_VERSION = '1.0'
def __init__(self, host):
super(MeteringPluginRpc,
self).__init__(topic=topics.METERING_AGENT,
default_version=self.BASE_RPC_API_VERSION)
def _get_sync_data_metering(self, context):
try:
return self.call(context,
self.make_msg('get_sync_data_metering',
host=self.host),
topic=topics.METERING_PLUGIN)
except Exception:
LOG.exception(_("Failed synchronizing routers"))
class MeteringAgent(MeteringPluginRpc, manager.Manager):
Opts = [
cfg.StrOpt('driver',
default='neutron.services.metering.drivers.noop.'
'noop_driver.NoopMeteringDriver',
help=_("Metering driver")),
cfg.IntOpt('measure_interval', default=30,
help=_("Interval between two metering measures")),
cfg.IntOpt('report_interval', default=300,
help=_("Interval between two metering reports")),
]
def __init__(self, host, conf=None):
self.conf = conf or cfg.CONF
self._load_drivers()
self.root_helper = config.get_root_helper(self.conf)
self.context = context.get_admin_context_without_session()
self.metering_info = {}
self.metering_loop = loopingcall.FixedIntervalLoopingCall(
self._metering_loop
)
measure_interval = self.conf.measure_interval
self.last_report = 0
self.metering_loop.start(interval=measure_interval)
self.host = host
self.label_tenant_id = {}
self.routers = {}
self.metering_infos = {}
super(MeteringAgent, self).__init__(host=self.conf.host)
def _load_drivers(self):
"""Loads plugin-driver from configuration."""
LOG.info(_("Loading Metering driver %s"), self.conf.driver)
if not self.conf.driver:
raise SystemExit(_('A metering driver must be specified'))
self.metering_driver = importutils.import_object(
self.conf.driver, self, self.conf)
def _metering_notification(self):
for label_id, info in self.metering_infos.items():
data = {'label_id': label_id,
'tenant_id': self.label_tenant_id.get(label_id),
'pkts': info['pkts'],
'bytes': info['bytes'],
'time': info['time'],
'first_update': info['first_update'],
'last_update': info['last_update'],
'host': self.host}
LOG.debug(_("Send metering report: %s"), data)
notifier_api.notify(self.context,
notifier_api.publisher_id('metering'),
'l3.meter',
notifier_api.CONF.default_notification_level,
data)
info['pkts'] = 0
info['bytes'] = 0
info['time'] = 0
def _purge_metering_info(self):
ts = int(time.time())
report_interval = self.conf.report_interval
for label_id, info in self.metering_info.items():
if info['last_update'] > ts + report_interval:
del self.metering_info[label_id]
def _add_metering_info(self, label_id, pkts, bytes):
ts = int(time.time())
info = self.metering_infos.get(label_id, {'bytes': 0,
'pkts': 0,
'time': 0,
'first_update': ts,
'last_update': ts})
info['bytes'] += bytes
info['pkts'] += pkts
info['time'] += ts - info['last_update']
info['last_update'] = ts
self.metering_infos[label_id] = info
return info
def _add_metering_infos(self):
self.label_tenant_id = {}
for router in self.routers.values():
tenant_id = router['tenant_id']
labels = router.get(constants.METERING_LABEL_KEY, [])
for label in labels:
label_id = label['id']
self.label_tenant_id[label_id] = tenant_id
tenant_id = self.label_tenant_id.get
accs = self._get_traffic_counters(self.context, self.routers.values())
if not accs:
return
for label_id, acc in accs.items():
self._add_metering_info(label_id, acc['pkts'], acc['bytes'])
def _metering_loop(self):
self._add_metering_infos()<|fim▁hole|> report_interval = self.conf.report_interval
if delta > report_interval:
self._metering_notification()
self._purge_metering_info()
self.last_report = ts
@utils.synchronized('metering-agent')
def _invoke_driver(self, context, meterings, func_name):
try:
return getattr(self.metering_driver, func_name)(context, meterings)
except RuntimeError:
LOG.exception(_("Driver %(driver)s does not implement %(func)s"),
{'driver': cfg.CONF.metering_driver,
'func': func_name})
@periodic_task.periodic_task(run_immediately=True)
def _sync_routers_task(self, context):
routers = self._get_sync_data_metering(self.context)
if not routers:
return
self._update_routers(context, routers)
def router_deleted(self, context, router_id):
self._add_metering_infos()
if router_id in self.routers:
del self.routers[router_id]
return self._invoke_driver(context, router_id,
'remove_router')
def routers_updated(self, context, routers=None):
if not routers:
routers = self._get_sync_data_metering(self.context)
if not routers:
return
self._update_routers(context, routers)
def _update_routers(self, context, routers):
for router in routers:
self.routers[router['id']] = router
return self._invoke_driver(context, routers,
'update_routers')
def _get_traffic_counters(self, context, routers):
LOG.debug(_("Get router traffic counters"))
return self._invoke_driver(context, routers, 'get_traffic_counters')
def update_metering_label_rules(self, context, routers):
LOG.debug(_("Update metering rules from agent"))
return self._invoke_driver(context, routers,
'update_metering_label_rules')
def add_metering_label(self, context, routers):
LOG.debug(_("Creating a metering label from agent"))
return self._invoke_driver(context, routers,
'add_metering_label')
def remove_metering_label(self, context, routers):
self._add_metering_infos()
LOG.debug(_("Delete a metering label from agent"))
return self._invoke_driver(context, routers,
'remove_metering_label')
class MeteringAgentWithStateReport(MeteringAgent):
def __init__(self, host, conf=None):
super(MeteringAgentWithStateReport, self).__init__(host=host,
conf=conf)
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.PLUGIN)
self.agent_state = {
'binary': 'neutron-metering-agent',
'host': host,
'topic': topics.METERING_AGENT,
'configurations': {
'metering_driver': self.conf.driver,
'measure_interval':
self.conf.measure_interval,
'report_interval': self.conf.report_interval
},
'start_flag': True,
'agent_type': constants.AGENT_TYPE_METERING}
report_interval = cfg.CONF.AGENT.report_interval
self.use_call = True
if report_interval:
self.heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
self.heartbeat.start(interval=report_interval)
def _report_state(self):
try:
self.state_rpc.report_state(self.context, self.agent_state,
self.use_call)
self.agent_state.pop('start_flag', None)
self.use_call = False
except AttributeError:
# This means the server does not support report_state
LOG.warn(_("Neutron server does not support state report."
" State report for this agent will be disabled."))
self.heartbeat.stop()
return
except Exception:
LOG.exception(_("Failed reporting state!"))
def agent_updated(self, context, payload):
LOG.info(_("agent_updated by server side %s!"), payload)
def main():
eventlet.monkey_patch()
conf = cfg.CONF
conf.register_opts(MeteringAgent.Opts)
config.register_agent_state_opts_helper(conf)
config.register_root_helper(conf)
conf(project='neutron')
config.setup_logging(conf)
server = neutron_service.Service.create(
binary='neutron-metering-agent',
topic=topics.METERING_AGENT,
report_interval=cfg.CONF.AGENT.report_interval,
manager='neutron.services.metering.agents.'
'metering_agent.MeteringAgentWithStateReport')
service.launch(server).wait()<|fim▁end|> |
ts = int(time.time())
delta = ts - self.last_report
|
<|file_name|>beam_job_domain_test.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# Copyright 2021 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for core.domain.beam_job_domain."""
from __future__ import annotations
import datetime
from core import utils
from core.domain import beam_job_domain
from core.jobs.batch_jobs import model_validation_jobs
from core.platform import models
from core.tests import test_utils
MYPY = False
if MYPY: # pragma: no cover
from mypy_imports import beam_job_models
(beam_job_models,) = models.Registry.import_models([models.NAMES.beam_job])
class BeamJobTests(test_utils.TestBase):
NOW = datetime.datetime.utcnow()
def test_usage(self) -> None:
job = beam_job_domain.BeamJob(
model_validation_jobs.AuditAllStorageModelsJob)
self.assertEqual(job.name, 'AuditAllStorageModelsJob')
def test_in_terminal_state(self) -> None:
cancelled_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.CANCELLED.value,
self.NOW, self.NOW, True)
drained_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.DRAINED.value,
self.NOW, self.NOW, True)
updated_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.UPDATED.value,
self.NOW, self.NOW, True)
done_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.DONE.value,
self.NOW, self.NOW, True)
failed_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.FAILED.value,
self.NOW, self.NOW, True)
cancelling_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.CANCELLING.value,
self.NOW, self.NOW, True)
draining_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.DRAINING.value,
self.NOW, self.NOW, True)
pending_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.PENDING.value,
self.NOW, self.NOW, True)
running_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.RUNNING.value,
self.NOW, self.NOW, True)
stopped_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.STOPPED.value,
self.NOW, self.NOW, True)
unknown_beam_job_run = beam_job_domain.BeamJobRun(
'123', 'FooJob', beam_job_models.BeamJobState.UNKNOWN.value,
self.NOW, self.NOW, True)
self.assertTrue(cancelled_beam_job_run.in_terminal_state)<|fim▁hole|> self.assertTrue(drained_beam_job_run.in_terminal_state)
self.assertTrue(updated_beam_job_run.in_terminal_state)
self.assertTrue(done_beam_job_run.in_terminal_state)
self.assertTrue(failed_beam_job_run.in_terminal_state)
self.assertFalse(cancelling_beam_job_run.in_terminal_state)
self.assertFalse(draining_beam_job_run.in_terminal_state)
self.assertFalse(pending_beam_job_run.in_terminal_state)
self.assertFalse(running_beam_job_run.in_terminal_state)
self.assertFalse(stopped_beam_job_run.in_terminal_state)
self.assertFalse(unknown_beam_job_run.in_terminal_state)
def test_to_dict(self) -> None:
job = beam_job_domain.BeamJob(
model_validation_jobs.AuditAllStorageModelsJob)
self.assertEqual(job.to_dict(), {'name': 'AuditAllStorageModelsJob'})
class BeamJobRunTests(test_utils.TestBase):
NOW = datetime.datetime.utcnow()
def test_usage(self) -> None:
run = beam_job_domain.BeamJobRun(
'123', 'FooJob', 'RUNNING', self.NOW, self.NOW, True)
self.assertEqual(run.job_id, '123')
self.assertEqual(run.job_name, 'FooJob')
self.assertEqual(run.job_state, 'RUNNING')
self.assertEqual(run.job_started_on, self.NOW)
self.assertEqual(run.job_updated_on, self.NOW)
self.assertTrue(run.job_is_synchronous)
def test_to_dict(self) -> None:
run = beam_job_domain.BeamJobRun(
'123', 'FooJob', 'RUNNING', self.NOW, self.NOW, True)
self.assertEqual(run.to_dict(), {
'job_id': '123',
'job_name': 'FooJob',
'job_state': 'RUNNING',
'job_started_on_msecs': utils.get_time_in_millisecs(self.NOW),
'job_updated_on_msecs': utils.get_time_in_millisecs(self.NOW),
'job_is_synchronous': True,
})
class AggregateBeamJobRunResultTests(test_utils.TestBase):
def test_usage(self) -> None:
result = beam_job_domain.AggregateBeamJobRunResult('abc', '123')
self.assertEqual(result.stdout, 'abc')
self.assertEqual(result.stderr, '123')
def test_to_dict(self) -> None:
result = beam_job_domain.AggregateBeamJobRunResult('abc', '123')
self.assertEqual(result.to_dict(), {
'stdout': 'abc',
'stderr': '123',
})<|fim▁end|> | |
<|file_name|>Agent.cpp<|end_file_name|><|fim▁begin|>#include "Agent.h"
#include <algorithm>
Agent::Agent()
{
}
Agent::Agent(float _speed, float _health, const glm::vec2 & _startPos,
const GameEngine::GLTexture & _texture, GameEngine::ColorRGBA8 & _color, std::weak_ptr<World> _world) :
m_movementSpeed(_speed), m_health(_health), m_worldPos(_startPos), m_texture(_texture), m_color(_color), m_world(_world)
{
}
Agent::~Agent() {}
void Agent::Draw(GameEngine::SpriteBatch & _batch)
{
const glm::vec4 uvRect(0.0f, 0.0f, 1.0f, 1.0f);
glm::vec4 destRect;
destRect.x = m_worldPos.x; //bottom left world pos
destRect.y = m_worldPos.y; //bottom left world pos
destRect.z = AGENT_DIAMETER; //width
destRect.w = AGENT_DIAMETER; //height
_batch.Draw(destRect, uvRect, m_texture.id, 0.0f, m_color, m_direction);
}
bool Agent::CollideWithLevel()
{
std::vector<glm::vec2> collideTilePositions;
// Check the four corners
// First corner (bottom left)
CheckTilePosition(collideTilePositions, m_worldPos.x, m_worldPos.y);
// Second Corner (bottom right)
CheckTilePosition(collideTilePositions, m_worldPos.x + AGENT_DIAMETER, m_worldPos.y);
// Third Corner (top left)
CheckTilePosition(collideTilePositions, m_worldPos.x, m_worldPos.y + AGENT_DIAMETER);
// Fourth Corner (top right)
CheckTilePosition(collideTilePositions, m_worldPos.x + AGENT_DIAMETER, m_worldPos.y + AGENT_DIAMETER);
// Check if there was no collision
if (collideTilePositions.empty())
{
return false;
}
//Store the world center position of the player to use it for sorting
glm::vec2 localWorld = m_worldPos + glm::vec2(AGENT_RADIUS);
/*sort the tiles to collide based on distance from the center of the player,
so that you collide with the nearest walls first and avoid the getting stuck on walls bug */
std::sort(collideTilePositions.begin(), collideTilePositions.end(), [&localWorld](const glm::vec2& _p1, const glm::vec2& _p2)
{
auto distance1 = glm::distance(localWorld, _p1);
auto distance2 = glm::distance(localWorld, _p2);
return distance1 < distance2;
});
// Do the collision starting from closes tile to collide with to furthest
for (auto& tilePos : collideTilePositions)
{
CollideWithTile(tilePos);
}
<|fim▁hole|>bool Agent::CollideWithAgent(Agent* _agent)
{
// direction vector between the two agents (center of this agent minus center of other agent)
glm::vec2 direction = GetCenterPos() - _agent->GetCenterPos();
// Length of the direction vector
float distance = glm::length(direction);
// Depth of the collision
float collisionDepth = AGENT_DIAMETER - distance;
// If collision depth > 0 then we did collide
if (collisionDepth > 0)
{
// Get the direction times the collision depth so we can push them away from each other
glm::vec2 collisionDepthVec = glm::normalize(direction) * collisionDepth;
// Push them in opposite directions
m_worldPos += collisionDepthVec / 2.0f;
_agent->m_worldPos -= collisionDepthVec / 2.0f;
return true;
}
return false;
}
void Agent::ApplyDamage(float _damage)
{
m_health -= _damage;
}
void Agent::CheckTilePosition(std::vector<glm::vec2>& _collideTilePositions, float _x, float _y)
{
//Get the node/tile at this agent's world pos
std::weak_ptr<Node> node = m_world.lock()->GetWorldGrid().lock()->GetNodeAt(glm::vec2(_x, _y));
//if this is not a walkable tile, then collide with it
if (!node.lock()->walkable)
{
_collideTilePositions.push_back(node.lock()->worldPos);
}
}
void Agent::CollideWithTile(const glm::vec2 & _tilePos)
{
constexpr float TILE_RADIUS = TILE_WIDTH / 2.0f;
// The minimum distance before a collision occurs
constexpr float MIN_DISTANCE = AGENT_RADIUS + TILE_RADIUS;
// Center position of the agent
glm::vec2 centerAgentPos = m_worldPos + glm::vec2(AGENT_RADIUS);
// direction vector from the agent to the tile
glm::vec2 direction = centerAgentPos - _tilePos;
// Get the depth of the collision
float xDepth = MIN_DISTANCE - abs(direction.x);
float yDepth = MIN_DISTANCE - abs(direction.y);
// If both the depths are > 0, then we collided
if (xDepth > 0 && yDepth > 0)
{
// Check which collision depth is less
if (std::max(xDepth, 0.0f) < std::max(yDepth, 0.0f))
{
// X collsion depth is smaller so we push in X direction
if (direction.x < 0)
{
m_worldPos.x -= xDepth;
}
else
{
m_worldPos.x += xDepth;
}
}
else
{
// Y collsion depth is smaller so we push in X direction
if (direction.y < 0)
{
m_worldPos.y -= yDepth;
}
else
{
m_worldPos.y += yDepth;
}
}
}
}<|fim▁end|> | return true;
}
|
<|file_name|>hu.py<|end_file_name|><|fim▁begin|># coding: utf8
{
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN',
'%Y-%m-%d': '%Y.%m.%d.',
'%Y-%m-%d %H:%M:%S': '%Y.%m.%d. %H:%M:%S',
'%s rows deleted': '%s sorok t\xc3\xb6rl\xc5\x91dtek',
'%s rows updated': '%s sorok friss\xc3\xadt\xc5\x91dtek',
'Available databases and tables': 'El\xc3\xa9rhet\xc5\x91 adatb\xc3\xa1zisok \xc3\xa9s t\xc3\xa1bl\xc3\xa1k',
'Cannot be empty': 'Nem lehet \xc3\xbcres',
'Check to delete': 'T\xc3\xb6rl\xc3\xa9shez v\xc3\xa1laszd ki',
'Client IP': 'Client IP',
'Controller': 'Controller',
'Copyright': 'Copyright',
'Current request': 'Jelenlegi lek\xc3\xa9rdez\xc3\xa9s',
'Current response': 'Jelenlegi v\xc3\xa1lasz',
'Current session': 'Jelenlegi folyamat',
'DB Model': 'DB Model',
'Database': 'Adatb\xc3\xa1zis',
'Delete:': 'T\xc3\xb6r\xc3\xb6l:',
'Description': 'Description',
'E-mail': 'E-mail',
'Edit': 'Szerkeszt',
'Edit This App': 'Alkalmaz\xc3\xa1st szerkeszt',
'Edit current record': 'Aktu\xc3\xa1lis bejegyz\xc3\xa9s szerkeszt\xc3\xa9se',
'First name': 'First name',<|fim▁hole|>'Import/Export': 'Import/Export',
'Index': 'Index',
'Internal State': 'Internal State',
'Invalid Query': 'Hib\xc3\xa1s lek\xc3\xa9rdez\xc3\xa9s',
'Invalid email': 'Invalid email',
'Last name': 'Last name',
'Layout': 'Szerkezet',
'Main Menu': 'F\xc5\x91men\xc3\xbc',
'Menu Model': 'Men\xc3\xbc model',
'Name': 'Name',
'New Record': '\xc3\x9aj bejegyz\xc3\xa9s',
'No databases in this application': 'Nincs adatb\xc3\xa1zis ebben az alkalmaz\xc3\xa1sban',
'Origin': 'Origin',
'Password': 'Password',
'Powered by': 'Powered by',
'Query:': 'Lek\xc3\xa9rdez\xc3\xa9s:',
'Record ID': 'Record ID',
'Registration key': 'Registration key',
'Reset Password key': 'Reset Password key',
'Role': 'Role',
'Rows in table': 'Sorok a t\xc3\xa1bl\xc3\xa1ban',
'Rows selected': 'Kiv\xc3\xa1lasztott sorok',
'Stylesheet': 'Stylesheet',
'Sure you want to delete this object?': 'Biztos t\xc3\xb6rli ezt az objektumot?',
'Table name': 'Table name',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.',
'Timestamp': 'Timestamp',
'Update:': 'Friss\xc3\xadt:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.',
'User ID': 'User ID',
'View': 'N\xc3\xa9zet',
'Welcome to web2py': 'Isten hozott a web2py-ban',
'appadmin is disabled because insecure channel': 'az appadmin a biztons\xc3\xa1gtalan csatorna miatt letiltva',
'cache': 'gyors\xc3\xadt\xc3\xb3t\xc3\xa1r',
'change password': 'jelsz\xc3\xb3 megv\xc3\xa1ltoztat\xc3\xa1sa',
'click here for online examples': 'online p\xc3\xa9ld\xc3\xa1k\xc3\xa9rt kattints ide',
'click here for the administrative interface': 'az adminisztr\xc3\xa1ci\xc3\xb3s fel\xc3\xbclet\xc3\xa9rt kattints ide',
'customize me!': 'v\xc3\xa1ltoztass meg!',
'data uploaded': 'adat felt\xc3\xb6ltve',
'database': 'adatb\xc3\xa1zis',
'database %s select': 'adatb\xc3\xa1zis %s kiv\xc3\xa1laszt\xc3\xa1s',
'db': 'db',
'design': 'design',
'done!': 'k\xc3\xa9sz!',
'edit profile': 'profil szerkeszt\xc3\xa9se',
'export as csv file': 'export\xc3\xa1l csv f\xc3\xa1jlba',
'insert new': '\xc3\xbaj beilleszt\xc3\xa9se',
'insert new %s': '\xc3\xbaj beilleszt\xc3\xa9se %s',
'invalid request': 'hib\xc3\xa1s k\xc3\xa9r\xc3\xa9s',
'login': 'bel\xc3\xa9p',
'logout': 'kil\xc3\xa9p',
'lost password': 'elveszett jelsz\xc3\xb3',
'new record inserted': '\xc3\xbaj bejegyz\xc3\xa9s felv\xc3\xa9ve',
'next 100 rows': 'k\xc3\xb6vetkez\xc5\x91 100 sor',
'or import from csv file': 'vagy bet\xc3\xb6lt\xc3\xa9s csv f\xc3\xa1jlb\xc3\xb3l',
'previous 100 rows': 'el\xc5\x91z\xc5\x91 100 sor',
'record': 'bejegyz\xc3\xa9s',
'record does not exist': 'bejegyz\xc3\xa9s nem l\xc3\xa9tezik',
'record id': 'bejegyz\xc3\xa9s id',
'register': 'regisztr\xc3\xa1ci\xc3\xb3',
'selected': 'kiv\xc3\xa1lasztott',
'state': '\xc3\xa1llapot',
'table': 't\xc3\xa1bla',
'unable to parse csv file': 'nem lehet a csv f\xc3\xa1jlt beolvasni',
}<|fim▁end|> | 'Group ID': 'Group ID',
'Hello World': 'Hello Vil\xc3\xa1g', |
<|file_name|>host-template.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core';
import { Config } from '../../config';
@Component({
  selector: 'host-template',
  styles: [`
    textarea {
      width: 400px;
      height: 100px;
    }
  `],
  template: `
    <div *ngIf="hostfile">
      <h3>Hosts file:</h3>
      <input type="checkbox" [(ngModel)]="hostfile.isActive" /><span>Replace host files on all nodes with that template:</span><br />
      <textarea [disabled]="!hostfile.isActive" [(ngModel)]="hostfile.template"></textarea>
    </div>
  `
})
export class HostTemplateComponent {
  // Host-file model bound from the parent component.
  // NOTE(review): presumably { isActive: boolean, template: string } — the
  // template only reads those two fields; confirm against the parent.
  @Input()
  hostfile: any;
}
<|file_name|>transit.rs<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2015, 2016 Saurav Sachidanand
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#![allow(non_snake_case)]
extern crate astro;
use astro::*;
#[test]
#[allow(unused_variables)]
fn time() {
let eq_point1 = coords::EqPoint{
asc: 40.68021_f64.to_radians(),
dec: 18.04761_f64.to_radians()
};
let eq_point2 = coords::EqPoint{
asc: 41.73129_f64.to_radians(),
dec: 18.44092_f64.to_radians()
};
let eq_point3 = coords::EqPoint{
asc: 42.78204_f64.to_radians(),
dec: 18.82742_f64.to_radians()
};
let geograph_point = coords::GeographPoint{
long: 71.0833_f64.to_radians(),
lat: 42.3333_f64.to_radians(),
};
let Theta0 = 177.74208_f64.to_radians();
let deltaT = time::delta_t(1988, 3);
let (h_rise, m_rise, s_rise) = transit::time(
&transit::TransitType::Rise,
&transit::TransitBody::StarOrPlanet,
&geograph_point,
&eq_point1,
&eq_point2,
&eq_point3,
Theta0,
deltaT,<|fim▁hole|> 0.0
);
assert_eq!((h_rise, m_rise), (12, 25));
let (h_transit, m_transit, s_transit) = transit::time(
&transit::TransitType::Transit,
&transit::TransitBody::StarOrPlanet,
&geograph_point,
&eq_point1,
&eq_point2,
&eq_point3,
Theta0,
deltaT,
0.0
);
assert_eq!((h_transit, m_transit), (19, 40));
let (h_set, m_set, s_set) = transit::time(
&transit::TransitType::Set,
&transit::TransitBody::StarOrPlanet,
&geograph_point,
&eq_point1,
&eq_point2,
&eq_point3,
Theta0,
deltaT,
0.0
);
assert_eq!((h_set, m_set), (2, 54));
}<|fim▁end|> | |
<|file_name|>lean.ts<|end_file_name|><|fim▁begin|>/*
* This file is part of CoCalc: Copyright © 2020 Sagemath, Inc.
* License: AGPLv3 s.t. "Commons Clause" – see LICENSE.md for details
*/
import * as CodeMirror from "codemirror";
/*
This is just a first tiny quick step. To finish this:
- Look at https://codemirror.net/demo/simplemode.html for how this works.
- Put everything from https://github.com/leanprover/vscode-lean/blob/master/syntaxes/lean.json in here.
playgroud to see the alternative of negative look ahead in action: https://regex101.com/r/lop9Se/1
*/
// This is redundant with the regexp's below, but we need this to do completions
// before the terms are ever used.
// Keyword list offered by the completer; kept in sync with the keyword
// regexes of the simple mode below.
export const completions: string[] = "import|prelude|theory|definition|def|abbreviation|instance|renaming|hiding|exposing|parameter|parameters|begin|constant|constants|lemma|variable|variables|theorem|example|open|axiom|inductive|coinductive|with|structure|universe|universes|alias|precedence|reserve|postfix|prefix|infix|infixl|infixr|notation|end|using|namespace|section|local|set_option|extends|include|omit|class|classes|instances|raw|run_cmd|print|eval|reduce|check|help|exit|calc|have|this|match|do|suffices|show|by|in|at|let|forall|fun|exists|assume|from|Prop|Type|Sort".split(
  "|"
);
// Sort in place so the completion list is presented alphabetically.
completions.sort();
<|fim▁hole|> start: [
{ regex: /"(?:[^\\]|\\.)*?(?:"|$)/, token: "string" },
{ regex: /\/-/, token: "comment", next: "blockcomment" },
{
regex: /#(print|eval|reduce|check|help|exit)\b/,
token: "variable-3",
},
{ regex: /--.*/, token: "comment" },
{ regex: /[-+\/*=<>!]+/, token: "operator" },
{
regex: /((?!\.).{1}|^)\b(import|prelude|theory|definition|def|abbreviation|instance|renaming|hiding|exposing|parameter|parameters|begin|constant|constants|lemma|variable|variables|theorem|example|open|axiom|inductive|coinductive|with|structure|universe|universes|alias|precedence|reserve|postfix|prefix|infix|infixl|infixr|notation|end|using|namespace|section|local|set_option|extends|include|omit|class|classes|instances|raw|run_cmd)\b/,
token: "keyword",
},
{
regex: /((?!\.).{1}|^)\b(calc|have|this|match|do|suffices|show|by|in|at|let|forall|fun|exists|assume|from)\b/,
token: "variable-2",
},
{
regex: /\b(Prop|Type|Sort)\b/,
token: "atom",
},
{
regex: /0x[a-f\d]+|[-+]?(?:\.\d+|\d+\.?\d*)(?:e[-+]?\d+)?/i,
token: "number",
},
{ regex: /\/-.*?-\//, token: "comment" },
{ regex: /begin/, indent: true },
{ regex: /end/, dedent: true },
{ regex: /[a-z$][\w$]*/, token: "variable" },
{ regex: /b?"/, token: "string", next: "string" },
],
string: [
{ regex: /"/, token: "string", next: "start" },
{ regex: /(?:[^\\"]|\\(?:.|$))*/, token: "string" },
],
blockcomment: [
{ regex: /.*?-\//, token: "comment", next: "start" },
{ regex: /.*/, token: "comment" },
],
meta: {
dontIndentStates: ["comment"],
lineComment: "--",
},
});<|fim▁end|> | (CodeMirror as any).defineSimpleMode("lean", { |
<|file_name|>no-eq-branch-fail.rs<|end_file_name|><|fim▁begin|>#![warn(indirect_structural_match)]
// Deliberately has no PartialEq/Eq derives, so `Foo` cannot derive them either.
struct NoEq;
// Structurally comparable, but does not (and cannot, because of `NoEq`)
// implement `PartialEq`.
enum Foo {
    Bar,
    Baz,
    Qux(NoEq),
}
// Even though any of these values can be compared structurally, we still disallow it in a pattern
// because `Foo` does not impl `PartialEq`.
// (The const-evaluated branch always yields `Foo::Baz`; the `if` only exists
// so the constant is not a trivially-known literal.)
const BAR_BAZ: Foo = if 42 == 42 {
    Foo::Baz
} else {
    Foo::Bar
};
fn main() {
    match Foo::Qux(NoEq) {
        // Using a constant of a non-PartialEq type as a pattern must be
        // rejected by the compiler (see expected-error annotation below).
        BAR_BAZ => panic!(),
        //~^ ERROR must be annotated with `#[derive(PartialEq, Eq)]`
        _ => {}
    }
}
<|file_name|>task_02.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Performing mathematical calculations """
WEEKS = (((19 % 10) + 100) + (2 ** 8)) / 7<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fnmatch
import os
import re
import sys
from setuptools import find_packages, setup, Command
from setuptools.command.install import install as InstallCommandBase
from setuptools.dist import Distribution
# This version string is semver compatible, but incompatible with pip.
# For pip, we will remove all '-' characters from this string, and use the
# result for pip.
_VERSION = '1.8.0'

# Runtime dependencies installed alongside the wheel; extended below based on
# the interpreter version, byte order and the project name being built.
REQUIRED_PACKAGES = [
    'absl-py >= 0.1.6',
    'astor >= 0.6.0',
    'gast >= 0.2.0',
    'numpy >= 1.13.3',
    'six >= 1.10.0',
    'protobuf >= 3.4.0',
    'tensorboard >= 1.8.0, < 1.9.0',
    'termcolor >= 1.1.0',
]

if sys.byteorder == 'little':
  # grpcio does not build correctly on big-endian machines due to lack of
  # BoringSSL support.
  # See https://github.com/tensorflow/tensorflow/issues/17882.
  REQUIRED_PACKAGES.append('grpcio >= 1.8.6')

# The package name may be overridden on the command line with
# `--project_name <name>` (e.g. tf-nightly builds); the flag and its value
# are stripped from sys.argv so setuptools never sees them.
project_name = 'tensorflow'
if '--project_name' in sys.argv:
  project_name_idx = sys.argv.index('--project_name')
  project_name = sys.argv[project_name_idx + 1]
  sys.argv.remove('--project_name')
  sys.argv.pop(project_name_idx)

# python3 requires wheel 0.26
if sys.version_info.major == 3:
  REQUIRED_PACKAGES.append('wheel >= 0.26')
else:
  REQUIRED_PACKAGES.append('wheel')
  # mock comes with unittest.mock for python3, need to install for python2
  REQUIRED_PACKAGES.append('mock >= 2.0.0')

# tf-nightly should depend on tb-nightly
if 'tf_nightly' in project_name:
  for i, pkg in enumerate(REQUIRED_PACKAGES):
    if 'tensorboard' in pkg:
      REQUIRED_PACKAGES[i] = 'tb-nightly >= 1.8.0a0, < 1.9.0a0'
      break

# weakref.finalize and enum were introduced in Python 3.4
if sys.version_info < (3, 4):
  REQUIRED_PACKAGES.append('backports.weakref >= 1.0rc1')
  REQUIRED_PACKAGES.append('enum34 >= 1.1.6')
# pylint: disable=line-too-long
# Entry points exposed as command-line executables by the wheel.
CONSOLE_SCRIPTS = [
    'freeze_graph = tensorflow.python.tools.freeze_graph:run_main',
    'toco_from_protos = tensorflow.contrib.lite.toco.python.toco_from_protos:main',
    'toco = tensorflow.contrib.lite.toco.python.toco_wrapper:main',
    'saved_model_cli = tensorflow.python.tools.saved_model_cli:main',
    # We need to keep the TensorBoard command, even though the console script
    # is now declared by the tensorboard pip package. If we remove the
    # TensorBoard command, pip will inappropriately remove it during install,
    # even though the command is not removed, just moved to a different wheel.
    'tensorboard = tensorboard.main:run_main',
]
# pylint: enable=line-too-long

# remove the tensorboard console script if building tf_nightly
if 'tf_nightly' in project_name:
  CONSOLE_SCRIPTS.remove('tensorboard = tensorboard.main:run_main')

# Extra dependencies needed only to run the test suite.
TEST_PACKAGES = [
    'scipy >= 0.15.1',
]
class BinaryDistribution(Distribution):
  """Distribution that always reports platform-specific extension modules.

  Forces setuptools to build a platform wheel (not "pure"), since the
  package ships a precompiled native extension.
  """

  def has_ext_modules(self):
    return True
class InstallCommand(InstallCommandBase):
  """Override the dir where the headers go."""

  def finalize_options(self):
    ret = InstallCommandBase.finalize_options(self)
    # Redirect header installation into the package itself so that
    # `-I <site-packages>/tensorflow/include` works after install.
    self.install_headers = os.path.join(self.install_purelib,
                                        'tensorflow', 'include')
    return ret
class InstallHeaders(Command):
  """Override how headers are copied.

  The install_headers that comes with setuptools copies all files to
  the same directory. But we need the files to be in a specific directory
  hierarchy for -I <include_dir> to work correctly.
  """
  description = 'install C/C++ header files'

  user_options = [('install-dir=', 'd',
                   'directory to install header files to'),
                  ('force', 'f',
                   'force installation (overwrite existing files)'),
                 ]

  boolean_options = ['force']

  def initialize_options(self):
    self.install_dir = None
    self.force = 0
    self.outfiles = []

  def finalize_options(self):
    # Inherit destination and --force from the top-level `install` command.
    self.set_undefined_options('install',
                               ('install_headers', 'install_dir'),
                               ('force', 'force'))

  def mkdir_and_copy_file(self, header):
    install_dir = os.path.join(self.install_dir, os.path.dirname(header))
    # Get rid of some extra intervening directories so we can have fewer
    # directories for -I
    install_dir = re.sub('/google/protobuf_archive/src', '', install_dir)

    # Copy eigen code into tensorflow/include.
    # A symlink would do, but the wheel file that gets created ignores
    # symlink within the directory hierarchy.
    # NOTE(keveman): Figure out how to customize bdist_wheel package so
    # we can do the symlink.
    if 'external/eigen_archive/' in install_dir:
      extra_dir = install_dir.replace('external/eigen_archive', '')
      if not os.path.exists(extra_dir):
        self.mkpath(extra_dir)
      self.copy_file(header, extra_dir)

    if not os.path.exists(install_dir):
      self.mkpath(install_dir)
    return self.copy_file(header, install_dir)

  def run(self):
    # Copy every declared header, remembering outputs for `get_outputs`.
    hdrs = self.distribution.headers
    if not hdrs:
      return

    self.mkpath(self.install_dir)
    for header in hdrs:
      (out, _) = self.mkdir_and_copy_file(header)
      self.outfiles.append(out)

  def get_inputs(self):
    return self.distribution.headers or []

  def get_outputs(self):
    return self.outfiles
def find_files(pattern, root):
  """Yield the path of every file under *root* whose name matches *pattern*.

  Args:
    pattern: fnmatch-style glob applied to file basenames (e.g. '*.h').
    root: directory to walk recursively.

  Yields:
    Paths (rooted at *root*) of the matching files.
  """
  for dirpath, _, filenames in os.walk(root):
    for name in fnmatch.filter(filenames, pattern):
      yield os.path.join(dirpath, name)
# Non-Python payload shipped inside the wheel: everything under external/
# plus any generated _solib_* shared-object directories, expressed relative
# to the package root ('../' prefix).
matches = ['../' + x for x in find_files('*', 'external') if '.py' not in x]

so_lib_paths = [
    i for i in os.listdir('.')
    if os.path.isdir(i) and fnmatch.fnmatch(i, '_solib_*')
]

for path in so_lib_paths:
  matches.extend(
      ['../' + x for x in find_files('*', path) if '.py' not in x]
  )

# The native pywrap extension has a platform-dependent suffix.
if os.name == 'nt':
  EXTENSION_NAME = 'python/_pywrap_tensorflow_internal.pyd'
else:
  EXTENSION_NAME = 'python/_pywrap_tensorflow_internal.so'

# C/C++ headers installed under tensorflow/include (see InstallHeaders).
headers = (list(find_files('*.h', 'tensorflow/core')) +
           list(find_files('*.h', 'tensorflow/stream_executor')) +
           list(find_files('*.h', 'google/protobuf_archive/src')) +
           list(find_files('*', 'third_party/eigen3')) +
           list(find_files('*', 'external/eigen_archive')))
# Top-level packaging call; all configuration above feeds into this.
setup(
    name=project_name,
    # pip rejects semver pre-release dashes, so strip them for the wheel.
    version=_VERSION.replace('-', ''),
    description='TensorFlow helps the tensors flow',
    long_description='',
    url='https://www.tensorflow.org/',
    author='Google Inc.',
    author_email='[email protected]',
    # Contained modules and scripts.
    packages=find_packages(),
    entry_points={
        'console_scripts': CONSOLE_SCRIPTS,
    },
    headers=headers,
    install_requires=REQUIRED_PACKAGES,
    tests_require=REQUIRED_PACKAGES + TEST_PACKAGES,
    # Add in any packaged data.
    include_package_data=True,
    package_data={
        'tensorflow': [
            EXTENSION_NAME,
        ] + matches,
    },
    zip_safe=False,
    distclass=BinaryDistribution,
    cmdclass={
        'install_headers': InstallHeaders,
        'install': InstallCommand,
    },
    # PyPI package information.
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: Apache Software License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    license='Apache 2.0',
    keywords='tensorflow tensor machine learning',)
<|file_name|>player.py<|end_file_name|><|fim▁begin|>from . import Event
class PlayerEvent(Event):
    """Base class for events concerning a single player."""

    def setup(self):
        # Usernames arrive as text; downstream consumers expect bytes.
        self.username = self.username.encode('ascii')
#Raised in manager
class PlayerJoin(PlayerEvent):
    """A player connected to the server."""
    username = Event.Arg(required=True)
    ip = Event.Arg(required=True)
class PlayerQuit(PlayerEvent):
    """A player disconnected, with the server-supplied reason."""
    username = Event.Arg(required=True)
    reason = Event.Arg(required=True)
class PlayerChat(PlayerEvent):
    """A player sent a chat message."""
    username = Event.Arg(required=True)
    message = Event.Arg(required=True)
class PlayerDeath(PlayerEvent):
    """A player died; `text` holds the rendered death message."""
    text = Event.Arg()
    username = Event.Arg(required=True)
    cause = Event.Arg(required=True)
    killer = Event.Arg()
    weapon = Event.Arg()
    format = Event.Arg(default="{username} died")

    def get_text(self, **kw):
        """Render `format` with username/killer/weapon, letting keyword
        arguments override any of those fields."""
        d = dict(((k, getattr(self, k)) for k in ('username', 'killer', 'weapon')))
        d.update(kw)
        return self.format.format(**d)

    def setup(self):
        # NOTE(review): overrides PlayerEvent.setup without calling it, so
        # username is NOT ascii-encoded here — confirm this is intentional.
        self.text = self.get_text()
<|file_name|>DeviceActivity.java<|end_file_name|><|fim▁begin|>package cn.edu.siso.rlxapf;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
public class DeviceActivity extends AppCompatActivity {

    // Button that confirms the device preferences and moves on to MainActivity.
    private Button devicePrefOk = null;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_device);

        devicePrefOk = (Button) findViewById(R.id.device_pref_ok);
        devicePrefOk.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Launch the main screen and finish so Back does not return here.
                Intent intent = new Intent(DeviceActivity.this, MainActivity.class);
                startActivity(intent);
                DeviceActivity.this.finish();
            }
        });

        // Embed the preferences fragment into this activity's layout.
        getSupportFragmentManager().beginTransaction().replace(
                R.id.device_pref, new DevicePrefFragment()).commit();
    }
}
<|file_name|>test_wait4.py<|end_file_name|><|fim▁begin|>"""This test checks for correct wait4() behavior.
"""
import os
import time
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children, get_attribute
# If either of these do not exist, skip this test.
# (get_attribute raises unittest.SkipTest when the platform lacks the attr.)
get_attribute(os, 'fork')
get_attribute(os, 'wait4')
class Wait4Test(ForkWait):
    """Fork a child (via ForkWait) and reap it with os.wait4, checking the
    returned pid, exit status and resource-usage record."""

    def wait_impl(self, cpid):
        # Poll non-blockingly up to ~10 seconds for the child to exit.
        for i in range(10):
            # wait4() shouldn't hang, but some of the buildbots seem to hang
            # in the forking tests. This is an attempt to fix the problem.
            spid, status, rusage = os.wait4(cpid, os.WNOHANG)
            if spid == cpid:
                break
            time.sleep(1.0)
        self.assertEqual(spid, cpid)
        self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
        self.assertTrue(rusage)
def test_main():
    run_unittest(Wait4Test)
    # Clean up any stray child processes left by the forking test.
    reap_children()

if __name__ == "__main__":
    test_main()
<|file_name|>UserController.js<|end_file_name|><|fim▁begin|>(function(){
angular
.module('users')
.controller('UserController', [
'userService', '$mdSidenav', '$mdBottomSheet', '$log', '$q',
UserController
]);<|fim▁hole|> * Main Controller for the Angular Material Starter App
* @param $scope
* @param $mdSidenav
* @param avatarsService
* @constructor
*/
function UserController( userService, $mdSidenav, $mdBottomSheet, $log, $q) {
var self = this;
self.selected = null;
self.users = [ ];
self.selectUser = selectUser;
self.toggleList = toggleUsersList;
self.share = share;
// Load all registered users
userService
.loadAllUsers()
.then( function( users ) {
self.users = [].concat(users);
self.selected = users[0];
});
// *********************************
// Internal methods
// *********************************
/**
* First hide the bottomsheet IF visible, then
* hide or Show the 'left' sideNav area
*/
function toggleUsersList() {
var pending = $mdBottomSheet.hide() || $q.when(true);
pending.then(function(){
$mdSidenav('left').toggle();
});
}
/**
* Select the current avatars
* @param menuId
*/
function selectUser ( user ) {
self.selected = angular.isNumber(user) ? $scope.users[user] : user;
self.toggleList();
}
/**
* Show the bottom sheet
*/
function share($event) {
var user = self.selected;
$mdBottomSheet.show({
parent: angular.element(document.getElementById('content')),
templateUrl: '/src/users/view/contactSheet.html',
controller: [ '$mdBottomSheet', UserSheetController],
controllerAs: "vm",
bindToController : true,
targetEvent: $event
}).then(function(clickedItem) {
clickedItem && $log.debug( clickedItem.name + ' clicked!');
});
/**
* Bottom Sheet controller for the Avatar Actions
*/
function UserSheetController( $mdBottomSheet ) {
this.user = user;
this.items = [
{ name: 'Github' , icon: 'github' , icon_url: 'assets/svg/github.svg', urlPath: "https://github.com/hassanabidpk/"},
{ name: 'Twitter' , icon: 'twitter' , icon_url: 'assets/svg/twitter.svg', urlPath: "https://twitter.com/hassanabidpk"},
{ name: 'Google+' , icon: 'google_plus' , icon_url: 'assets/svg/google_plus.svg', urlPath: "https://plus.google.com/+HassanAbid/"},
{ name: 'Linkedin' , icon: 'linkedin' , icon_url: 'assets/svg/linkedin.svg', urlPath: "https://kr.linkedin.com/pub/hassan-abid/12/700/66b"}
];
this.performAction = function(action) {
window.location.href = action.urlPath;
$mdBottomSheet.hide(action);
};
}
}
}
})();<|fim▁end|> |
/** |
<|file_name|>CGRTTI.cpp<|end_file_name|><|fim▁begin|>//===--- CGCXXRTTI.cpp - Emit LLVM Code for C++ RTTI descriptors ----------===//
//
// The LLVM Compiler Infrastructure<|fim▁hole|>//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of RTTI descriptors.
//
//===----------------------------------------------------------------------===//
#include "CodeGenModule.h"
#include "CGCXXABI.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/Type.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "CGObjCRuntime.h"
using namespace clang;
using namespace CodeGen;
namespace {
/// RTTIBuilder - Assembles the fields of an Itanium-ABI RTTI descriptor
/// (std::type_info derivative) for a single QualType.
class RTTIBuilder {
  CodeGenModule &CGM;  // Per-module state.
  llvm::LLVMContext &VMContext;

  /// Fields - The fields of the RTTI descriptor currently being built.
  SmallVector<llvm::Constant *, 16> Fields;

  /// GetAddrOfTypeName - Returns the mangled type name of the given type.
  llvm::GlobalVariable *
  GetAddrOfTypeName(QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage);

  /// GetAddrOfExternalRTTIDescriptor - Returns the constant for the RTTI
  /// descriptor of the given type.
  llvm::Constant *GetAddrOfExternalRTTIDescriptor(QualType Ty);

  /// BuildVTablePointer - Build the vtable pointer for the given type.
  void BuildVTablePointer(const Type *Ty);

  /// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
  /// inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
  void BuildSIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
  /// classes with bases that do not satisfy the abi::__si_class_type_info
  /// constraints, according ti the Itanium C++ ABI, 2.9.5p5c.
  void BuildVMIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct, used
  /// for pointer types.
  void BuildPointerTypeInfo(QualType PointeeTy);

  /// BuildObjCObjectTypeInfo - Build the appropriate kind of
  /// type_info for an object type.
  void BuildObjCObjectTypeInfo(const ObjCObjectType *Ty);

  /// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
  /// struct, used for member pointer types.
  void BuildPointerToMemberTypeInfo(const MemberPointerType *Ty);

public:
  RTTIBuilder(CodeGenModule &CGM) : CGM(CGM),
    VMContext(CGM.getModule().getContext()) { }

  // Pointer type info flags.
  enum {
    /// PTI_Const - Type has const qualifier.
    PTI_Const = 0x1,

    /// PTI_Volatile - Type has volatile qualifier.
    PTI_Volatile = 0x2,

    /// PTI_Restrict - Type has restrict qualifier.
    PTI_Restrict = 0x4,

    /// PTI_Incomplete - Type is incomplete.
    PTI_Incomplete = 0x8,

    /// PTI_ContainingClassIncomplete - Containing class is incomplete.
    /// (in pointer to member).
    PTI_ContainingClassIncomplete = 0x10
  };

  // VMI type info flags.
  enum {
    /// VMI_NonDiamondRepeat - Class has non-diamond repeated inheritance.
    VMI_NonDiamondRepeat = 0x1,

    /// VMI_DiamondShaped - Class is diamond shaped.
    VMI_DiamondShaped = 0x2
  };

  // Base class type info flags.
  enum {
    /// BCTI_Virtual - Base class is virtual.
    BCTI_Virtual = 0x1,

    /// BCTI_Public - Base class is public.
    BCTI_Public = 0x2
  };

  /// BuildTypeInfo - Build the RTTI type info struct for the given type.
  ///
  /// \param Force - true to force the creation of this RTTI value
  llvm::Constant *BuildTypeInfo(QualType Ty, bool Force = false);
};
}
/// GetAddrOfTypeName - Emit (or replace) the _ZTS* global holding the
/// mangled name string for Ty, with the given linkage, and return it.
llvm::GlobalVariable *
RTTIBuilder::GetAddrOfTypeName(QualType Ty,
                               llvm::GlobalVariable::LinkageTypes Linkage) {
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  // We know that the mangled name of the type starts at index 4 of the
  // mangled name of the typename, so we can just index into it in order to
  // get the mangled name of the type.
  llvm::Constant *Init = llvm::ConstantDataArray::getString(VMContext,
                                                            Name.substr(4));

  llvm::GlobalVariable *GV =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, Init->getType(), Linkage);
  GV->setInitializer(Init);
  return GV;
}
/// GetAddrOfExternalRTTIDescriptor - Return an i8* reference to the _ZTI*
/// type_info object for Ty, declaring it as an external global if this
/// translation unit does not already have one.
llvm::Constant *RTTIBuilder::GetAddrOfExternalRTTIDescriptor(QualType Ty) {
  // Mangle the RTTI name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  // Look for an existing global.
  llvm::GlobalVariable *GV = CGM.getModule().getNamedGlobal(Name);

  if (!GV) {
    // Create a new global variable.
    GV = new llvm::GlobalVariable(CGM.getModule(), CGM.Int8PtrTy,
                                  /*Constant=*/true,
                                  llvm::GlobalValue::ExternalLinkage, 0, Name);
  }

  return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}
/// TypeInfoIsInStandardLibrary - Given a builtin type, returns whether the type
/// info for that type is defined in the standard library.
static bool TypeInfoIsInStandardLibrary(const BuiltinType *Ty) {
  // Itanium C++ ABI 2.9.2:
  //   Basic type information (e.g. for "int", "bool", etc.) will be kept in
  //   the run-time support library. Specifically, the run-time support
  //   library should contain type_info objects for the types X, X* and
  //   X const*, for every X in: void, std::nullptr_t, bool, wchar_t, char,
  //   unsigned char, signed char, short, unsigned short, int, unsigned int,
  //   long, unsigned long, long long, unsigned long long, float, double,
  //   long double, char16_t, char32_t, and the IEEE 754r decimal and
  //   half-precision floating point types.
  switch (Ty->getKind()) {
    case BuiltinType::Void:
    case BuiltinType::NullPtr:
    case BuiltinType::Bool:
    case BuiltinType::WChar_S:
    case BuiltinType::WChar_U:
    case BuiltinType::Char_U:
    case BuiltinType::Char_S:
    case BuiltinType::UChar:
    case BuiltinType::SChar:
    case BuiltinType::Short:
    case BuiltinType::UShort:
    case BuiltinType::Int:
    case BuiltinType::UInt:
    case BuiltinType::Long:
    case BuiltinType::ULong:
    case BuiltinType::LongLong:
    case BuiltinType::ULongLong:
    case BuiltinType::Half:
    case BuiltinType::Float:
    case BuiltinType::Double:
    case BuiltinType::LongDouble:
    case BuiltinType::Char16:
    case BuiltinType::Char32:
    case BuiltinType::Int128:
    case BuiltinType::UInt128:
      return true;

    case BuiltinType::Dependent:
#define BUILTIN_TYPE(Id, SingletonId)
#define PLACEHOLDER_TYPE(Id, SingletonId) \
    case BuiltinType::Id:
#include "clang/AST/BuiltinTypes.def"
      llvm_unreachable("asking for RRTI for a placeholder type!");

    case BuiltinType::ObjCId:
    case BuiltinType::ObjCClass:
    case BuiltinType::ObjCSel:
      llvm_unreachable("FIXME: Objective-C types are unsupported!");
  }

  llvm_unreachable("Invalid BuiltinType Kind!");
}
static bool TypeInfoIsInStandardLibrary(const PointerType *PointerTy) {
QualType PointeeTy = PointerTy->getPointeeType();
const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(PointeeTy);
if (!BuiltinTy)
return false;
// Check the qualifiers.
Qualifiers Quals = PointeeTy.getQualifiers();
Quals.removeConst();
if (!Quals.empty())
return false;
return TypeInfoIsInStandardLibrary(BuiltinTy);
}
/// IsStandardLibraryRTTIDescriptor - Returns whether the type
/// information for the given type exists in the standard library.
static bool IsStandardLibraryRTTIDescriptor(QualType Ty) {
  // Builtin types, and certain pointers to builtin types, have their
  // type_info objects supplied by the C++ runtime support library.
  if (const BuiltinType *BT = dyn_cast<BuiltinType>(Ty))
    return TypeInfoIsInStandardLibrary(BT);

  if (const PointerType *PT = dyn_cast<PointerType>(Ty))
    return TypeInfoIsInStandardLibrary(PT);

  return false;
}
/// ShouldUseExternalRTTIDescriptor - Returns whether the type information for
/// the given type exists somewhere else, and that we should not emit the type
/// information in this translation unit.  Assumes that it is not a
/// standard-library type.
static bool ShouldUseExternalRTTIDescriptor(CodeGenModule &CGM, QualType Ty) {
  ASTContext &Context = CGM.getContext();

  // If RTTI is disabled, don't consider key functions.
  if (!Context.getLangOpts().RTTI) return false;

  if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
    if (!RD->hasDefinition())
      return false;

    if (!RD->isDynamicClass())
      return false;

    // The type_info lives with the vtable: if the vtable is emitted in
    // another TU, so is the RTTI descriptor.
    return !CGM.getVTables().ShouldEmitVTableInThisTU(RD);
  }

  return false;
}
/// IsIncompleteClassType - Returns whether the given record type is incomplete,
/// i.e. no complete definition of the class is visible.
static bool IsIncompleteClassType(const RecordType *RecordTy) {
  return !RecordTy->getDecl()->isCompleteDefinition();
}
/// ContainsIncompleteClassType - Returns whether the given type contains an
/// incomplete class type. This is true if
///
///   * The given type is an incomplete class type.
///   * The given type is a pointer type whose pointee type contains an
///     incomplete class type.
///   * The given type is a member pointer type whose class is an incomplete
///     class type.
///   * The given type is a member pointer type whoise pointee type contains an
///     incomplete class type.
/// is an indirect or direct pointer to an incomplete class type.
static bool ContainsIncompleteClassType(QualType Ty) {
  if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
    if (IsIncompleteClassType(RecordTy))
      return true;
  }

  // Recurse through any level of pointer indirection.
  if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
    return ContainsIncompleteClassType(PointerTy->getPointeeType());

  if (const MemberPointerType *MemberPointerTy =
      dyn_cast<MemberPointerType>(Ty)) {
    // Check if the class type is incomplete.
    const RecordType *ClassType = cast<RecordType>(MemberPointerTy->getClass());
    if (IsIncompleteClassType(ClassType))
      return true;

    return ContainsIncompleteClassType(MemberPointerTy->getPointeeType());
  }

  return false;
}
/// getTypeInfoLinkage - Return the linkage that the type info and type info
/// name constants should have for the given type.
static llvm::GlobalVariable::LinkageTypes
getTypeInfoLinkage(CodeGenModule &CGM, QualType Ty) {
  // Itanium C++ ABI 2.9.5p7:
  //   In addition, it and all of the intermediate abi::__pointer_type_info
  //   structs in the chain down to the abi::__class_type_info for the
  //   incomplete class type must be prevented from resolving to the
  //   corresponding type_info structs for the complete class type, possibly
  //   by making them local static objects. Finally, a dummy class RTTI is
  //   generated for the incomplete type that will not resolve to the final
  //   complete class RTTI (because the latter need not exist), possibly by
  //   making it a local static object.
  if (ContainsIncompleteClassType(Ty))
    return llvm::GlobalValue::InternalLinkage;

  switch (Ty->getLinkage()) {
  case NoLinkage:
  case InternalLinkage:
  case UniqueExternalLinkage:
    return llvm::GlobalValue::InternalLinkage;

  case ExternalLinkage:
    if (!CGM.getLangOpts().RTTI) {
      // RTTI is not enabled, which means that this type info struct is going
      // to be used for exception handling. Give it linkonce_odr linkage.
      return llvm::GlobalValue::LinkOnceODRLinkage;
    }

    if (const RecordType *Record = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(Record->getDecl());
      if (RD->hasAttr<WeakAttr>())
        return llvm::GlobalValue::WeakODRLinkage;
      // For dynamic classes, keep the RTTI's linkage in lockstep with the
      // vtable's linkage.
      if (RD->isDynamicClass())
        return CGM.getVTableLinkage(RD);
    }

    return llvm::GlobalValue::LinkOnceODRLinkage;
  }

  llvm_unreachable("Invalid linkage!");
}
// CanUseSingleInheritance - Return whether the given record decl has a "single,
// public, non-virtual base at offset zero (i.e. the derived class is dynamic
// iff the base is)", according to Itanium C++ ABI, 2.95p6b.
// When true, the cheaper abi::__si_class_type_info layout can be used.
static bool CanUseSingleInheritance(const CXXRecordDecl *RD) {
  // Check the number of bases.
  if (RD->getNumBases() != 1)
    return false;

  // Get the base.
  CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin();

  // Check that the base is not virtual.
  if (Base->isVirtual())
    return false;

  // Check that the base is public.
  if (Base->getAccessSpecifier() != AS_public)
    return false;

  // Check that the class is dynamic iff the base is.
  const CXXRecordDecl *BaseDecl =
    cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());
  if (!BaseDecl->isEmpty() &&
      BaseDecl->isDynamicClass() != RD->isDynamicClass())
    return false;

  return true;
}
/// BuildVTablePointer - Push, as the first field of the type_info being
/// built, a pointer into the vtable of the C++ ABI type_info class that
/// corresponds to \p Ty (e.g. abi::__pointer_type_info for pointers).
void RTTIBuilder::BuildVTablePointer(const Type *Ty) {
  // abi::__class_type_info.
  static const char * const ClassTypeInfo =
    "_ZTVN10__cxxabiv117__class_type_infoE";
  // abi::__si_class_type_info.
  static const char * const SIClassTypeInfo =
    "_ZTVN10__cxxabiv120__si_class_type_infoE";
  // abi::__vmi_class_type_info.
  static const char * const VMIClassTypeInfo =
    "_ZTVN10__cxxabiv121__vmi_class_type_infoE";

  const char *VTableName = 0;

  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.def"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::Builtin:
  // GCC treats vector and complex types as fundamental types.
  case Type::Vector:
  case Type::ExtVector:
  case Type::Complex:
  case Type::Atomic:
  // FIXME: GCC treats block pointers as fundamental types?!
  case Type::BlockPointer:
    // abi::__fundamental_type_info.
    VTableName = "_ZTVN10__cxxabiv123__fundamental_type_infoE";
    break;

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // abi::__array_type_info.
    VTableName = "_ZTVN10__cxxabiv117__array_type_infoE";
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // abi::__function_type_info.
    VTableName = "_ZTVN10__cxxabiv120__function_type_infoE";
    break;

  case Type::Enum:
    // abi::__enum_type_info.
    VTableName = "_ZTVN10__cxxabiv116__enum_type_infoE";
    break;

  case Type::Record: {
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());

    // Incomplete or base-less classes use the plain __class_type_info;
    // otherwise pick the single- or multiple-inheritance variant.
    if (!RD->hasDefinition() || !RD->getNumBases()) {
      VTableName = ClassTypeInfo;
    } else if (CanUseSingleInheritance(RD)) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = VMIClassTypeInfo;
    }

    break;
  }

  case Type::ObjCObject:
    // Ignore protocol qualifiers.
    Ty = cast<ObjCObjectType>(Ty)->getBaseType().getTypePtr();

    // Handle id and Class.
    if (isa<BuiltinType>(Ty)) {
      VTableName = ClassTypeInfo;
      break;
    }

    assert(isa<ObjCInterfaceType>(Ty));
    // Fall through.

  case Type::ObjCInterface:
    // An interface with a superclass is modelled as single inheritance.
    if (cast<ObjCInterfaceType>(Ty)->getDecl()->getSuperClass()) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = ClassTypeInfo;
    }
    break;

  case Type::ObjCObjectPointer:
  case Type::Pointer:
    // abi::__pointer_type_info.
    VTableName = "_ZTVN10__cxxabiv119__pointer_type_infoE";
    break;

  case Type::MemberPointer:
    // abi::__pointer_to_member_type_info.
    VTableName = "_ZTVN10__cxxabiv129__pointer_to_member_type_infoE";
    break;
  }

  llvm::Constant *VTable =
    CGM.getModule().getOrInsertGlobal(VTableName, CGM.Int8PtrTy);

  llvm::Type *PtrDiffTy =
    CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

  // The vtable address point is 2.
  llvm::Constant *Two = llvm::ConstantInt::get(PtrDiffTy, 2);
  VTable = llvm::ConstantExpr::getInBoundsGetElementPtr(VTable, Two);
  VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);

  Fields.push_back(VTable);
}
// maybeUpdateRTTILinkage - Will update the linkage of the RTTI data structures
// from available_externally to the correct linkage if necessary. An example of
// this is:
//
//   struct A {
//     virtual void f();
//   };
//
//   const std::type_info &g() {
//     return typeid(A);
//   }
//
//   void A::f() { }
//
// When we're generating the typeid(A) expression, we do not yet know that
// A's key function is defined in this translation unit, so we will give the
// typeinfo and typename structures available_externally linkage. When A::f
// forces the vtable to be generated, we need to change the linkage of the
// typeinfo and typename structs, otherwise we'll end up with undefined
// externals when linking.
static void
maybeUpdateRTTILinkage(CodeGenModule &CGM, llvm::GlobalVariable *GV,
                       QualType Ty) {
  // We're only interested in globals with available_externally linkage.
  if (!GV->hasAvailableExternallyLinkage())
    return;

  // Get the real linkage for the type.
  llvm::GlobalVariable::LinkageTypes Linkage = getTypeInfoLinkage(CGM, Ty);

  // If variable is supposed to have available_externally linkage, we don't
  // need to do anything.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    return;

  // Update the typeinfo linkage.
  GV->setLinkage(Linkage);

  // Get the typename global (looked up by its mangled RTTI-name); it was
  // emitted with the same provisional linkage and must be kept in sync.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::GlobalVariable *TypeNameGV = CGM.getModule().getNamedGlobal(Name);

  assert(TypeNameGV->hasAvailableExternallyLinkage() &&
         "Type name has different linkage from type info!");

  // And update its linkage.
  TypeNameGV->setLinkage(Linkage);
}
/// BuildTypeInfo - Build (or find) the std::type_info object for \p Ty and
/// return it as an i8*.  When \p Force is set, a full descriptor is emitted
/// even if an external one could have been referenced instead.
llvm::Constant *RTTIBuilder::BuildTypeInfo(QualType Ty, bool Force) {
  // We want to operate on the canonical type.
  Ty = CGM.getContext().getCanonicalType(Ty);

  // Check if we've already emitted an RTTI descriptor for this type.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::GlobalVariable *OldGV = CGM.getModule().getNamedGlobal(Name);
  if (OldGV && !OldGV->isDeclaration()) {
    // Fully emitted already; possibly promote its provisional linkage.
    maybeUpdateRTTILinkage(CGM, OldGV, Ty);
    return llvm::ConstantExpr::getBitCast(OldGV, CGM.Int8PtrTy);
  }

  // Check if there is already an external RTTI descriptor for this type.
  bool IsStdLib = IsStandardLibraryRTTIDescriptor(Ty);
  if (!Force && (IsStdLib || ShouldUseExternalRTTIDescriptor(CGM, Ty)))
    return GetAddrOfExternalRTTIDescriptor(Ty);

  // Emit the standard library with external linkage.
  llvm::GlobalVariable::LinkageTypes Linkage;
  if (IsStdLib)
    Linkage = llvm::GlobalValue::ExternalLinkage;
  else
    Linkage = getTypeInfoLinkage(CGM, Ty);

  // Add the vtable pointer.
  BuildVTablePointer(cast<Type>(Ty));

  // And the name.
  llvm::GlobalVariable *TypeName = GetAddrOfTypeName(Ty, Linkage);
  Fields.push_back(llvm::ConstantExpr::getBitCast(TypeName, CGM.Int8PtrTy));

  // Add any class-specific trailing fields.
  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.def"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  // GCC treats vector types as fundamental types.
  case Type::Builtin:
  case Type::Vector:
  case Type::ExtVector:
  case Type::Complex:
  case Type::BlockPointer:
    // Itanium C++ ABI 2.9.5p4:
    //   abi::__fundamental_type_info adds no data members to std::type_info.
    break;

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__array_type_info adds no data members to std::type_info.
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__function_type_info adds no data members to std::type_info.
    break;

  case Type::Enum:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__enum_type_info adds no data members to std::type_info.
    break;

  case Type::Record: {
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());
    if (!RD->hasDefinition() || !RD->getNumBases()) {
      // We don't need to emit any fields.
      break;
    }

    if (CanUseSingleInheritance(RD))
      BuildSIClassTypeInfo(RD);
    else
      BuildVMIClassTypeInfo(RD);

    break;
  }

  case Type::ObjCObject:
  case Type::ObjCInterface:
    BuildObjCObjectTypeInfo(cast<ObjCObjectType>(Ty));
    break;

  case Type::ObjCObjectPointer:
    BuildPointerTypeInfo(cast<ObjCObjectPointerType>(Ty)->getPointeeType());
    break;

  case Type::Pointer:
    BuildPointerTypeInfo(cast<PointerType>(Ty)->getPointeeType());
    break;

  case Type::MemberPointer:
    BuildPointerToMemberTypeInfo(cast<MemberPointerType>(Ty));
    break;

  case Type::Atomic:
    // No fields, at least for the moment.
    break;
  }

  llvm::Constant *Init = llvm::ConstantStruct::getAnon(Fields);

  llvm::GlobalVariable *GV =
    new llvm::GlobalVariable(CGM.getModule(), Init->getType(),
                             /*Constant=*/true, Linkage, Init, Name);

  // If there's already an old global variable, replace it with the new one.
  if (OldGV) {
    GV->takeName(OldGV);
    llvm::Constant *NewPtr =
      llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
    OldGV->replaceAllUsesWith(NewPtr);
    OldGV->eraseFromParent();
  }

  // GCC only relies on the uniqueness of the type names, not the
  // type_infos themselves, so we can emit these as hidden symbols.
  // But don't do this if we're worried about strict visibility
  // compatibility.
  if (const RecordType *RT = dyn_cast<RecordType>(Ty)) {
    const CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());

    CGM.setTypeVisibility(GV, RD, CodeGenModule::TVK_ForRTTI);
    CGM.setTypeVisibility(TypeName, RD, CodeGenModule::TVK_ForRTTIName);
  } else {
    Visibility TypeInfoVisibility = DefaultVisibility;
    if (CGM.getCodeGenOpts().HiddenWeakVTables &&
        Linkage == llvm::GlobalValue::LinkOnceODRLinkage)
      TypeInfoVisibility = HiddenVisibility;

    // The type name should have the same visibility as the type itself.
    Visibility ExplicitVisibility = Ty->getVisibility();
    TypeName->setVisibility(CodeGenModule::
                            GetLLVMVisibility(ExplicitVisibility));

    TypeInfoVisibility = minVisibility(TypeInfoVisibility, Ty->getVisibility());
    GV->setVisibility(CodeGenModule::GetLLVMVisibility(TypeInfoVisibility));
  }

  GV->setUnnamedAddr(true);

  return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}
/// ComputeQualifierFlags - Map the cv-qualifiers in \p Quals onto the
/// __flags bits used by abi::__pbase_type_info.
static unsigned ComputeQualifierFlags(Qualifiers Quals) {
  unsigned Flags = 0;
  Flags |= Quals.hasConst() ? RTTIBuilder::PTI_Const : 0;
  Flags |= Quals.hasVolatile() ? RTTIBuilder::PTI_Volatile : 0;
  Flags |= Quals.hasRestrict() ? RTTIBuilder::PTI_Restrict : 0;
  return Flags;
}
/// BuildObjCObjectTypeInfo - Build the appropriate kind of type_info
/// for the given Objective-C object type.
void RTTIBuilder::BuildObjCObjectTypeInfo(const ObjCObjectType *OT) {
// Drop qualifiers.
const Type *T = OT->getBaseType().getTypePtr();
assert(isa<BuiltinType>(T) || isa<ObjCInterfaceType>(T));
// The builtin types are abi::__class_type_infos and don't require
// extra fields.
if (isa<BuiltinType>(T)) return;
ObjCInterfaceDecl *Class = cast<ObjCInterfaceType>(T)->getDecl();
ObjCInterfaceDecl *Super = Class->getSuperClass();
// Root classes are also __class_type_info.
if (!Super) return;
QualType SuperTy = CGM.getContext().getObjCInterfaceType(Super);
// Everything else is single inheritance.
llvm::Constant *BaseTypeInfo = RTTIBuilder(CGM).BuildTypeInfo(SuperTy);
Fields.push_back(BaseTypeInfo);
}
/// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
/// inheritance, according to the Itanium C++ ABI, 2.95p6b.
void RTTIBuilder::BuildSIClassTypeInfo(const CXXRecordDecl *RD) {
// Itanium C++ ABI 2.9.5p6b:
// It adds to abi::__class_type_info a single member pointing to the
// type_info structure for the base type,
llvm::Constant *BaseTypeInfo =
RTTIBuilder(CGM).BuildTypeInfo(RD->bases_begin()->getType());
Fields.push_back(BaseTypeInfo);
}
namespace {
  /// SeenBases - Contains virtual and non-virtual bases seen when traversing
  /// a class hierarchy.
  struct SeenBases {
    // Direct and indirect non-virtual bases encountered so far.
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> NonVirtualBases;
    // Direct and indirect virtual bases encountered so far.
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> VirtualBases;
  };
}
/// ComputeVMIClassTypeInfoFlags - Compute the value of the flags member in
/// abi::__vmi_class_type_info contributed by the single base \p Base,
/// recursing into its own (indirect) bases and recording everything visited
/// in \p Bases.
static unsigned ComputeVMIClassTypeInfoFlags(const CXXBaseSpecifier *Base,
                                             SeenBases &Bases) {
  unsigned Flags = 0;

  const CXXRecordDecl *BaseDecl =
    cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

  if (Base->isVirtual()) {
    // Mark the virtual base as seen (insert returns false when the decl was
    // already in the set).
    if (!Bases.VirtualBases.insert(BaseDecl)) {
      // If this virtual base has been seen before, then the class is diamond
      // shaped.
      Flags |= RTTIBuilder::VMI_DiamondShaped;
    } else {
      if (Bases.NonVirtualBases.count(BaseDecl))
        Flags |= RTTIBuilder::VMI_NonDiamondRepeat;
    }
  } else {
    // Mark the non-virtual base as seen.
    if (!Bases.NonVirtualBases.insert(BaseDecl)) {
      // If this non-virtual base has been seen before, then the class has non-
      // diamond shaped repeated inheritance.
      Flags |= RTTIBuilder::VMI_NonDiamondRepeat;
    } else {
      if (Bases.VirtualBases.count(BaseDecl))
        Flags |= RTTIBuilder::VMI_NonDiamondRepeat;
    }
  }

  // Walk all bases.
  for (CXXRecordDecl::base_class_const_iterator I = BaseDecl->bases_begin(),
       E = BaseDecl->bases_end(); I != E; ++I)
    Flags |= ComputeVMIClassTypeInfoFlags(I, Bases);

  return Flags;
}
static unsigned ComputeVMIClassTypeInfoFlags(const CXXRecordDecl *RD) {
unsigned Flags = 0;
SeenBases Bases;
// Walk all bases.
for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
E = RD->bases_end(); I != E; ++I)
Flags |= ComputeVMIClassTypeInfoFlags(I, Bases);
return Flags;
}
/// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
/// classes with bases that do not satisfy the abi::__si_class_type_info
/// constraints, according to the Itanium C++ ABI, 2.9.5p5c.
void RTTIBuilder::BuildVMIClassTypeInfo(const CXXRecordDecl *RD) {
  llvm::Type *UnsignedIntLTy =
    CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);

  // Itanium C++ ABI 2.9.5p6c:
  //   __flags is a word with flags describing details about the class
  //   structure, which may be referenced by using the __flags_masks
  //   enumeration. These flags refer to both direct and indirect bases.
  unsigned Flags = ComputeVMIClassTypeInfoFlags(RD);
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));

  // Itanium C++ ABI 2.9.5p6c:
  //   __base_count is a word with the number of direct proper base class
  //   descriptions that follow.
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, RD->getNumBases()));

  if (!RD->getNumBases())
    return;

  llvm::Type *LongLTy =
    CGM.getTypes().ConvertType(CGM.getContext().LongTy);

  // Now add the base class descriptions.

  // Itanium C++ ABI 2.9.5p6c:
  //   __base_info[] is an array of base class descriptions -- one for every
  //   direct proper base. Each description is of the type:
  //
  //   struct abi::__base_class_type_info {
  //   public:
  //     const __class_type_info *__base_type;
  //     long __offset_flags;
  //
  //     enum __offset_flags_masks {
  //       __virtual_mask = 0x1,
  //       __public_mask = 0x2,
  //       __offset_shift = 8
  //     };
  //   };
  for (CXXRecordDecl::base_class_const_iterator I = RD->bases_begin(),
       E = RD->bases_end(); I != E; ++I) {
    const CXXBaseSpecifier *Base = I;

    // The __base_type member points to the RTTI for the base type.
    Fields.push_back(RTTIBuilder(CGM).BuildTypeInfo(Base->getType()));

    const CXXRecordDecl *BaseDecl =
      cast<CXXRecordDecl>(Base->getType()->getAs<RecordType>()->getDecl());

    int64_t OffsetFlags = 0;

    // All but the lower 8 bits of __offset_flags are a signed offset.
    // For a non-virtual base, this is the offset in the object of the base
    // subobject. For a virtual base, this is the offset in the virtual table
    // of the virtual base offset for the virtual base referenced (negative).
    CharUnits Offset;
    if (Base->isVirtual())
      Offset =
        CGM.getVTableContext().getVirtualBaseOffsetOffset(RD, BaseDecl);
    else {
      const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
      Offset = Layout.getBaseClassOffset(BaseDecl);
    }  // NOTE: removed a stray ';' here that formed an empty statement.

    OffsetFlags = uint64_t(Offset.getQuantity()) << 8;

    // The low-order byte of __offset_flags contains flags, as given by the
    // masks from the enumeration __offset_flags_masks.
    if (Base->isVirtual())
      OffsetFlags |= BCTI_Virtual;
    if (Base->getAccessSpecifier() == AS_public)
      OffsetFlags |= BCTI_Public;

    Fields.push_back(llvm::ConstantInt::get(LongLTy, OffsetFlags));
  }
}
/// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct,
/// used for pointer types.
void RTTIBuilder::BuildPointerTypeInfo(QualType PointeeTy) {
  // Strip the (possibly array-nested) qualifiers off the pointee type.
  Qualifiers PointeeQuals;
  QualType StrippedPointeeTy =
    CGM.getContext().getUnqualifiedArrayType(PointeeTy, PointeeQuals);

  // Itanium C++ ABI 2.9.5p7:
  //   __flags is a flag word describing the cv-qualification and other
  //   attributes of the type pointed to.
  unsigned Flags = ComputeQualifierFlags(PointeeQuals);

  // Itanium C++ ABI 2.9.5p7:
  //   When the abi::__pbase_type_info is for a direct or indirect pointer to
  //   an incomplete class type, the incomplete target type flag is set.
  if (ContainsIncompleteClassType(StrippedPointeeTy))
    Flags |= PTI_Incomplete;

  llvm::Type *UnsignedIntLTy =
    CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));

  // Itanium C++ ABI 2.9.5p7:
  //   __pointee is a pointer to the std::type_info derivation for the
  //   unqualified type being pointed to.
  Fields.push_back(RTTIBuilder(CGM).BuildTypeInfo(StrippedPointeeTy));
}
/// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
/// struct, used for member pointer types.
void RTTIBuilder::BuildPointerToMemberTypeInfo(const MemberPointerType *Ty) {
QualType PointeeTy = Ty->getPointeeType();
Qualifiers Quals;
QualType UnqualifiedPointeeTy =
CGM.getContext().getUnqualifiedArrayType(PointeeTy, Quals);
// Itanium C++ ABI 2.9.5p7:
// __flags is a flag word describing the cv-qualification and other
// attributes of the type pointed to.
unsigned Flags = ComputeQualifierFlags(Quals);
const RecordType *ClassType = cast<RecordType>(Ty->getClass());
// Itanium C++ ABI 2.9.5p7:
// When the abi::__pbase_type_info is for a direct or indirect pointer to an
// incomplete class type, the incomplete target type flag is set.
if (ContainsIncompleteClassType(UnqualifiedPointeeTy))
Flags |= PTI_Incomplete;
if (IsIncompleteClassType(ClassType))
Flags |= PTI_ContainingClassIncomplete;
llvm::Type *UnsignedIntLTy =
CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
// Itanium C++ ABI 2.9.5p7:
// __pointee is a pointer to the std::type_info derivation for the
// unqualified type being pointed to.
llvm::Constant *PointeeTypeInfo =
RTTIBuilder(CGM).BuildTypeInfo(UnqualifiedPointeeTy);
Fields.push_back(PointeeTypeInfo);
// Itanium C++ ABI 2.9.5p9:
// __context is a pointer to an abi::__class_type_info corresponding to the
// class type containing the member pointed to
// (e.g., the "A" in "int A::*").
Fields.push_back(RTTIBuilder(CGM).BuildTypeInfo(QualType(ClassType, 0)));
}
/// GetAddrOfRTTIDescriptor - Return the i8* address of the RTTI descriptor
/// for \p Ty, emitting it if needed.  \p ForEH indicates the descriptor is
/// wanted for exception handling rather than for typeid().
llvm::Constant *CodeGenModule::GetAddrOfRTTIDescriptor(QualType Ty,
                                                       bool ForEH) {
  // Return a bogus pointer if RTTI is disabled, unless it's for EH.
  // FIXME: should we even be calling this method if RTTI is disabled
  // and it's not for EH?
  if (!ForEH && !getContext().getLangOpts().RTTI)
    return llvm::Constant::getNullValue(Int8PtrTy);

  // GNU-family ObjC runtimes provide their own EH type metadata for
  // Objective-C object pointers.
  if (ForEH && Ty->isObjCObjectPointerType() &&
      LangOpts.ObjCRuntime.isGNUFamily())
    return ObjCRuntime->GetEHType(Ty);

  return RTTIBuilder(*this).BuildTypeInfo(Ty);
}
/// EmitFundamentalRTTIDescriptor - Force emission (Force=true) of the RTTI
/// descriptors for the fundamental type \p Type, plus its pointer and
/// pointer-to-const variants.
void CodeGenModule::EmitFundamentalRTTIDescriptor(QualType Type) {
  RTTIBuilder(*this).BuildTypeInfo(Type, true);
  RTTIBuilder(*this).BuildTypeInfo(Context.getPointerType(Type), true);
  RTTIBuilder(*this).BuildTypeInfo(
    Context.getPointerType(Type.withConst()), true);
}
void CodeGenModule::EmitFundamentalRTTIDescriptors() {
QualType FundamentalTypes[] = { Context.VoidTy, Context.NullPtrTy,
Context.BoolTy, Context.WCharTy,
Context.CharTy, Context.UnsignedCharTy,
Context.SignedCharTy, Context.ShortTy,
Context.UnsignedShortTy, Context.IntTy,
Context.UnsignedIntTy, Context.LongTy,
Context.UnsignedLongTy, Context.LongLongTy,
Context.UnsignedLongLongTy, Context.FloatTy,
Context.DoubleTy, Context.LongDoubleTy,
Context.Char16Ty, Context.Char32Ty };
for (unsigned i = 0; i < sizeof(FundamentalTypes)/sizeof(QualType); ++i)
EmitFundamentalRTTIDescriptor(FundamentalTypes[i]);
}<|fim▁end|> | |
<|file_name|>PrimitiveLongIntVisitor.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> *
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.collection.primitive;
/**
 * Callback invoked once per key/value entry while iterating a primitive
 * long-to-int collection.
 *
 * @param <E> the type of checked exception the visitor is allowed to throw.
 */
public interface PrimitiveLongIntVisitor<E extends Exception>
{
    /**
     * Visit the given entry.
     *
     * @param key The key of the entry.
     * @param value The value of the entry.
     * @return 'true' to signal that the iteration should be stopped, 'false' to signal that the iteration should
     * continue if there are more entries to look at.
     * @throws E any thrown exception of type 'E' will bubble up through the 'visit' method.
     */
    boolean visited( long key, int value ) throws E;
}
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j. |
<|file_name|>_KeyNavMixin.js<|end_file_name|><|fim▁begin|>//>>built
define("dijit/_KeyNavMixin",["dojo/_base/array","dojo/_base/declare","dojo/dom-attr","dojo/keys","dojo/_base/lang","dojo/on","dijit/registry","dijit/_FocusMixin"],function(_1,_2,_3,_4,_5,on,_6,_7){
return _2("dijit._KeyNavMixin",_7,{tabIndex:"0",childSelector:null,postCreate:function(){
this.inherited(arguments);
_3.set(this.domNode,"tabIndex",this.tabIndex);
if(!this._keyNavCodes){
var _8=this._keyNavCodes={};
_8[_4.HOME]=_5.hitch(this,"focusFirstChild");
_8[_4.END]=_5.hitch(this,"focusLastChild");
_8[this.isLeftToRight()?_4.LEFT_ARROW:_4.RIGHT_ARROW]=_5.hitch(this,"_onLeftArrow");
_8[this.isLeftToRight()?_4.RIGHT_ARROW:_4.LEFT_ARROW]=_5.hitch(this,"_onRightArrow");
_8[_4.UP_ARROW]=_5.hitch(this,"_onUpArrow");
_8[_4.DOWN_ARROW]=_5.hitch(this,"_onDownArrow");
}
var _9=this,_a=typeof this.childSelector=="string"?this.childSelector:_5.hitch(this,"childSelector");
this.own(on(this.domNode,"keypress",_5.hitch(this,"_onContainerKeypress")),on(this.domNode,"keydown",_5.hitch(this,"_onContainerKeydown")),on(this.domNode,"focus",_5.hitch(this,"_onContainerFocus")),on(this.containerNode,on.selector(_a,"focusin"),function(_b){
_9._onChildFocus(_6.getEnclosingWidget(this),_b);
}));
},_onLeftArrow:function(){
},_onRightArrow:function(){
},_onUpArrow:function(){
},_onDownArrow:function(){
},focus:function(){
this.focusFirstChild();
},_getFirstFocusableChild:function(){
return this._getNextFocusableChild(null,1);
},_getLastFocusableChild:function(){
return this._getNextFocusableChild(null,-1);
},focusFirstChild:function(){
this.focusChild(this._getFirstFocusableChild());
},focusLastChild:function(){
this.focusChild(this._getLastFocusableChild());
},focusChild:function(_c,_d){
if(!_c){
return;
}
if(this.focusedChild&&_c!==this.focusedChild){
this._onChildBlur(this.focusedChild);
}
_c.set("tabIndex",this.tabIndex);
_c.focus(_d?"end":"start");
},_onContainerFocus:function(_e){
if(_e.target!==this.domNode||this.focusedChild){
return;
}
this.focus();
},_onFocus:function(){
_3.set(this.domNode,"tabIndex","-1");
this.inherited(arguments);
},_onBlur:function(_f){
_3.set(this.domNode,"tabIndex",this.tabIndex);
if(this.focusedChild){
this.focusedChild.set("tabIndex","-1");
this.lastFocusedChild=this.focusedChild;
this._set("focusedChild",null);
}
this.inherited(arguments);
},_onChildFocus:function(_10){
if(_10&&_10!=this.focusedChild){
if(this.focusedChild&&!this.focusedChild._destroyed){
this.focusedChild.set("tabIndex","-1");
}
_10.set("tabIndex",this.tabIndex);
this.lastFocused=_10;
this._set("focusedChild",_10);
}
},_searchString:"",multiCharSearchDuration:1000,onKeyboardSearch:function(_11,evt,_12,_13){
if(_11){
this.focusChild(_11);
}
},_keyboardSearchCompare:function(_14,_15){
var _16=_14.domNode,_17=_14.label||(_16.focusNode?_16.focusNode.label:"")||_16.innerText||_16.textContent||"",_18=_17.replace(/^\s+/,"").substr(0,_15.length).toLowerCase();
return (!!_15.length&&_18==_15)?-1:0;
},_onContainerKeydown:function(evt){
var _19=this._keyNavCodes[evt.keyCode];
if(_19){
<|fim▁hole|>evt.stopPropagation();
evt.preventDefault();
this._searchString="";
}else{
if(evt.keyCode==_4.SPACE&&this._searchTimer&&!(evt.ctrlKey||evt.altKey||evt.metaKey)){
evt.stopImmediatePropagation();
evt.preventDefault();
this._keyboardSearch(evt," ");
}
}
},_onContainerKeypress:function(evt){
if(evt.charCode<_4.SPACE||evt.ctrlKey||evt.altKey||evt.metaKey||(evt.charCode==_4.SPACE&&this._searchTimer)){
return;
}
evt.preventDefault();
evt.stopPropagation();
this._keyboardSearch(evt,String.fromCharCode(evt.charCode).toLowerCase());
},_keyboardSearch:function(evt,_1a){
var _1b=null,_1c,_1d=0,_1e=_5.hitch(this,function(){
if(this._searchTimer){
this._searchTimer.remove();
}
this._searchString+=_1a;
var _1f=/^(.)\1*$/.test(this._searchString);
var _20=_1f?1:this._searchString.length;
_1c=this._searchString.substr(0,_20);
this._searchTimer=this.defer(function(){
this._searchTimer=null;
this._searchString="";
},this.multiCharSearchDuration);
var _21=this.focusedChild||null;
if(_20==1||!_21){
_21=this._getNextFocusableChild(_21,1);
if(!_21){
return;
}
}
var _22=_21;
do{
var rc=this._keyboardSearchCompare(_21,_1c);
if(!!rc&&_1d++==0){
_1b=_21;
}
if(rc==-1){
_1d=-1;
break;
}
_21=this._getNextFocusableChild(_21,1);
}while(_21!=_22);
});
_1e();
this.onKeyboardSearch(_1b,evt,_1c,_1d);
},_onChildBlur:function(){
},_getNextFocusableChild:function(_23,dir){
var _24=_23;
do{
if(!_23){
_23=this[dir>0?"_getFirst":"_getLast"]();
if(!_23){
break;
}
}else{
_23=this._getNext(_23,dir);
}
if(_23!=null&&_23!=_24&&_23.isFocusable()){
return _23;
}
}while(_23!=_24);
return null;
},_getFirst:function(){
return null;
},_getLast:function(){
return null;
},_getNext:function(_25,dir){
if(_25){
_25=_25.domNode;
while(_25){
_25=_25[dir<0?"previousSibling":"nextSibling"];
if(_25&&"getAttribute" in _25){
var w=_6.byNode(_25);
if(w){
return w;
}
}
}
}
return null;
}});
});<|fim▁end|> | _19(evt,this.focusedChild);
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod file;<|fim▁hole|>pub mod hash_map;<|fim▁end|> | |
<|file_name|>0006_auto__add_contact.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Contact'
db.create_table('storybase_user_contact', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('storybase.fields.ShortTextField')(blank=True)),
('info', self.gf('django.db.models.fields.TextField')(blank=True)),
))
db.send_create_signal('storybase_user', ['Contact'])
def backwards(self, orm):
# Deleting model 'Contact'
db.delete_table('storybase_user_contact')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'storybase_asset.asset': {
'Meta': {'object_name': 'Asset'},
'asset_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'asset_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'datasets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'assets'", 'blank': 'True', 'to': "orm['storybase_asset.DataSet']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.CharField', [], {'default': "'CC BY-NC-SA'", 'max_length': '25'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'assets'", 'null': 'True', 'to': "orm['auth.User']"}),
'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'section_specific': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '10'})
},
'storybase_asset.dataset': {
'Meta': {'object_name': 'DataSet'},
'attribution': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dataset_created': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'dataset_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'datasets'", 'null': 'True', 'to': "orm['auth.User']"}),
'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'})
},
'storybase_story.story': {
'Meta': {'object_name': 'Story'},
'assets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_asset.Asset']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'stories'", 'null': 'True', 'to': "orm['auth.User']"}),
'byline': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'featured_assets': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'featured_in_stories'", 'blank': 'True', 'to': "orm['storybase_asset.Asset']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'license': ('django.db.models.fields.CharField', [], {'default': "'CC BY-NC-SA'", 'max_length': '25'}),
'on_homepage': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_user.Organization']"}),
'projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'stories'", 'blank': 'True', 'to': "orm['storybase_user.Project']"}),
'published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'draft'", 'max_length': '10'}),
'story_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'structure_type': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
'storybase_user.contact': {
'Meta': {'object_name': 'Contact'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'info': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('storybase.fields.ShortTextField', [], {'blank': 'True'})
},
'storybase_user.organization': {
'Meta': {'object_name': 'Organization'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'curated_stories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'curated_in_organizations'", 'blank': 'True', 'through': "orm['storybase_user.OrganizationStory']", 'to': "orm['storybase_story.Story']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'organizations'", 'blank': 'True', 'to': "orm['auth.User']"}),
'organization_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'website_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'storybase_user.organizationstory': {
'Meta': {'object_name': 'OrganizationStory'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Organization']"}),
'story': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_story.Story']"}),
'weight': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'storybase_user.organizationtranslation': {
'Meta': {'unique_together': "(('organization', 'language'),)", 'object_name': 'OrganizationTranslation'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '15'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('storybase.fields.ShortTextField', [], {}),
'organization': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Organization']"}),<|fim▁hole|> 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'curated_stories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'curated_in_projects'", 'blank': 'True', 'through': "orm['storybase_user.ProjectStory']", 'to': "orm['storybase_story.Story']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'projects'", 'blank': 'True', 'to': "orm['auth.User']"}),
'organizations': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'projects'", 'blank': 'True', 'to': "orm['storybase_user.Organization']"}),
'project_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'blank': 'True'}),
'website_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'storybase_user.projectstory': {
'Meta': {'object_name': 'ProjectStory'},
'added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Project']"}),
'story': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_story.Story']"}),
'weight': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'storybase_user.projecttranslation': {
'Meta': {'unique_together': "(('project', 'language'),)", 'object_name': 'ProjectTranslation'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '15'}),
'name': ('storybase.fields.ShortTextField', [], {}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['storybase_user.Project']"}),
'translation_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
}
}
complete_apps = ['storybase_user']<|fim▁end|> | 'translation_id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'blank': 'True'})
},
'storybase_user.project': {
'Meta': {'object_name': 'Project'}, |
<|file_name|>LicenseHeaderBearTest.py<|end_file_name|><|fim▁begin|>import os
from queue import Queue
from bears.general.LicenseHeaderBear import LicenseHeaderBear
from coalib.testing.LocalBearTestHelper import LocalBearTestHelper
from coalib.results.Result import Result
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
def get_testfile_path(name):
return os.path.join(os.path.dirname(__file__),
'licenseheader_test_files',
name)
def load_testfile(name):
with open(get_testfile_path(name)) as f:
output = f.readlines()
return output
class LicenseHeaderBearTest(LocalBearTestHelper):
def setUp(self):
self.section = Section('name')
self.uut = LicenseHeaderBear(self.section, Queue())
def test_copyright_without_author(self):
file_contents = load_testfile('CopyrightWithoutAuthor.java')
self.check_validity(self.uut, file_contents)
def test_copyright_with_given_author(self):
file_contents = load_testfile('copyright_with_given_author.txt')
self.section.append(Setting('author_name', 'The coala developers'))
self.check_validity(
self.uut,
file_contents)
def test_copyright_with_different_author(self):
file_contents = load_testfile('copyright_with_different_author.txt')
self.section.append(Setting('author_name', 'The coala developers'))
self.check_results(
self.uut,
file_contents,
[Result.from_values('LicenseHeaderBear',
'Copyright notice with different/no author '
'present.',
file=get_testfile_path('copyright_with_diff'
'erent_author.txt'))],
filename=get_testfile_path('copyright_with_'
'different_author.txt'))
def test_no_copyright(self):
file_contents = load_testfile('no_copyright.py')<|fim▁hole|> self.check_results(
self.uut,
file_contents,
[Result.from_values('LicenseHeaderBear',
'Copyright notice not present.',
file=get_testfile_path('no_copyright.py'))],
filename=get_testfile_path('no_copyright.py'))<|fim▁end|> | |
<|file_name|>time.py<|end_file_name|><|fim▁begin|>from yapsy.IPlugin import IPlugin
from manager.message import Message
import protocol.control as control
import schedule
class Time(IPlugin, Message):
def __init__(self):
IPlugin.__init__(self)
Message.__init__(self)
self.text = control.SPEED_1 + '\x0B9 \x0B8' + control.NEW_LINE + control.CALL_TIME<|fim▁hole|><|fim▁end|> | self.schedule = schedule.every(3).hours |
<|file_name|>transifex.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os, sys
from polib import pofile
from config import CONFIGURATION
from extract import SOURCE_WARN
from execute import execute
<|fim▁hole|>def push():
execute('tx push -s')
def pull():
for locale in CONFIGURATION.locales:
if locale != CONFIGURATION.source_locale:
#execute('tx pull -l %s' % locale)
execute('tx pull --all')
clean_translated_locales()
def clean_translated_locales():
"""
Strips out the warning from all translated po files
about being an English source file.
"""
for locale in CONFIGURATION.locales:
if locale != CONFIGURATION.source_locale:
clean_locale(locale)
def clean_locale(locale):
"""
Strips out the warning from all of a locale's translated po files
about being an English source file.
Iterates over machine-generated files.
"""
dirname = CONFIGURATION.get_messages_dir(locale)
for filename in ('django-partial.po', 'djangojs.po', 'mako.po'):
clean_file(dirname.joinpath(filename))
def clean_file(file):
"""
Strips out the warning from a translated po file about being an English source file.
Replaces warning with a note about coming from Transifex.
"""
po = pofile(file)
if po.header.find(SOURCE_WARN) != -1:
new_header = get_new_header(po)
new = po.header.replace(SOURCE_WARN, new_header)
po.header = new
po.save()
def get_new_header(po):
team = po.metadata.get('Language-Team', None)
if not team:
return TRANSIFEX_HEADER % TRANSIFEX_URL
else:
return TRANSIFEX_HEADER % team
if __name__ == '__main__':
if len(sys.argv)<2:
raise Exception("missing argument: push or pull")
arg = sys.argv[1]
if arg == 'push':
push()
elif arg == 'pull':
pull()
else:
raise Exception("unknown argument: (%s)" % arg)<|fim▁end|> | TRANSIFEX_HEADER = 'Translations in this file have been downloaded from %s'
TRANSIFEX_URL = 'https://www.transifex.com/projects/p/edx-studio/'
|
<|file_name|>2_6_attraction.rs<|end_file_name|><|fim▁begin|>// The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 2-6: Attraction
use nannou::prelude::*;
fn main() {
nannou::app(model).update(update).run();
}
struct Model {
mover: Mover,
attractor: Attractor,
}
struct Mover {
position: Point2,
velocity: Vector2,
acceleration: Vector2,
mass: f32,
}
// A type for a draggable attractive body in our world
struct Attractor {
mass: f32, // Maxx, tied to size
position: Point2, // position
dragging: bool, // Is the object being dragged?
roll_over: bool, // Is the mouse over the ellipse?
drag_offset: Vector2, // holds the offset for when the object is clicked on
}
impl Attractor {
const G: f32 = 1.0; // Gravitational Constant
fn new(rect: Rect) -> Self {
let position = rect.xy();
let mass = 20.0;
let drag_offset = vec2(0.0, 0.0);
let dragging = false;
let roll_over = false;
Attractor {
position,
mass,
drag_offset,
dragging,
roll_over,
}
}
fn attract(&self, m: &Mover) -> Vector2 {
let mut force = self.position - m.position; // Calculate direction of force
let mut d = force.magnitude(); // Distance between objects
d = d.max(5.0).min(25.0); // Limiting the distance to eliminate "extreme" results for very cose or very far object
force = force.normalize(); // Normalize vector (distance doesn't matter, we just want this vector for direction)
let strength = (Attractor::G * self.mass * m.mass) / (d * d); // Calculate gravitational force magnitude
force * strength // Get force vector --> magnitude * direction
}
// Method to display
fn display(&self, draw: &Draw) {
let gray = if self.dragging {
0.2
} else if self.roll_over {
0.4
} else {
0.75
};
draw.ellipse()
.xy(self.position)
.w_h(self.mass * 2.0, self.mass * 2.0)
.rgba(gray, gray, gray, 0.8)
.stroke(BLACK)
.stroke_weight(4.0);
}
// The methods below are for mouse interaction
fn clicked(&mut self, mx: f32, my: f32) {
let d = self.position.distance(pt2(mx, my));
if d < self.mass {
self.dragging = true;
self.drag_offset.x = self.position.x - mx;
self.drag_offset.y = self.position.y - my;
}
}
fn hover(&mut self, mx: f32, my: f32) {
let d = self.position.distance(pt2(mx, my));
if d < self.mass {
self.roll_over = true;
} else {
self.roll_over = false;
}
}
fn stop_dragging(&mut self) {
self.dragging = false;
}
fn drag(&mut self, mx: f32, my: f32) {
if self.dragging {
self.position.x = mx + self.drag_offset.x;
self.position.y = my + self.drag_offset.y;
}
}
}
impl Mover {
fn new() -> Self {
let position = pt2(80.0, 130.0);
let velocity = vec2(1.0, 0.0);
let acceleration = vec2(0.0, 0.0);
let mass = 1.0;
Mover {
position,
velocity,
acceleration,
mass,
}
}
fn apply_force(&mut self, force: Vector2) {
let f = force / self.mass;
self.acceleration += f;
}
fn update(&mut self) {
self.velocity += self.acceleration;
self.position += self.velocity;
self.acceleration *= 0.0;
}
fn display(&self, draw: &Draw) {
draw.ellipse()
.xy(self.position)
.w_h(16.0, 16.0)
.gray(0.3)
.stroke(BLACK)
.stroke_weight(2.0);
}
fn _check_edges(&mut self, rect: Rect) {
if self.position.x > rect.right() {
self.position.x = rect.left();
} else if self.position.x < rect.left() {
self.position.x = rect.right();
}
if self.position.y < rect.bottom() {
self.velocity.y *= -1.0;
self.position.y = rect.bottom();
}
}
}
fn model(app: &App) -> Model {
let rect = Rect::from_w_h(640.0, 360.0);
app.new_window()
.size(rect.w() as u32, rect.h() as u32)
.event(event)
.view(view)
.build()
.unwrap();
let mover = Mover::new();
let attractor = Attractor::new(rect);
Model { mover, attractor }
}
fn event(app: &App, m: &mut Model, event: WindowEvent) {
match event {
MousePressed(_button) => {
m.attractor.clicked(app.mouse.x, app.mouse.y);
}
MouseReleased(_buttom) => {
m.attractor.stop_dragging();
}
_other => (),
}
}
fn update(app: &App, m: &mut Model, _update: Update) {
let force = m.attractor.attract(&m.mover);
m.mover.apply_force(force);
m.mover.update();
m.attractor.drag(app.mouse.x, app.mouse.y);
m.attractor.hover(app.mouse.x, app.mouse.y);
}
fn view(app: &App, m: &Model, frame: Frame) {
// Begin drawing
let draw = app.draw();
draw.background().color(WHITE);
<|fim▁hole|> draw.to_frame(app, &frame).unwrap();
}<|fim▁end|> | m.attractor.display(&draw);
m.mover.display(&draw);
// Write the result of our drawing to the window's frame. |
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>use super::engine::{Action, Round};
use super::state::GameState;
use crate::cards::BasicCard;
pub fn format_hand(hand: &[BasicCard], gs: &GameState) -> String {
let mut cards: Vec<_> = hand.iter().collect();
cards.sort_by(|a, b| gs.display_order(a, b));
let sc: Vec<_> = cards.iter().map(|x| format!("{}", x)).collect();
sc.join(" ")
}
pub fn format_state(gs: &GameState) -> String {
format!(
"Player 1: Score {}, Hand: {}\nPlayer 2: Score {}, Hand: {}\nTrump: {}\n",
gs.score[0],
&format_hand(&gs.hands[0], gs),
gs.score[1],
&format_hand(&gs.hands[1], gs),<|fim▁hole|>
pub fn format_round(round: &Round) -> String {
format!(
"{}{}\n",
&format_state(round.get_state()),
round.get_phase().format(round.get_state())
)
}
pub fn format_action(action: &Action) -> String {
format!("Player {} plays {}.", action.player + 1, action.card)
}<|fim▁end|> | gs.trump
)
} |
<|file_name|>schedule_events.js<|end_file_name|><|fim▁begin|>'use strict'
class ScheduleEvents {
constructor (aws) {
// Authenticated `aws` object in `lib/main.js`
this.lambda = new aws.Lambda({
apiVersion: '2015-03-31'
})
this.cloudwatchevents = new aws.CloudWatchEvents({
apiVersion: '2015-10-07'
})
}
_ruleDescription (params) {
if ('ScheduleDescription' in params && params.ScheduleDescription != null) {
return `${params.ScheduleDescription}`
}
return `${params.ScheduleName} - ${params.ScheduleExpression}`
}
_functionName (params) {
return params.FunctionArn.split(':').pop()
}
_putRulePrams (params) {
return {
Name: params.ScheduleName,
Description: this._ruleDescription(params),
State: params.ScheduleState,
ScheduleExpression: params.ScheduleExpression
}
}
_putRule (params) {
// return RuleArn if created
return new Promise((resolve, reject) => {
const _params = this._putRulePrams(params)
this.cloudwatchevents.putRule(_params, (err, rule) => {
if (err) reject(err)
resolve(rule)
})
})
}
_addPermissionParams (params) {
return {
Action: 'lambda:InvokeFunction',
FunctionName: this._functionName(params),
Principal: 'events.amazonaws.com',
SourceArn: params.RuleArn,
StatementId: params.ScheduleName
}
}
_addPermission (params) {
return new Promise((resolve, reject) => {
const _params = this._addPermissionParams(params)
this.lambda.addPermission(_params, (err, data) => {
if (err) {
if (err.code !== 'ResourceConflictException') reject(err)
// If it exists it will result in an error but there is no problem.
resolve('Permission already set')
}
resolve(data)
})
})
}
_putTargetsParams (params) {
return {
Rule: params.ScheduleName,
Targets: [{
Arn: params.FunctionArn,
Id: this._functionName(params),
Input: params.hasOwnProperty('Input') ? JSON.stringify(params.Input) : ''
}]
}
}
_putTargets (params) {
return new Promise((resolve, reject) => {
const _params = this._putTargetsParams(params)
this.cloudwatchevents.putTargets(_params, (err, data) => {
// even if it is already registered, it will not be an error.
if (err) reject(err)
resolve(data)
})
})
}
add (params) {
return Promise.resolve().then(() => {
return this._putRule(params)
}).then(rule => {
return this._addPermission(Object.assign(params, rule))
}).then(data => {<|fim▁hole|> })
}
}
module.exports = ScheduleEvents<|fim▁end|> | return this._putTargets(params) |
<|file_name|>tasks.py<|end_file_name|><|fim▁begin|>from invoke import task, run
#from fabric.api import local, lcd, get, env
#from fabric.operations import require, prompt
#from fabric.utils import abort
import requests
import rdflib
import getpass
import os.path
import os
import setlr
from os import listdir
from rdflib import *
import logging
CHEAR_DIR='chear.d/'
HHEAR_DIR='hhear.d/'
SETL_FILE='ontology.setl.ttl'
ontology_setl = Namespace('https://hadatac.org/setl/')
setl = Namespace('http://purl.org/twc/vocab/setl/')
prov = Namespace('http://www.w3.org/ns/prov#')
dc = Namespace('http://purl.org/dc/terms/')
pv = Namespace('http://purl.org/net/provenance/ns#')
logging_level = logging.INFO
logging.basicConfig(level=logging_level)
@task
def buildchear(ctx):
setl_graph = Graph()
setl_graph.parse(SETL_FILE,format="turtle")
cwd = os.getcwd()
formats = ['ttl','owl','json']
ontology_output_files = [setl_graph.resource(URIRef('file://'+cwd+'/chear.'+x)) for x in formats]
print (len(setl_graph))
for filename in os.listdir(CHEAR_DIR):
if not filename.endswith('.ttl') or filename.startswith('#'):
continue
print('Adding fragment', filename)
fragment = setl_graph.resource(BNode())
for ontology_output_file in ontology_output_files:
print(ontology_output_file.identifier, list(ontology_output_file[prov.wasGeneratedBy]))
ontology_output_file.value(prov.wasGeneratedBy).add(prov.used, fragment)
fragment.add(RDF.type, setlr.void.Dataset)
fragment_extract = setl_graph.resource(BNode())
fragment.add(prov.wasGeneratedBy, fragment_extract)
fragment_extract.add(RDF.type, setl.Extract)
fragment_extract.add(prov.used, URIRef('file://'+CHEAR_DIR+filename))
setlr._setl(setl_graph)
@task
def buildhhear(ctx):
setl_graph = Graph()
setl_graph.parse('hhear-ontology.setl.ttl',format="turtle")
cwd = os.getcwd()
formats = ['ttl','owl','json']
ontology_output_files = [setl_graph.resource(URIRef('file://'+cwd+'/hhear.'+x)) for x in formats]
print (len(setl_graph))
for filename in os.listdir(HHEAR_DIR):<|fim▁hole|> fragment = setl_graph.resource(BNode())
for ontology_output_file in ontology_output_files:
print(ontology_output_file.identifier, list(ontology_output_file[prov.wasGeneratedBy]))
ontology_output_file.value(prov.wasGeneratedBy).add(prov.used, fragment)
fragment.add(RDF.type, setlr.void.Dataset)
fragment_extract = setl_graph.resource(BNode())
fragment.add(prov.wasGeneratedBy, fragment_extract)
fragment_extract.add(RDF.type, setl.Extract)
fragment_extract.add(prov.used, URIRef('file://'+HHEAR_DIR+filename))
setlr._setl(setl_graph)
@task
def chear2hhear(c, inputfile, outputfile):
import openpyxl
import re
import pandas as pd
mappings = {}
mappings.update(dict([(row['label_uri'], row['numeric_uri'])
for i, row in pd.read_csv('sio_mappings.csv').iterrows()]))
mappings.update(dict([(row['label_uri'], row['numeric_uri'])
for i, row in pd.read_csv('chear2hhear_mappings.csv').iterrows()]))
wb = openpyxl.load_workbook(inputfile)
for sheet in wb:
for row in sheet.rows:
for cell in row:
if isinstance(cell.value, str):
cellValues = []
for c in re.split('\\s*[,&]\\s*', cell.value):
if c in mappings:
print('Replacing',c,'with',mappings[c])
c = mappings[c]
cellValues.append(c)
cell.value = ', '.join(cellValues)
wb.save(outputfile)<|fim▁end|> | if not filename.endswith('.ttl') or filename.startswith('#'):
continue
print('Adding fragment', filename)
|
<|file_name|>inline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![deny(unsafe_code)]
use app_units::Au;
use block::AbsoluteAssignBSizesTraversal;
use context::LayoutContext;
use display_list_builder::DisplayListBuildState;
use display_list_builder::{FragmentDisplayListBuilding, InlineFlowDisplayListBuilding};
use euclid::{Point2D, Size2D};
use floats::{FloatKind, Floats, PlacementInfo};
use flow::{EarlyAbsolutePositionInfo, MutableFlowUtils, OpaqueFlow};
use flow::{self, BaseFlow, Flow, FlowClass, ForceNonfloatedFlag, IS_ABSOLUTELY_POSITIONED};
use flow_ref;
use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, Overflow};
use fragment::{SpecificFragmentInfo};
use gfx::display_list::{OpaqueNode, StackingContext, StackingContextId};
use gfx::font::FontMetrics;
use gfx::font_context::FontContext;
use incremental::{BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW, REPAINT, RESOLVE_GENERATED_CONTENT};
use layout_debug;
use model::IntrinsicISizesContribution;
use range::{Range, RangeIndex};
use std::cmp::max;
use std::collections::VecDeque;
use std::sync::Arc;
use std::{fmt, isize, mem};
use style::computed_values::{display, overflow_x, position, text_align, text_justify};
use style::computed_values::{text_overflow, vertical_align, white_space};
use style::logical_geometry::{LogicalRect, LogicalSize, WritingMode};
use style::properties::{ComputedValues, ServoComputedValues};
use style::values::computed::LengthOrPercentage;
use text;
use unicode_bidi;
use util;
use util::print_tree::PrintTree;
use wrapper::PseudoElementType;
// From gfxFontConstants.h in Firefox
static FONT_SUBSCRIPT_OFFSET_RATIO: f32 = 0.20;
static FONT_SUPERSCRIPT_OFFSET_RATIO: f32 = 0.34;
/// `Line`s are represented as offsets into the child list, rather than
/// as an object that "owns" fragments. Choosing a different set of line
/// breaks requires a new list of offsets, and possibly some splitting and
/// merging of TextFragments.
///
/// A similar list will keep track of the mapping between CSS fragments and
/// the corresponding fragments in the inline flow.
///
/// After line breaks are determined, render fragments in the inline flow may
/// overlap visually. For example, in the case of nested inline CSS fragments,
/// outer inlines must be at least as large as the inner inlines, for
/// purposes of drawing noninherited things like backgrounds, borders,
/// outlines.
///
/// N.B. roc has an alternative design where the list instead consists of
/// things like "start outer fragment, text, start inner fragment, text, end inner
/// fragment, text, end outer fragment, text". This seems a little complicated to
/// serve as the starting point, but the current design doesn't make it
/// hard to try out that alternative.
///
/// Line fragments also contain some metadata used during line breaking. The
/// green zone is the area that the line can expand to before it collides
/// with a float or a horizontal wall of the containing block. The block-start
/// inline-start corner of the green zone is the same as that of the line, but
/// the green zone can be taller and wider than the line itself.
#[derive(RustcEncodable, Debug, Clone)]
pub struct Line {
    /// A range of line indices that describe line breaks.
    ///
    /// For example, consider the following HTML and rendered element with
    /// linebreaks:
    ///
    /// ~~~html
    /// <span>I <span>like truffles, <img></span> yes I do.</span>
    /// ~~~
    ///
    /// ~~~text
    /// +------------+
    /// | I like     |
    /// | truffles,  |
    /// | +----+     |
    /// | |    |     |
    /// | +----+ yes |
    /// | I do.      |
    /// +------------+
    /// ~~~
    ///
    /// The ranges that describe these lines would be:
    ///
    /// | [0, 2)   | [2, 3)      | [3, 5)      | [5, 6)  |
    /// |----------|-------------|-------------|---------|
    /// | 'I like' | 'truffles,' | '<img> yes' | 'I do.' |
    pub range: Range<FragmentIndex>,
    /// The bidirectional embedding level runs for this line, in visual order.
    ///
    /// Can be set to `None` if the line is 100% left-to-right.
    pub visual_runs: Option<Vec<(Range<FragmentIndex>, u8)>>,
    /// The bounds are the exact position and extents of the line with respect
    /// to the parent box.
    ///
    /// For example, for the HTML below...
    ///
    /// ~~~html
    /// <div><span>I <span>like truffles, <img></span></div>
    /// ~~~
    ///
    /// ...the bounds would be:
    ///
    /// ~~~text
    /// +-----------------------------------------------------------+
    /// |               ^                                           |
    /// |               |                                           |
    /// |            origin.y                                       |
    /// |               |                                           |
    /// |               v                                           |
    /// |< - origin.x ->+ - - - - - - - - +---------+----           |
    /// |               |                 |         |   ^           |
    /// |               |                 |  <img>  | size.block    |
    /// |               I like truffles,  |         |   v           |
    /// |               + - - - - - - - - +---------+----           |
    /// |               |                           |               |
    /// |               |<------ size.inline ------>|               |
    /// |                                                           |
    /// |                                                           |
    /// +-----------------------------------------------------------+
    /// ~~~
    pub bounds: LogicalRect<Au>,
    /// The green zone is the greatest extent from which a line can extend to
    /// before it collides with a float.
    ///
    /// ~~~text
    /// +-----------------------+
    /// |:::::::::::::::::      |
    /// |:::::::::::::::::FFFFFF|
    /// |============:::::FFFFFF|
    /// |:::::::::::::::::FFFFFF|
    /// |:::::::::::::::::FFFFFF|
    /// |:::::::::::::::::      |
    /// |    FFFFFFFFF          |
    /// |    FFFFFFFFF          |
    /// |    FFFFFFFFF          |
    /// |                       |
    /// +-----------------------+
    ///
    /// === line
    /// ::: green zone
    /// FFF float
    /// ~~~
    pub green_zone: LogicalSize<Au>,
    /// The inline metrics for this line.
    ///
    /// Grown incrementally as fragments are pushed onto the line; see
    /// `LineBreaker::new_inline_metrics_for_line`.
    pub inline_metrics: InlineMetrics,
}
impl Line {
    /// Creates an empty line: no fragments, zero bounds and green zone, and inline metrics
    /// seeded from the given per-line minimums.
    fn new(writing_mode: WritingMode,
           minimum_block_size_above_baseline: Au,
           minimum_depth_below_baseline: Au)
           -> Line {
        // The initial ascent equals the minimum block-size above the baseline.
        let initial_metrics = InlineMetrics::new(minimum_block_size_above_baseline,
                                                 minimum_depth_below_baseline,
                                                 minimum_block_size_above_baseline);
        Line {
            inline_metrics: initial_metrics,
            range: Range::empty(),
            visual_runs: None,
            bounds: LogicalRect::zero(writing_mode),
            green_zone: LogicalSize::zero(writing_mode),
        }
    }
}
// `int_range_index!` generates a newtype integer index (`FragmentIndex`) plus the arithmetic
// needed to use it with the `Range` utility type above.
int_range_index! {
    #[derive(RustcEncodable)]
    #[doc = "The index of a fragment in a flattened vector of DOM elements."]
    struct FragmentIndex(isize)
}
/// Arranges fragments into lines, splitting them up as necessary.
///
/// Broken fragments accumulate in `new_fragments` and committed lines in `lines`; the caller
/// retrieves both after `scan_for_lines` runs.
struct LineBreaker {
    /// The floats we need to flow around.
    floats: Floats,
    /// The resulting fragment list for the flow, consisting of possibly-broken fragments.
    new_fragments: Vec<Fragment>,
    /// The next fragment or fragments that we need to work on.
    work_list: VecDeque<Fragment>,
    /// The line we're currently working on.
    pending_line: Line,
    /// The lines we've already committed.
    lines: Vec<Line>,
    /// The index of the last known good line breaking opportunity. The opportunity will either
    /// be inside this fragment (if it is splittable) or immediately prior to it.
    last_known_line_breaking_opportunity: Option<FragmentIndex>,
    /// The current position in the block direction (the block-start edge at which the next
    /// line will be placed).
    cur_b: Au,
    /// The computed value of the indentation for the first line (`text-indent`, CSS 2.1 § 16.1).
    first_line_indentation: Au,
    /// The minimum block-size above the baseline for each line, as specified by the line height
    /// and font style.
    minimum_block_size_above_baseline: Au,
    /// The minimum depth below the baseline for each line, as specified by the line height and
    /// font style.
    minimum_depth_below_baseline: Au,
}
impl LineBreaker {
/// Creates a new `LineBreaker` with a set of floats and the indentation of the first line.
fn new(float_context: Floats,
first_line_indentation: Au,
minimum_block_size_above_baseline: Au,
minimum_depth_below_baseline: Au)
-> LineBreaker {
LineBreaker {
new_fragments: Vec::new(),
work_list: VecDeque::new(),
pending_line: Line::new(float_context.writing_mode,
minimum_block_size_above_baseline,
minimum_depth_below_baseline),
floats: float_context,
lines: Vec::new(),
cur_b: Au(0),
last_known_line_breaking_opportunity: None,
first_line_indentation: first_line_indentation,
minimum_block_size_above_baseline: minimum_block_size_above_baseline,
minimum_depth_below_baseline: minimum_depth_below_baseline,
}
}
/// Resets the `LineBreaker` to the initial state it had after a call to `new`.
fn reset_scanner(&mut self) {
self.lines = Vec::new();
self.new_fragments = Vec::new();
self.cur_b = Au(0);
self.reset_line();
}
    /// Reinitializes the pending line to blank data, returning the old pending line (via
    /// `mem::replace`) so callers can still inspect it if needed.
    fn reset_line(&mut self) -> Line {
        // A fresh line has no recorded break opportunity yet.
        self.last_known_line_breaking_opportunity = None;
        mem::replace(&mut self.pending_line, Line::new(self.floats.writing_mode,
                                                       self.minimum_block_size_above_baseline,
                                                       self.minimum_depth_below_baseline))
    }
    /// Reflows fragments for the given inline flow.
    ///
    /// Takes the flow's fragments, breaks them into lines, computes bidirectional visual runs
    /// for each line, and stores the broken fragments and lines back into the flow.
    fn scan_for_lines(&mut self, flow: &mut InlineFlow, layout_context: &LayoutContext) {
        self.reset_scanner()
;
        // Create our fragment iterator.
        debug!("LineBreaker: scanning for lines, {} fragments", flow.fragments.len());
        let mut old_fragments = mem::replace(&mut flow.fragments, InlineFragments::new());
        let old_fragment_iter = old_fragments.fragments.into_iter();
        // TODO(pcwalton): This would likely be better as a list of dirty line indices. That way we
        // could resynchronize if we discover during reflow that all subsequent fragments must have
        // the same position as they had in the previous reflow. I don't know how common this case
        // really is in practice, but it's probably worth handling.
        self.lines = Vec::new();
        // Do the reflow.
        self.reflow_fragments(old_fragment_iter, flow, layout_context);
        // Perform unicode bidirectional layout.
        let para_level = flow.base.writing_mode.to_bidi_level();
        // The text within a fragment is at a single bidi embedding level (because we split
        // fragments on level run boundaries during flow construction), so we can build a level
        // array with just one entry per fragment. Non-text fragments fall back to the
        // paragraph level.
        let levels: Vec<u8> = self.new_fragments.iter().map(|fragment| match fragment.specific {
            SpecificFragmentInfo::ScannedText(ref info) => info.run.bidi_level,
            _ => para_level
        }).collect();
        let mut lines = mem::replace(&mut self.lines, Vec::new());
        // If everything is LTR, don't bother with reordering.
        let has_rtl = levels.iter().cloned().any(unicode_bidi::is_rtl);
        if has_rtl {
            // Compute and store the visual ordering of the fragments within the line.
            for line in &mut lines {
                let range = line.range.begin().to_usize()..line.range.end().to_usize();
                let runs = unicode_bidi::visual_runs(range, &levels);
                line.visual_runs = Some(runs.iter().map(|run| {
                    let start = FragmentIndex(run.start as isize);
                    let len = FragmentIndex(run.len() as isize);
                    (Range::new(start, len), levels[run.start])
                }).collect());
            }
        }
        // Place the fragments back into the flow.
        old_fragments.fragments = mem::replace(&mut self.new_fragments, vec![]);
        flow.fragments = old_fragments;
        flow.lines = lines;
    }
/// Reflows the given fragments, which have been plucked out of the inline flow.
fn reflow_fragments<'a, I>(&mut self,
mut old_fragment_iter: I,
flow: &'a InlineFlow,
layout_context: &LayoutContext)
where I: Iterator<Item=Fragment> {
loop {
// Acquire the next fragment to lay out from the work list or fragment list, as
// appropriate.
let fragment = match self.next_unbroken_fragment(&mut old_fragment_iter) {
None => break,
Some(fragment) => fragment,
};
// Try to append the fragment.
self.reflow_fragment(fragment, flow, layout_context);
}
if !self.pending_line_is_empty() {
debug!("LineBreaker: partially full line {} at end of scanning; committing it",
self.lines.len());
self.flush_current_line()
}
}
/// Acquires a new fragment to lay out from the work list or fragment list as appropriate.
/// Note that you probably don't want to call this method directly in order to be incremental-
/// reflow-safe; try `next_unbroken_fragment` instead.
fn next_fragment<I>(&mut self, old_fragment_iter: &mut I) -> Option<Fragment>
where I: Iterator<Item=Fragment> {
self.work_list.pop_front().or_else(|| old_fragment_iter.next())
}
    /// Acquires a new fragment to lay out from the work list or fragment list, merging it with any
    /// subsequent fragments as appropriate. In effect, what this method does is to return the next
    /// fragment to lay out, undoing line break operations that any previous reflows may have
    /// performed. You probably want to be using this method instead of `next_fragment`.
    fn next_unbroken_fragment<I>(&mut self, old_fragment_iter: &mut I) -> Option<Fragment>
                                 where I: Iterator<Item=Fragment> {
        let mut result = match self.next_fragment(old_fragment_iter) {
            None => return None,
            Some(fragment) => fragment,
        };
        loop {
            let candidate = match self.next_fragment(old_fragment_iter) {
                None => return Some(result),
                Some(fragment) => fragment,
            };
            // Only merge adjacent scanned-text fragments that come from the same text run,
            // have the same selection state, and identical inline contexts — i.e. pieces that
            // an earlier reflow split apart.
            let need_to_merge = match (&mut result.specific, &candidate.specific) {
                (&mut SpecificFragmentInfo::ScannedText(ref mut result_info),
                 &SpecificFragmentInfo::ScannedText(ref candidate_info)) => {
                    result_info.selected() == candidate_info.selected() &&
                        util::arc_ptr_eq(&result_info.run, &candidate_info.run) &&
                            inline_contexts_are_equal(&result.inline_context,
                                                      &candidate.inline_context)
                }
                _ => false,
            };
            if need_to_merge {
                result.merge_with(candidate);
                continue
            }
            // The candidate doesn't merge; save it for the next call and hand back the result.
            self.work_list.push_front(candidate);
            return Some(result)
        }
    }
    /// Commits a line to the list.
    ///
    /// Trailing whitespace must be stripped *before* the line is pushed, since stripping
    /// adjusts the pending line's inline bounds. Afterward `cur_b` advances past the committed
    /// line's block extent and the pending line is reset.
    fn flush_current_line(&mut self) {
        debug!("LineBreaker: flushing line {}: {:?}", self.lines.len(), self.pending_line);
        self.strip_trailing_whitespace_from_pending_line_if_necessary();
        self.lines.push(self.pending_line.clone());
        self.cur_b = self.pending_line.bounds.start.b + self.pending_line.bounds.size.block;
        self.reset_line();
    }
    /// Removes trailing whitespace from the pending line if necessary. This is done right before
    /// flushing it.
    fn strip_trailing_whitespace_from_pending_line_if_necessary(&mut self) {
        if self.pending_line.range.is_empty() {
            return
        }
        // Only the last fragment on the line can carry strippable trailing whitespace.
        let last_fragment_index = self.pending_line.range.end() - FragmentIndex(1);
        let mut fragment = &mut self.new_fragments[last_fragment_index.get() as usize];
        // Remember the pre-strip margin-box inline-size so we can shrink the line by exactly
        // the amount the fragment shrank.
        let mut old_fragment_inline_size = None;
        if let SpecificFragmentInfo::ScannedText(_) = fragment.specific {
            old_fragment_inline_size = Some(fragment.border_box.size.inline +
                                            fragment.margin.inline_start_end());
        }
        fragment.strip_trailing_whitespace_if_necessary();
        if let SpecificFragmentInfo::ScannedText(ref mut scanned_text_fragment_info) =
                fragment.specific {
            let scanned_text_fragment_info = &mut **scanned_text_fragment_info;
            let range = &mut scanned_text_fragment_info.range;
            // Re-measure the (possibly shortened) text range and propagate the new size to the
            // fragment's border box and the pending line's bounds.
            scanned_text_fragment_info.content_size.inline =
                scanned_text_fragment_info.run.metrics_for_range(range).advance_width;
            fragment.border_box.size.inline = scanned_text_fragment_info.content_size.inline +
                fragment.border_padding.inline_start_end();
            self.pending_line.bounds.size.inline = self.pending_line.bounds.size.inline -
                (old_fragment_inline_size.unwrap() -
                 (fragment.border_box.size.inline + fragment.margin.inline_start_end()));
        }
    }
    /// Returns the inline metrics the pending line would have if `new_fragment` were added to
    /// it (the componentwise maximum of the current and the fragment's metrics).
    // FIXME(eatkinson): this assumes that the tallest fragment in the line determines the line
    // block-size. This might not be the case with some weird text fonts.
    fn new_inline_metrics_for_line(&self, new_fragment: &Fragment, layout_context: &LayoutContext)
                                   -> InlineMetrics {
        self.pending_line.inline_metrics.max(&new_fragment.inline_metrics(layout_context))
    }
fn new_block_size_for_line(&self, new_fragment: &Fragment, layout_context: &LayoutContext)
-> Au {
max(self.pending_line.bounds.size.block,
self.new_inline_metrics_for_line(new_fragment, layout_context).block_size())
}
    /// Computes the position of a line that has only the provided fragment. Returns the bounding
    /// rect of the line's green zone (whose origin coincides with the line's origin) and the
    /// actual inline-size of the first fragment after splitting.
    ///
    /// If the fragment doesn't fit and can't be split, the bounds are returned anyway and the
    /// line will simply overflow.
    fn initial_line_placement(&self,
                              flow: &InlineFlow,
                              first_fragment: &Fragment,
                              ceiling: Au)
                              -> (LogicalRect<Au>, Au) {
        debug!("LineBreaker: trying to place first fragment of line {}; fragment size: {:?}, \
                splittable: {}",
               self.lines.len(),
               first_fragment.border_box.size,
               first_fragment.can_split());
        // Initially, pretend a splittable fragment has zero inline-size. We will move it later if
        // it has nonzero inline-size and that causes problems.
        let placement_inline_size = if first_fragment.can_split() {
            Au(0)
        } else {
            first_fragment.margin_box_inline_size() + self.indentation_for_pending_fragment()
        };
        // Try to place the fragment between floats.
        let line_bounds = self.floats.place_between_floats(&PlacementInfo {
            size: LogicalSize::new(self.floats.writing_mode,
                                   placement_inline_size,
                                   first_fragment.border_box.size.block),
            ceiling: ceiling,
            max_inline_size: flow.base.position.size.inline,
            kind: FloatKind::Left,
        });
        // Simple case: if the fragment fits, then we can stop here.
        if line_bounds.size.inline > first_fragment.margin_box_inline_size() {
            debug!("LineBreaker: fragment fits on line {}", self.lines.len());
            return (line_bounds, first_fragment.margin_box_inline_size());
        }
        // If not, but we can't split the fragment, then we'll place the line here and it will
        // overflow.
        if !first_fragment.can_split() {
            debug!("LineBreaker: line doesn't fit, but is unsplittable");
        }
        (line_bounds, first_fragment.margin_box_inline_size())
    }
    /// Performs float collision avoidance. This is called when adding a fragment is going to
    /// increase the block-size, and because of that we will collide with some floats.
    ///
    /// We have two options here:
    /// 1) Move the entire line so that it doesn't collide any more.
    /// 2) Break the line and put the new fragment on the next line.
    ///
    /// The problem with option 1 is that we might move the line and then wind up breaking anyway,
    /// which violates the standard. But option 2 is going to look weird sometimes.
    ///
    /// So we'll try to move the line whenever we can, but break if we have to.
    ///
    /// Returns false if and only if we should break the line. In either case the fragment is
    /// pushed back onto the work list to be laid out again.
    fn avoid_floats(&mut self,
                    flow: &InlineFlow,
                    in_fragment: Fragment,
                    new_block_size: Au)
                    -> bool {
        debug!("LineBreaker: entering float collision avoider!");
        // First predict where the next line is going to be.
        let (next_line, first_fragment_inline_size) =
            self.initial_line_placement(flow,
                                        &in_fragment,
                                        self.pending_line.bounds.start.b);
        let next_green_zone = next_line.size;
        let new_inline_size = self.pending_line.bounds.size.inline + first_fragment_inline_size;
        // Now, see if everything can fit at the new location.
        if next_green_zone.inline >= new_inline_size && next_green_zone.block >= new_block_size {
            debug!("LineBreaker: case=adding fragment collides vertically with floats: moving \
                    line");
            self.pending_line.bounds.start = next_line.start;
            self.pending_line.green_zone = next_green_zone;
            debug_assert!(!self.pending_line_is_empty(), "Non-terminating line breaking");
            self.work_list.push_front(in_fragment);
            return true
        }
        debug!("LineBreaker: case=adding fragment collides vertically with floats: breaking line");
        self.work_list.push_front(in_fragment);
        false
    }
    /// Tries to append the given fragment to the line, splitting it if necessary. Commits the
    /// current line if needed.
    ///
    /// This is the core of the line breaker: it handles initial placement for an empty line,
    /// float collision avoidance, `white-space` handling, and splitting the fragment when it
    /// doesn't fit in the inline direction.
    fn reflow_fragment(&mut self,
                       mut fragment: Fragment,
                       flow: &InlineFlow,
                       layout_context: &LayoutContext) {
        // Undo any whitespace stripping from previous reflows.
        fragment.reset_text_range_and_inline_size();
        // Determine initial placement for the fragment if we need to.
        //
        // Also, determine whether we can legally break the line before, or inside, this fragment.
        let fragment_is_line_break_opportunity = if self.pending_line_is_empty() {
            fragment.strip_leading_whitespace_if_necessary();
            let (line_bounds, _) = self.initial_line_placement(flow, &fragment, self.cur_b);
            self.pending_line.bounds.start = line_bounds.start;
            self.pending_line.green_zone = line_bounds.size;
            false
        } else {
            fragment.white_space().allow_wrap()
        };
        debug!("LineBreaker: trying to append to line {} (fragment size: {:?}, green zone: {:?}): \
               {:?}",
               self.lines.len(),
               fragment.border_box.size,
               self.pending_line.green_zone,
               fragment);
        // NB: At this point, if `green_zone.inline < self.pending_line.bounds.size.inline` or
        // `green_zone.block < self.pending_line.bounds.size.block`, then we committed a line that
        // overlaps with floats.
        let green_zone = self.pending_line.green_zone;
        let new_block_size = self.new_block_size_for_line(&fragment, layout_context);
        if new_block_size > green_zone.block {
            // Uh-oh. Float collision imminent. Enter the float collision avoider!
            // (`avoid_floats` re-queues the fragment either way; we only decide whether to
            // flush the line here.)
            if !self.avoid_floats(flow, fragment, new_block_size) {
                self.flush_current_line();
            }
            return
        }
        // Record the last known good line break opportunity if this is one.
        if fragment_is_line_break_opportunity {
            self.last_known_line_breaking_opportunity = Some(self.pending_line.range.end())
        }
        // If we must flush the line after finishing this fragment due to `white-space: pre`,
        // detect that.
        let line_flush_mode = if fragment.white_space().preserve_newlines() {
            if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
                LineFlushMode::Flush
            } else {
                LineFlushMode::No
            }
        } else {
            LineFlushMode::No
        };
        // If we're not going to overflow the green zone vertically, we might still do so
        // horizontally. We'll try to place the whole fragment on this line and break somewhere if
        // it doesn't fit.
        let indentation = self.indentation_for_pending_fragment();
        let new_inline_size = self.pending_line.bounds.size.inline +
            fragment.margin_box_inline_size() + indentation;
        if new_inline_size <= green_zone.inline {
            debug!("LineBreaker: fragment fits without splitting");
            self.push_fragment_to_line(layout_context, fragment, line_flush_mode);
            return
        }
        // If the wrapping mode prevents us from splitting, then back up and split at the last
        // known good split point.
        if !fragment.white_space().allow_wrap() {
            debug!("LineBreaker: fragment can't split; falling back to last known good split point");
            self.split_line_at_last_known_good_position(layout_context, fragment, line_flush_mode);
            return;
        }
        // Split it up!
        let available_inline_size = green_zone.inline -
            self.pending_line.bounds.size.inline -
            indentation;
        let inline_start_fragment;
        let inline_end_fragment;
        let split_result = match fragment.calculate_split_position(available_inline_size,
                                                                   self.pending_line_is_empty()) {
            None => {
                // We failed to split. Defer to the next line if we're allowed to; otherwise,
                // rewind to the last line breaking opportunity.
                if fragment_is_line_break_opportunity {
                    debug!("LineBreaker: fragment was unsplittable; deferring to next line");
                    self.work_list.push_front(fragment);
                    self.flush_current_line();
                } else {
                    self.split_line_at_last_known_good_position(layout_context,
                                                                fragment,
                                                                LineFlushMode::No);
                }
                return
            }
            Some(split_result) => split_result,
        };
        inline_start_fragment = split_result.inline_start.as_ref().map(|x| {
            fragment.transform_with_split_info(x, split_result.text_run.clone())
        });
        inline_end_fragment = split_result.inline_end.as_ref().map(|x| {
            fragment.transform_with_split_info(x, split_result.text_run.clone())
        });
        // Push the first fragment onto the line we're working on and start off the next line with
        // the second fragment. If there's no second fragment, the next line will start off empty.
        match (inline_start_fragment, inline_end_fragment) {
            (Some(inline_start_fragment), Some(inline_end_fragment)) => {
                self.push_fragment_to_line(layout_context,
                                           inline_start_fragment,
                                           LineFlushMode::Flush);
                self.work_list.push_front(inline_end_fragment)
            },
            (Some(fragment), None) => {
                self.push_fragment_to_line(layout_context, fragment, line_flush_mode);
            }
            (None, Some(fragment)) => {
                // Yes, this can happen!
                self.flush_current_line();
                self.work_list.push_front(fragment)
            }
            (None, None) => {}
        }
    }
    /// Pushes a fragment to the current line unconditionally, possibly truncating it and placing
    /// an ellipsis based on the value of `text-overflow`. If `flush_line` is `Flush`, then flushes
    /// the line afterward;
    fn push_fragment_to_line(&mut self,
                             layout_context: &LayoutContext,
                             fragment: Fragment,
                             line_flush_mode: LineFlushMode) {
        let indentation = self.indentation_for_pending_fragment();
        if self.pending_line_is_empty() {
            // Anchor the (empty) line's fragment range at the next slot in `new_fragments`.
            debug_assert!(self.new_fragments.len() <= (isize::MAX as usize));
            self.pending_line.range.reset(FragmentIndex(self.new_fragments.len() as isize),
                                          FragmentIndex(0));
        }
        // Determine if an ellipsis will be necessary to account for `text-overflow`.
        // Note the arm order: `overflow-x: visible` disables the ellipsis regardless of the
        // `text-overflow` value.
        let mut need_ellipsis = false;
        let available_inline_size = self.pending_line.green_zone.inline -
            self.pending_line.bounds.size.inline - indentation;
        match (fragment.style().get_inheritedtext().text_overflow,
               fragment.style().get_box().overflow_x) {
            (text_overflow::T::clip, _) | (_, overflow_x::T::visible) => {}
            (text_overflow::T::ellipsis, _) => {
                need_ellipsis = fragment.margin_box_inline_size() > available_inline_size;
            }
        }
        if !need_ellipsis {
            self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context);
        } else {
            // If truncation yields no content at all, only the ellipsis itself is pushed.
            let ellipsis = fragment.transform_into_ellipsis(layout_context);
            if let Some(truncation_info) =
                    fragment.truncate_to_inline_size(available_inline_size -
                                                     ellipsis.margin_box_inline_size()) {
                let fragment = fragment.transform_with_split_info(&truncation_info.split,
                                                                  truncation_info.text_run);
                self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context);
            }
            self.push_fragment_to_line_ignoring_text_overflow(ellipsis, layout_context);
        }
        if line_flush_mode == LineFlushMode::Flush {
            self.flush_current_line()
        }
    }
    /// Pushes a fragment to the current line unconditionally, without placing an ellipsis in the
    /// case of `text-overflow: ellipsis`.
    ///
    /// Inline-absolute fragments join the line's fragment range but do not contribute to its
    /// geometry (bounds or metrics).
    fn push_fragment_to_line_ignoring_text_overflow(&mut self,
                                                    fragment: Fragment,
                                                    layout_context: &LayoutContext) {
        let indentation = self.indentation_for_pending_fragment();
        self.pending_line.range.extend_by(FragmentIndex(1));
        if !fragment.is_inline_absolute() {
            self.pending_line.bounds.size.inline = self.pending_line.bounds.size.inline +
                fragment.margin_box_inline_size() +
                indentation;
            self.pending_line.inline_metrics =
                self.new_inline_metrics_for_line(&fragment, layout_context);
            self.pending_line.bounds.size.block =
                self.new_block_size_for_line(&fragment, layout_context);
        }
        self.new_fragments.push(fragment);
    }
    /// Rewinds the pending line to the last known good line-breaking opportunity, re-queueing
    /// `cur_fragment` and every fragment after that opportunity onto the work list, then
    /// flushes the (shortened) line. If no opportunity exists, the fragment is pushed anyway
    /// and the line overflows.
    fn split_line_at_last_known_good_position(&mut self,
                                              layout_context: &LayoutContext,
                                              cur_fragment: Fragment,
                                              line_flush_mode: LineFlushMode) {
        let last_known_line_breaking_opportunity =
            match self.last_known_line_breaking_opportunity {
                None => {
                    // No line breaking opportunity exists at all for this line. Overflow.
                    self.push_fragment_to_line(layout_context, cur_fragment, line_flush_mode);
                    return;
                }
                Some(last_known_line_breaking_opportunity) => last_known_line_breaking_opportunity,
            };
        self.work_list.push_front(cur_fragment);
        // Pop committed fragments back onto the work list, last first, so they come back off
        // in their original order.
        for fragment_index in (last_known_line_breaking_opportunity.get()..
                               self.pending_line.range.end().get()).rev() {
            debug_assert!(fragment_index == (self.new_fragments.len() as isize) - 1);
            self.work_list.push_front(self.new_fragments.pop().unwrap());
        }
        // FIXME(pcwalton): This should actually attempt to split the last fragment if
        // possible to do so, to handle cases like:
        //
        //     (available width)
        //     +-------------+
        //     The alphabet
        //     (<em>abcdefghijklmnopqrstuvwxyz</em>)
        //
        // Here, the last known-good split point is inside the fragment containing
        // "The alphabet (", which has already been committed by the time we get to this
        // point. Unfortunately, the existing splitting API (`calculate_split_position`)
        // has no concept of "split right before the last non-whitespace position". We'll
        // need to add that feature to the API to handle this case correctly.
        self.pending_line.range.extend_to(last_known_line_breaking_opportunity);
        self.flush_current_line();
    }
/// Returns the indentation that needs to be applied before the fragment we're reflowing.
fn indentation_for_pending_fragment(&self) -> Au {
if self.pending_line_is_empty() && self.lines.is_empty() {
self.first_line_indentation
} else {
Au(0)
}
}
/// Returns true if the pending line is empty and false otherwise.
fn pending_line_is_empty(&self) -> bool {
self.pending_line.range.length() == FragmentIndex(0)
}
}
/// Represents a list of inline fragments, including element ranges.
///
/// This is a thin newtype over `Vec<Fragment>`; `Line::range` values index into it.
#[derive(RustcEncodable, Clone)]
pub struct InlineFragments {
    /// The fragments themselves.
    pub fragments: Vec<Fragment>,
}
impl InlineFragments {
    /// Constructs an `InlineFragments` holding no fragments.
    pub fn new() -> InlineFragments {
        InlineFragments {
            fragments: Vec::new(),
        }
    }
    /// Returns true if and only if there are no fragments in the list.
    pub fn is_empty(&self) -> bool {
        self.fragments.is_empty()
    }
    /// Returns how many fragments the list contains.
    pub fn len(&self) -> usize {
        self.fragments.len()
    }
    /// Returns a shared reference to the fragment at `index`.
    pub fn get(&self, index: usize) -> &Fragment {
        &self.fragments[index]
    }
    /// Returns an exclusive reference to the fragment at `index`.
    pub fn get_mut(&mut self, index: usize) -> &mut Fragment {
        &mut self.fragments[index]
    }
}
/// Flows for inline layout.
#[derive(RustcEncodable)]
pub struct InlineFlow {
    /// Data common to all flows.
    pub base: BaseFlow,
    /// A vector of all inline fragments. Several fragments may correspond to one node/element.
    pub fragments: InlineFragments,
    /// A vector of ranges into fragments that represents line positions. These ranges are disjoint
    /// and are the result of inline layout. This also includes some metadata used for positioning
    /// lines.
    pub lines: Vec<Line>,
    /// The minimum block-size above the baseline for each line, as specified by the line height
    /// and font style.
    pub minimum_block_size_above_baseline: Au,
    /// The minimum depth below the baseline for each line, as specified by the line height and
    /// font style.
    pub minimum_depth_below_baseline: Au,
    /// The amount of indentation to use on the first line. This is determined by our block parent
    /// (because percentages are relative to the containing block, and we aren't in a position to
    /// compute things relative to our parent's containing block).
    pub first_line_indentation: Au,
}
impl InlineFlow {
pub fn from_fragments(fragments: InlineFragments, writing_mode: WritingMode) -> InlineFlow {
let mut flow = InlineFlow {
base: BaseFlow::new(None, writing_mode, ForceNonfloatedFlag::ForceNonfloated),
fragments: fragments,
lines: Vec::new(),
minimum_block_size_above_baseline: Au(0),
minimum_depth_below_baseline: Au(0),
first_line_indentation: Au(0),
};
if flow.fragments.fragments.iter().any(Fragment::is_unscanned_generated_content) {
flow.base.restyle_damage.insert(RESOLVE_GENERATED_CONTENT);
}
flow
}
    /// Returns the distance from the baseline for the logical block-start inline-start corner of
    /// this fragment, taking into account the value of the CSS `vertical-align` property.
    /// Negative values mean "toward the logical block-start" and positive values mean "toward the
    /// logical block-end".
    ///
    /// The extra boolean is set if and only if `largest_block_size_for_top_fragments` and/or
    /// `largest_block_size_for_bottom_fragments` were updated. That is, if the box has a `top` or
    /// `bottom` value for `vertical-align`, true is returned.
    ///
    /// Note that `block_size_above_baseline` and `depth_below_baseline` may be rewritten in
    /// place by the `text-top`/`text-bottom` arms.
    fn distance_from_baseline(fragment: &Fragment,
                              ascent: Au,
                              parent_text_block_start: Au,
                              parent_text_block_end: Au,
                              block_size_above_baseline: &mut Au,
                              depth_below_baseline: &mut Au,
                              largest_block_size_for_top_fragments: &mut Au,
                              largest_block_size_for_bottom_fragments: &mut Au,
                              layout_context: &LayoutContext)
                              -> (Au, bool) {
        let (mut offset_from_baseline, mut largest_size_updated) = (Au(0), false);
        // Offsets accumulate across every inline style applied to the fragment.
        for style in fragment.inline_styles() {
            // Ignore `vertical-align` values for table cells.
            let box_style = style.get_box();
            match box_style.display {
                display::T::inline | display::T::block | display::T::inline_block => {}
                _ => continue,
            }
            match box_style.vertical_align {
                vertical_align::T::baseline => {}
                vertical_align::T::middle => {
                    // TODO: x-height value should be used from font info.
                    // TODO: Doing nothing here passes our current reftests but doesn't work in
                    // all situations. Add vertical align reftests and fix this.
                },
                vertical_align::T::sub => {
                    // Subscript: shift block-end-ward by a fixed ratio of the parent text size.
                    let sub_offset = (parent_text_block_start + parent_text_block_end)
                                        .scale_by(FONT_SUBSCRIPT_OFFSET_RATIO);
                    offset_from_baseline = offset_from_baseline + sub_offset
                },
                vertical_align::T::super_ => {
                    // Superscript: shift block-start-ward by a fixed ratio of the parent text size.
                    let super_offset = (parent_text_block_start + parent_text_block_end)
                                        .scale_by(FONT_SUPERSCRIPT_OFFSET_RATIO);
                    offset_from_baseline = offset_from_baseline - super_offset
                },
                vertical_align::T::text_top => {
                    // Pin the fragment's top to the parent's text top, preserving its total
                    // block-size by redistributing it around the baseline.
                    let fragment_block_size = *block_size_above_baseline +
                        *depth_below_baseline;
                    let prev_depth_below_baseline = *depth_below_baseline;
                    *block_size_above_baseline = parent_text_block_start;
                    *depth_below_baseline = fragment_block_size - *block_size_above_baseline;
                    offset_from_baseline = offset_from_baseline + *depth_below_baseline -
                        prev_depth_below_baseline
                },
                vertical_align::T::text_bottom => {
                    // Symmetric to `text-top`: pin the bottom to the parent's text bottom.
                    let fragment_block_size = *block_size_above_baseline +
                        *depth_below_baseline;
                    let prev_depth_below_baseline = *depth_below_baseline;
                    *depth_below_baseline = parent_text_block_end;
                    *block_size_above_baseline = fragment_block_size - *depth_below_baseline;
                    offset_from_baseline = offset_from_baseline + *depth_below_baseline -
                        prev_depth_below_baseline
                },
                vertical_align::T::top => {
                    // Only the first `top`/`bottom` style encountered updates the largest-size
                    // accumulators.
                    if !largest_size_updated {
                        largest_size_updated = true;
                        *largest_block_size_for_top_fragments =
                            max(*largest_block_size_for_top_fragments,
                                *block_size_above_baseline + *depth_below_baseline);
                        offset_from_baseline = offset_from_baseline +
                            *block_size_above_baseline
                    }
                },
                vertical_align::T::bottom => {
                    if !largest_size_updated {
                        largest_size_updated = true;
                        *largest_block_size_for_bottom_fragments =
                            max(*largest_block_size_for_bottom_fragments,
                                *block_size_above_baseline + *depth_below_baseline);
                        offset_from_baseline = offset_from_baseline - *depth_below_baseline
                    }
                },
                vertical_align::T::LengthOrPercentage(LengthOrPercentage::Length(length)) => {
                    offset_from_baseline = offset_from_baseline - length
                }
                vertical_align::T::LengthOrPercentage(LengthOrPercentage::Percentage(p)) => {
                    // Percentages are relative to the fragment's own line height.
                    let line_height = fragment.calculate_line_height(layout_context);
                    let percent_offset = line_height.scale_by(p);
                    offset_from_baseline = offset_from_baseline - percent_offset
                }
                vertical_align::T::LengthOrPercentage(LengthOrPercentage::Calc(calc)) => {
                    let line_height = fragment.calculate_line_height(layout_context);
                    let percent_offset = line_height.scale_by(calc.percentage());
                    offset_from_baseline = offset_from_baseline - percent_offset - calc.length()
                }
            }
        }
        (offset_from_baseline - ascent, largest_size_updated)
    }
    /// Sets fragment positions in the inline direction based on alignment for one line. This
    /// performs text justification if mandated by the style.
    fn set_inline_fragment_positions(fragments: &mut InlineFragments,
                                     line: &Line,
                                     line_align: text_align::T,
                                     indentation: Au,
                                     is_last_line: bool) {
        // Figure out how much inline-size we have.
        let slack_inline_size = max(Au(0), line.green_zone.inline - line.bounds.size.inline);
        // Compute the value we're going to use for `text-justify`.
        if fragments.fragments.is_empty() {
            return
        }
        let text_justify = fragments.fragments[0].style().get_inheritedtext().text_justify;
        // Translate `left` and `right` to logical directions.
        let is_ltr = fragments.fragments[0].style().writing_mode.is_bidi_ltr();
        let line_align = match (line_align, is_ltr) {
            (text_align::T::left, true) |
            (text_align::T::servo_left, true) |
            (text_align::T::right, false) |
            (text_align::T::servo_right, false) => text_align::T::start,
            (text_align::T::left, false) |
            (text_align::T::servo_left, false) |
            (text_align::T::right, true) |
            (text_align::T::servo_right, true) => text_align::T::end,
            _ => line_align
        };
        // Set the fragment inline positions based on that alignment, and justify the text if
        // necessary.
        let mut inline_start_position_for_fragment = line.bounds.start.i + indentation;
        match line_align {
            // Justification only applies to non-final lines and when `text-justify` permits it.
            text_align::T::justify if !is_last_line && text_justify != text_justify::T::none => {
                InlineFlow::justify_inline_fragments(fragments, line, slack_inline_size)
            }
            text_align::T::justify | text_align::T::start => {}
            text_align::T::center | text_align::T::servo_center => {
                inline_start_position_for_fragment = inline_start_position_for_fragment +
                    slack_inline_size.scale_by(0.5)
            }
            text_align::T::end => {
                inline_start_position_for_fragment = inline_start_position_for_fragment +
                    slack_inline_size
            }
            // `left`/`right` were normalized to `start`/`end` above.
            text_align::T::left |
            text_align::T::servo_left |
            text_align::T::right |
            text_align::T::servo_right => unreachable!()
        }
        // Lay out the fragments in visual order.
        let run_count = match line.visual_runs {
            Some(ref runs) => runs.len(),
            None => 1
        };
        for run_idx in 0..run_count {
            let (range, level) = match line.visual_runs {
                Some(ref runs) if is_ltr => runs[run_idx],
                Some(ref runs) => runs[run_count - run_idx - 1], // reverse order for RTL runs
                None => (line.range, 0)
            };
            // If the bidi embedding direction is opposite the layout direction, lay out this
            // run in reverse order.
            let reverse = unicode_bidi::is_ltr(level) != is_ltr;
            let fragment_indices = if reverse {
                (range.end().get() - 1..range.begin().get() - 1).step_by(-1)
            } else {
                (range.begin().get()..range.end().get()).step_by(1)
            };
            for fragment_index in fragment_indices {
                let fragment = fragments.get_mut(fragment_index as usize);
                inline_start_position_for_fragment = inline_start_position_for_fragment +
                    fragment.margin.inline_start;
                // Mirror the border-box position when the fragment's writing mode disagrees
                // with the line's direction.
                let border_start = if fragment.style.writing_mode.is_bidi_ltr() == is_ltr {
                    inline_start_position_for_fragment
                } else {
                    line.green_zone.inline - inline_start_position_for_fragment
                                           - fragment.margin.inline_end
                                           - fragment.border_box.size.inline
                };
                fragment.border_box = LogicalRect::new(fragment.style.writing_mode,
                                                       border_start,
                                                       fragment.border_box.start.b,
                                                       fragment.border_box.size.inline,
                                                       fragment.border_box.size.block);
                fragment.update_late_computed_inline_position_if_necessary();
                if !fragment.is_inline_absolute() {
                    inline_start_position_for_fragment = inline_start_position_for_fragment +
                        fragment.border_box.size.inline + fragment.margin.inline_end;
                }
            }
        }
    }
/// Justifies the given set of inline fragments, distributing the `slack_inline_size` among all
/// of them according to the value of `text-justify`.
fn justify_inline_fragments(fragments: &mut InlineFragments,
line: &Line,
slack_inline_size: Au) {
// Fast path.
if slack_inline_size == Au(0) {
return<|fim▁hole|> // First, calculate the number of expansion opportunities (spaces, normally).
let mut expansion_opportunities = 0i32;
for fragment_index in line.range.each_index() {
let fragment = fragments.get(fragment_index.to_usize());
let scanned_text_fragment_info = match fragment.specific {
SpecificFragmentInfo::ScannedText(ref info) if !info.range.is_empty() => info,
_ => continue
};
let fragment_range = scanned_text_fragment_info.range;
for slice in scanned_text_fragment_info.run.character_slices_in_range(&fragment_range) {
expansion_opportunities += slice.glyphs.space_count_in_range(&slice.range) as i32
}
}
// Then distribute all the space across the expansion opportunities.
let space_per_expansion_opportunity = slack_inline_size.to_f64_px() /
(expansion_opportunities as f64);
for fragment_index in line.range.each_index() {
let fragment = fragments.get_mut(fragment_index.to_usize());
let mut scanned_text_fragment_info = match fragment.specific {
SpecificFragmentInfo::ScannedText(ref mut info) if !info.range.is_empty() => info,
_ => continue
};
let fragment_range = scanned_text_fragment_info.range;
// FIXME(pcwalton): This is an awful lot of uniqueness making. I don't see any easy way
// to get rid of it without regressing the performance of the non-justified case,
// though.
let run = Arc::make_mut(&mut scanned_text_fragment_info.run);
{
let glyph_runs = Arc::make_mut(&mut run.glyphs);
for mut glyph_run in &mut *glyph_runs {
let mut range = glyph_run.range.intersect(&fragment_range);
if range.is_empty() {
continue
}
range.shift_by(-glyph_run.range.begin());
let glyph_store = Arc::make_mut(&mut glyph_run.glyph_store);
glyph_store.distribute_extra_space_in_range(&range,
space_per_expansion_opportunity);
}
}
// Recompute the fragment's border box size.
let new_inline_size = run.advance_for_range(&fragment_range);
let new_size = LogicalSize::new(fragment.style.writing_mode,
new_inline_size,
fragment.border_box.size.block);
fragment.border_box = LogicalRect::from_point_size(fragment.style.writing_mode,
fragment.border_box.start,
new_size);
}
}
/// Sets final fragment positions in the block direction for one line. Assumes that the
/// fragment positions were initially set to the distance from the baseline first.
fn set_block_fragment_positions(fragments: &mut InlineFragments,
                                line: &Line,
                                line_distance_from_flow_block_start: Au,
                                baseline_distance_from_block_start: Au,
                                largest_depth_below_baseline: Au) {
    for fragment_index in line.range.each_index() {
        // If any of the inline styles say `top` or `bottom`, adjust the vertical align
        // appropriately.
        //
        // FIXME(#5624, pcwalton): This passes our current reftests but isn't the right thing
        // to do.
        let fragment = fragments.get_mut(fragment_index.to_usize());
        let mut vertical_align = vertical_align::T::baseline;
        for style in fragment.inline_styles() {
            match (style.get_box().display, style.get_box().vertical_align) {
                (display::T::inline, vertical_align::T::top) |
                (display::T::block, vertical_align::T::top) |
                (display::T::inline_block, vertical_align::T::top) => {
                    vertical_align = vertical_align::T::top;
                    break
                }
                (display::T::inline, vertical_align::T::bottom) |
                (display::T::block, vertical_align::T::bottom) |
                (display::T::inline_block, vertical_align::T::bottom) => {
                    vertical_align = vertical_align::T::bottom;
                    break
                }
                _ => {}
            }
        }

        match vertical_align {
            // `top`: anchor to the block-start of the line box.
            vertical_align::T::top => {
                fragment.border_box.start.b = fragment.border_box.start.b +
                    line_distance_from_flow_block_start
            }
            // `bottom`: anchor to the block-end of the line box.
            vertical_align::T::bottom => {
                fragment.border_box.start.b = fragment.border_box.start.b +
                    line_distance_from_flow_block_start +
                    baseline_distance_from_block_start +
                    largest_depth_below_baseline;
            }
            // Everything else was already expressed relative to the baseline, so just
            // translate by the baseline's position in the flow.
            _ => {
                fragment.border_box.start.b = fragment.border_box.start.b +
                    line_distance_from_flow_block_start + baseline_distance_from_block_start
            }
        }

        fragment.update_late_computed_block_position_if_necessary();
    }
}
/// Computes the minimum ascent and descent for each line. This is done during flow
/// construction.
///
/// `style` is the style of the block.
pub fn compute_minimum_ascent_and_descent(&self,
                                          font_context: &mut FontContext,
                                          style: &ServoComputedValues)
                                          -> (Au, Au) {
    // As a special case, if this flow contains only hypothetical fragments, then the entire
    // flow is hypothetical and takes up no space. See CSS 2.1 § 10.3.7.
    if self.fragments.fragments.iter().all(|fragment| fragment.is_hypothetical()) {
        return (Au(0), Au(0))
    }

    // Start from the metrics implied by the block's own font and `line-height`.
    let font_style = style.get_font_arc();
    let font_metrics = text::font_metrics_for_style(font_context, font_style);
    let line_height = text::line_height_from_style(style, &font_metrics);
    let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics, line_height);

    let mut block_size_above_baseline = inline_metrics.block_size_above_baseline;
    let mut depth_below_baseline = inline_metrics.depth_below_baseline;

    // According to CSS 2.1 § 10.8, `line-height` of any inline element specifies the minimal
    // height of line boxes within the element.
    for frag in &self.fragments.fragments {
        match frag.inline_context {
            Some(ref inline_context) => {
                // Take the max over every enclosing inline element's metrics.
                for node in &inline_context.nodes {
                    let font_style = node.style.get_font_arc();
                    let font_metrics = text::font_metrics_for_style(font_context, font_style);
                    let line_height = text::line_height_from_style(&*node.style, &font_metrics);
                    let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics,
                                                                          line_height);
                    block_size_above_baseline = max(block_size_above_baseline,
                                                    inline_metrics.block_size_above_baseline);
                    depth_below_baseline = max(depth_below_baseline,
                                               inline_metrics.depth_below_baseline);
                }
            }
            None => {}
        }
    }

    (block_size_above_baseline, depth_below_baseline)
}
/// Folds the restyle damage of every fragment into this flow's own damage bits.
fn update_restyle_damage(&mut self) {
    let mut accumulated = self.base.restyle_damage;
    for fragment in self.fragments.fragments.iter() {
        accumulated.insert(fragment.restyle_damage());
    }
    self.base.restyle_damage = accumulated;
}
/// Returns the contiguous run of positioned fragments that surrounds (and includes) the
/// fragment at `fragment_index`, by walking outward in both directions while the
/// neighboring fragments are positioned.
fn containing_block_range_for_flow_surrounding_fragment_at_index(&self,
                                                                 fragment_index: FragmentIndex)
                                                                 -> Range<FragmentIndex> {
    // Extend the start of the range leftward over positioned fragments.
    let mut start_index = fragment_index;
    while start_index > FragmentIndex(0) &&
            self.fragments
                .fragments[(start_index - FragmentIndex(1)).get() as usize]
                .is_positioned() {
        start_index = start_index - FragmentIndex(1)
    }

    // Extend the end of the range rightward over positioned fragments.
    let mut end_index = fragment_index + FragmentIndex(1);
    while end_index < FragmentIndex(self.fragments.fragments.len() as isize) &&
            self.fragments.fragments[end_index.get() as usize].is_positioned() {
        end_index = end_index + FragmentIndex(1)
    }

    // NB: `Range::new` takes (start, length), not (start, end).
    Range::new(start_index, end_index - start_index)
}
/// Finds the fragment that wraps `opaque_flow` (an inline-absolute flow or its
/// hypothetical placeholder) and returns the range of positioned fragments around it.
fn containing_block_range_for_flow(&self, opaque_flow: OpaqueFlow) -> Range<FragmentIndex> {
    match self.fragments.fragments.iter().position(|fragment| {
        match fragment.specific {
            SpecificFragmentInfo::InlineAbsolute(ref inline_absolute) => {
                OpaqueFlow::from_flow(&*inline_absolute.flow_ref) == opaque_flow
            }
            SpecificFragmentInfo::InlineAbsoluteHypothetical(
                    ref inline_absolute_hypothetical) => {
                OpaqueFlow::from_flow(&*inline_absolute_hypothetical.flow_ref) == opaque_flow
            }
            _ => false,
        }
    }) {
        Some(index) => {
            let index = FragmentIndex(index as isize);
            self.containing_block_range_for_flow_surrounding_fragment_at_index(index)
        }
        None => {
            // FIXME(pcwalton): This is quite wrong. We should only return the range
            // surrounding the inline fragments that constitute the containing block. But this
            // suffices to get Google looking right.
            Range::new(FragmentIndex(0),
                       FragmentIndex(self.fragments.fragments.len() as isize))
        }
    }
}
impl Flow for InlineFlow {
/// Identifies this flow as an inline flow.
fn class(&self) -> FlowClass {
    FlowClass::Inline
}
/// Downcasts this flow to an immutable `InlineFlow` reference.
fn as_inline(&self) -> &InlineFlow {
    self
}
/// Downcasts this flow to a mutable `InlineFlow` reference.
fn as_mut_inline(&mut self) -> &mut InlineFlow {
    self
}
/// Accumulates intrinsic inline sizes bottom-up. Sizes are gathered at three levels:
/// per unbreakable run, per "inline run" (between forced breaks), and for the whole flow,
/// with `white-space` deciding where soft and forced breaks may occur.
fn bubble_inline_sizes(&mut self) {
    self.update_restyle_damage();

    let _scope = layout_debug_scope!("inline::bubble_inline_sizes {:x}", self.base.debug_id());

    let writing_mode = self.base.writing_mode;
    for kid in self.base.child_iter() {
        flow::mut_base(kid).floats = Floats::new(writing_mode);
    }

    let mut intrinsic_sizes_for_flow = IntrinsicISizesContribution::new();
    let mut intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
    let mut intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
    for fragment in &mut self.fragments.fragments {
        let intrinsic_sizes_for_fragment = fragment.compute_intrinsic_inline_sizes().finish();
        match fragment.style.get_inheritedtext().white_space {
            // `nowrap`: no soft wrap opportunity at this fragment boundary; it joins the
            // current unbreakable run.
            white_space::T::nowrap => {
                intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline(
                    &intrinsic_sizes_for_fragment)
            }
            // `pre`: like `nowrap`, but embedded newlines force line breaks.
            white_space::T::pre => {
                intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline(
                    &intrinsic_sizes_for_fragment);

                // Flush the intrinsic sizes we've been gathering up in order to handle the
                // line break, if necessary.
                if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
                    intrinsic_sizes_for_inline_run.union_inline(
                        &intrinsic_sizes_for_nonbroken_run.finish());
                    intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
                    intrinsic_sizes_for_flow.union_block(
                        &intrinsic_sizes_for_inline_run.finish());
                    intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
                }
            }
            // `pre-wrap`/`pre-line`: soft wrapping allowed, and newlines also force breaks.
            white_space::T::pre_wrap |
            white_space::T::pre_line => {
                // Flush the intrinsic sizes we were gathering up for the nonbroken run, if
                // necessary.
                intrinsic_sizes_for_inline_run.union_inline(
                    &intrinsic_sizes_for_nonbroken_run.finish());
                intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();

                intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment);

                // Flush the intrinsic sizes we've been gathering up in order to handle the
                // line break, if necessary.
                if fragment.requires_line_break_afterward_if_wrapping_on_newlines() {
                    intrinsic_sizes_for_inline_run.union_inline(
                        &intrinsic_sizes_for_nonbroken_run.finish());
                    intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();
                    intrinsic_sizes_for_flow.union_block(
                        &intrinsic_sizes_for_inline_run.finish());
                    intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new();
                }
            }
            // `normal`: a soft wrap opportunity exists at this fragment boundary.
            white_space::T::normal => {
                // Flush the intrinsic sizes we were gathering up for the nonbroken run, if
                // necessary.
                intrinsic_sizes_for_inline_run.union_inline(
                    &intrinsic_sizes_for_nonbroken_run.finish());
                intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new();

                intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment);
            }
        }

        fragment.restyle_damage.remove(BUBBLE_ISIZES);
    }

    // Flush any remaining nonbroken-run and inline-run intrinsic sizes.
    intrinsic_sizes_for_inline_run.union_inline(&intrinsic_sizes_for_nonbroken_run.finish());
    intrinsic_sizes_for_flow.union_block(&intrinsic_sizes_for_inline_run.finish());

    // Finish up the computation.
    self.base.intrinsic_inline_sizes = intrinsic_sizes_for_flow.finish()
}
/// Recursively (top-down) determines the actual inline-size of child contexts and fragments.
/// When called on this context, the context has had its inline-size set by the parent context.
fn assign_inline_sizes(&mut self, _: &LayoutContext) {
    let _scope = layout_debug_scope!("inline::assign_inline_sizes {:x}", self.base.debug_id())

    // Initialize content fragment inline-sizes if they haven't been initialized already.
    //
    // TODO: Combine this with `LineBreaker`'s walk in the fragment list, or put this into
    // `Fragment`.

    debug!("InlineFlow::assign_inline_sizes: floats in: {:?}", self.base.floats);

    let inline_size = self.base.block_container_inline_size;
    let container_mode = self.base.block_container_writing_mode;
    self.base.position.size.inline = inline_size;

    {
        // Reborrow `self` once so the loop body borrows fields, not `self` repeatedly.
        let this = &mut *self;
        for fragment in this.fragments.fragments.iter_mut() {
            let border_collapse = fragment.style.get_inheritedtable().border_collapse;
            fragment.compute_border_and_padding(inline_size, border_collapse);
            fragment.compute_block_direction_margins(inline_size);
            fragment.compute_inline_direction_margins(inline_size);
            fragment.assign_replaced_inline_size_if_necessary(inline_size);
        }
    }

    // If there are any inline-block kids, propagate explicit block and inline
    // sizes down to them.
    let block_container_explicit_block_size = self.base.block_container_explicit_block_size;
    for kid in self.base.child_iter() {
        let kid_base = flow::mut_base(kid);

        kid_base.block_container_inline_size = inline_size;
        kid_base.block_container_writing_mode = container_mode;
        kid_base.block_container_explicit_block_size = block_container_explicit_block_size;
    }
}
/// Calculate and set the block-size of this flow. See CSS 2.1 § 10.6.1.
fn assign_block_size(&mut self, layout_context: &LayoutContext) {
    let _scope = layout_debug_scope!("inline::assign_block_size {:x}", self.base.debug_id());

    // Divide the fragments into lines.
    //
    // TODO(pcwalton, #226): Get the CSS `line-height` property from the style of the
    // containing block to determine the minimum line block size.
    //
    // TODO(pcwalton, #226): Get the CSS `line-height` property from each non-replaced inline
    // element to determine its block-size for computing the line's own block-size.
    //
    // TODO(pcwalton): Cache the line scanner?
    debug!("assign_block_size_inline: floats in: {:?}", self.base.floats);

    // Assign the block-size and late-computed inline-sizes for the inline fragments.
    let containing_block_block_size =
        self.base.block_container_explicit_block_size;
    for fragment in &mut self.fragments.fragments {
        fragment.update_late_computed_replaced_inline_size_if_necessary();
        fragment.assign_replaced_block_size_if_necessary(containing_block_block_size);
    }

    // Reset our state, so that we handle incremental reflow correctly.
    //
    // TODO(pcwalton): Do something smarter, like Gecko and WebKit?
    self.lines.clear();

    // Determine how much indentation the first line wants.
    let mut indentation = if self.fragments.is_empty() {
        Au(0)
    } else {
        self.first_line_indentation
    };

    // Perform line breaking.
    let mut scanner = LineBreaker::new(self.base.floats.clone(),
                                       indentation,
                                       self.minimum_block_size_above_baseline,
                                       self.minimum_depth_below_baseline);
    scanner.scan_for_lines(self, layout_context);

    // Now, go through each line and lay out the fragments inside.
    let mut line_distance_from_flow_block_start = Au(0);
    let line_count = self.lines.len();
    for line_index in 0..line_count {
        let line = &mut self.lines[line_index];

        // Lay out fragments in the inline direction, and justify them if necessary.
        InlineFlow::set_inline_fragment_positions(&mut self.fragments,
                                                  line,
                                                  self.base.flags.text_align(),
                                                  indentation,
                                                  line_index + 1 == line_count);

        // Set the block-start position of the current line.
        // `line_height_offset` is updated at the end of the previous loop.
        line.bounds.start.b = line_distance_from_flow_block_start;

        // Calculate the distance from the baseline to the block-start and block-end of the
        // line.
        let mut largest_block_size_above_baseline = self.minimum_block_size_above_baseline;
        let mut largest_depth_below_baseline = self.minimum_depth_below_baseline;

        // Calculate the largest block-size among fragments with 'top' and 'bottom' values
        // respectively.
        let (mut largest_block_size_for_top_fragments,
             mut largest_block_size_for_bottom_fragments) = (Au(0), Au(0));

        for fragment_index in line.range.each_index() {
            let fragment = &mut self.fragments.fragments[fragment_index.to_usize()];

            let InlineMetrics {
                mut block_size_above_baseline,
                mut depth_below_baseline,
                ascent
            } = fragment.inline_metrics(layout_context);

            // To calculate text-top and text-bottom value when `vertical-align` is involved,
            // we should find the top and bottom of the content area of the parent fragment.
            // "Content area" is defined in CSS 2.1 § 10.6.1.
            //
            // TODO: We should extract em-box info from the font size of the parent and
            // calculate the distances from the baseline to the block-start and the block-end
            // of the parent's content area.

            // We should calculate the distance from baseline to the top of parent's content
            // area. But for now we assume it's the font size.
            //
            // CSS 2.1 does not state which font to use. This version of the code uses
            // the parent's font.

            // Calculate the final block-size above the baseline for this fragment.
            //
            // The no-update flag decides whether `largest_block_size_for_top_fragments` and
            // `largest_block_size_for_bottom_fragments` are to be updated or not. This will be
            // set if and only if the fragment has `vertical-align` set to `top` or `bottom`.
            let (distance_from_baseline, no_update_flag) =
                InlineFlow::distance_from_baseline(
                    fragment,
                    ascent,
                    self.minimum_block_size_above_baseline,
                    self.minimum_depth_below_baseline,
                    &mut block_size_above_baseline,
                    &mut depth_below_baseline,
                    &mut largest_block_size_for_top_fragments,
                    &mut largest_block_size_for_bottom_fragments,
                    layout_context);

            // Unless the current fragment has `vertical-align` set to `top` or `bottom`,
            // `largest_block_size_above_baseline` and `largest_depth_below_baseline` are
            // updated.
            if !no_update_flag {
                largest_block_size_above_baseline = max(block_size_above_baseline,
                                                        largest_block_size_above_baseline);
                largest_depth_below_baseline = max(depth_below_baseline,
                                                   largest_depth_below_baseline);
            }

            // Temporarily use `fragment.border_box.start.b` to mean "the distance from the
            // baseline". We will assign the real value later.
            fragment.border_box.start.b = distance_from_baseline
        }

        // Calculate the distance from the baseline to the top of the largest fragment with a
        // value for `bottom`. Then, if necessary, update `largest_block-size_above_baseline`.
        largest_block_size_above_baseline =
            max(largest_block_size_above_baseline,
                largest_block_size_for_bottom_fragments - largest_depth_below_baseline);

        // Calculate the distance from baseline to the bottom of the largest fragment with a
        // value for `top`. Then, if necessary, update `largest_depth_below_baseline`.
        largest_depth_below_baseline =
            max(largest_depth_below_baseline,
                largest_block_size_for_top_fragments - largest_block_size_above_baseline);

        // Now, the distance from the logical block-start of the line to the baseline can be
        // computed as `largest_block-size_above_baseline`.
        let baseline_distance_from_block_start = largest_block_size_above_baseline;

        // Compute the final positions in the block direction of each fragment. Recall that
        // `fragment.border_box.start.b` was set to the distance from the baseline above.
        InlineFlow::set_block_fragment_positions(&mut self.fragments,
                                                 line,
                                                 line_distance_from_flow_block_start,
                                                 baseline_distance_from_block_start,
                                                 largest_depth_below_baseline);

        // This is used to set the block-start position of the next line in the next loop.
        line.bounds.size.block = largest_block_size_above_baseline +
            largest_depth_below_baseline;
        line_distance_from_flow_block_start = line_distance_from_flow_block_start +
            line.bounds.size.block;

        // We're no longer on the first line, so set indentation to zero.
        indentation = Au(0)
    } // End of `lines.iter_mut()` loop.

    // Assign block sizes for any inline-block descendants.
    let thread_id = self.base.thread_id;
    for kid in self.base.child_iter() {
        if flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) ||
                flow::base(kid).flags.is_float() {
            continue
        }
        kid.assign_block_size_for_inorder_child_if_necessary(layout_context, thread_id);
    }

    if self.contains_positioned_fragments() {
        // Assign block-sizes for all flows in this absolute flow tree.
        // This is preorder because the block-size of an absolute flow may depend on
        // the block-size of its containing block, which may also be an absolute flow.
        (&mut *self as &mut Flow).traverse_preorder_absolute_flows(
            &mut AbsoluteAssignBSizesTraversal(layout_context));
    }

    // This flow's block size is the extent of its last line box (zero if no lines).
    self.base.position.size.block = match self.lines.last() {
        Some(ref last_line) => last_line.bounds.start.b + last_line.bounds.size.block,
        None => Au(0),
    };

    // Hand the floats back to the parent, translated out of this flow's coordinate system.
    self.base.floats = scanner.floats.clone();
    let writing_mode = self.base.floats.writing_mode;
    self.base.floats.translate(LogicalSize::new(writing_mode,
                                                Au(0),
                                                -self.base.position.size.block));

    // Propagate the relative containing block size/mode down to inline-block and
    // inline-absolute descendants.
    let containing_block_size = LogicalSize::new(writing_mode,
                                                 Au(0),
                                                 self.base.position.size.block);
    self.mutate_fragments(&mut |f: &mut Fragment| {
        match f.specific {
            SpecificFragmentInfo::InlineBlock(ref mut info) => {
                let block = flow_ref::deref_mut(&mut info.flow_ref);
                flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo {
                    relative_containing_block_size: containing_block_size,
                    relative_containing_block_mode: writing_mode,
                };
            }
            SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
                let block = flow_ref::deref_mut(&mut info.flow_ref);
                flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo {
                    relative_containing_block_size: containing_block_size,
                    relative_containing_block_mode: writing_mode,
                };
            }
            _ => (),
        }
    });

    self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);

    for fragment in &mut self.fragments.fragments {
        fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW);
    }
}
/// Computes stacking-relative positions for this flow's fragments and pushes position
/// information down into inline-block / inline-absolute child flows.
fn compute_absolute_position(&mut self, _: &LayoutContext) {
    // First, gather up the positions of all the containing blocks (if any).
    //
    // FIXME(pcwalton): This will get the absolute containing blocks inside `...` wrong in the
    // case of something like:
    //
    //      <span style="position: relative">
    //          Foo
    //          <span style="display: inline-block">...</span>
    //      </span>
    let mut containing_block_positions = Vec::new();
    let container_size = Size2D::new(self.base.block_container_inline_size, Au(0));
    for (fragment_index, fragment) in self.fragments.fragments.iter().enumerate() {
        // NOTE(review): the two arms below are intentionally parallel — inline-absolute
        // fragments always act as containing blocks; inline-blocks only when positioned.
        match fragment.specific {
            SpecificFragmentInfo::InlineAbsolute(_) => {
                let containing_block_range =
                    self.containing_block_range_for_flow_surrounding_fragment_at_index(
                        FragmentIndex(fragment_index as isize));
                let first_fragment_index = containing_block_range.begin().get() as usize;
                debug_assert!(first_fragment_index < self.fragments.fragments.len());
                let first_fragment = &self.fragments.fragments[first_fragment_index];
                let padding_box_origin = (first_fragment.border_box -
                                          first_fragment.style.logical_border_width()).start;
                containing_block_positions.push(
                    padding_box_origin.to_physical(self.base.writing_mode, container_size));
            }
            SpecificFragmentInfo::InlineBlock(_) if fragment.is_positioned() => {
                let containing_block_range =
                    self.containing_block_range_for_flow_surrounding_fragment_at_index(
                        FragmentIndex(fragment_index as isize));
                let first_fragment_index = containing_block_range.begin().get() as usize;
                debug_assert!(first_fragment_index < self.fragments.fragments.len());
                let first_fragment = &self.fragments.fragments[first_fragment_index];
                let padding_box_origin = (first_fragment.border_box -
                                          first_fragment.style.logical_border_width()).start;
                containing_block_positions.push(
                    padding_box_origin.to_physical(self.base.writing_mode, container_size));
            }
            _ => {}
        }
    }

    // Then compute the positions of all of our fragments. The iterator below is consumed
    // in the same fragment order the vector above was built in, so each positioned
    // fragment pops its own containing block position.
    let mut containing_block_positions = containing_block_positions.iter();
    for fragment in &mut self.fragments.fragments {
        let stacking_relative_border_box =
            fragment.stacking_relative_border_box(&self.base.stacking_relative_position,
                                                  &self.base
                                                       .early_absolute_position_info
                                                       .relative_containing_block_size,
                                                  self.base
                                                      .early_absolute_position_info
                                                      .relative_containing_block_mode,
                                                  CoordinateSystem::Parent);
        let stacking_relative_content_box =
            fragment.stacking_relative_content_box(&stacking_relative_border_box);

        let mut clip = self.base.clip.clone();
        fragment.adjust_clipping_region_for_children(&mut clip,
                                                     &stacking_relative_border_box,
                                                     false);
        let is_positioned = fragment.is_positioned();
        match fragment.specific {
            SpecificFragmentInfo::InlineBlock(ref mut info) => {
                let flow = flow_ref::deref_mut(&mut info.flow_ref);
                flow::mut_base(flow).clip = clip;

                let block_flow = flow.as_mut_block();
                block_flow.base.late_absolute_position_info =
                    self.base.late_absolute_position_info;

                let stacking_relative_position = self.base.stacking_relative_position;
                if is_positioned {
                    let padding_box_origin = containing_block_positions.next().unwrap();
                    block_flow.base
                              .late_absolute_position_info
                              .stacking_relative_position_of_absolute_containing_block =
                        stacking_relative_position + *padding_box_origin;
                }

                block_flow.base.stacking_relative_position =
                    stacking_relative_content_box.origin;
                block_flow.base.stacking_relative_position_of_display_port =
                    self.base.stacking_relative_position_of_display_port;
            }
            SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => {
                let flow = flow_ref::deref_mut(&mut info.flow_ref);
                flow::mut_base(flow).clip = clip;

                let block_flow = flow.as_mut_block();
                block_flow.base.late_absolute_position_info =
                    self.base.late_absolute_position_info;

                block_flow.base.stacking_relative_position =
                    stacking_relative_border_box.origin;
                block_flow.base.stacking_relative_position_of_display_port =
                    self.base.stacking_relative_position_of_display_port;
            }
            SpecificFragmentInfo::InlineAbsolute(ref mut info) => {
                let flow = flow_ref::deref_mut(&mut info.flow_ref);
                flow::mut_base(flow).clip = clip;

                let block_flow = flow.as_mut_block();
                block_flow.base.late_absolute_position_info =
                    self.base.late_absolute_position_info;

                let stacking_relative_position = self.base.stacking_relative_position;
                let padding_box_origin = containing_block_positions.next().unwrap();
                block_flow.base
                          .late_absolute_position_info
                          .stacking_relative_position_of_absolute_containing_block =
                    stacking_relative_position + *padding_box_origin;

                block_flow.base.stacking_relative_position =
                    stacking_relative_border_box.origin;
                block_flow.base.stacking_relative_position_of_display_port =
                    self.base.stacking_relative_position_of_display_port;
            }
            _ => {}
        }
    }
}
// Inline fragment positions are fully determined during line layout, so this hook is a no-op.
fn update_late_computed_inline_position_if_necessary(&mut self, _: Au) {}
// Block positions of inline fragments are fully determined during line layout; nothing to do.
fn update_late_computed_block_position_if_necessary(&mut self, _: Au) {}
/// Delegates stacking-context collection to the inline-specific implementation.
fn collect_stacking_contexts(&mut self,
                             parent_id: StackingContextId,
                             contexts: &mut Vec<Box<StackingContext>>)
                             -> StackingContextId {
    self.collect_stacking_contexts_for_inline(parent_id, contexts)
}
/// Builds the display list for this inline flow, then clears each fragment's repaint
/// damage since its display items have now been (re)emitted.
fn build_display_list(&mut self, state: &mut DisplayListBuildState) {
    self.build_display_list_for_inline(state);

    for fragment in &mut self.fragments.fragments {
        fragment.restyle_damage.remove(REPAINT);
    }
}
// NOTE(review): deliberately a no-op — style repair for inline content appears to happen
// per-fragment elsewhere; confirm against the `Flow::repair_style` contract.
fn repair_style(&mut self, _: &Arc<ServoComputedValues>) {}
/// Unions the overflow regions of all fragments, measured against this flow's physical
/// size and its relative containing block.
fn compute_overflow(&self) -> Overflow {
    let physical_size = self.base.position.size.to_physical(self.base.writing_mode);
    let containing_size =
        &self.base.early_absolute_position_info.relative_containing_block_size;
    let mut accumulated = Overflow::new();
    for fragment in self.fragments.fragments.iter() {
        accumulated.union(&fragment.compute_overflow(&physical_size, containing_size))
    }
    accumulated
}
/// Invokes `iterator` on the stacking-relative border box of every fragment it accepts.
fn iterate_through_fragment_border_boxes(&self,
                                         iterator: &mut FragmentBorderBoxIterator,
                                         level: i32,
                                         stacking_context_position: &Point2D<Au>) {
    // FIXME(#2795): Get the real container size.
    for fragment in &self.fragments.fragments {
        if !iterator.should_process(fragment) {
            continue
        }

        let stacking_relative_position = &self.base.stacking_relative_position;
        let relative_containing_block_size =
            &self.base.early_absolute_position_info.relative_containing_block_size;
        let relative_containing_block_mode =
            self.base.early_absolute_position_info.relative_containing_block_mode;
        iterator.process(fragment,
                         level,
                         &fragment.stacking_relative_border_box(stacking_relative_position,
                                                                relative_containing_block_size,
                                                                relative_containing_block_mode,
                                                                CoordinateSystem::Own)
                                  .translate(stacking_context_position))
    }
}
/// Applies the caller-supplied mutation to every fragment in this flow, in order.
fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) {
    for fragment in self.fragments.fragments.iter_mut() {
        (*mutator)(fragment)
    }
}
/// Returns true if at least one fragment in this flow is positioned.
fn contains_positioned_fragments(&self) -> bool {
    for fragment in self.fragments.fragments.iter() {
        if fragment.is_positioned() {
            return true
        }
    }
    false
}
/// Returns true if at least one fragment has `position: relative`.
fn contains_relatively_positioned_fragments(&self) -> bool {
    for fragment in self.fragments.fragments.iter() {
        if fragment.style.get_box().position == position::T::relative {
            return true
        }
    }
    false
}
/// Computes the size of the containing block generated by this flow's fragments for
/// `for_flow`: inline sizes of the in-flow fragments add up, while the block size is
/// their maximum. Absolutely-positioned fragments are skipped.
fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> {
    let mut containing_block_size = LogicalSize::new(self.base.writing_mode, Au(0), Au(0));
    for index in self.containing_block_range_for_flow(for_flow).each_index() {
        let fragment = &self.fragments.fragments[index.get() as usize];
        if fragment.is_absolutely_positioned() {
            continue
        }
        containing_block_size.inline = containing_block_size.inline +
            fragment.border_box.size.inline;
        containing_block_size.block = max(containing_block_size.block,
                                          fragment.border_box.size.block);
    }
    containing_block_size
}
/// Dumps each fragment as an extra child item of this flow in the debug print tree.
fn print_extra_flow_children(&self, print_tree: &mut PrintTree) {
    self.fragments
        .fragments
        .iter()
        .for_each(|fragment| print_tree.add_item(format!("{:?}", fragment)));
}
}
impl fmt::Debug for InlineFlow {
    /// Formats as `FlowClass(debug-id) base-debug-info` for flow-tree dumps.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f,
               "{:?}({:x}) {:?}",
               self.class(),
               self.base.debug_id(),
               flow::base(self))
    }
}
/// Per-ancestor information stored in a fragment's inline context for each enclosing
/// inline element that generated it.
#[derive(Clone)]
pub struct InlineFragmentNodeInfo {
    /// The address of the DOM node this entry corresponds to.
    pub address: OpaqueNode,
    /// The node's computed style.
    pub style: Arc<ServoComputedValues>,
    /// The style to use for this node's content when it is selected.
    pub selected_style: Arc<ServoComputedValues>,
    /// The pseudo-element this entry represents, if any.
    pub pseudo: PseudoElementType<()>,
    /// First/last-fragment-of-element flags (see `InlineFragmentNodeFlags`).
    pub flags: InlineFragmentNodeFlags,
}
bitflags! {
    flags InlineFragmentNodeFlags: u8 {
        /// Set when this is the first fragment generated for the element.
        const FIRST_FRAGMENT_OF_ELEMENT = 0x01,
        /// Set when this is the last fragment generated for the element.
        const LAST_FRAGMENT_OF_ELEMENT = 0x02,
    }
}
impl fmt::Debug for InlineFragmentNodeInfo {
    /// Prints only the raw flag bits; the styles and node address are too verbose for
    /// flow-tree dumps.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self.flags.bits())
    }
}
/// The set of inline ancestor elements that a fragment belongs to
/// (presumably ordered outermost-first — TODO confirm against the construction code).
#[derive(Clone)]
pub struct InlineFragmentContext {
    /// One entry per enclosing inline element.
    pub nodes: Vec<InlineFragmentNodeInfo>,
}
impl InlineFragmentContext {
    /// Creates an empty context with no enclosing inline nodes.
    pub fn new() -> InlineFragmentContext {
        InlineFragmentContext {
            nodes: vec!(),
        }
    }

    /// Returns true if `node_address` is one of the inline ancestors recorded here.
    #[inline]
    pub fn contains_node(&self, node_address: OpaqueNode) -> bool {
        // `any` short-circuits and states the intent more directly than the previous
        // `position(..).is_some()`.
        self.nodes.iter().any(|node| node.address == node_address)
    }

    /// Returns true if both contexts have the same number of nodes and each pair of
    /// corresponding nodes shares the same style `Arc` (pointer equality, not deep
    /// style comparison).
    fn ptr_eq(&self, other: &InlineFragmentContext) -> bool {
        if self.nodes.len() != other.nodes.len() {
            return false
        }
        for (this_node, other_node) in self.nodes.iter().zip(&other.nodes) {
            if !util::arc_ptr_eq(&this_node.style, &other_node.style) {
                return false
            }
        }
        true
    }
}
fn inline_contexts_are_equal(inline_context_a: &Option<InlineFragmentContext>,
inline_context_b: &Option<InlineFragmentContext>)
-> bool {
match (inline_context_a, inline_context_b) {
(&Some(ref inline_context_a), &Some(ref inline_context_b)) => {
inline_context_a.ptr_eq(inline_context_b)
}
(&None, &None) => true,
(&Some(_), &None) | (&None, &Some(_)) => false,
}
}
/// Block-size above the baseline, depth below the baseline, and ascent for a fragment. See CSS 2.1
/// § 10.8.1.
#[derive(Clone, Copy, Debug, RustcEncodable)]
pub struct InlineMetrics {
    /// Distance from the baseline to the block-start edge of the line box.
    pub block_size_above_baseline: Au,
    /// Distance from the baseline to the block-end edge of the line box.
    pub depth_below_baseline: Au,
    /// The ascent used for baseline alignment; typically the font ascent (see
    /// `from_font_metrics`), but adjusted for margins in `from_block_height`.
    pub ascent: Au,
}
impl InlineMetrics {
    /// Creates a new set of inline metrics from its three components.
    pub fn new(block_size_above_baseline: Au, depth_below_baseline: Au, ascent: Au)
               -> InlineMetrics {
        InlineMetrics {
            ascent: ascent,
            block_size_above_baseline: block_size_above_baseline,
            depth_below_baseline: depth_below_baseline,
        }
    }

    /// Calculates inline metrics from font metrics and line block-size per CSS 2.1 § 10.8.1.
    #[inline]
    pub fn from_font_metrics(font_metrics: &FontMetrics, line_height: Au) -> InlineMetrics {
        // Leading is whatever `line-height` adds beyond the font's ascent + descent.
        let total_leading = line_height - (font_metrics.ascent + font_metrics.descent);
        // Split the leading around the text. Deriving the lower half as
        // `total_leading - upper_half` (instead of scaling twice) guarantees the two parts
        // sum exactly to the leading, so the resulting line height equals the requested
        // `line_height` with no rounding drift.
        let leading_above = total_leading.scale_by(0.5);
        let leading_below = total_leading - leading_above;
        InlineMetrics {
            ascent: font_metrics.ascent,
            block_size_above_baseline: font_metrics.ascent + leading_above,
            depth_below_baseline: font_metrics.descent + leading_below,
        }
    }

    /// Calculates inline metrics from font metrics and line block-size per CSS 2.1 § 10.8.1.
    #[inline]
    pub fn from_block_height(font_metrics: &FontMetrics,
                             block_height: Au,
                             block_start_margin: Au,
                             block_end_margin: Au)
                             -> InlineMetrics {
        // NOTE(review): the block's margin box appears to be treated like a glyph box here;
        // the leading is split evenly above and below.
        let leading = block_height + block_start_margin + block_end_margin -
            (font_metrics.ascent + font_metrics.descent);
        let half_leading = leading.scale_by(0.5);
        InlineMetrics {
            ascent: font_metrics.ascent + half_leading - block_start_margin,
            block_size_above_baseline: font_metrics.ascent + half_leading,
            depth_below_baseline: font_metrics.descent + half_leading,
        }
    }

    /// Returns the total block-size of the line box these metrics describe.
    pub fn block_size(&self) -> Au {
        self.block_size_above_baseline + self.depth_below_baseline
    }

    /// Returns the component-wise maximum of `self` and `other`.
    pub fn max(&self, other: &InlineMetrics) -> InlineMetrics {
        InlineMetrics {
            ascent: max(self.ascent, other.ascent),
            block_size_above_baseline: max(self.block_size_above_baseline,
                                           other.block_size_above_baseline),
            depth_below_baseline: max(self.depth_below_baseline, other.depth_below_baseline),
        }
    }
}
#[derive(Copy, Clone, PartialEq)]
enum LineFlushMode {
No,
Flush,
}<|fim▁end|> | }
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::cmp::{max, min};
use termion;
use app::word::Bucket;
pub mod brush;
//---
#[allow(dead_code)]
#[derive(Debug)]
pub enum HAlignment {
    /// Rows flush against the left edge of the constraint.
    AlignLeft,
    /// Rows centered horizontally.
    AlignMiddle,
    /// Rows flush against the right edge.
    AlignRight
}

#[allow(dead_code)]
#[derive(Debug)]
pub enum VAlignment {
    /// Block of rows stuck to the top.
    AlignTop,
    /// Block of rows centered vertically.
    AlignCenter,
    /// Block of rows stuck to the bottom.
    AlignBottom
}

/// A vertical + horizontal alignment pair, applied by `Constraint::align`.
#[derive(Debug)]
pub struct Alignment {
    vert: VAlignment,
    hori: HAlignment
}
impl Alignment {
    /// Centered both vertically and horizontally.
    #[allow(unused)]
    pub fn centered() -> Alignment {
        Alignment {
            vert: VAlignment::AlignCenter,
            hori: HAlignment::AlignMiddle
        }
    }

    /// Anchored to the top-left corner (the "no-op" alignment).
    #[allow(unused)]
    pub fn top_left() -> Alignment {
        Alignment {
            vert: VAlignment::AlignTop,
            hori: HAlignment::AlignLeft
        }
    }

    /// Anchored to the bottom-right corner.
    #[allow(unused)]
    pub fn bottom_right() -> Alignment {
        Alignment {
            vert: VAlignment::AlignBottom,
            hori: HAlignment::AlignRight
        }
    }
}
//---
#[allow(dead_code)]
#[derive(Debug)]
pub enum Measurement<T> {
    /// A concrete, bounded extent.
    Value(T),
    /// Unbounded in this direction: layout may grow freely.
    Infinite
}
//---
/// A size whose width and/or height may be unbounded.
#[derive(Debug)]
pub struct AdaptativeDim {
    pub width: Measurement<u16>,
    pub height: Measurement<u16>
}
//---
/// Where and how words are laid out: a top-left origin, a (possibly
/// unbounded) size, and the alignment applied inside that area.
#[derive(Debug)]
pub struct Constraint {
    pub origin: Pos,
    pub dim: AdaptativeDim,
    pub align: Alignment
}
//---
/// An absolute terminal cell position.
#[derive(Debug, PartialEq)]
pub struct Pos {
    // TODO keep it only visible inside the crate
    pub x: u16,
    // TODO keep it only visible inside the crate
    pub y: u16
}
impl Pos {
    /// Returns a copy displaced by the signed offsets.
    ///
    /// NOTE(review): the i16/u16 round-trip wraps on over/underflow instead of
    /// saturating — callers are presumably expected to stay on-screen.
    pub fn shift(&self, incrx: i16, incry: i16) -> Pos {
        Pos {
            x: (self.x as i16 + incrx) as u16,
            y: (self.y as i16 + incry) as u16
        }
    }
}

impl<'a> Into<termion::cursor::Goto> for &'a Pos {
    /// Converts the position into termion's cursor-movement command.
    fn into(self) -> termion::cursor::Goto {
        termion::cursor::Goto(self.x, self.y)
    }
}

impl fmt::Display for Pos {
    /// Displaying a `Pos` writes the `Goto` for that position (i.e. the
    /// escape sequence moving the cursor there).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // write!(f, "{}", self.into::<termion::cursor::Goto>()) // WHY doesn't this work ?
        write!(f, "{}", Into::<termion::cursor::Goto>::into(self))
    }
}
//---
/// A concrete (always bounded) width/height pair.
#[derive(Debug, PartialEq)]
pub struct Dim {
    // TODO keep it only visible inside the crate
    pub w: u16,
    // TODO keep it only visible inside the crate
    pub h: u16
}

impl Dim {
    /// Returns a copy reduced by the given amounts.
    /// NOTE(review): u16 subtraction — panics in debug builds if it underflows.
    pub fn shrink(&self, incrw: u16, incrh: u16) -> Dim {
        Dim { w: self.w - incrw, h: self.h - incrh }
    }
    /// Returns a copy enlarged by the given amounts.
    pub fn grow(&self, incrw: u16, incrh: u16) -> Dim {
        Dim { w: self.w + incrw, h: self.h + incrh }
    }
}

impl Into<AdaptativeDim> for Dim {
    /// A concrete `Dim` maps to an `AdaptativeDim` with both extents bounded.
    fn into(self) -> AdaptativeDim {
        AdaptativeDim {
            width: Measurement::Value(self.w),
            height: Measurement::Value(self.h)
        }
    }
}

/// Queries the terminal size, reserving two rows and one column.
///
/// NOTE(review): the `- 2` / `- 1` margins presumably keep a status row and
/// avoid the last column — confirm against the render code.
pub fn term_dim() -> Dim {
    let size = termion::terminal_size().expect("no size of terminal");
    Dim {
        h: size.1 - 2,
        w: size.0 - 1
    }
}
//---
/// A position plus a size: the rectangle actually occupied by a layout.
#[derive(Debug, PartialEq)]
pub struct BoundingBox {
    // TODO keep it only visible inside the crate
    pub x: u16,
    // TODO keep it only visible inside the crate
    pub y: u16,
    // TODO keep it only visible inside the crate
    pub w: u16,
    // TODO keep it only visible inside the crate
    pub h: u16
}

impl BoundingBox {
    /// The top-left corner of the box.
    pub fn pos(&self) -> Pos {
        Pos { x: self.x, y: self.y }
    }
    /// The extent of the box.
    pub fn dim(&self) -> Dim {
        Dim { w: self.w, h: self.h }
    }
}
//---
#[allow(dead_code)]
#[derive(Debug, PartialEq)]
pub enum LayoutError {
    // index of the word which is wider than the frame itself
    TooWide(usize),
    // index of the first word that no longer fits vertically
    TooManyWords(usize)
}
impl Constraint {
    /// Lays `bucket`'s words out inside this constraint.
    ///
    /// First performs a greedy top-left word wrap (`split_roughly`), then
    /// shifts every position according to `self.align` (`align`). Returns the
    /// final position of each word plus the bounding box of the whole block.
    pub fn organize(&self, bucket: &Bucket) -> Result<(Vec<Pos>, BoundingBox), LayoutError> {
        let (mut poss, surface, rows) = try!(self.split_roughly(&bucket));
        let bbox = self.align(&mut poss, &surface, &rows);
        Ok((poss, bbox))
    }

    /// Applies the configured alignment to the rough positions, mutating them
    /// in place, and computes the resulting bounding box.
    ///
    /// `area_size.h` is the zero-based index of the last row of the rough
    /// layout; `rows_length` holds the used width of each row, indexed by row
    /// relative to the origin.
    fn align(&self, rough_pos: &mut Vec<Pos>, area_size: &Dim, rows_length: &Vec<u16>) -> BoundingBox {
        if rough_pos.len() == 0 {
            // Nothing to place: an empty box anchored at the origin.
            return BoundingBox { x: self.origin.x, y: self.origin.y, w: 0, h: 0 };
        }
        let mut bbox = BoundingBox { x: u16::max_value(), y: 0, w: 0, h: 0 };
        // The vertical offset is shared by every row of the block.
        let offset_y = match self.dim.height {
            Measurement::Infinite => 0,
            Measurement::Value(height) => {
                debug_assert!(area_size.h <= height, format!("{} <= {}", area_size.h, height));
                match self.align.vert {
                    VAlignment::AlignTop => 0,
                    VAlignment::AlignCenter => (height - area_size.h) / 2,
                    VAlignment::AlignBottom => height - area_size.h
                }
            }
        };
        bbox.y = rough_pos.first().expect("not possible").y;
        bbox.h = rough_pos.last().expect("not possible").y - bbox.y + 1;
        bbox.y = bbox.y + offset_y;
        for ref mut pos in rough_pos.into_iter() {
            let row_length = rows_length[(pos.y - self.origin.y) as usize];
            bbox.w = max(bbox.w, row_length);
            // The horizontal offset depends on this word's own row length.
            let offset_x = match self.dim.width {
                Measurement::Infinite => 0,
                Measurement::Value(width) => {
                    debug_assert!(row_length <= width, format!("{} <= {}", row_length, width));
                    match self.align.hori {
                        HAlignment::AlignLeft => 0,
                        HAlignment::AlignMiddle => (width - row_length) / 2,
                        HAlignment::AlignRight => width - row_length
                    }
                }
            };
            bbox.x = min(bbox.x, pos.x + offset_x);
            **pos = Pos {
                x: pos.x + offset_x,
                y: pos.y + offset_y
            };
        }
        bbox
    }

    /// Greedy word wrap: places every word left-aligned from `self.origin`,
    /// one separator cell between words, starting a new row whenever the
    /// width bound would be exceeded.
    ///
    /// Returns `(positions, area, rows_length)`. `area.h` is the zero-based
    /// index of the last row (not the row count).
    /// NOTE(review): `area.w` (`right_side`) excludes the final row and keeps
    /// the origin offset; `align` only consumes `area.h` today, so this is
    /// currently harmless — confirm before relying on `area.w`.
    fn split_roughly(&self, bucket: &Bucket) -> Result<(Vec<Pos>, Dim, Vec<u16>), LayoutError> {
        if bucket.words.is_empty() { return Ok((Vec::new(), Dim { w: 0, h: 0 }, Vec::new())); }
        let sep: u16 = 1;
        let mut planning: Vec<Pos> = Vec::new();
        let mut rows_length = Vec::new();
        let mut last_len: u16 = 0;
        let mut start_the_row = true;
        let mut right_side = 0;
        for (i, word) in bucket.words.iter().enumerate() {
            let len = (*word).raw.len() as u16;
            let (gap, start_x, start_y): (u16, _, _);
            {
                // Candidate slot: right after the previous word (or the origin).
                let last_pos = planning.last().unwrap_or(&self.origin);
                gap = if start_the_row { 0 } else { sep };
                start_x = last_pos.x + last_len + gap;
                start_y = last_pos.y;
            }
            // check if this fit horizontally
            let pos = match self.dim.width {
                // if the word itself is too wide for the constraint
                Measurement::Value(frame_width) if len > frame_width => {
                    return Err(LayoutError::TooWide(i))
                }
                // if the word fit following the last word in the same row
                Measurement::Infinite => {
                    Pos {
                        x: start_x,
                        y: start_y
                    }
                }
                // if the word fit following the last word in the same row
                Measurement::Value(frame_width) if start_x + len - self.origin.x <= frame_width => {
                    Pos {
                        x: start_x,
                        y: start_y
                    }
                }
                // if the word make the current row overflows
                Measurement::Value(_) => {
                    // check if this fit vertically
                    match self.dim.height {
                        // if the new row overflows the constraint
                        Measurement::Value(frame_height) if start_y + 1 - self.origin.y >= frame_height => {
                            return Err(LayoutError::TooManyWords(i))
                        }
                        // the word is now the starter of a new row
                        Measurement::Value(_) | Measurement::Infinite => {
                            Pos {
                                x: self.origin.x,
                                y: start_y + 1
                            }
                        }
                    }
                }
            };
            start_the_row = pos.y != start_y;
            if start_the_row {
                // We just wrapped: close out the previous row's used length.
                let previous_row_length = start_x - gap;
                rows_length.push(previous_row_length - self.origin.x);
                right_side = max(right_side, previous_row_length);
            }
            last_len = len;
            planning.push(pos);
        }
        // The last row is still open: record its length and the final row index.
        let bottom_line;
        {
            let last_pos = planning.last().expect("not possible");
            rows_length.push(last_pos.x + last_len - self.origin.x);
            bottom_line = last_pos.y - self.origin.y;
        }
        Ok((planning, Dim { w: right_side, h: bottom_line }, rows_length))
    }
}
#[cfg(test)]
mod test {
#[test]
fn word_overflow_frame_width() {
use super::*;
let enough_height_for_all = 1;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {<|fim▁hole|> };
let input_bucket = Bucket::new(vec!["larger"]);
let index_of_word_larger = 0;
assert_eq!(c.organize(&input_bucket), Err(LayoutError::TooWide(index_of_word_larger)));
}
#[test]
fn word_overflow_frame_height() {
use super::*;
let enough_width_for_all = 10;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {
height: Measurement::Value(1 as u16),
width: Measurement::Value(enough_width_for_all as u16)
},
align: Alignment::top_left()
};
let input_bucket = Bucket::new(vec!["fit", "stalker"]);
let index_of_word_stalker = 1;
assert_eq!(c.organize(&input_bucket), Err(LayoutError::TooManyWords(index_of_word_stalker)));
}
#[test]
fn perfect_fit() {
use super::*;
// fixed inputs
let words = vec!["first", "second", "third"];
let gap = 1;
// deduced inputs
let width = words[0].len() as u16 + gap + words[1].len() as u16;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {
height: Measurement::Value(2 as u16),
width: Measurement::Value(width as u16)
},
align: Alignment::top_left()
};
let expected_positions = vec![
Pos { x: 0, y: 0 },
Pos { x: words[0].len() as u16 + gap, y: 0 },
Pos { x: 0, y: 1 }
];
// test
let final_positions = c.organize(&Bucket::new(words)).expect("positioning failed").0;
assert_eq!(final_positions, expected_positions);
}
#[test]
fn keep_on_one_line() {
use super::*;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {
height: Measurement::Value(1 as u16),
// not relevant as long as not null
width: Measurement::Infinite
},
align: Alignment::top_left()
};
let input_bucket = Bucket::new(vec!["first", "second", "third"]);
let expected_positions = vec![Pos { x: 0, y: 0 }, Pos { x: 6, y: 0 }, Pos { x: 13, y: 0 }];
let final_positions = c.organize(&input_bucket).expect("positioning failed").0;
assert_eq!(final_positions, expected_positions);
}
#[test]
fn auto_add_rows() {
use super::*;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {
height: Measurement::Infinite,
width: Measurement::Value(6 as u16) // not relevant as long as minimal word len
},
align: Alignment::top_left()
};
let input_bucket = Bucket::new(vec!["first", "second", "third"]);
let expected_positions = vec![Pos { x: 0, y: 0 }, Pos { x: 0, y: 1 }, Pos { x: 0, y: 2 }];
let final_positions = c.organize(&input_bucket).expect("positioning failed").0;
assert_eq!(final_positions, expected_positions);
}
#[test]
fn center_content() {
use super::*;
// fixed inputs
let words = vec!["first", "second", "third"];
let gap = 1;
let offset_first_line: u16 = 2;
// deduced inputs
let width = offset_first_line + words[0].len() as u16 + gap + words[1].len() as u16 + offset_first_line;
assert!(offset_first_line * 2 < words[2].len() as u16, "pre-condition failed");
let offset_second_line: u16 = (width - words[2].len() as u16) / 2;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {
height: Measurement::Infinite,
width: Measurement::Value(width as u16)
},
align: Alignment::centered()
};
let expected_positions = vec![
Pos { x: offset_first_line, y: 0 },
Pos { x: offset_first_line + gap + words[0].len() as u16, y: 0 },
Pos { x: offset_second_line, y: 1 }
];
// test
let final_positions = c.organize(&Bucket::new(words)).expect("positioning failed").0;
assert_eq!(final_positions, expected_positions);
}
#[test]
fn opposite_align() {
use super::*;
// fixed inputs
let words = vec!["first", "second", "third"];
let gap = 1;
let offset_first_line: u16 = 2;
// deduced inputs
assert!(offset_first_line < words[2].len() as u16, "pre-condition failed");
let width = offset_first_line + words[0].len() as u16 + gap + words[1].len() as u16;
let offset_second_line: u16 = width - words[2].len() as u16;
let c = Constraint {
origin: Pos { x: 0, y: 0 },
dim: AdaptativeDim {
height: Measurement::Infinite,
width: Measurement::Value(width as u16)
},
align: Alignment::bottom_right()
};
let expected_positions = vec![
Pos { x: offset_first_line, y: 0 },
Pos { x: offset_first_line + gap + words[0].len() as u16, y: 0 },
Pos { x: offset_second_line, y: 1 }
];
// test
let final_positions = c.organize(&Bucket::new(words)).expect("positioning failed").0;
assert_eq!(final_positions, expected_positions);
}
}
//---
/// The result of `layout`: the bounding box actually occupied plus the final
/// position of each word (parallel to the bucket's word order).
#[derive(Debug)]
pub struct Layout {
    pub frame: BoundingBox,
    // TODO keep it only visible inside the crate
    pub positions: Vec<Pos> // TODO keep it only visible inside the crate
}

/// Organizes `bucket` inside `constraint` and (in debug builds) checks the
/// post-condition that the returned bounding box stays within the constraint.
pub fn layout(constraint: &Constraint, bucket: &Bucket) -> Result<Layout, LayoutError> {
    let (poses, bbox) = try!(constraint.organize(bucket));
    debug_assert!(bbox.x >= constraint.origin.x,
                  format!("post-condition failed on x ({} >= {})", bbox.x, constraint.origin.x));
    debug_assert!(bbox.y >= constraint.origin.y,
                  format!("post-condition failed on y ({} >= {})", bbox.y, constraint.origin.y));
    // Width/height bounds can only be checked when they are finite.
    match constraint.dim.width {
        Measurement::Value(w) => {
            debug_assert!(bbox.w <= w,
                          format!("post-condition failed on w ({} <= {})", bbox.w, w));
        }
        _ => {}
    }
    match constraint.dim.height {
        Measurement::Value(h) => {
            debug_assert!(bbox.h <= h,
                          format!("post-condition failed on h ({} <= {})", bbox.h, h));
        }
        _ => {}
    }
    Ok(Layout {
        frame: bbox,
        positions: poses
    })
}
//---
/// `write_iter!(&mut dst, fmt, iter_a [, iter_b])` — invokes `write!` with the
/// given format once per element (or per zipped pair of elements), stopping at
/// the first error; evaluates to the last `fmt::Result`.
#[macro_export]
macro_rules! write_iter {
    (&mut $dst:ident, $fmt:expr, $iter_a:expr) => {{
        let mut res = Ok(());
        for ref a in $iter_a.iter() {
            match write!(&mut $dst, $fmt, &a) {
                Ok(_) => (),
                Err(w) => { res = Err(w); break; }
            }
        }
        res
    }};
    (&mut $dst:ident, $fmt:expr, $iter_a:expr, $iter_b:expr) => {{
        let mut res = Ok(());
        for (ref a, ref b) in $iter_a.iter().zip($iter_b.iter()) {
            match write!(&mut $dst, $fmt, &a, &b) {
                Ok(_) => (),
                Err(w) => { res = Err(w); break; }
            }
        }
        res
    }};
}
width: Measurement::Value(5 as u16)
},
align: Alignment::top_left() |
<|file_name|>util.js<|end_file_name|><|fim▁begin|>export const ATTR_ID = 'data-referid'
export let info = {
component: {
amount: 0,
mounts: 0,
unmounts: 0
}
}
export let getId = () => Math.random().toString(36).substr(2)
export let pipe = (fn1, fn2) => function(...args) {
fn1.apply(this, args)
return fn2.apply(this, args)
}
export let createCallbackStore = name => {
let store = []
return {
name,
clear() {
while (store.length) {
store.shift()()
}
},
push(item) {
store.push(item)
},
store
}
}
export let wrapNative = (obj, method, fn) => {
let nativeMethod = obj[method]
let wrapper = function(...args) {
fn.apply(this, args)
return nativeMethod.apply(this, args)
}
obj[method] = wrapper
return () => obj[method] = nativeMethod
}
if (!Object.assign) {
Object.assign = (target, ...args) => {
args.forEach(source => {
for (let key in source) {<|fim▁hole|> }
target[key] = source[key]
}
})
return target
}
}<|fim▁end|> | if (!source.hasOwnProperty(key)) {
continue |
<|file_name|>leetcode_problems.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding=utf-8 -*-
<|fim▁hole|>import argparse
import requests
from lxml import html as lxml_html
try:
import html
except ImportError:
import HTMLParser
html = HTMLParser.HTMLParser()
try:
import cPickle as pk
except ImportError:
import pickle as pk
class LeetcodeProblems(object):
def get_problems_info(self):
leetcode_url = 'https://leetcode.com/problemset/algorithms'
res = requests.get(leetcode_url)
if not res.ok:
print('request error')
sys.exit()
cm = res.text
cmt = cm.split('tbody>')[-2]
indexs = re.findall(r'<td>(\d+)</td>', cmt)
problem_urls = ['https://leetcode.com' + url \
for url in re.findall(
r'<a href="(/problems/.+?)"', cmt)]
levels = re.findall(r"<td value='\d*'>(.+?)</td>", cmt)
tinfos = zip(indexs, levels, problem_urls)
assert (len(indexs) == len(problem_urls) == len(levels))
infos = []
for info in tinfos:
res = requests.get(info[-1])
if not res.ok:
print('request error')
sys.exit()
tree = lxml_html.fromstring(res.text)
title = tree.xpath('//meta[@property="og:title"]/@content')[0]
description = tree.xpath('//meta[@property="description"]/@content')
if not description:
description = tree.xpath('//meta[@property="og:description"]/@content')[0]
else:
description = description[0]
description = html.unescape(description.strip())
tags = tree.xpath('//div[@id="tags"]/following::a[@class="btn btn-xs btn-primary"]/text()')
infos.append(
{
'title': title,
'level': info[1],
'index': int(info[0]),
'description': description,
'tags': tags
}
)
with open('leecode_problems.pk', 'wb') as g:
pk.dump(infos, g)
return infos
def to_text(self, pm_infos):
if self.args.index:
key = 'index'
elif self.args.title:
key = 'title'
elif self.args.tag:
key = 'tags'
elif self.args.level:
key = 'level'
else:
key = 'index'
infos = sorted(pm_infos, key=lambda i: i[key])
text_template = '## {index} - {title}\n' \
'~{level}~ {tags}\n' \
'{description}\n' + '\n' * self.args.line
text = ''
for info in infos:
if self.args.rm_blank:
info['description'] = re.sub(r'[\n\r]+', r'\n', info['description'])
text += text_template.format(**info)
with open('leecode problems.txt', 'w') as g:
g.write(text)
def run(self):
if os.path.exists('leecode_problems.pk') and not self.args.redownload:
with open('leecode_problems.pk', 'rb') as f:
pm_infos = pk.load(f)
else:
pm_infos = self.get_problems_info()
print('find %s problems.' % len(pm_infos))
self.to_text(pm_infos)
def handle_args(argv):
p = argparse.ArgumentParser(description='extract all leecode problems to location')
p.add_argument('--index', action='store_true', help='sort by index')
p.add_argument('--level', action='store_true', help='sort by level')
p.add_argument('--tag', action='store_true', help='sort by tag')
p.add_argument('--title', action='store_true', help='sort by title')
p.add_argument('--rm_blank', action='store_true', help='remove blank')
p.add_argument('--line', action='store', type=int, default=10, help='blank of two problems')
p.add_argument('-r', '--redownload', action='store_true', help='redownload data')
args = p.parse_args(argv[1:])
return args
def main(argv):
args = handle_args(argv)
x = LeetcodeProblems()
x.args = args
x.run()
if __name__ == '__main__':
argv = sys.argv
main(argv)<|fim▁end|> | import sys
import re
import os |
<|file_name|>RegisteredServicesReloadDisablingBeanFactoryPostProcessor.java<|end_file_name|><|fim▁begin|>package net.unicon.cas.addons.serviceregistry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.config.BeanFactoryPostProcessor;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
/**
* <code>BeanFactoryPostProcessor</code> to remove 2 quartz beans responsible for reloading the default services registry's registered services.
* <p/>
* Useful in cases where other facilities are responsible for reloading in-memory services cache, for example on-demand reloading
* of JSON services registry, etc.
* <p/><|fim▁hole|> * This bean just needs to be declared in CAS' application context and upon bootstrap Spring will call back into it and
* 2 scheduling quartz beans dedicated for services registry reloading thread will be removed from the final application context
* effectively disabling the default reloading behavior.
*
* @author Dmitriy Kopylenko
* @author Unicon, inc.
* @since 1.8
*/
public class RegisteredServicesReloadDisablingBeanFactoryPostProcessor implements BeanFactoryPostProcessor {
private static final String JOB_DETAIL_BEAN_NAME = "serviceRegistryReloaderJobDetail";
private static final String JOB_TRIGGER_BEAN_NAME = "periodicServiceRegistryReloaderTrigger";
private static final Logger logger = LoggerFactory.getLogger(RegisteredServicesReloadDisablingBeanFactoryPostProcessor.class);
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
logger.debug("Removing [{}] bean definition from the application context...", JOB_DETAIL_BEAN_NAME);
BeanDefinitionRegistry.class.cast(beanFactory).removeBeanDefinition(JOB_DETAIL_BEAN_NAME);
logger.debug("Removing [{}] bean definition from the application context...", JOB_TRIGGER_BEAN_NAME);
BeanDefinitionRegistry.class.cast(beanFactory).removeBeanDefinition(JOB_TRIGGER_BEAN_NAME);
}
}<|fim▁end|> | |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>import pytest
from tests.functional.services.api.images import (
add_image,
delete_image_by_id,
get_image_id,
wait_for_image_to_analyze,
)
from tests.functional.services.utils.http_utils import get_api_conf
@pytest.fixture(scope="package")
def add_image_with_teardown_package_scope(request):
def _add_image_with_teardown(tag, api_conf=get_api_conf):
# add image
add_resp = add_image(tag, api_conf)
image_id = get_image_id(add_resp)
wait_for_image_to_analyze(image_id, api_conf)
# add teardown
request.addfinalizer(lambda: delete_image_by_id(image_id, api_conf))<|fim▁hole|>
return _add_image_with_teardown<|fim▁end|> |
return add_resp |
<|file_name|>match-tag.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
enum color {
rgb(isize, isize, isize),
rgba(isize, isize, isize, isize),
hsl(isize, isize, isize),
}<|fim▁hole|> match c {
color::rgb(r, _, _) => { x = r; }
color::rgba(_, _, _, a) => { x = a; }
color::hsl(_, s, _) => { x = s; }
}
return x;
}
pub fn main() {
let gray: color = color::rgb(127, 127, 127);
let clear: color = color::rgba(50, 150, 250, 0);
let red: color = color::hsl(0, 255, 255);
assert_eq!(process(gray), 127);
assert_eq!(process(clear), 0);
assert_eq!(process(red), 255);
}<|fim▁end|> |
fn process(c: color) -> isize {
let mut x: isize; |
<|file_name|>Repairpply.js<|end_file_name|><|fim▁begin|>// JavaScript Document
var flag1=true;
var flag2=true;
$(function () {
/*********************/
$.ajax({
type : 'POST',
dataType : 'json',
url : 'baseNeiName.do',
async : true,
cache : false,
error : function(request) {
bootbox.alert({
message : "请求异常",
size : 'small'
});
},
success : function(data) {
var i = 0;
for ( var item in data) {
$("#baselistid").after(
"<option value="+data[i].id+">"
+ data[i].name + "</option>");
i++;
}
}
});
/**************************/
/*########*/
$(document).on("click", "#Submit", function() {
var projectname=$("#projectname").val();<|fim▁hole|> var baselist=$("#baselist").val();
var reason=$("#reason").val();
var strmoney=/^[0-9]*$/.test(budget);
var money=budget.substring(1,0);
if(projectname==""){
bootbox.alert({
message : "请填写项目名称",
size : 'small'
});
return 0;
}
else if(name==""){
bootbox.alert({
message : "请填写报修人",
size : 'small'
});
return 0;
}
else if(address==""){
bootbox.alert({
message : "请填写具体位置",
size : 'small'
});
return 0;
}
else if(budget==""){
bootbox.alert({
message : "请填写预算金额",
size : 'small'
});
return 0;
}
else if(strmoney==false){
bootbox.alert({
message : "预算金额只能为数字",
size : 'small'
});
return 0;
}
else if(budget.length>1&&money==0){
bootbox.alert({
message : "请填写正确的预算金额格式,第一个数字不能为零",
size : 'small'
});
return 0;
}
else if(baselist=="请选择"){
bootbox.alert({
message : "请选择基地",
size : 'small'
});
return 0;
}
else if(reason==""){
bootbox.alert({
message : "请填写原因",
size : 'small'
});
return 0;
}
if (!flag1) {
bootbox.alert({
message: "上传资料仅限于rar,zip压缩包格式",
size: 'small'
});
$("#applyfile").val('');
return;
}
if (!flag2) {
bootbox.alert({
message: "上传资料大小不能大于10M",
size: 'small'
});
$("#applyfile").val('');
return;
}
/*************/
$("#applyform").submit();
/*************/
})
$('#applyfile').change(function() {
var filepath = $(this).val();
var file_size = this.files[0].size;
var size = file_size / 1024;
var extStart = filepath.lastIndexOf(".");
var ext = filepath.substring(extStart, filepath.length).toUpperCase();
if (ext != ".RAR" && ext != ".ZIP") {
bootbox.alert({
message: "上传资料仅限于rar,zip压缩包格式",
size: 'small'
});
$("#applyfile").val('');
flag1=false;
return;
}
if (size > 1024 * 10) {
bootbox.alert({
message: "上传资料大小不能大于10M",
size: 'small'
});
$("#applyfile").val('');
flag2=false;
return;
}
flag1=true;
flag2=true;
});
/*########*/
});<|fim▁end|> | var name=$("#name").val();
var address=$("#address").val();
var budget=$("#budget").val();
budget=budget.trim(); |
<|file_name|>_VF.py<|end_file_name|><|fim▁begin|>import torch
import sys
import types
class VFModule(types.ModuleType):
    """Module proxy forwarding attribute access to ``torch._C._VariableFunctions``.

    Intended to be installed over this module via ``sys.modules`` so that
    ``torch._VF.<op>`` resolves to the corresponding native variable function.
    """

    def __init__(self, name):
        super(VFModule, self).__init__(name)
        # Must be assigned eagerly: __getattr__ below reads self.vf, and
        # without this line every attribute lookup (including of `vf`
        # itself) would recurse and fail. The visible block was missing
        # this assignment entirely.
        self.vf = torch._C._VariableFunctions

    def __getattr__(self, attr):
        # Only called for names not found on the module itself; delegate
        # to the native variable-function namespace.
        return getattr(self.vf, attr)
sys.modules[__name__] = VFModule(__name__)<|fim▁end|> | self.vf = torch._C._VariableFunctions |
<|file_name|>pub-ident-fn-or-struct.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license<|fim▁hole|>// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub S (foo) bar
//~^ ERROR missing `fn` or `struct` for function or struct definition
fn main() {}<|fim▁end|> | |
<|file_name|>message_widget.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function
import os
from time import ctime
from qtpy import QtWidgets
from glue import core
from glue.utils.qt import load_ui
class MessageWidget(QtWidgets.QWidget, core.hub.HubListener):
""" This simple class displays all messages broadcast
by a hub. It is mainly intended for debugging """
def __init__(self):
QtWidgets.QWidget.__init__(self)<|fim▁hole|> self.ui.messageTable.setColumnCount(3)
labels = ['Time', 'Message', 'Sender']
self.ui.messageTable.setHorizontalHeaderLabels(labels)
    def register_to_hub(self, hub):
        """Subscribe this widget to *hub* for every broadcast message.

        Subscribing to the root ``core.message.Message`` class with a
        ``filter`` that always returns True means nothing is excluded.
        """
        # catch all messages
        hub.subscribe(self, core.message.Message,
                      handler=self.process_message,
                      filter=lambda x: True)
def process_message(self, message):
row = self.ui.messageTable.rowCount() * 0
self.ui.messageTable.insertRow(0)
tm = QtWidgets.QTableWidgetItem(ctime().split()[3])
typ = str(type(message)).split("'")[-2].split('.')[-1]
mtyp = QtWidgets.QTableWidgetItem(typ)
typ = str(type(message.sender)).split("'")[-2].split('.')[-1]
sender = QtWidgets.QTableWidgetItem(typ)
self.ui.messageTable.setItem(row, 0, tm)
self.ui.messageTable.setItem(row, 1, mtyp)
self.ui.messageTable.setItem(row, 2, sender)
self.ui.messageTable.resizeColumnsToContents()<|fim▁end|> | self.ui = load_ui('message_widget.ui', self,
directory=os.path.dirname(__file__)) |
<|file_name|>IPipelineStep.ts<|end_file_name|><|fim▁begin|>/**
* Interface for pipeline steps.
*/<|fim▁hole|>
export interface IPipelineStep {
    /**
     * Decide whether this step applies to the supplied configuration.
     * @returns true or false to state an opinion; undefined when the step
     * expresses none (caller-defined default applies — confirm in the engine).
     */
    mainCondition(uniteConfiguration: UniteConfiguration, engineVariables: EngineVariables): boolean | undefined;

    /**
     * First phase: gather/prepare state before configuration is applied.
     * @returns a numeric result code wrapped in a Promise.
     */
    initialise(logger: ILogger,
               fileSystem: IFileSystem,
               uniteConfiguration: UniteConfiguration,
               engineVariables: EngineVariables,
               mainCondition: boolean): Promise<number>;

    /**
     * Second phase: apply this step's configuration changes.
     * @returns a numeric result code wrapped in a Promise.
     */
    configure(logger: ILogger,
              fileSystem: IFileSystem,
              uniteConfiguration: UniteConfiguration,
              engineVariables: EngineVariables,
              mainCondition: boolean): Promise<number>;

    /**
     * Final phase: finish up (e.g. persist artefacts) after configuration.
     * @returns a numeric result code wrapped in a Promise.
     */
    finalise(logger: ILogger,
             fileSystem: IFileSystem,
             uniteConfiguration: UniteConfiguration,
             engineVariables: EngineVariables,
             mainCondition: boolean): Promise<number>;
}
import { ILogger } from "unitejs-framework/dist/interfaces/ILogger";
import { UniteConfiguration } from "../configuration/models/unite/uniteConfiguration";
import { EngineVariables } from "../engine/engineVariables"; |
<|file_name|>room_test.go<|end_file_name|><|fim▁begin|>package hipchat
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"os"
"reflect"
"testing"
)
// TestRoomGet verifies Room.Get issues GET /room/{id} and decodes the id,
// name, links, participants and owner fields into a Room value.
func TestRoomGet(t *testing.T) {
	setup()
	defer teardown()
	mux.HandleFunc("/room/1", func(w http.ResponseWriter, r *http.Request) {
		if m := "GET"; m != r.Method {
			t.Errorf("Request method = %v, want %v", r.Method, m)
		}
		// Canned API payload covering every field Room.Get should decode.
		fmt.Fprintf(w, `
		{
			"id":1,
			"name":"n",
			"links":{"self":"s"},
			"Participants":[
				{"Name":"n1"},
				{"Name":"n2"}
			],
			"Owner":{"Name":"n1"}
		}`)
	})
	want := &Room{
		ID:           1,
		Name:         "n",
		Links:        RoomLinks{Links: Links{Self: "s"}},
		Participants: []User{{Name: "n1"}, {Name: "n2"}},
		Owner:        User{Name: "n1"},
	}
	room, _, err := client.Room.Get("1")
	if err != nil {
		t.Fatalf("Room.Get returns an error %v", err)
	}
	if !reflect.DeepEqual(want, room) {
		t.Errorf("Room.Get returned %+v, want %+v", room, want)
	}
}
// TestRoomList verifies Room.List issues GET /room and decodes the paged
// collection wrapper (items, startIndex, maxResults, links).
func TestRoomList(t *testing.T) {
	setup()
	defer teardown()
	mux.HandleFunc("/room", func(w http.ResponseWriter, r *http.Request) {
		if m := "GET"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		fmt.Fprintf(w, `
		{
			"items": [{"id":1,"name":"n"}],
			"startIndex":1,
			"maxResults":1,
			"links":{"Self":"s"}
		}`)
	})
	want := &Rooms{Items: []Room{{ID: 1, Name: "n"}}, StartIndex: 1, MaxResults: 1, Links: PageLinks{Links: Links{Self: "s"}}}
	rooms, _, err := client.Room.List()
	if err != nil {
		t.Fatalf("Room.List returns an error %v", err)
	}
	if !reflect.DeepEqual(want, rooms) {
		t.Errorf("Room.List returned %+v, want %+v", rooms, want)
	}
}
// TestRoomNotification verifies Room.Notification POSTs the request payload
// unchanged to /room/{id}/notification.
func TestRoomNotification(t *testing.T) {
	setup()
	defer teardown()
	args := &NotificationRequest{Message: "m", MessageFormat: "text"}
	mux.HandleFunc("/room/1/notification", func(w http.ResponseWriter, r *http.Request) {
		if m := "POST"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Decode what the client actually sent and compare it to the input.
		v := new(NotificationRequest)
		json.NewDecoder(r.Body).Decode(v)
		if !reflect.DeepEqual(v, args) {
			t.Errorf("Request body %+v, want %+v", v, args)
		}
		w.WriteHeader(http.StatusNoContent)
	})
	_, err := client.Room.Notification("1", args)
	if err != nil {
		t.Fatalf("Room.Notification returns an error %v", err)
	}
}
func TestRoomShareFile(t *testing.T) {
setup()
defer teardown()
tempFile, err := ioutil.TempFile(os.TempDir(), "hipfile")
tempFile.WriteString("go gophers")
defer os.Remove(tempFile.Name())
want := "--hipfileboundary\n" +
"Content-Type: application/json; charset=UTF-8\n" +
"Content-Disposition: attachment; name=\"metadata\"\n\n" +
"{\"message\": \"Hello there\"}\n" +<|fim▁hole|> "--hipfileboundary\n" +
"Content-Type: charset=UTF-8\n" +
"Content-Transfer-Encoding: base64\n" +
"Content-Disposition: attachment; name=file; filename=hipfile\n\n" +
"Z28gZ29waGVycw==\n" +
"--hipfileboundary\n"
mux.HandleFunc("/room/1/share/file", func(w http.ResponseWriter, r *http.Request) {
if m := "POST"; m != r.Method {
t.Errorf("Request method %s, want %s", r.Method, m)
}
body, _ := ioutil.ReadAll(r.Body)
if string(body) != want {
t.Errorf("Request body \n%+v\n,want \n\n%+v", string(body), want)
}
w.WriteHeader(http.StatusNoContent)
})
args := &ShareFileRequest{Path: tempFile.Name(), Message: "Hello there", Filename: "hipfile"}
_, err = client.Room.ShareFile("1", args)
if err != nil {
t.Fatalf("Room.ShareFile returns an error %v", err)
}
}
// TestRoomCreate verifies that Room.Create POSTs the create request to
// /room and unmarshals the created room from the JSON response.
func TestRoomCreate(t *testing.T) {
	setup()
	defer teardown()
	args := &CreateRoomRequest{Name: "n", Topic: "t"}
	mux.HandleFunc("/room", func(w http.ResponseWriter, r *http.Request) {
		if m := "POST"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Decode the request body and compare it with the arguments we sent.
		v := new(CreateRoomRequest)
		json.NewDecoder(r.Body).Decode(v)
		if !reflect.DeepEqual(v, args) {
			t.Errorf("Request body %+v, want %+v", v, args)
		}
		fmt.Fprintf(w, `{"id":1,"links":{"self":"s"}}`)
	})
	want := &Room{ID: 1, Links: RoomLinks{Links: Links{Self: "s"}}}
	room, _, err := client.Room.Create(args)
	if err != nil {
		t.Fatalf("Room.Create returns an error %v", err)
	}
	if !reflect.DeepEqual(room, want) {
		t.Errorf("Room.Create returns %+v, want %+v", room, want)
	}
}
// TestRoomDelete verifies that Room.Delete issues a DELETE to /room/<id>.
func TestRoomDelete(t *testing.T) {
	setup()
	defer teardown()
	mux.HandleFunc("/room/1", func(w http.ResponseWriter, r *http.Request) {
		const wantMethod = "DELETE"
		if r.Method != wantMethod {
			t.Errorf("Request method %s, want %s", r.Method, wantMethod)
		}
	})
	if _, err := client.Room.Delete("1"); err != nil {
		t.Fatalf("Room.Delete returns an error %v", err)
	}
}
// TestRoomUpdate verifies that Room.Update PUTs the update request to
// /room/<id>.
func TestRoomUpdate(t *testing.T) {
	setup()
	defer teardown()
	args := &UpdateRoomRequest{Name: "n", Topic: "t"}
	mux.HandleFunc("/room/1", func(w http.ResponseWriter, r *http.Request) {
		if m := "PUT"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Decode the request body and compare it with the arguments we sent.
		v := new(UpdateRoomRequest)
		json.NewDecoder(r.Body).Decode(v)
		if !reflect.DeepEqual(v, args) {
			t.Errorf("Request body %+v, want %+v", v, args)
		}
	})
	_, err := client.Room.Update("1", args)
	if err != nil {
		t.Fatalf("Room.Update returns an error %v", err)
	}
}
// TestRoomHistory verifies that Room.History GETs /room/<id>/history and
// unmarshals the paged message list.
func TestRoomHistory(t *testing.T) {
	setup()
	defer teardown()
	args := &HistoryRequest{}
	mux.HandleFunc("/room/1/history", func(w http.ResponseWriter, r *http.Request) {
		if m := "GET"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Canned API response containing a single notification message.
		fmt.Fprintf(w, `
{
"items": [
{
"date": "2014-11-23T21:23:49.807578+00:00",
"from": "Test Testerson",
"id": "f058e668-c9c0-4cd5-9ca5-e2c42b06f3ed",
"mentions": [],
"message": "Hey there!",
"message_format": "html",
"type": "notification"
}
],
"links": {
"self": "https://api.hipchat.com/v2/room/1/history"
},
"maxResults": 100,
"startIndex": 0
}`)
	})
	want := &History{Items: []Message{{Date: "2014-11-23T21:23:49.807578+00:00", From: "Test Testerson", ID: "f058e668-c9c0-4cd5-9ca5-e2c42b06f3ed", Mentions: []User{}, Message: "Hey there!", MessageFormat: "html", Type: "notification"}}, StartIndex: 0, MaxResults: 100, Links: PageLinks{Links: Links{Self: "https://api.hipchat.com/v2/room/1/history"}}}
	hist, _, err := client.Room.History("1", args)
	if err != nil {
		t.Fatalf("Room.History returns an error %v", err)
	}
	if !reflect.DeepEqual(want, hist) {
		t.Errorf("Room.History returned %+v, want %+v", hist, want)
	}
}
// TestRoomLatest verifies that Room.Latest GETs /room/<id>/history/latest
// and unmarshals the message list (no startIndex in this endpoint).
func TestRoomLatest(t *testing.T) {
	setup()
	defer teardown()
	args := &LatestHistoryRequest{}
	mux.HandleFunc("/room/1/history/latest", func(w http.ResponseWriter, r *http.Request) {
		if m := "GET"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Canned API response containing a single notification message.
		fmt.Fprintf(w, `
{
"items": [
{
"date": "2014-11-23T21:23:49.807578+00:00",
"from": "Test Testerson",
"id": "f058e668-c9c0-4cd5-9ca5-e2c42b06f3ed",
"mentions": [],
"message": "Hey there!",
"message_format": "html",
"type": "notification"
}
],
"links": {
"self": "https://api.hipchat.com/v2/room/1/history/latest"
},
"maxResults": 100
}`)
	})
	want := &History{Items: []Message{{Date: "2014-11-23T21:23:49.807578+00:00", From: "Test Testerson", ID: "f058e668-c9c0-4cd5-9ca5-e2c42b06f3ed", Mentions: []User{}, Message: "Hey there!", MessageFormat: "html", Type: "notification"}}, MaxResults: 100, Links: PageLinks{Links: Links{Self: "https://api.hipchat.com/v2/room/1/history/latest"}}}
	hist, _, err := client.Room.Latest("1", args)
	if err != nil {
		t.Fatalf("Room.Latest returns an error %v", err)
	}
	if !reflect.DeepEqual(want, hist) {
		t.Errorf("Room.Latest returned %+v, want %+v", hist, want)
	}
}
// TestSetTopic verifies that Room.SetTopic PUTs the topic to
// /room/<id>/topic.
func TestSetTopic(t *testing.T) {
	setup()
	defer teardown()
	args := &SetTopicRequest{Topic: "t"}
	mux.HandleFunc("/room/1/topic", func(w http.ResponseWriter, r *http.Request) {
		if m := "PUT"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Decode the request body and compare it with the arguments we sent.
		v := new(SetTopicRequest)
		json.NewDecoder(r.Body).Decode(v)
		if !reflect.DeepEqual(v, args) {
			t.Errorf("Request body %+v, want %+v", v, args)
		}
	})
	_, err := client.Room.SetTopic("1", "t")
	if err != nil {
		t.Fatalf("Room.SetTopic returns an error %v", err)
	}
}
// TestInvite verifies that Room.Invite POSTs the invite reason to
// /room/<id>/invite/<user>.
func TestInvite(t *testing.T) {
	setup()
	defer teardown()
	args := &InviteRequest{Reason: "r"}
	mux.HandleFunc("/room/1/invite/user", func(w http.ResponseWriter, r *http.Request) {
		if m := "POST"; m != r.Method {
			t.Errorf("Request method %s, want %s", r.Method, m)
		}
		// Decode the request body and compare it with the arguments we sent.
		v := new(InviteRequest)
		json.NewDecoder(r.Body).Decode(v)
		if !reflect.DeepEqual(v, args) {
			t.Errorf("Request body %+v, want %+v", v, args)
		}
	})
	_, err := client.Room.Invite("1", "user", "r")
	if err != nil {
		t.Fatalf("Room.Invite returns an error %v", err)
	}
}
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may<|fim▁hole|># not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
try:
from oslo_utils import encodeutils
except ImportError:
from oslo.utils import encodeutils
import six
from .._i18n import _
from . import exceptions
from .. import uuidutils
def find_resource(manager, name_or_id, **find_args):
    """Look for resource in a given manager.

    Used as a helper for the _find_* methods.

    Tries a sequence of lookups, from cheapest to most expensive:
    integer id, UUID, plain string id (if the manager allows it),
    human_id, and finally the resource's name attribute.

    Example:

    .. code-block:: python

        def _find_hypervisor(cs, hypervisor):
            #Get a hypervisor by name or ID.
            return cliutils.find_resource(cs.hypervisors, hypervisor)

    :param manager: API manager object with ``get``/``find`` methods.
    :param name_or_id: identifier to look up (id, UUID, human_id or name).
    :param find_args: extra filters forwarded to ``manager.find``.
    :raises exceptions.CommandError: if nothing (or more than one
        resource) matches.
    """
    # first try to get entity as integer id
    try:
        return manager.get(int(name_or_id))
    except (TypeError, ValueError, exceptions.NotFound):
        pass
    # now try to get entity as uuid
    try:
        # safe_encode/safe_decode normalize the identifier to the native
        # str type of the running Python before the UUID check.
        if six.PY2:
            tmp_id = encodeutils.safe_encode(name_or_id)
        else:
            tmp_id = encodeutils.safe_decode(name_or_id)
        if uuidutils.is_uuid_like(tmp_id):
            return manager.get(tmp_id)
    except (TypeError, ValueError, exceptions.NotFound):
        pass
    # for str id which is not uuid
    if getattr(manager, 'is_alphanum_id_allowed', False):
        try:
            return manager.get(name_or_id)
        except exceptions.NotFound:
            pass
    # The outer try catches NoUniqueMatch raised by either find() call.
    try:
        try:
            return manager.find(human_id=name_or_id, **find_args)
        except exceptions.NotFound:
            pass
        # finally try to find entity by name
        try:
            # NAME_ATTR lets a resource class override which attribute
            # holds its display name; default is 'name'.
            resource = getattr(manager, 'resource_class', None)
            name_attr = resource.NAME_ATTR if resource else 'name'
            kwargs = {name_attr: name_or_id}
            kwargs.update(find_args)
            return manager.find(**kwargs)
        except exceptions.NotFound:
            msg = _("No %(name)s with a name or "
                    "ID of '%(name_or_id)s' exists.") % \
                {
                    "name": manager.resource_class.__name__.lower(),
                    "name_or_id": name_or_id
                }
            raise exceptions.CommandError(msg)
    except exceptions.NoUniqueMatch:
        msg = _("Multiple %(name)s matches found for "
                "'%(name_or_id)s', use an ID to be more specific.") % \
            {
                "name": manager.resource_class.__name__.lower(),
                "name_or_id": name_or_id
            }
        raise exceptions.CommandError(msg)
<|file_name|>xmlToJson.js<|end_file_name|><|fim▁begin|>// Changes XML to JSON
// Public entry point: convert an XML DOM node into a plain JS object.
exports.xml2json = function(xml){
    return xmlToJson(xml);
};
<|fim▁hole|> var obj = {};
if (xml.nodeType == 1) { // element
// do attributes
if (xml.attributes.length > 0) {
obj["@attributes"] = {};
for (var j = 0; j < xml.attributes.length; j++) {
var attribute = xml.attributes.item(j);
obj["@attributes"][attribute.nodeName] = attribute.nodeValue;
}
}
} else if (xml.nodeType == 3) { // text
obj = xml.nodeValue;
}
// do children
if (xml.hasChildNodes()) {
for(var i = 0; i < xml.childNodes.length; i++) {
var item = xml.childNodes.item(i);
var nodeName = item.nodeName;
if (typeof(obj[nodeName]) == "undefined") {
obj[nodeName] = xmlToJson(item);
} else {
if (typeof(obj[nodeName].push) == "undefined") {
var old = obj[nodeName];
obj[nodeName] = [];
obj[nodeName].push(old);
}
obj[nodeName].push(xmlToJson(item));
}
}
}
return obj;
};<|fim▁end|> | function xmlToJson(xml) {
// Create the return object |
<|file_name|>cache_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""API Request cache tests."""
#
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id$'<|fim▁hole|>
from tests import _cache_dir
from tests.aspects import unittest, TestCase
class RequestCacheTests(TestCase):

    """Validate cache entries."""

    # These tests only inspect on-disk cache entries; no network access.
    net = False

    def _check_cache_entry(self, entry):
        """Assert validity of the cache entry."""
        self.assertIsInstance(entry.site, BaseSite)
        self.assertIsInstance(entry.site._loginstatus, int)
        self.assertIsInstance(entry.site._username, list)
        # A logged-in site (loginstatus >= 1) must record a username.
        if entry.site._loginstatus >= 1:
            self.assertIsNotNone(entry.site._username[0])
        self.assertIsInstance(entry._params, dict)
        self.assertIsNotNone(entry._params)
        # TODO: more tests on entry._params, and possibly fixes needed
        # to make it closely replicate the original object.

    def test_cache(self):
        """Test the apicache by doing _check_cache_entry over each entry."""
        cache.process_entries(_cache_dir, self._check_cache_entry)
# Allow running this test module directly as a script.
if __name__ == '__main__':
    unittest.main()
from pywikibot.site import BaseSite
import scripts.maintenance.cache as cache |
<|file_name|>snippet.py<|end_file_name|><|fim▁begin|>import vim
def func_header_snippet(row):
cmt = "//!"
cb = vim.current.buffer
start = row
while start >= 0:
line = cb[start-1].strip()
if not line.startswith(cmt):
break
start -= 1
print("HDR")
def select_snippet(line):
    """Return the snippet handler matching *line*, or None if none applies."""
    stripped = line.strip()
    return func_header_snippet if stripped.startswith("//!") else None
def main():
    """Dispatch a snippet handler for the current cursor line."""
    row, col = vim.current.window.cursor
    # Cursor rows are 1-based; buffer indexing is 0-based.
    row -= 1
    cline = vim.current.buffer[row]
    func = select_snippet(cline)
    if func:
        func(row)

#! @brief
#! @details
main()
<|file_name|>p200_048.py<|end_file_name|><|fim▁begin|>'''
mode | desc
r 또는 rt | 텍스트 모드로 읽기
w 또는 wt | 텍스트 모드로 쓰기
a 또는 at | 텍스트 모드로 파일 마지막에 추가하기
rb | 바이너리 모드로 읽기
wb | 바이너리 모드로 쓰기
ab | 바이너리 모드로 파일 마지막에 추가하기
<|fim▁hole|>
f.write("abcd")
f.close()
r = open("./py200_sample.txt", "r")
print("-" * 60)
print(r.readline())
r.close()<|fim▁end|> | '''
f = open("./py200_sample.txt", "w") |
<|file_name|>testParser.py<|end_file_name|><|fim▁begin|># testParser.py
# -------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to
# http://inst.eecs.berkeley.edu/~cs188/pacman/pacman.html
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# ([email protected]) and Dan Klein ([email protected]).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel ([email protected]).
import re
import sys
class TestParser(object):
    """Parser for autograder test-case files.

    A test file is a sequence of ``key: "value"`` one-liners and
    ``key: \"\"\" ... \"\"\"`` multi-line properties; '#' starts a comment.
    """

    def __init__(self, path):
        # save the path to the test file
        self.path = path

    def removeComments(self, rawlines):
        """Return the lines joined by '\\n' with '#' comments stripped."""
        # remove any portion of a line following a '#' symbol
        fixed_lines = []
        for l in rawlines:
            idx = l.find('#')
            if idx == -1:
                fixed_lines.append(l)
            else:
                fixed_lines.append(l[0:idx])
        return '\n'.join(fixed_lines)

    def parse(self):
        """Parse the test file into a dict of properties.

        Also records '__raw_lines__', 'path' and an '__emit__' list that
        remembers the original layout so emitTestDict can round-trip it.
        Exits the process on a malformed line.
        """
        # read in the test case and remove comments
        test = {}
        with open(self.path) as handle:
            raw_lines = handle.read().split('\n')
        test_text = self.removeComments(raw_lines)
        test['__raw_lines__'] = raw_lines
        test['path'] = self.path
        test['__emit__'] = []
        lines = test_text.split('\n')
        i = 0
        # read a property in each loop cycle
        while(i < len(lines)):
            # skip blank lines
            if re.match(r'\A\s*\Z', lines[i]):
                test['__emit__'].append(("raw", raw_lines[i]))
                i += 1
                continue
            m = re.match(r'\A([^"]*?):\s*"([^"]*)"\s*\Z', lines[i])
            if m:
                test[m.group(1)] = m.group(2)
                test['__emit__'].append(("oneline", m.group(1)))
                i += 1
                continue
            m = re.match(r'\A([^"]*?):\s*"""\s*\Z', lines[i])
            if m:
                # multi-line value: gather raw lines until the closing """
                msg = []
                i += 1
                while(not re.match(r'\A\s*"""\s*\Z', lines[i])):
                    msg.append(raw_lines[i])
                    i += 1
                test[m.group(1)] = '\n'.join(msg)
                test['__emit__'].append(("multiline", m.group(1)))
                i += 1
                continue
            # print() with a single parenthesized argument works on both
            # Python 2 and Python 3 (the original used a py2-only statement).
            print('error parsing test file: %s' % self.path)
            sys.exit(1)
        return test
def emitTestDict(testDict, handle):
    """Serialize a parsed test dict back to its on-disk format.

    Replays the '__emit__' layout log recorded by TestParser.parse,
    writing each property to *handle* in its original style.
    """
    for kind, data in testDict['__emit__']:
        if kind == "raw":
            line = data + "\n"
        elif kind == "oneline":
            line = '%s: "%s"\n' % (data, testDict[data])
        elif kind == "multiline":
            line = '%s: """\n%s\n"""\n' % (data, testDict[data])
        else:
            raise Exception("Bad __emit__")
        handle.write(line)
print 'error parsing test file: %s' % self.path |
<|file_name|>manticore_protocol_cerberus_DeviceId__resp_from_wire.rs<|end_file_name|><|fim▁begin|>// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
// !! DO NOT EDIT !!
// To regenerate this file, run `fuzz/generate_proto_tests.py`.
#![no_main]
#![allow(non_snake_case)]
use libfuzzer_sys::fuzz_target;
use manticore::mem::BumpArena;
use manticore::protocol::Command;
use manticore::protocol::wire::FromWire;
use manticore::protocol::cerberus::DeviceId as C;
// Fuzz entry point: attempt to decode arbitrary bytes as a cerberus
// DeviceId response; decoding must not panic, the result is discarded.
fuzz_target!(|data: &[u8]| {
    // Scratch arena sized to the input; decoded fields borrow from it.
    let mut arena = vec![0; data.len()];
    let arena = BumpArena::new(&mut arena);
    let mut data = data;
    let _ = <C as Command<'_>>::Resp::from_wire(&mut data, &arena);
<|file_name|>stream_component.js<|end_file_name|><|fim▁begin|>//copy codes from d3.js, add 4 functions: tickAttr, tickTextAttr, minorTickAttr and domainAttr;
//axis() changes, need a raphael paper object param, return raphael set object.
//examples in ../examples/axis/ to know the usage.
//a basic part for other data visualization format
/*global d3*/
/*!
* Axis兼容定义
*/
;(function (name, definition) {
if (typeof define === 'function') { // Module
define(definition);
} else { // Assign to common namespaces or simply the global object (window)
this[name] = definition(function (id) { return this[id];});
}
})('Axis', function (require) {
/**
* function from d3, get scaleRange of an ordinal scale
* @param {Array} domain ordinal scale's range
*/
function d3_scaleExtent(domain) {
var start = domain[0], stop = domain[domain.length - 1];
return start < stop ? [start, stop] : [stop, start];
}
/**
* function from d3, get scaleRange of a scale
*/
function d3_scaleRange(scale) {
return scale.rangeExtent ? scale.rangeExtent() : d3_scaleExtent(scale.range());
}
/**
* function from d3, get subticks
* @param scale, scale
* @param ticks, major ticks of scale
* @param m, number of subdivide
*/
function d3_svg_axisSubdivide(scale, ticks, m) {
var subticks = [];
if (m && ticks.length > 1) {
var extent = d3_scaleExtent(scale.domain()),
i = -1,
n = ticks.length,
d = (ticks[1] - ticks[0]) / ++m,
j,
v;
while (++i < n) {
for (j = m; --j > 0;) {
if ((v = +ticks[i] - j * d) >= extent[0]) {
subticks.push(v);
}
}
}
for (--i, j = 0; ++j < m && (v = +ticks[i] + j * d) < extent[1];) {
subticks.push(v);
}
}
return subticks;
}
// Axis factory in the d3.svg.axis style: returns a configurable `axis`
// function that, given a Raphael paper, draws ticks/labels/domain line
// and returns them as a Raphael set.
var Axis = function () {
    // Closure-held configuration, mutated through the accessors below.
    var scale = d3.scale.linear(),
        orient = "bottom",
        tickMajorSize = 6,
        tickMinorSize = 6,
        tickEndSize = 6,
        tickPadding = 3,
        tickArguments_ = [10],
        tickFormat_,
        tickSubdivide = 0,
        tickAttr_ = {},
        tickTextAttr_ = {},
        minorTickAttr_ = {},
        domainAttr_ = {};
    /**
     * @param paper: raphael's paper object.
     * @return axisSet: raphael's set object.
     */
    function axis(paper) {
        // Ticks for quantitative scale, or domain values for ordinal scale.
        var ticks = scale.ticks ? scale.ticks.apply(scale, tickArguments_) : scale.domain(),
            tickFormat = tickFormat_ === undefined ?
                (scale.tickFormat ?
                    scale.tickFormat.apply(scale, tickArguments_)
                    : String)
                : tickFormat_;
        var subticks = d3_svg_axisSubdivide(scale, ticks, tickSubdivide);
        var range = d3_scaleRange(scale);
        var axisSet = paper.set();
        // One drawing branch per orientation; each draws minor ticks,
        // major ticks with labels, then the domain (baseline) path.
        // Ordinal scales (no scale.ticks) are centered via rangeBand()/2.
        switch (orient) {
        case "bottom":
            subticks.forEach(function (d, i, arr) {
                var tickX = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + tickX + "," + tickMinorSize + "V0")
                    .attr(minorTickAttr_));
            });
            ticks.forEach(function (d, i, arr) {
                var tickX = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + tickX + "," + tickMajorSize + "V0")
                    .attr(tickAttr_));
                axisSet.push(paper
                    .text(tickX, Math.max(tickMajorSize, 0) + tickPadding + 2,
                        typeof tickFormat === "function" ? tickFormat(d) : tickFormat)
                    .attr({"text-anchor": "middle"})
                    .attr(tickTextAttr_));
            });
            axisSet.push(paper
                .path("M" + range[0] + "," + tickEndSize + "V0H" + range[1] + "V" + tickEndSize)
                .attr(domainAttr_));
            break;
        case "top":
            subticks.forEach(function (d, i, arr) {
                var tickX = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + tickX + "," + -tickMinorSize + "V0")
                    .attr(minorTickAttr_));
            });
            ticks.forEach(function (d, i, arr) {
                var tickX = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + tickX + "," + -tickMajorSize + "V0")
                    .attr(tickAttr_));
                axisSet.push(paper
                    .text(tickX, -(Math.max(tickMajorSize, 0) + tickPadding + 2),
                        typeof tickFormat === "function" ? tickFormat(d) : tickFormat)
                    .attr({"text-anchor": "middle"})
                    .attr(tickTextAttr_));
            });
            axisSet.push(paper
                .path("M" + range[0] + "," + -tickEndSize + "V0H" + range[1] + "V" + -tickEndSize)
                .attr(domainAttr_));
            break;
        case "left":
            subticks.forEach(function (d, i, arr) {
                var tickY = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + -tickMinorSize + "," + tickY + "H0")
                    .attr(minorTickAttr_));
            });
            ticks.forEach(function (d, i, arr) {
                var tickY = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + -tickMajorSize + "," + tickY + "H0")
                    .attr(tickAttr_));
                axisSet.push(paper
                    .text(-(Math.max(tickMajorSize, 0) + tickPadding), tickY,
                        typeof tickFormat === "function" ? tickFormat(d) : tickFormat)
                    .attr({"text-anchor": "end"})
                    .attr(tickTextAttr_));
            });
            axisSet.push(paper
                .path("M" + -tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + -tickEndSize)
                .attr(domainAttr_));
            break;
        case "right":
            subticks.forEach(function (d, i, arr) {
                var tickY = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + tickMinorSize + "," + tickY + "H0")
                    .attr(minorTickAttr_));
            });
            ticks.forEach(function (d, i, arr) {
                var tickY = scale.ticks ? scale(d) : scale(d) + scale.rangeBand() / 2;
                axisSet.push(paper
                    .path("M" + tickMajorSize + "," + tickY + "H0")
                    .attr(tickAttr_));
                axisSet.push(paper
                    .text(Math.max(tickMajorSize, 0) + tickPadding, tickY,
                        typeof tickFormat === "function" ? tickFormat(d) : tickFormat)
                    .attr({"text-anchor": "start"})
                    .attr(tickTextAttr_));
            });
            axisSet.push(paper
                .path("M" + tickEndSize + "," + range[0] + "H0V" + range[1] + "H" + tickEndSize)
                .attr(domainAttr_));
            break;
        }
        return axisSet;
    }
    /**
     * get or set axis' scale.
     */
    axis.scale = function (x) {
        if (!arguments.length) {
            return scale;
        }
        scale = x;
        return axis;
    };
    /**
     * get or set axis' orinet: "bottom", "top", "left", "right", default orient is bottom.
     */
    axis.orient = function (x) {
        if (!arguments.length) {
            return orient;
        }
        orient = x;
        return axis;
    };
    /**
     * get or set axis' ticks number.
     */
    axis.ticks = function () {
        if (!arguments.length) {
            return tickArguments_;
        }
        tickArguments_ = arguments;
        return axis;
    };
    /**
     * get or set axis' ticks format function, it's a function change format style.
     * from one string format to another string format.
     */
    axis.tickFormat = function (x) {
        if (!arguments.length) {
            return tickFormat_;
        }
        tickFormat_ = x;
        return axis;
    };
    /**
     * get or set axis' tick size(length of tick line, unit: px).
     * @param arguments.length === 0, get axis' major tick size.
     * @param arguments.length === 1, set axis' all tick sizes as x.
     * @param arguments.length === 2, get axis' major tick size as x, minor and end size as y.
     * @param arguments.length === 3, get axis' major tick size as x, minor size as y, end size as z.
     */
    axis.tickSize = function (x, y, z) {
        if (!arguments.length) {
            return tickMajorSize;
        }
        var n = arguments.length - 1;
        tickMajorSize = +x;
        tickMinorSize = n > 1 ? +y : tickMajorSize;
        tickEndSize = n > 0 ? +arguments[n] : tickMajorSize;
        return axis;
    };
    /**
     * get or set axis' tick padding(the distance between tick text and axis).
     * @param x is a number, unit is px;
     */
    axis.tickPadding = function (x) {
        if (!arguments.length) {
            return tickPadding;
        }
        tickPadding = +x;
        return axis;
    };
    /**
     * get or set axis' sub tick divide number(divide number between two major ticks).
     */
    axis.tickSubdivide = function (x) {
        if (!arguments.length) {
            return tickSubdivide;
        }
        tickSubdivide = +x;
        return axis;
    };
    /**
     * get or set axis' tick attribute(Raphael format).
     */
    axis.tickAttr = function (x) {
        if (!arguments.length) {
            return tickAttr_;
        }
        tickAttr_ = x;
        return axis;
    };
    /**
     * get or set axis' tick text attribute(Raphael format).
     */
    axis.tickTextAttr = function (x) {
        if (!arguments.length) {
            return tickTextAttr_;
        }
        tickTextAttr_ = x;
        return axis;
    };
    /**
     * get or set axis' minor tick attribute(Raphael format).
     */
    axis.minorTickAttr = function (x) {
        if (!arguments.length) {
            return minorTickAttr_;
        }
        minorTickAttr_ = x;
        return axis;
    };
    /**
     * get or set axis' domain(axis line) attribute(Raphael format).
     */
    axis.domainAttr = function (x) {
        if (!arguments.length) {
            return domainAttr_;
        }
        domainAttr_ = x;
        return axis;
    };
    return axis;
};
return Axis;
});
/*global Raphael, d3, $, define, _ */
/*!
* Stream的兼容定义
*/
;(function (name, definition) {
if (typeof define === 'function') { // Module
define(definition);
} else { // Assign to common namespaces or simply the global object (window)
this[name] = definition(function (id) { return this[id];});
}
})('StreamAxis', function (require) {
var DataV = require('DataV');
DataV.Axis = require('Axis');
// Stream chart time-axis widget built on DataV.Chart.
var Axis = DataV.extend(DataV.Chart, {
    initialize: function (container) {
        this.node = $(container);
        /**
         * Time dimension
         */
        this.dimension.x = {
            type: "string",
            required: true,
            index: 0
        };
    }
});
Axis.prototype.setSource = function (source, map) {
    // Group rows by the x (time) dimension and derive the tick domain
    // and index range from the distinct x values.
    var mapping = this.map(map);
    var grouped = _.groupBy(source, mapping.x);
    this.grouped = grouped;
    this.axis = _.keys(grouped);
    this.range = [0, this.axis.length - 1];
};
Axis.prototype.init = function () {
    var conf = this.defaults;
    // Raphael canvas for the axis strip below the chart.
    this.paper = new Raphael(this.node[0], conf.legendBesidesWidth, conf.axisHeight);
    this.node.css({
        "margin-top": "0px",
        "border-top": "1px solid #ddd",
        "height": conf.axisHeight + "px"
    });
};
Axis.prototype.render = function () {
    this.init();
    this.clear();
    //all date strings' format are same, string length are same
    var conf = this.defaults,
        that = this;
    // Build the rounded "bubble" path displayed over the hovered tick;
    // its width adapts to the bounding box of the label element El.
    var getPopPath = function (El) {
        //down pop
        var x = 0,
            y = 0,
            size = 4,
            cw = 23,
            bb = {height: 8};
        if (El) {
            bb = El.getBBox();
            bb.height *= 0.6;
            cw = bb.width / 2 - size;
        }
        return [
            'M', x, y,
            'l', size, size, cw, 0,
            'a', size, size, 0, 0, 1, size, size,
            'l', 0, bb.height,
            'a', size, size, 0, 0, 1, -size, size,
            'l', -(size * 2 + cw * 2), 0,
            'a', size, size, 0, 0, 1, -size, -size,
            'l', 0, -bb.height,
            'a', size, size, 0, 0, 1, size, -size,
            'l', cw, 0,
            'z'
        ].join(',');
    };
    var left = conf.percentageWidth,
        right = conf.legendBesidesWidth - conf.percentageWidth;
    // Measure a sample label to decide how many ticks fit without overlap.
    var tempWord = this.paper.text(0, 0, this.axis[0]);
    var tickNumber = Math.floor((right - left) / tempWord.getBBox().width / 2) + 1;
    tempWord.remove();
    this.dateScale = d3.scale.linear()
        .domain([0, this.axis.length - 1])
        .range([left, right]);
    // Draw the axis; tick indices are mapped back to date strings.
    DataV.Axis().scale(this.dateScale)
        .ticks(tickNumber)
        .tickSize(6, 3, 3)
        .tickAttr({"stroke": "none"})
        .minorTickAttr({"stroke": "none"})
        .domainAttr({"stroke": "none"})
        .tickFormat(function (d) {
            return that.axis[d] || "";
        })(this.paper);
    // Hover label and bubble start hidden; the bubble is parked far
    // off-canvas (x = -10000) until refreshTab positions it.
    this.axisPopText = this.paper.text(0, 11, this.axis[0])
        .attr({
            "text-anchor": "middle",
            "fill": "#fff",
            "transform": "t" + left + ",0"
        }).hide();
    this.axisPopBubble = this.paper.path(getPopPath(this.axisPopText))
        .attr({
            "fill": "#000",
            "transform": "t" + (-10000) + ",0"
        }).toBack()
        .hide();
};
// Hide the hover label and its bubble.
Axis.prototype.hideTab = function () {
    this.axisPopText.hide();
    this.axisPopBubble.hide();
};
// Show the hover label and its bubble.
Axis.prototype.showTab = function () {
    this.axisPopText.show();
    this.axisPopBubble.show();
};
Axis.prototype.refreshTab = function (index) {
    // Move the hover bubble/label to the x position of the given tick
    // and update the label text to that tick's date string.
    var conf = this.defaults;
    var offset = conf.chartWidth * index / (this.axis.length - 1) + conf.percentageWidth;
    var translation = "t" + offset + ",0";
    this.axisPopText
        .attr({"text": this.axis[index + this.range[0]]})
        .transform(translation);
    this.axisPopBubble.transform(translation);
};
// Remove everything drawn on the axis paper.
Axis.prototype.clear = function () {
    this.paper.clear();
};
return Axis;
});
/*global Raphael, $, define, _ */
/*!
* StreamLegend的兼容定义
*/
;(function (name, definition) {
if (typeof define === 'function') { // Module
define(definition);
} else { // Assign to common namespaces or simply the global object (window)
this[name] = definition(function (id) { return this[id];});
}
})('Legend', function (require) {
var DataV = require('DataV');
// Left-hand series legend for the stream chart.
var Legend = DataV.extend(DataV.Chart, {
    initialize: function (container) {
        this.legendIndent = 20;
        this.node = $(container);
        /**
         * Type dimension (series name)
         */
        this.dimension.type = {
            type: "string",
            required: true,
            index: 1
        };
        /**
         * Time dimension
         */
        this.dimension.x = {
            type: "string",
            required: true,
            index: 0
        };
        /**
         * Value dimension
         */
        this.dimension.value = {
            type: "number",
            required: true,
            index: 2
        };
        // Row styles applied on hover in/out.
        this.defaults.highlightStyle = {"backgroundColor": "#dddddd"};
        this.defaults.lowlightStyle = {"backgroundColor": "white"};
        // Identity label formatter; may be overridden via defaults.formatLabel.
        this.formatLabel = function (text) {
            return text;
        };
        this.init();
    }
});
Legend.prototype.init = function () {
    var conf = this.defaults;
    // Container for the legend list; width leaves room for the indent.
    this.legend = $("<div></div>");
    this.legend.css({
        "overflow": "hidden",
        "padding": "10px 0 10px 0",
        "width": conf.leftLegendWidth - this.legendIndent + "px"
    });
    this.node.append(this.legend);
    this.initEvents();
};
Legend.prototype.setSource = function (source, map) {
    // Group rows by series type, then order the series by descending
    // total value so the most significant series are listed first.
    map = this.map(map);
    var groupedByType = _.groupBy(source, map.type);
    var sorted = _.sortBy(groupedByType, function (group) {
        return -DataV.sum(group, map.value);
    });
    this.list = sorted.map(function (d) { return d[0][map.type]; });
};
Legend.prototype.initEvents = function () {
    var that = this;
    // Mirror chart hover events onto the legend rows, and re-render when
    // the drill-down level changes.
    that.on('hoverIn', function (index) {
        that.highlight(index);
    }).on('hoverOut', function (index) {
        that.lowlight(index);
    }).on('level_changed', function (start, end, needMore) {
        that.render(start, end, needMore);
    });
};
Legend.prototype.render = function (level) {
    var conf = this.defaults;
    conf.level = level || 0;
    var that = this;
    this.clear();
    this.legends = [];
    // Hover handlers both highlight the legend row and notify listeners
    // (e.g. the chart) via fire().
    var colorFunc = conf.colorFunc,
        hoverIn = function (e) {
            var index = e.data.index;
            that.fire('hoverIn', index);
            this.highlight(index);
        },
        hoverOut = function (e) {
            var index = e.data.index;
            that.fire('hoverOut', index);
            this.lowlight(index);
        };
    var ul = $("<ul></ul>").css({
        "margin": "0 0 0 10px",
        "paddingLeft": 0
    });
    // When "more" mode is on, collapse less significant series into a
    // single "more" entry for the current level.
    var selected;
    if (!conf.more) {
        selected = this.list.slice(0);
    } else {
        selected = DataV.more(this.list, conf.level, conf.max, function () {
            return conf.moreLabel;
        });
    }
    var formatLabel = conf.formatLabel || this.formatLabel;
    for (var i = 0, l = selected.length; i < l; i++) {
        var color = colorFunc(i);
        var li = $('<li style="color: ' + color + '"><span style="color: black" title="' + selected[i] + '">' + formatLabel(selected[i]) + '</span></li>');
        li.mouseenter({"index": i}, $.proxy(hoverIn, this)).mouseleave({"index": i}, $.proxy(hoverOut, this));
        ul.append(li);
        this.legends.push(li);
    }
    ul.find("li").css({
        "list-style-type": "square",
        "list-style-position": "inside",
        "white-space": "nowrap",
        "padding-left": 5
    });
    this.legend.append(ul);
};
Legend.prototype.highlight = function (index) {
    // Ignore calls that omit the index (some hover events carry none).
    if (typeof index === 'undefined') {
        return;
    }
    this.legends[index].css(this.defaults.highlightStyle);
};
Legend.prototype.lowlight = function (index) {
    // Ignore calls that omit the index (some hover events carry none).
    if (typeof index === 'undefined') {
        return;
    }
    this.legends[index].css(this.defaults.lowlightStyle);
};
// Empty the legend container before a re-render.
Legend.prototype.clear = function () {
    this.legend.empty();
};
// Horizontal legend rendered above the chart with Raphael circles.
var TopLegend = DataV.extend(DataV.Widget, {
    initialize: function (container) {
        this.node = $(container);
        // Circle radii: r0 at rest, r1 while highlighted.
        this.defaults.r0 = 5;
        this.defaults.r1 = 7;
    }
});
TopLegend.prototype.init = function () {
    var conf = this.owner.defaults;
    // Full-width strip above the chart hosting the Raphael paper.
    this.legend = $("<div></div>").css({
        "width": conf.width,
        "backgroundColor": "#f4f4f4"
    });
    this.node.append(this.legend);
};
TopLegend.prototype.render = function () {
    this.init();
    var that = this;
    var owner = this.owner,
        conf = owner.defaults;
    var r0 = this.defaults.r0;
    this.legends = [];
    // Paper starts at a provisional height of 50 and is resized at the end
    // once the number of wrapped lines is known.
    this.paper = new Raphael(this.legend[0], conf.width, 50);
    var paper = this.paper;
    var m = [10, 20, 10, 20],
        left = m[3],
        top = m[0],
        lineHeight = 25,
        legendInterval = 10,
        lineWidth = conf.width,
        circleW = 18,
        colorFunc = owner.getColor();
    // Hover handlers forward the event to the owner chart and animate
    // this legend's own circle.
    var hoverIn = function () {
        var index = this.data("index");
        that.owner.fire('hoverIn', index);
        that.highlight(index);
    };
    var hoverOut = function () {
        var index = this.data("index");
        that.owner.fire('hoverOut', index);
        that.lowlight(index);
    };
    that.on('hoverIn', function (index) {
        that.highlight(index);
    }).on('hoverOut', function (index) {
        that.lowlight(index);
    });
    var colorArray = owner.displayData.allInfos.map(function (item, index) {
        return colorFunc(index);
    });
    for (var i = 0, l = owner.displayData.allInfos.length; i < l; i++) {
        var text = paper.text(0, 0, owner.getDisplayRowInfo(i).rowName).attr({
            "font-size": conf.fontSize,
            "text-anchor": "start",
            "font-family": "微软雅黑"
        });
        var box = text.getBBox();
        // Wrap to a new line when this entry would overflow the right margin.
        if (left + circleW + box.width >= lineWidth - m[1]) {
            //new line
            left = m[3];
            top += lineHeight;
        }
        var color = colorArray[owner.displayData.rowIndex[i].slicedData];
        var circle = paper.circle(left + circleW / 2, top + lineHeight / 2, r0)
            .attr({
                "stroke": "none",
                "fill": color
            })
            .data("index", i)
            .hover(hoverIn, hoverOut);
        text.transform("t" + (left + circleW) + "," + (top + lineHeight / 2));
        // Invisible rect over the label enlarges the hover hit area.
        paper.rect(left + circleW, top, box.width, lineHeight).attr({
            "stroke": "none",
            "fill": "#000",
            "opacity": 0
        })
        .data("index", i)
        .hover(hoverIn, hoverOut);
        that.legends.push({"text": text, "circle": circle});
        left += legendInterval + circleW + box.width;
    }
    paper.setSize(lineWidth, top + lineHeight + m[2]);
};
// Grow and fade the series circle on hover in.
TopLegend.prototype.highlight = function (index) {
    this.legends[index].circle.animate({"r": this.defaults.r1, "opacity": 0.5}, 300);
};
// Restore the series circle on hover out.
TopLegend.prototype.lowlight = function (index) {
    this.legends[index].circle.animate({"r": this.defaults.r0, "opacity": 1}, 300);
};
return {
Legend: Legend,
TopLegend: TopLegend
};
});
/*global Raphael, d3, $, define, _ */
/*!
* StreamLegend的兼容定义
*/
;(function (name, definition) {
if (typeof define === 'function') { // Module
define(definition);
} else { // Assign to common namespaces or simply the global object (window)
this[name] = definition(function (id) { return this[id];});
}
})('Navi', function (require) {
var DataV = require('DataV');
// Breadcrumb-style drill-down navigation bar for the stream chart.
var Navi = DataV.extend(DataV.Chart, {
    initialize: function (container) {
        this.node = $(container);
    }
});
Navi.prototype.init = function () {
    this.naviBackWidth = 80;
    var conf = this.defaults;
    this.node.css({
        "borderTop": "1px solid #ddd",
        "borderBottom": "1px solid #ddd",
        "padding": "5px 10px 10px 10px",
        "fontSize": conf.fontSize + 1,
        "fontFamily": "宋体"
    });
    // Breadcrumb trail (left) and "back one level" button (right).
    this.naviTrace = $("<div></div>").css({
        "width": conf.legendBesidesWidth - this.naviBackWidth - 50,
        "margin-top": "5px"
    });
    this.naviBack = $("<div></div>");
    this.naviBack.html("返回上层").css({
        "width": this.naviBackWidth + "px",
        "float": "right",
        "background-color": "#f4f4f4",
        "padding-top": "4px",
        "padding-bottom": "4px",
        "border": "1px solid #ddd",
        "border-radius": "2px",
        "cursor": "pointer",
        "text-align": "center",
        "visibility": "hidden"
    });
    this.node.append(this.naviBack).append(this.naviTrace);
    var that = this;
    this.naviTrace.on("click", ".navi", function (e) {
        // BUG FIX: e.target is a raw DOM node, which has no .data() method.
        // Wrap it in jQuery to read the "level" value that render() stored
        // via span.data("level", i).
        that.owner.fire('changeLevelTo', $(e.target).data('level'));
    });
    this.naviBack.on("back", function () {
        that.owner.fire('changeLevel');
    });
};
Navi.prototype.render = function () {
this.init();
var level = this.defaults.level;
this.clear();
for (var i = 0; i <= level; i++) {
this.naviTrace.append($("<span> > </span>"));
var span = $("<span></span>").data("level", i).html("第" + (i + 1) + "层");
this.naviTrace.append(span);
if (i !== level) {
span.css({
"cursor": "pointer",
"color": "#1E90FF"
}).addClass("navi");
}
}
this.naviBack.css('visibility', level > 0 ? "visible" : "hidden");
};
Navi.prototype.clear = function () {
this.naviTrace.empty();
};
return Navi;
});
/*global Raphael, d3, $, define, _ */
/*!
 * Tip的兼容定义
 */
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('Tip', function (require) {
  var DataV = require('DataV');
  //floatTag
  // Tip is the floating tooltip shown while hovering the stream chart.
  // It indexes the raw rows by x (column) and by type (band) so that
  // setContent(rowIndex, columnIndex) can look up the hovered cell and
  // compute its rank within its column.
  var Tip = DataV.extend(DataV.Chart, {
    initialize: function (container) {
      this.container = container;
      // DataV.FloatTag builds the floating DOM node inside the container.
      this.node = DataV.FloatTag()(this.container);
      /**
       * Type dimension
       */
      this.dimension.type = {
        type: "string",
        required: true,
        index: 1
      };
      /**
       * Time (x) dimension
       */
      this.dimension.x = {
        type: "string",
        required: true,
        index: 0
      };
      /**
       * Value dimension
       */
      this.dimension.value = {
        type: "number",
        required: true,
        index: 2
      };
    },
    // Default tooltip content: the hovered column's x value. Can be
    // overridden via defaults.getContent (see setContent below).
    getContent: function (obj) {
      return obj[this.mapping.x];
    }
  });

  /**
   * Index the source rows: group by x and by type, sort the type groups by
   * descending total value and record each type's overall rank
   * (`finalRank`). `this.axis` keeps the ordered list of x values.
   */
  Tip.prototype.setSource = function (source, map) {
    var that = this;
    this.map(map);
    this.rawData = source;
    this.groupedByX = _.groupBy(source, this.mapping.x);
    this.groupedByType = _.groupBy(source, this.mapping.type);
    var sorted = _.sortBy(this.groupedByType, function (group) {
      return -DataV.sum(group, that.mapping.value);
    });
    this.sorted = sorted;
    _.each(sorted, function (list, index) {
      that.groupedByType[list[0][that.mapping.type]].finalRank = index + 1;
    });
    this.axis = _.keys(this.groupedByX);
  };

  // Start hidden; apply user-supplied CSS from defaults.tipStyle.
  Tip.prototype.render = function () {
    this.hidden();
    this.node.css(this.defaults.tipStyle);
  };

  /**
   * Fill the tooltip for the hovered cell. When drill-down mode is on
   * (`conf.more`), rows past the visible window are folded into a synthetic
   * "more" row whose value is NaN (only its aggregated `rate` is known).
   * The cell's `rank` is its 1-based position among the column's values
   * sorted descending (NaN when not found).
   */
  Tip.prototype.setContent = function (rowIndex, columnIndex) {
    var that = this;
    var conf = this.defaults;
    var getContent = conf.getContent || this.getContent;
    var column = this.groupedByX[this.axis[columnIndex]];
    var values = this.sorted;//_.values(this.groupedByType);
    var types;
    if (!conf.more) {
      types = values;
    } else {
      types = DataV.more(values, conf.level, conf.max, function (remains) {
        var row = [];
        for (var i = 0; i < that.axis.length; i++) {
          var col = {};
          col[that.mapping.type] = conf.moreLabel;
          col[that.mapping.x] = that.axis[i];
          col[that.mapping.value] = NaN;// DataV.sum(_.pluck(remains, i), that.mapping.value);
          col.rate = DataV.sum(_.pluck(remains, i), "rate");
          row.push(col);
        }
        return row;
      });
    }
    var row = types[rowIndex];
    var obj = row[columnIndex];
    // Rank = position of this cell's value in the column, sorted descending.
    var index = _.indexOf(_.map(column, function (item) {
      return item[that.mapping.value];
    }).sort(function (a, b) {
      return a > b ? -1 : 1;
    }), obj[that.mapping.value]);
    obj.rank = index === -1 ? NaN : index + 1;
    var html = getContent.call(this, obj);
    this.node.html(html);
  };
  return Tip;
});
/*global Raphael, d3, $, define, _ */
/*!
 * Percentage的兼容定义
 */
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('Percentage', function (require) {
  var DataV = require('DataV');
  // Percentage: the narrow column beside the stream chart that shows the
  // share (in %) of the total represented by the visible bands.
  var Percentage = DataV.extend(DataV.Chart, {
    initialize: function (container) {
      this.node = $(container);
      this.limit = 20;  // max number of types displayed at once
      this.from = 0;    // window [from, to) into this.types when over limit
      this.to = 0;
      /**
       * Type dimension
       */
      this.dimension.type = {
        type: "string",
        required: true,
        index: 1
      };
      /**
       * Value dimension
       */
      this.dimension.value = {
        type: "number",
        required: true,
        index: 2
      };
    }
  });

  // Create the Raphael paper and size/float the container column.
  Percentage.prototype.init = function () {
    var conf = this.defaults;
    this.paper = new Raphael(this.node[0], conf.percentageWidth, conf.chartHeight);
    this.node.css({
      "width": conf.percentageWidth,
      "height": conf.chartHeight,
      "float": "left",
      "margin-bottom": "0px",
      "border-bottom": "0px",
      "padding-bottom": "0px"
    });
  };

  // Group rows by type; when more types exist than `limit`, clamp the
  // visible window to the first `limit` entries.
  Percentage.prototype.setSource = function (source, map) {
    map = this.map(map);
    this.grouped = _.groupBy(source, map.type);
    this.types = _.keys(this.grouped);
    if (this.types.length > this.limit) {
      this.to = this.limit;
    }
  };

  /**
   * Draw the background bar and the "100%" caption (created once, on the
   * first render).
   * NOTE(review): the animated update below is commented out, so the widget
   * always shows a static 100%; `maxY` is not defined in this scope, so the
   * disabled code would throw if re-enabled as-is — confirm intent before
   * restoring it.
   */
  Percentage.prototype.render = function () {
    this.init();
    var conf = this.defaults;
    var y = conf.fontSize * 2 / 3;
    if (!this.rect) {//init
      this.rect = this.paper.rect(0, 0, conf.percentageWidth, conf.chartHeight)
        .attr({
          "fill": "#f4f4f4",
          "stroke": "#aaa",
          "stroke-width": 0.5
        });
      this.text = this.paper.text(conf.percentageWidth / 2, y, Math.round(100) + "%")
        .attr({"text-anchor": "middle"});
    }
    // this.rect.animate({"y": (1 - maxY) * conf.chartHeight, "height": maxY * conf.chartHeight}, 750);
    // this.text.attr({
    //   "text": Math.round(maxY * 100) + "%"
    // }).animate({"y": y}, 300);
  };
  return Percentage;
});
/*global Raphael, d3, $, define, _ */
/*!
 * HoverLine的兼容定义
 */
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('HoverLine', function (require) {
  var DataV = require('DataV');

  /**
   * HoverLine draws two vertical guides on the stream chart: a dashed
   * indicator spanning the whole stack at the hovered column, and a solid
   * white line spanning only the hovered band.
   */
  var HoverLine = DataV.extend(DataV.Chart, {
    initialize: function () {
    }
  });

  // (Re)create both guide lines on the owner's paper; they start invisible
  // ("stroke: none") until show() is called.
  HoverLine.prototype.render = function () {
    this.clear();
    var paper = this.owner.paper;
    var fullHeight = "M0 0V" + this.defaults.chartHeight;
    this.indicatorLine = paper.path(fullHeight).attr({
      stroke: "none",
      "stroke-width": 1,
      "stroke-dasharray": "- "
    });
    this.highlightLine = paper.path(fullHeight).attr({
      stroke: "none",
      "stroke-width": 2
    });
  };

  // Visibility is toggled by setting/clearing the stroke color.
  HoverLine.prototype.hidden = function () {
    this.indicatorLine.attr({"stroke": "none"});
    this.highlightLine.attr({"stroke": "none"});
  };

  HoverLine.prototype.show = function () {
    this.indicatorLine.attr({"stroke": "#000"});
    this.highlightLine.attr({"stroke": "white"});
  };

  /**
   * Move the guides to `columnIndex`; when `rowIndex` is supplied, stretch
   * the highlight line over that band only.
   */
  HoverLine.prototype.refresh = function (columnIndex, rowIndex) {
    var owner = this.owner;
    var source = owner.pathSource;
    var x = this.defaults.chartWidth * columnIndex / (owner.columnCount - 1);
    var topCell = source[source.length - 1][columnIndex];
    // Dashed indicator: from the top edge of the topmost band down to the
    // baseline of the bottom band.
    this.indicatorLine.attr({
      path: "M" + x + " " + (topCell.y0 - topCell.y) + "V" + source[0][columnIndex].y0
    });
    if (typeof rowIndex !== 'undefined') {
      var cell = source[rowIndex][columnIndex];
      this.highlightLine.attr({
        path: "M" + x + " " + (cell.y0 - cell.y) + "V" + cell.y0
      });
      // The bottom band (index 0) is clickable, so hint with a pointer.
      this.highlightLine.attr({"cursor": rowIndex === 0 ? "pointer" : "auto"});
    }
  };

  // Remove the lines from the paper if they exist.
  HoverLine.prototype.clear = function () {
    this.indicatorLine && this.indicatorLine.remove();
    this.highlightLine && this.highlightLine.remove();
  };
  return HoverLine;
});
/*global Raphael, d3, $, define, _ */
/*!
* PathLabel的兼容定义
*/
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('PathLabel', function (require) {
  var DataV = require('DataV');
  //pathLabel
  // PathLabel paints a text label (type + rank) inside each stream band,
  // positioned where the band is vertically widest so the text fits.
  var PathLabel = DataV.extend(DataV.Chart, {
    initialize: function () {
      /**
       * Type dimension
       */
      this.dimension.type = {
        type: "string",
        required: true,
        index: 1
      };
      /**
       * Time (x) dimension
       */
      this.dimension.x = {
        type: "string",
        required: true,
        index: 0
      };
      /**
       * Value dimension
       */
      this.dimension.value = {
        type: "number",
        required: true,
        index: 2
      };
    }
  });

  /**
   * Create one Raphael text element per band. For each band we find the
   * column where the band is thickest (ignoring a margin near both ends),
   * then check the label's bounding box against the band thickness over a
   * small horizontal window; labels that do not fit are made transparent.
   */
  PathLabel.prototype.render = function () {
    this.clear();
    var that = this;
    var owner = this.owner;
    var paths = owner.paths;
    var conf = this.defaults;
    var pathSource = owner.pathSource;
    var labels = [];
    // Compute the anchor point for a label `el` along a band's point list
    // `locArray`; returns {x, y, showLabel}.
    var getLabelLocation = function (locArray, el) {
      var x = 0,
        y = 0,
        i;
      var ratioMargin = 0.15;
      var index = 0;
      var max = 0;
      var box = el.getBBox();
      var xInterval;
      var minTop, maxBottom;
      var showLabel = true;
      var loc;
      var height;
      // Half the label width expressed in column steps.
      xInterval = Math.ceil(box.width / (locArray[1].x - locArray[0].x) / 2);
      if (xInterval === 0) {
        xInterval = 1;
      }
      // Find the thickest column, skipping a margin at both ends.
      locArray.forEach(function (d, i, array) {
        var m = Math.max(ratioMargin * array.length, xInterval);
        if (i >= m && i <= array.length - m) {
          if (d.y > max) {
            minTop = d.y0 - d.y;
            maxBottom = d.y0;
            max = d.y;
            index = i;
          }
        }
      });
      // Shrink the usable vertical span to what is free across the whole
      // window the label would occupy.
      for (i = index - xInterval; i <= index + xInterval; i++) {
        if (i < 0 || i >= locArray.length) {
          height = 0;
          showLabel = false;
          break;
        }
        loc = locArray[i];
        //top's y is small
        if (loc.y0 - loc.y > minTop) {
          minTop = loc.y0 - loc.y;
        }
        if (loc.y0 < maxBottom) {
          maxBottom = loc.y0;
        }
      }
      // Only show the label if 80% of its height fits in the free span.
      if (showLabel && maxBottom - minTop >= box.height * 0.8) {
        x = locArray[index].x;
        y = (minTop + maxBottom) / 2;
      } else {
        showLabel = false;
      }
      return {
        x: x,
        y: y,
        showLabel: showLabel
      };
    };
    var getPathLabel = this.defaults.getPathLabel || this.getPathLabel;
    var selected;
    //var values = _.values(this.groupedByType);
    var values = _.values(this.sorted);
    if (!conf.more) {
      selected = values.slice(0);
    } else {
      // Fold the rows past the visible window into one synthetic "more" row.
      selected = DataV.more(values, conf.level, conf.max, function (remains) {
        var obj = {};
        obj.type = conf.moreLabel;
        obj.rank = remains[0].rank;
        obj.sum = DataV.sum(remains, "sum");
        return obj;
      });
    }
    for (var i = 0, l = paths.length; i < l; i++) {
      var path = paths[i];
      var row = selected[i];
      // type/rank/sum were attached to each group array in setSource().
      var obj = {
        type: row.type,
        rank: row.rank,
        sum: row.sum,
        total: this.total
      };
      var text = getPathLabel.call(this, obj);
      var label = owner.paper.text(0, 0, text).attr({
        "textAnchor": "middle",
        "fill": "white",
        "fontSize": conf.fontSize,
        "fontFamily": "微软雅黑"
      });
      label.labelLoc = getLabelLocation(pathSource[i], label);
      if (label.labelLoc.showLabel) {
        label.attr({
          "x": label.labelLoc.x,
          "y": label.labelLoc.y
        });
      } else {
        label.attr({"opacity": 0});
      }
      path.attr({"cursor": "auto"});
      label.attr({"cursor": "auto"});
      labels.push(label);
    }
    this.labels = labels;
  };
  /**
   * Default label formatter; override via `setOption({getPathLabel: fn})`.
   * Properties of `obj`:
   * - `type`, band type
   * - `rank`, band rank
   * - `sum`, band total value
   * - `total`, grand total over all bands
   * @param {Object} obj descriptor of the current band
   */
  PathLabel.prototype.getPathLabel = function (obj) {
    return obj.type + " " + "排名: 第" + obj.rank;
  };

  // Hide every label (used while the chart animates).
  PathLabel.prototype.hidden = function () {
    this.labels.forEach(function (d) {
      d.hide();
    });
  };

  // Re-show only the labels that were determined to fit their band.
  PathLabel.prototype.show = function () {
    this.labels.forEach(function (d) {
      if (d.labelLoc.showLabel) {
        d.show();
      }
    });
  };

  // Remove all label elements from the paper.
  PathLabel.prototype.clear = function () {
    if (this.labels) {
      this.labels.forEach(function (d) {
        d.remove();
      });
    }
  };

  /**
   * Group rows by type and annotate each group array with `sum`, `type`
   * and descending-total `rank`; also compute the grand `total`.
   */
  PathLabel.prototype.setSource = function (source, map) {
    var that = this;
    this.map(map);
    this.groupedByType = _.groupBy(source, this.mapping.type);
    var sorted = _.sortBy(this.groupedByType, function (group, type) {
      var sum = DataV.sum(group, that.mapping.value);
      that.groupedByType[type].sum = sum;
      that.groupedByType[type].type = type;
      return -sum;
    });
    this.sorted = sorted;
    this.types = _.keys(this.groupedByType);
    _.each(sorted, function (list, index) {
      that.groupedByType[list[0][that.mapping.type]].rank = index + 1;
    });
    this.total = DataV.sum(_.map(that.groupedByType, function (group) {
      return group.sum;
    }));
  };
  return PathLabel;
});
/*global Raphael, d3, $, define, _ */
/*!
 * Cover的兼容定义
 */
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('Cover', function (require) {
  var DataV = require('DataV');

  /**
   * Cover is an invisible overlay placed above the chart while it animates;
   * it swallows mouse events and records the last pointer position so
   * interaction can resume where the user left off.
   */
  var Cover = DataV.extend(DataV.Chart, {
    initialize: function (container) {
      var conf = this.defaults;
      var rememberMouse = $.proxy(function (e) {
        this.mouse = {x: e.pageX, y: e.pageY};
        e.stopPropagation();
      }, this);
      var forgetMouse = $.proxy(function () {
        this.mouse = undefined;
      }, this);
      this.node = $(container);
      this.node
        .css({
          "position": "absolute",
          "left": 0,
          "top": 0,
          "width": conf.chartWidth,
          "height": conf.chartHeight,
          "zIndex": 100,
          "visibility": "hidden"
        })
        .bind("mousemove", rememberMouse)
        .bind("mouseleave", forgetMouse);
    }
  });
  return Cover;
});
/*global Raphael, d3, $, define, _ */
/*!
* Stream的兼容定义
*/
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('Stream', function (require) {
  var DataV = require('DataV');
  var HoverLine = require('HoverLine');
  var PathLabel = require('PathLabel');
  //streamChart
  // Stream: the core streamgraph renderer. It stacks the per-type series
  // with d3's stack layout and draws one Raphael path per band.
  var Stream = DataV.extend(DataV.Chart, {
    initialize: function (node, options) {
      this.node = this.checkContainer(node);
      /**
       * Type dimension
       */
      this.dimension.type = {
        type: "string",
        required: true,
        index: 1
      };
      /**
       * Time (x) dimension
       */
      this.dimension.x = {
        type: "string",
        required: true,
        index: 0
      };
      /**
       * Value dimension
       */
      this.dimension.value = {
        type: "number",
        required: true,
        index: 2
      };
      this.defaults.width = 500;
      this.defaults.height = 300;
      this.defaults.offset = "expand";//zero, expand, silhou-ette, wiggle;
      this.defaults.order = "default";//default, reverse, inside-out //in this Stream application, it will always be default, the real order is adjusted in Stream's data-process.
      this.defaults.animateDuration = 750;
      this.defaults.animateOrder = undefined;
      // Populated later: Raphael path elements, raw data, stacked layout
      // and the pixel-space version of the layout.
      this.paths = undefined;
      this.source = undefined;
      this.layoutData = undefined;
      this.pathSource = undefined;
      this.setOptions(options);
      this.createPaper();
    }
  });
// Create the Raphael canvas for this chart, sized from the defaults.
Stream.prototype.createPaper = function () {
  var size = this.defaults;
  this.paper = new Raphael(this.node, size.width, size.height);
};
/**
 * Ingest the raw rows: group by type, sort rows inside each group along
 * the x axis, order the groups by descending total value, then derive the
 * per-column ratios (remapSource) and the level-filtered layout data.
 */
Stream.prototype.setSource = function (source, map) {
  this.map(map);
  this.rawData = source;
  this.rawMap = map;
  var that = this;
  // Group by type
  var grouped = _.groupBy(source, this.mapping.type);
  this.rowCount = _.keys(grouped).length;
  this.columnCount = _.keys(_.groupBy(source, this.mapping.x)).length;
  // Sort each group along the x axis
  _.forEach(grouped, function (group, type) {
    grouped[type] = _.sortBy(group, that.mapping.x);
  });
  // Biggest series first.
  this.sorted = _.sortBy(grouped, function (group) {
    return 0 - DataV.sum(group, that.mapping.value);
  });
  this.remaped = this.remapSource();
  this.layoutData = this.getLayoutData();
};
/**
 * Convert the absolute values to per-column ratios: each cell becomes
 * {x: columnIndex, y: value / columnTotal}. The same ratio is also stored
 * back on the raw row as `rate` for the tooltip.
 */
Stream.prototype.remapSource = function () {
  var groups = this.sorted;
  var valueKey = this.mapping.value;
  var remapped = [];
  var row, col, columnTotal, ratio;
  for (col = 0; col < this.columnCount; col++) {
    columnTotal = DataV.sum(_.pluck(groups, col), valueKey);
    for (row = 0; row < this.rowCount; row++) {
      ratio = groups[row][col][valueKey] / columnTotal;
      (remapped[row] || (remapped[row] = []))[col] = {x: col, y: ratio};
      groups[row][col].rate = ratio;
    }
  }
  return remapped;
};
/*!
 * Get the data for the current drill-down level
 */
/**
 * Return the rows to lay out. Without drill-down mode this is just the
 * remapped ratio matrix; with it, DataV.more folds the rows beyond the
 * current level/max window into a single aggregated "more" row.
 */
Stream.prototype.getLayoutData = function () {
  var conf = this.defaults;
  var remaped = this.remaped;
  var that = this;
  if (!conf.more) {
    return remaped;
  } else {
    return DataV.more(remaped, conf.level, conf.max, function (remains) {
      var obj = [];
      for (var i = 0; i < that.columnCount; i++) {
        obj.push({
          x: i,
          y: DataV.sum(_.pluck(remains, i), 'y')
        });
      }
      return obj;
    });
  }
};
// Run d3's stack layout in place over layoutData (adds y0 baselines).
Stream.prototype.layout = function () {
  var opts = this.defaults;
  d3.layout.stack().offset(opts.offset).order(opts.order)(this.layoutData);
};
/**
 * Draw the chart background and one filled Raphael path per band, colored
 * by the palette from getColor(). Stores the elements in this.paths.
 */
Stream.prototype.generateChartElements = function () {
  var conf = this.defaults;
  var paper = this.paper,
    paths = [];
  var area = this.generateArea();
  var colorFunc = this.getColor();
  // set div's background instread;
  paper.rect(0, 0, conf.chartWidth, conf.chartHeight).attr({
    "stroke": "none",
    "fill": "#e0e0e0"
  });
  for (var i = 0, l = this.layoutData.length; i < l; i++) {
    var areaString = area(this.pathSource[i]);
    var color = colorFunc(i);
    var path = paper.path(areaString).attr({
      fill: color,
      stroke: color,
      "stroke-width": 1
    });
    paths[i] = path;
  }
  this.paths = paths;
};
Stream.prototype.render = function (animate) {
if (animate !== "animate") {
this.clear();
this.layout();
this.generateChartElements();
} else {
this.layout();
this.animate();
}
//hoverLine
this.hoverLine = this.own(new HoverLine());
this.hoverLine.render();//lines should be to front, so at last
//pathLabel
if (this.defaults.pathLabel) {
this.pathLabel = this.own(new PathLabel());
this.pathLabel.setSource(this.rawData, this.rawMap);
this.pathLabel.render();
}
this.createInteractive();
};
/**
 * Morph the existing band paths to the freshly laid-out geometry.
 * `defaults.animateOrder`, when set, maps each new path index to the old
 * path that should morph into it; animateCallback (fired once, after the
 * last path finishes) then reorders this.paths to match.
 */
Stream.prototype.animate = function () {
  var time = 0,
    area,
    colorFunc,
    color,
    i, l,
    _area,
    paths = [],
    order,
    anim,
    count = this.paths.length;
  var that = this;
  // Countdown wrapper: only the last finishing animation triggers the
  // real callback.
  var animateCallback = function () {
    count -= 1;
    if (count > 0) {
      return;
    }
    that.animateCallback();
  };
  if (typeof this.defaults.animateDuration !== 'undefined') {
    time = this.defaults.animateDuration;
  }
  // if paths have not been created
  if (typeof this.paths === 'undefined') {
    this.generateChartElements();
  }
  area = this.generateArea();
  colorFunc = this.getColor();
  if (typeof this.defaults.animateOrder !== 'undefined') {
    order = this.defaults.animateOrder;
  } else {
    order = d3.range(this.pathSource.length);
  }
  for (i = 0, l = this.pathSource.length; i < l; i++) {
    _area = area(this.pathSource[i]);
    paths.push(_area);
  }
  for (i = 0, l = this.pathSource.length; i < l; i++) {
    color = colorFunc(i);
    anim = Raphael.animation({"path": paths[i]}, time, animateCallback);
    this.paths[order[i]].animate(anim);
  }
};
// After an animated reorder finishes, physically reorder this.paths to
// match defaults.animateOrder so later index-based access stays correct.
Stream.prototype.animateCallback = function () {
  var order = this.defaults.animateOrder;
  if (typeof order === 'undefined') {
    return;
  }
  var oldPaths = this.paths;
  var reordered = [];
  order.forEach(function (sourceIndex, position) {
    reordered[position] = oldPaths[sourceIndex];
  });
  this.paths = reordered;
};
// Remove every element from the Raphael paper.
Stream.prototype.clear = function () {
  this.paper.clear();
};
// NOTE(review): dead code — this implementation is shadowed by the second
// Stream.prototype.getColor definition below and is never invoked. If it
// ever ran it would break: `this.source` is never assigned (setSource
// stores this.rawData), and the colorArray map callback reads the loop
// variable `i` after the loops have finished. Kept verbatim; consider
// deleting it in a dedicated cleanup change.
Stream.prototype.getColor = function (colorJson) {
  var colorMatrix = DataV.getColor();
  var color;
  var colorStyle = colorJson || {};
  var colorMode = colorStyle.mode || 'default';
  var i, l;
  switch (colorMode) {
  case "gradient":
    l = this.source.length;
    // at most colorMatrix.length - 1
    var colorL = Math.min(Math.round(l / 5), colorMatrix.length - 1);
    var testColor = [colorMatrix[0][0], colorMatrix[colorL][0]];
    var test1 = DataV.gradientColor(testColor, "special");
    var testColorMatrix = [];
    var testColorMatrix1 = [];
    for (i = 0; i < l; i++) {
      testColorMatrix.push([test1(i / (l - 1)), test1(i / (l - 1))]);
    }
    for (i = l - 1; i >= 0; i--) {
      testColorMatrix1.push(testColorMatrix[i]);
    }
    colorMatrix = testColorMatrix;
    break;
  case "random":
  case "default":
    break;
  }
  var ratio = colorStyle.ratio || 0;
  ratio = Math.max(ratio, 0);
  ratio = Math.min(ratio, 1);
  var colorArray = colorMatrix.map(function () {
    return d3.interpolateRgb.apply(null, [colorMatrix[i][0], colorMatrix[i][1]])(ratio);
  });
  color = d3.scale.ordinal().range(colorArray);
  return color;
};
/*
*/
/**
 * Build the band palette: sample a gradient between the two configured
 * endpoint colors, one sample per band. The returned function maps a band
 * index to a color — even indices fan out above the middle sample, odd
 * ones below, so neighbouring bands get visually distant colors.
 */
Stream.prototype.getColor = function () {
  var total = this.layoutData.length;
  var endpoints = this.defaults.gradientColor || ["#8be62f", "#1F4FD8"];
  var interpolate = DataV.gradientColor(endpoints, "special");
  var step = 1 / total;
  var palette = [];
  var k;
  for (k = 0; k < total; k++) {
    palette.push(interpolate(k * step));
  }
  var middle = Math.floor(total / 2);
  return function (index) {
    if (index % 2 === 0) {
      return palette[middle + index / 2];
    }
    return palette[middle - (index + 1) / 2];
  };
};
// Highest stacked value (baseline + thickness) over all layers and columns.
Stream.prototype.getMaxY = function () {
  return d3.max(this.layoutData, function (layer) {
    return d3.max(layer, function (point) {
      return point.y0 + point.y;
    });
  });
};
/**
 * Scale the stacked layout into pixel space (this.pathSource). For each
 * cell: x spans the chart width, y0 is the pixel baseline measured from
 * the top (hence the `height - ...` flip), and y is the band thickness.
 */
Stream.prototype.mapPathSource = function () {
  var conf = this.defaults,
    maxX = this.layoutData[0].length - 1,
    maxY = this.getMaxY(),
    width = conf.chartWidth,
    height = conf.chartHeight;
  this.pathSource = [];
  for (var i = 0, l = this.layoutData.length; i < l; i++) {
    this.pathSource[i] = [];
    for (var j = 0, l2 = this.layoutData[0].length; j < l2; j++) {
      var s = this.layoutData[i][j];
      var ps = this.pathSource[i][j] = {};
      ps.x = s.x * width / maxX;
      ps.y0 = height - s.y0 * height / maxY;
      ps.y = s.y * height / maxY;
    }
  }
};
// Refresh pathSource and return a d3 area generator that turns one band's
// point list into an SVG path (bottom edge at y0, top edge at y0 - y).
Stream.prototype.generateArea = function () {
  this.mapPathSource();
  return d3.svg.area()
    .x(function (p) { return p.x; })
    .y0(function (p) { return p.y0; })
    .y1(function (p) { return p.y0 - p.y; });
};
// Mark the band at `index` as hovered: dim it and drop its outline.
Stream.prototype.highlight = function (index) {
  if (index === undefined) {
    return;
  }
  this.paths[index].attr({"opacity": 0.5, "stroke-width": 0});
};
// Restore the band at `index` to its resting appearance.
Stream.prototype.lowlight = function (index) {
  if (index === undefined) {
    return;
  }
  this.paths[index].attr({"opacity": 1, "stroke-width": 1});
};
/**
 * (Re)bind all mouse interaction on the chart canvas and replace
 * animateCallback with a version that also restores the hover widgets
 * once a drill-down animation finishes. Fires 'enter'/'leave'/'move'
 * events consumed by the owning StreamComponent.
 */
Stream.prototype.createInteractive = function () {
  $(this.paper.canvas).unbind();//prevent event rebind.
  //refactor stream chart's animate function, especially change the callback
  var stream = this;
  this.animateCallback = function () {
    var newOrderPaths = [];
    var that = this;
    if (typeof this.defaults.animateOrder !== 'undefined') {
      this.defaults.animateOrder.forEach(function (d, i) {
        newOrderPaths[i] = that.paths[d];
      });
      this.paths = newOrderPaths;
    }
    // Animation done: drop the blocking cover and, if the pointer is
    // still over the chart, replay the last mousemove so the hover state
    // catches up.
    stream.cover.hidden();
    if (typeof stream.cover.mouse !== 'undefined') {
      stream.hoverLine.show();
      stream.floatTag.show();
      var mouse = stream.cover.mouse;
      $(stream.paper.canvas).trigger("mousemove", [mouse.x, mouse.y]);
      $(stream.floatTag).trigger("mousemove", [mouse.x, mouse.y]);
      stream.cover.mouse = undefined;
    }
    stream.pathLabel.show();
  };
  //chart mouseenter
  var mouseenter = function () {
    stream.hoverLine.show();
    stream.fire('enter');
  };
  //chart mouseleave
  var mouseleave = function () {
    stream.hoverLine.hidden();
    //recover prepath;
    if (typeof stream.preIndex !== 'undefined') {
      stream.lowlight(stream.preIndex);
    }
    stream.fire('leave', stream.preIndex);
    stream.preIndex = undefined;
  };
  //chart click
  var click = function () {};
  //chart mousemove
  // pageX/pageY fallbacks allow synthetic triggers (see animateCallback).
  var mousemove = function (e, pageX, pageY) {
    var offset = $(this).parent().offset();
    var x = (e.pageX || pageX) - offset.left,
      y = (e.pageY || pageY) - offset.top;
    var pathSource = stream.pathSource,
      rowIndex;
    // Snap x to the nearest data column.
    var columnIndex = Math.floor((x / (stream.defaults.chartWidth / (stream.columnCount - 1) / 2) + 1) / 2);
    //get path and pathIndex
    for (var i = 0, l = pathSource.length; i < l; i++) {
      if (y >= pathSource[i][columnIndex].y0 - pathSource[i][columnIndex].y && y <= pathSource[i][columnIndex].y0) {
        rowIndex = i;
        break;
      }
    }
    //recover prepath;
    if (typeof stream.preIndex !== 'undefined') {
      stream.lowlight(stream.preIndex);
    }
    stream.highlight(rowIndex);
    stream.fire('move', stream.preIndex, rowIndex, columnIndex);
    //set indicator and highlight line new position
    stream.hoverLine.refresh(columnIndex, rowIndex);
    //customevent;
    if (stream.defaults.customEventHandle.mousemove) {
      stream.defaults.customEventHandle.mousemove.call(stream,
        {"timeIndex": columnIndex, "rowIndex": rowIndex});
    }
    //change new path;
    stream.preIndex = rowIndex;
  };
  $(this.paper.canvas).bind("mouseenter", mouseenter)
    .bind("mouseleave", mouseleave)
    .bind("click", click)
    .bind("mousemove", mousemove);
};
return Stream;
});
/*global $, define */
/*!
 * StreamComponent的兼容定义
 */
;(function (name, definition) {
  if (typeof define === 'function') { // Module
    define(definition);
  } else { // Assign to common namespaces or simply the global object (window)
    this[name] = definition(function (id) { return this[id];});
  }
})('StreamComponent', function (require) {
  var DataV = require('DataV');
  var Legend = require('Legend');
  var Navi = require('Navi');
  var Percentage = require('Percentage');
  var Axis = require('StreamAxis');
  var Tip = require('Tip');
  var Stream = require('Stream');
  var Cover = require('Cover');
  /*
   * constructor
   * @param node the dom node or dom node Id
   *  options options json object for determin stream style.
   * @example
   * create stream in a dom node with id "chart", width is 500; height is 600px;
   * "chart", {"width": 500, "height": 600}
   */
  // StreamComponent composes the whole widget: legend, navigation bar,
  // percentage column, stream chart, time axis, tooltip and the animation
  // cover, and routes events between them.
  var StreamComponent = DataV.extend(DataV.Chart, {
    initialize: function (node, options) {
      this.type = "Stream";
      this.node = this.checkContainer(node);
      this.defaults = {};
      // Properties
      this.defaults.offset = "zero";//zero, expand, silhou-ette, wiggle;(d3 stack offset)
      this.defaults.order = "default";//default, reverse, descending, ascending, inside-out(d3 stack order, sort by index of maximum value, then use balanced weighting.), inside-out-reverse(inside-out like, sort by index of maximum value, not descending but ascending);
      this.defaults.normalized = false;//false, true; //ratio data or not;
      //this.defaults.rowDataOrder = "default"; //default, descending, ascending(according to digitdata row sum value);
      this.defaults.columnNameUsed = "auto";
      this.defaults.rowNameUsed = "auto";
      this.defaults.pathLabel = true;
      this.defaults.fontSize = 12;
      this.defaults.colorCount = 20;
      //this.defaults.axisTickNumber = 8; // axis ticks number
      this.defaults.indexMargin = 3; // if dates.length < indexMargin * 2 + 1, do not show label
      this.timeRange = [];
      // paper
      this.defaults.width = 800;
      this.defaults.height = 560;//if only width has value and autoHeight is true, then height will be width * heightWidthRatio.
      this.defaults.autoHeight = true;
      this.defaults.heightWidthRatio = 0.6;
      this.defaults.legendPosition = "top";//"top", "left"
      this.defaults.topLegendHeight = 50;
      this.defaults.leftLegendWidth = 150;
      this.defaults.showLegend = true;
      this.defaults.legendBesidesWidth = undefined;
      this.defaults.legendBesidesHeight = undefined;
      // Drill-down ("more") configuration.
      this.defaults.more = false;
      this.defaults.moreLabel = "more";
      this.defaults.max = 20;
      this.defaults.level = 0;
      this.defaults.chartWidth = undefined;//depends on width, do not recommend to change
      this.defaults.chartHeight = undefined;// depends on height, do not recommend to change
      this.defaults.naviHeight = 20;//do not recommend to change
      this.defaults.showNavi = undefined;//ture if moreConfig.more == true, else false;
      this.defaults.axisHeight = 30;//do not recommend to change
      this.defaults.showAxis = true;
      this.defaults.showPercentage = undefined;//true if moreConfig.more == true, else false;
      this.defaults.percentageWidth = 40;
      this.defaults.customEventHandle = {"mousemove": null};
      this.defaults.tipStyle = {};
      this.setOptions(options);
    }
  });
/**
 * Wire the event routing between the sub-charts after draw(). Handles the
 * drill-down navigation ('changeLevelTo'/'back'), legend hover, and the
 * stream chart's enter/leave/move events (tooltip, axis bubble, legend
 * highlight).
 * NOTE(review): processData and this.paths are expected to exist on the
 * component but are not defined in this file's visible portion — confirm
 * they are provided elsewhere.
 */
StreamComponent.prototype.init = function () {
  var that = this;
  // Re-render for the (already updated) defaults.level, then replay the
  // expand animation; the cover blocks mouse events meanwhile and
  // naviCallBack restores the interactive widgets afterwards.
  var getBack = function () {
    var naviCallBack = function () {
      that.cover.hidden();
      if (typeof that.cover.mouse !== 'undefined') {
        that.hoverLine.show();
        that.tip.show();
        $(that.paper.canvas).trigger("mousemove",[that.cover.mouse.x, that.cover.mouse.y]);
        that.cover.mouse = undefined;
      }
      that.pathLabel.show();
    };
    that.cover.show();
    that.cover.mouse = undefined;
    that.processData("slicedData");
    that.render("renderComponents");
    //hidden
    that.hoverLine.hidden();
    that.tip.hidden();
    that.pathLabel.hidden();
    that.paths.forEach(function (d) {
      d.attr({transform: "s1,0.001,0,0"});
      d.animate({transform: "t0,0"}, 750, "linear", naviCallBack);
    });
  };
  that.on('changeLevelTo', function (level) {
    that.defaults.level = level;
    // BUGFIX: this.defaults.moreConfig is never defined (the drill-down
    // options live directly on defaults), so evaluating
    // `that.defaults.moreConfig.level` threw a TypeError before getBack
    // was even entered. getBack ignores its argument; pass the freshly
    // set level for clarity.
    getBack(that.defaults.level);
  });
  that.on('back', function () {
    that.defaults.level = that.defaults.level - 1;
    getBack(that.defaults.level);
  });
  // Legend hover mirrors onto the chart bands.
  that.legend.on('hoverIn', function (index) {
    that.stream.highlight(index);
  }).on('hoverOut', function (index) {
    that.stream.lowlight(index);
  });
  that.stream.on('enter', function () {
    that.axis.showTab();
    that.tip.show();
  }).on('leave', function (index) {
    that.axis.hideTab();
    that.tip.hidden();
    if (index !== undefined) {
      that.legend.lowlight(index);
    }
  }).on('move', function (pre, rowIndex, columnIndex) {
    if (pre !== undefined) {
      that.legend.lowlight(pre);
    }
    if (typeof rowIndex === "undefined" || typeof columnIndex === "undefined") {
      return;
    }
    that.legend.highlight(rowIndex);
    that.tip.setContent(rowIndex, columnIndex);
    //axis pop bubble
    that.axis.refreshTab(columnIndex);
  }).on('level_changed', function (start, end, needMore) {
    that.legend.fire('level_changed', start, end, needMore);
  });
};
/**
 * Store the raw rows and the dimension mapping; the sub-charts parse them
 * in their own setSource calls during draw().
 */
StreamComponent.prototype.setSource = function (source, map) {
  this.source = source;
  this.map = map;
};
/**
 * Compute the geometry of every region (legend, navi, percentage column,
 * chart, axis) from the configured width/height and legend position, then
 * create and size the container DOM nodes.
 */
StreamComponent.prototype.layout = function () {
  var conf = this.defaults;
  if (!conf.showLegend) {
    conf.legendBesidesWidth = conf.width;
    conf.legendBesidesHeight = conf.height;
  } else {
    if (conf.legendPosition === "left") {
      conf.legendBesidesWidth = conf.width - conf.leftLegendWidth;
      conf.legendBesidesHeight = conf.height;
    } else {
      conf.legendBesidesWidth = conf.width;
      conf.legendBesidesHeight = conf.height - conf.topLegendHeight;
    }
  }
  conf.chartWidth = conf.legendBesidesWidth - 2 * conf.percentageWidth;
  conf.chartHeight = conf.legendBesidesHeight - (conf.showNavi ? conf.naviHeight : 0) - (conf.showAxis ? conf.axisHeight : 0);
  var node = $(this.node).css({
    position: "relative",
    width: conf.width
  });
  // Create DOM nodes
  this.streamBox = $("<div></div>").addClass("stream");
  this.legendBox = $("<div></div>").addClass("legend");
  this.axisBox = $("<div></div>").addClass("axis");
  this.naviBox = $("<div></div>").addClass("navi");
  this.percentageBox = $("<div></div>").addClass("percentage");
  this.container = $("<div></div>").addClass("container");
  this.rightBox = $("<div></div>").addClass("right");
  // cover can block stream paper when animating to prevent some default mouse event
  this.coverBox = $("<div></div>").addClass("cover");
  // Attach to the DOM
  this.streamBox.append(this.coverBox);
  this.container.append(this.percentageBox).append(this.streamBox);
  this.rightBox.append(this.naviBox).append(this.container).append(this.axisBox);
  node.append(this.legendBox).append(this.rightBox);
  // Size the nodes
  this.streamBox.css({
    "position": "relative",
    "float": "left",
    "width": conf.chartWidth,
    "height": conf.chartHeight
  });
  this.percentageBox.css({
  });
  this.container.css({
    "height": conf.chartHeight
  });
  this.rightBox.css({
    "float": "right",
    "width": conf.legendBesidesWidth
  });
  this.legendBox.css({
    "width": conf.leftLegendWidth - 4,
    "float": "left",
    "overflowX": "hidden"
  });
};
/**
 * Instantiate and render every sub-chart into the boxes created by
 * layout(), feeding each the same source/map. Order matters: the stream
 * is built first so the legend can reuse its color function.
 */
StreamComponent.prototype.draw = function () {
  var conf = this.defaults;
  //chart and paper
  this.stream = this.own(new Stream(this.streamBox, {"width": conf.chartWidth, "height": conf.chartHeight}));
  this.stream.setSource(this.source, this.map);
  this.stream.render();
  this.legend = this.own(new Legend.Legend(this.legendBox));
  this.legend.setOptions({
    "colorFunc": this.stream.getColor()
  });
  this.legend.setSource(this.source, this.map);
  this.legend.render();
  this.percentage = this.own(new Percentage(this.percentageBox));
  this.percentage.setSource(this.source, this.map);
  this.percentage.render();
  this.axis = this.own(new Axis(this.axisBox));
  this.axis.setSource(this.source, this.map);
  this.axis.render();
  this.navi = this.own(new Navi(this.naviBox));
  this.navi.render();
  // cover can block stream paper when animating to prevent some default mouse event
  this.cover = this.own(new Cover(this.coverBox));
  //floatTag
  this.tip = this.own(new Tip(this.streamBox));
  this.tip.setSource(this.source, this.map);
  this.tip.render();
};
// Full render pipeline: compute geometry, build sub-charts, wire events.
StreamComponent.prototype.render = function () {
  this.layout();
  this.draw();
  this.init();
};
/**
 * Register a user callback for a chart event; currently only "mousemove"
 * is consumed (by Stream.createInteractive).
 */
StreamComponent.prototype.setCustomEvent = function (eventName, callback) {
  this.defaults.customEventHandle[eventName] = callback;
};
/*!
 * Export StreamComponent
 */
return StreamComponent;
});<|fim▁end|> | type: "string",
required: true,
index: 0
}; |
<|file_name|>goprocfilter.go<|end_file_name|><|fim▁begin|>// goprocfilter project goprocfilter.go
package goprocfilter
import (
"C"
"fmt"
"syscall"
"unsafe"
)
const (
	// FILTER_NAME is the name of the kernel filter/port to connect to.
	FILTER_NAME = "SimpleProcFilter"
	// KN_MAX_PATH is the fixed UTF-16 buffer length for process image paths;
	// must match the kernel driver's definition.
	KN_MAX_PATH = 512
)
// KN_PROCESS_INFO structure
// KnProcessInfo mirrors the kernel driver's KN_PROCESS_INFO layout:
// creating (parent) PID, new PID, and the image path as a fixed-size
// UTF-16 buffer. Field order and sizes must match the native struct,
// since it is passed across the kernel boundary by pointer.
type KnProcessInfo struct {
	parentProcessId uint32
	processId       uint32
	processPath     [KN_MAX_PATH]uint16
}
// KN_PROCESS_DECISION structure
// KnProcessDecision mirrors the driver's KN_PROCESS_DECISION layout: the
// PID being decided on and an allow flag (1 = allow, 0 = block).
type KnProcessDecision struct {
	processId uint32
	isAllowed uint32
}
// GoProcessCallback decides whether the process described by the arguments
// may start; returning false blocks it.
type GoProcessCallback func(ParentPid, ProcessId uint32, ProcessPath string) (isAllowed bool)

// GoProcFilter couples a kernel-communication handle with the
// user-supplied decision callback.
type GoProcFilter struct {
	knComm          KNCOMM // 0 while disconnected
	callbackRoutine GoProcessCallback
}
// MakeKnComm returns a GoProcFilter wired to the given decision callback;
// the kernel channel itself is opened later via ConnectToKnComm.
func MakeKnComm(callback GoProcessCallback) *GoProcFilter {
	filter := new(GoProcFilter)
	filter.knComm = 0
	filter.callbackRoutine = callback
	return filter
}
func (goProcFlt *GoProcFilter) goProcFilterCallback(dataId uint32, processInfo *KnProcessInfo, dataSize uintptr, isReplyRequired uint32, context uintptr) uintptr {
processPath := syscall.UTF16ToString(processInfo.processPath[:KN_MAX_PATH])
allowed := true
allowed = goProcFlt.callbackRoutine(processInfo.parentProcessId, processInfo.processId, processPath)
decision := KnProcessDecision{
processId: processInfo.processId,
}
<|fim▁hole|> if allowed == true {
decision.isAllowed = 1
} else {
decision.isAllowed = 0
}
ret, _, _ := procReplyDataViaKnComm.Call(
uintptr(goProcFlt.knComm),
uintptr(dataId),
uintptr(unsafe.Pointer(&decision)),
uintptr(unsafe.Sizeof(decision)))
if ret == 0 {
fmt.Println("Error procReplyDataViaKnComm")
}
return 0
}
// ConnectToKnComm opens the communication channel to the kernel filter named
// FILTER_NAME and registers goProcFilterCallback as the data handler.
// Returns true on success or when already connected, false on failure.
func (goProcFlt *GoProcFilter) ConnectToKnComm() bool {
	if goProcFlt.knComm != 0 {
		// Already connected; nothing to do.
		return true
	}

	cbInfo := KnCommCbInfo{
		procOnDataRecv: syscall.NewCallback(goProcFlt.goProcFilterCallback),
		context:        0,
	}

	// The connect call fills in the handle via the out-pointer below.
	goProcFlt.knComm = 0
	ret, _, _ := procConnectToKnComm.Call(
		uintptr(unsafe.Pointer(syscall.StringToUTF16Ptr(FILTER_NAME))),
		uintptr(unsafe.Pointer(&cbInfo)),
		uintptr(unsafe.Pointer(&goProcFlt.knComm)))
	if ret == 0 {
		fmt.Println("Error procConnectToKnComm")
		return false
	}

	return true
}
// DisconnectFromKnComm closes the kernel communication channel, if open,
// and resets the handle. Returns true on success or when not connected.
func (goProcFlt *GoProcFilter) DisconnectFromKnComm() bool {
	if goProcFlt.knComm == 0 {
		// Not connected; nothing to do.
		return true
	}

	ret, _, _ := procDisconnectFromKnComm.Call(
		uintptr(unsafe.Pointer(goProcFlt.knComm)))
	if ret == 0 {
		fmt.Println("Error procDisconnectFromKnComm")
		return false
	}

	goProcFlt.knComm = 0
	return true
}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
BitBake 'Fetch' implementations
Classes for obtaining upstream sources for the
BitBake build tools.
"""
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2012 Intel Corporation
#
# SPDX-License-Identifier: GPL-2.0-only
#
# Based on functions from the base bb module, Copyright 2003 Holger Schurig
import os, re
import signal
import logging
import urllib.request, urllib.parse, urllib.error
if 'git' not in urllib.parse.uses_netloc:
urllib.parse.uses_netloc.append('git')
import operator
import collections
import subprocess
import pickle
import errno
import bb.persist_data, bb.utils
import bb.checksum
import bb.process
import bb.event
__version__ = "2"
_checksum_cache = bb.checksum.FileChecksumCache()
logger = logging.getLogger("BitBake.Fetcher")
class BBFetchException(Exception):
    """Base class that every fetcher exception inherits from."""
    def __init__(self, message):
        # Keep the raw message so subclasses and __str__ can reuse it.
        self.msg = message
        super().__init__(message)

    def __str__(self):
        return self.msg
class UntrustedUrl(BBFetchException):
    """Raised when a URL's host is not listed in BB_ALLOWED_NETWORKS."""
    def __init__(self, url, message=''):
        msg = message or "The URL: '%s' is not trusted and cannot be used" % url
        self.url = url
        super().__init__(msg)
        self.args = (url,)
class MalformedUrl(BBFetchException):
    """Raised when a URL cannot be parsed by decodeurl()."""
    def __init__(self, url, message=''):
        msg = message or "The URL: '%s' is invalid and cannot be interpreted" % url
        self.url = url
        super().__init__(msg)
        self.args = (url,)
class FetchError(BBFetchException):
    """General fetcher failure; the message includes the URL when known."""
    def __init__(self, message, url=None):
        if url:
            detail = "Fetcher failure for URL: '%s'. %s" % (url, message)
        else:
            detail = "Fetcher failure: %s" % message
        self.url = url
        super().__init__(detail)
        self.args = (message, url)
class ChecksumError(FetchError):
    """Exception when mismatched checksum encountered"""
    def __init__(self, message, url = None, checksum = None):
        # The offending checksum is kept so callers (e.g. rename_bad_checksum
        # via verify_donestamp) can tag the bad download with it.
        self.checksum = checksum
        FetchError.__init__(self, message, url)
class NoChecksumError(FetchError):
    """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
    # Raised by verify_checksum() when strict checking is "1" and the recipe
    # provides neither an md5 nor a sha256 sum for the download.
class UnpackError(BBFetchException):
    """Raised when unpacking a fetched URL fails."""
    def __init__(self, message, url):
        self.url = url
        super().__init__("Unpack failure for URL: '%s'. %s" % (url, message))
        self.args = (message, url)
class NoMethodError(BBFetchException):
    """Raised when no fetcher implementation supports the supplied URL."""
    def __init__(self, url):
        self.url = url
        super().__init__("Could not find a fetcher which supports the URL: '%s'" % url)
        self.args = (url,)
class MissingParameterError(BBFetchException):
    """Raised when a URL lacks a parameter its fetch method requires."""
    def __init__(self, missing, url):
        self.url = url
        self.missing = missing
        super().__init__("URL: '%s' is missing the required parameter '%s'" % (url, missing))
        self.args = (missing, url)
class ParameterError(BBFetchException):
    """Raised when a URL carries parameters that cannot be processed."""
    def __init__(self, message, url):
        self.url = url
        super().__init__("URL: '%s' has invalid parameters. %s" % (url, message))
        self.args = (message, url)
class NetworkAccess(BBFetchException):
    """Exception raised when network access is disabled but it is required."""
    def __init__(self, url, cmd):
        msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
        self.url = url
        # The command that required the network; surfaced in the message.
        self.cmd = cmd
        BBFetchException.__init__(self, msg)
        self.args = (url, cmd)
class NonLocalMethod(Exception):
    """Raised to signal that a fetch method is not a local one."""
    def __init__(self):
        super().__init__()
class MissingChecksumEvent(bb.event.Event):
    """Event fired when a download has no recipe checksum.

    Carries the URL plus the actual md5/sha256 sums of the fetched file so
    a handler can report the lines to add to the recipe.
    """
    def __init__(self, url, md5sum, sha256sum):
        self.url = url
        self.checksums = {'md5sum': md5sum,
                          'sha256sum': sha256sum}
        bb.event.Event.__init__(self)
class URI(object):
    """
    A class representing a generic URI, with methods for
    accessing the URI components, and stringifies to the
    URI.

    It is constructed by calling it with a URI, or setting
    the attributes manually:

     uri = URI("http://example.com/")

     uri = URI()
     uri.scheme = 'http'
     uri.hostname = 'example.com'
     uri.path = '/'

    It has the following attributes:

      * scheme (read/write)
      * userinfo (authentication information) (read/write)
        * username (read/write)
        * password (read/write)

        Note, password is deprecated as of RFC 3986.

      * hostname (read/write)
      * port (read/write)
      * hostport (read only)
        "hostname:port", if both are set, otherwise just "hostname"
      * path (read/write)
      * path_quoted (read/write)
        A URI quoted version of path
      * params (dict) (read/write)
      * query (dict) (read/write)
      * relative (bool) (read only)
        True if this is a "relative URI", (e.g. file:foo.diff)

    It stringifies to the URI itself.

    Some notes about relative URIs: while it's specified that
    a URI beginning with <scheme>:// should either be directly
    followed by a hostname or a /, the old URI handling of the
    fetch2 library did not comform to this. Therefore, this URI
    class has some kludges to make sure that URIs are parsed in
    a way comforming to bitbake's current usage. This URI class
    supports the following:

     file:relative/path.diff (IETF compliant)
     git:relative/path.git (IETF compliant)
     git:///absolute/path.git (IETF compliant)
     file:///absolute/path.diff (IETF compliant)

     file://relative/path.diff (not IETF compliant)

    But it does not support the following:

     file://hostname/absolute/path.diff (would be IETF compliant)

    Note that the last case only applies to a list of
    "whitelisted" schemes (currently only file://), that requires
    its URIs to not have a network location.
    """

    # Schemes whose URIs may be "relative" (no leading slash in the path).
    _relative_schemes = ['file', 'git']
    # Schemes that must not carry a network location (see class docstring).
    _netloc_forbidden = ['file']

    def __init__(self, uri=None):
        self.scheme = ''
        self.userinfo = ''
        self.hostname = ''
        self.port = None
        self._path = ''
        self.params = {}
        self.query = {}
        self.relative = False

        if not uri:
            return

        # We hijack the URL parameters, since the way bitbake uses
        # them are not quite RFC compliant.
        uri, param_str = (uri.split(";", 1) + [None])[:2]

        urlp = urllib.parse.urlparse(uri)
        self.scheme = urlp.scheme

        reparse = 0

        # Coerce urlparse to make URI scheme use netloc
        # NOTE(review): this appends to uses_params, not uses_netloc as the
        # comment above suggests — confirm whether that is intentional.
        if not self.scheme in urllib.parse.uses_netloc:
            urllib.parse.uses_params.append(self.scheme)
            reparse = 1

        # Make urlparse happy(/ier) by converting local resources
        # to RFC compliant URL format. E.g.:
        #   file://foo.diff -> file:foo.diff
        if urlp.scheme in self._netloc_forbidden:
            uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
            reparse = 1

        if reparse:
            urlp = urllib.parse.urlparse(uri)

        # Identify if the URI is relative or not
        if urlp.scheme in self._relative_schemes and \
           re.compile(r"^\w+:(?!//)").match(uri):
            self.relative = True

        if not self.relative:
            self.hostname = urlp.hostname or ''
            self.port = urlp.port

            self.userinfo += urlp.username or ''

            if urlp.password:
                self.userinfo += ':%s' % urlp.password

        self.path = urllib.parse.unquote(urlp.path)

        if param_str:
            self.params = self._param_str_split(param_str, ";")
        if urlp.query:
            self.query = self._param_str_split(urlp.query, "&")

    def __str__(self):
        userinfo = self.userinfo
        if userinfo:
            userinfo += '@'

        return "%s:%s%s%s%s%s%s" % (
            self.scheme,
            '' if self.relative else '//',
            userinfo,
            self.hostport,
            self.path_quoted,
            self._query_str(),
            self._param_str())

    def _param_str(self):
        # ";key=value" suffix for bitbake-style URL parameters, if any.
        return (
            ''.join([';', self._param_str_join(self.params, ";")])
            if self.params else '')

    def _query_str(self):
        # "?key=value" suffix for the RFC query string, if any.
        return (
            ''.join(['?', self._param_str_join(self.query, "&")])
            if self.query else '')

    def _param_str_split(self, string, elmdelim, kvdelim="="):
        # OrderedDict keeps the original parameter order for stringification.
        ret = collections.OrderedDict()
        for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim)]:
            ret[k] = v
        return ret

    def _param_str_join(self, dict_, elmdelim, kvdelim="="):
        return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])

    @property
    def hostport(self):
        if not self.port:
            return self.hostname
        return "%s:%d" % (self.hostname, self.port)

    @property
    def path_quoted(self):
        return urllib.parse.quote(self.path)

    @path_quoted.setter
    def path_quoted(self, path):
        self.path = urllib.parse.unquote(path)

    @property
    def path(self):
        return self._path

    @path.setter
    def path(self, path):
        self._path = path

        # Setting the path re-derives 'relative': absolute (or empty) paths
        # make the URI non-relative.
        if not path or re.compile("^/").match(path):
            self.relative = False
        else:
            self.relative = True

    @property
    def username(self):
        if self.userinfo:
            return (self.userinfo.split(":", 1))[0]
        return ''

    @username.setter
    def username(self, username):
        # Preserve any existing password when replacing the username.
        password = self.password
        self.userinfo = username
        if password:
            self.userinfo += ":%s" % password

    @property
    def password(self):
        if self.userinfo and ":" in self.userinfo:
            return (self.userinfo.split(":", 1))[1]
        return ''

    @password.setter
    def password(self, password):
        self.userinfo = "%s:%s" % (self.username, password)
def decodeurl(url):
"""Decodes an URL into the tokens (scheme, network location, path,
user, password, parameters).
"""
m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
if not m:
raise MalformedUrl(url)
type = m.group('type')
location = m.group('location')
if not location:
raise MalformedUrl(url)
user = m.group('user')
parm = m.group('parm')
locidx = location.find('/')
if locidx != -1 and type.lower() != 'file':
host = location[:locidx]
path = location[locidx:]
elif type.lower() == 'file':
host = ""
path = location
else:
host = location
path = "/"
if user:
m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
if m:
user = m.group('user')
pswd = m.group('pswd')
else:
user = ''
pswd = ''
p = collections.OrderedDict()
if parm:
for s in parm.split(';'):
if s:
if not '=' in s:
raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
s1, s2 = s.split('=')
p[s1] = s2
return type, host, urllib.parse.unquote(path), user, pswd, p
def encodeurl(decoded):
"""Encodes a URL from tokens (scheme, network location, path,
user, password, parameters).
"""
type, host, path, user, pswd, p = decoded
if not type:
raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
url = '%s://' % type
if user and type != "file":
url += "%s" % user
if pswd:
url += ":%s" % pswd
url += "@"
if host and type != "file":
url += "%s" % host
if path:
# Standardise path to ensure comparisons work
while '//' in path:
path = path.replace("//", "/")
url += "%s" % urllib.parse.quote(path)
if p:
for parm in p:
url += ";%s=%s" % (parm, p[parm])
return url
def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
    """Rewrite ud.url according to one (uri_find, uri_replace) mirror rule.

    Returns the rewritten URL string, or None when the rule does not match
    or produces no change.
    """
    if not ud.url or not uri_find or not uri_replace:
        logger.error("uri_replace: passed an undefined value, not replacing")
        return None
    # Each decoded URL is the 6-tuple from decodeurl():
    # (type, host, path, user, password, params)
    uri_decoded = list(decodeurl(ud.url))
    uri_find_decoded = list(decodeurl(uri_find))
    uri_replace_decoded = list(decodeurl(uri_replace))
    logger.debug(2, "For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
    result_decoded = ['', '', '', '', '', {}]
    for loc, i in enumerate(uri_find_decoded):
        result_decoded[loc] = uri_decoded[loc]
        regexp = i
        if loc == 0 and regexp and not regexp.endswith("$"):
            # Leaving the type unanchored can mean "https" matching "file" can become "files"
            # which is clearly undesirable.
            regexp += "$"
        if loc == 5:
            # Handle URL parameters
            if i:
                # Any specified URL parameters must match
                for k in uri_find_decoded[loc]:
                    if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
                        return None
            # Overwrite any specified replacement parameters
            for k in uri_replace_decoded[loc]:
                for l in replacements:
                    uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
                result_decoded[loc][k] = uri_replace_decoded[loc][k]
        elif (re.match(regexp, uri_decoded[loc])):
            if not uri_replace_decoded[loc]:
                result_decoded[loc] = ""
            else:
                for k in replacements:
                    uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
                #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
                result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
            if loc == 2:
                # Handle path manipulations
                basename = None
                if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
                    # If the source and destination url types differ, must be a mirrortarball mapping
                    basename = os.path.basename(mirrortarball)
                    # Kill parameters, they make no sense for mirror tarballs
                    uri_decoded[5] = {}
                elif ud.localpath and ud.method.supports_checksum(ud):
                    basename = os.path.basename(ud.localpath)
                if basename and not result_decoded[loc].endswith(basename):
                    result_decoded[loc] = os.path.join(result_decoded[loc], basename)
        else:
            # Component present in the find-URL but not matched -> rule fails.
            return None
    result = encodeurl(result_decoded)
    if result == ud.url:
        return None
    logger.debug(2, "For url %s returning %s" % (ud.url, result))
    return result
# Registered fetch method implementations (populated elsewhere in this module).
methods = []
# Per-URL fetch data cache; presumably keyed by URL string — populated
# outside this chunk.
urldata_cache = {}
# Head-revision snapshot taken by fetcher_init() and compared later by
# fetcher_compare_revisions().
saved_headrevs = {}
def fetcher_init(d):
    """
    Called to initialize the fetchers once the configuration data is known.
    Calls before this must not hit the cache.
    """
    # When to drop SCM head revisions controlled by user policy
    srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
    if srcrev_policy == "cache":
        logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
    elif srcrev_policy == "clear":
        logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        try:
            # Snapshot the old revisions so fetcher_compare_revisions() can
            # report what changed after the cache is cleared.
            bb.fetch2.saved_headrevs = revs.items()
        except:
            # Best-effort: a missing/unreadable cache just means no snapshot.
            pass
        revs.clear()
    else:
        raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)

    _checksum_cache.init_cache(d)

    # Give each registered fetch method a chance to initialise itself.
    for m in methods:
        if hasattr(m, "init"):
            m.init(d)
def fetcher_parse_save():
    # Persist the file-checksum cache's pending entries gathered during parsing.
    _checksum_cache.save_extras()
def fetcher_parse_done():
    # Merge checksum-cache data back into the main cache once parsing finishes.
    _checksum_cache.save_merge()
def fetcher_compare_revisions(d):
    """
    Compare the revisions in the persistent cache with the values saved
    at fetcher_init() time and return True/False on whether they've changed.
    """

    data = bb.persist_data.persist('BB_URI_HEADREVS', d).items()
    data2 = bb.fetch2.saved_headrevs

    # Return on the first difference; the 'changed' flag the original kept
    # was never read.
    for key in data:
        if key not in data2 or data2[key] != data[key]:
            logger.debug(1, "%s changed", key)
            return True
        else:
            logger.debug(2, "%s did not change", key)
    return False
def mirror_from_string(data):
    """Parse a whitespace-separated mirror specification into (find, replace) pairs."""
    # Literal "\n" sequences act as whitespace in mirror variables.
    tokens = (data or "").replace('\\n', ' ').split()
    if len(tokens) % 2 != 0:
        bb.warn('Invalid mirror data %s, should have paired members.' % data)
    # Pair consecutive tokens: zipping one iterator with itself consumes
    # two tokens per result tuple.
    it = iter(tokens)
    return list(zip(it, it))
def verify_checksum(ud, d, precomputed=None):
    """
    verify the MD5 and SHA256 checksum for downloaded src

    Raises a FetchError if one or both of the SRC_URI checksums do not match
    the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
    checksums specified.

    Returns a dict of checksums that can be stored in a done stamp file and
    passed in as precomputed parameter in a later call to avoid re-computing
    the checksums from the file. This allows verifying the checksums of the
    file against those in the recipe each time, rather than only after
    downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
    """
    # Avoid a mutable default argument; an empty dict means "compute both sums".
    if precomputed is None:
        precomputed = {}

    _MD5_KEY = "md5"
    _SHA256_KEY = "sha256"

    if ud.ignore_checksums or not ud.method.supports_checksum(ud):
        return {}

    # Restored: the md5 branch was lost to file corruption, leaving a
    # dangling 'if' and an undefined md5data below.
    if _MD5_KEY in precomputed:
        md5data = precomputed[_MD5_KEY]
    else:
        md5data = bb.utils.md5_file(ud.localpath)

    if _SHA256_KEY in precomputed:
        sha256data = precomputed[_SHA256_KEY]
    else:
        sha256data = bb.utils.sha256_file(ud.localpath)

    if ud.method.recommends_checksum(ud) and not ud.md5_expected and not ud.sha256_expected:
        # If strict checking enabled and neither sum defined, raise error
        strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
        if strict == "1":
            logger.error('No checksum specified for %s, please add at least one to the recipe:\n'
                         'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"' %
                         (ud.localpath, ud.md5_name, md5data,
                          ud.sha256_name, sha256data))
            raise NoChecksumError('Missing SRC_URI checksum', ud.url)

        bb.event.fire(MissingChecksumEvent(ud.url, md5data, sha256data), d)

        if strict == "ignore":
            return {
                _MD5_KEY: md5data,
                _SHA256_KEY: sha256data
            }

        # Log missing sums so user can more easily add them
        logger.warning('Missing md5 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.md5_name, md5data)

        logger.warning('Missing sha256 SRC_URI checksum for %s, consider adding to the recipe:\n'
                       'SRC_URI[%s] = "%s"',
                       ud.localpath, ud.sha256_name, sha256data)

    # We want to alert the user if a checksum is defined in the recipe but
    # it does not match.
    msg = ""
    mismatch = False
    if ud.md5_expected and ud.md5_expected != md5data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'md5', md5data, ud.md5_expected)
        mismatch = True
    if ud.sha256_expected and ud.sha256_expected != sha256data:
        msg = msg + "\nFile: '%s' has %s checksum %s when %s was expected" % (ud.localpath, 'sha256', sha256data, ud.sha256_expected)
        mismatch = True

    if mismatch:
        msg = msg + '\nIf this change is expected (e.g. you have upgraded to a new version without updating the checksums) then you can use these lines within the recipe:\nSRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"\nOtherwise you should retry the download and/or check with upstream to determine if the file has become corrupted or otherwise unexpectedly modified.\n' % (ud.md5_name, md5data, ud.sha256_name, sha256data)

    if len(msg):
        raise ChecksumError('Checksum mismatch!%s' % msg, ud.url, md5data)

    return {
        _MD5_KEY: md5data,
        _SHA256_KEY: sha256data
    }
def verify_donestamp(ud, d, origud=None):
    """
    Check whether the done stamp file has the right checksums (if the fetch
    method supports them). If it doesn't, delete the done stamp and force
    a re-download.

    Returns True, if the donestamp exists and is valid, False otherwise. When
    returning False, any existing done stamps are removed.
    """
    if not ud.needdonestamp or (origud and not origud.needdonestamp):
        return True

    if not os.path.exists(ud.localpath):
        # local path does not exist
        if os.path.exists(ud.donestamp):
            # done stamp exists, but the downloaded file does not; the done stamp
            # must be incorrect, re-trigger the download
            bb.utils.remove(ud.donestamp)
        return False

    if (not ud.method.supports_checksum(ud) or
        (origud and not origud.method.supports_checksum(origud))):
        # if done stamp exists and checksums not supported; assume the local
        # file is current
        return os.path.exists(ud.donestamp)

    precomputed_checksums = {}
    # Only re-use the precomputed checksums if the donestamp is newer than the
    # file. Do not rely on the mtime of directories, though. If ud.localpath is
    # a directory, there will probably not be any checksums anyway.
    if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
            os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
        try:
            with open(ud.donestamp, "rb") as cachefile:
                pickled = pickle.Unpickler(cachefile)
                precomputed_checksums.update(pickled.load())
        except Exception as e:
            # Avoid the warnings on the upgrade path from emtpy done stamp
            # files to those containing the checksums.
            if not isinstance(e, EOFError):
                # Ignore errors, they aren't fatal
                logger.warning("Couldn't load checksums from donestamp %s: %s "
                               "(msg: %s)" % (ud.donestamp, type(e).__name__,
                                              str(e)))

    try:
        checksums = verify_checksum(ud, d, precomputed_checksums)
        # If the cache file did not have the checksums, compute and store them
        # as an upgrade path from the previous done stamp file format.
        if checksums != precomputed_checksums:
            with open(ud.donestamp, "wb") as cachefile:
                # Pickle protocol 2 — presumably kept for compatibility with
                # stamps written by older releases; confirm before bumping.
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        return True
    except ChecksumError as e:
        # Checksums failed to verify, trigger re-download and remove the
        # incorrect stamp file.
        logger.warning("Checksum mismatch for local file %s\n"
                       "Cleaning and trying again." % ud.localpath)
        if os.path.exists(ud.localpath):
            rename_bad_checksum(ud, e.checksum)
        bb.utils.remove(ud.donestamp)
    return False
def update_stamp(ud, d):
    """
    The donestamp file marks a completed fetch; refresh or create it after
    verifying the checksum. Re-raises ChecksumError when verification fails.
    """
    if not ud.needdonestamp:
        return

    if os.path.exists(ud.donestamp):
        # Touch the done stamp file to show active use of the download
        try:
            os.utime(ud.donestamp, None)
        except:
            # Errors aren't fatal here
            pass
    else:
        try:
            checksums = verify_checksum(ud, d)
            # Store the checksums for later re-verification against the recipe
            with open(ud.donestamp, "wb") as cachefile:
                p = pickle.Pickler(cachefile, 2)
                p.dump(checksums)
        except ChecksumError as e:
            # Checksums failed to verify, trigger re-download and remove the
            # incorrect stamp file.
            logger.warning("Checksum mismatch for local file %s\n"
                           "Cleaning and trying again." % ud.localpath)
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
            bb.utils.remove(ud.donestamp)
            raise
def subprocess_setup():
    """Reset SIGPIPE to the default action before running non-Python subprocesses."""
    # Python installs a SIGPIPE handler by default. This is usually not what
    # non-Python subprocesses expect.
    # SIGPIPE errors are known issues with gzip/bash
    signal.signal(signal.SIGPIPE, signal.SIG_DFL)
def get_autorev(d):
    """Return the AUTOINC placeholder revision, disabling caching if needed."""
    # only not cache src rev in autorev case
    if d.getVar('BB_SRCREV_POLICY') != "cache":
        d.setVar('BB_DONT_CACHE', '1')
    # Placeholder; resolved to a concrete revision later (see get_srcrev).
    return "AUTOINC"
def get_srcrev(d, method_name='sortable_revision'):
    """
    Return the revision string, usually for use in the version string (PV) of the current package
    Most packages usually only have one SCM so we just pass on the call.
    In the multi SCM case, we build a value based on SRCREV_FORMAT which must
    have been set.

    The idea here is that we put the string "AUTOINC+" into return value if the revisions are not
    incremental, other code is then responsible for turning that into an increasing value (if needed)

    A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
    that fetcher provides a method with the given name and the same signature as sortable_revision.
    """

    scms = []
    fetcher = Fetch(d.getVar('SRC_URI').split(), d)
    urldata = fetcher.ud
    # Collect only the SRC_URI entries whose fetch method supports revisions.
    for u in urldata:
        if urldata[u].method.supports_srcrev():
            scms.append(u)

    if len(scms) == 0:
        raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")

    if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
        autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
        # Revisions are truncated to 10 characters.
        if len(rev) > 10:
            rev = rev[:10]
        if autoinc:
            return "AUTOINC+" + rev
        return rev

    #
    # Mutiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
    #
    format = d.getVar('SRCREV_FORMAT')
    if not format:
        raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
                         "The SCMs are:\n%s" % '\n'.join(scms))

    name_to_rev = {}
    seenautoinc = False
    for scm in scms:
        ud = urldata[scm]
        for name in ud.names:
            autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
            seenautoinc = seenautoinc or autoinc
            if len(rev) > 10:
                rev = rev[:10]
            name_to_rev[name] = rev
    # Replace names by revisions in the SRCREV_FORMAT string. The approach used
    # here can handle names being prefixes of other names and names appearing
    # as substrings in revisions (in which case the name should not be
    # expanded). The '|' regular expression operator tries matches from left to
    # right, so we need to sort the names with the longest ones first.
    names_descending_len = sorted(name_to_rev, key=len, reverse=True)
    name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
    format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)

    if seenautoinc:
        format = "AUTOINC+" + format

    return format
def localpath(url, d):
    """Return the local download path for *url* (convenience wrapper around Fetch)."""
    fetcher = bb.fetch2.Fetch([url], d)
    return fetcher.localpath(url)
def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
    """
    Run cmd returning the command output
    Raise an error if interrupted or cmd fails
    Optionally echo command output to stdout
    Optionally remove the files/directories listed in cleanup upon failure
    """
    # NOTE(review): 'quiet' is accepted but not referenced in this body —
    # confirm whether it is kept purely for API compatibility.

    # Need to export PATH as binary could be in metadata paths
    # rather than host provided
    # Also include some other variables.
    # FIXME: Should really include all export varaiables?
    exportvars = ['HOME', 'PATH',
                  'HTTP_PROXY', 'http_proxy',
                  'HTTPS_PROXY', 'https_proxy',
                  'FTP_PROXY', 'ftp_proxy',
                  'FTPS_PROXY', 'ftps_proxy',
                  'NO_PROXY', 'no_proxy',
                  'ALL_PROXY', 'all_proxy',
                  'GIT_PROXY_COMMAND',
                  'GIT_SSH',
                  'GIT_SSL_CAINFO',
                  'GIT_SMART_HTTP',
                  'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
                  'SOCKS5_USER', 'SOCKS5_PASSWD',
                  'DBUS_SESSION_BUS_ADDRESS',
                  'P4CONFIG']

    if not cleanup:
        cleanup = []

    # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
    # can end up in circular recursion here so give the option of breaking it
    # in a data store copy.
    try:
        d.getVar("PV")
        d.getVar("PR")
    except bb.data_smart.ExpansionError:
        d = bb.data.createCopy(d)
        d.setVar("PV", "fetcheravoidrecurse")
        d.setVar("PR", "fetcheravoidrecurse")

    origenv = d.getVar("BB_ORIGENV", False)
    # Re-export selected variables by prefixing the shell command; values come
    # from the datastore first, the original environment second.
    for var in exportvars:
        val = d.getVar(var) or (origenv and origenv.getVar(var))
        if val:
            cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)

    # Ensure that a _PYTHON_SYSCONFIGDATA_NAME value set by a recipe
    # (for example via python3native.bbclass since warrior) is not set for
    # host Python (otherwise tools like git-make-shallow will fail)
    cmd = 'unset _PYTHON_SYSCONFIGDATA_NAME; ' + cmd

    # Disable pseudo as it may affect ssh, potentially causing it to hang.
    cmd = 'export PSEUDO_DISABLED=1; ' + cmd

    if workdir:
        logger.debug(1, "Running '%s' in %s" % (cmd, workdir))
    else:
        logger.debug(1, "Running %s", cmd)

    success = False
    error_message = ""

    try:
        (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
        success = True
    except bb.process.NotFoundError as e:
        error_message = "Fetch command %s" % (e.command)
    except bb.process.ExecutionError as e:
        if e.stdout:
            output = "output:\n%s\n%s" % (e.stdout, e.stderr)
        elif e.stderr:
            output = "output:\n%s" % e.stderr
        else:
            output = "no output"
        error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
    except bb.process.CmdError as e:
        error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
    if not success:
        # Remove any partially-created artifacts the caller asked us to clean.
        for f in cleanup:
            try:
                bb.utils.remove(f, True)
            except OSError:
                pass

        raise FetchError(error_message)

    return output
def check_network_access(d, info, url):
    """
    log remote network access, and error if BB_NO_NETWORK is set or the given
    URI is untrusted
    """
    # Guard clauses: networking globally disabled, then host not trusted.
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        raise NetworkAccess(url, info)
    if not trusted_network(d, url):
        raise UntrustedUrl(url, info)
    logger.debug(1, "Fetcher accessed the network with the command %s" % info)
def build_mirroruris(origud, mirrors, ld):
    """Expand the 'mirrors' rules against 'origud'.

    Returns parallel lists (uris, uds) of candidate mirror URLs and their
    FetchData objects. Recurses so mirrors-of-mirrors are considered, and
    guards against cycles by removing the current rule from the list passed
    to nested calls.
    """
    uris = []
    uds = []

    # Substitution tokens available to mirror rules (see uri_replace()).
    replacements = {}
    replacements["TYPE"] = origud.type
    replacements["HOST"] = origud.host
    replacements["PATH"] = origud.path
    replacements["BASENAME"] = origud.path.split("/")[-1]
    replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')

    def adduri(ud, uris, uds, mirrors, tarballs):
        for line in mirrors:
            try:
                (find, replace) = line
            except ValueError:
                continue

            for tarball in tarballs:
                newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
                if not newuri or newuri in uris or newuri == origud.url:
                    continue

                if not trusted_network(ld, newuri):
                    logger.debug(1, "Mirror %s not in the list of trusted networks, skipping" % (newuri))
                    continue

                # Create a local copy of the mirrors minus the current line
                # this will prevent us from recursively processing the same line
                # as well as indirect recursion A -> B -> C -> A
                localmirrors = list(mirrors)
                localmirrors.remove(line)

                try:
                    newud = FetchData(newuri, ld)
                    newud.setup_localpath(ld)
                except bb.fetch2.BBFetchException as e:
                    logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
                    logger.debug(1, str(e))
                    try:
                        # setup_localpath of file:// urls may fail, we should still see
                        # if mirrors of the url exist
                        adduri(newud, uris, uds, localmirrors, tarballs)
                    except UnboundLocalError:
                        pass
                    continue
                uris.append(newuri)
                uds.append(newud)

                adduri(newud, uris, uds, localmirrors, tarballs)

    adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])

    return uris, uds
def rename_bad_checksum(ud, suffix):
    """
    Renames files to have suffix from parameter
    """
    # 'suffix' is typically the offending checksum (callers pass e.checksum),
    # so the bad download remains on disk under a recognisable name.
    if ud.localpath is None:
        return

    new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
    bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
    if not bb.utils.movefile(ud.localpath, new_localpath):
        bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, new_localpath))
def try_mirror_url(fetch, origud, ud, ld, check = False):
    """Attempt to fetch (or, with check=True, just probe) mirror candidate
    'ud' on behalf of the original URL data 'origud'."""
    # Return of None or a value means we're finished
    # False means try another url
    if ud.lockfile and ud.lockfile != origud.lockfile:
        lf = bb.utils.lockfile(ud.lockfile)

    try:
        if check:
            found = ud.method.checkstatus(fetch, ud, ld)
            if found:
                return found
            return False

        if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
            ud.method.download(ud, ld)
            if hasattr(ud.method,"build_mirror_data"):
                ud.method.build_mirror_data(ud, ld)

        if not ud.localpath or not os.path.exists(ud.localpath):
            return False

        if ud.localpath == origud.localpath:
            return ud.localpath

        # We may be obtaining a mirror tarball which needs further processing by the real fetcher
        # If that tarball is a local file:// we need to provide a symlink to it
        dldir = ld.getVar("DL_DIR")

        if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
            # Create donestamp in old format to avoid triggering a re-download
            if ud.donestamp:
                bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
                open(ud.donestamp, 'w').close()

            dest = os.path.join(dldir, os.path.basename(ud.localpath))
            if not os.path.exists(dest):
                # In case this is executing without any file locks held (as is
                # the case for file:// URLs), two tasks may end up here at the
                # same time, in which case we do not want the second task to
                # fail when the link has already been created by the first task.
                try:
                    os.symlink(ud.localpath, dest)
                except FileExistsError:
                    pass

            # Hand the mirror tarball over to the real fetcher for unpacking.
            if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
                origud.method.download(origud, ld)
                if hasattr(origud.method, "build_mirror_data"):
                    origud.method.build_mirror_data(origud, ld)
            return origud.localpath
        # Otherwise the result is a local file:// and we symlink to it
        ensure_symlink(ud.localpath, origud.localpath)
        update_stamp(origud, ld)
        return ud.localpath

    except bb.fetch2.NetworkAccess:
        raise

    except IOError as e:
        if e.errno in [errno.ESTALE]:
            logger.warning("Stale Error Observed %s." % ud.url)
            return False
        raise

    except bb.fetch2.BBFetchException as e:
        if isinstance(e, ChecksumError):
            logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
            logger.warning(str(e))
            if os.path.exists(ud.localpath):
                rename_bad_checksum(ud, e.checksum)
        elif isinstance(e, NoChecksumError):
            raise
        else:
            logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
            logger.debug(1, str(e))
        try:
            ud.method.clean(ud, ld)
        except UnboundLocalError:
            pass
        return False
    finally:
        if ud.lockfile and ud.lockfile != origud.lockfile:
            bb.utils.unlockfile(lf)
def ensure_symlink(target, link_name):
    """Create a symlink at link_name pointing at target, tolerating races.

    If link_name already resolves to an existing file nothing is done. A
    dangling (broken) symlink at link_name is removed first so the create
    below does not fail. Creation races with another task are ignored,
    since the competing task will have created an equivalent link.
    """
    if os.path.exists(link_name):
        return
    # A dangling symlink makes os.path.exists() return False; clear it so
    # os.symlink() does not raise FileExistsError for the stale entry.
    if os.path.islink(link_name):
        os.unlink(link_name)
    try:
        os.symlink(target, link_name)
    except FileExistsError:
        # No file lock is held for file:// URLs, so two tasks can get here
        # at the same time; losing the race to create the link is fine.
        pass
def try_mirrors(fetch, d, origud, mirrors, check=False):
    """
    Attempt to obtain (or, with check=True, merely probe) the sources from
    a mirrored location instead of the original URL.

    fetch   the Fetch instance driving this operation
    d       a bb.data instance
    origud  FetchData for the original URL we are trying to obtain
    mirrors the list of mirror specifications to try, in order

    Returns the first non-False result from a mirror, or None when every
    mirror failed.
    """
    ld = d.createCopy()

    uris, uds = build_mirroruris(origud, mirrors, ld)

    # uris and uds are parallel lists built together above; try each
    # candidate in order and stop at the first success.
    for uri, mirror_ud in zip(uris, uds):
        outcome = try_mirror_url(fetch, origud, mirror_ud, ld, check)
        if outcome != False:
            return outcome
    return None
def trusted_network(d, url):
    """
    Decide whether the host of *url* may be contacted for downloads.

    Returns True whenever the restriction does not apply: BB_NO_NETWORK is
    set (nothing will be fetched anyway), BB_ALLOWED_NETWORKS is unset, or
    the URL has no network component. Otherwise the URL's host must match
    an entry in BB_ALLOWED_NETWORKS; a leading "*." in an entry matches
    any subdomain of that host.
    """
    if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
        return True

    pkgname = d.expand(d.getVar('PN', False))

    trusted_hosts = None
    if pkgname:
        # A per-recipe flag takes precedence over the global variable.
        trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
    if not trusted_hosts:
        trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')

    # Feature not enabled - every network is acceptable.
    if not trusted_hosts:
        return True

    scheme, network, path, user, passwd, param = decodeurl(url)

    if not network:
        return True

    # Strip any port and normalise case before comparing.
    hostname = network.split(':')[0].lower()

    def permitted(entry):
        entry = entry.lower()
        if entry.startswith("*.") and ("." + hostname).endswith(entry[1:]):
            # Wildcard entry such as "*.example.com".
            return True
        return entry == hostname

    return any(permitted(entry) for entry in trusted_hosts.split(" "))
def srcrev_internal_helper(ud, d, name):
    """
    Resolve the source revision to use for the SRCREV entry *name*.

    Return:
        a) the revision given by a ;rev=/;tag= URL parameter or a SRCREV*
           variable, when one is set
        b) the SCM's latest revision when SRCREV="AUTOINC"
    Raises FetchError when both ;rev= and ;tag= are given, when the URL
    parameter conflicts with SRCREV, or when no valid revision is set at all.
    """
    srcrev = None
    pn = d.getVar("PN")
    attempts = []
    # Candidate variable names, most specific first: name+recipe, then
    # name only, then recipe only, then plain SRCREV.
    if name != '' and pn:
        attempts.append("SRCREV_%s_pn-%s" % (name, pn))
    if name != '':
        attempts.append("SRCREV_%s" % name)
    if pn:
        attempts.append("SRCREV_pn-%s" % pn)
    attempts.append("SRCREV")

    for a in attempts:
        srcrev = d.getVar(a)
        # First usable value wins; "INVALID" counts the same as unset.
        if srcrev and srcrev != "INVALID":
            break

    if 'rev' in ud.parm and 'tag' in ud.parm:
        raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))

    if 'rev' in ud.parm or 'tag' in ud.parm:
        if 'rev' in ud.parm:
            parmrev = ud.parm['rev']
        else:
            parmrev = ud.parm['tag']
        if srcrev == "INVALID" or not srcrev:
            return parmrev
        # A SRCREV variable and a URL parameter may coexist only when they
        # agree on the same revision.
        if srcrev != parmrev:
            raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
        return parmrev

    if srcrev == "INVALID" or not srcrev:
        raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)

    if srcrev == "AUTOINC":
        # Ask the SCM backend for its current tip.
        srcrev = ud.method.latest_revision(ud, d, name)

    return srcrev
def get_checksum_file_list(d):
    """ Get a list of files checksum in SRC_URI

    Returns the resolved local paths of all local file entries in
    SRC_URI as a space-separated string, each path suffixed with
    ":True"/":False" recording whether the file currently exists.
    """
    fetch = Fetch([], d, cache = False, localonly = True)

    dl_dir = d.getVar('DL_DIR')
    filelist = []
    for u in fetch.urls:
        ud = fetch.ud[u]

        # Only local file entries are checksummed here; ud can be None for
        # non-local URLs when the Fetch was built with localonly=True.
        if ud and isinstance(ud.method, local.Local):
            paths = ud.method.localpaths(ud, d)
            for f in paths:
                pth = ud.decodedurl
                if '*' in pth:
                    # Glob entries: keep the pattern relative to the
                    # resolved directory rather than a concrete file.
                    f = os.path.join(os.path.abspath(f), pth)
                if f.startswith(dl_dir):
                    # The local fetcher's behaviour is to return a path under DL_DIR if it couldn't find the file anywhere else
                    if os.path.exists(f):
                        bb.warn("Getting checksum for %s SRC_URI entry %s: file not found except in DL_DIR" % (d.getVar('PN'), os.path.basename(f)))
                    else:
                        bb.warn("Unable to get checksum for %s SRC_URI entry %s: file could not be found" % (d.getVar('PN'), os.path.basename(f)))
                filelist.append(f + ":" + str(os.path.exists(f)))

    return " ".join(filelist)
def get_file_checksums(filelist, pn):
    """Get a list of the checksums for a list of local files

    Returns the checksums for a list of local files, caching the results as
    it proceeds. The actual work (and the cache) lives in the module-level
    _checksum_cache; *pn* identifies the recipe the files belong to.
    """
    return _checksum_cache.get_checksums(filelist, pn)
class FetchData(object):
    """
    A class which represents the fetcher state for a given URI.

    Decodes the URL, selects the FetchMethod that handles it, resolves the
    expected checksums and computes the .done/.lock stamp paths under DL_DIR.
    """
    def __init__(self, url, d, localonly = False):
        # localpath is the location of a downloaded result. If not set, the file is local.
        self.donestamp = None
        self.needdonestamp = True
        self.localfile = ""
        self.localpath = None
        self.lockfile = None
        self.mirrortarballs = []
        self.basename = None
        self.basepath = None
        (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
        self.date = self.getSRCDate(d)
        self.url = url
        # URL parameters may supply credentials not embedded in the URL itself.
        if not self.user and "user" in self.parm:
            self.user = self.parm["user"]
        if not self.pswd and "pswd" in self.parm:
            self.pswd = self.parm["pswd"]
        self.setup = False

        # Checksum variable/flag names are namespaced by the ;name= parameter
        # so multiple entries in one SRC_URI can carry distinct checksums.
        if "name" in self.parm:
            self.md5_name = "%s.md5sum" % self.parm["name"]
            self.sha256_name = "%s.sha256sum" % self.parm["name"]
        else:
            self.md5_name = "md5sum"
            self.sha256_name = "sha256sum"
        # Expected checksum: URL parameter wins, then (for download schemes
        # only) the SRC_URI varflag; other schemes carry no checksum.
        if self.md5_name in self.parm:
            self.md5_expected = self.parm[self.md5_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.md5_expected = None
        else:
            self.md5_expected = d.getVarFlag("SRC_URI", self.md5_name)
        if self.sha256_name in self.parm:
            self.sha256_expected = self.parm[self.sha256_name]
        elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3"]:
            self.sha256_expected = None
        else:
            self.sha256_expected = d.getVarFlag("SRC_URI", self.sha256_name)
        self.ignore_checksums = False

        self.names = self.parm.get("name",'default').split(',')

        # Pick the first registered fetcher that claims this URL; the
        # registration order at the bottom of this module therefore matters.
        self.method = None
        for m in methods:
            if m.supports(self, d):
                self.method = m
                break

        if not self.method:
            raise NoMethodError(url)

        if localonly and not isinstance(self.method, local.Local):
            raise NonLocalMethod()

        # Accept the legacy "proto" parameter but steer users to "protocol".
        if self.parm.get("proto", None) and "protocol" not in self.parm:
            logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
            self.parm["protocol"] = self.parm.get("proto", None)

        if hasattr(self.method, "urldata_init"):
            self.method.urldata_init(self, d)

        if "localpath" in self.parm:
            # if user sets localpath for file, use it instead.
            self.localpath = self.parm["localpath"]
            self.basename = os.path.basename(self.localpath)
        elif self.localfile:
            self.localpath = self.method.localpath(self, d)

        dldir = d.getVar("DL_DIR")

        if not self.needdonestamp:
            return

        # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
        if self.localpath and self.localpath.startswith(dldir):
            basepath = self.localpath
        elif self.localpath:
            basepath = dldir + os.sep + os.path.basename(self.localpath)
        elif self.basepath or self.basename:
            basepath = dldir + os.sep + (self.basepath or self.basename)
        else:
            bb.fatal("Can't determine lock path for url %s" % url)

        self.donestamp = basepath + '.done'
        self.lockfile = basepath + '.lock'

    def setup_revisions(self, d):
        """Resolve and store the source revision for each named entry."""
        self.revisions = {}
        for name in self.names:
            self.revisions[name] = srcrev_internal_helper(self, d, name)

        # add compatibility code for non name specified case
        if len(self.names) == 1:
            self.revision = self.revisions[self.names[0]]

    def setup_localpath(self, d):
        """Lazily compute localpath via the selected fetch method."""
        if not self.localpath:
            self.localpath = self.method.localpath(self, d)

    def getSRCDate(self, d):
        """
        Return the SRC Date for the component

        d the bb.data module

        Precedence: the ;srcdate= URL parameter, then SRCDATE_<PN>,
        then SRCDATE, then DATE.
        """
        if "srcdate" in self.parm:
            return self.parm['srcdate']

        pn = d.getVar("PN")

        if pn:
            return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")

        return d.getVar("SRCDATE") or d.getVar("DATE")
class FetchMethod(object):
    """Base class for 'fetch'ing data"""

    def __init__(self, urls=None):
        # NOTE(review): the urls argument is not used here; presumably kept
        # for signature compatibility with callers/subclasses - confirm.
        self.urls = []

    def supports(self, urldata, d):
        """
        Check to see if this fetch class supports a given url.
        """
        return 0

    def localpath(self, urldata, d):
        """
        Return the local filename of a given url assuming a successful fetch.
        Can also setup variables in urldata for use in go (saving code duplication
        and duplicate code execution)
        """
        return os.path.join(d.getVar("DL_DIR"), urldata.localfile)

    def supports_checksum(self, urldata):
        """
        Is localpath something that can be represented by a checksum?
        """
        # We cannot compute checksums for directories
        if os.path.isdir(urldata.localpath) == True:
            return False
        # Nor for glob patterns that may expand to several files.
        if urldata.localpath.find("*") != -1:
            return False

        return True

    def recommends_checksum(self, urldata):
        """
        Is the backend on where checksumming is recommended (should warnings
        be displayed if there is no checksum)?
        """
        return False

    def _strip_leading_slashes(self, relpath):
        """
        Remove leading slash as os.path.join can't cope
        """
        while os.path.isabs(relpath):
            relpath = relpath[1:]
        return relpath

    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?

        Default policy: re-fetch only when the local file is missing.
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first

        Subclasses must override; the base class cannot download anything.
        """
        raise NoMethodError(urldata.url)

    def unpack(self, urldata, rootdir, data):
        """
        Unpack the downloaded artifact into rootdir.

        Chooses a shell command based on the file extension (tarballs,
        compressed files, zip/jar, rpm/srpm, deb/ipk); anything unrecognised
        - or entries with ;unpack=0 - is copied into place instead.
        """
        iterate = False
        file = urldata.localpath

        # Localpath can't deal with 'dir/*' entries, so it converts them to '.',
        # but it must be corrected back for local files copying
        if urldata.basename == '*' and file.endswith('/.'):
            file = '%s/%s' % (file.rstrip('/.'), urldata.path)

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        base, ext = os.path.splitext(file)
        # Single-file decompressors write their output to efile in rootdir.
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None

        if unpack:
            # Extension dispatch; more specific suffixes (e.g. .tar.xz) are
            # tested before their generic counterparts (.xz).
            if file.endswith('.tar'):
                cmd = 'tar x --no-same-owner -f %s' % file
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = 'tar xz --no-same-owner -f %s' % file
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.txz') or file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | tar x --no-same-owner -f -' % file
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    # ;dos=1 asks unzip to convert text-file line endings.
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    # ;extract= pulls a single file out of the rpm and then
                    # unpacks that file in a second, recursive pass below.
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
                datafile = None
                if output:
                    # for/else: the else branch fires only when no
                    # data.tar.* member was found (loop ran to completion).
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && tar --no-same-owner -xpf %s && rm %s' % (file, datafile, datafile, datafile)

        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                # An absolute subdir must still live below the unpack root.
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in path must be created at destination
                if urldata.type == "file":
                    # Trailing '/' does a copying to wrong place
                    urlpath = urldata.path.rstrip('/')
                    # Want files places relative to cwd so no leading '/'
                    urlpath = urlpath.lstrip('/')
                    if urlpath.find("/") != -1:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                cmd = 'cp -fpPRH %s %s' % (file, destdir)

        if not cmd:
            return

        path = data.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            # Second pass: unpack the file extracted from the rpm above.
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return

    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def checkstatus(self, fetch, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first

        Base implementation cannot check anything and optimistically
        reports success.
        """
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True

    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            # Cache miss: query the SCM and memoise the answer.
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        # Keyed per recipe (PN) so different recipes never share cache slots.
        key = self._revision_key(ud, d, name)
        return "%s-%s" % (key, d.getVar("PN") or "")

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.x" in "x.y.x+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match as a (version, revision) pair.
        """
        return ('', '')
class Fetch(object):
    """
    Driver object tying a set of SRC_URI entries to their FetchData state.

    Provides the high-level operations: download (with premirror/upstream/
    mirror fallback), checkstatus, unpack and clean.
    """
    def __init__(self, urls, d, cache = True, localonly = False, connection_cache = None):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if len(urls) == 0:
            urls = d.getVar("SRC_URI").split()
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache

        fn = d.getVar('FILE')
        mc = d.getVar('__BBMULTICONFIG') or ""
        # Reuse previously computed FetchData, keyed by multiconfig+recipe file.
        if cache and fn and mc + fn in urldata_cache:
            self.ud = urldata_cache[mc + fn]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    # In localonly mode, remember non-local entries as None
                    # rather than failing the whole construction.
                    if localonly:
                        self.ud[url] = None
                        pass

        if fn and cache:
            urldata_cache[mc + fn] = self.ud

    def localpath(self, url):
        """Return the (expanded) local path for one URL, creating its
        FetchData on demand for URLs not in the original list."""
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local

    def download(self, urls=None):
        """
        Fetch all urls

        For each URL the order of attempts is: existing verified download,
        PREMIRRORS, upstream, then MIRRORS. BB_FETCH_PREMIRRORONLY disables
        network access after the premirror stage.
        """
        if not urls:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK")
        premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            localpath = ""

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            try:
                # Restore the original network setting; a previous iteration
                # may have set BB_NO_NETWORK below for premirror-only mode.
                self.d.setVar("BB_NO_NETWORK", network)

                if verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                    # Already downloaded and verified - nothing to do.
                    localpath = ud.localpath
                elif m.try_premirror(ud, self.d):
                    logger.debug(1, "Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                    localpath = try_mirrors(self, self.d, ud, mirrors, False)
                    if localpath:
                        try:
                            # early checksum verification so that if the checksum of the premirror
                            # contents mismatch the fetcher can still try upstream and mirrors
                            update_stamp(ud, self.d)
                        except ChecksumError as e:
                            logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
                            logger.debug(1, str(e))
                            localpath = ""

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                firsterr = None
                verified_stamp = verify_donestamp(ud, self.d)
                if not localpath and (not verified_stamp or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug(1, "Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        localpath = ud.localpath
                        # early checksum verify, so that if checksum mismatched,
                        # fetcher still have chance to fetch from mirror
                        update_stamp(ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(1, str(e))
                            if os.path.exists(ud.localpath):
                                rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(1, str(e))
                        # Remember the upstream error so it can be reported
                        # if the mirrors fail too.
                        firsterr = e
                        # Remove any incomplete fetch
                        if not verified_stamp:
                            m.clean(ud, self.d)
                        logger.debug(1, "Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                        localpath = try_mirrors(self, self.d, ud, mirrors)

                if not localpath or ((not os.path.exists(localpath)) and localpath.find("*") == -1):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                update_stamp(ud, self.d)

            except IOError as e:
                # NOTE(review): only ESTALE is re-raised; other IOErrors fall
                # through this handler silently - confirm that is intended.
                if e.errno in [errno.ESTALE]:
                    logger.error("Stale Error Observed %s." % u)
                    raise ChecksumError("Stale Error Detected")

            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                if ud.lockfile:
                    bb.utils.unlockfile(lf)

    def checkstatus(self, urls=None):
        """
        Check all urls exist upstream

        Raises FetchError for any URL that neither the premirrors, the
        original location nor the mirrors can satisfy.
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug(1, "Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
            ret = try_mirrors(self, self.d, ud, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                ret = m.checkstatus(self, ud, self.d)
                if not ret:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                    ret = try_mirrors(self, self.d, ud, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)

    def unpack(self, root, urls=None):
        """
        Unpack urls to root

        Holds each entry's lock file (when one exists) around the unpack.
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
        """
        if not urls:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            # Nothing was ever downloaded for this entry - skip.
            if not ud.localfile and ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)
class FetchConnectionCache(object):
"""
A class which represents an container for socket connections.
"""
def __init__(self):
self.cache = {}
def get_connection_name(self, host, port):
return host + ':' + str(port)
def add_connection(self, host, port, connection):
cn = self.get_connection_name(host, port)
if cn not in self.cache:
self.cache[cn] = connection
def get_connection(self, host, port):
connection = None
cn = self.get_connection_name(host, port)
if cn in self.cache:
connection = self.cache[cn]
return connection
def remove_connection(self, host, port):
cn = self.get_connection_name(host, port)
if cn in self.cache:
self.cache[cn].close()
del self.cache[cn]
def close_connections(self):
for cn in list(self.cache.keys()):
self.cache[cn].close()
del self.cache[cn]
# Import the individual fetcher implementations; each module provides a
# FetchMethod subclass handling one URL scheme / SCM type.
from . import cvs
from . import git
from . import gitsm
from . import gitannex
from . import local
from . import svn
from . import wget
from . import ssh
from . import sftp
from . import s3
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase
from . import npm

# Register the fetchers. Order matters: FetchData.__init__ walks this list
# and picks the first method whose supports() accepts the URL, so local
# files and wget (plain http/https/ftp) come before the SCM handlers.
methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
else:
md5data = bb.utils.md5_file(ud.localpath)
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var should = require('should');
var sinon = require('sinon');
var Chintz = require('../lib/main');
var atomName = 'test-atom';
var moleculeName = 'test-molecule';
var invalidName = 'invalid-element-name';
var called;
// prepare() must be chainable: with or without an element name it returns
// the Chintz instance itself so calls can be strung together.
describe('prepare', function() {

  describe('with no arguments', function() {
    it('returns itself', function() {
      var chintz = new Chintz(__dirname + '/chintz');
      var result = chintz.prepare();
      result.should.eql(chintz);
    });
  });

  describe('passed an element name', function() {
    it('returns itself', function() {
      var chintz = new Chintz(__dirname + '/chintz');
      var result = chintz.prepare(atomName);
      result.should.eql(chintz);
    });
  });
});
// render() resolves a prepared element by name, applies the supplied data
// to its template and hands the resulting string to the callback. Each case
// uses the file-level `called` flag to prove the async callback actually ran
// before mocha's done() fired.
describe('render', function() {

  // Declared but not referenced by the assertions below; kept as-is.
  var chintz;
  var result;

  describe('unprepared element', function() {
    // Unknown element names render to an empty string.
    var expected = "";

    beforeEach(function(done) {
      var callback = function(s) {
        called = true;
        s.should.eql(expected);
        done();
      };
      called = false;
      new Chintz(__dirname + '/chintz')
        .prepare(invalidName)
        .render(invalidName, null, callback);
    });

    it('calls back with an empty string', function() {
      called.should.be.true;
    });
  });

  describe('prepared element', function() {

    describe('with no data', function() {
      var expected = "Test atom template \n";

      beforeEach(function(done) {
        var callback = function(s) {
          called = true;
          s.should.eql(expected);
          done();
        };
        called = false;
        new Chintz(__dirname + '/chintz')
          .prepare(atomName)
          .render(atomName, null, callback);
      });

      it('calls back with the template', function() {
        called.should.be.true;
      });
    });

    describe('with bad data', function() {
      // Keys the template does not use are ignored.
      var expected = "Test atom template \n";

      beforeEach(function(done) {
        var callback = function(s) {
          called = true;
          s.should.eql(expected);
          done();
        };
        called = false;
        new Chintz(__dirname + '/chintz')
          .prepare(atomName)
          .render(atomName, { non_existent_key: 'blah' }, callback);
      });

      it('calls back with the template', function() {
        called.should.be.true;
      });
    });

    describe('with good data', function() {
      // A matching key is substituted into the template.
      var string = "-- string value to template in --";
      var expected = "Test atom template " + string + "\n";

      beforeEach(function(done) {
        var callback = function(s) {
          called = true;
          s.should.eql(expected);
          done();
        };
        called = false;
        new Chintz(__dirname + '/chintz')
          .prepare(atomName)
          .render(atomName, { string: string }, callback);
      });

      it('calls back with the template, expected', function() {
        called.should.be.true;
      });
    });
  });

  // Molecules embed atoms; rendering a molecule renders its nested atom too.
  describe('prepared nested elements', function() {

    describe('with no data', function() {
      var expected = "Test molecule template, with nested Test atom template \n\n";

      beforeEach(function(done) {
        var callback = function(s) {
          called = true;
          s.should.eql(expected);
          done();
        };
        called = false;
        new Chintz(__dirname + '/chintz')
          .prepare(moleculeName)
          .render(moleculeName, null, callback);
      });

      it('calls back with the template', function() {
        called.should.be.true;
      });
    });

    describe('with bad data', function() {
      var expected = "Test molecule template, with nested Test atom template \n\n";

      beforeEach(function(done) {
        var callback = function(s) {
          called = true;
          s.should.eql(expected);
          done();
        };
        called = false;
        new Chintz(__dirname + '/chintz')
          .prepare(moleculeName)
          .render(moleculeName, { non_existent_key: 'blah' }, callback);
      });

      it('calls back with the template', function() {
        called.should.be.true;
      });
    });

    describe('with good data', function() {
      // Data is distributed to both the nested atom and the molecule.
      var string = "-- atom string --";
      var molString = "-- molecule string --";
      var expected = "Test molecule template, with nested Test atom template " + string + "\n" + molString + "\n";

      beforeEach(function(done) {
        var callback = function(s) {
          called = true;
          s.should.eql(expected);
          done();
        };
        called = false;
        new Chintz(__dirname + '/chintz')
          .prepare(moleculeName)
          .render(moleculeName, { string: string, molString: molString }, callback);
      });

      it('calls back with the template, expected', function() {
        called.should.be.true;
      });
    });
  });
});
// getDependencies() collects the named dependency group from prepared
// elements, optionally passing them through a registered handler's format().
describe('getDependencies', function() {

  describe('get non existent deps', function() {
    beforeEach(function(done) {
      // Unknown dependency types yield an empty array.
      var expected = [];
      var callback = function(d) {
        called = true;
        d.should.eql(expected);
        done();
      };
      called = false;
      new Chintz(__dirname + '/chintz')
        .prepare(atomName)
        .getDependencies('nonexistent', callback);
    });

    it('calls back with an empty array', function() {
      called.should.be.true;
    });
  });

  describe('get existing dependencies', function() {
    beforeEach(function(done) {
      var expected = [ "test dependency" ];
      var callback = function(d) {
        called = true;
        d.should.eql(expected);
        done();
      };
      called = false;
      new Chintz(__dirname + '/chintz')
        .prepare(atomName)
        .getDependencies('test_deps', callback);
    });

    it('calls back with the expected dependencies', function() {
      called.should.be.true;
    });
  });

  describe('get handled dependencies', function() {
    beforeEach(function(done) {
      var expected = [ "a handled dependency" ];
      var callback = function(d) {
        called = true;
        d.should.eql(expected);
        done();
      };
      called = false;
      // A registered handler's format() replaces the raw dependency list.
      new Chintz(__dirname + '/chintz')
        .registerHandlers(
          {
            'handled_test_deps': {
              format: function(deps) {
                return expected
              }
            }
          }
        )
        .prepare(atomName)
        .getDependencies('handled_test_deps', callback);
    });

    it('calls back with the expected dependencies', function() {
      called.should.be.true;
    });
  });
});
<|file_name|>photo_grid.js<|end_file_name|><|fim▁begin|>// app.photoGrid
var Backbone = require("backbone");
// var _ = require("underscore");
var $ = require("jquery");
var ImageGridFuncs = require("./photo_grid_functions");
var ImageCollection = require("../models/photo_grid_image_collection");
var ImageView = require("./photo_grid_image");
// Backbone view driving the photo grid (#photo-grid) and its slideshow
// popover. Reads its configuration from a JSON payload embedded in a
// single div.hid inside the grid element.
module.exports = Backbone.View.extend({
    el: '#photo-grid',

    initialize: function () {
        "use strict";
        if (this.$el.length === 1) {
            var gridJSON = this.$(".hid");
            if (gridJSON.length === 1) {
                this.funcs = new ImageGridFuncs();
                this.template = this.funcs.slideTemplate();
                // there is only one allowed div.hid
                gridJSON = JSON.parse(gridJSON[0].innerHTML);
                if (gridJSON.spacer_URL && gridJSON.image_URL) {
                    this.model.set({
                        parentModel: this.model, // pass as reference
                        spacerURL: gridJSON.spacer_URL,
                        imageURL: gridJSON.image_URL,
                        spacers: gridJSON.spacers,
                        images: gridJSON.images,
                        // shuffle image order:
                        imagesShuffled: this.funcs.shuffleArray(gridJSON.images),
                    });
                    this.setupGrid();
                }
                // Redraw when the current slide changes; tear the slideshow
                // down when the global popover is dismissed.
                this.model.on({
                    'change:currentSlide': this.modelChange
                }, this);
                app.mindbodyModel.on({
                    'change:popoverVisible': this.killSlides
                }, this);
            }
        }
    },

    // Build one ImageView per (shuffled) image, pairing each with a
    // randomly chosen spacer, and append the views to the grid element.
    setupGrid: function () {
        "use strict";
        var that = this,
            spacers = this.model.get("spacers"),
            randomInt,
            imageCollection = new ImageCollection(),
            i;
        for (i = 0; i < this.model.get("images").length; i += 1) {
            randomInt = that.funcs.getRandomInt(0, spacers.length);
            imageCollection.add({
                // push some info to individual views:
                parentModel: this.model,
                order: i,
                spacerURL: this.model.get("spacerURL"),
                spacer: spacers[randomInt],
                imageURL: this.model.get("imageURL"),
                image: this.model.get("imagesShuffled")[i],
            });
        }
        imageCollection.each(this.imageView, this);
    },

    // Render a single grid cell and attach it to this view's element.
    imageView: function (imageModel) {
        "use strict";
        var imageView = new ImageView({model: imageModel});
        this.$el.append(imageView.render().el);
    },

    // Show the slide for model.currentSlide inside the shared popover,
    // sizing the image to fit the slide container.
    modelChange: function () {
        "use strict";
        var currSlide = this.model.get("currentSlide"),
            allImages = this.model.get("imageURL"),
            imageInfo,
            imageViewer,
            imgWidth,
            slideDiv;
        if (currSlide !== false) {
            if (app.mindbodyModel.get("popoverVisible") !== true) {
                app.mindbodyModel.set({popoverVisible : true});
            }
            // retrieve cached DOM object:
            imageViewer = app.mbBackGroundShader.openPopUp("imageViewer");
            // set the stage:
            imageViewer.html(this.template(this.model.toJSON()));
            // select div.slide, the ugly way:
            slideDiv = imageViewer[0].getElementsByClassName("slide")[0];
            // pull the array of info about the image:
            imageInfo = this.model.get("images")[currSlide];
            // calculate the size of the image when it fits the slideshow:
            imgWidth = this.funcs.findSlideSize(imageInfo,
                    slideDiv.offsetWidth,
                    slideDiv.offsetHeight);
            slideDiv.innerHTML = '<img src="' + allImages + imageInfo.filename +
                '" style="width: ' + imgWidth + 'px;" />';
        }
    },

    // Reset currentSlide once the popover has been hidden.
    killSlides: function () {
        "use strict";
        if (app.mindbodyModel.get("popoverVisible") === false) {
            // popover is gone. No more slideshow.
            this.model.set({currentSlide : false});
        }
    },
});
<|file_name|>test_udpipe.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# vim:fileencoding=utf8
from __future__ import unicode_literals
<|fim▁hole|>import unittest
class TestUDPipe(unittest.TestCase):
    """End-to-end check of the ufal.udpipe bindings: tokenize, tag and
    parse one Czech sentence and compare the CoNLL-U serialization."""

    def test_model(self):
        import ufal.udpipe

        # Requires the bundled test model relative to the working directory.
        model = ufal.udpipe.Model.load('test/data/test.model')
        self.assertTrue(model)

        tokenizer = model.newTokenizer(model.DEFAULT)
        conlluOutput = ufal.udpipe.OutputFormat.newOutputFormat("conllu")
        sentence = ufal.udpipe.Sentence()
        error = ufal.udpipe.ProcessingError();

        # Tokenize the input; exactly one sentence is expected.
        tokenizer.setText("Znamená to, že realitě nepodléhá. ");
        self.assertTrue(tokenizer.nextSentence(sentence, error))
        self.assertFalse(error.occurred())

        self.assertTrue(model.tag(sentence, model.DEFAULT))
        self.assertTrue(model.parse(sentence, model.DEFAULT))

        # The serialized output must match the model's reference analysis.
        self.assertEqual(conlluOutput.writeSentence(sentence), """\
# newdoc
# newpar
# sent_id = 1
# text = Znamená to, že realitě nepodléhá.
1 Znamená znamenat VERB VB-S---3P-AA--- Aspect=Imp|Mood=Ind|Negative=Pos|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 0 root _ _
2 to ten PRON PDNS1---------- Case=Nom|Gender=Neut|Number=Sing|PronType=Dem 1 nsubj _ SpaceAfter=No
3 , , PUNCT Z:------------- _ 6 punct _ _
4 že že SCONJ J,------------- _ 6 mark _ _
5 realitě realita NOUN NNFS3-----A---- Case=Dat|Gender=Fem|Negative=Pos|Number=Sing 6 dobj _ _
6 nepodléhá podléhat VERB VB-S---3P-NA--- Aspect=Imp|Mood=Ind|Negative=Neg|Number=Sing|Person=3|Tense=Pres|VerbForm=Fin|Voice=Act 1 ccomp _ SpaceAfter=No
7 . . PUNCT Z:------------- _ 1 punct _ _
""")

        # No further sentences in the input.
        self.assertFalse(tokenizer.nextSentence(sentence))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
<|file_name|>periodictable.py<|end_file_name|><|fim▁begin|>#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2017 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Elemental masses (most common isotope), symbols, and atomic numbers from psi4.
"""
# Full element names (upper case) indexed by atomic number; index 0 is a
# ghost atom.  Must stay aligned with _temp_z = range(0, 108) below.
# Fix: a missing comma between "LAWRENCIUM" and "RUTHERFORDIUM" caused
# implicit string-literal concatenation, yielding one bogus entry
# "LAWRENCIUMRUTHERFORDIUM" and shifting every element after Z=103.
_temp_element = ["GHOST", "HYDROGEN", "HELIUM", "LITHIUM", "BERYLLIUM",
                 "BORON", "CARBON", "NITROGEN", "OXYGEN", "FLUORINE",
                 "NEON", "SODIUM", "MAGNESIUM", "ALUMINUM", "SILICON",
                 "PHOSPHORUS", "SULFUR", "CHLORINE", "ARGON", "POTASSIUM",
                 "CALCIUM", "SCANDIUM", "TITANIUM", "VANADIUM", "CHROMIUM",
                 "MANGANESE", "IRON", "COBALT", "NICKEL", "COPPER",
                 "ZINC", "GALLIUM", "GERMANIUM", "ARSENIC", "SELENIUM",
                 "BROMINE", "KRYPTON", "RUBIDIUM", "STRONTIUM", "YTTRIUM",
                 "ZIRCONIUM", "NIOBIUM", "MOLYBDENUM", "TECHNETIUM", "RUTHENIUM",
                 "RHODIUM", "PALLADIUM", "SILVER", "CADMIUM", "INDIUM",
                 "TIN", "ANTIMONY", "TELLURIUM", "IODINE", "XENON",
                 "CESIUM", "BARIUM", "LANTHANUM", "CERIUM", "PRASEODYMIUM",
                 "NEODYMIUM", "PROMETHIUM", "SAMARIUM", "EUROPIUM", "GADOLINIUM",
                 "TERBIUM", "DYSPROSIUM", "HOLMIUM", "ERBIUM", "THULIUM",
                 "YTTERBIUM", "LUTETIUM", "HAFNIUM", "TANTALUM", "TUNGSTEN",
                 "RHENIUM", "OSMIUM", "IRIDIUM", "PLATINUM", "GOLD",
                 "MERCURY", "THALLIUM", "LEAD", "BISMUTH", "POLONIUM",
                 "ASTATINE", "RADON", "FRANCIUM", "RADIUM", "ACTINIUM",
                 "THORIUM", "PROTACTINIUM", "URANIUM", "NEPTUNIUM", "PLUTONIUM",
                 "AMERICIUM", "CURIUM", "BERKELIUM", "CALIFORNIUM", "EINSTEINIUM",
                 "FERMIUM", "MENDELEVIUM", "NOBELIUM", "LAWRENCIUM", "RUTHERFORDIUM",
                 "DUBNIUM", "SEABORGIUM", "BOHRIUM"]
# Atomic symbols (upper case) indexed by atomic number; index 0 ("X") is a
# ghost atom.  NOTE(review): this list runs through "UUO" (pre-2016 IUPAC
# placeholder names), extending past _temp_z/_temp_mass — presumably
# intentional; verify against callers before shortening.
_temp_symbol = ["X", "H", "HE", "LI", "BE", "B", "C", "N", "O", "F", "NE", "NA", "MG",
"AL", "SI", "P", "S", "CL", "AR", "K", "CA", "SC", "TI", "V", "CR", "MN", "FE", "CO",
"NI", "CU", "ZN", "GA", "GE", "AS", "SE", "BR", "KR", "RB", "SR", "Y", "ZR", "NB",
"MO", "TC", "RU", "RH", "PD", "AG", "CD", "IN", "SN", "SB", "TE", "I", "XE", "CS",
"BA", "LA", "CE", "PR", "ND", "PM", "SM", "EU", "GD", "TB", "DY", "HO", "ER", "TM",
"YB", "LU", "HF", "TA", "W", "RE", "OS", "IR", "PT", "AU", "HG", "TL", "PB", "BI",
"PO", "AT", "RN", "FR", "RA", "AC", "TH", "PA", "U", "NP", "PU", "AM", "CM", "BK",
"CF", "ES", "FM", "MD", "NO", "LR", "RF", "DB", "SG", "BH", "HS", "MT", "DS", "RG",
"UUB", "UUT", "UUQ", "UUP", "UUH", "UUS", "UUO"]
_temp_z = list(range(0, 108))
# Mass of the most common isotope (per the module docstring), indexed by
# atomic number; index 0 is the ghost atom (mass 0).  Carbon is exactly 12
# by definition of the atomic mass unit.
_temp_mass = [
    0., 1.00782503207, 4.00260325415, 7.016004548, 9.012182201, 11.009305406,
    12, 14.00307400478, 15.99491461956, 18.998403224, 19.99244017542,
    22.98976928087, 23.985041699, 26.981538627, 27.97692653246, 30.973761629,
    31.972070999, 34.968852682, 39.96238312251, 38.963706679, 39.962590983,
    44.955911909, 47.947946281, 50.943959507, 51.940507472, 54.938045141,
    55.934937475, 58.933195048, 57.935342907, 62.929597474, 63.929142222,
    68.925573587, 73.921177767, 74.921596478, 79.916521271, 78.918337087,
    85.910610729, 84.911789737, 87.905612124, 88.905848295, 89.904704416,
    92.906378058, 97.905408169, 98.906254747, 101.904349312, 102.905504292,
    105.903485715, 106.90509682, 113.90335854, 114.903878484, 119.902194676,
    120.903815686, 129.906224399, 126.904472681, 131.904153457, 132.905451932,
    137.905247237, 138.906353267, 139.905438706, 140.907652769, 141.907723297,
    144.912749023, 151.919732425, 152.921230339, 157.924103912, 158.925346757,
    163.929174751, 164.93032207, 165.930293061, 168.93421325, 173.938862089,
    174.940771819, 179.946549953, 180.947995763, 183.950931188, 186.955753109,
    191.96148069, 192.96292643, 194.964791134, 196.966568662, 201.970643011,
    204.974427541, 207.976652071, 208.980398734, 208.982430435, 210.987496271,
    222.017577738, 222.01755173, 228.031070292, 227.027752127, 232.038055325,
    231.03588399, 238.050788247, 237.048173444, 242.058742611, 243.06138108,
    247.07035354, 247.07030708, 251.079586788, 252.082978512, 257.095104724,
    258.098431319, 255.093241131, 260.105504, 263.112547, 255.107398, 259.114500,
    262.122892, 263.128558, 265.136151, 281.162061, 272.153615, 283.171792, 283.176451,
    285.183698, 287.191186, 292.199786, 291.206564, 293.214670]
_temp_iso_symbol = [
"H", "H1", "H2", "D", "H3", "T", "H4", "H5", "H6", "H7", "HE", "HE3", "HE4",
"HE5", "HE6", "HE7", "HE8", "HE9", "HE10", "LI", "LI3", "LI4", "LI5", "LI6",
"LI7", "LI8", "LI9", "LI10", "LI11", "LI12", "BE", "BE5", "BE6", "BE7", "BE8",
"BE9", "BE10", "BE11", "BE12", "BE13", "BE14", "BE15", "BE16", "B", "B6", "B7",
"B8", "B9", "B10", "B11", "B12", "B13", "B14", "B15", "B16", "B17", "B18", "B19",
"C", "C8", "C9", "C10", "C11", "C12", "C13", "C14", "C15", "C16", "C17", "C18",
"C19", "C20", "C21", "C22", "N", "N10", "N11", "N12", "N13", "N14", "N15", "N16",
"N17", "N18", "N19", "N20", "N21", "N22", "N23", "N24", "N25", "O", "O12", "O13",
"O14", "O15", "O16", "O17", "O18", "O19", "O20", "O21", "O22", "O23", "O24",
"O25", "O26", "O27", "O28", "F", "F14", "F15", "F16", "F17", "F18", "F19", "F20",
"F21", "F22", "F23", "F24", "F25", "F26", "F27", "F28", "F29", "F30", "F31",
"NE", "NE16", "NE17", "NE18", "NE19", "NE20", "NE21", "NE22", "NE23", "NE24",
"NE25", "NE26", "NE27", "NE28", "NE29", "NE30", "NE31", "NE32", "NE33", "NE34",
"NA", "NA18", "NA19", "NA20", "NA21", "NA22", "NA23", "NA24", "NA25", "NA26",
"NA27", "NA28", "NA29", "NA30", "NA31", "NA32", "NA33", "NA34", "NA35", "NA36",
"NA37", "MG", "MG19", "MG20", "MG21", "MG22", "MG23", "MG24", "MG25", "MG26",
"MG27", "MG28", "MG29", "MG30", "MG31", "MG32", "MG33", "MG34", "MG35", "MG36",
"MG37", "MG38", "MG39", "MG40", "AL", "AL21", "AL22", "AL23", "AL24", "AL25",
"AL26", "AL27", "AL28", "AL29", "AL30", "AL31", "AL32", "AL33", "AL34", "AL35",
"AL36", "AL37", "AL38", "AL39", "AL40", "AL41", "AL42", "SI", "SI22", "SI23",
"SI24", "SI25", "SI26", "SI27", "SI28", "SI29", "SI30", "SI31", "SI32", "SI33",
"SI34", "SI35", "SI36", "SI37", "SI38", "SI39", "SI40", "SI41", "SI42", "SI43",
"SI44", "P", "P24", "P25", "P26", "P27", "P28", "P29", "P30", "P31", "P32",
"P33", "P34", "P35", "P36", "P37", "P38", "P39", "P40", "P41", "P42", "P43",
"P44", "P45", "P46", "S", "S26", "S27", "S28", "S29", "S30", "S31", "S32", "S33",
"S34", "S35", "S36", "S37", "S38", "S39", "S40", "S41", "S42", "S43", "S44",
"S45", "S46", "S47", "S48", "S49", "CL", "CL28", "CL29", "CL30", "CL31", "CL32",
"CL33", "CL34", "CL35", "CL36", "CL37", "CL38", "CL39", "CL40", "CL41", "CL42",
"CL43", "CL44", "CL45", "CL46", "CL47", "CL48", "CL49", "CL50", "CL51", "AR",
"AR30", "AR31", "AR32", "AR33", "AR34", "AR35", "AR36", "AR37", "AR38", "AR39",
"AR40", "AR41", "AR42", "AR43", "AR44", "AR45", "AR46", "AR47", "AR48", "AR49",
"AR50", "AR51", "AR52", "AR53", "K", "K32", "K33", "K34", "K35", "K36", "K37",
"K38", "K39", "K40", "K41", "K42", "K43", "K44", "K45", "K46", "K47", "K48",
"K49", "K50", "K51", "K52", "K53", "K54", "K55", "CA", "CA34", "CA35", "CA36",
"CA37", "CA38", "CA39", "CA40", "CA41", "CA42", "CA43", "CA44", "CA45", "CA46",
"CA47", "CA48", "CA49", "CA50", "CA51", "CA52", "CA53", "CA54", "CA55", "CA56",
"CA57", "SC", "SC36", "SC37", "SC38", "SC39", "SC40", "SC41", "SC42", "SC43",
"SC44", "SC45", "SC46", "SC47", "SC48", "SC49", "SC50", "SC51", "SC52", "SC53",
"SC54", "SC55", "SC56", "SC57", "SC58", "SC59", "SC60", "TI", "TI38", "TI39",
"TI40", "TI41", "TI42", "TI43", "TI44", "TI45", "TI46", "TI47", "TI48", "TI49",
"TI50", "TI51", "TI52", "TI53", "TI54", "TI55", "TI56", "TI57", "TI58", "TI59",
"TI60", "TI61", "TI62", "TI63", "V", "V40", "V41", "V42", "V43", "V44", "V45",
"V46", "V47", "V48", "V49", "V50", "V51", "V52", "V53", "V54", "V55", "V56",
"V57", "V58", "V59", "V60", "V61", "V62", "V63", "V64", "V65", "CR", "CR42",
"CR43", "CR44", "CR45", "CR46", "CR47", "CR48", "CR49", "CR50", "CR51", "CR52",
"CR53", "CR54", "CR55", "CR56", "CR57", "CR58", "CR59", "CR60", "CR61", "CR62",
"CR63", "CR64", "CR65", "CR66", "CR67", "MN", "MN44", "MN45", "MN46", "MN47",
"MN48", "MN49", "MN50", "MN51", "MN52", "MN53", "MN54", "MN55", "MN56", "MN57",
"MN58", "MN59", "MN60", "MN61", "MN62", "MN63", "MN64", "MN65", "MN66", "MN67",
"MN68", "MN69", "FE", "FE45", "FE46", "FE47", "FE48", "FE49", "FE50", "FE51",
"FE52", "FE53", "FE54", "FE55", "FE56", "FE57", "FE58", "FE59", "FE60", "FE61",
"FE62", "FE63", "FE64", "FE65", "FE66", "FE67", "FE68", "FE69", "FE70", "FE71",
"FE72", "CO", "CO47", "CO48", "CO49", "CO50", "CO51", "CO52", "CO53", "CO54",
"CO55", "CO56", "CO57", "CO58", "CO59", "CO60", "CO61", "CO62", "CO63", "CO64",
"CO65", "CO66", "CO67", "CO68", "CO69", "CO70", "CO71", "CO72", "CO73", "CO74",
"CO75", "NI", "NI48", "NI49", "NI50", "NI51", "NI52", "NI53", "NI54", "NI55",
"NI56", "NI57", "NI58", "NI59", "NI60", "NI61", "NI62", "NI63", "NI64", "NI65",
"NI66", "NI67", "NI68", "NI69", "NI70", "NI71", "NI72", "NI73", "NI74", "NI75",
"NI76", "NI77", "NI78", "CU", "CU52", "CU53", "CU54", "CU55", "CU56", "CU57",
"CU58", "CU59", "CU60", "CU61", "CU62", "CU63", "CU64", "CU65", "CU66", "CU67",
"CU68", "CU69", "CU70", "CU71", "CU72", "CU73", "CU74", "CU75", "CU76", "CU77",
"CU78", "CU79", "CU80", "ZN", "ZN54", "ZN55", "ZN56", "ZN57", "ZN58", "ZN59",
"ZN60", "ZN61", "ZN62", "ZN63", "ZN64", "ZN65", "ZN66", "ZN67", "ZN68", "ZN69",
"ZN70", "ZN71", "ZN72", "ZN73", "ZN74", "ZN75", "ZN76", "ZN77", "ZN78", "ZN79",
"ZN80", "ZN81", "ZN82", "ZN83", "GA", "GA56", "GA57", "GA58", "GA59", "GA60",
"GA61", "GA62", "GA63", "GA64", "GA65", "GA66", "GA67", "GA68", "GA69", "GA70",
"GA71", "GA72", "GA73", "GA74", "GA75", "GA76", "GA77", "GA78", "GA79", "GA80",
"GA81", "GA82", "GA83", "GA84", "GA85", "GA86", "GE", "GE58", "GE59", "GE60",
"GE61", "GE62", "GE63", "GE64", "GE65", "GE66", "GE67", "GE68", "GE69", "GE70",
"GE71", "GE72", "GE73", "GE74", "GE75", "GE76", "GE77", "GE78", "GE79", "GE80",
"GE81", "GE82", "GE83", "GE84", "GE85", "GE86", "GE87", "GE88", "GE89", "AS",
"AS60", "AS61", "AS62", "AS63", "AS64", "AS65", "AS66", "AS67", "AS68", "AS69",
"AS70", "AS71", "AS72", "AS73", "AS74", "AS75", "AS76", "AS77", "AS78", "AS79",
"AS80", "AS81", "AS82", "AS83", "AS84", "AS85", "AS86", "AS87", "AS88", "AS89",
"AS90", "AS91", "AS92", "SE", "SE65", "SE66", "SE67", "SE68", "SE69", "SE70",
"SE71", "SE72", "SE73", "SE74", "SE75", "SE76", "SE77", "SE78", "SE79", "SE80",
"SE81", "SE82", "SE83", "SE84", "SE85", "SE86", "SE87", "SE88", "SE89", "SE90",
"SE91", "SE92", "SE93", "SE94", "BR", "BR67", "BR68", "BR69", "BR70", "BR71",
"BR72", "BR73", "BR74", "BR75", "BR76", "BR77", "BR78", "BR79", "BR80", "BR81",
"BR82", "BR83", "BR84", "BR85", "BR86", "BR87", "BR88", "BR89", "BR90", "BR91",
"BR92", "BR93", "BR94", "BR95", "BR96", "BR97", "KR", "KR69", "KR70", "KR71",
"KR72", "KR73", "KR74", "KR75", "KR76", "KR77", "KR78", "KR79", "KR80", "KR81",
"KR82", "KR83", "KR84", "KR85", "KR86", "KR87", "KR88", "KR89", "KR90", "KR91",
"KR92", "KR93", "KR94", "KR95", "KR96", "KR97", "KR98", "KR99", "KR100", "RB",
"RB71", "RB72", "RB73", "RB74", "RB75", "RB76", "RB77", "RB78", "RB79", "RB80",
"RB81", "RB82", "RB83", "RB84", "RB85", "RB86", "RB87", "RB88", "RB89", "RB90",
"RB91", "RB92", "RB93", "RB94", "RB95", "RB96", "RB97", "RB98", "RB99",
"RB100", "RB101", "RB102", "SR", "SR73", "SR74", "SR75", "SR76", "SR77",
"SR78", "SR79", "SR80", "SR81", "SR82", "SR83", "SR84", "SR85", "SR86", "SR87",
"SR88", "SR89", "SR90", "SR91", "SR92", "SR93", "SR94", "SR95", "SR96", "SR97",
"SR98", "SR99", "SR100", "SR101", "SR102", "SR103", "SR104", "SR105", "Y",
"Y76", "Y77", "Y78", "Y79", "Y80", "Y81", "Y82", "Y83", "Y84", "Y85", "Y86",
"Y87", "Y88", "Y89", "Y90", "Y91", "Y92", "Y93", "Y94", "Y95", "Y96", "Y97",
"Y98", "Y99", "Y100", "Y101", "Y102", "Y103", "Y104", "Y105", "Y106", "Y107",
"Y108", "ZR", "ZR78", "ZR79", "ZR80", "ZR81", "ZR82", "ZR83", "ZR84", "ZR85",
"ZR86", "ZR87", "ZR88", "ZR89", "ZR90", "ZR91", "ZR92", "ZR93", "ZR94", "ZR95",
"ZR96", "ZR97", "ZR98", "ZR99", "ZR100", "ZR101", "ZR102", "ZR103", "ZR104",
"ZR105", "ZR106", "ZR107", "ZR108", "ZR109", "ZR110", "NB", "NB81", "NB82",
"NB83", "NB84", "NB85", "NB86", "NB87", "NB88", "NB89", "NB90", "NB91", "NB92",
"NB93", "NB94", "NB95", "NB96", "NB97", "NB98", "NB99", "NB100", "NB101",
"NB102", "NB103", "NB104", "NB105", "NB106", "NB107", "NB108", "NB109",
"NB110", "NB111", "NB112", "NB113", "MO", "MO83", "MO84", "MO85", "MO86",
"MO87", "MO88", "MO89", "MO90", "MO91", "MO92", "MO93", "MO94", "MO95", "MO96",
"MO97", "MO98", "MO99", "MO100", "MO101", "MO102", "MO103", "MO104", "MO105",
"MO106", "MO107", "MO108", "MO109", "MO110", "MO111", "MO112", "MO113",
"MO114", "MO115", "TC", "TC85", "TC86", "TC87", "TC88", "TC89", "TC90", "TC91",
"TC92", "TC93", "TC94", "TC95", "TC96", "TC97", "TC98", "TC99", "TC100",
"TC101", "TC102", "TC103", "TC104", "TC105", "TC106", "TC107", "TC108",
"TC109", "TC110", "TC111", "TC112", "TC113", "TC114", "TC115", "TC116",
"TC117", "TC118", "RU", "RU87", "RU88", "RU89", "RU90", "RU91", "RU92", "RU93",
"RU94", "RU95", "RU96", "RU97", "RU98", "RU99", "RU100", "RU101", "RU102",
"RU103", "RU104", "RU105", "RU106", "RU107", "RU108", "RU109", "RU110",
"RU111", "RU112", "RU113", "RU114", "RU115", "RU116", "RU117", "RU118",
"RU119", "RU120", "RH", "RH89", "RH90", "RH91", "RH92", "RH93", "RH94", "RH95",
"RH96", "RH97", "RH98", "RH99", "RH100", "RH101", "RH102", "RH103", "RH104",
"RH105", "RH106", "RH107", "RH108", "RH109", "RH110", "RH111", "RH112",
"RH113", "RH114", "RH115", "RH116", "RH117", "RH118", "RH119", "RH120",
"RH121", "RH122", "PD", "PD91", "PD92", "PD93", "PD94", "PD95", "PD96", "PD97",
"PD98", "PD99", "PD100", "PD101", "PD102", "PD103", "PD104", "PD105", "PD106",
"PD107", "PD108", "PD109", "PD110", "PD111", "PD112", "PD113", "PD114",
"PD115", "PD116", "PD117", "PD118", "PD119", "PD120", "PD121", "PD122",
"PD123", "PD124", "AG", "AG93", "AG94", "AG95", "AG96", "AG97", "AG98", "AG99",
"AG100", "AG101", "AG102", "AG103", "AG104", "AG105", "AG106", "AG107",
"AG108", "AG109", "AG110", "AG111", "AG112", "AG113", "AG114", "AG115",
"AG116", "AG117", "AG118", "AG119", "AG120", "AG121", "AG122", "AG123",
"AG124", "AG125", "AG126", "AG127", "AG128", "AG129", "AG130", "CD", "CD95",
"CD96", "CD97", "CD98", "CD99", "CD100", "CD101", "CD102", "CD103", "CD104",
"CD105", "CD106", "CD107", "CD108", "CD109", "CD110", "CD111", "CD112",
"CD113", "CD114", "CD115", "CD116", "CD117", "CD118", "CD119", "CD120",
"CD121", "CD122", "CD123", "CD124", "CD125", "CD126", "CD127", "CD128",
"CD129", "CD130", "CD131", "CD132", "IN", "IN97", "IN98", "IN99", "IN100",
"IN101", "IN102", "IN103", "IN104", "IN105", "IN106", "IN107", "IN108",
"IN109", "IN110", "IN111", "IN112", "IN113", "IN114", "IN115", "IN116",
"IN117", "IN118", "IN119", "IN120", "IN121", "IN122", "IN123", "IN124",
"IN125", "IN126", "IN127", "IN128", "IN129", "IN130", "IN131", "IN132",
"IN133", "IN134", "IN135", "SN", "SN99", "SN100", "SN101", "SN102", "SN103",
"SN104", "SN105", "SN106", "SN107", "SN108", "SN109", "SN110", "SN111",
"SN112", "SN113", "SN114", "SN115", "SN116", "SN117", "SN118", "SN119",
"SN120", "SN121", "SN122", "SN123", "SN124", "SN125", "SN126", "SN127",
"SN128", "SN129", "SN130", "SN131", "SN132", "SN133", "SN134", "SN135",
"SN136", "SN137", "SB", "SB103", "SB104", "SB105", "SB106", "SB107", "SB108",
"SB109", "SB110", "SB111", "SB112", "SB113", "SB114", "SB115", "SB116",
"SB117", "SB118", "SB119", "SB120", "SB121", "SB122", "SB123", "SB124",
"SB125", "SB126", "SB127", "SB128", "SB129", "SB130", "SB131", "SB132",
"SB133", "SB134", "SB135", "SB136", "SB137", "SB138", "SB139", "TE", "TE105",
"TE106", "TE107", "TE108", "TE109", "TE110", "TE111", "TE112", "TE113",
"TE114", "TE115", "TE116", "TE117", "TE118", "TE119", "TE120", "TE121",
"TE122", "TE123", "TE124", "TE125", "TE126", "TE127", "TE128", "TE129",
"TE130", "TE131", "TE132", "TE133", "TE134", "TE135", "TE136", "TE137",
"TE138", "TE139", "TE140", "TE141", "TE142", "I", "I108", "I109", "I110",
"I111", "I112", "I113", "I114", "I115", "I116", "I117", "I118", "I119", "I120",
"I121", "I122", "I123", "I124", "I125", "I126", "I127", "I128", "I129", "I130",
"I131", "I132", "I133", "I134", "I135", "I136", "I137", "I138", "I139", "I140",
"I141", "I142", "I143", "I144", "XE", "XE110", "XE111", "XE112", "XE113",
"XE114", "XE115", "XE116", "XE117", "XE118", "XE119", "XE120", "XE121",
"XE122", "XE123", "XE124", "XE125", "XE126", "XE127", "XE128", "XE129",
"XE130", "XE131", "XE132", "XE133", "XE134", "XE135", "XE136", "XE137",
"XE138", "XE139", "XE140", "XE141", "XE142", "XE143", "XE144", "XE145",
"XE146", "XE147", "CS", "CS112", "CS113", "CS114", "CS115", "CS116", "CS117",
"CS118", "CS119", "CS120", "CS121", "CS122", "CS123", "CS124", "CS125",
"CS126", "CS127", "CS128", "CS129", "CS130", "CS131", "CS132", "CS133",
"CS134", "CS135", "CS136", "CS137", "CS138", "CS139", "CS140", "CS141",
"CS142", "CS143", "CS144", "CS145", "CS146", "CS147", "CS148", "CS149",
"CS150", "CS151", "BA", "BA114", "BA115", "BA116", "BA117", "BA118", "BA119",
"BA120", "BA121", "BA122", "BA123", "BA124", "BA125", "BA126", "BA127",
"BA128", "BA129", "BA130", "BA131", "BA132", "BA133", "BA134", "BA135",
"BA136", "BA137", "BA138", "BA139", "BA140", "BA141", "BA142", "BA143",
"BA144", "BA145", "BA146", "BA147", "BA148", "BA149", "BA150", "BA151",
"BA152", "BA153", "LA", "LA117", "LA118", "LA119", "LA120", "LA121", "LA122",
"LA123", "LA124", "LA125", "LA126", "LA127", "LA128", "LA129", "LA130",
"LA131", "LA132", "LA133", "LA134", "LA135", "LA136", "LA137", "LA138",
"LA139", "LA140", "LA141", "LA142", "LA143", "LA144", "LA145", "LA146",
"LA147", "LA148", "LA149", "LA150", "LA151", "LA152", "LA153", "LA154",
"LA155", "CE", "CE119", "CE120", "CE121", "CE122", "CE123", "CE124", "CE125",
"CE126", "CE127", "CE128", "CE129", "CE130", "CE131", "CE132", "CE133",
"CE134", "CE135", "CE136", "CE137", "CE138", "CE139", "CE140", "CE141",
"CE142", "CE143", "CE144", "CE145", "CE146", "CE147", "CE148", "CE149",
"CE150", "CE151", "CE152", "CE153", "CE154", "CE155", "CE156", "CE157", "PR",
"PR121", "PR122", "PR123", "PR124", "PR125", "PR126", "PR127", "PR128",
"PR129", "PR130", "PR131", "PR132", "PR133", "PR134", "PR135", "PR136",
"PR137", "PR138", "PR139", "PR140", "PR141", "PR142", "PR143", "PR144",
"PR145", "PR146", "PR147", "PR148", "PR149", "PR150", "PR151", "PR152",
"PR153", "PR154", "PR155", "PR156", "PR157", "PR158", "PR159", "ND", "ND124",
"ND125", "ND126", "ND127", "ND128", "ND129", "ND130", "ND131", "ND132",
"ND133", "ND134", "ND135", "ND136", "ND137", "ND138", "ND139", "ND140",
"ND141", "ND142", "ND143", "ND144", "ND145", "ND146", "ND147", "ND148",
"ND149", "ND150", "ND151", "ND152", "ND153", "ND154", "ND155", "ND156",
"ND157", "ND158", "ND159", "ND160", "ND161", "PM", "PM126", "PM127", "PM128",
"PM129", "PM130", "PM131", "PM132", "PM133", "PM134", "PM135", "PM136",
"PM137", "PM138", "PM139", "PM140", "PM141", "PM142", "PM143", "PM144",
"PM145", "PM146", "PM147", "PM148", "PM149", "PM150", "PM151", "PM152",
"PM153", "PM154", "PM155", "PM156", "PM157", "PM158", "PM159", "PM160",
"PM161", "PM162", "PM163", "SM", "SM128", "SM129", "SM130", "SM131", "SM132",
"SM133", "SM134", "SM135", "SM136", "SM137", "SM138", "SM139", "SM140",
"SM141", "SM142", "SM143", "SM144", "SM145", "SM146", "SM147", "SM148",
"SM149", "SM150", "SM151", "SM152", "SM153", "SM154", "SM155", "SM156",
"SM157", "SM158", "SM159", "SM160", "SM161", "SM162", "SM163", "SM164",
"SM165", "EU", "EU130", "EU131", "EU132", "EU133", "EU134", "EU135", "EU136",
"EU137", "EU138", "EU139", "EU140", "EU141", "EU142", "EU143", "EU144",
"EU145", "EU146", "EU147", "EU148", "EU149", "EU150", "EU151", "EU152",
"EU153", "EU154", "EU155", "EU156", "EU157", "EU158", "EU159", "EU160",
"EU161", "EU162", "EU163", "EU164", "EU165", "EU166", "EU167", "GD", "GD134",
"GD135", "GD136", "GD137", "GD138", "GD139", "GD140", "GD141", "GD142",
"GD143", "GD144", "GD145", "GD146", "GD147", "GD148", "GD149", "GD150",
"GD151", "GD152", "GD153", "GD154", "GD155", "GD156", "GD157", "GD158",
"GD159", "GD160", "GD161", "GD162", "GD163", "GD164", "GD165", "GD166",
"GD167", "GD168", "GD169", "TB", "TB136", "TB137", "TB138", "TB139", "TB140",
"TB141", "TB142", "TB143", "TB144", "TB145", "TB146", "TB147", "TB148",
"TB149", "TB150", "TB151", "TB152", "TB153", "TB154", "TB155", "TB156",
"TB157", "TB158", "TB159", "TB160", "TB161", "TB162", "TB163", "TB164",
"TB165", "TB166", "TB167", "TB168", "TB169", "TB170", "TB171", "DY", "DY138",
"DY139", "DY140", "DY141", "DY142", "DY143", "DY144", "DY145", "DY146",
"DY147", "DY148", "DY149", "DY150", "DY151", "DY152", "DY153", "DY154",
"DY155", "DY156", "DY157", "DY158", "DY159", "DY160", "DY161", "DY162",
"DY163", "DY164", "DY165", "DY166", "DY167", "DY168", "DY169", "DY170",
"DY171", "DY172", "DY173", "HO", "HO140", "HO141", "HO142", "HO143", "HO144",
"HO145", "HO146", "HO147", "HO148", "HO149", "HO150", "HO151", "HO152",
"HO153", "HO154", "HO155", "HO156", "HO157", "HO158", "HO159", "HO160",
"HO161", "HO162", "HO163", "HO164", "HO165", "HO166", "HO167", "HO168",
"HO169", "HO170", "HO171", "HO172", "HO173", "HO174", "HO175", "ER", "ER143",
"ER144", "ER145", "ER146", "ER147", "ER148", "ER149", "ER150", "ER151",
"ER152", "ER153", "ER154", "ER155", "ER156", "ER157", "ER158", "ER159",
"ER160", "ER161", "ER162", "ER163", "ER164", "ER165", "ER166", "ER167",
"ER168", "ER169", "ER170", "ER171", "ER172", "ER173", "ER174", "ER175",
"ER176", "ER177", "TM", "TM145", "TM146", "TM147", "TM148", "TM149", "TM150",
"TM151", "TM152", "TM153", "TM154", "TM155", "TM156", "TM157", "TM158",
"TM159", "TM160", "TM161", "TM162", "TM163", "TM164", "TM165", "TM166",
"TM167", "TM168", "TM169", "TM170", "TM171", "TM172", "TM173", "TM174",
"TM175", "TM176", "TM177", "TM178", "TM179", "YB", "YB148", "YB149", "YB150",
"YB151", "YB152", "YB153", "YB154", "YB155", "YB156", "YB157", "YB158",
"YB159", "YB160", "YB161", "YB162", "YB163", "YB164", "YB165", "YB166",
"YB167", "YB168", "YB169", "YB170", "YB171", "YB172", "YB173", "YB174",
"YB175", "YB176", "YB177", "YB178", "YB179", "YB180", "YB181", "LU", "LU150",
"LU151", "LU152", "LU153", "LU154", "LU155", "LU156", "LU157", "LU158",
"LU159", "LU160", "LU161", "LU162", "LU163", "LU164", "LU165", "LU166",
"LU167", "LU168", "LU169", "LU170", "LU171", "LU172", "LU173", "LU174",
"LU175", "LU176", "LU177", "LU178", "LU179", "LU180", "LU181", "LU182",
"LU183", "LU184", "HF", "HF153", "HF154", "HF155", "HF156", "HF157", "HF158",
"HF159", "HF160", "HF161", "HF162", "HF163", "HF164", "HF165", "HF166",
"HF167", "HF168", "HF169", "HF170", "HF171", "HF172", "HF173", "HF174",
"HF175", "HF176", "HF177", "HF178", "HF179", "HF180", "HF181", "HF182",
"HF183", "HF184", "HF185", "HF186", "HF187", "HF188", "TA", "TA155", "TA156",
"TA157", "TA158", "TA159", "TA160", "TA161", "TA162", "TA163", "TA164",
"TA165", "TA166", "TA167", "TA168", "TA169", "TA170", "TA171", "TA172",
"TA173", "TA174", "TA175", "TA176", "TA177", "TA178", "TA179", "TA180",
"TA181", "TA182", "TA183", "TA184", "TA185", "TA186", "TA187", "TA188",
"TA189", "TA190", "W", "W158", "W159", "W160", "W161", "W162", "W163", "W164",
"W165", "W166", "W167", "W168", "W169", "W170", "W171", "W172", "W173", "W174",
"W175", "W176", "W177", "W178", "W179", "W180", "W181", "W182", "W183", "W184",
"W185", "W186", "W187", "W188", "W189", "W190", "W191", "W192", "RE", "RE160",
"RE161", "RE162", "RE163", "RE164", "RE165", "RE166", "RE167", "RE168",
"RE169", "RE170", "RE171", "RE172", "RE173", "RE174", "RE175", "RE176",
"RE177", "RE178", "RE179", "RE180", "RE181", "RE182", "RE183", "RE184",
"RE185", "RE186", "RE187", "RE188", "RE189", "RE190", "RE191", "RE192",
"RE193", "RE194", "OS", "OS162", "OS163", "OS164", "OS165", "OS166", "OS167",
"OS168", "OS169", "OS170", "OS171", "OS172", "OS173", "OS174", "OS175",
"OS176", "OS177", "OS178", "OS179", "OS180", "OS181", "OS182", "OS183",
"OS184", "OS185", "OS186", "OS187", "OS188", "OS189", "OS190", "OS191",
"OS192", "OS193", "OS194", "OS195", "OS196", "IR", "IR164", "IR165", "IR166",
"IR167", "IR168", "IR169", "IR170", "IR171", "IR172", "IR173", "IR174",
"IR175", "IR176", "IR177", "IR178", "IR179", "IR180", "IR181", "IR182",
"IR183", "IR184", "IR185", "IR186", "IR187", "IR188", "IR189", "IR190",
"IR191", "IR192", "IR193", "IR194", "IR195", "IR196", "IR197", "IR198",
"IR199", "PT", "PT166", "PT167", "PT168", "PT169", "PT170", "PT171", "PT172",
"PT173", "PT174", "PT175", "PT176", "PT177", "PT178", "PT179", "PT180",
"PT181", "PT182", "PT183", "PT184", "PT185", "PT186", "PT187", "PT188",
"PT189", "PT190", "PT191", "PT192", "PT193", "PT194", "PT195", "PT196",
"PT197", "PT198", "PT199", "PT200", "PT201", "PT202", "AU", "AU169", "AU170",
"AU171", "AU172", "AU173", "AU174", "AU175", "AU176", "AU177", "AU178",
"AU179", "AU180", "AU181", "AU182", "AU183", "AU184", "AU185", "AU186",
"AU187", "AU188", "AU189", "AU190", "AU191", "AU192", "AU193", "AU194",
"AU195", "AU196", "AU197", "AU198", "AU199", "AU200", "AU201", "AU202",
"AU203", "AU204", "AU205", "HG", "HG171", "HG172", "HG173", "HG174", "HG175",
"HG176", "HG177", "HG178", "HG179", "HG180", "HG181", "HG182", "HG183",
"HG184", "HG185", "HG186", "HG187", "HG188", "HG189", "HG190", "HG191",
"HG192", "HG193", "HG194", "HG195", "HG196", "HG197", "HG198", "HG199",
"HG200", "HG201", "HG202", "HG203", "HG204", "HG205", "HG206", "HG207",
"HG208", "HG209", "HG210", "TL", "TL176", "TL177", "TL178", "TL179", "TL180",
"TL181", "TL182", "TL183", "TL184", "TL185", "TL186", "TL187", "TL188",
"TL189", "TL190", "TL191", "TL192", "TL193", "TL194", "TL195", "TL196",
"TL197", "TL198", "TL199", "TL200", "TL201", "TL202", "TL203", "TL204",
"TL205", "TL206", "TL207", "TL208", "TL209", "TL210", "TL211", "TL212", "PB",
"PB178", "PB179", "PB180", "PB181", "PB182", "PB183", "PB184", "PB185",
"PB186", "PB187", "PB188", "PB189", "PB190", "PB191", "PB192", "PB193",
"PB194", "PB195", "PB196", "PB197", "PB198", "PB199", "PB200", "PB201",
"PB202", "PB203", "PB204", "PB205", "PB206", "PB207", "PB208", "PB209",
"PB210", "PB211", "PB212", "PB213", "PB214", "PB215", "BI", "BI184", "BI185",
"BI186", "BI187", "BI188", "BI189", "BI190", "BI191", "BI192", "BI193",
"BI194", "BI195", "BI196", "BI197", "BI198", "BI199", "BI200", "BI201",
"BI202", "BI203", "BI204", "BI205", "BI206", "BI207", "BI208", "BI209",
"BI210", "BI211", "BI212", "BI213", "BI214", "BI215", "BI216", "BI217",
"BI218", "PO", "PO188", "PO189", "PO190", "PO191", "PO192", "PO193", "PO194",
"PO195", "PO196", "PO197", "PO198", "PO199", "PO200", "PO201", "PO202",
"PO203", "PO204", "PO205", "PO206", "PO207", "PO208", "PO209", "PO210",
"PO211", "PO212", "PO213", "PO214", "PO215", "PO216", "PO217", "PO218",
"PO219", "PO220", "AT", "AT193", "AT194", "AT195", "AT196", "AT197", "AT198",
"AT199", "AT200", "AT201", "AT202", "AT203", "AT204", "AT205", "AT206",
"AT207", "AT208", "AT209", "AT210", "AT211", "AT212", "AT213", "AT214",
"AT215", "AT216", "AT217", "AT218", "AT219", "AT220", "AT221", "AT222",
"AT223", "RN", "RN195", "RN196", "RN197", "RN198", "RN199", "RN200", "RN201",
"RN202", "RN203", "RN204", "RN205", "RN206", "RN207", "RN208", "RN209",
"RN210", "RN211", "RN212", "RN213", "RN214", "RN215", "RN216", "RN217",
"RN218", "RN219", "RN220", "RN221", "RN222", "RN223", "RN224", "RN225",
"RN226", "RN227", "RN228", "FR", "FR199", "FR200", "FR201", "FR202", "FR203",
"FR204", "FR205", "FR206", "FR207", "FR208", "FR209", "FR210", "FR211",
"FR212", "FR213", "FR214", "FR215", "FR216", "FR217", "FR218", "FR219",
"FR220", "FR221", "FR222", "FR223", "FR224", "FR225", "FR226", "FR227",
"FR228", "FR229", "FR230", "FR231", "FR232", "RA", "RA202", "RA203", "RA204",
"RA205", "RA206", "RA207", "RA208", "RA209", "RA210", "RA211", "RA212",
"RA213", "RA214", "RA215", "RA216", "RA217", "RA218", "RA219", "RA220",
"RA221", "RA222", "RA223", "RA224", "RA225", "RA226", "RA227", "RA228",
"RA229", "RA230", "RA231", "RA232", "RA233", "RA234", "AC", "AC206", "AC207",
"AC208", "AC209", "AC210", "AC211", "AC212", "AC213", "AC214", "AC215",
"AC216", "AC217", "AC218", "AC219", "AC220", "AC221", "AC222", "AC223",
"AC224", "AC225", "AC226", "AC227", "AC228", "AC229", "AC230", "AC231",
"AC232", "AC233", "AC234", "AC235", "AC236", "TH", "TH209", "TH210", "TH211",
"TH212", "TH213", "TH214", "TH215", "TH216", "TH217", "TH218", "TH219",
"TH220", "TH221", "TH222", "TH223", "TH224", "TH225", "TH226", "TH227",
"TH228", "TH229", "TH230", "TH231", "TH232", "TH233", "TH234", "TH235",
"TH236", "TH237", "TH238", "PA", "PA212", "PA213", "PA214", "PA215", "PA216",
"PA217", "PA218", "PA219", "PA220", "PA221", "PA222", "PA223", "PA224",
"PA225", "PA226", "PA227", "PA228", "PA229", "PA230", "PA231", "PA232",
"PA233", "PA234", "PA235", "PA236", "PA237", "PA238", "PA239", "PA240", "U",
"U217", "U218", "U219", "U220", "U221", "U222", "U223", "U224", "U225", "U226",
"U227", "U228", "U229", "U230", "U231", "U232", "U233", "U234", "U235", "U236",
"U237", "U238", "U239", "U240", "U241", "U242", "NP", "NP225", "NP226",
"NP227", "NP228", "NP229", "NP230", "NP231", "NP232", "NP233", "NP234",
"NP235", "NP236", "NP237", "NP238", "NP239", "NP240", "NP241", "NP242",
"NP243", "NP244", "PU", "PU228", "PU229", "PU230", "PU231", "PU232", "PU233",
"PU234", "PU235", "PU236", "PU237", "PU238", "PU239", "PU240", "PU241",
"PU242", "PU243", "PU244", "PU245", "PU246", "PU247", "AM", "AM231", "AM232",
"AM233", "AM234", "AM235", "AM236", "AM237", "AM238", "AM239", "AM240",
"AM241", "AM242", "AM243", "AM244", "AM245", "AM246", "AM247", "AM248",
"AM249", "CM", "CM233", "CM234", "CM235", "CM236", "CM237", "CM238", "CM239",
"CM240", "CM241", "CM242", "CM243", "CM244", "CM245", "CM246", "CM247",
"CM248", "CM249", "CM250", "CM251", "CM252", "BK", "BK235", "BK236", "BK237",
"BK238", "BK239", "BK240", "BK241", "BK242", "BK243", "BK244", "BK245",
"BK246", "BK247", "BK248", "BK249", "BK250", "BK251", "BK252", "BK253",
"BK254", "CF", "CF237", "CF238", "CF239", "CF240", "CF241", "CF242", "CF243",
"CF244", "CF245", "CF246", "CF247", "CF248", "CF249", "CF250", "CF251",
"CF252", "CF253", "CF254", "CF255", "CF256", "ES", "ES240", "ES241", "ES242",
"ES243", "ES244", "ES245", "ES246", "ES247", "ES248", "ES249", "ES250",
"ES251", "ES252", "ES253", "ES254", "ES255", "ES256", "ES257", "ES258", "FM",
"FM242", "FM243", "FM244", "FM245", "FM246", "FM247", "FM248", "FM249",
"FM250", "FM251", "FM252", "FM253", "FM254", "FM255", "FM256", "FM257",
"FM258", "FM259", "FM260", "MD", "MD245", "MD246", "MD247", "MD248", "MD249",
"MD250", "MD251", "MD252", "MD253", "MD254", "MD255", "MD256", "MD257",
"MD258", "MD259", "MD260", "MD261", "MD262", "NO", "NO248", "NO249", "NO250",
"NO251", "NO252", "NO253", "NO254", "NO255", "NO256", "NO257", "NO258",
"NO259", "NO260", "NO261", "NO262", "NO263", "NO264", "LR", "LR251", "LR252",
"LR253", "LR254", "LR255", "LR256", "LR257", "LR258", "LR259", "LR260",
"LR261", "LR262", "LR263", "LR264", "LR265", "LR266", "RF", "RF253", "RF254",
"RF255", "RF256", "RF257", "RF258", "RF259", "RF260", "RF261", "RF262",
"RF263", "RF264", "RF265", "RF266", "RF267", "RF268", "DB", "DB255", "DB256",
"DB257", "DB258", "DB259", "DB260", "DB261", "DB262", "DB263", "DB264",
"DB265", "DB266", "DB267", "DB268", "DB269", "DB270", "SG", "SG258", "SG259",
"SG260", "SG261", "SG262", "SG263", "SG264", "SG265", "SG266", "SG267",
"SG268", "SG269", "SG270", "SG271", "SG272", "SG273", "BH", "BH260", "BH261",
"BH262", "BH263", "BH264", "BH265", "BH266", "BH267", "BH268", "BH269",
"BH270", "BH271", "BH272", "BH273", "BH274", "BH275", "HS", "HS263", "HS264",
"HS265", "HS266", "HS267", "HS268", "HS269", "HS270", "HS271", "HS272",
"HS273", "HS274", "HS275", "HS276", "HS277", "MT", "MT265", "MT266", "MT267",
"MT268", "MT269", "MT270", "MT271", "MT272", "MT273", "MT274", "MT275",
"MT276", "MT277", "MT278", "MT279", "DS", "DS267", "DS268", "DS269", "DS270",
"DS271", "DS272", "DS273", "DS274", "DS275", "DS276", "DS277", "DS278",
"DS279", "DS280", "DS281", "RG", "RG272", "RG273", "RG274", "RG275", "RG276",
"RG277", "RG278", "RG279", "RG280", "RG281", "RG282", "RG283", "UUB",
"UUB277", "UUB278", "UUB279", "UUB280", "UUB281", "UUB282", "UUB283",
"UUB284", "UUB285", "UUT", "UUT283", "UUT284", "UUT285", "UUT286", "UUT287",
"UUQ", "UUQ285", "UUQ286", "UUQ287", "UUQ288", "UUQ289", "UUP", "UUP287",
"UUP288", "UUP289", "UUP290", "UUP291", "UUH", "UUH289", "UUH290", "UUH291",
"UUH292", "UUS", "UUS291", "UUS292", "UUO", "UUO293"]
_temp_iso_mass = [
1.00782503207, 1.00782503207, 2.01410177785, 2.01410177785, 3.01604927767,
3.01604927767, 4.027806424, 5.035311488, 6.044942594, 7.052749,
4.00260325415, 3.01602931914, 4.00260325415, 5.012223624, 6.018889124,
7.028020618, 8.033921897, 9.043950286, 10.052398837, 7.016004548, 3.030775,
4.027185558, 5.0125378, 6.015122794, 7.016004548, 8.022487362, 9.026789505,
10.035481259, 11.043797715, 12.053780, 9.012182201, 5.040790, 6.019726317,
7.016929828, 8.005305103, 9.012182201, 10.013533818, 11.021657749,
12.026920737, 13.035693007, 14.04289292, 15.053460, 16.061920, 11.009305406,
6.046810, 7.029917901, 8.024607233, 9.013328782, 10.012936992, 11.009305406,
12.014352104, 13.017780217, 14.025404009, 15.031103021, 16.039808829,
17.046989906, 18.056170, 19.063730, 12, 8.037675025, 9.031036689,
10.016853228, 11.011433613, 12, 13.00335483778, 14.0032419887, 15.010599256,
16.014701252, 17.022586116, 18.026759354, 19.034805018, 20.040319754,
21.049340, 22.057200, 14.00307400478, 10.041653674, 11.026090956,
12.018613197, 13.005738609, 14.00307400478, 15.00010889823, 16.006101658,
17.008450261, 18.014078959, 19.017028697, 20.023365807, 21.02710824,
22.034394934, 23.041220, 24.051040, 25.060660, 15.99491461956,
12.034404895, 13.024812213, 14.00859625, 15.003065617, 15.99491461956,
16.999131703, 17.999161001, 19.00358013, 20.004076742, 21.008655886,
22.009966947, 23.015687659, 24.020472917, 25.029460, 26.038340, 27.048260,
28.057810, 18.998403224, 14.035060, 15.018009103, 16.011465724,
17.002095237, 18.000937956, 18.998403224, 19.999981315, 20.999948951,
22.002998815, 23.003574631, 24.008115485, 25.012101747, 26.019615555,
27.026760086, 28.035670, 29.043260, 30.052500, 31.060429, 19.99244017542,
16.025761262, 17.017671504, 18.005708213, 19.001880248, 19.99244017542,
20.993846684, 21.991385113, 22.994466904, 23.993610779, 24.997736888,
26.000461206, 27.007589903, 28.012071575, 29.019385933, 30.024801045,
31.033110, 32.040020, 33.049380, 34.057028, 22.98976928087, 18.025969,
19.013877499, 20.007351328, 20.997655206, 21.994436425, 22.98976928087,
23.990962782, 24.989953968, 25.992633, 26.994076788, 27.998938, 29.002861,
30.008976, 31.013585452, 32.02046656, 33.026719756, 34.035170, 35.042493,
36.051480, 37.059340, 23.985041699, 19.03547, 20.018862545, 21.01171291,
21.999573843, 22.994123669, 23.985041699, 24.985836917, 25.982592929,
26.984340585, 27.983876825, 28.9886, 29.990434, 30.996546, 31.998975,
33.005254, 34.009456424, 35.017340, 36.023000, 37.031400, 38.037570,
39.046772, 40.053930, 26.981538627, 21.028040, 22.019520, 23.007267432,
23.999938865, 24.990428095, 25.986891692, 26.981538627, 27.981910306,
28.980445046, 29.982960256, 30.983946619, 31.988124489, 32.990843336,
33.996851837, 34.999860235, 36.006207204, 37.01067782, 38.017231021,
39.02297, 40.031450, 41.038330, 42.046890, 27.97692653246, 22.034530,
23.025520, 24.011545616, 25.004105574, 25.992329921, 26.986704905,
27.97692653246, 28.9764947, 29.973770171, 30.975363226999998,
31.974148082, 32.97800022, 33.978575524, 34.984583575, 35.986599477,
36.99293608, 37.995633601, 39.002070013, 40.005869121, 41.01456,
42.019790, 43.028660, 44.035260, 30.973761629, 24.034350, 25.020260,
26.011780, 26.999230236, 27.992314761, 28.981800606, 29.978313789,
30.973761629, 31.973907274, 32.971725543, 33.973636257, 34.973314117,
35.97825968, 36.979608946, 37.984156827, 38.986179475, 39.991296951,
40.994335435, 42.001007913, 43.00619, 44.012990, 45.019220, 46.027380,
31.972070999, 26.027880, 27.018833, 28.004372763, 28.996608049,
29.984903249, 30.979554728, 31.972070999, 32.971458759, 33.967866902,
34.969032161, 35.96708076, 36.971125567, 37.971163317, 38.975134306,
39.975451728, 40.979582149, 41.981022419, 42.98715479, 43.99021339,
44.996508112, 46.000750, 47.008590, 48.014170, 49.023619, 34.968852682,
28.028510, 29.014110, 30.004770, 30.992413086, 31.985689901, 32.977451887,
33.973762819, 34.968852682, 35.968306981, 36.965902591, 37.968010425,
38.968008164, 39.970415472, 40.970684525, 41.973254804, 42.974054403,
43.978281071, 44.980286886, 45.98421004, 46.988710, 47.994950, 49.000320,
50.007840, 51.014490, 39.96238312251, 30.021560, 31.012123, 31.997637984,
32.989925709, 33.980271244, 34.975257585, 35.967545105, 36.96677632,
37.962732394, 38.964313231, 39.96238312251, 40.964500611, 41.963045736,
42.965636056, 43.964924033, 44.968039956, 45.968094129, 46.972186792,
47.974540, 48.980520, 49.984430, 50.991630, 51.996780, 53.004940,
38.963706679, 32.021920, 33.007260, 33.998410, 34.988009692, 35.981292235,
36.973375889, 37.969081184, 38.963706679, 39.963998475, 40.961825762,
41.96240281, 42.96071554, 43.961556804, 44.960699493, 45.961976864,
46.961678473, 47.965513535, 48.967450928, 49.972783355, 50.976380,
51.982610, 52.987120, 53.994200, 54.999710, 39.962590983, 34.014120,
35.004940, 35.993087063, 36.985870269, 37.976318452, 38.970719725,
39.962590983, 40.962278062, 41.958618014, 42.958766628, 43.955481754,
44.956186566, 45.953692587, 46.954546006, 47.952534177, 48.955674148,
49.957518962, 50.961499214, 51.9651, 52.970050, 53.974350, 54.980550,
55.985570, 56.992356, 44.955911909, 36.014920, 37.003050, 37.994700,
38.984790002, 39.977967407, 40.969251125, 41.965516429, 42.961150658,
43.959402752, 44.955911909, 45.95517189, 46.952407508, 47.952231468,
48.950023975, 49.952187685, 50.953603368, 51.956675468, 52.959610,
53.963264561, 54.968243949, 55.972870, 56.977790, 57.983710, 58.989220,
59.995710, 47.947946281, 38.009770, 39.001610, 39.990498838, 40.983145,
41.973030902, 42.968522499, 43.959690069, 44.958125616, 45.952631555,
46.951763088, 47.947946281, 48.947869982, 49.944791194, 50.946614955,
51.946897311, 52.949727171, 53.951052401, 54.955265056, 55.958199639,
56.963989137, 57.966970, 58.972930, 59.976760, 60.983200, 61.987490,
62.994420, 50.943959507, 40.011090, 40.999780, 41.991230, 42.980650,
43.97411, 44.965775808, 45.960200481, 46.95490894, 47.952253707,
48.948516101, 49.947158485, 50.943959507, 51.944775479, 52.944337979,
53.946439854, 54.947233701, 55.950530966, 56.952561432, 57.956834136,
58.960207407, 59.965026862, 60.968480, 61.973780, 62.977550, 63.983470,
64.987920, 51.940507472, 42.006430, 42.997710, 43.985549, 44.97964,
45.968358635, 46.962900046, 47.954031716, 48.951335721, 49.946044205,
50.944767431, 51.940507472, 52.940649386, 53.938880395, 54.940839672,
55.940653139, 56.943613013, 57.944353129, 58.948586367, 59.950076033,
60.954717204, 61.95661319, 62.961860, 63.964410, 64.970160, 65.973380,
66.979550, 54.938045141, 44.006870, 44.994510, 45.986720, 46.976100,
47.96852, 48.959618005, 49.95423823, 50.948210787, 51.945565464,
52.941290117, 53.940358854, 54.938045141, 55.93890491, 56.938285378,
57.939981549, 58.940440237, 59.942911246, 60.944652638, 61.94842822,
62.95023999, 63.95424909, 64.956336065, 65.961080, 66.964140, 67.969300,
68.972840, 55.934937475, 45.014578, 46.000810, 46.992890, 47.980504,
48.973610, 49.962988982, 50.956819538, 51.948113875, 52.945307942,
53.939610501, 54.938293357, 55.934937475, 56.935393969, 57.933275558,
58.934875464, 59.934071683, 60.936745281, 61.936767442, 62.940369091,
63.941201265, 64.94538027, 65.946780638, 66.950947244, 67.9537, 68.958780,
69.961460, 70.966720, 71.969620, 58.933195048, 47.011490, 48.001760,
48.989720, 49.981540, 50.970720, 51.963590, 52.954218896, 53.948459635,
54.941999029, 55.939839278, 56.936291373, 57.935752814, 58.933195048,
59.933817059, 60.932475763, 61.934050563, 62.933611611, 63.935809908,
64.93647846, 65.939762004, 66.940889529, 67.944873058, 68.94632, 69.951,
70.9529, 71.957810, 72.960240, 73.965380, 74.968330, 57.935342907,
48.019750, 49.009660, 49.995930, 50.987720, 51.975680, 52.968470,
53.957905495, 54.951330251, 55.942132022, 56.939793526, 57.935342907,
58.934346705, 59.930786372, 60.931056033, 61.928345115, 62.929669374,
63.927965959, 64.930084304, 65.929139334, 66.931569414, 67.931868789,
68.935610269, 69.9365, 70.940736283, 71.942092682, 72.946470, 73.948070,
74.952870, 75.955330, 76.960550, 77.963180, 62.929597474, 51.997180,
52.985550, 53.976710, 54.966050, 55.958560, 56.949211078, 57.944538499,
58.939498028, 59.93736503, 60.933457821, 61.932583745, 62.929597474,
63.929764183, 64.927789485, 65.928868813, 66.927730314, 67.929610889,
68.929429269, 69.932392343, 70.932676833, 71.935820307, 72.936675282,
73.939874862, 74.9419, 75.945275026, 76.947850, 77.951960, 78.954560,
79.960870, 63.929142222, 53.992950, 54.983980, 55.972380, 56.964788,
57.954591555, 58.949263764, 59.941827035, 60.939510635, 61.934329764,
62.933211566, 63.929142222, 64.929240984, 65.926033419, 66.927127345,
67.924844154, 68.926550281, 69.925319274, 70.927721599, 71.926857951,
72.929779104, 73.929458609, 74.932936741, 75.93329357, 76.936958967,
77.938440216, 78.942652, 79.944342348, 80.950480, 81.954420, 82.961030,
68.925573587, 55.994910, 56.982930, 57.974250, 58.963370, 59.957060,
60.949446287, 61.944175238, 62.939294196, 63.936838747, 64.932734754,
65.93158901, 66.928201703, 67.927980084, 68.925573587, 69.926021972,
70.924701349, 71.926366268, 72.925174682, 73.926945762, 74.926500246,
75.928827626, 76.9291543, 77.93160818, 78.93289326, 79.936515781,
80.937752355, 81.942990, 82.946980, 83.952650, 84.957000, 85.963120,
73.921177767, 57.991010, 58.981750, 59.970190, 60.963790, 61.954650,
62.949640, 63.941653, 64.939436406, 65.933843453, 66.93273407,
67.92809424, 68.927964533, 69.924247381, 70.924950954, 71.922075815,
72.923458945, 73.921177767, 74.922858948, 75.921402557, 76.923548591,
77.922852739, 78.925400995, 79.925372392, 80.928820467, 81.929549725,
82.934620, 83.937470, 84.943030, 85.946490, 86.952510, 87.956910,
88.963830, 74.921596478, 59.993130, 60.980620, 61.973200, 62.963690,
63.957572, 64.949564, 65.94471, 66.939186071, 67.936769069, 68.932273675,
69.930924826, 70.927112428, 71.926752283, 72.923824844, 73.923928692,
74.921596478, 75.922394021, 76.920647286, 77.921827281, 78.920947934,
79.922533816, 80.922132287, 81.924504067, 82.924980024, 83.929058,
84.932020, 85.936500, 86.939900, 87.944940, 88.949390, 89.955500,
90.960430, 91.966800, 79.916521271, 64.964660, 65.955210, 66.950090,
67.941798, 68.939557817, 69.933390644, 70.932241822, 71.927112352,
72.926765345, 73.922476436, 74.922523368, 75.919213597, 76.919914038,
77.91730909, 78.918499098, 79.916521271, 80.917992474, 81.916699401,
82.919118473, 83.918462354, 84.922245053, 85.924271579, 86.928521358,
87.931423998, 88.936450, 89.939960, 90.945960, 91.949920, 92.956290,
93.960490, 78.918337087, 66.964790, 67.958516, 68.950106, 69.944792,
70.93874, 71.936644572, 72.931691524, 73.929891034, 74.925776207,
75.924541469, 76.921379082, 77.921145706, 78.918337087, 79.918529296,
80.916290563, 81.916804119, 82.915180421, 83.916478974, 84.915608403,
85.918797577, 86.920711324, 87.924065926, 88.926385334, 89.930627737,
90.933968095, 91.939258714, 92.943050, 93.948680, 94.952870, 95.958530,
96.962800, 85.910610729, 68.965180, 69.955259, 70.949625738, 71.942092038,
72.939289195, 73.933084369, 74.930945746, 75.925910078, 76.92467,
77.920364783, 78.920082431, 79.916378965, 80.916592015, 81.9134836,
82.914136099, 83.911506687, 84.912527331, 85.910610729, 86.913354862,
87.914446969, 88.917630581, 89.919516555, 90.923445215, 91.92615621,
92.931274357, 93.934360, 94.939840, 95.943070, 96.948560, 97.951910,
98.957600, 99.961140, 84.911789737, 70.965320, 71.959080, 72.950561,
73.944264751, 74.93857, 75.935072226, 76.930408, 77.928141, 78.92398946,
79.92251925, 80.918995913, 81.918208598, 82.915109701, 83.914384821,
84.911789737, 85.911167419, 86.909180526, 87.911315588, 88.912278016,
89.914801694, 90.916536958, 91.9197289, 92.922041876, 93.926404946,
94.929302889, 95.934272637, 96.937351916, 97.941790668, 98.945379283,
99.949870, 100.953196445, 101.958870, 87.905612124, 72.965970,
73.956310, 74.949949568, 75.941766782, 76.937944782, 77.93218,
78.929708, 79.924521013, 80.923211846, 81.918401639, 82.917556701,
83.913425275, 84.912932803, 85.909260204, 86.908877124, 87.905612124,
88.907450675, 89.907737888, 90.910203095, 91.911037858, 92.914025634,
93.915361312, 94.919358766, 95.921696802, 96.926152923, 97.928452934,
98.933240926, 99.935351911, 100.940517888, 101.943018987, 102.948950,
103.952330, 104.958580, 88.905848295, 75.958450, 76.949645, 77.943610,
78.937351634, 79.93428, 80.929127468, 81.926792451, 82.922354243,
83.920388264, 84.916433039, 85.914885576, 86.91087573, 87.909501146,
88.905848295, 89.907151886, 90.907304791, 91.908949143, 92.909582713,
93.911595245, 94.912820621, 95.915891343, 96.918133995, 97.92220302,
98.924636204, 99.927756586, 100.93031385, 101.933555695, 102.936730,
103.941050, 104.944870, 105.949790, 106.954140, 107.959480,
89.904704416, 77.955230, 78.949160, 79.9404, 80.937210026, 81.931087,
82.928653801, 83.923250, 84.921471182, 85.916473591, 86.914816252,
87.910226904, 88.9088895, 89.904704416, 90.905645767, 91.905040847,
92.906476006, 93.906315192, 94.9080426, 95.908273386, 96.910953109,
97.912734892, 98.916512106, 99.917761889, 100.921140415, 101.922981285,
102.926599606, 103.928780, 104.933050, 105.935910, 106.940750,
107.943960, 108.949240, 109.952870, 92.906378058, 80.949030,
81.943130, 82.936705382, 83.933570, 84.927912447, 85.925038326,
86.920361108, 87.918332163, 88.913418245, 89.911264845,
90.906996243, 91.907193888, 92.906378058, 93.907283888, 94.906835792,
95.908100647, 96.908098556, 97.910328412, 98.911618375, 99.914181619,
100.915252025, 101.918037614, 102.919143842, 103.922464701,
104.923936545, 105.927970, 106.930310, 107.934840, 108.937630,
109.942440, 110.945650, 111.950830, 112.954700, 97.905408169, 82.948740,
83.940090, 84.936550, 85.930695904, 86.927326502, 87.921953241,
88.919480009, 89.913936896, 90.911750194, 91.906810991, 92.90681261,
93.905088269, 94.905842129, 95.904679477, 96.906021465, 97.905408169,
98.90771187, 99.907477336, 100.910347001, 101.91029736, 102.913207142,
103.913763625, 104.91697461, 105.918136802, 106.921692604, 107.923453,
108.927810, 109.929730, 110.934410, 111.936840, 112.941880, 113.944920,
114.950290, 98.906254747, 84.948830, 85.942880, 86.936530, 87.932678,
88.927167, 89.923556564, 90.918427639, 91.915260166, 92.910248984,
93.909657002, 94.907657084, 95.907871383, 96.906365358, 97.907215966,
98.906254747, 99.90765778, 100.907314659, 101.909215019, 102.909181351,
103.911447454, 104.911660566, 105.914357927, 106.915079572, 107.918461226,
108.919982665, 109.923820483, 110.92569283, 111.929146493, 112.931590,
113.935880, 114.938690, 115.943370, 116.946480, 117.951480, 101.904349312,
86.949180, 87.940260, 88.936110, 89.929890, 90.926292, 91.920120,
92.917052034, 93.911359711, 94.910412929, 95.907597835, 96.9075547,
97.905287132, 98.905939302, 99.904219476, 100.905582087, 101.904349312,
102.906323847, 103.905432701, 104.907752866, 105.907329433,
106.909905089, 107.910173465, 108.913203233, 109.914136041, 110.917696,
111.918965, 112.922487194, 113.924281, 114.928686173, 115.930810,
116.935580, 117.937820, 118.942840, 119.945310, 102.905504292,
88.948837, 89.942870, 90.936550, 91.931980, 92.925740, 93.921698,
94.91589874, 95.914460631, 96.911336797, 97.910708158, 98.908132104,
99.90812155, 100.906163625, 101.906843196, 102.905504292, 103.906655518,
104.905693821, 105.907287135, 106.906748423, 107.908728018, 108.908737289,
109.911136411, 110.911585913, 111.914394159, 112.915530627, 113.918806,
114.920334, 115.924062, 116.925980, 117.930070, 118.932110, 119.936410,
120.938720, 121.943210, 105.903485715, 90.949110, 91.940420, 92.935910,
93.928770, 94.924690, 95.918164359, 96.916479073, 97.912720902,
98.911767833, 99.908505886, 100.908289242, 101.905608544, 102.906087307,
103.904035834, 104.90508492, 105.903485715, 106.905133481, 107.903891701,
108.905950451, 109.905153254, 110.907670734, 111.907314058, 112.910152908,
113.910362638, 114.913683824, 115.914158662, 116.917841338, 117.9189843,
118.923110, 119.924691878, 120.928870, 121.930550, 122.934930, 123.936880,
106.90509682, 92.949780, 93.942780, 94.935480, 95.930680, 96.923972412,
97.921566201, 98.917597178, 99.916104255, 100.912802233, 101.911685,
102.90897272, 103.908629157, 104.906528661, 105.906668921, 106.90509682,
107.905955556, 108.904752292, 109.906107231, 110.905291157, 111.907004814,
112.906566579, 113.908803704, 114.908762698, 115.911359933, 116.911684562,
117.914582768, 118.915665059, 119.918787384, 120.919848046, 121.923530,
122.924900, 123.928640, 124.930430, 125.934500, 126.936770, 127.941170,
128.943690, 129.950448, 113.90335854, 94.949870, 95.939770, 96.934940,
97.927395546, 98.925010, 99.920289525, 100.918681538, 101.914462258,
102.913419246, 103.909849475, 104.909467905, 105.90645941, 106.906617928,
107.904183683, 108.904982293, 109.90300207, 110.904178107, 111.902757809,
112.904401662, 113.90335854, 114.905430969, 115.904755809, 116.907218618,
117.90691453, 118.909921597, 119.909850129, 120.912977363, 121.913332432,
122.917002999, 123.917647616, 124.92124637, 125.922353321, 126.926443864,
127.927762285, 128.932150, 129.933901937, 130.940670, 131.945550,
114.903878484, 96.949540, 97.942140, 98.934220, 99.931110851,
100.926340, 101.924090238, 102.919914188, 103.918296171, 104.91467354,
105.913465411, 106.9102951, 107.90969818, 108.907150507, 109.907165274,
110.905103278, 111.905532331, 112.904057761, 113.904913876,
114.903878484, 115.905259703, 116.904513564, 117.906354367, 118.90584535,
119.907959608, 120.907845822, 121.91027601, 122.910438276, 123.913175231,
124.913600588, 125.916463857, 126.917353091, 127.920172328, 128.92169698,
129.924970049, 130.926851767, 131.93299026, 132.937810, 133.944150,
134.949330, 119.902194676, 98.949330, 99.939044343, 100.936060,
101.930295324, 102.928100, 103.923143223, 104.921349437, 105.91688062,
106.915644329, 107.911925378, 108.911283214, 109.907842791, 110.90773446,
111.904818207, 112.905170577, 113.902778869, 114.903342397, 115.90174053,
116.902951656, 117.901603167, 118.90330763, 119.902194676, 120.90423548,
121.903439046, 122.905720838, 123.905273946, 124.907784125, 125.90765328,
126.910360024, 127.910536624, 128.913479, 129.913967295, 130.916999769,
131.917815713, 132.923829249, 133.928291765, 134.934730, 135.939340,
136.945990, 120.903815686, 102.939690, 103.936472, 104.931486348,
105.928791, 106.924150, 107.922160, 108.918132426, 109.916753, 110.913163,
111.912398009, 112.909371672, 113.909269, 114.906598, 115.906793629,
116.904835941, 117.905528731, 118.903942009, 119.905072427, 120.903815686,
121.905173651, 122.90421397, 123.905935743, 124.905253818, 125.90724748,
126.906923609, 127.909169001, 128.909148442, 129.911656324, 130.911982275,
131.914466896, 132.91525163, 133.920379744, 134.925165771, 135.930350,
136.935310, 137.940790, 138.945980, 129.906224399, 104.943640,
105.937504237, 106.935006, 107.929444597, 108.927415515, 109.922407316,
110.921110692, 111.917013672, 112.915891, 113.912089, 114.911902,
115.90846, 116.908644719, 117.905827581, 118.906403645, 119.904020222,
120.904936424, 121.903043898, 122.904270029, 123.902817896, 124.904430731,
125.903311696, 126.905226336, 127.904463056, 128.906598238, 129.906224399,
130.908523864, 131.90855316, 132.910955306, 133.911368737, 134.916448592,
135.920101246, 136.925322954, 137.929220, 138.934730, 139.938850,
140.944650, 141.949080, 126.904472681, 107.943475, 108.938149417,
109.935242, 110.930276, 111.927970, 112.923640583, 113.921850, 114.918048,
115.916808633, 116.91365, 117.913074, 118.910074, 119.910048173,
120.907366811, 121.907589284, 122.905588965, 123.906209852, 124.904630164,
125.905624153, 126.904472681, 127.905809443, 128.904987722, 129.906674247,
130.906124609, 131.907997381, 132.907796939, 133.909744465, 134.910048121,
135.914653993, 136.91787084, 137.922349591, 138.926099478, 139.931000,
140.935030, 141.940180, 142.944560, 143.949990, 131.904153457, 109.944278068,
110.941602, 111.935623112, 112.933341174, 113.927980306, 114.92629392,
115.921581087, 116.920358735, 117.916178655, 118.915410688, 119.911784244,
120.911461829, 121.908367632, 122.90848191, 123.905893003, 124.906395464,
125.904273634, 126.905183723, 127.903531275, 128.904779435, 129.903508007,
130.905082362, 131.904153457, 132.905910722, 133.905394464, 134.907227495,
135.907218794, 136.911562125, 137.913954475, 138.918792936, 139.921640943,
140.926648049, 141.92970959, 142.935110, 143.938510, 144.944070, 145.947750,
146.953560, 132.905451932, 111.950301, 112.944493274, 113.941450, 114.935910,
115.933367, 116.928670701, 117.926559494, 118.922377304, 119.920677253,
120.917229209, 121.916113434, 122.912996036, 123.912257798, 124.90972827,
125.909451977, 126.907417525, 127.907748866, 128.906064426, 129.906708552,
130.905463926, 131.90643426, 132.905451932, 133.906718475, 134.905977008,
135.907311576, 136.907089473, 137.911016704, 138.913363999, 139.917282354,
140.920045752, 141.924298927, 142.92735175, 143.932076914, 144.93552617,
145.940289423, 146.944155008, 147.949218153, 148.952930, 149.958170,
150.962190, 137.905247237, 113.950675405, 114.947370, 115.941380,
116.938499, 117.933040, 118.930659661, 119.926044974, 120.924054499,
121.919904, 122.918781036, 123.915093603, 124.914472912, 125.911250177,
126.911093797, 127.908317698, 128.908679439, 129.906320811, 130.906941118,
131.905061288, 132.90600749, 133.904508383, 134.905688591, 135.904575945,
136.905827384, 137.905247237, 138.908841341, 139.910604505, 140.914411009,
141.91645341, 142.920626719, 143.922952853, 144.927627032, 145.930219572,
146.934945, 147.937720047, 148.942580, 149.945680, 150.950810, 151.954270,
152.959610, 138.906353267, 116.950068, 117.946730, 118.940990, 119.938070,
120.933010, 121.930710, 122.926240, 123.924574275, 124.920816034,
125.919512667, 126.916375448, 127.915585177, 128.912692815, 129.912368724,
130.91007, 131.910101145, 132.908218, 133.908514011, 134.906976844,
135.907635536, 136.906493598, 137.90711193, 138.906353267, 139.909477645,
140.910962152, 141.91407913, 142.91606272, 143.919599647, 144.921645401,
145.92579346, 146.928235284, 147.932228868, 148.934734, 149.938770,
150.941720, 151.946250, 152.949620, 153.954500, 154.958350, 139.905438706,
118.952760, 119.946640, 120.943420, 121.937910, 122.935400, 123.930410,
124.928440, 125.923971, 126.922731, 127.918911, 128.918102, 129.914736,
130.914422, 131.911460487, 132.91151502, 133.908924821, 134.909151396,
135.907172422, 136.907805577, 137.905991321, 138.906652651, 139.905438706,
140.90827627, 141.909244205, 142.91238591, 143.913647336, 144.917233135,
145.918759009, 146.922673954, 147.92443241, 148.928399883, 149.930408931,
150.933976196, 151.936540, 152.940580, 153.943420, 154.948040, 155.951260,
156.956340, 140.907652769, 120.955364, 121.951810, 122.945960, 123.942960,
124.937830, 125.935310, 126.930830, 127.928791, 128.925095, 129.92359,
130.920259, 131.919255, 132.916330532, 133.915711737, 134.913111745,
135.912691611, 136.910705455, 137.910754636, 138.908938399, 139.909075874,
140.907652769, 141.910044806, 142.910816926, 143.913305245, 144.9145117,
145.917644336, 146.918995992, 147.922135026, 148.923717651, 149.926672997,
150.928318618, 151.931499225, 152.933838905, 153.937518153, 154.940120,
155.944270, 156.947430, 157.951980, 158.955500, 141.907723297, 123.952230,
124.948880, 125.943220, 126.940500, 127.935390, 128.933188, 129.928506,
130.927247, 131.923321237, 132.922348, 133.918790181, 134.91818116,
135.914976035, 136.914567137, 137.911949961, 138.911978288, 139.909552,
140.909609854, 141.907723297, 142.90981429, 143.910087274, 144.912573636,
145.913116939, 146.916100441, 147.916893288, 148.920148842, 149.920890888,
150.923828929, 151.924682219, 152.927698232, 153.929477307, 154.932932,
155.935018114, 156.939030, 157.941600, 158.946090, 159.949090, 160.953880,
144.912749023, 125.957520, 126.951630, 127.948420, 128.943160, 129.940450,
130.935870, 131.933750, 132.929782, 133.928353, 134.924876, 135.923565829,
136.920479493, 137.919548281, 138.916804082, 139.916041789, 140.913555054,
141.912874471, 142.910932616, 143.912590843, 144.912749023, 145.914696305,
146.915138545, 147.917474618, 148.918334155, 149.920983561, 150.921206973,
151.923496795, 152.924116889, 153.926463943, 154.928101267, 155.931056736,
156.933039369, 157.936561407, 158.938970, 159.942990, 160.945860,
161.950290, 162.953680, 151.919732425, 127.958080, 128.954640, 129.948920,
130.946110, 131.940690, 132.938670, 133.933970, 134.93252, 135.928275527,
136.926971746, 137.923243961, 138.922296605, 139.918994687, 140.918476488,
141.915197641, 142.914628338, 143.911999478, 144.913410353, 145.9130409,
146.914897923, 147.914822674, 148.917184735, 149.917275539, 150.919932409,
151.919732425, 152.922097356, 153.922209273, 154.924640161, 155.925527887,
156.928358717, 157.929991317, 158.933211271, 159.935140, 160.938830,
161.941220, 162.945360, 163.948280, 164.952980, 152.921230339, 129.963569,
130.957753, 131.954370, 132.949240, 133.946510, 134.941820, 135.939600,
136.935570, 137.933709, 138.92979228, 139.928087607, 140.92493072,
141.923434945, 142.920297509, 143.918816823, 144.916265237, 145.917205817,
146.916746111, 147.918085895, 148.917931238, 149.919701819, 150.919850161,
151.921744534, 152.921230339, 153.922979237, 154.92289326, 155.924752249,
156.925423647, 157.927845302, 158.929088861, 159.931971, 160.933680,
161.937040, 162.939210, 163.942990, 164.945720, 165.949970, 166.953210,
157.924103912, 133.955370, 134.952570, 135.947340, 136.945020, 137.940120,
138.938240, 139.933674, 140.932126, 141.928116, 142.92674951, 143.922963,
144.921709252, 145.918310608, 146.91909442, 147.918114524, 148.919340915,
149.918658876, 150.920348482, 151.919790996, 152.921749543, 153.920865598,
154.922622022, 155.922122743, 156.923960135, 157.924103912, 158.926388658,
159.927054146, 160.929669211, 161.930984751, 162.933990, 163.935860,
164.939380, 165.941600, 166.945570, 167.948360, 168.952870, 158.925346757,
135.961380, 136.955980, 137.953160, 138.948290, 139.945805049, 140.941448,
141.938744, 142.935121, 143.933045, 144.929274, 145.927246584, 146.924044585,
147.924271701, 148.923245909, 149.923659686, 150.923102543, 151.924074438,
152.923434588, 153.924678019, 154.923505236, 155.924747213, 156.924024604,
157.925413137, 158.925346757, 159.927167606, 160.927569919, 161.929488234,
162.930647536, 163.933350838, 164.934880, 165.937991959, 166.940050,
167.943640, 168.946220, 169.950250, 170.953300, 163.929174751, 137.962490,
138.959540, 139.954010, 140.951350, 141.946366, 142.943830, 143.939254,
144.937425, 145.932845369, 146.9310915, 147.927149831, 148.927304787,
149.925585184, 150.926184601, 151.9247183, 152.92576467, 153.924424457,
154.925753775, 155.92428311, 156.925466095, 157.924409487, 158.925739214,
159.925197517, 160.926933364, 161.926798447, 162.928731159, 163.929174751,
164.931703333, 165.932806741, 166.935655462, 167.937128769, 168.940307614,
169.942390, 170.946200, 171.948760, 172.953000, 164.93032207, 139.968539,
140.963098, 141.959770, 142.954610, 143.951480, 144.947200, 145.944640,
146.940056, 147.937718, 148.933774771, 149.933496182, 150.931688142,
151.931713714, 152.930198789, 153.930601579, 154.929103491, 155.929839,
156.928256188, 157.928941007, 158.927711959, 159.928729478, 160.927854776,
161.929095504, 162.928733903, 163.930233507, 164.93032207, 165.932284162,
166.933132633, 167.935515708, 168.936872273, 169.939618929, 170.94146515,
171.944820, 172.947290, 173.951150, 174.954050, 165.930293061, 142.966340,
143.960380, 144.957390, 145.952000, 146.949490, 147.944550, 148.942306,
149.937913839, 150.937448903, 151.935050389, 152.935063492, 153.932783081,
154.933208949, 155.931064698, 156.931916, 157.929893474, 158.930684066,
159.929083292, 160.929995309, 161.928778264, 162.930032749, 163.929200229,
164.930726003, 165.930293061, 166.932048159, 167.932370224, 168.934590364,
169.935464312, 170.938029808, 171.939356113, 172.942400, 173.944230,
174.947770, 175.950080, 176.954050, 168.93421325, 144.970073, 145.966425,
146.960961, 147.957840, 148.952720, 149.949960, 150.94548349, 151.944422,
152.942012112, 153.941567808, 154.939199459, 155.938979933, 156.936973,
157.936979525, 158.934975, 159.935262801, 160.933549, 161.933994682,
162.932651124, 163.93356, 164.932435492, 165.933554131, 166.932851622,
167.934172776, 168.93421325, 169.935801397, 170.93642944, 171.938400044,
172.939603607, 173.942168605, 174.943836853, 175.946994685, 176.949040,
177.952640, 178.955340, 173.938862089, 147.967420, 148.964040, 149.958420,
150.955400769, 151.950288919, 152.949480, 153.946393928, 154.945782332,
155.942818215, 156.942627848, 157.939865617, 158.940050099, 159.937552344,
160.937901678, 161.93576821, 162.936334305, 163.934489416, 164.935279,
165.933882042, 166.934949605, 167.933896895, 168.935189802, 169.934761837,
170.936325799, 171.936381469, 172.938210787, 173.938862089, 174.94127645,
175.942571683, 176.945260822, 177.94664668, 178.950170, 179.952330,
180.956150, 174.940771819, 149.973228, 150.967577, 151.964120,
152.958767331, 153.957522, 154.954316216, 155.953032523, 156.9500983,
157.949313283, 158.946628776, 159.946033, 160.943572, 161.943277288,
162.941179, 163.941339, 164.939406724, 165.939859, 166.93827,
167.938739111, 168.937651439, 169.938474968, 170.937913136, 171.939085669,
172.938930602, 173.94033748, 174.940771819, 175.94268631, 176.943758055,
177.945954559, 178.947327443, 179.94988116, 180.951970, 181.955040,
182.957570, 183.960910, 179.946549953, 152.970690, 153.964860, 154.963390,
155.959364025, 156.958396, 157.954799366, 158.95399487, 159.950684379,
160.950274844, 161.947210498, 162.947089, 163.944367284, 164.944567,
165.94218, 166.9426, 167.940568, 168.941259, 169.939609, 170.940492,
171.939448301, 172.940513, 173.940046178, 174.941509181, 175.941408631,
176.943220651, 177.943698766, 178.945816145, 179.946549953, 180.949101246,
181.950554096, 182.953530439, 183.955446515, 184.958820, 185.960890,
186.964590, 187.966850, 180.947995763, 154.974592, 155.972303,
156.968192445, 157.966699, 158.963018173, 159.961486056, 160.958417,
161.957291859, 162.954330271, 163.953534, 164.950772514, 165.950512,
166.948093, 167.948047, 168.946011, 169.946175, 170.944476, 171.944895,
172.94375, 173.944454, 174.943737, 175.944857, 176.944472403,
177.945778221, 178.945929535, 179.947464831, 180.947995763, 181.950151849,
182.951372616, 183.954007966, 184.955559375, 185.958552023, 186.960530,
187.963700, 188.965830, 189.969230, 183.950931188, 157.974562, 158.972918,
159.968478805, 160.967357, 161.963497417, 162.962523542, 163.958954382,
164.958279949, 165.955027253, 166.954816014, 167.951808394, 168.95177879,
169.949228482, 170.949451, 171.947292, 172.947689, 173.946079, 174.946717,
175.945634, 176.946643, 177.945876236, 178.947070447, 179.946704459,
180.948197248, 181.948204156, 182.950222951, 183.950931188, 184.953419264,
185.954364127, 186.957160466, 187.958489105, 188.961912868, 189.963181378,
190.966600, 191.968170, 186.955753109, 159.982115, 160.977589119,
161.976002, 162.972080535, 163.970323, 164.967088557, 165.965808,
166.962601, 167.961572608, 168.958791096, 169.958220071, 170.955716,
171.955422961, 172.953243, 173.953115, 174.951381, 175.951623, 176.950328,
177.950989, 178.949987641, 179.950789084, 180.950067916, 181.95121008,
182.950819841, 183.952520756, 184.952954982, 185.954986084, 186.955753109,
187.958114438, 188.959229007, 189.961817977, 190.963125242, 191.965960,
192.967470, 193.970420, 191.96148069, 161.984431, 162.982690,
163.978035649, 164.976762, 165.972690753, 166.971547969, 167.967803678,
168.96701927, 169.963577028, 170.963184819, 171.960023303, 172.959808409,
173.957062202, 174.956945835, 175.954806, 176.954965324, 177.953251241,
178.953816017, 179.952378803, 180.953244, 181.952110186, 182.953126102,
183.952489071, 184.954042265, 185.953838158, 186.955750458, 187.955838228,
188.95814747, 189.958447048, 190.960929718, 191.96148069, 192.964151563,
193.965182083, 194.968126661, 195.969639333, 192.96292643, 163.992201,
164.987520, 165.985824, 166.981665156, 167.979881, 168.976294942, 169.974965,
170.971626042, 171.970456, 172.967501739, 173.966861045, 174.964112895,
175.963648688, 176.9613015, 177.961082, 178.959122266, 179.959229446,
180.957625297, 181.958076296, 182.956846458, 183.957476, 184.956698,
185.957946104, 186.957363361, 187.958853121, 188.958718935, 189.960545968,
190.960594046, 191.962605012, 192.96292643, 193.965078378, 194.965979573,
195.968396542, 196.969653285, 197.972280, 198.973804583, 194.964791134,
165.994855, 166.992979, 167.988150742, 168.986715, 169.982495289,
170.981244542, 171.977347128, 172.976444754, 173.972818767, 174.972420552,
175.968944622, 176.968469481, 177.965648724, 178.965363404, 179.963031477,
180.963097285, 181.961170656, 182.961596703, 183.959922251, 184.960619,
185.959350813, 186.960587, 187.959395391, 188.960833686, 189.959931655,
190.961676661, 191.961038005, 192.962987401, 193.962680253, 194.964791134,
195.964951521, 196.967340182, 197.96789279, 198.970593094, 199.971440677,
200.974512868, 201.975740, 196.966568662, 168.998080, 169.996122,
170.991878881, 171.990035, 172.98623738, 173.984761, 174.981274107,
175.980099, 176.976864908, 177.97603192, 178.973212812, 179.972521124,
180.970079048, 181.969617874, 182.967593034, 183.967451524, 184.965789411,
185.965952703, 186.964567541, 187.965323661, 188.963948286, 189.964700339,
190.963704225, 191.964812953, 192.964149715, 193.96536525, 194.96503464,
195.966569813, 196.966568662, 197.968242303, 198.968765193, 199.970725647,
200.97165724, 201.973805838, 202.975154542, 203.977724, 204.979870,
201.970643011, 171.003760, 171.998832686, 172.997242, 173.992863695,
174.99142327, 175.98735458, 176.986279158, 177.982483143, 178.981833861,
179.978266394, 180.977819311, 181.974689964, 182.974449841, 183.971713051,
184.971899086, 185.96936179, 186.969814236, 187.967577049, 188.968190034,
189.966322449, 190.967157105, 191.965634327, 192.966665421, 193.965439409,
194.966720113, 195.965832649, 196.967212908, 197.966769032, 198.968279932,
199.968326004, 200.970302268, 201.970643011, 202.972872484, 203.973493933,
204.976073386, 205.977514066, 206.982588545, 207.985940, 208.991040,
209.994510, 204.974427541, 176.000590, 176.996427286, 177.994897,
178.991089082, 179.989906, 180.986257447, 181.985667104, 182.982192802,
183.981873122, 184.978791305, 185.978325, 186.975905897, 187.976009782,
188.973588428, 189.973877149, 190.971786154, 191.972225, 192.970672,
193.9712, 194.969774335, 195.970481151, 196.969574511, 197.970483495,
198.969877, 199.970962672, 200.970818891, 201.972105808, 202.97234422,
203.973863522, 204.974427541, 205.97611032, 206.977419429, 207.9820187,
208.985358952, 209.990073689, 210.993477, 211.998228, 207.976652071,
178.003830191, 179.002150, 179.997918173, 180.996623958, 181.992671842,
182.991874629, 183.988142339, 184.987609944, 185.984238945, 186.98391837,
187.980874338, 188.980807, 189.978081517, 190.978265, 191.975785171,
192.976173234, 193.97401207, 194.97454205, 195.972774109, 196.973431124,
197.972033959, 198.97291665, 199.971826675, 200.972884511, 201.972159133,<|fim▁hole|>189.988295129, 190.985786119, 191.985457954, 192.982959771, 193.98283396,
194.980650737, 195.980666509, 196.978864454, 197.979206, 198.977671961,
199.978131829, 200.977009036, 201.977742324, 202.976876001, 203.977812736,
204.977389366, 205.97849913, 206.978470679, 207.979742196, 208.980398734,
209.984120371, 210.98726946, 211.991285724, 212.994384666, 213.998711539,
215.001769776, 216.006305943, 217.009470, 218.014316, 208.982430435,
187.999422048, 188.998480562, 189.995101185, 190.994574485, 191.991335149,
192.991025275, 193.988185606, 194.988110728, 195.98553458, 196.98565963,
197.983388616, 198.983666063, 199.981798604, 200.982259764, 201.980757541,
202.981420103, 203.980318121, 204.981203322, 205.980481099, 206.981593173,
207.981245702, 208.982430435, 209.982873673, 210.986653154, 211.988867969,
212.99285728, 213.99520135, 214.999419988, 216.001915035, 217.006334796,
218.008973037, 219.013744, 220.016602, 210.987496271, 192.999843112,
193.998725085, 194.996268098, 195.995788077, 196.993189215, 197.992837202,
198.990532254, 199.990351264, 200.988416999, 201.988630236, 202.986941984,
203.987251326, 204.986074483, 205.986667036, 206.985783502, 207.986589977,
208.986173143, 209.98714771, 210.987496271, 211.990744771, 212.992936646,
213.996371733, 214.99865257, 216.002423257, 217.004718822, 218.008694336,
219.011161691, 220.015407682, 221.018050, 222.022330, 223.025190,
222.017577738, 195.005437696, 196.002115223, 197.001584351, 197.998678663,
198.998370297, 199.9956993, 200.995628335, 201.993263492, 202.993386687,
203.99142874, 204.991718799, 205.990214104, 206.990734225, 207.98964247,
208.990414742, 209.989696216, 210.990600523, 211.990703529, 212.993882668,
213.995362554, 214.998745483, 216.00027437, 217.003927675, 218.005601256,
219.009480204, 220.011393981, 221.015536782, 222.017577738, 223.021790,
224.024090, 225.028440, 226.030890, 227.035407, 228.037986, 222.01755173,
199.007258147, 200.00657249, 201.003860867, 202.003372847, 203.000924647,
204.000653204, 204.99859396, 205.998666066, 206.996949414, 207.997138783,
208.995953555, 209.996407738, 210.995536544, 211.996202244, 212.996189081,
213.998971145, 215.000341497, 216.00319799, 217.004631951, 218.007578322,
219.009252149, 220.012327405, 221.014254762, 222.01755173, 223.019735857,
224.023249951, 225.025565414, 226.029386231, 227.031835938, 228.035729,
229.038450228, 230.042510, 231.045440, 232.049772, 228.031070292,
202.009890686, 203.009271619, 204.006499668, 205.00626857, 206.00382727,
207.003798105, 208.00183994, 209.001991373, 210.000494978, 211.000897987,
211.999794499, 213.000383959, 214.000107894, 215.002719834, 216.003533035,
217.006320327, 218.00714023, 219.010085078, 220.011028384, 221.013917338,
222.01537453, 223.018502171, 224.020211821, 225.023611564, 226.025409823,
227.029177842, 228.031070292, 229.034957577, 230.037056394, 231.041220,
232.043638, 233.048060, 234.050704, 227.027752127, 206.01450498,
207.011949748, 208.011551551, 209.009494863, 210.009435986, 211.007734835,
212.007813822, 213.006607643, 214.006901798, 215.006453625, 216.008720075,
217.009346914, 218.011641453, 219.012420389, 220.014762979, 221.015591248,
222.017843851, 223.019137468, 224.021722866, 225.023229585, 226.026098089,
227.027752127, 228.031021112, 229.033015243, 230.036294178, 231.038558786,
232.042027438, 233.044550, 234.048420, 235.051232, 236.055296,
232.038055325, 209.017715682, 210.015075342, 211.014928413, 212.012980288,
213.01301014, 214.01149977, 215.01173033, 216.011062115, 217.013114328,
218.013284499, 219.015536895, 220.015747762, 221.018183674, 222.018468121,
223.020811448, 224.021466895, 225.023951021, 226.024903069, 227.02770407,
228.028741127, 229.03176243, 230.033133843, 231.036304343, 232.038055325,
233.041581843, 234.04360123, 235.047510074, 236.049870, 237.053894,
238.056496, 231.03588399, 212.023204138, 213.02110934, 214.020918417,
215.019185865, 216.019109564, 217.018323986, 218.020041889, 219.019883143,
220.021875303, 221.021877983, 222.023742, 223.023962273, 224.025625738,
225.026130678, 226.027947753, 227.028805072, 228.031051376, 229.032096793,
230.034540754, 231.03588399, 232.038591592, 233.040247277, 234.043308058,
235.045443615, 236.048681284, 237.051145659, 238.05450271, 239.057260,
240.060980, 238.050788247, 217.024368791, 218.023535671, 219.02491916,
220.024723, 221.026399, 222.026086, 223.0277386, 224.027604778,
225.029390717, 226.029338702, 227.031156367, 228.031374006, 229.033505939,
230.033939784, 231.036293704, 232.037156152, 233.039635207, 234.040952088,
235.043929918, 236.045568006, 237.048730184, 238.050788247, 239.054293299,
240.056591988, 241.060330, 242.062931, 237.048173444, 225.033913933,
226.035145, 227.034956789, 228.036180, 229.036263808, 230.037827597,
231.038245085, 232.040108, 233.040740546, 234.042895038, 235.044063267,
236.0465696, 237.048173444, 238.050946405, 239.052939025, 240.056162182,
241.058252431, 242.06164118, 243.064279, 244.067850, 242.058742611,
228.038742328, 229.040150212, 230.039649886, 231.041101107, 232.041187097,
233.042997375, 234.043317076, 235.04528605, 236.046057964, 237.048409658,
238.049559894, 239.052163381, 240.053813545, 241.056851456, 242.058742611,
243.062003092, 244.064203907, 245.067747154, 246.070204627, 247.074070,
243.06138108, 231.045560, 232.046590, 233.046348, 234.047809, 235.047946,
236.049579, 237.049996, 238.051984324, 239.053024479, 240.055300179,
241.056829144, 242.059549159, 243.06138108, 244.064284847, 245.066452114,
246.069774619, 247.072093, 248.075752, 249.078480, 247.07035354,
233.050771232, 234.050159841, 235.051434, 236.051413, 237.052901,
238.053028697, 239.054957, 240.055529539, 241.057653001, 242.058835824,
243.061389114, 244.062752578, 245.065491249, 246.067223662, 247.07035354,
248.072348508, 249.075953413, 250.078356959, 251.082284605, 252.084870,
247.07030708, 235.056580, 236.057330, 237.057003, 238.058281, 239.058279,
240.059759, 241.060230, 242.061981, 243.063007572, 244.065180774,
245.066361616, 246.068672947, 247.07030708, 248.073086, 249.074986657,
250.07831652, 251.080760172, 252.084310, 253.086880, 254.090600,
251.079586788, 237.062070, 238.061410, 239.062422, 240.062302, 241.063726,
242.063701552, 243.065427, 244.066000689, 245.068048612, 246.068805309,
247.071000589, 248.072184861, 249.074853537, 250.076406066, 251.079586788,
252.081625846, 253.085133145, 254.087322909, 255.091046, 256.093440,
252.082978512, 240.068920, 241.068538, 242.069745, 243.069548, 244.070883,
245.071324, 246.072896, 247.073656, 248.075471, 249.076411, 250.078612,
251.079992142, 252.082978512, 253.084824697, 254.088022021, 255.090273122,
256.093598, 257.095979, 258.099520, 257.095104724, 242.073430, 243.074353,
244.074084, 245.075385, 246.075299023, 247.076847, 248.077194714,
249.079034, 250.079521264, 251.081575017, 252.082466855, 253.085185236,
254.08685422, 255.089962202, 256.091773117, 257.095104724, 258.097076,
259.100595, 260.102678, 258.098431319, 245.080829, 246.081886, 247.081635,
248.082823, 249.083013, 250.084420, 251.084839, 252.086560, 253.087280,
254.089656, 255.091082705, 256.094059025, 257.095541368, 258.098431319,
259.100509, 260.103652, 261.105721, 262.108865, 255.093241131, 248.086596,
249.087833, 250.087510, 251.089012, 252.088976521, 253.090678,
254.090955253, 255.093241131, 256.094282666, 257.09687719, 258.098207,
259.101031, 260.102643, 261.105749, 262.107301, 263.110552, 264.112345,
260.105504, 251.094360, 252.095371, 253.095210, 254.096454, 255.096681,
256.098629, 257.099555, 258.101814, 259.102901, 260.105504, 261.106883,
262.109634, 263.111293, 264.114038, 265.115839, 266.119305, 263.112547,
253.100689, 254.100184, 255.101340, 256.101166194, 257.102990,
258.103489, 259.105637, 260.106440, 261.108766556, 262.109925, 263.112547,
264.113985, 265.116704, 266.117956, 267.121529, 268.123644, 255.107398,
255.107398, 256.108127, 257.107722, 258.109231, 259.109610, 260.111300,
261.112056, 262.114084, 263.114988, 264.117404, 265.118601, 266.121029,
267.122377, 268.125445, 269.127460, 270.130712, 259.114500, 258.113168,
259.114500, 260.114422071, 261.116117, 262.116398, 263.118322, 264.118931,
265.121114693, 266.122065, 267.124425, 268.125606, 269.128755, 270.130329,
271.133472, 272.135158, 273.138220, 262.122892, 260.121970, 261.121664,
262.122892, 263.123035, 264.124604, 265.125147, 266.126942, 267.127650,
268.129755, 269.130694, 270.133616, 271.135179, 272.138032, 273.139618,
274.142440, 275.144250, 263.128558, 263.128558, 264.128394885, 265.130085,
266.130097, 267.131789, 268.132162, 269.134056, 270.134650, 271.137657,
272.139052, 273.141986, 274.143131, 275.145952, 276.147208, 277.149841,
265.136151, 265.136151, 266.137299, 267.137307, 268.138728, 269.139055,
270.140657, 271.141139, 272.143738, 273.144913, 274.147492, 275.148647,
276.151156, 277.152420, 278.154812, 279.156193, 281.162061, 267.144341,
268.143795, 269.145124, 270.144720, 271.146062, 272.146317, 273.148863,
274.149492, 275.152176, 276.153034, 277.155647, 278.156469, 279.158861,
280.159795, 281.162061, 272.153615, 272.153615, 273.153682, 274.155713,
275.156142, 276.158493, 277.159519, 278.161604, 279.162468, 280.164473,
281.165372, 282.167486, 283.168415, 283.171792, 277.163943, 278.164312,
279.166546, 280.167039, 281.169286, 282.169765, 283.171792, 284.172384,
285.174105, 283.176451, 283.176451, 284.178080, 285.178732, 286.180481,
287.181045, 285.183698, 285.183698, 286.183855, 287.185599, 288.185689,
289.187279, 287.191186, 287.191186, 288.192492, 289.192715, 290.194141,
291.194384, 292.199786, 289.198862, 290.198590, 291.200011, 292.199786,
291.206564, 291.206564, 292.207549, 293.214670, 293.214670]
# Derived lookup tables built from the parallel _temp_* lists defined above.
# NOTE(review): the comments below say Cfour gives ghost atoms mass 100, yet
# "GH" is mapped to 0 here -- presumably a deliberate override; confirm.
el2mass = dict(zip(_temp_symbol, _temp_mass))  # element symbol -> atomic mass
el2mass["GH"] = 0. # note that ghost atoms in Cfour have mass 100.
eliso2mass = dict(zip(_temp_iso_symbol, _temp_iso_mass)) # encompasses el2mass
eliso2mass["GH"] = 0. # note that ghost atoms in Cfour have mass 100. # encompasses el2mass
#eliso2mass["X0"] = 0. # probably needed, just checking
el2z = dict(zip(_temp_symbol, _temp_z))  # element symbol -> atomic number
el2z["GH"] = 0  # ghost atoms carry atomic number 0
z2mass = dict(zip(_temp_z, _temp_mass))  # atomic number -> atomic mass
z2el = dict(zip(_temp_z, _temp_symbol))  # atomic number -> element symbol
z2element = dict(zip(_temp_z, _temp_element))  # atomic number -> element name
207.976652071, 208.98109012, 209.984188527, 210.988736964, 211.991897543,
212.996581499, 213.999805408, 215.004807, 208.980398734, 184.001124,
184.997625, 185.996597625, 186.993157835, 187.992265154, 188.989199012, |
<|file_name|>chrome_content_rules_registry.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/api/declarative_content/chrome_content_rules_registry.h"
#include "base/bind.h"
#include "chrome/browser/chrome_notification_types.h"
#include "chrome/browser/extensions/api/declarative_content/content_constants.h"
#include "chrome/browser/extensions/extension_util.h"
#include "chrome/browser/profiles/profile.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/browser_iterator.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "content/public/browser/navigation_details.h"
#include "content/public/browser/notification_service.h"
#include "content/public/browser/notification_source.h"
#include "content/public/browser/web_contents.h"
#include "extensions/browser/api/declarative/rules_registry_service.h"
#include "extensions/browser/extension_registry.h"
#include "extensions/browser/extension_system.h"
namespace extensions {
//
// EvaluationScope
//
// Used to coalesce multiple requests for evaluation into a zero or one actual
// evaluations (depending on the EvaluationDisposition). This is required for
// correctness when multiple trackers respond to the same event. Otherwise,
// executing the request from the first tracker will be done before the tracked
// state has been updated for the other trackers.
// RAII helper: for its lifetime it overrides the registry's evaluation
// disposition, restoring the previous one (and flushing deferred requests if
// appropriate) on destruction.
class ChromeContentRulesRegistry::EvaluationScope {
 public:
  // The single-argument form defers evaluation requests (DEFER_REQUESTS)
  // until the scope ends; see the delegating constructor below.
  explicit EvaluationScope(ChromeContentRulesRegistry* registry);
  EvaluationScope(ChromeContentRulesRegistry* registry,
                  EvaluationDisposition disposition);
  ~EvaluationScope();
 private:
  // Registry whose disposition is overridden; must outlive this scope.
  ChromeContentRulesRegistry* const registry_;
  // Disposition in effect when this scope was created; restored on exit.
  const EvaluationDisposition previous_disposition_;
  DISALLOW_COPY_AND_ASSIGN(EvaluationScope);
};
// Convenience form: defer all evaluation requests for the scope's lifetime.
ChromeContentRulesRegistry::EvaluationScope::EvaluationScope(
    ChromeContentRulesRegistry* registry)
    : EvaluationScope(registry, DEFER_REQUESTS) {}
// Saves the registry's current disposition and installs |disposition|.
ChromeContentRulesRegistry::EvaluationScope::EvaluationScope(
    ChromeContentRulesRegistry* registry,
    EvaluationDisposition disposition)
    : registry_(registry),
      previous_disposition_(registry_->evaluation_disposition_) {
  // EVALUATE_REQUESTS is the steady state, not a valid scoped override.
  DCHECK_NE(EVALUATE_REQUESTS, disposition);
  registry_->evaluation_disposition_ = disposition;
}
// Restores the saved disposition. If that returns the registry to
// EVALUATE_REQUESTS (i.e. this was the outermost scope), runs every
// evaluation that was deferred while any scope was active.
ChromeContentRulesRegistry::EvaluationScope::~EvaluationScope() {
  registry_->evaluation_disposition_ = previous_disposition_;
  if (registry_->evaluation_disposition_ == EVALUATE_REQUESTS) {
    for (content::WebContents* tab : registry_->evaluation_pending_)
      registry_->EvaluateConditionsForTab(tab);
    registry_->evaluation_pending_.clear();
  }
}
//
// ChromeContentRulesRegistry
//
// Constructs the registry on the UI thread for |browser_context|, builds the
// predicate evaluators via |evaluators_factory|, and subscribes to
// WebContents-destroyed notifications so per-tab state can be cleaned up in
// Observe().
ChromeContentRulesRegistry::ChromeContentRulesRegistry(
    content::BrowserContext* browser_context,
    RulesCacheDelegate* cache_delegate,
    const PredicateEvaluatorsFactory& evaluators_factory)
    : ContentRulesRegistry(browser_context,
                           declarative_content_constants::kOnPageChanged,
                           content::BrowserThread::UI,
                           cache_delegate,
                           RulesRegistryService::kDefaultRulesRegistryID),
      evaluators_(evaluators_factory.Run(this)),
      evaluation_disposition_(EVALUATE_REQUESTS) {
  registrar_.Add(this,
                 content::NOTIFICATION_WEB_CONTENTS_DESTROYED,
                 content::NotificationService::AllBrowserContextsAndSources());
}
// Drops per-tab rule state when a WebContents goes away. This is the only
// notification type the constructor registers for.
void ChromeContentRulesRegistry::Observe(
    int type,
    const content::NotificationSource& source,
    const content::NotificationDetails& details) {
  if (type != content::NOTIFICATION_WEB_CONTENTS_DESTROYED)
    return;
  content::WebContents* destroyed_tab =
      content::Source<content::WebContents>(source).ptr();
  // Non-tab WebContents and tabs from other browser contexts never appear in
  // the map, so erase() is a harmless no-op for them.
  active_rules_.erase(destroyed_tab);
}
// Runs, queues, or drops an evaluation request for |contents| depending on
// the current disposition set by any active EvaluationScope.
void ChromeContentRulesRegistry::RequestEvaluation(
    content::WebContents* contents) {
  if (evaluation_disposition_ == EVALUATE_REQUESTS) {
    EvaluateConditionsForTab(contents);
  } else if (evaluation_disposition_ == DEFER_REQUESTS) {
    // Coalesced: the set dedupes repeated requests for the same tab; the
    // outermost EvaluationScope destructor flushes them.
    evaluation_pending_.insert(contents);
  }
  // IGNORE_REQUESTS: intentionally do nothing.
}
// Returns true if this registry is responsible for |context| — i.e. the
// original profile or an incognito context derived from it (see
// ManagingRulesForBrowserContext).
bool ChromeContentRulesRegistry::ShouldManageConditionsForBrowserContext(
    content::BrowserContext* context) {
  return ManagingRulesForBrowserContext(context);
}
// Starts tracking |contents|: registers it in active_rules_ (initially with
// no matching rules) and tells every evaluator to begin watching it.
void ChromeContentRulesRegistry::MonitorWebContentsForRuleEvaluation(
    content::WebContents* contents) {
  // We rely on active_rules_ to have a key-value pair for |contents| to know
  // which WebContents we are working with.
  active_rules_[contents] = std::set<const ContentRule*>();
  // Defer evaluation requests the evaluators may fire while being wired up;
  // they run once, coalesced, when this scope ends.
  EvaluationScope evaluation_scope(this);
  for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
    evaluator->TrackForWebContents(contents);
}
// Forwards a committed main-frame navigation to every evaluator, but only
// for tabs this registry already monitors (present in active_rules_).
// Evaluation requests raised by the evaluators are coalesced by the scope.
void ChromeContentRulesRegistry::DidNavigateMainFrame(
    content::WebContents* contents,
    const content::LoadCommittedDetails& details,
    const content::FrameNavigateParams& params) {
  if (ContainsKey(active_rules_, contents)) {
    EvaluationScope evaluation_scope(this);
    for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
      evaluator->OnWebContentsNavigation(contents, details, params);
  }
}
// A ContentRule bundles its owning extension, the conditions that gate it,
// the actions applied while it matches, and its priority (used when actions
// are applied/reverted).
ChromeContentRulesRegistry::ContentRule::ContentRule(
    const Extension* extension,
    std::vector<scoped_ptr<const ContentCondition>> conditions,
    std::vector<scoped_ptr<const ContentAction>> actions,
    int priority)
    : extension(extension),
      conditions(std::move(conditions)),
      actions(std::move(actions)),
      priority(priority) {}
ChromeContentRulesRegistry::ContentRule::~ContentRule() {}
// Translates one API-level rule into a ContentRule. On the first condition
// or action that fails to parse, |*error| is set and a null pointer is
// returned; |*error| must be checked by the caller even on a null result.
scoped_ptr<const ChromeContentRulesRegistry::ContentRule>
ChromeContentRulesRegistry::CreateRule(
    const Extension* extension,
    const std::map<std::string, ContentPredicateFactory*>& predicate_factories,
    const api::events::Rule& api_rule,
    std::string* error) {
  std::vector<scoped_ptr<const ContentCondition>> conditions;
  for (const linked_ptr<base::Value>& value : api_rule.conditions) {
    conditions.push_back(
        CreateContentCondition(extension, predicate_factories, *value, error));
    if (!error->empty())
      return scoped_ptr<ContentRule>();
  }
  std::vector<scoped_ptr<const ContentAction>> actions;
  for (const linked_ptr<base::Value>& value : api_rule.actions) {
    actions.push_back(ContentAction::Create(browser_context(), extension,
                                            *value, error));
    if (!error->empty())
      return scoped_ptr<ContentRule>();
  }
  // Note: |api_rule| may contain tags, but these are ignored.
  return make_scoped_ptr(new ContentRule(extension, std::move(conditions),
                                         std::move(actions),
                                         *api_rule.priority));
}
// True when |context| shares this registry's original profile — i.e. it is
// either the profile itself or one of its incognito contexts.
bool ChromeContentRulesRegistry::ManagingRulesForBrowserContext(
    content::BrowserContext* context) {
  // Manage both the normal context and incognito contexts associated with it.
  return Profile::FromBrowserContext(context)->GetOriginalProfile() ==
      Profile::FromBrowserContext(browser_context());
}
// static
// A condition matches |tab| only if every non-null, non-ignored predicate it
// contains accepts the tab (logical AND). An empty predicate list matches.
bool ChromeContentRulesRegistry::EvaluateConditionForTab(
    const ContentCondition* condition,
    content::WebContents* tab) {
  for (const scoped_ptr<const ContentPredicate>& pred :
           condition->predicates) {
    if (!pred || pred->IsIgnored())
      continue;  // Null/ignored predicates never veto a match.
    if (!pred->GetEvaluator()->EvaluatePredicate(pred.get(), tab))
      return false;  // First failing predicate decides.
  }
  return true;
}
// Returns every rule whose conditions currently match |tab|. Conditions
// within one rule are OR-ed: a single matching condition admits the rule.
// Rules of extensions that must not run in this incognito renderer are
// skipped entirely.
std::set<const ChromeContentRulesRegistry::ContentRule*>
ChromeContentRulesRegistry::GetMatchingRules(content::WebContents* tab) const {
  const bool is_incognito_tab = tab->GetBrowserContext()->IsOffTheRecord();
  std::set<const ContentRule*> matching_rules;
  for (const RulesMap::value_type& rule_id_rule_pair : content_rules_) {
    const ContentRule* rule = rule_id_rule_pair.second.get();
    if (is_incognito_tab &&
        !ShouldEvaluateExtensionRulesForIncognitoRenderer(rule->extension))
      continue;
    for (const scoped_ptr<const ContentCondition>& condition :
         rule->conditions) {
      if (EvaluateConditionForTab(condition.get(), tab))
        matching_rules.insert(rule);
    }
  }
  return matching_rules;
}
std::string ChromeContentRulesRegistry::AddRulesImpl(
const std::string& extension_id,
const std::vector<linked_ptr<api::events::Rule>>& api_rules) {
EvaluationScope evaluation_scope(this);
const Extension* extension = ExtensionRegistry::Get(browser_context())
->GetInstalledExtension(extension_id);
DCHECK(extension);
std::string error;
RulesMap new_rules;
std::map<ContentPredicateEvaluator*,
std::map<const void*, std::vector<const ContentPredicate*>>>
new_predicates;
std::map<std::string, ContentPredicateFactory*> predicate_factories;
for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_) {
predicate_factories[evaluator->GetPredicateApiAttributeName()] =
evaluator.get();
}
for (const linked_ptr<api::events::Rule>& api_rule : api_rules) {
ExtensionIdRuleIdPair rule_id(extension_id, *api_rule->id);
DCHECK(content_rules_.find(rule_id) == content_rules_.end());
scoped_ptr<const ContentRule> rule(
CreateRule(extension, predicate_factories, *api_rule, &error));
if (!error.empty()) {
// Notify evaluators that none of the created predicates will be tracked
// after all.
for (const scoped_ptr<ContentPredicateEvaluator>& evaluator :
evaluators_) {
if (!new_predicates[evaluator.get()].empty()) {
evaluator->TrackPredicates(
std::map<const void*, std::vector<const ContentPredicate*>>());
}
}
return error;
}
DCHECK(rule);
// Group predicates by evaluator and rule, so we can later notify the<|fim▁hole|> condition->predicates) {
if (predicate.get()) {
new_predicates[predicate->GetEvaluator()][rule.get()].push_back(
predicate.get());
}
}
}
new_rules[rule_id] = make_linked_ptr(rule.release());
}
// Notify the evaluators about their new predicates.
for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
evaluator->TrackPredicates(new_predicates[evaluator.get()]);
// Wohoo, everything worked fine.
content_rules_.insert(new_rules.begin(), new_rules.end());
// Request evaluation for all WebContents, under the assumption that a
// non-empty condition has been added.
for (const auto& web_contents_rules_pair : active_rules_)
RequestEvaluation(web_contents_rules_pair.first);
return std::string();
}
// Removes the given rules for |extension_id|: reverts their actions on every
// tab where they are active, tells the evaluators to stop tracking their
// predicates, and erases them from content_rules_. Unknown rule ids are
// silently skipped. Always returns the empty string (success).
std::string ChromeContentRulesRegistry::RemoveRulesImpl(
    const std::string& extension_id,
    const std::vector<std::string>& rule_identifiers) {
  // Ignore evaluation requests in this function because it reverts actions on
  // any active rules itself. Otherwise, we run the risk of reverting the same
  // rule multiple times.
  EvaluationScope evaluation_scope(this, IGNORE_REQUESTS);
  std::vector<RulesMap::iterator> rules_to_erase;
  std::vector<const void*> predicate_groups_to_stop_tracking;
  for (const std::string& id : rule_identifiers) {
    // Skip unknown rules.
    RulesMap::iterator content_rules_entry =
        content_rules_.find(std::make_pair(extension_id, id));
    if (content_rules_entry == content_rules_.end())
      continue;
    const ContentRule* rule = content_rules_entry->second.get();
    // Remove the ContentRule from active_rules_.
    for (auto& tab_rules_pair : active_rules_) {
      if (ContainsKey(tab_rules_pair.second, rule)) {
        ContentAction::ApplyInfo apply_info =
            {rule->extension, browser_context(), tab_rules_pair.first,
             rule->priority};
        for (const auto& action : rule->actions)
          action->Revert(apply_info);
        tab_rules_pair.second.erase(rule);
      }
    }
    // Defer the actual erase: the rule pointer doubles as the predicate
    // group key and must stay valid until StopTrackingPredicates runs.
    rules_to_erase.push_back(content_rules_entry);
    predicate_groups_to_stop_tracking.push_back(rule);
  }
  // Notify the evaluators to stop tracking the predicates that will be removed.
  for (const scoped_ptr<ContentPredicateEvaluator>& evaluator : evaluators_)
    evaluator->StopTrackingPredicates(predicate_groups_to_stop_tracking);
  // Remove the rules.
  for (RulesMap::iterator it : rules_to_erase)
    content_rules_.erase(it);
  return std::string();
}
// Removes every rule registered by |extension_id| by collecting its rule ids
// and delegating removal (including action reverting) to RemoveRulesImpl().
std::string ChromeContentRulesRegistry::RemoveAllRulesImpl(
    const std::string& extension_id) {
  std::vector<std::string> ids_to_remove;
  for (const RulesMap::value_type& entry : content_rules_) {
    // Keys are (extension id, rule id) pairs; match on the extension id.
    if (entry.first.first == extension_id)
      ids_to_remove.push_back(entry.first.second);
  }
  return RemoveRulesImpl(extension_id, ids_to_remove);
}
// Recomputes the set of matching rules for |tab| and diffs it against the
// previously active set: newly matching rules get Apply(), still-matching
// rules get Reapply(), and rules that stopped matching get Revert(). The new
// set then replaces the old one in active_rules_.
void ChromeContentRulesRegistry::EvaluateConditionsForTab(
    content::WebContents* tab) {
  std::set<const ContentRule*> matching_rules = GetMatchingRules(tab);
  // Nothing matches and the tab was never tracked: avoid creating an entry.
  if (matching_rules.empty() && !ContainsKey(active_rules_, tab))
    return;
  std::set<const ContentRule*>& prev_matching_rules = active_rules_[tab];
  for (const ContentRule* rule : matching_rules) {
    ContentAction::ApplyInfo apply_info =
        {rule->extension, browser_context(), tab, rule->priority};
    if (!ContainsKey(prev_matching_rules, rule)) {
      for (const scoped_ptr<const ContentAction>& action : rule->actions)
        action->Apply(apply_info);
    } else {
      for (const scoped_ptr<const ContentAction>& action : rule->actions)
        action->Reapply(apply_info);
    }
  }
  for (const ContentRule* rule : prev_matching_rules) {
    if (!ContainsKey(matching_rules, rule)) {
      ContentAction::ApplyInfo apply_info =
          {rule->extension, browser_context(), tab, rule->priority};
      for (const scoped_ptr<const ContentAction>& action : rule->actions)
        action->Revert(apply_info);
    }
  }
  // Store the new matching set; swap avoids copying the (possibly large) set.
  if (matching_rules.empty())
    active_rules_[tab].clear();
  else
    swap(matching_rules, prev_matching_rules);
}
// Decides whether |extension|'s rules apply to an incognito renderer served
// by this registry. Incognito must be enabled for the extension at all, and
// split-mode extensions are only handled by the OffTheRecord registry.
bool
ChromeContentRulesRegistry::ShouldEvaluateExtensionRulesForIncognitoRenderer(
    const Extension* extension) const {
  if (!util::IsIncognitoEnabled(extension->id(), browser_context()))
    return false;
  // Split-mode incognito extensions register their rules with separate
  // RulesRegistries per Original/OffTheRecord browser contexts, whereas
  // spanning-mode extensions share the Original browser context.
  if (util::CanCrossIncognito(extension, browser_context())) {
    // The extension uses spanning mode incognito. No rules should have been
    // registered for the extension in the OffTheRecord registry so
    // execution for that registry should never reach this point.
    CHECK(!browser_context()->IsOffTheRecord());
  } else {
    // The extension uses split mode incognito. Both the Original and
    // OffTheRecord registries may have (separate) rules for this extension.
    // Since we're looking at an incognito renderer, so only the OffTheRecord
    // registry should process its rules.
    if (!browser_context()->IsOffTheRecord())
      return false;
  }
  return true;
}
// Test-only: total number of currently-applied rules, summed over all tabs.
size_t ChromeContentRulesRegistry::GetActiveRulesCountForTesting() {
  size_t total = 0;
  for (auto it = active_rules_.begin(); it != active_rules_.end(); ++it)
    total += it->second.size();
  return total;
}
// Default teardown; members (registrar_, evaluators_, rule maps) clean up in
// their own destructors.
ChromeContentRulesRegistry::~ChromeContentRulesRegistry() {
}
} // namespace extensions<|fim▁end|> | // evaluators that they have new predicates to manage.
for (const scoped_ptr<const ContentCondition>& condition :
rule->conditions) {
for (const scoped_ptr<const ContentPredicate>& predicate : |
<|file_name|>when-module-option-changes.js<|end_file_name|><|fim▁begin|>//// [/lib/initial-buildOutput.txt]
/lib/tsc --b /src/core --verbose
12:01:00 AM - Projects in this build:
* src/core/tsconfig.json
12:01:00 AM - Project 'src/core/tsconfig.json' is out of date because output file 'src/core/anotherModule.js' does not exist
12:01:00 AM - Building project '/src/core/tsconfig.json'...
exitCode:: ExitStatus.Success
//// [/src/core/anotherModule.js]
"use strict";
exports.__esModule = true;
exports.World = "hello";
//// [/src/core/index.js]
"use strict";
exports.__esModule = true;
exports.someString = "HELLO WORLD";
function leftPad(s, n) { return s + n; }
exports.leftPad = leftPad;
function multiply(a, b) { return a * b; }
exports.multiply = multiply;
//// [/src/core/tsconfig.json]
{
"compilerOptions": {
"incremental": true,
"module": "commonjs"<|fim▁hole|>
//// [/src/core/tsconfig.tsbuildinfo]
{
"program": {
"fileInfos": {
"../../lib/lib.d.ts": {
"version": "3858781397-/// <reference no-default-lib=\"true\"/>\ninterface Boolean {}\ninterface Function {}\ninterface CallableFunction {}\ninterface NewableFunction {}\ninterface IArguments {}\ninterface Number { toExponential: any; }\ninterface Object {}\ninterface RegExp {}\ninterface String { charAt: any; }\ninterface Array<T> { length: number; [n: number]: T; }\ninterface ReadonlyArray<T> {}\ndeclare const console: { log(msg: any): void; };",
"signature": "3858781397-/// <reference no-default-lib=\"true\"/>\ninterface Boolean {}\ninterface Function {}\ninterface CallableFunction {}\ninterface NewableFunction {}\ninterface IArguments {}\ninterface Number { toExponential: any; }\ninterface Object {}\ninterface RegExp {}\ninterface String { charAt: any; }\ninterface Array<T> { length: number; [n: number]: T; }\ninterface ReadonlyArray<T> {}\ndeclare const console: { log(msg: any): void; };"
},
"./anothermodule.ts": {
"version": "-2676574883-export const World = \"hello\";\r\n",
"signature": "-8396256275-export declare const World = \"hello\";\r\n"
},
"./index.ts": {
"version": "-18749805970-export const someString: string = \"HELLO WORLD\";\r\nexport function leftPad(s: string, n: number) { return s + n; }\r\nexport function multiply(a: number, b: number) { return a * b; }\r\n",
"signature": "1874987148-export declare const someString: string;\r\nexport declare function leftPad(s: string, n: number): string;\r\nexport declare function multiply(a: number, b: number): number;\r\n"
},
"./some_decl.d.ts": {
"version": "-9253692965-declare const dts: any;\r\n",
"signature": "-9253692965-declare const dts: any;\r\n"
}
},
"options": {
"incremental": true,
"module": 1,
"configFilePath": "./tsconfig.json"
},
"referencedMap": {},
"exportedModulesMap": {},
"semanticDiagnosticsPerFile": [
"../../lib/lib.d.ts",
"./anothermodule.ts",
"./index.ts",
"./some_decl.d.ts"
]
},
"version": "FakeTSVersion"
}<|fim▁end|> | }
}
|
<|file_name|>code_from_book.py<|end_file_name|><|fim▁begin|># Python Code From Book
# This file consists of code snippets only
# It is not intended to be run as a script
raise SystemExit
####################################################################
# 3. Thinking in Binary
####################################################################
import magic
print magic.from_file("my_image.jpg")
# JPEG image data, Exif standard: [TIFF image data, big-endian,
# direntries=16, height=3264, bps=0, PhotometricIntepretation=RGB],
# baseline, precision 8, 2378x2379, frames 3
if magic.from_file("upload.jpg", mime=True) == "image/jpeg":
continue_uploading("upload.jpg")
else:
alert("Sorry! This file type is not allowed")
import imghdr
print imghdr.what("path/to/my/file.ext")
import binascii
def spoof_file(file, magic_number):
    """Prepend the bytes encoded by hex string *magic_number* to *file* in place.

    Used to fake a file's magic number/signature; the original contents are
    preserved unchanged after the injected prefix.
    """
    prefix = binascii.unhexlify(magic_number)
    with open(file, "r+b") as handle:
        original = handle.read()
        handle.seek(0)
        handle.write(prefix + original)
def to_ascii_bytes(string):
    """Render *string* as space-separated 8-bit binary groups, one per character."""
    return " ".join("{:08b}".format(ord(char)) for char in string)
string = "my ascii string"
"".join(hex(ord(char))[2:] for char in string)
# '6d7920617363696920737472696e67'
hex_string = "6d7920617363696920737472696e67"
hex_string.decode("hex")
# 'my ascii string'
"".join(chr(int(hex_string[i:i+2], 16)) for i in range(0, len(hex_string), 2))
# 'my ascii string'
# adapted from https://code.activestate.com/recipes/142812-hex-dumper/
def hexdump(string, length=8):
    """Return a classic hex dump of *string*, *length* units per row.

    Each row is ``OFFS  HEXCODES  PRINTABLE`` where non-printable characters
    show as ``.``.  Byte strings render 2 hex digits per unit; text strings
    (Py2 ``unicode`` / Py3 ``str``) render 4.

    Fix: the original used the Python-2-only builtins ``unicode`` and
    ``xrange`` and crashed with NameError on Python 3; behavior on Python 2
    is unchanged.
    """
    # `unicode` only exists on Python 2; on Python 3 all text is `str`.
    try:
        text_type = unicode  # noqa: F821 -- Python 2
    except NameError:
        text_type = str      # Python 3
    digits = 4 if isinstance(string, text_type) else 2
    result = []
    for i in range(0, len(string), length):
        chunk = string[i:i + length]
        # Iterating bytes yields ints on Python 3 but 1-char strs on Python 2.
        codes = [c if isinstance(c, int) else ord(c) for c in chunk]
        hexa = "".join("{:0{}X}".format(c, digits) for c in codes)
        text = "".join(chr(c) if 0x20 <= c < 0x7F else '.' for c in codes)
        result.append("{:04X} {:{}} {}".format(i, hexa, length * (digits + 1), text))
    return '\n'.join(result)
with open("/path/to/my_file.ext", "r") as f:
print hexdump(f.read())
import struct
num = 0x103e4
struct.pack("I", 0x103e4)
# '\xe4\x03\x01\x00'
string = '\xe4\x03\x01\x00'
struct.unpack("i", string)
# (66532,)
bytes = '\x01\xc2'
struct.pack("<h", struct.unpack(">h", bytes)[0])
# '\xc2\x01'
import base64
base64.b64encode('encodings are fun...')
# 'ZW5jb2RpbmdzIGFyZSBmdW4uLi4='
base64.b64decode(_)
# 'encodings are fun...'
string = "hello\x00"
binary_string = ' '.join('{:08b}'.format(ord(char)) for char in string)
" ".join(binary_string[i:i+6] for i in range(0, len(binary_string), 6))
# '011010 000110 010101 101100 011011 000110 111100 000000'
bin_string = '011010 000110 010101 101100 011011 000110 111100 000000'
[int(b, 2) for b in bin_string.split()]
# [26, 6, 21, 44, 27, 6, 60, 0]
u'◑ \u2020'.encode('utf8')
# '\xe2\x97\x91 \xe2\x80\xa0'
'\xe2\x97\x91 \xe2\x80\xa0'.decode('utf8')
# u'\u25d1 \u2020'
unicode('\xe2\x97\x91 \xe2\x80\xa0', encoding='utf8')
# u'\u25d1 \u2020'
utf8_string = 'Åêíòü'
utf8_string
# '\xc3\x85\xc3\xaa\xc3\xad\xc3\xb2\xc3\xbc'
unicode_string = utf8_string.decode('utf8')
unicode_string
# u'\xc5\xea\xed\xf2\xfc'
unicode_string.encode('mac roman')
# '\x81\x90\x92\x98\x9f'
'Åêíòü'.decode('utf8').encode('ascii')
# Traceback (most recent call last):
# File "<stdin>", line 1, in <module>
# UnicodeEncodeError: 'ascii' codec can't encode characters in position 0-4: ordinal not in range(128)
file = """潍楪慢敫椠桴慧扲敬整瑸琠慨⁴獩琠敨爠獥汵⁴景琠硥⁴敢湩敤潣敤獵湩湡甠楮瑮湥敤档
牡捡整湥潣楤杮楷桴挠浯汰瑥汥⁹湵敲慬整湯獥景整牦浯愠搠晩敦敲瑮眠楲楴杮猠獹整‧⠊慔敫
牦浯攠楷楫数楤牯⥧"""
print file.decode('utf8').encode('utf16')
# ??Mojibake is the garbled text that is the result of text being decoded using an
# unintended character encoding with completely unrelated ones, often from a
# different writing system.' (Taken from en.wikipedia.org)
import ftfy
ftfy.fix_text(u"“Mojibake“ can be fixed.")
# u'"Mojibake" can be fixed.'
bin(0b1010 & 0b1111110111)
# '0b10'
bin(0b1010 | 0b0110)
# '0b1110'
bin(0b10111 | 0b01000)
# '0b11111'
bin(0b100 ^ 0b110)
# '0b10'
bin(-0b1010 >> 0b10)
# '-0b11'
x = 0b1111
y = 0b1010
bin(int("{:b}{:b}".format(x, y), 2))
# '0b11111010'
bin(x << 4 | y)
# '0b11111010'
####################################################################
# 4. Cryptography
####################################################################
import random
import string
r = random.SystemRandom()
# Get a random integer between 0 and 20
r.randint(0, 20)
# 5
# Get a random number between 0 and 1
r.random()
# 0.8282475835972263
# Generate a random 40-bit number
r.getrandbits(40)
# 595477188771L
# Choose a random item from a string or list
chars = string.printable
r.choice(chars)
# 'e'
# Randomize the order of a sequence
seq = ['a', 'b', 'c', 'd', 'e']
r.shuffle(seq)
<|fim▁hole|># ['c','d', 'a', 'e', 'b']
"ALLIGATOR".encode('rot13')
# 'NYYVTNGBE'
"NYYVTNGBE".encode('rot13')
# 'ALLIGATOR'
plaintext = "A secret-ish message!"
"".join(chr((ord(c) + 20) % 256) for c in plaintext)
# 'U4\x87yw\x86y\x88A}\x87|4\x81y\x87\x87u{y5'
ciphertext = 'U4\x87yw\x86y\x88A}\x87|4\x81y\x87\x87u{y5'
"".join(chr((ord(c) - 20) % 256) for c in ciphertext)
# 'A secret-ish message!'
plaintext = 0b110100001101001
one_time_pad = 0b110000011100001
bin(plaintext ^ one_time_pad)
# '0b100010001000'
decrypted = 0b100010001000 ^ one_time_pad
format(decrypted, 'x').decode('hex')
# 'hi'
import os
import binascii
# ASCII-encoded plaintext
plaintext = "this is a secret message"
plaintext_bits = int(binascii.hexlify(plaintext), 16)
print "plaintext (ascii):", plaintext
print "plaintext (hex):", plaintext_bits
# Generate the one-time pad
onetime_pad = int(binascii.hexlify(os.urandom(len(plaintext))), 16)
print "one-time pad: (hex):", onetime_pad
# Encrypt plaintext using XOR operation with one-time pad
ciphertext_bits = plaintext_bits ^ onetime_pad
print "encrypted text (hex):", ciphertext_bits
# Decrypt using XOR operation with one-time pad
decrypted_text = ciphertext_bits ^ onetime_pad
decrypted_text = binascii.unhexlify(hex(decrypted_text)[2:-1])
print "decrypted text (ascii):", decrypted_text
import random
import binascii
p1 = "this is the part where you run away"
p2 = "from bad cryptography practices."
# pad plaintexts with spaces to ensure equal length
p1 = p1.ljust(len(p2))
p2 = p2.ljust(len(p1))
p1 = int(binascii.hexlify(p1), 16)
p2 = int(binascii.hexlify(p2), 16)
# get random one-time pad
otp = random.SystemRandom().getrandbits(p1.bit_length())
# encrypt
c1 = p1 ^ otp
c2 = p2 ^ otp # otp reuse...not good!
print "c1 ^ c2 == p1 ^ p2 ?", c1 ^ c2 == p1 ^ p2
print "c1 ^ c2 =", hex(c1 ^ c2)
# the crib
crib = " the "
crib = int(binascii.hexlify(crib), 16)
xored = c1 ^ c2
print "crib =", hex(crib)
cbl = crib.bit_length()
xbl = xored.bit_length()
print
mask = (2**(cbl + 1) - 1)
fill = len(str(xbl / 8))
# crib dragging
for s in range(0, xbl - cbl + 8, 8):
xor = (xored ^ (crib << s)) & (mask << s)
out = binascii.unhexlify(hex(xor)[2:-1])
print "{:>{}} {}".format(s/8, fill, out)
from cryptography.fernet import Fernet
key = Fernet.generate_key()
f = Fernet(key)
ciphertext = f.encrypt("this is my plaintext")
decrypted = f.decrypt(ciphertext)
print decrypted
# this is my plaintext
import os
from cryptography.hazmat.primitives import padding
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
pt = "my plaintext"
backend = default_backend()
key = os.urandom(32)
iv = os.urandom(16)
padder = padding.PKCS7(128).padder()
pt = padder.update(pt) + padder.finalize()
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend)
encryptor = cipher.encryptor()
ct = encryptor.update(pt) + encryptor.finalize()
decryptor = cipher.decryptor()
out = decryptor.update(ct) + decryptor.finalize()
unpadder = padding.PKCS7(128).unpadder()
out = unpadder.update(out) + unpadder.finalize()
print out
import hashlib
hashlib.md5("hash me please").hexdigest()
# '760d92b6a6f974ae11904cd0a6fc2e90'
hashlib.sha1("hash me please").hexdigest()
# '1a58c9b3d138a45519518ee42e634600d1b52153'
import os
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from cryptography.hazmat.backends import default_backend
backend = default_backend()
salt = os.urandom(16)
kdf = Scrypt(salt=salt, length=64, n=2**14, r=8, p=1, backend=backend)
key = kdf.derive("your favorite password")
key
import hmac
import hashlib
secret_key = "my secret key"
ciphertext = "my ciphertext"
# generate HMAC
h = hmac.new(key=secret_key, msg=ciphertext, digestmod=hashlib.sha256)
print h.hexdigest()
# verify HMAC
hmac.compare_digest(h.hexdigest(), h.hexdigest())
p = 9576890767
q = 1299827
n = p * q
print n
# 12448301194997309
e = 65537
phi = (p - 1) * (q - 1)
phi % e != 0
# True
import sympy
d = sympy.numbers.igcdex(e, phi)[0]
print d
# 1409376745910033
m = 12345
c = pow(m, e, n)
print c
# 3599057382134015
pow(c, d, n)
# 12345
m = 0
while pow(m, e, n) != c:
m += 1
print m
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
# Generate a 2048-bit RSA private key (65537 is the standard public exponent).
# Fix: the transcription had wrapped the keyword `backend` across two lines
# ("b" / "ackend"), which is a syntax error when executed.
private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                                       backend=default_backend())
public_key = private_key.public_key()
private_pem = private_key.private_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=serialization.BestAvailableEncryption('your password here'))
public_pem = public_key.public_bytes(encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
print public_pem
print private_pem
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
import base64
with open("path/to/public_key.pem", "rb") as key_file:
public_key = serialization.load_pem_public_key(key_file.read(),
backend=default_backend())
message = "your secret message"
ciphertext = public_key.encrypt(message,
padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None))
b64_ciphertext = base64.urlsafe_b64encode(ciphertext)
print b64_ciphertext
plaintext = private_key.decrypt(ciphertext,
padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA256(),
label=None))
print plaintext
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding
signer = private_key.signer(padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), hashes.SHA256())
message = "A message of arbitrary length"
signer.update(message)
signature = signer.finalize()
public_key = private_key.public_key()
verifier = public_key.verifier(signature, padding.PSS(mgf=padding.MGF1(hashes.SHA256()), salt_length=padding.PSS.MAX_LENGTH), hashes.SHA256())
verifier.update(message)
verifier.verify()
####################################################################
# 5. Networking
####################################################################
import requests
r = requests.get('https://www.google.com/imghp')
r.content[:200]
# View status code
r.status_code
# 200
# View response header fields
r.headers
# {'Alt-Svc': 'quic=":443"; ma=2592000; v="36,35,34"',
# 'Cache-Control': 'private, max-age=0',
# 'Content-Encoding': 'gzip',
# 'Content-Type': 'text/html; charset=ISO-8859-1',
# 'Expires': '-1',
# 'P3P': 'CP="This is not a P3P policy! See https://www.google.com/support/accounts/answer/151657?hl=en for more info."',
# 'Server': 'gws',
# path=/; domain=.google.com; HttpOnly',
# 'Transfer-Encoding': 'chunked',
# 'X-Frame-Options': 'SAMEORIGIN',
# 'X-XSS-Protection': '1; mode=block'}
# Get content length in bytes
len(r.content)
# 10971
# Encoding
r.apparent_encoding
# 'ISO-8859-2'
# Time elapsed during request
r.elapsed
# datetime.timedelta(0, 0, 454447)
r.request.headers
# {'Accept': '*/*',
# 'Accept-Encoding': 'gzip, deflate',
# 'Connection': 'keep-alive',
# 'User-Agent': 'python-requests/2.12.4'}
custom_headers = {"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"}
r = requests.get("https://www.google.com/imghp", headers=custom_headers)
r.request.headers
# {'Accept': '*/*',
# 'Accept-Encoding': 'gzip, deflate',
# 'Connection': 'keep-alive',
# 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36'}
import requests
import logging
import http.client as http_client
http_client.HTTPConnection.debuglevel = 1
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
r = requests.get('https://www.google.com/')
# send: 'GET / HTTP/1.1\r\nHost: www.google.com\r\nConnection: keep-alive\r\nAccept-Encoding: gzip, deflate\r\nAccept: */*\r\nUser-Agent: python-requests/2.12.4\r\n\r\n'
# reply: 'HTTP/1.1 200 OK\r\n'
# header: Expires: -1
# header: Cache-Control: private, max-age=0
# header: Content-Type: text/html; charset=ISO-8859-1
# header: P3P: CP="This is not a P3P policy! See https://www.google.com/support/accounts/answer/151657?hl=en for more info."
# header: Content-Encoding: gzip
# header: Server: gws
# header: X-XSS-Protection: 1; mode=block
# header: X-Frame-Options: SAMEORIGIN
import urlparse
simple_url = "http://www.example.com/path/to/my/page"
parsed = urlparse.urlparse(simple_url)
parsed.scheme
parsed.hostname
parsed.path
url_with_query = "http://www.example.com/?page=1&key=Anvn4mo24"
query = urlparse.urlparse(url_with_query).query
urlparse.parse_qs(query)
# {'key': ['Anvn4mo24'], 'page': ['1']}
import urllib
url = 'https://www.example.com/%5EA-url-with-%-and-%5E?page=page+with%20spaces'
urllib.unquote(url)
# 'https://www.example.com/^A-url-with-%-and-^?page=page+with spaces'
chars = '!@#$%^%$#)'
urllib.quote(chars)
# '%21%40%23%24%25%5E%25%24%23%29'
urllib.unquote_plus(url)
# 'https://www.example.com/^A-url-with-%-and-^?page=page with spaces'
urllib.quote_plus('one two')
'one+two'
import requests
from bs4 import BeautifulSoup
r = requests.get("http://www.google.com")
soup = BeautifulSoup(r.content, "lxml")
soup.find_all('p')
soup.find_all('a')
# [<a class="gb1" href="http://www.google.com/imghp?hl=en&tab=wi">Images</a>,
# <a class="gb1" href="http://maps.google.com/maps?hl=en&tab=wl">Maps</a>,
# <a class="gb1" href="https://play.google.com/?hl=en&tab=w8">Play</a>,
# <a class="gb1" href="http://www.youtube.com/?tab=w1">YouTube</a>,
# <a class="gb1" href="http://news.google.com/nwshp?hl=en&tab=wn">News</a>,
# …]
for link in soup.find_all('a'):
print link.text, link["href"]
# Images http://www.google.com/imghp?hl=en&tab=wi
# Maps http://maps.google.com/maps?hl=en&tab=wl
# Play https://play.google.com/?hl=en&tab=w8
# YouTube http://www.youtube.com/?tab=w1
import dryscrape
from bs4 import BeautifulSoup
session = dryscrape.Session()
session.visit("http://www.google.com")
r = session.body()
soup = BeautifulSoup(r, "lxml")
from selenium import webdriver
driver = webdriver.Chrome("/path/to/chromedriver")
driver.get("http://www.google.com")
html = driver.page_source
driver.save_screenshot("screenshot.png")
driver.quit()
import smtplib
server = smtplib.SMTP('localhost', port=1025)
server.set_debuglevel(True)
server.sendmail("me@localhost", "you@localhost", "This is an email message")
server.quit()<|fim▁end|> | print seq |
<|file_name|>viewchecker.py<|end_file_name|><|fim▁begin|>import os
import argparse
from logger import HoneyHornetLogger
from threading import BoundedSemaphore
import threading
import logging
from datetime import date, datetime
from termcolor import colored
import http.client
import re
import time
class ViewChecker(HoneyHornetLogger):
def __init__(self, config=None):
HoneyHornetLogger.__init__(self)
self.config = config
self.verbose = False
self.banner = False
MAX_CONNECTIONS = 20 # max threads that can be created
self.CONNECTION_LOCK = BoundedSemaphore(value=MAX_CONNECTIONS)
self.TIMER_DELAY = 3 # timer delay used for Telnet testing
self.default_filepath = os.path.dirname(os.getcwd())
log_name = str(date.today()) + "_DEBUG.log"
log_name = os.path.join(self.default_filepath, "logs", log_name)
logging.basicConfig(filename=log_name, format='%(asctime)s %(levelname)s: %(message)s',
level=logging.DEBUG)
    def determine_camera_model(self, vulnerable_host, https=False, retry=False):
        """ simple banner grab with http.client """
        # Fetches "/" from each open port of `vulnerable_host` and prints the
        # HTML <title>, which for many IP cameras names the make/model.
        # `vulnerable_host` is normally an object exposing .ip and .ports;
        # on retry=True it is a bare host string (recursive HTTPS retry).
        # Concurrency is throttled by CONNECTION_LOCK.
        ports = []  # NOTE(review): list literal, but split(',') below treats it as a string -- that fallback path is broken
        self.CONNECTION_LOCK.acquire()
        service = "DETERMINE-CAMERA-MODEL"
        if retry is False:
            try:
                host = vulnerable_host.ip
                ports_to_check = set(vulnerable_host.ports)
            except vulnerable_host.DoesNotExist:
                # presumably an ORM-style model exception type -- TODO confirm
                host = str(vulnerable_host)
                ports_to_check = set(ports.split(',').strip())
        elif retry is True:
            # NOTE(review): ports_to_check is never assigned on this path, so
            # the loop below raises NameError -- retries never re-scan ports.
            host = vulnerable_host
        if self.verbose:
            print("[*] Checking camera make & model of {0}".format(host))
        logging.info('{0} set for {1} service'.format(host, service))
        try:
            for port in ports_to_check:
                # Retry calls switch to HTTPS after a plain-HTTP failure.
                if https is True:
                    conn = http.client.HTTPSConnection(host, port)
                else:
                    conn = http.client.HTTPConnection(host, port)
                conn.request("GET", "/")
                http_r1 = conn.getresponse()
                camera_check = http_r1.read(1024)  # first 1 KiB is enough to find <title>
                headers = http_r1.getheaders()
                if self.verbose:
                    print(http_r1.status, http_r1.reason)
                    print(http_r1.status, http_r1.reason)  # NOTE(review): apparent duplicate print
                results = re.findall(r"<title>(?P<camera_title>.*)</title>", str(camera_check))
                if results:
                    print(results)
                else:
                    print("No match for <Title> tag found.")
                # puts banner into the class instance of the host
                # vulnerable_host.put_banner(port, banner_txt, http_r1.status, http_r1.reason, headers)
                # banner_grab_filename = str(date.today()) + "_banner_grabs.log"
                # banner_grab_filename = os.path.join(self.default_filepath, "logs", banner_grab_filename)
                # with open(banner_grab_filename, 'a') as banner_log:
                #     banner_to_log = "host={0}, http_port={1},\nheaders={2},\nbanner={3}\n".format(host, port,
                #                                                                                  headers, banner_txt)
                #     banner_log.write(banner_to_log)
        except http.client.HTTPException:
            try:
                # Plain HTTP failed; retry the host over HTTPS once.
                self.determine_camera_model(host, https=True, retry=True)
            except Exception as error:
                logging.exception("{0}\t{1}\t{2}\t{3}".format(host, port, service, error))
        except Exception as error:
            # NOTE(review): indexing an exception (error[0]) raises TypeError on
            # Python 3; 104 is ECONNRESET -- error.errno / ConnectionResetError
            # would be the portable check.
            if error[0] == 104:
                self.determine_camera_model(host, https=True, retry=True)
            logging.exception("{0}\t{1}\t{2}\t{3}".format(host, port, service, error))
        except KeyboardInterrupt:
            exit(0)
        self.CONNECTION_LOCK.release()  # NOTE(review): skipped if an except clause raises -- try/finally would be safer
def run_view_checker(self, hosts_to_check):
"""
Function tests hosts for default credentials on open 'admin' ports
Utilizes threading to greatly speed up the scanning
"""
service = "building_threads"
logging.info("Building threads.")
logging.info("Verbosity set to {0}".format(self.verbose))
threads = []
print("[*] Testing vulnerable host ip addresses...")<|fim▁hole|> print('[*] checking >> {0}'.format(vulnerable_host.ip))
if set(vulnerable_host.ports):
t0 = threading.Thread(target=self.determine_camera_model, args=(vulnerable_host, ))
threads.append(t0)
logging.info("Starting {0} threads.".format(len(threads)))
for thread in threads:
thread.start()
for thread in threads:
thread.join(120)
except KeyboardInterrupt:
exit(0)
except threading.ThreadError as error:
logging.exception("{0}\t{1}".format(service, error))
except Exception as e:
logging.exception(e)<|fim▁end|> | try:
for vulnerable_host in hosts_to_check:
if self.verbose: |
<|file_name|>TransformPatternDlg.cpp<|end_file_name|><|fim▁begin|>// TransformPatternDlg.cpp : implementation file
#include <psycle/host/detail/project.private.hpp>
#include "TransformPatternDlg.hpp"
#include "Song.hpp"
#include "ChildView.hpp"
#include "MainFrm.hpp"
namespace psycle { namespace host {
static const char notes[12][3]={"C-","C#","D-","D#","E-","F-","F#","G-","G#","A-","A#","B-"};
static const char *empty ="Empty";
static const char *nonempty="Nonempty";
static const char *all="All";
static const char *same="Same";
static const char *off="off";
static const char *twk="twk";
static const char *tws="tws";
static const char *mcm="mcm";
// CTransformPatternDlg dialog
IMPLEMENT_DYNAMIC(CTransformPatternDlg, CDialog)
// Dialog for pattern search-and-replace.  Holds references to the song being
// edited and the pattern view so the button handlers can act on them.
CTransformPatternDlg::CTransformPatternDlg(Song& _pSong, CChildView& _cview, CWnd* pParent /*=NULL*/)
	: CDialog(CTransformPatternDlg::IDD, pParent)
	, song(_pSong), cview(_cview), m_applyto(0)  // default scope: whole song
{
}
// Nothing to release: members are references and MFC controls owned elsewhere.
CTransformPatternDlg::~CTransformPatternDlg()
{
}
// MFC DDX: binds the dialog's controls to members.  Called by the framework
// from UpdateData().
void CTransformPatternDlg::DoDataExchange(CDataExchange* pDX)
{
	CDialog::DoDataExchange(pDX);
	// Search-criteria combo boxes (note / instrument / machine).
	DDX_Control(pDX, IDC_SEARCHNOTECOMB, m_searchnote);
	DDX_Control(pDX, IDC_SEARCHINSTCOMB, m_searchinst);
	DDX_Control(pDX, IDC_SEARCHMACHCOMB, m_searchmach);
	// Replacement-value combo boxes.
	DDX_Control(pDX, IDC_REPLNOTECOMB, m_replacenote);
	DDX_Control(pDX, IDC_REPLINSTCOMB, m_replaceinst);
	DDX_Control(pDX, IDC_REPLMACHCOMB, m_replacemach);
	DDX_Control(pDX, IDC_REPLTWEAKCHECK, m_replacetweak);
	// Radio group selecting the scope (song / current pattern / selection).
	DDX_Radio(pDX,IDC_APPLYTOSONG, m_applyto);
	DDX_Control(pDX, IDC_CH_INCLUDEPAT, m_includePatNotInSeq);
}
BEGIN_MESSAGE_MAP(CTransformPatternDlg, CDialog)
ON_BN_CLICKED(IDD_SEARCH, &CTransformPatternDlg::OnBnClickedSearch)
ON_BN_CLICKED(IDD_REPLACE, &CTransformPatternDlg::OnBnClickedReplace)
END_MESSAGE_MAP()
BOOL CTransformPatternDlg::OnInitDialog()
{
CDialog::OnInitDialog();
//Note (search and replace)
m_searchnote.AddString(all); m_searchnote.SetItemData(0,1003);
m_searchnote.AddString(empty); m_searchnote.SetItemData(1,1001);
m_searchnote.AddString(nonempty); m_searchnote.SetItemData(2,1002);
m_replacenote.AddString(same); m_replacenote.SetItemData(0,1002);
m_replacenote.AddString(empty); m_replacenote.SetItemData(1,1001);
bool is440 = PsycleGlobal::conf().patView().showA440;
for (int i=notecommands::c0; i <= notecommands::b9;i++) {
std::ostringstream os;
os << notes[i%12];
if (is440) os << (i/12)-1;
else os << (i/12);
m_searchnote.AddString(os.str().c_str()); m_searchnote.SetItemData(3+i,i);
m_replacenote.AddString(os.str().c_str()); m_replacenote.SetItemData(2+i,i);
}
m_searchnote.AddString(off); m_searchnote.SetItemData(123,notecommands::release);
m_searchnote.AddString(twk); m_searchnote.SetItemData(124,notecommands::tweak);
m_searchnote.AddString(tws); m_searchnote.SetItemData(125,notecommands::tweakslide);
m_searchnote.AddString(mcm); m_searchnote.SetItemData(126,notecommands::midicc);
m_replacenote.AddString(off); m_replacenote.SetItemData(122,notecommands::release);
m_replacenote.AddString(twk); m_replacenote.SetItemData(123,notecommands::tweak);
m_replacenote.AddString(tws); m_replacenote.SetItemData(124,notecommands::tweakslide);
m_replacenote.AddString(mcm); m_replacenote.SetItemData(125,notecommands::midicc);
m_searchnote.SetCurSel(0);
m_replacenote.SetCurSel(0);
//Inst (search and replace)
m_searchinst.AddString(all); m_searchinst.SetItemData(0,1003);
m_searchinst.AddString(empty); m_searchinst.SetItemData(1,1001);
m_searchinst.AddString(nonempty); m_searchinst.SetItemData(2,1002);
m_replaceinst.AddString(same); m_replaceinst.SetItemData(0,1002);
m_replaceinst.AddString(empty); m_replaceinst.SetItemData(1,1001);
for (int i=0; i < 0xFF; i++) {
std::ostringstream os;
if (i < 16) os << "0";
os << std::uppercase << std::hex << i;
m_searchinst.AddString(os.str().c_str()); m_searchinst.SetItemData(3+i,i);
m_replaceinst.AddString(os.str().c_str()); m_replaceinst.SetItemData(2+i,i);
}
m_searchinst.SetCurSel(0);
m_replaceinst.SetCurSel(0);
//Mach (search and replace)
m_searchmach.AddString(all); m_searchmach.SetItemData(0,1003);
m_searchmach.AddString(empty); m_searchmach.SetItemData(1,1001);
m_searchmach.AddString(nonempty); m_searchmach.SetItemData(2,1002);
m_replacemach.AddString(same); m_replacemach.SetItemData(0,1002);
m_replacemach.AddString(empty); m_replacemach.SetItemData(1,1001);
for (int i=0; i < 0xFF; i++) {
std::ostringstream os;
if (i < 16) os << "0";
os << std::uppercase << std::hex << i;
m_searchmach.AddString(os.str().c_str()); m_searchmach.SetItemData(3+i,i);
m_replacemach.AddString(os.str().c_str()); m_replacemach.SetItemData(2+i,i);
}
m_searchmach.SetCurSel(0);
m_replacemach.SetCurSel(0);
if (cview.blockSelected) m_applyto = 2;
UpdateData(FALSE);<|fim▁hole|> }
// CTransformPatternDlg message handlers
void CTransformPatternDlg::OnBnClickedSearch()
{
CSearchReplaceMode mode = cview.SetupSearchReplaceMode(
m_searchnote.GetItemData(m_searchnote.GetCurSel()),
m_searchinst.GetItemData(m_searchinst.GetCurSel()),
m_searchmach.GetItemData(m_searchmach.GetCurSel()));
CCursor cursor;
cursor.line = -1;
int pattern = -1;
UpdateData (TRUE);
if (m_applyto == 0) {
bool includeOther = m_includePatNotInSeq.GetCheck() > 0;
int lastPatternUsed = (includeOther )? song.GetHighestPatternIndexInSequence() : MAX_PATTERNS;
for (int currentPattern = 0; currentPattern <= lastPatternUsed; currentPattern++)
{
if (song.IsPatternUsed(currentPattern, !includeOther))
{
CSelection sel;
sel.start.line = 0; sel.start.track = 0;
sel.end.line = song.patternLines[currentPattern];
sel.end.track = MAX_TRACKS;
cursor = cview.SearchInPattern(currentPattern, sel , mode);
if (cursor.line != -1) {
pattern=currentPattern;
break;
}
}
}
}
else if (m_applyto == 1) {
CSelection sel;
sel.start.line = 0; sel.start.track = 0;
sel.end.line = song.patternLines[cview._ps()];
sel.end.track = MAX_TRACKS;
cursor = cview.SearchInPattern(cview._ps(), sel , mode);
pattern = cview._ps();
}
else if (m_applyto == 2 && cview.blockSelected) {
cursor = cview.SearchInPattern(cview._ps(), cview.blockSel , mode);
pattern = cview._ps();
}
else {
MessageBox("No block selected for action","Search and replace",MB_ICONWARNING);
return;
}
if (cursor.line == -1) {
MessageBox("Nothing found that matches the selected options","Search and replace",MB_ICONINFORMATION);
}
else {
cview.editcur = cursor;
if (cview._ps() != pattern) {
int pos = -1;
for (int i=0; i < MAX_SONG_POSITIONS; i++) {
if (song.playOrder[i] == pattern) {
pos = i;
break;
}
}
if (pos == -1){
pos = song.playLength;
++song.playLength;
song.playOrder[pos]=pattern;
((CMainFrame*)cview.pParentFrame)->UpdateSequencer();
}
cview.editPosition = pos;
memset(song.playOrderSel,0,MAX_SONG_POSITIONS*sizeof(bool));
song.playOrderSel[cview.editPosition]=true;
((CMainFrame*)cview.pParentFrame)->UpdatePlayOrder(true);
cview.Repaint(draw_modes::pattern);
}
else {
cview.Repaint(draw_modes::cursor);
}
}
}
void CTransformPatternDlg::OnBnClickedReplace()
{
CSearchReplaceMode mode = cview.SetupSearchReplaceMode(
m_searchnote.GetItemData(m_searchnote.GetCurSel()),
m_searchinst.GetItemData(m_searchinst.GetCurSel()),
m_searchmach.GetItemData(m_searchmach.GetCurSel()),
m_replacenote.GetItemData(m_replacenote.GetCurSel()),
m_replaceinst.GetItemData(m_replaceinst.GetCurSel()),
m_replacemach.GetItemData(m_replacemach.GetCurSel()),
m_replacetweak.GetCheck());
bool replaced=false;
UpdateData (TRUE);
if (m_applyto == 0) {
bool includeOther = m_includePatNotInSeq.GetCheck() > 0;
int lastPatternUsed = (includeOther )? song.GetHighestPatternIndexInSequence() : MAX_PATTERNS;
for (int currentPattern = 0; currentPattern <= lastPatternUsed; currentPattern++)
{
if (song.IsPatternUsed(currentPattern, !includeOther))
{
CSelection sel;
sel.start.line = 0; sel.start.track = 0;
sel.end.line = song.patternLines[currentPattern];
sel.end.track = MAX_TRACKS;
replaced=cview.SearchReplace(currentPattern, sel , mode);
}
}
}
else if (m_applyto == 1) {
CSelection sel;
sel.start.line = 0; sel.start.track = 0;
sel.end.line = song.patternLines[cview._ps()];
sel.end.track = MAX_TRACKS;
replaced=cview.SearchReplace(cview._ps(), sel, mode);
}
else if (m_applyto == 2 && cview.blockSelected) {
replaced=cview.SearchReplace(cview._ps(), cview.blockSel , mode);
}
else {
MessageBox("No block selected for action","Search and replace",MB_ICONWARNING);
return;
}
if (replaced) {
cview.Repaint(draw_modes::pattern);
}
else {
MessageBox("Nothing found that matches the selected options","Search and replace",MB_ICONINFORMATION);
}
}
} // namespace
} // namespace<|fim▁end|> |
return true; // return true unless you set the focus to a control
// EXCEPTION: OCX Property Pages should return false |
<|file_name|>webpack.vendor.js<|end_file_name|><|fim▁begin|>module.exports = [
'babel-polyfill',
'react',
'react-redux',
'react-router',
'react-dom',
'redux',
'redux-thunk',
'seamless-immutable',
'react-router-redux',<|fim▁hole|> 'styled-components',
'prop-types',
];<|fim▁end|> | 'history',
'lodash', |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name="Ente",<|fim▁hole|> license='LICENSE',
url='https://github.com/rizsotto/Ente',
long_description=open('README.md').read(),
scripts=['bin/ente']
)<|fim▁end|> | version="0.1",
description="place finder on commoncrawl dataset",
author="László Nagy",
author_email="[email protected]", |
<|file_name|>whoispy.py<|end_file_name|><|fim▁begin|>import re
import sys
import whoisSrvDict
import whoispy_sock
import parser_branch
OK = '\033[92m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def query(domainName):
rawMsg = ""
tldName = ""
whoisSrvAddr = ""
regex = re.compile('.+\..+')
match = regex.search(domainName)<|fim▁hole|>
# Divice TLD
regex = re.compile('\..+')
match = regex.search(domainName)
if match:
tldName = match.group()
else:
_display_fail("Can not parse TLD")
return None
# Get TLD List
if not (tldName in whoisSrvDict.get_whoisSrvDict()):
_display_fail("Not Found TLD whois server")
return None
whoisSrvAddr = whoisSrvDict.get_whoisSrvDict().get(tldName)
rawMsg = whoispy_sock.get_rawMsg(whoisSrvAddr , domainName, 43)
return parser_branch.get_parser(rawMsg, whoisSrvAddr)
# Display method
def _display_fail(msg):
    """Echo *msg* to stdout wrapped in the ANSI failure (red) colour codes."""
    sys.stdout.write("{0}{1}\n{2}".format(FAIL, msg, ENDC))
def _display_safe(msg):
    """Echo *msg* to stdout wrapped in the ANSI success (green) colour codes."""
    sys.stdout.write("{0}{1}\n{2}".format(OK, msg, ENDC))
# Invalid domain
_display_fail("Invalid domain format")
return None |
<|file_name|>_pad.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class PadValidator(_plotly_utils.basevalidators.CompoundValidator):<|fim▁hole|> plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Pad"),
data_docs=kwargs.pop(
"data_docs",
"""
b
The amount of padding (in px) along the bottom
of the component.
l
The amount of padding (in px) on the left side
of the component.
r
The amount of padding (in px) on the right side
of the component.
t
The amount of padding (in px) along the top of
the component.
""",
),
**kwargs
)<|fim▁end|> | def __init__(self, plotly_name="pad", parent_name="layout.title", **kwargs):
super(PadValidator, self).__init__( |
<|file_name|>interface.ts<|end_file_name|><|fim▁begin|>/** @module transition */ /** for typedoc */
import {StateDeclaration} from "../state/interface";
import {Predicate} from "../common/common";
import {Transition} from "./transition";
import {State} from "../state/stateObject";
import {PathNode} from "../path/node";
import {TargetState} from "../state/targetState";
import {UIInjector} from "../common/interface";
/**
* The TransitionOptions object can be used to change the behavior of a transition.
*
* It is passed as the third argument to [[$state.go]], [[$state.transitionTo]], and
* can be used with the [[ui-sref-opts]] directive.
*/
export interface TransitionOptions {
/**
* This option changes how the Transition interacts with the browser's location bar (URL).
*
* - If `true`, it will update the url in the location bar.
* - If `false`, it will not update the url in the location bar.
* - If it is the string "`replace`", it will update the url and also replace the last history record.
*
* @default `true`
*/
location ?: (boolean|string);
/**
* When transitioning to relative path (e.g '`^`'), this option defines which state to be relative from.
* @default `$state.current`
*/
relative ?: (string|StateDeclaration|State);
/**
* This option sets whether or not the transition's parameter values should be inherited from
* the current state parameters.
*
* - If `true`, it will inherit parameters from current state.
* - If `false`, only the parameters which are provided to `transitionTo` will be used.
*
* @default `false`
*/
inherit ?: boolean;
/**
* @deprecated
*/
notify ?: boolean;
/**
* This option may be used to force states which are currently active to reload.
*
* During a normal transition, a state is "retained" if:
* - It was previously active
* - The state's parameter values have not changed
* - All the parent states' parameter values have not changed
*
* Forcing a reload of a state will cause it to be exited and entered, which will:
* - Refetch that state's resolve data
* - Exit the state (onExit hook)
* - Re-enter the state (onEnter hook)
* - Re-render the views (controllers and templates)
*
* - When `true`, the destination state (and all parent states) will be reloaded.
* - When it is a string and is the name of a state, or when it is a State object,
* that state and any children states will be reloaded.
*
* @default `false`
*/
reload ?: (boolean|string|StateDeclaration|State);
/**
* You can define your own Transition Options inside this property and use them, e.g., from a Transition Hook
*/
custom ?: any;
/** @hidden @internal */
reloadState ?: (State);
/** @hidden @internal
* If this transition is a redirect, this property should be the original Transition (which was redirected to this one)
*/
redirectedFrom?: Transition;
/** @hidden @internal */
current ?: () => Transition;
/** @hidden @internal */
source ?: "sref"|"url"|"redirect"|"unknown";
}
/** @hidden @internal */
export interface TransitionHookOptions {
async ?: boolean;
rejectIfSuperseded ?: boolean;
current ?: () => Transition; //path?
transition ?: Transition;
hookType ?: string;
target ?: any;
traceData ?: any;
bind ?: any;
stateHook ?: boolean;
}
/**
* TreeChanges encapsulates the various Paths that are involved in a Transition.
*
* Get a TreeChanges object using [[Transition.treeChanges]]
*
* A UI-Router Transition is from one Path in a State Tree to another Path. For a given Transition,
* this object stores the "to" and "from" paths, as well as subsets of those: the "retained",
* "exiting" and "entering" paths.
*
* Each path in TreeChanges is an array of [[PathNode]] objects. Each PathNode in the array corresponds to a portion
* of a nested state.
*
* For example, if you had a nested state named `foo.bar.baz`, it would have three
* portions, `foo, bar, baz`. If you transitioned **to** `foo.bar.baz` and inspected the [[TreeChanges.to]]
* Path, you would find a node in the array for each portion: `foo`, `bar`, and `baz`.
*
* ---
*
* @todo show visual state tree
*/
export interface TreeChanges {
/** @nodoc */
[key: string]: PathNode[];
/** The path of nodes in the state tree that the transition is coming *from* */
from: PathNode[];
/** The path of nodes in the state tree that the transition is going *to* */
to: PathNode[];
/**
* The path of active nodes that the transition is retaining.
*
* These nodes are neither exited, nor entered.
* Before and after the transition is successful, these nodes are active.
*/
retained: PathNode[];
/**
* The path of previously active nodes that the transition is exiting.
*
* After the Transition is successful, these nodes are no longer active.
*
* Note that a state that is being reloaded (due to parameter values changing, or `reload: true`) may be in both the
* `exiting` and `entering` paths.
*/
exiting: PathNode[];
/**
* The path of nodes that the transition is entering.
*
* After the Transition is successful, these nodes will be active.
* Because they are entering, they have their resolves fetched, `onEnter` hooks run, and their views
* (component(s) or controller(s)+template(s)) refreshed.
*
* Note that a state that is reloaded (due to parameter values changing, or `reload: true`) may be in both the
* `exiting` and `entering` paths.
*/
entering: PathNode[];
}
export type IErrorHandler = (error: Error) => void;
export type IHookGetter = (hookName: string) => IEventHook[];
export type IHookRegistration = (matchCriteria: HookMatchCriteria, callback: HookFn, options?: HookRegOptions) => Function;
/**
* The signature for Transition Hooks.
*
* Transition hooks are callback functions that hook into the lifecycle of transitions.
* As a transition runs, it reaches certain lifecycle events.
* As each event occurs, the hooks which are registered for the event are called (in priority order).
*
* A transition hook may alter a Transition by returning a [[HookResult]].
*
* @param transition the current [[Transition]]
* @param injector (for ng1 or ng2 only) the injector service
*
* @returns a [[HookResult]] which may alter the transition
*
* @see
*
* - [[IHookRegistry.onBefore]]
* - [[IHookRegistry.onStart]]
* - [[IHookRegistry.onFinish]]
* - [[IHookRegistry.onSuccess]]
* - [[IHookRegistry.onError]]
*/
export interface TransitionHookFn {
(transition: Transition) : HookResult
}
/**
* The signature for Transition State Hooks.
*
* A function which hooks into a lifecycle event for a specific state.
*
* Transition State Hooks are callback functions that hook into the lifecycle events of specific states during a transition.
* As a transition runs, it may exit some states, retain (keep) states, and enter states.
* As each lifecycle event occurs, the hooks which are registered for the event and that state are called (in priority order).
*
* @param transition the current [[Transition]]
* @param state the [[State]] that the hook is bound to
* @param injector (for ng1 or ng2 only) the injector service
*
* @returns a [[HookResult]] which may alter the transition
*
* @see
*
* - [[IHookRegistry.onExit]]
* - [[IHookRegistry.onRetain]]
* - [[IHookRegistry.onEnter]]
*/
export interface TransitionStateHookFn {
(transition: Transition, state: State) : HookResult
}
export type HookFn = (TransitionHookFn|TransitionStateHookFn);
/**
* The return value of a [[TransitionHookFn]] or [[TransitionStateHookFn]]
*
* When returned from a [[TransitionHookFn]] or [[TransitionStateHookFn]], these values alter the running [[Transition]]:
*
* - `false`: the transition will be cancelled.
* - [[TargetState]]: the transition will be redirected to the new target state (see: [[StateService.target]])
* - `Promise`: the transition will wait for the promise to resolve or reject
* - If the promise is rejected (or resolves to `false`), the transition will be cancelled
* - If the promise resolves to a [[TargetState]], the transition will be redirected
* - If the promise resolves to anything else, the transition will resume
* - Anything else: the transition will resume
*/
export type HookResult = (boolean | TargetState | void | Promise<boolean|TargetState|void>);
/**
* These options may be provided when registering a Transition Hook (such as `onStart`)
*/
export interface HookRegOptions {
/**
* Sets the priority of the registered hook
*
* Hooks of the same type (onBefore, onStart, etc) are invoked in priority order. A hook with a higher priority
* is invoked before a hook with a lower priority.
*
* The default hook priority is 0
*/
priority?: number;
/**
* Specifies what `this` is bound to during hook invocation.
*/
bind?: any;
}
/**
* This interface specifies the api for registering Transition Hooks. Both the
* [[TransitionService]] and also the [[Transition]] object itself implement this interface.
* Note: the Transition object only allows hooks to be registered before the Transition is started.
*/
export interface IHookRegistry {
/**
* Registers a [[TransitionHookFn]], called *before a transition starts*.
*
* Registers a transition lifecycle hook, which is invoked before a transition even begins.
* This hook can be useful to implement logic which prevents a transition from even starting, such
* as authentication, redirection
*
* See [[TransitionHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onBefore` hooks are invoked *before a Transition starts*.
* No resolves have been fetched yet.
* Each `onBefore` hook is invoked synchronously, in the same call stack as [[StateService.transitionTo]].
* The registered `onBefore` hooks are invoked in priority order.
*
* Note: during the `onBefore` phase, additional hooks can be added to the specific [[Transition]] instance.
* These "on-the-fly" hooks only affect the currently running transition..
*
* ### Return value
*
* The hook's return value can be used to pause, cancel, or redirect the current Transition.
* See [[HookResult]] for more information.
*
* If any hook modifies the transition *synchronously* (by throwing, returning `false`, or returning
* a [[TargetState]]), the remainder of the hooks are skipped.
* If a hook returns a promise, the remainder of the `onBefore` hooks are still invoked synchronously.
* All promises are resolved, and processed asynchronously before the `onStart` phase of the Transition.
*
* ### Examples
*
* #### Default Substate
*
* This example redirects any transition from 'home' to 'home.dashboard'. This is commonly referred to as a
* "default substate".
*
* @example
* ```js
* // ng2
* transitionService.onBefore({ to: 'home' }, (trans: Transition) =>
* trans.router.stateService.target("home.dashboard"));
* ```
*
* #### Data Driven Default Substate
*
* This example provides data-driven default substate functionality. It matches on a transition to any state
* which has `defaultSubstate: "some.sub.state"` defined. See: [[Transition.to]] which returns the "to state"
* definition.
*
* @example
* ```js
* // ng1
* // state declaration
* {
* name: 'home',
* template: '<div ui-view/>',
* defaultSubstate: 'home.dashboard'
* }
*
* var criteria = {
* to: function(state) {
* return state.defaultSubstate != null;
* }
* }
*
* $transitions.onBefore(criteria, function(trans: Transition) {
* var substate = trans.to().defaultSubstate;
* return trans.router.stateService.target(substate);
* });
* ```
*
*
* #### Require authentication
*
* This example cancels a transition to a state which requires authentication, if the user is not currently authenticated.
*
* This example assumes a state tree where all states which require authentication are children of a parent `'requireauth'` state.
* This example assumes `MyAuthService` synchronously returns a boolean from `isAuthenticated()`.
*
* @example
* ```js
* // ng1
* $transitions.onBefore( { to: 'requireauth.**' }, function(trans) {
* var myAuthService = trans.injector().get('MyAuthService');
* // If isAuthenticated returns false, the transition is cancelled.
* return myAuthService.isAuthenticated();
* });
* ```
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be invoked.<|fim▁hole|>
/**
* Registers a [[TransitionHookFn]], called when a transition starts.
*
* Registers a transition lifecycle hook, which is invoked as a transition starts running.
* This hook can be useful to perform some asynchronous action before completing a transition.
*
* See [[TransitionHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onStart` hooks are invoked asynchronously when the Transition starts running.
* This happens after the `onBefore` phase is complete.
* At this point, the Transition has not yet exited nor entered any states.
* The registered `onStart` hooks are invoked in priority order.
*
* Note: A built-in `onStart` hook with high priority is used to fetch any eager resolve data.
*
* ### Return value
*
* The hook's return value can be used to pause, cancel, or redirect the current Transition.
* See [[HookResult]] for more information.
*
* ### Example
*
* #### Login during transition
*
* This example intercepts any transition to a state which requires authentication, when the user is
* not currently authenticated. It allows the user to authenticate asynchronously, then resumes the
* transition. If the user did not authenticate successfully, it redirects to the "guest" state, which
* does not require authentication.
*
* This example assumes:
* - a state tree where all states which require authentication are children of a parent `'auth'` state.
* - `MyAuthService.isAuthenticated()` synchronously returns a boolean.
* - `MyAuthService.authenticate()` presents a login dialog, and returns a promise which is resolved
* or rejected, whether or not the login attempt was successful.
*
* @example
* ```js
* // ng1
* $transitions.onStart( { to: 'auth.**' }, function(trans) {
* var $state = trans.router.stateService;
* var MyAuthService = trans.injector().get('MyAuthService');
*
* // If the user is not authenticated
* if (!MyAuthService.isAuthenticated()) {
*
* // Then return a promise for a successful login.
* // The transition will wait for this promise to settle
*
* return MyAuthService.authenticate().catch(function() {
*
* // If the authenticate() method failed for whatever reason,
* // redirect to a 'guest' state which doesn't require auth.
* return $state.target("guest");
* });
* }
* });
* ```
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onStart(matchCriteria: HookMatchCriteria, callback: TransitionHookFn, options?: HookRegOptions): Function;
/**
* Registers a [[TransitionStateHookFn]], called when a specific state is entered.
*
* Registers a lifecycle hook, which is invoked (during a transition) when a specific state is being entered.
*
* Since this hook is run only when the specific state is being *entered*, it can be useful for
* performing tasks when entering a submodule/feature area such as initializing a stateful service,
* or for guarding access to a submodule/feature area.
*
* See [[TransitionStateHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* `onEnter` hooks generally specify `{ entering: 'somestate' }`.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onEnter` hooks are invoked when the Transition is entering a state.
* States are entered after the `onRetain` phase is complete.
* If more than one state is being entered, the parent state is entered first.
* The registered `onEnter` hooks for a state are invoked in priority order.
*
* Note: A built-in `onEnter` hook with high priority is used to fetch lazy resolve data for states being entered.
*
* ### Return value
*
* The hook's return value can be used to pause, cancel, or redirect the current Transition.
* See [[HookResult]] for more information.
*
* ### Inside a state declaration
*
* Instead of registering `onEnter` hooks using the [[TransitionService]], you may define an `onEnter` hook
* directly on a state declaration (see: [[StateDeclaration.onEnter]]).
*
*
* ### Examples
*
* #### Audit Log
*
* This example uses a service to log that a user has entered the admin section of an app.
* This assumes that there are substates of the "admin" state, such as "admin.users", "admin.pages", etc.
* @example
* ```
*
* $transitions.onEnter({ entering: 'admin' }, function(transition, state) {
* var AuditService = trans.injector().get('AuditService');
* AuditService.log("Entered " + state.name + " module while transitioning to " + transition.to().name);
* }
* ```
*
* #### Audit Log (inside a state declaration)
*
* The `onEnter` inside this state declaration is syntactic sugar for the previous Audit Log example.
* ```
* {
* name: 'admin',
* component: 'admin',
* onEnter: function($transition$, $state$) {
* var AuditService = $transition$.injector().get('AuditService');
* AuditService.log("Entered " + state.name + " module while transitioning to " + transition.to().name);
* }
* }
* ```
*
* Note: A state declaration's `onEnter` function is injected for Angular 1 only.
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onEnter(matchCriteria: HookMatchCriteria, callback: TransitionStateHookFn, options?: HookRegOptions): Function;
/**
* Registers a [[TransitionStateHookFn]], called when a specific state is retained/kept.
*
* Registers a lifecycle hook, which is invoked (during a transition) for
* a specific state that was previously active and is not being entered nor exited.
*
* Since this hook is invoked when a transition is when the state is kept, it means the transition
* is coming *from* a substate of the kept state *to* a substate of the kept state.
* This hook can be used to perform actions when the user moves from one substate to another, such as
* between steps in a wizard.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* `onRetain` hooks generally specify `{ retained: 'somestate' }`.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onRetain` hooks are invoked after any `onExit` hooks have been fired.
* If more than one state is retained, the child states' `onRetain` hooks are invoked first.
* The registered `onRetain` hooks for a state are invoked in priority order.
*
* ### Return value
*
* The hook's return value can be used to pause, cancel, or redirect the current Transition.
* See [[HookResult]] for more information.
*
* ### Inside a state declaration
*
* Instead of registering `onRetain` hooks using the [[TransitionService]], you may define an `onRetain` hook
* directly on a state declaration (see: [[StateDeclaration.onRetain]]).
*
* Note: A state declaration's `onRetain` function is injected for Angular 1 only.
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onRetain(matchCriteria: HookMatchCriteria, callback: TransitionStateHookFn, options?: HookRegOptions): Function;
/**
* Registers a [[TransitionStateHookFn]], called when a specific state is exited.
*
* Registers a lifecycle hook, which is invoked (during a transition) when a specific state is being exited.
*
* Since this hook is run only when the specific state is being *exited*, it can be useful for
* performing tasks when leaving a submodule/feature area such as cleaning up a stateful service,
* or for preventing the user from leaving a state or submodule until some criteria is satisfied.
*
* See [[TransitionStateHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* `onExit` hooks generally specify `{ exiting: 'somestate' }`.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onExit` hooks are invoked when the Transition is exiting a state.
* States are exited after any `onStart` phase is complete.
* If more than one state is being exited, the child states are exited first.
* The registered `onExit` hooks for a state are invoked in priority order.
*
* ### Return value
*
* The hook's return value can be used to pause, cancel, or redirect the current Transition.
* See [[HookResult]] for more information.
*
* ### Inside a state declaration
*
* Instead of registering `onExit` hooks using the [[TransitionService]], you may define an `onExit` hook
* directly on a state declaration (see: [[StateDeclaration.onExit]]).
*
* Note: A state declaration's `onExit` function is injected for Angular 1 only.
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onExit(matchCriteria: HookMatchCriteria, callback: TransitionStateHookFn, options?: HookRegOptions): Function;
/**
* Registers a [[TransitionHookFn]], called *just before a transition finishes*.
*
* Registers a transition lifecycle hook, which is invoked just before a transition finishes.
* This hook is a last chance to cancel or redirect a transition.
*
* See [[TransitionHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onFinish` hooks are invoked after the `onEnter` phase is complete.
* These hooks are invoked just before the transition is "committed".
* Each hook is invoked in priority order.
*
* ### Return value
*
* The hook's return value can be used to pause, cancel, or redirect the current Transition.
* See [[HookResult]] for more information.
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onFinish(matchCriteria: HookMatchCriteria, callback: TransitionHookFn, options?: HookRegOptions): Function;
/**
* Registers a [[TransitionHookFn]], called after a successful transition completed.
*
* Registers a transition lifecycle hook, which is invoked after a transition successfully completes.
*
* See [[TransitionHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* `onSuccess` hooks are chained off the Transition's promise (see [[Transition.promise]]).
* If the Transition is successful and its promise is resolved, then the `onSuccess` hooks are invoked.
* Since these hooks are run after the transition is over, their return value is ignored.
* The `onSuccess` hooks are invoked in priority order.
*
* ### Return value
*
* Since the Transition is already completed, the hook's return value is ignored
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onSuccess(matchCriteria: HookMatchCriteria, callback: TransitionHookFn, options?: HookRegOptions): Function;
/**
* Registers a [[TransitionHookFn]], called after a transition has errored.
*
* Registers a transition lifecycle hook, which is invoked after a transition has been rejected for any reason.
*
* See [[TransitionHookFn]] for the signature of the function.
*
* The [[HookMatchCriteria]] is used to determine which Transitions the hook should be invoked for.
* To match all Transitions, use an empty criteria object `{}`.
*
* ### Lifecycle
*
* The `onError` hooks are chained off the Transition's promise (see [[Transition.promise]]).
* If a Transition fails, its promise is rejected and the `onError` hooks are invoked.
* The `onError` hooks are invoked in priority order.
*
* Since these hooks are run after the transition is over, their return value is ignored.
*
* A transition "errors" if it was started, but failed to complete (for any reason).
* A *non-exhaustive list* of reasons a transition can error:
*
* - A transition was cancelled because a new transition started while it was still running
* - A transition was cancelled by a Transition Hook returning false
* - A transition was redirected by a Transition Hook returning a [[TargetState]]
* - A transition was invalid because the target state/parameters are not valid
* - A transition was ignored because the target state/parameters are exactly the current state/parameters
* - A Transition Hook or resolve function threw an error
* - A Transition Hook returned a rejected promise
* - A resolve function returned a rejected promise
*
* To check the failure reason, inspect the return value of [[Transition.error]].
*
* Note: `onError` should be used for targeted error handling, or error recovery.
* For simple catch-all error reporting, use [[StateService.defaultErrorHandler]].
*
* ### Return value
*
* Since the Transition is already completed, the hook's return value is ignored
*
* @param matchCriteria defines which Transitions the Hook should be invoked for.
* @param callback the hook function which will be injected and invoked.
* @returns a function which deregisters the hook.
*/
onError(matchCriteria: HookMatchCriteria, callback: TransitionHookFn, options?: HookRegOptions): Function;
/**
* Returns all the registered hooks of a given `hookName` type
*
* @example
* ```
*
* $transitions.getHooks("onEnter")
* ```
*/
getHooks(hookName: string): IEventHook[];
}
/** A predicate type which takes a [[State]] and returns a boolean */
export type IStateMatch = Predicate<State>
/**
* This object is used to configure whether or not a Transition Hook is invoked for a particular transition,
* based on the Transition's "to state" and "from state".
*
* Each property (`to`, `from`, `exiting`, `retained`, and `entering`) can be a state [[Glob]] string,
* a boolean, or a function that takes a state and returns a boolean (see [[HookMatchCriterion]])
*
* All properties are optional. If any property is omitted, it is replaced with the value `true`, and always matches.
* To match any transition, use an empty criteria object `{}`.
*
* @example
* ```js
*
* // This matches a transition coming from the `parent` state and going to the `parent.child` state.
* var match = {
* to: 'parent',
* from: 'parent.child'
* }
* ```
*
* @example
* ```js
*
* // This matches a transition coming from any substate of `parent` and going directly to the `parent` state.
* var match = {
* to: 'parent',
* from: 'parent.**'
* }
* ```
*
* @example
* ```js
*
* // This matches a transition coming from any state and going to any substate of `mymodule`
* var match = {
* to: 'mymodule.**'
* }
* ```
*
* @example
* ```js
*
* // This matches a transition coming from any state and going to any state that has `data.authRequired`
* // set to a truthy value.
* var match = {
* to: function(state) {
* return state.data != null && state.data.authRequired === true;
* }
* }
* ```
*
* @example
* ```js
*
* // This matches a transition that is exiting `parent.child`
* var match = {
* exiting: 'parent.child'
* }
* ```
*/
export interface HookMatchCriteria {
/** A [[HookMatchCriterion]] to match the destination state */
to?: HookMatchCriterion;
/** A [[HookMatchCriterion]] to match the original (from) state */
from?: HookMatchCriterion;
/** A [[HookMatchCriterion]] to match any state that would be exiting */
exiting?: HookMatchCriterion;
/** A [[HookMatchCriterion]] to match any state that would be retained */
retained?: HookMatchCriterion;
/** A [[HookMatchCriterion]] to match any state that would be entering */
entering?: HookMatchCriterion;
}
export interface IMatchingNodes {
[key: string]: PathNode[];
to: PathNode[];
from: PathNode[];
exiting: PathNode[];
retained: PathNode[];
entering: PathNode[];
}
/**
* Hook Criterion used to match a transition.
*
* A [[Glob]] string that matches the name of a state.
*
* Or, a function with the signature `function(state) { return matches; }`
* which should return a boolean to indicate if a state matches.
*
* Or, `true` to always match
*/
export type HookMatchCriterion = (string|IStateMatch|boolean)
/** @hidden */
export interface IEventHook {
callback: HookFn;
priority?: number;
bind?: any;
matches: (treeChanges: TreeChanges) => IMatchingNodes;
_deregistered: boolean;
}<|fim▁end|> | * @returns a function which deregisters the hook.
*/
onBefore(matchCriteria: HookMatchCriteria, callback: TransitionHookFn, options?: HookRegOptions): Function; |
<|file_name|>exportgpstracks.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand, CommandError
from ship_data.models import GpggaGpsFix
import datetime
from main import utils
import csv
import os
from django.db.models import Q
import glob
from main.management.commands import findgpsgaps
gps_bridge_working_intervals = None
# This file is part of https://github.com/cpina/science-cruise-data-management
#
# This project was programmed in a hurry without any prior Django experience,
# while circumnavigating the Antarctic on the ACE expedition, without proper
# Internet access, with 150 scientists using the system and doing at the same
# cruise other data management and system administration tasks.
#
# Sadly there aren't unit tests and we didn't have time to refactor the code
# during the cruise, which is really needed.
#
# Carles Pina ([email protected]) and Jen Thomas ([email protected]), 2016-2017.
class Command(BaseCommand):
help = 'Outputs the track in CSV format.'
def add_arguments(self, parser):
parser.add_argument('output_directory', type=str, help="Will delete existing files that started on the same start date")
parser.add_argument('start', type=str, help="Start of the GPS data. Format: YYYYMMDD")
parser.add_argument('end', type=str, help="End of the GPS data. Format: YYYYMMDD or 'yesterday'")
def handle(self, *args, **options):
generate_all_tracks(options['output_directory'], options['start'], options['end'])
def generate_all_tracks(output_directory, start, end):
global gps_bridge_working_intervals
gps_gaps = findgpsgaps.FindDataGapsGps("GPS Bridge1", start, end)
gps_bridge_working_intervals = gps_gaps.find_gps_missings()
generate_fast(output_directory, 3600, "1hour", start, end)
generate_fast(output_directory, 300, "5min", start, end)
generate_fast(output_directory, 60, "1min", start, end)
generate_fast(output_directory, 1, "1second", start, end)
def generate_fast(output_directory, seconds, file_suffix, start, end):
    """
    Write one CSV track file sampling roughly every `seconds` seconds.

    This method uses Mysql datetime 'ends with' instead of doing individual queries
    for each 'seconds'. It's faster but harder to find gaps in the data.

    output_directory: destination for "track_<start>_<end>_<suffix>.csv".
    seconds: sampling period; must be one handled by process_day().
    file_suffix: human readable suffix for the file name, e.g. "1hour".
    start: first day to export, "YYYYMMDD".
    end: last day to export, "YYYYMMDD" or "yesterday".

    Older exports with the same start date and suffix (different end date)
    are deleted so only the freshest file remains.
    """
    first_date = utils.set_utc(datetime.datetime.strptime(start, "%Y%m%d"))

    if end == "yesterday":
        last_date = utils.last_midnight()
    else:
        last_date = utils.set_utc(datetime.datetime.strptime(end, "%Y%m%d"))

    starts_file_format = first_date.strftime("%Y%m%d")
    ends_file_format = last_date.strftime("%Y%m%d")

    filename = "track_{}_{}_{}.csv".format(starts_file_format, ends_file_format, file_suffix)

    files_to_delete = glob.glob(os.path.join(output_directory, "track_{}_*_{}.csv".format(starts_file_format,
                                                                                          file_suffix)))

    print("Will start processing:", filename)

    file_path = os.path.join(output_directory, filename)

    if file_path in files_to_delete:
        files_to_delete.remove(file_path)  # In case that this script is re-generating the file

    # Write to a temporary file first so an aborted run never leaves a
    # truncated final file. The context manager guarantees the handle is
    # closed even if process_day() raises (the original leaked it on errors
    # and also shadowed the `file` builtin).
    with open(file_path + ".tmp", "w") as output_file:
        csv_writer = csv.writer(output_file)
        csv_writer.writerow(["date_time", "latitude", "longitude"])

        one_day = datetime.timedelta(days=1)
        current_day = first_date
        while current_day <= last_date:
            process_day(current_day, seconds, csv_writer)
            current_day += one_day

    delete_files(files_to_delete)
    os.rename(file_path + ".tmp", file_path)
def process_day(date_time_process, seconds, csv_writer):
    """
    Write one day's worth of GPS fixes into csv_writer, sampled every `seconds`.

    Sampling works by matching the textual representation of date_time in the
    database (one query per day) instead of issuing one query per sample:
    only fixes taken at second :01 of the relevant minute/hour boundary are
    kept for the coarser resolutions. Supported values: 1, 60, 300, 3600.
    """
    date_time_process_tomorrow = date_time_process + datetime.timedelta(days=1)
    today_filter = Q(date_time__gte=date_time_process) & Q(date_time__lt=date_time_process_tomorrow)

    if seconds == 1:
        query_set = GpggaGpsFix.objects.filter(today_filter).order_by('date_time')
    elif seconds == 60:
        query_set = GpggaGpsFix.objects.filter(today_filter).filter(date_time__contains=':01.').order_by('date_time')
    elif seconds == 300:
        # One sample at second :01 of every 5-minute boundary. Build the OR of
        # the twelve ':MM:01.' patterns instead of hand-writing each clause.
        minute_filters = [Q(date_time__contains=':{:02d}:01.'.format(minute)) for minute in range(0, 60, 5)]
        five_minute_filter = minute_filters[0]
        for extra_filter in minute_filters[1:]:
            five_minute_filter |= extra_filter
        query_set = GpggaGpsFix.objects.filter(today_filter).filter(five_minute_filter).order_by('date_time')
    elif seconds == 3600:
        query_set = GpggaGpsFix.objects.filter(today_filter).filter(date_time__contains=':00:01').order_by('date_time')
    else:
        assert False  # need to add a if case for this

    query_set = query_set.filter(utils.filter_out_bad_values())

    previous_date_time_string = ""
    for gps_info in query_set.iterator():
        date_time_string = gps_info.date_time.strftime("%Y-%m-%d %H:%M:%S")

        # Skip duplicated timestamps (more than one fix in the same second).
        if date_time_string == previous_date_time_string:
            continue

        # Device ids: 64 is "GPS Bridge", 63 is "GPS Trimble". Prefer the
        # bridge GPS whenever it was working at this moment; otherwise fall
        # back to the Trimble. (The original duplicated the row-writing code
        # in both branches; here a single write path is used.)
        wanted_device_id = 64 if which_gps(date_time_string) == "GPS Bridge1" else 63

        if gps_info.device_id == wanted_device_id:
            csv_writer.writerow([date_time_string,
                                 "{:.4f}".format(gps_info.latitude),
                                 "{:.4f}".format(gps_info.longitude)])
            previous_date_time_string = date_time_string
def delete_files(files):
    """Remove every path in `files`, reporting each deletion on stdout."""
    for path in files:
        print("Deleting file:", path)
        os.remove(path)
def generate_method_1(output_directory, seconds, file_suffix):
    """
    Write a track CSV sampling the ship position every `seconds` seconds.

    This method does a query every 'seconds'. Very slow, could be used to find gaps easily on the data.
    As it is now it is difficult to decide which GPS the get comes from.
    """
    time_delta = datetime.timedelta(seconds=seconds)

    first_date = GpggaGpsFix.objects.earliest().date_time
    last_date = GpggaGpsFix.objects.latest().date_time

    filename = "track_{}_{}_{}.csv".format(first_date.strftime("%Y%m%d"), last_date.strftime("%Y%m%d"), file_suffix)
    print("Will start processing:", filename)

    # Bug fix: file_path was used below without ever being assigned
    # (NameError at runtime).
    file_path = os.path.join(output_directory, filename)

    # Context manager guarantees the CSV file is closed even on errors.
    with open(file_path, "w") as output_file:
        csv_writer = csv.writer(output_file)
        csv_writer.writerow(["date_time", "latitude", "longitude"])

        current_date = first_date
        previous_date = current_date

        while current_date < last_date:
            location = utils.ship_location(current_date)

            if location.date_time != previous_date:
                if location.date_time is not None and location.latitude is not None and location.longitude is not None:
                    csv_writer.writerow([location.date_time.strftime("%Y-%m-%d %H:%M:%S"), "{:.4f}".format(location.latitude), "{:.4f}".format(location.longitude)])

                if location.date_time is None:
                    print("No data for:", current_date)

            if previous_date.day != current_date.day:
                # Progress report once per day of data.
                print("Generating CSV GPS info:", current_date)

            previous_date = current_date
            current_date = current_date + time_delta
def which_gps(date_time_str):
    """
    Return "GPS Bridge1" if the bridge GPS was working at date_time_str
    (a "%Y-%m-%d %H:%M:%S" string), otherwise "Trimble GPS".

    Relies on the module-level gps_bridge_working_intervals having been
    filled in by generate_all_tracks() first.
    """
    if any(interval['starts'] < date_time_str <= interval['stops']
           for interval in gps_bridge_working_intervals):
        return "GPS Bridge1"
    return "Trimble GPS"
file_path = os.path.join(output_directory, filename) |
<|file_name|>XMLHttpRequest.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2004, 2006, 2008 Apple Inc. All rights reserved.
* Copyright (C) 2005-2007 Alexey Proskuryakov <[email protected]>
* Copyright (C) 2007, 2008 Julien Chaffraix <[email protected]>
* Copyright (C) 2008, 2011 Google Inc. All rights reserved.
* Copyright (C) 2012 Intel Corporation
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "config.h"
#include "XMLHttpRequest.h"
#include "Blob.h"
#include "BlobData.h"
#include "ContentSecurityPolicy.h"
#include "ContextFeatures.h"
#include "CrossOriginAccessControl.h"
#include "DOMFormData.h"
#include "DOMImplementation.h"
#include "Event.h"
#include "EventException.h"
#include "EventListener.h"
#include "EventNames.h"
#include "ExceptionCode.h"
#include "File.h"
#include "HTMLDocument.h"
#include "HTTPParsers.h"
#include "HistogramSupport.h"
#include "InspectorInstrumentation.h"
#include "JSDOMBinding.h"
#include "JSDOMWindow.h"
#include "MemoryCache.h"
#include "ParsedContentType.h"
#include "ResourceError.h"
#include "ResourceRequest.h"
#include "ScriptCallStack.h"
#include "ScriptController.h"
#include "ScriptProfile.h"
#include "Settings.h"
#include "SharedBuffer.h"
#include "TextResourceDecoder.h"
#include "ThreadableLoader.h"
#include "XMLHttpRequestException.h"
#include "XMLHttpRequestProgressEvent.h"
#include "XMLHttpRequestUpload.h"
#include "markup.h"
#include <heap/Strong.h>
#include <runtime/ArrayBuffer.h>
#include <runtime/ArrayBufferView.h>
#include <runtime/JSLock.h>
#include <runtime/Operations.h>
#include <wtf/RefCountedLeakCounter.h>
#include <wtf/StdLibExtras.h>
#include <wtf/text/CString.h>
#if ENABLE(RESOURCE_TIMING)
#include "CachedResourceRequestInitiators.h"
#endif
namespace WebCore {
// Leak counter for XMLHttpRequest instances; only active in debug builds
// (incremented/decremented in the constructor/destructor below).
DEFINE_DEBUG_ONLY_GLOBAL(WTF::RefCountedLeakCounter, xmlHttpRequestCounter, ("XMLHttpRequest"));
// Histogram enum to see when we can deprecate xhr.send(ArrayBuffer).
// Reported through HistogramSupport by the send(ArrayBuffer*) and
// send(ArrayBufferView*) overloads below.
enum XMLHttpRequestSendArrayBufferOrView {
    XMLHttpRequestSendArrayBuffer,
    XMLHttpRequestSendArrayBufferView,
    XMLHttpRequestSendArrayBufferOrViewMax,
};
// Lazily created singleton holding the request-header names and prefixes
// that script is never allowed to set (consulted by isAllowedHTTPHeader()).
struct XMLHttpRequestStaticData {
    WTF_MAKE_NONCOPYABLE(XMLHttpRequestStaticData); WTF_MAKE_FAST_ALLOCATED;
public:
    XMLHttpRequestStaticData();
    String m_proxyHeaderPrefix; // "proxy-": any header with this prefix is forbidden.
    String m_secHeaderPrefix; // "sec-": any header with this prefix is forbidden.
    HashSet<String, CaseFoldingHash> m_forbiddenRequestHeaders; // Case-insensitive set of exact forbidden names.
};
XMLHttpRequestStaticData::XMLHttpRequestStaticData()
    : m_proxyHeaderPrefix("proxy-")
    , m_secHeaderPrefix("sec-")
{
    // Exact header names that script may never set on a request.
    static const char* const forbiddenHeaders[] = {
        "accept-charset",
        "accept-encoding",
        "access-control-request-headers",
        "access-control-request-method",
        "connection",
        "content-length",
        "content-transfer-encoding",
        "cookie",
        "cookie2",
        "date",
        "expect",
        "host",
        "keep-alive",
        "origin",
        "referer",
        "te",
        "trailer",
        "transfer-encoding",
        "upgrade",
        "user-agent",
        "via"
    };
    for (size_t i = 0; i < sizeof(forbiddenHeaders) / sizeof(forbiddenHeaders[0]); ++i)
        m_forbiddenRequestHeaders.add(forbiddenHeaders[i]);
}
// True when `name` is "set-cookie" or "set-cookie2" (case-insensitive).
static bool isSetCookieHeader(const AtomicString& name)
{
    if (equalIgnoringCase(name, "set-cookie"))
        return true;
    return equalIgnoringCase(name, "set-cookie2");
}
// Rewrites every "charset=..." parameter inside mediaType to charsetValue,
// in place. Leaves mediaType untouched when it carries no charset.
static void replaceCharsetInMediaType(String& mediaType, const String& charsetValue)
{
    // pos/len are out-parameters of findCharsetInMediaType: position and
    // length of the current charset value within mediaType.
    unsigned int pos = 0, len = 0;

    findCharsetInMediaType(mediaType, pos, len);

    if (!len) {
        // When no charset found, do nothing.
        return;
    }

    // Found at least one existing charset, replace all occurrences with new charset.
    while (len) {
        mediaType.replace(pos, len, charsetValue);

        unsigned int start = pos + charsetValue.length();
        findCharsetInMediaType(mediaType, pos, len, start);
    }
}
// Singleton instance; set once by createXMLHttpRequestStaticData() and never freed.
static const XMLHttpRequestStaticData* staticData = 0;
// Allocates the static header data and stashes it in the file-level pointer.
static const XMLHttpRequestStaticData* createXMLHttpRequestStaticData()
{
    staticData = new XMLHttpRequestStaticData;
    return staticData;
}
// One-time initialization of the static header data, guarded by
// AtomicallyInitializedStatic so concurrent first calls are safe.
static const XMLHttpRequestStaticData* initializeXMLHttpRequestStaticData()
{
    // Uses dummy to avoid warnings about an unused variable.
    AtomicallyInitializedStatic(const XMLHttpRequestStaticData*, dummy = createXMLHttpRequestStaticData());
    return dummy;
}
// Reports an error-level message to the context's console; no-op when there
// is no script execution context.
static void logConsoleError(ScriptExecutionContext* context, const String& message)
{
    if (!context)
        return;
    // FIXME: It's not good to report the bad usage without indicating what source line it came from.
    // We should pass additional parameters so we can tell the console where the mistake occurred.
    context->addConsoleMessage(JSMessageSource, ErrorMessageLevel, message);
}
// Factory: every instance is created ref-counted and registered with the
// ActiveDOMObject machinery via suspendIfNeeded().
PassRefPtr<XMLHttpRequest> XMLHttpRequest::create(ScriptExecutionContext* context)
{
    RefPtr<XMLHttpRequest> xmlHttpRequest(adoptRef(new XMLHttpRequest(context)));
    xmlHttpRequest->suspendIfNeeded();

    return xmlHttpRequest.release();
}
// Constructor: starts in the UNSENT state with an async, credential-less
// request and ensures the forbidden-header table exists.
XMLHttpRequest::XMLHttpRequest(ScriptExecutionContext* context)
    : ActiveDOMObject(context)
    , m_async(true)
    , m_includeCredentials(false)
#if ENABLE(XHR_TIMEOUT)
    , m_timeoutMilliseconds(0)
#endif
    , m_state(UNSENT)
    , m_createdDocument(false)
    , m_error(false)
    , m_uploadEventsAllowed(true)
    , m_uploadComplete(false)
    , m_sameOriginRequest(true)
    , m_receivedLength(0)
    , m_lastSendLineNumber(0)
    , m_exceptionCode(0)
    , m_progressEventThrottle(this)
    , m_responseTypeCode(ResponseTypeDefault)
{
    initializeXMLHttpRequestStaticData();
#ifndef NDEBUG
    xmlHttpRequestCounter.increment();
#endif
}
// Destructor: only bookkeeping for the debug-build leak counter.
XMLHttpRequest::~XMLHttpRequest()
{
#ifndef NDEBUG
    xmlHttpRequestCounter.decrement();
#endif
}
// Returns the owning Document; only valid when the context is a Document
// (asserted), i.e. not when running in a worker.
Document* XMLHttpRequest::document() const
{
    ASSERT(scriptExecutionContext()->isDocument());
    return static_cast<Document*>(scriptExecutionContext());
}
// Security origin of the script execution context this request belongs to.
SecurityOrigin* XMLHttpRequest::securityOrigin() const
{
    return scriptExecutionContext()->securityOrigin();
}
#if ENABLE(DASHBOARD_SUPPORT)
// Dashboard compatibility quirks only apply in document (non-worker)
// contexts and are controlled by a per-page setting.
bool XMLHttpRequest::usesDashboardBackwardCompatibilityMode() const
{
    if (scriptExecutionContext()->isWorkerGlobalScope())
        return false;
    Settings* settings = document()->settings();
    return settings && settings->usesDashboardBackwardCompatibilityMode();
}
#endif
// Current readyState (UNSENT through DONE), as exposed to script.
XMLHttpRequest::State XMLHttpRequest::readyState() const
{
    return m_state;
}
// Text of the response body. Throws INVALID_STATE_ERR when responseType is
// set to something other than "" or "text".
String XMLHttpRequest::responseText(ExceptionCode& ec)
{
    if (m_responseTypeCode != ResponseTypeDefault && m_responseTypeCode != ResponseTypeText) {
        ec = INVALID_STATE_ERR;
        return "";
    }
    return m_responseBuilder.toStringPreserveCapacity();
}
// Returns the response parsed as a Document, or 0 when the request is not
// DONE, errored, the MIME type is unsuitable, or parsing produced a
// malformed tree. The document is built lazily on first access and cached
// (m_createdDocument guards re-parsing).
Document* XMLHttpRequest::responseXML(ExceptionCode& ec)
{
    if (m_responseTypeCode != ResponseTypeDefault && m_responseTypeCode != ResponseTypeDocument) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    if (m_error || m_state != DONE)
        return 0;

    if (!m_createdDocument) {
        bool isHTML = equalIgnoringCase(responseMIMEType(), "text/html");

        // The W3C spec requires the final MIME type to be some valid XML type, or text/html.
        // If it is text/html, then the responseType of "document" must have been supplied explicitly.
        if ((m_response.isHTTP() && !responseIsXML() && !isHTML)
            || (isHTML && m_responseTypeCode == ResponseTypeDefault)
            || scriptExecutionContext()->isWorkerGlobalScope()) {
            m_responseDocument = 0;
        } else {
            if (isHTML)
                m_responseDocument = HTMLDocument::create(0, m_url);
            else
                m_responseDocument = Document::create(0, m_url);
            // FIXME: Set Last-Modified.
            m_responseDocument->setContent(m_responseBuilder.toStringPreserveCapacity());
            m_responseDocument->setSecurityOrigin(securityOrigin());
            m_responseDocument->setContextFeatures(document()->contextFeatures());
            if (!m_responseDocument->wellFormed())
                m_responseDocument = 0;
        }
        m_createdDocument = true;
    }

    return m_responseDocument.get();
}
// Returns the response as a Blob (responseType "blob" only); 0 before DONE.
// The Blob is created lazily from the accumulated binary response data and
// cached; on error or empty response an empty Blob is still returned.
Blob* XMLHttpRequest::responseBlob(ExceptionCode& ec)
{
    if (m_responseTypeCode != ResponseTypeBlob) {
        ec = INVALID_STATE_ERR;
        return 0;
    }
    // We always return null before DONE.
    if (m_state != DONE)
        return 0;

    if (!m_responseBlob) {
        // FIXME: This causes two (or more) unnecessary copies of the data.
        // Chromium stores blob data in the browser process, so we're pulling the data
        // from the network only to copy it into the renderer to copy it back to the browser.
        // Ideally we'd get the blob/file-handle from the ResourceResponse directly
        // instead of copying the bytes. Embedders who store blob data in the
        // same process as WebCore would at least to teach BlobData to take
        // a SharedBuffer, even if they don't get the Blob from the network layer directly.
        OwnPtr<BlobData> blobData = BlobData::create();
        // If we errored out or got no data, we still return a blob, just an empty one.
        size_t size = 0;
        if (m_binaryResponseBuilder) {
            RefPtr<RawData> rawData = RawData::create();
            size = m_binaryResponseBuilder->size();
            rawData->mutableData()->append(m_binaryResponseBuilder->data(), size);
            blobData->appendData(rawData, 0, BlobDataItem::toEndOfFile);
            String normalizedContentType = Blob::normalizedContentType(responseMIMEType());
            blobData->setContentType(normalizedContentType); // responseMIMEType defaults to text/xml which may be incorrect.
            m_binaryResponseBuilder.clear();
        }
        m_responseBlob = Blob::create(blobData.release(), size);
    }

    return m_responseBlob.get();
}
// Returns the response as an ArrayBuffer (responseType "arraybuffer" only);
// 0 before DONE. Built lazily from the binary response buffer, which is
// released afterwards, and cached for subsequent calls.
ArrayBuffer* XMLHttpRequest::responseArrayBuffer(ExceptionCode& ec)
{
    if (m_responseTypeCode != ResponseTypeArrayBuffer) {
        ec = INVALID_STATE_ERR;
        return 0;
    }

    if (m_state != DONE)
        return 0;

    if (!m_responseArrayBuffer.get() && m_binaryResponseBuilder.get() && m_binaryResponseBuilder->size() > 0) {
        m_responseArrayBuffer = ArrayBuffer::create(const_cast<char*>(m_binaryResponseBuilder->data()), static_cast<unsigned>(m_binaryResponseBuilder->size()));
        m_binaryResponseBuilder.clear();
    }

    return m_responseArrayBuffer.get();
}
#if ENABLE(XHR_TIMEOUT)
// Sets the request timeout in milliseconds. Disallowed (INVALID_ACCESS_ERR)
// for synchronous requests made from a document context.
void XMLHttpRequest::setTimeout(unsigned long timeout, ExceptionCode& ec)
{
    // FIXME: Need to trigger or update the timeout Timer here, if needed. http://webkit.org/b/98156
    // XHR2 spec, 4.7.3. "This implies that the timeout attribute can be set while fetching is in progress. If that occurs it will still be measured relative to the start of fetching."
    if (scriptExecutionContext()->isDocument() && !m_async) {
        logConsoleError(scriptExecutionContext(), "XMLHttpRequest.timeout cannot be set for synchronous HTTP(S) requests made from the window context.");
        ec = INVALID_ACCESS_ERR;
        return;
    }
    m_timeoutMilliseconds = timeout;
}
#endif
// Sets responseType from its string form. Throws INVALID_STATE_ERR once the
// request has reached LOADING, and INVALID_ACCESS_ERR for sync HTTP(S)
// requests in a window context. Unknown strings hit ASSERT_NOT_REACHED
// (callers are expected to pass only IDL-enumerated values).
void XMLHttpRequest::setResponseType(const String& responseType, ExceptionCode& ec)
{
    if (m_state >= LOADING) {
        ec = INVALID_STATE_ERR;
        return;
    }

    // Newer functionality is not available to synchronous requests in window contexts, as a spec-mandated
    // attempt to discourage synchronous XHR use. responseType is one such piece of functionality.
    // We'll only disable this functionality for HTTP(S) requests since sync requests for local protocols
    // such as file: and data: still make sense to allow.
    if (!m_async && scriptExecutionContext()->isDocument() && m_url.protocolIsInHTTPFamily()) {
        logConsoleError(scriptExecutionContext(), "XMLHttpRequest.responseType cannot be changed for synchronous HTTP(S) requests made from the window context.");
        ec = INVALID_ACCESS_ERR;
        return;
    }

    if (responseType == "")
        m_responseTypeCode = ResponseTypeDefault;
    else if (responseType == "text")
        m_responseTypeCode = ResponseTypeText;
    else if (responseType == "document")
        m_responseTypeCode = ResponseTypeDocument;
    else if (responseType == "blob")
        m_responseTypeCode = ResponseTypeBlob;
    else if (responseType == "arraybuffer")
        m_responseTypeCode = ResponseTypeArrayBuffer;
    else
        ASSERT_NOT_REACHED();
}
// String form of the current responseType code (inverse of setResponseType).
String XMLHttpRequest::responseType()
{
    switch (m_responseTypeCode) {
    case ResponseTypeDefault:
        return "";
    case ResponseTypeText:
        return "text";
    case ResponseTypeDocument:
        return "document";
    case ResponseTypeBlob:
        return "blob";
    case ResponseTypeArrayBuffer:
        return "arraybuffer";
    }
    return "";
}
// Lazily creates and returns the upload object on first access.
XMLHttpRequestUpload* XMLHttpRequest::upload()
{
    if (!m_upload)
        m_upload = XMLHttpRequestUpload::create(this);
    return m_upload.get();
}
// Transitions readyState and fires readystatechange; no event when the
// state is unchanged.
void XMLHttpRequest::changeState(State newState)
{
    if (m_state == newState)
        return;
    m_state = newState;
    callReadyStateChangeListener();
}
// Dispatches readystatechange (throttled), and on DONE without error also
// fires load and loadend, with inspector instrumentation around each.
void XMLHttpRequest::callReadyStateChangeListener()
{
    if (!scriptExecutionContext())
        return;

    InspectorInstrumentationCookie cookie = InspectorInstrumentation::willDispatchXHRReadyStateChangeEvent(scriptExecutionContext(), this);

    // For sync requests, intermediate LOADING states are not reported;
    // readystatechange fires only up to OPENED and at DONE.
    if (m_async || (m_state <= OPENED || m_state == DONE))
        m_progressEventThrottle.dispatchReadyStateChangeEvent(XMLHttpRequestProgressEvent::create(eventNames().readystatechangeEvent), m_state == DONE ? FlushProgressEvent : DoNotFlushProgressEvent);

    InspectorInstrumentation::didDispatchXHRReadyStateChangeEvent(cookie);
    if (m_state == DONE && !m_error) {
        InspectorInstrumentationCookie cookie = InspectorInstrumentation::willDispatchXHRLoadEvent(scriptExecutionContext(), this);
        m_progressEventThrottle.dispatchEvent(XMLHttpRequestProgressEvent::create(eventNames().loadEvent));
        InspectorInstrumentation::didDispatchXHRLoadEvent(cookie);
        m_progressEventThrottle.dispatchEvent(XMLHttpRequestProgressEvent::create(eventNames().loadendEvent));
    }
}
// Sets withCredentials; only legal before send() (state <= OPENED and no
// active loader), otherwise INVALID_STATE_ERR.
void XMLHttpRequest::setWithCredentials(bool value, ExceptionCode& ec)
{
    if (m_state > OPENED || m_loader) {
        ec = INVALID_STATE_ERR;
        return;
    }

    m_includeCredentials = value;
}
// Rejects the methods that XMLHttpRequest refuses to issue
// (TRACE, TRACK, CONNECT), compared case-insensitively.
bool XMLHttpRequest::isAllowedHTTPMethod(const String& method)
{
    if (equalIgnoringCase(method, "TRACE"))
        return false;
    if (equalIgnoringCase(method, "TRACK"))
        return false;
    if (equalIgnoringCase(method, "CONNECT"))
        return false;
    return true;
}
// Normalizes well-known HTTP method names to upper case; any method not in
// the known list is returned unchanged (preserving the caller's casing).
String XMLHttpRequest::uppercaseKnownHTTPMethod(const String& method)
{
    static const char* const knownMethods[] = {
        "COPY", "DELETE", "GET", "HEAD", "INDEX", "LOCK", "M-POST", "MKCOL",
        "MOVE", "OPTIONS", "POST", "PROPFIND", "PROPPATCH", "PUT", "UNLOCK"
    };
    for (size_t i = 0; i < sizeof(knownMethods) / sizeof(knownMethods[0]); ++i) {
        if (equalIgnoringCase(method, knownMethods[i]))
            return method.upper();
    }
    return method;
}
// True when script may set this request header: it must not be in the
// forbidden set nor start with "proxy-" or "sec-" (case-insensitive).
bool XMLHttpRequest::isAllowedHTTPHeader(const String& name)
{
    initializeXMLHttpRequestStaticData();
    return !staticData->m_forbiddenRequestHeaders.contains(name) && !name.startsWith(staticData->m_proxyHeaderPrefix, false)
        && !name.startsWith(staticData->m_secHeaderPrefix, false);
}
// Two-argument open(): defaults to an asynchronous request.
void XMLHttpRequest::open(const String& method, const KURL& url, ExceptionCode& ec)
{
    open(method, url, true, ec);
}
// Core open() implementation: aborts any in-flight request, resets state,
// validates the method (SYNTAX_ERR for invalid tokens, SECURITY_ERR for
// disallowed methods), checks Content Security Policy, enforces the
// restrictions on synchronous requests, then transitions to OPENED.
void XMLHttpRequest::open(const String& method, const KURL& url, bool async, ExceptionCode& ec)
{
    internalAbort();
    State previousState = m_state;
    m_state = UNSENT;
    m_error = false;
    m_uploadComplete = false;

    // clear stuff from possible previous load
    clearResponse();
    clearRequest();

    ASSERT(m_state == UNSENT);

    if (!isValidHTTPToken(method)) {
        ec = SYNTAX_ERR;
        return;
    }

    if (!isAllowedHTTPMethod(method)) {
        ec = SECURITY_ERR;
        return;
    }

    // FIXME: Convert this to check the isolated world's Content Security Policy once webkit.org/b/104520 is solved.
    bool shouldBypassMainWorldContentSecurityPolicy = false;
    if (scriptExecutionContext()->isDocument()) {
        Document* document = static_cast<Document*>(scriptExecutionContext());
        if (document->frame())
            shouldBypassMainWorldContentSecurityPolicy = document->frame()->script().shouldBypassMainWorldContentSecurityPolicy();
    }
    if (!shouldBypassMainWorldContentSecurityPolicy && !scriptExecutionContext()->contentSecurityPolicy()->allowConnectToSource(url)) {
        // FIXME: Should this be throwing an exception?
        ec = SECURITY_ERR;
        return;
    }

    if (!async && scriptExecutionContext()->isDocument()) {
        if (document()->settings() && !document()->settings()->syncXHRInDocumentsEnabled()) {
            logConsoleError(scriptExecutionContext(), "Synchronous XMLHttpRequests are disabled for this page.");
            ec = INVALID_ACCESS_ERR;
            return;
        }

        // Newer functionality is not available to synchronous requests in window contexts, as a spec-mandated
        // attempt to discourage synchronous XHR use. responseType is one such piece of functionality.
        // We'll only disable this functionality for HTTP(S) requests since sync requests for local protocols
        // such as file: and data: still make sense to allow.
        if (url.protocolIsInHTTPFamily() && m_responseTypeCode != ResponseTypeDefault) {
            logConsoleError(scriptExecutionContext(), "Synchronous HTTP(S) requests made from the window context cannot have XMLHttpRequest.responseType set.");
            ec = INVALID_ACCESS_ERR;
            return;
        }

#if ENABLE(XHR_TIMEOUT)
        // Similarly, timeouts are disabled for synchronous requests as well.
        if (m_timeoutMilliseconds > 0) {
            logConsoleError(scriptExecutionContext(), "Synchronous XMLHttpRequests must not have a timeout value set.");
            ec = INVALID_ACCESS_ERR;
            return;
        }
#endif
    }

    m_method = uppercaseKnownHTTPMethod(method);

    m_url = url;

    m_async = async;

    ASSERT(!m_loader);

    // Check previous state to avoid dispatching readyState event
    // when calling open several times in a row.
    if (previousState != OPENED)
        changeState(OPENED);
    else
        m_state = OPENED;
}
// open() variant embedding a user name in the URL's credentials.
void XMLHttpRequest::open(const String& method, const KURL& url, bool async, const String& user, ExceptionCode& ec)
{
    KURL urlWithCredentials(url);
    urlWithCredentials.setUser(user);

    open(method, urlWithCredentials, async, ec);
}
// open() variant embedding both user name and password in the URL.
void XMLHttpRequest::open(const String& method, const KURL& url, bool async, const String& user, const String& password, ExceptionCode& ec)
{
    KURL urlWithCredentials(url);
    urlWithCredentials.setUser(user);
    urlWithCredentials.setPass(password);

    open(method, urlWithCredentials, async, ec);
}
// Shared precondition check for all send() overloads: requires a live
// context and the OPENED state with no request already in flight
// (INVALID_STATE_ERR otherwise). Clears the error flag on success.
bool XMLHttpRequest::initSend(ExceptionCode& ec)
{
    if (!scriptExecutionContext())
        return false;

    if (m_state != OPENED || m_loader) {
        ec = INVALID_STATE_ERR;
        return false;
    }

    m_error = false;
    return true;
}
// send() with no body: delegates to the String overload with a null string.
void XMLHttpRequest::send(ExceptionCode& ec)
{
    send(String(), ec);
}
// send() with a Document body: serializes the document to markup, encodes
// it as UTF-8 and submits it. Defaults Content-Type to application/xml
// (or application/x-www-form-urlencoded in Dashboard compatibility mode)
// when the caller has not set one. Bodies are only attached for non-GET/
// HEAD methods over HTTP(S).
void XMLHttpRequest::send(Document* document, ExceptionCode& ec)
{
    ASSERT(document);

    if (!initSend(ec))
        return;

    if (m_method != "GET" && m_method != "HEAD" && m_url.protocolIsInHTTPFamily()) {
        String contentType = getRequestHeader("Content-Type");
        if (contentType.isEmpty()) {
#if ENABLE(DASHBOARD_SUPPORT)
            if (usesDashboardBackwardCompatibilityMode())
                setRequestHeaderInternal("Content-Type", "application/x-www-form-urlencoded");
            else
#endif
                // FIXME: this should include the charset used for encoding.
                setRequestHeaderInternal("Content-Type", "application/xml");
        }

        // FIXME: According to XMLHttpRequest Level 2, this should use the Document.innerHTML algorithm
        // from the HTML5 specification to serialize the document.
        String body = createMarkup(document);

        // FIXME: this should use value of document.inputEncoding to determine the encoding to use.
        TextEncoding encoding = UTF8Encoding();
        m_requestEntityBody = FormData::create(encoding.encode(body.characters(), body.length(), EntitiesForUnencodables));
        if (m_upload)
            m_requestEntityBody->setAlwaysStream(true);
    }

    createRequest(ec);
}
// send() with a string body: encodes the body as UTF-8. When the caller
// supplied a Content-Type its charset parameter is rewritten to UTF-8;
// otherwise application/xml is used as the default. A null string sends
// no body.
void XMLHttpRequest::send(const String& body, ExceptionCode& ec)
{
    if (!initSend(ec))
        return;

    if (!body.isNull() && m_method != "GET" && m_method != "HEAD" && m_url.protocolIsInHTTPFamily()) {
        String contentType = getRequestHeader("Content-Type");
        if (contentType.isEmpty()) {
#if ENABLE(DASHBOARD_SUPPORT)
            if (usesDashboardBackwardCompatibilityMode())
                setRequestHeaderInternal("Content-Type", "application/x-www-form-urlencoded");
            else
#endif
                setRequestHeaderInternal("Content-Type", "application/xml");
        } else {
            replaceCharsetInMediaType(contentType, "UTF-8");
            m_requestHeaders.set("Content-Type", contentType);
        }

        m_requestEntityBody = FormData::create(UTF8Encoding().encode(body.characters(), body.length(), EntitiesForUnencodables));
        if (m_upload)
            m_requestEntityBody->setAlwaysStream(true);
    }

    createRequest(ec);
}
// send() with a Blob body: uses the blob's own type as Content-Type when
// valid (empty string otherwise, per the File API). File-backed blobs are
// appended by path; other blobs by blob URL (BLOB builds only).
void XMLHttpRequest::send(Blob* body, ExceptionCode& ec)
{
    if (!initSend(ec))
        return;

    if (m_method != "GET" && m_method != "HEAD" && m_url.protocolIsInHTTPFamily()) {
        const String& contentType = getRequestHeader("Content-Type");
        if (contentType.isEmpty()) {
            const String& blobType = body->type();
            if (!blobType.isEmpty() && isValidContentType(blobType))
                setRequestHeaderInternal("Content-Type", blobType);
            else {
                // From FileAPI spec, whenever media type cannot be determined, empty string must be returned.
                setRequestHeaderInternal("Content-Type", "");
            }
        }

        // FIXME: add support for uploading bundles.
        m_requestEntityBody = FormData::create();
        if (body->isFile())
            m_requestEntityBody->appendFile(toFile(body)->path());
#if ENABLE(BLOB)
        else
            m_requestEntityBody->appendBlob(body->url());
#endif
    }

    createRequest(ec);
}
// send() with a DOMFormData body: builds a multipart/form-data payload and
// defaults Content-Type (including the generated boundary) when unset.
void XMLHttpRequest::send(DOMFormData* body, ExceptionCode& ec)
{
    if (!initSend(ec))
        return;

    if (m_method != "GET" && m_method != "HEAD" && m_url.protocolIsInHTTPFamily()) {
        m_requestEntityBody = FormData::createMultiPart(*(static_cast<FormDataList*>(body)), body->encoding(), document());

        // We need to ask the client to provide the generated file names if needed. When FormData fills the element
        // for the file, it could set a flag to use the generated file name, i.e. a package file on Mac.
        m_requestEntityBody->generateFiles(document());

        String contentType = getRequestHeader("Content-Type");
        if (contentType.isEmpty()) {
            contentType = makeString("multipart/form-data; boundary=", m_requestEntityBody->boundary().data());
            setRequestHeaderInternal("Content-Type", contentType);
        }
    }

    createRequest(ec);
}
void XMLHttpRequest::send(ArrayBuffer* body, ExceptionCode& ec)
{
String consoleMessage("ArrayBuffer is deprecated in XMLHttpRequest.send(). Use ArrayBufferView instead.");
scriptExecutionContext()->addConsoleMessage(JSMessageSource, WarningMessageLevel, consoleMessage);
HistogramSupport::histogramEnumeration("WebCore.XHR.send.ArrayBufferOrView", XMLHttpRequestSendArrayBuffer, XMLHttpRequestSendArrayBufferOrViewMax);
sendBytesData(body->data(), body->byteLength(), ec);
}
// send() with an ArrayBufferView body: records a histogram sample and
// forwards the view's bytes.
void XMLHttpRequest::send(ArrayBufferView* body, ExceptionCode& ec)
{
    HistogramSupport::histogramEnumeration("WebCore.XHR.send.ArrayBufferOrView", XMLHttpRequestSendArrayBufferView, XMLHttpRequestSendArrayBufferOrViewMax);

    sendBytesData(body->baseAddress(), body->byteLength(), ec);
}
// Shared helper for the binary send() overloads: wraps raw bytes in a
// FormData body (non-GET/HEAD HTTP(S) requests only) and issues the request.
void XMLHttpRequest::sendBytesData(const void* data, size_t length, ExceptionCode& ec)
{
    if (!initSend(ec))
        return;

    if (m_method != "GET" && m_method != "HEAD" && m_url.protocolIsInHTTPFamily()) {
        m_requestEntityBody = FormData::create(data, length);
        if (m_upload)
            m_requestEntityBody->setAlwaysStream(true);
    }

    createRequest(ec);
}
// Inspector entry point: replays a request with a pre-built (deep-copied)
// FormData body, bypassing the public send() validation.
void XMLHttpRequest::sendFromInspector(PassRefPtr<FormData> formData, ExceptionCode& ec)
{
    m_requestEntityBody = formData ? formData->deepCopy() : 0;
    createRequest(ec);
    m_exceptionCode = ec;
}
// Builds the ResourceRequest and starts loading: async requests go through
// a ThreadableLoader (with pending-activity protection so the object
// outlives the load); sync requests block in loadResourceSynchronously.
// Sets ec to NETWORK_ERR when the load fails without a more specific code.
void XMLHttpRequest::createRequest(ExceptionCode& ec)
{
#if ENABLE(BLOB)
    // Only GET request is supported for blob URL.
    if (m_url.protocolIs("blob") && m_method != "GET") {
        ec = XMLHttpRequestException::NETWORK_ERR;
        return;
    }
#endif

    // The presence of upload event listeners forces us to use preflighting because POSTing to an URL that does not
    // permit cross origin requests should look exactly like POSTing to an URL that does not respond at all.
    // Also, only async requests support upload progress events.
    bool uploadEvents = false;
    if (m_async) {
        m_progressEventThrottle.dispatchEvent(XMLHttpRequestProgressEvent::create(eventNames().loadstartEvent));
        if (m_requestEntityBody && m_upload) {
            uploadEvents = m_upload->hasEventListeners();
            m_upload->dispatchEvent(XMLHttpRequestProgressEvent::create(eventNames().loadstartEvent));
        }
    }

    m_sameOriginRequest = securityOrigin()->canRequest(m_url);

    // We also remember whether upload events should be allowed for this request in case the upload listeners are
    // added after the request is started.
    m_uploadEventsAllowed = m_sameOriginRequest || uploadEvents || !isSimpleCrossOriginAccessRequest(m_method, m_requestHeaders);

    ResourceRequest request(m_url);
    request.setHTTPMethod(m_method);
#if PLATFORM(BLACKBERRY)
    request.setTargetType(ResourceRequest::TargetIsXHR);
#endif

    InspectorInstrumentation::willLoadXHR(scriptExecutionContext(), this, m_method, m_url, m_async, m_requestEntityBody ? m_requestEntityBody->deepCopy() : 0, m_requestHeaders, m_includeCredentials);

    if (m_requestEntityBody) {
        ASSERT(m_method != "GET");
        ASSERT(m_method != "HEAD");
        request.setHTTPBody(m_requestEntityBody.release());
    }

    if (m_requestHeaders.size() > 0)
        request.addHTTPHeaderFields(m_requestHeaders);

    ThreadableLoaderOptions options;
    options.sendLoadCallbacks = SendCallbacks;
    options.sniffContent = DoNotSniffContent;
    options.preflightPolicy = uploadEvents ? ForcePreflight : ConsiderPreflight;
    options.allowCredentials = (m_sameOriginRequest || m_includeCredentials) ? AllowStoredCredentials : DoNotAllowStoredCredentials;
    options.crossOriginRequestPolicy = UseAccessControl;
    options.securityOrigin = securityOrigin();
#if ENABLE(RESOURCE_TIMING)
    options.initiator = cachedResourceRequestInitiators().xmlhttprequest;
#endif

#if ENABLE(XHR_TIMEOUT)
    if (m_timeoutMilliseconds)
        request.setTimeoutInterval(m_timeoutMilliseconds / 1000.0);
#endif

    m_exceptionCode = 0;
    m_error = false;

    if (m_async) {
        if (m_upload)
            request.setReportUploadProgress(true);

        // ThreadableLoader::create can return null here, for example if we're no longer attached to a page.
        // This is true while running onunload handlers.
        // FIXME: Maybe we need to be able to send XMLHttpRequests from onunload, <http://bugs.webkit.org/show_bug.cgi?id=10904>.
        // FIXME: Maybe create() can return null for other reasons too?
        m_loader = ThreadableLoader::create(scriptExecutionContext(), this, request, options);
        if (m_loader) {
            // Neither this object nor the JavaScript wrapper should be deleted while
            // a request is in progress because we need to keep the listeners alive,
            // and they are referenced by the JavaScript wrapper.
            setPendingActivity(this);
        }
    } else {
        InspectorInstrumentation::willLoadXHRSynchronously(scriptExecutionContext());
        ThreadableLoader::loadResourceSynchronously(scriptExecutionContext(), request, *this, options);
        InspectorInstrumentation::didLoadXHRSynchronously(scriptExecutionContext());
    }

    if (!m_exceptionCode && m_error)
        m_exceptionCode = XMLHttpRequestException::NETWORK_ERR;
    ec = m_exceptionCode;
}
// Implements the XHR abort() API: cancels any in-flight load, resets request
// state per the spec, and fires abort/loadend progress events.
void XMLHttpRequest::abort()
{
    // internalAbort() calls dropProtection(), which may release the last reference.
    RefPtr<XMLHttpRequest> protect(this);

    // Remember whether a network load was in flight; internalAbort() nulls
    // out m_loader, so sample it first.
    bool sendFlag = m_loader;

    internalAbort();
    clearResponseBuffers();

    // Clear headers as required by the spec
    m_requestHeaders.clear();

    // Return straight to UNSENT unless a send was in progress; in that case
    // transition through DONE first so readystatechange fires.
    if ((m_state <= OPENED && !sendFlag) || m_state == DONE)
        m_state = UNSENT;
    else {
        ASSERT(!m_loader);
        changeState(DONE);
        m_state = UNSENT;
    }

    // Fire abort (and the implied loadend) on the XHR object itself...
    m_progressEventThrottle.dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().abortEvent));
    // ...and on the upload object, if the upload phase had not yet finished.
    if (!m_uploadComplete) {
        m_uploadComplete = true;
        if (m_upload && m_uploadEventsAllowed)
            m_upload->dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().abortEvent));
    }
}
// Cancels the in-flight load without dispatching any JS-visible events.
// Shared by abort(), the error paths, timeouts, and stop().
void XMLHttpRequest::internalAbort()
{
    bool hadLoader = m_loader;

    // Mark the request as errored so late loader callbacks become no-ops.
    m_error = true;

    // FIXME: when we add the support for multi-part XHR, we will have to be careful with this initialization.
    m_receivedLength = 0;

    if (hadLoader) {
        m_loader->cancel();
        m_loader = 0;
    }

    m_decoder = 0;

    InspectorInstrumentation::didFailXHRLoading(scriptExecutionContext(), this);

    // Balance the setPendingActivity() taken when the load started; this may
    // release the last reference to |this|, so callers must protect themselves.
    if (hadLoader)
        dropProtection();
}
// Resets the stored response (headers/status) and every cached body buffer.
void XMLHttpRequest::clearResponse()
{
    m_response = ResourceResponse();
    clearResponseBuffers();
}
// Drops every cached representation of the response body (text, document,
// blob, array buffer) along with the remembered text encoding.
void XMLHttpRequest::clearResponseBuffers()
{
    m_responseBuilder.clear();
    m_responseEncoding = String();
    m_createdDocument = false;
    m_responseDocument = 0;
    m_responseBlob = 0;
    m_binaryResponseBuilder.clear();
    m_responseArrayBuffer.clear();
}
// Resets the outgoing request state: recorded headers and entity body.
void XMLHttpRequest::clearRequest()
{
    m_requestHeaders.clear();
    m_requestEntityBody = 0;
}
// Common error path: wipe request/response state, flag the error, and move
// to DONE (firing readystatechange). Callers then fire the specific event.
void XMLHttpRequest::genericError()
{
    clearResponse();
    clearRequest();
    m_error = true;

    changeState(DONE);
}
// Handles a network-level failure: fires 'error' (plus loadend) on the
// upload object if the upload phase was still pending, then on the XHR
// itself, and finally tears down the loader.
void XMLHttpRequest::networkError()
{
    genericError();
    if (!m_uploadComplete) {
        m_uploadComplete = true;
        if (m_upload && m_uploadEventsAllowed)
            m_upload->dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().errorEvent));
    }
    m_progressEventThrottle.dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().errorEvent));
    internalAbort();
}
// Error path used when the load was cancelled: fires 'abort' (plus loadend)
// on the upload object if the upload phase was still pending, then on the
// XHR itself.
void XMLHttpRequest::abortError()
{
    genericError();
    if (!m_uploadComplete) {
        m_uploadComplete = true;
        if (m_upload && m_uploadEventsAllowed)
            m_upload->dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().abortEvent));
    }
    m_progressEventThrottle.dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().abortEvent));
}
// Releases the self-protection taken when the load started, and tells the
// JS heap how much memory the buffered response text now holds.
void XMLHttpRequest::dropProtection()
{
    // The XHR object itself holds on to the responseText, and
    // thus has extra cost even independent of any
    // responseText or responseXML objects it has handed
    // out. But it is protected from GC while loading, so this
    // can't be recouped until the load is done, so only
    // report the extra cost at that point.
    JSC::VM* vm = scriptExecutionContext()->vm();
    JSC::JSLockHolder lock(vm);
    // The builder stores UTF-16 code units: two bytes per unit.
    vm->heap.reportExtraMemoryCost(m_responseBuilder.length() * 2);

    unsetPendingActivity(this);
}
// Implements the XHR overrideMimeType() API; the override is consulted when
// the response arrives (didReceiveResponse() / responseMIMEType()).
void XMLHttpRequest::overrideMimeType(const String& override)
{
    m_mimeTypeOverride = override;
}
// Implements the XHR setRequestHeader() API: validates state and the
// name/value syntax, refuses forbidden headers, then records the header.
void XMLHttpRequest::setRequestHeader(const AtomicString& name, const String& value, ExceptionCode& ec)
{
    // Headers may only be set after open() and before send() starts a load.
    if (m_state != OPENED || m_loader) {
#if ENABLE(DASHBOARD_SUPPORT)
        // Dashboard widgets historically ignored this error instead of throwing.
        if (usesDashboardBackwardCompatibilityMode())
            return;
#endif

        ec = INVALID_STATE_ERR;
        return;
    }

    if (!isValidHTTPToken(name) || !isValidHTTPHeaderValue(value)) {
        ec = SYNTAX_ERR;
        return;
    }

    // A privileged script (e.g. a Dashboard widget) can set any headers.
    if (!securityOrigin()->canLoadLocalResources() && !isAllowedHTTPHeader(name)) {
        logConsoleError(scriptExecutionContext(), "Refused to set unsafe header \"" + name + "\"");
        return;
    }

    setRequestHeaderInternal(name, value);
}
void XMLHttpRequest::setRequestHeaderInternal(const AtomicString& name, const String& value)
{
HTTPHeaderMap::AddResult result = m_requestHeaders.add(name, value);
if (!result.isNewEntry)
result.iterator->value.append(", " + value);
}
// Returns the value previously recorded for |name|, or the null String.
String XMLHttpRequest::getRequestHeader(const AtomicString& name) const
{
    return m_requestHeaders.get(name);
}
// Implements the XHR getAllResponseHeaders() API: serializes the visible
// response headers as "name: value\r\n" lines, hiding Set-Cookie and, for
// cross-origin requests, any non-exposed headers.
String XMLHttpRequest::getAllResponseHeaders(ExceptionCode& ec) const
{
    if (m_state < HEADERS_RECEIVED) {
        ec = INVALID_STATE_ERR;
        return "";
    }

    HTTPHeaderSet accessControlExposeHeaderSet;
    parseAccessControlExposeHeadersAllowList(m_response.httpHeaderField("Access-Control-Expose-Headers"), accessControlExposeHeaderSet);

    StringBuilder result;
    HTTPHeaderMap::const_iterator end = m_response.httpHeaderFields().end();
    for (HTTPHeaderMap::const_iterator header = m_response.httpHeaderFields().begin(); header != end; ++header) {
        // Hide Set-Cookie header fields from the XMLHttpRequest client for these reasons:
        //   1) If the client did have access to the fields, then it could read HTTP-only
        //      cookies; those cookies are supposed to be hidden from scripts.
        //   2) There's no known harm in hiding Set-Cookie header fields entirely; we don't
        //      know any widely used technique that requires access to them.
        //   3) Firefox has implemented this policy.
        if (isSetCookieHeader(header->key) && !securityOrigin()->canLoadLocalResources())
            continue;

        // Cross-origin responses only reveal safelisted headers plus those in
        // Access-Control-Expose-Headers.
        bool exposed = m_sameOriginRequest || isOnAccessControlResponseHeaderWhitelist(header->key) || accessControlExposeHeaderSet.contains(header->key);
        if (!exposed)
            continue;

        result.append(header->key);
        result.append(':');
        result.append(' ');
        result.append(header->value);
        result.append('\r');
        result.append('\n');
    }

    return result.toString();
}
// Implements the XHR getResponseHeader() API, enforcing the same Set-Cookie
// and cross-origin filtering as getAllResponseHeaders().
String XMLHttpRequest::getResponseHeader(const AtomicString& name, ExceptionCode& ec) const
{
    // Headers are not available before the HEADERS_RECEIVED state.
    if (m_state < HEADERS_RECEIVED) {
        ec = INVALID_STATE_ERR;
        return String();
    }

    // See comment in getAllResponseHeaders above.
    if (isSetCookieHeader(name) && !securityOrigin()->canLoadLocalResources()) {
        logConsoleError(scriptExecutionContext(), "Refused to get unsafe header \"" + name + "\"");
        return String();
    }

    // For cross-origin requests, only safelisted headers and those the server
    // explicitly exposed via Access-Control-Expose-Headers are visible.
    HTTPHeaderSet accessControlExposeHeaderSet;
    parseAccessControlExposeHeadersAllowList(m_response.httpHeaderField("Access-Control-Expose-Headers"), accessControlExposeHeaderSet);
    if (!m_sameOriginRequest && !isOnAccessControlResponseHeaderWhitelist(name) && !accessControlExposeHeaderSet.contains(name)) {
        logConsoleError(scriptExecutionContext(), "Refused to get unsafe header \"" + name + "\"");
        return String();
    }
    return m_response.httpHeaderField(name);
}
// Returns the effective MIME type of the response: an overrideMimeType()
// value wins, then the HTTP Content-Type (or the resource MIME type for
// non-HTTP loads), defaulting to "text/xml".
String XMLHttpRequest::responseMIMEType() const
{
    String mimeType = extractMIMETypeFromMediaType(m_mimeTypeOverride);
    if (!mimeType.isEmpty())
        return mimeType;

    if (m_response.isHTTP())
        mimeType = extractMIMETypeFromMediaType(m_response.httpHeaderField("Content-Type"));
    else
        mimeType = m_response.mimeType();
    if (!mimeType.isEmpty())
        return mimeType;

    return "text/xml";
}
// True if the effective response MIME type is an XML type (used to decide
// whether responseXML should parse the body).
bool XMLHttpRequest::responseIsXML() const
{
    // FIXME: Remove the lower() call when DOMImplementation.isXMLMIMEType() is modified
    // to do case insensitive MIME type matching.
    return DOMImplementation::isXMLMIMEType(responseMIMEType().lower());
}
// Implements the XHR status API: the HTTP status code if one was received,
// otherwise 0 (raising INVALID_STATE_ERR in the OPENED state).
int XMLHttpRequest::status(ExceptionCode& ec) const
{
    int httpStatus = m_response.httpStatusCode();
    if (httpStatus)
        return httpStatus;

    if (m_state == OPENED) {
        // Firefox only raises an exception in this state; we match it.
        // Note the case of local file requests, where we have no HTTP response code! Firefox never raises an exception for those, but we match HTTP case for consistency.
        ec = INVALID_STATE_ERR;
    }

    return 0;
}
// Implements the XHR statusText API; mirrors the logic of status() above.
String XMLHttpRequest::statusText(ExceptionCode& ec) const
{
    if (!m_response.httpStatusText().isNull())
        return m_response.httpStatusText();

    if (m_state == OPENED) {
        // See comments in status() above.
        ec = INVALID_STATE_ERR;
    }

    return String();
}
// ThreadableLoaderClient callback: maps loader failures onto the XHR error
// model (abort vs. timeout vs. network error).
void XMLHttpRequest::didFail(const ResourceError& error)
{
    // If we are already in an error state, for instance we called abort(), bail out early.
    if (m_error)
        return;

    if (error.isCancellation()) {
        m_exceptionCode = XMLHttpRequestException::ABORT_ERR;
        abortError();
        return;
    }

#if ENABLE(XHR_TIMEOUT)
    if (error.isTimeout()) {
        didTimeout();
        return;
    }
#endif

    // Network failures are already reported to Web Inspector by ResourceLoader.
    if (error.domain() == errorDomainWebKitInternal)
        logConsoleError(scriptExecutionContext(), "XMLHttpRequest cannot load " + error.failingURL() + ". " + error.localizedDescription());

    m_exceptionCode = XMLHttpRequestException::NETWORK_ERR;
    networkError();
}
// ThreadableLoaderClient callback: a cross-origin redirect failed the access
// check; surface it as a plain network error.
void XMLHttpRequest::didFailRedirectCheck()
{
    networkError();
}
// ThreadableLoaderClient callback: the load completed successfully. Flushes
// the decoder, notifies the inspector, and transitions to DONE.
void XMLHttpRequest::didFinishLoading(unsigned long identifier, double)
{
    if (m_error)
        return;

    if (m_state < HEADERS_RECEIVED)
        changeState(HEADERS_RECEIVED);

    // Flush any bytes the decoder is still buffering (e.g. a partial
    // multi-byte sequence) into the response text.
    if (m_decoder)
        m_responseBuilder.append(m_decoder->flush());

    m_responseBuilder.shrinkToFit();

    InspectorInstrumentation::didFinishXHRLoading(scriptExecutionContext(), this, identifier, m_responseBuilder.toStringPreserveCapacity(), m_url, m_lastSendURL, m_lastSendLineNumber);

    bool hadLoader = m_loader;
    m_loader = 0;

    changeState(DONE);
    m_responseEncoding = String();
    m_decoder = 0;

    // Balance setPendingActivity(); may release the last reference to |this|.
    if (hadLoader)
        dropProtection();
}
// ThreadableLoaderClient callback: upload progress. Fires 'progress' on the
// upload object, and 'load' (plus loadend) once the last byte has been sent.
void XMLHttpRequest::didSendData(unsigned long long bytesSent, unsigned long long totalBytesToBeSent)
{
    if (!m_upload)
        return;

    if (m_uploadEventsAllowed)
        m_upload->dispatchEvent(XMLHttpRequestProgressEvent::create(eventNames().progressEvent, true, bytesSent, totalBytesToBeSent));

    if (bytesSent == totalBytesToBeSent && !m_uploadComplete) {
        m_uploadComplete = true;
        if (m_uploadEventsAllowed)
            m_upload->dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().loadEvent));
    }
}
// ThreadableLoaderClient callback: response headers arrived. Stores the
// response, applying any overrideMimeType() value on top of it.
void XMLHttpRequest::didReceiveResponse(unsigned long identifier, const ResourceResponse& response)
{
    InspectorInstrumentation::didReceiveXHRResponse(scriptExecutionContext(), identifier);

    m_response = response;
    if (!m_mimeTypeOverride.isEmpty()) {
        m_response.setHTTPHeaderField("Content-Type", m_mimeTypeOverride);
        m_responseEncoding = extractCharsetFromMediaType(m_mimeTypeOverride);
    }

    // Fall back to the encoding the network layer detected.
    if (m_responseEncoding.isEmpty())
        m_responseEncoding = response.textEncodingName();
}
// ThreadableLoaderClient callback: a chunk of response body arrived. Routes
// the bytes into either the text decoder or the binary buffer depending on
// responseType, and throttles 'progress' events for async requests.
void XMLHttpRequest::didReceiveData(const char* data, int len)
{
    if (m_error)
        return;

    if (m_state < HEADERS_RECEIVED)
        changeState(HEADERS_RECEIVED);

    // Text-like response types go through a TextResourceDecoder; binary
    // types (arraybuffer/blob) are accumulated raw below.
    bool useDecoder = m_responseTypeCode == ResponseTypeDefault || m_responseTypeCode == ResponseTypeText || m_responseTypeCode == ResponseTypeDocument;

    // Lazily create the decoder once the encoding / MIME type is known.
    if (useDecoder && !m_decoder) {
        if (!m_responseEncoding.isEmpty())
            m_decoder = TextResourceDecoder::create("text/plain", m_responseEncoding);
        // allow TextResourceDecoder to look inside the m_response if it's XML or HTML
        else if (responseIsXML()) {
            m_decoder = TextResourceDecoder::create("application/xml");
            // Don't stop on encoding errors, unlike it is done for other kinds of XML resources. This matches the behavior of previous WebKit versions, Firefox and Opera.
            m_decoder->useLenientXMLDecoding();
        } else if (equalIgnoringCase(responseMIMEType(), "text/html"))
            m_decoder = TextResourceDecoder::create("text/html", "UTF-8");
        else
            m_decoder = TextResourceDecoder::create("text/plain", "UTF-8");
    }

    if (!len)
        return;

    // A length of -1 means |data| is a null-terminated string.
    if (len == -1)
        len = strlen(data);

    if (useDecoder)
        m_responseBuilder.append(m_decoder->decode(data, len));
    else if (m_responseTypeCode == ResponseTypeArrayBuffer || m_responseTypeCode == ResponseTypeBlob) {
        // Buffer binary data.
        if (!m_binaryResponseBuilder)
            m_binaryResponseBuilder = SharedBuffer::create();
        m_binaryResponseBuilder->append(data, len);
    }

    if (!m_error) {
        long long expectedLength = m_response.expectedContentLength();
        m_receivedLength += len;

        if (m_async) {
            // The total is only "computable" while the received byte count
            // stays within the advertised Content-Length.
            bool lengthComputable = expectedLength > 0 && m_receivedLength <= expectedLength;
            unsigned long long total = lengthComputable ? expectedLength : 0;
            m_progressEventThrottle.dispatchProgressEvent(lengthComputable, m_receivedLength, total);
        }

        if (m_state != LOADING)
            changeState(LOADING);
        else
            // Firefox calls readyStateChanged every time it receives data, 4449442
            callReadyStateChangeListener();
    }
}
#if ENABLE(XHR_TIMEOUT)
void XMLHttpRequest::didTimeout()
{
// internalAbort() calls dropProtection(), which may release the last reference.
RefPtr<XMLHttpRequest> protect(this);
internalAbort();
clearResponse();
clearRequest();
m_error = true;
m_exceptionCode = XMLHttpRequestException::TIMEOUT_ERR;
if (!m_async) {
m_state = DONE;
m_exceptionCode = TIMEOUT_ERR;
return;
}
changeState(DONE);
if (!m_uploadComplete) {
m_uploadComplete = true;
if (m_upload && m_uploadEventsAllowed)
m_upload->dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().timeoutEvent));
}
m_progressEventThrottle.dispatchEventAndLoadEnd(XMLHttpRequestProgressEvent::create(eventNames().timeoutEvent));<|fim▁hole|>#endif
// An XHR may only be suspended (e.g. for the page cache) when no load is in
// flight; otherwise loader callbacks could fire while suspended.
bool XMLHttpRequest::canSuspend() const
{
    return !m_loader;
}
// ActiveDOMObject override: defer progress events so no JS runs against a
// suspended document.
void XMLHttpRequest::suspend(ReasonForSuspension)
{
    m_progressEventThrottle.suspend();
}
// ActiveDOMObject override: deliver any progress events deferred while the
// object was suspended.
void XMLHttpRequest::resume()
{
    m_progressEventThrottle.resume();
}
// ActiveDOMObject override: the context is shutting down; cancel the load
// without dispatching events.
void XMLHttpRequest::stop()
{
    internalAbort();
}
// ActiveDOMObject override: the script execution context is going away.
// stop() must already have cancelled any in-flight load.
void XMLHttpRequest::contextDestroyed()
{
    ASSERT(!m_loader);
    ActiveDOMObject::contextDestroyed();
}
// EventTarget override: identifies this object as an XMLHttpRequest.
const AtomicString& XMLHttpRequest::interfaceName() const
{
    return eventNames().interfaceForXMLHttpRequest;
}
// Disambiguates scriptExecutionContext(), inherited from both
// ActiveDOMObject and EventTarget, in favor of the former.
ScriptExecutionContext* XMLHttpRequest::scriptExecutionContext() const
{
    return ActiveDOMObject::scriptExecutionContext();
}
// EventTarget plumbing: listener storage lives inline in this object.
EventTargetData* XMLHttpRequest::eventTargetData()
{
    return &m_eventTargetData;
}
// EventTarget plumbing: the inline data always exists, so no lazy creation.
EventTargetData& XMLHttpRequest::ensureEventTargetData()
{
    return m_eventTargetData;
}
} // namespace WebCore<|fim▁end|> | } |
<|file_name|>f32.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for 32-bit floats (`f32` type)
#![stable(feature = "rust1", since = "1.0.0")]
#![allow(missing_docs)]
#![allow(unsigned_negation)]
#![doc(primitive = "f32")]
use prelude::v1::*;
use intrinsics;
use libc::c_int;
use num::{Float, FpCategory};
use num::strconv;
use num::strconv::ExponentFormat::{ExpNone, ExpDec};
use num::strconv::SignificantDigits::{DigAll, DigMax, DigExact};
use num::strconv::SignFormat::SignNeg;
use core::num;
pub use core::f32::{RADIX, MANTISSA_DIGITS, DIGITS, EPSILON, MIN_VALUE};
pub use core::f32::{MIN_POS_VALUE, MAX_VALUE, MIN_EXP, MAX_EXP, MIN_10_EXP};
pub use core::f32::{MAX_10_EXP, NAN, INFINITY, NEG_INFINITY};
pub use core::f32::consts;
#[allow(dead_code)]
mod cmath {
    //! Raw FFI bindings to the C math library ("libm") for the `f32`
    //! operations that have no compiler intrinsic. Used by the `Float`
    //! impl below.
    use libc::{c_float, c_int};

    #[link_name = "m"]
    extern {
        pub fn acosf(n: c_float) -> c_float;
        pub fn asinf(n: c_float) -> c_float;
        pub fn atanf(n: c_float) -> c_float;
        pub fn atan2f(a: c_float, b: c_float) -> c_float;
        pub fn cbrtf(n: c_float) -> c_float;
        pub fn coshf(n: c_float) -> c_float;
        pub fn erff(n: c_float) -> c_float;
        pub fn erfcf(n: c_float) -> c_float;
        pub fn expm1f(n: c_float) -> c_float;
        pub fn fdimf(a: c_float, b: c_float) -> c_float;
        pub fn frexpf(n: c_float, value: &mut c_int) -> c_float;
        pub fn fmaxf(a: c_float, b: c_float) -> c_float;
        pub fn fminf(a: c_float, b: c_float) -> c_float;
        pub fn fmodf(a: c_float, b: c_float) -> c_float;
        pub fn nextafterf(x: c_float, y: c_float) -> c_float;
        pub fn hypotf(x: c_float, y: c_float) -> c_float;
        pub fn ldexpf(x: c_float, n: c_int) -> c_float;
        pub fn logbf(n: c_float) -> c_float;
        pub fn log1pf(n: c_float) -> c_float;
        pub fn ilogbf(n: c_float) -> c_int;
        pub fn modff(n: c_float, iptr: &mut c_float) -> c_float;
        pub fn sinhf(n: c_float) -> c_float;
        pub fn tanf(n: c_float) -> c_float;
        pub fn tanhf(n: c_float) -> c_float;
        pub fn tgammaf(n: c_float) -> c_float;

        // lgammaf_r is exported under a different symbol name on Windows.
        #[cfg(unix)]
        pub fn lgammaf_r(n: c_float, sign: &mut c_int) -> c_float;
        #[cfg(windows)]
        #[link_name="__lgammaf_r"]
        pub fn lgammaf_r(n: c_float, sign: &mut c_int) -> c_float;
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
// Mostly delegation: constant/classification/arithmetic methods forward to
// the platform-independent `core::num::Float` implementation; transcendental
// functions without intrinsics go through the libm bindings in `cmath`.
impl Float for f32 {
    // Special-value constructors.
    #[inline]
    fn nan() -> f32 { num::Float::nan() }
    #[inline]
    fn infinity() -> f32 { num::Float::infinity() }
    #[inline]
    fn neg_infinity() -> f32 { num::Float::neg_infinity() }
    #[inline]
    fn zero() -> f32 { num::Float::zero() }
    #[inline]
    fn neg_zero() -> f32 { num::Float::neg_zero() }
    #[inline]
    fn one() -> f32 { num::Float::one() }

    // Deprecated format/range queries; `unused_self` only exists so the
    // trait methods can be called without a receiver value.
    #[allow(deprecated)]
    #[inline]
    fn mantissa_digits(unused_self: Option<f32>) -> uint {
        num::Float::mantissa_digits(unused_self)
    }
    #[allow(deprecated)]
    #[inline]
    fn digits(unused_self: Option<f32>) -> uint { num::Float::digits(unused_self) }
    #[allow(deprecated)]
    #[inline]
    fn epsilon() -> f32 { num::Float::epsilon() }
    #[allow(deprecated)]
    #[inline]
    fn min_exp(unused_self: Option<f32>) -> int { num::Float::min_exp(unused_self) }
    #[allow(deprecated)]
    #[inline]
    fn max_exp(unused_self: Option<f32>) -> int { num::Float::max_exp(unused_self) }
    #[allow(deprecated)]
    #[inline]
    fn min_10_exp(unused_self: Option<f32>) -> int { num::Float::min_10_exp(unused_self) }
    #[allow(deprecated)]
    #[inline]
    fn max_10_exp(unused_self: Option<f32>) -> int { num::Float::max_10_exp(unused_self) }
    #[allow(deprecated)]
    #[inline]
    fn min_value() -> f32 { num::Float::min_value() }
    #[allow(deprecated)]
    #[inline]
    fn min_pos_value(unused_self: Option<f32>) -> f32 { num::Float::min_pos_value(unused_self) }
    #[allow(deprecated)]
    #[inline]
    fn max_value() -> f32 { num::Float::max_value() }

    // Classification predicates.
    #[inline]
    fn is_nan(self) -> bool { num::Float::is_nan(self) }
    #[inline]
    fn is_infinite(self) -> bool { num::Float::is_infinite(self) }
    #[inline]
    fn is_finite(self) -> bool { num::Float::is_finite(self) }
    #[inline]
    fn is_normal(self) -> bool { num::Float::is_normal(self) }
    #[inline]
    fn classify(self) -> FpCategory { num::Float::classify(self) }
    #[inline]
    fn integer_decode(self) -> (u64, i16, i8) { num::Float::integer_decode(self) }

    // Rounding and sign operations.
    #[inline]
    fn floor(self) -> f32 { num::Float::floor(self) }
    #[inline]
    fn ceil(self) -> f32 { num::Float::ceil(self) }
    #[inline]
    fn round(self) -> f32 { num::Float::round(self) }
    #[inline]
    fn trunc(self) -> f32 { num::Float::trunc(self) }
    #[inline]
    fn fract(self) -> f32 { num::Float::fract(self) }
    #[inline]
    fn abs(self) -> f32 { num::Float::abs(self) }
    #[inline]
    fn signum(self) -> f32 { num::Float::signum(self) }
    #[inline]
    fn is_positive(self) -> bool { num::Float::is_positive(self) }
    #[inline]
    fn is_negative(self) -> bool { num::Float::is_negative(self) }

    // Arithmetic, powers, and logarithms (intrinsic-backed via core).
    #[inline]
    fn mul_add(self, a: f32, b: f32) -> f32 { num::Float::mul_add(self, a, b) }
    #[inline]
    fn recip(self) -> f32 { num::Float::recip(self) }
    #[inline]
    fn powi(self, n: i32) -> f32 { num::Float::powi(self, n) }
    #[inline]
    fn powf(self, n: f32) -> f32 { num::Float::powf(self, n) }
    #[inline]
    fn sqrt(self) -> f32 { num::Float::sqrt(self) }
    #[inline]
    fn rsqrt(self) -> f32 { num::Float::rsqrt(self) }
    #[inline]
    fn exp(self) -> f32 { num::Float::exp(self) }
    #[inline]
    fn exp2(self) -> f32 { num::Float::exp2(self) }
    #[inline]
    fn ln(self) -> f32 { num::Float::ln(self) }
    #[inline]
    fn log(self, base: f32) -> f32 { num::Float::log(self, base) }
    #[inline]
    fn log2(self) -> f32 { num::Float::log2(self) }
    #[inline]
    fn log10(self) -> f32 { num::Float::log10(self) }
    #[inline]
    fn to_degrees(self) -> f32 { num::Float::to_degrees(self) }
    #[inline]
    fn to_radians(self) -> f32 { num::Float::to_radians(self) }

    /// Constructs a floating point number by multiplying `x` by 2 raised to the
    /// power of `exp`
    #[inline]
    fn ldexp(x: f32, exp: int) -> f32 {
        unsafe { cmath::ldexpf(x, exp as c_int) }
    }

    /// Breaks the number into a normalized fraction and a base-2 exponent,
    /// satisfying:
    ///
    /// - `self = x * pow(2, exp)`
    /// - `0.5 <= abs(x) < 1.0`
    #[inline]
    fn frexp(self) -> (f32, int) {
        unsafe {
            let mut exp = 0;
            let x = cmath::frexpf(self, &mut exp);
            (x, exp as int)
        }
    }

    /// Returns the next representable floating-point value in the direction of
    /// `other`.
    #[inline]
    fn next_after(self, other: f32) -> f32 {
        unsafe { cmath::nextafterf(self, other) }
    }

    #[inline]
    fn max(self, other: f32) -> f32 {
        unsafe { cmath::fmaxf(self, other) }
    }
    #[inline]
    fn min(self, other: f32) -> f32 {
        unsafe { cmath::fminf(self, other) }
    }
    #[inline]
    fn abs_sub(self, other: f32) -> f32 {
        unsafe { cmath::fdimf(self, other) }
    }
    #[inline]
    fn cbrt(self) -> f32 {
        unsafe { cmath::cbrtf(self) }
    }
    #[inline]
    fn hypot(self, other: f32) -> f32 {
        unsafe { cmath::hypotf(self, other) }
    }

    // Trigonometry: sin/cos use LLVM intrinsics; the rest call libm.
    #[inline]
    fn sin(self) -> f32 {
        unsafe { intrinsics::sinf32(self) }
    }
    #[inline]
    fn cos(self) -> f32 {
        unsafe { intrinsics::cosf32(self) }
    }
    #[inline]
    fn tan(self) -> f32 {
        unsafe { cmath::tanf(self) }
    }
    #[inline]
    fn asin(self) -> f32 {
        unsafe { cmath::asinf(self) }
    }
    #[inline]
    fn acos(self) -> f32 {
        unsafe { cmath::acosf(self) }
    }
    #[inline]
    fn atan(self) -> f32 {
        unsafe { cmath::atanf(self) }
    }
    #[inline]
    fn atan2(self, other: f32) -> f32 {
        unsafe { cmath::atan2f(self, other) }
    }

    /// Simultaneously computes the sine and cosine of the number
    #[inline]
    fn sin_cos(self) -> (f32, f32) {
        (self.sin(), self.cos())
    }

    /// Returns the exponential of the number, minus `1`, in a way that is
    /// accurate even if the number is close to zero
    #[inline]
    fn exp_m1(self) -> f32 {
        unsafe { cmath::expm1f(self) }
    }

    /// Returns the natural logarithm of the number plus `1` (`ln(1+n)`) more
    /// accurately than if the operations were performed separately
    #[inline]
    fn ln_1p(self) -> f32 {
        unsafe { cmath::log1pf(self) }
    }

    #[inline]
    fn sinh(self) -> f32 {
        unsafe { cmath::sinhf(self) }
    }
    #[inline]
    fn cosh(self) -> f32 {
        unsafe { cmath::coshf(self) }
    }
    #[inline]
    fn tanh(self) -> f32 {
        unsafe { cmath::tanhf(self) }
    }

    /// Inverse hyperbolic sine
    ///
    /// # Returns
    ///
    /// - on success, the inverse hyperbolic sine of `self` will be returned
    /// - `self` if `self` is `0.0`, `-0.0`, `INFINITY`, or `NEG_INFINITY`
    /// - `NAN` if `self` is `NAN`
    #[inline]
    fn asinh(self) -> f32 {
        // asinh(x) = ln(x + sqrt(x^2 + 1)); -inf is special-cased because
        // the formula would otherwise evaluate (-inf + inf).
        match self {
            NEG_INFINITY => NEG_INFINITY,
            x => (x + ((x * x) + 1.0).sqrt()).ln(),
        }
    }

    /// Inverse hyperbolic cosine
    ///
    /// # Returns
    ///
    /// - on success, the inverse hyperbolic cosine of `self` will be returned
    /// - `INFINITY` if `self` is `INFINITY`
    /// - `NAN` if `self` is `NAN` or `self < 1.0` (including `NEG_INFINITY`)
    #[inline]
    fn acosh(self) -> f32 {
        // acosh(x) = ln(x + sqrt(x^2 - 1)); defined only for x >= 1.
        match self {
            x if x < 1.0 => Float::nan(),
            x => (x + ((x * x) - 1.0).sqrt()).ln(),
        }
    }

    /// Inverse hyperbolic tangent
    ///
    /// # Returns
    ///
    /// - on success, the inverse hyperbolic tangent of `self` will be returned
    /// - `self` if `self` is `0.0` or `-0.0`
    /// - `INFINITY` if `self` is `1.0`
    /// - `NEG_INFINITY` if `self` is `-1.0`
    /// - `NAN` if the `self` is `NAN` or outside the domain of `-1.0 <= self <= 1.0`
    /// (including `INFINITY` and `NEG_INFINITY`)
    #[inline]
    fn atanh(self) -> f32 {
        // atanh(x) = 0.5 * ln((1 + x) / (1 - x)); ln_1p keeps accuracy near 0.
        0.5 * ((2.0 * self) / (1.0 - self)).ln_1p()
    }
}
//
// Section: String Conversions
//
/// Converts a float to its decimal string representation.
///
/// # Arguments
///
/// * num - The float value
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_string(num: f32) -> String {
    // All significant digits, no exponent; the special-value flag returned
    // alongside the string is not needed here.
    let (string, _special) = strconv::float_to_str_common(
        num, 10u, true, SignNeg, DigAll, ExpNone, false);
    string
}
/// Converts a float to its hexadecimal (base-16) string representation.
///
/// # Arguments
///
/// * num - The float value
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_str_hex(num: f32) -> String {
    // Same as to_string but with radix 16; the special-value flag is dropped.
    let (string, _special) = strconv::float_to_str_common(
        num, 16u, true, SignNeg, DigAll, ExpNone, false);
    string
}
/// Converts a float to a string in the given radix, also returning a flag
/// that tells whether the value was special (NaN or infinite).
///
/// # Arguments
///
/// * num - The float value
/// * rdx - The base to use
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_str_radix_special(num: f32, rdx: uint) -> (String, bool) {
    strconv::float_to_str_common(
        num, rdx, true, SignNeg, DigAll, ExpNone, false)
}
/// Converts a float to a string carrying exactly `dig` significant digits.
///
/// # Arguments
///
/// * num - The float value
/// * dig - The number of significant digits
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_str_exact(num: f32, dig: uint) -> String {
    // DigExact pads or rounds so the output has exactly `dig` digits.
    let (string, _special) = strconv::float_to_str_common(
        num, 10u, true, SignNeg, DigExact(dig), ExpNone, false);
    string
}
/// Converts a float to a string carrying at most `dig` significant digits.
///
/// # Arguments
///
/// * num - The float value
/// * dig - The maximum number of significant digits
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_str_digits(num: f32, dig: uint) -> String {
    // DigMax rounds to `dig` digits but does not pad with trailing zeros.
    let (string, _special) = strconv::float_to_str_common(
        num, 10u, true, SignNeg, DigMax(dig), ExpNone, false);
    string
}
/// Converts a float to exponential notation with exactly `dig` digits after
/// the decimal point in the significand.
///
/// # Arguments
///
/// * num - The float value
/// * dig - The number of digits after the decimal point
/// * upper - Use `E` instead of `e` for the exponent sign
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_str_exp_exact(num: f32, dig: uint, upper: bool) -> String {
    let (string, _special) = strconv::float_to_str_common(
        num, 10u, true, SignNeg, DigExact(dig), ExpDec, upper);
    string
}
/// Converts a float to exponential notation with at most `dig` digits after
/// the decimal point in the significand.
///
/// # Arguments
///
/// * num - The float value
/// * dig - The maximum number of digits after the decimal point
/// * upper - Use `E` instead of `e` for the exponent sign
#[inline]
#[unstable(feature = "std_misc", reason = "may be removed or relocated")]
pub fn to_str_exp_digits(num: f32, dig: uint, upper: bool) -> String {
    let (string, _special) = strconv::float_to_str_common(
        num, 10u, true, SignNeg, DigMax(dig), ExpDec, upper);
    string
}
#[cfg(test)]
mod tests {
use f32::*;
use num::*;
use num::FpCategory as Fp;
#[test]
fn test_min_nan() {
assert_eq!(NAN.min(2.0), 2.0);
assert_eq!(2.0f32.min(NAN), 2.0);
}
#[test]
fn test_max_nan() {
assert_eq!(NAN.max(2.0), 2.0);
assert_eq!(2.0f32.max(NAN), 2.0);
}
#[test]
fn test_num_f32() {
test_num(10f32, 2f32);
}
#[test]
fn test_floor() {
assert_approx_eq!(1.0f32.floor(), 1.0f32);
assert_approx_eq!(1.3f32.floor(), 1.0f32);
assert_approx_eq!(1.5f32.floor(), 1.0f32);
assert_approx_eq!(1.7f32.floor(), 1.0f32);
assert_approx_eq!(0.0f32.floor(), 0.0f32);
assert_approx_eq!((-0.0f32).floor(), -0.0f32);
assert_approx_eq!((-1.0f32).floor(), -1.0f32);
assert_approx_eq!((-1.3f32).floor(), -2.0f32);
assert_approx_eq!((-1.5f32).floor(), -2.0f32);
assert_approx_eq!((-1.7f32).floor(), -2.0f32);
}
#[test]
fn test_ceil() {
assert_approx_eq!(1.0f32.ceil(), 1.0f32);
assert_approx_eq!(1.3f32.ceil(), 2.0f32);
assert_approx_eq!(1.5f32.ceil(), 2.0f32);
assert_approx_eq!(1.7f32.ceil(), 2.0f32);
assert_approx_eq!(0.0f32.ceil(), 0.0f32);
assert_approx_eq!((-0.0f32).ceil(), -0.0f32);
assert_approx_eq!((-1.0f32).ceil(), -1.0f32);
assert_approx_eq!((-1.3f32).ceil(), -1.0f32);
assert_approx_eq!((-1.5f32).ceil(), -1.0f32);
assert_approx_eq!((-1.7f32).ceil(), -1.0f32);
}
#[test]
fn test_round() {
assert_approx_eq!(1.0f32.round(), 1.0f32);
assert_approx_eq!(1.3f32.round(), 1.0f32);
assert_approx_eq!(1.5f32.round(), 2.0f32);
assert_approx_eq!(1.7f32.round(), 2.0f32);
assert_approx_eq!(0.0f32.round(), 0.0f32);
assert_approx_eq!((-0.0f32).round(), -0.0f32);
assert_approx_eq!((-1.0f32).round(), -1.0f32);
assert_approx_eq!((-1.3f32).round(), -1.0f32);
assert_approx_eq!((-1.5f32).round(), -2.0f32);
assert_approx_eq!((-1.7f32).round(), -2.0f32);
}
#[test]
fn test_trunc() {
assert_approx_eq!(1.0f32.trunc(), 1.0f32);
assert_approx_eq!(1.3f32.trunc(), 1.0f32);
assert_approx_eq!(1.5f32.trunc(), 1.0f32);
assert_approx_eq!(1.7f32.trunc(), 1.0f32);
assert_approx_eq!(0.0f32.trunc(), 0.0f32);
assert_approx_eq!((-0.0f32).trunc(), -0.0f32);
assert_approx_eq!((-1.0f32).trunc(), -1.0f32);
assert_approx_eq!((-1.3f32).trunc(), -1.0f32);
assert_approx_eq!((-1.5f32).trunc(), -1.0f32);
assert_approx_eq!((-1.7f32).trunc(), -1.0f32);
}
#[test]
fn test_fract() {
assert_approx_eq!(1.0f32.fract(), 0.0f32);
assert_approx_eq!(1.3f32.fract(), 0.3f32);
assert_approx_eq!(1.5f32.fract(), 0.5f32);
assert_approx_eq!(1.7f32.fract(), 0.7f32);
assert_approx_eq!(0.0f32.fract(), 0.0f32);
assert_approx_eq!((-0.0f32).fract(), -0.0f32);
assert_approx_eq!((-1.0f32).fract(), -0.0f32);
assert_approx_eq!((-1.3f32).fract(), -0.3f32);
assert_approx_eq!((-1.5f32).fract(), -0.5f32);
assert_approx_eq!((-1.7f32).fract(), -0.7f32);
}
#[test]
fn test_exp() {
assert_eq!(1.0, 0.0f32.exp());
assert_approx_eq!(2.718282, 1.0f32.exp());
assert_approx_eq!(148.413162, 5.0f32.exp());
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(inf, inf.exp());
assert_eq!(0.0, neg_inf.exp());
assert!(nan.exp().is_nan());
}
#[test]
fn test_exp2() {
assert_eq!(32.0, 5.0f32.exp2());
assert_eq!(1.0, 0.0f32.exp2());
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(inf, inf.exp2());
assert_eq!(0.0, neg_inf.exp2());
assert!(nan.exp2().is_nan());
}
<|fim▁hole|> fn test_asinh() {
assert_eq!(0.0f32.asinh(), 0.0f32);
assert_eq!((-0.0f32).asinh(), -0.0f32);
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(inf.asinh(), inf);
assert_eq!(neg_inf.asinh(), neg_inf);
assert!(nan.asinh().is_nan());
assert_approx_eq!(2.0f32.asinh(), 1.443635475178810342493276740273105f32);
assert_approx_eq!((-2.0f32).asinh(), -1.443635475178810342493276740273105f32);
}
#[test]
fn test_acosh() {
assert_eq!(1.0f32.acosh(), 0.0f32);
assert!(0.999f32.acosh().is_nan());
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(inf.acosh(), inf);
assert!(neg_inf.acosh().is_nan());
assert!(nan.acosh().is_nan());
assert_approx_eq!(2.0f32.acosh(), 1.31695789692481670862504634730796844f32);
assert_approx_eq!(3.0f32.acosh(), 1.76274717403908605046521864995958461f32);
}
#[test]
fn test_atanh() {
assert_eq!(0.0f32.atanh(), 0.0f32);
assert_eq!((-0.0f32).atanh(), -0.0f32);
let inf32: f32 = Float::infinity();
let neg_inf32: f32 = Float::neg_infinity();
assert_eq!(1.0f32.atanh(), inf32);
assert_eq!((-1.0f32).atanh(), neg_inf32);
assert!(2f64.atanh().atanh().is_nan());
assert!((-2f64).atanh().atanh().is_nan());
let inf64: f32 = Float::infinity();
let neg_inf64: f32 = Float::neg_infinity();
let nan32: f32 = Float::nan();
assert!(inf64.atanh().is_nan());
assert!(neg_inf64.atanh().is_nan());
assert!(nan32.atanh().is_nan());
assert_approx_eq!(0.5f32.atanh(), 0.54930614433405484569762261846126285f32);
assert_approx_eq!((-0.5f32).atanh(), -0.54930614433405484569762261846126285f32);
}
#[test]
fn test_real_consts() {
use super::consts;
let pi: f32 = consts::PI;
let two_pi: f32 = consts::PI_2;
let frac_pi_2: f32 = consts::FRAC_PI_2;
let frac_pi_3: f32 = consts::FRAC_PI_3;
let frac_pi_4: f32 = consts::FRAC_PI_4;
let frac_pi_6: f32 = consts::FRAC_PI_6;
let frac_pi_8: f32 = consts::FRAC_PI_8;
let frac_1_pi: f32 = consts::FRAC_1_PI;
let frac_2_pi: f32 = consts::FRAC_2_PI;
let frac_2_sqrtpi: f32 = consts::FRAC_2_SQRTPI;
let sqrt2: f32 = consts::SQRT2;
let frac_1_sqrt2: f32 = consts::FRAC_1_SQRT2;
let e: f32 = consts::E;
let log2_e: f32 = consts::LOG2_E;
let log10_e: f32 = consts::LOG10_E;
let ln_2: f32 = consts::LN_2;
let ln_10: f32 = consts::LN_10;
assert_approx_eq!(two_pi, 2f32 * pi);
assert_approx_eq!(frac_pi_2, pi / 2f32);
assert_approx_eq!(frac_pi_3, pi / 3f32);
assert_approx_eq!(frac_pi_4, pi / 4f32);
assert_approx_eq!(frac_pi_6, pi / 6f32);
assert_approx_eq!(frac_pi_8, pi / 8f32);
assert_approx_eq!(frac_1_pi, 1f32 / pi);
assert_approx_eq!(frac_2_pi, 2f32 / pi);
assert_approx_eq!(frac_2_sqrtpi, 2f32 / pi.sqrt());
assert_approx_eq!(sqrt2, 2f32.sqrt());
assert_approx_eq!(frac_1_sqrt2, 1f32 / 2f32.sqrt());
assert_approx_eq!(log2_e, e.log2());
assert_approx_eq!(log10_e, e.log10());
assert_approx_eq!(ln_2, 2f32.ln());
assert_approx_eq!(ln_10, 10f32.ln());
}
#[test]
pub fn test_abs() {
assert_eq!(INFINITY.abs(), INFINITY);
assert_eq!(1f32.abs(), 1f32);
assert_eq!(0f32.abs(), 0f32);
assert_eq!((-0f32).abs(), 0f32);
assert_eq!((-1f32).abs(), 1f32);
assert_eq!(NEG_INFINITY.abs(), INFINITY);
assert_eq!((1f32/NEG_INFINITY).abs(), 0f32);
assert!(NAN.abs().is_nan());
}
#[test]
fn test_abs_sub() {
assert_eq!((-1f32).abs_sub(1f32), 0f32);
assert_eq!(1f32.abs_sub(1f32), 0f32);
assert_eq!(1f32.abs_sub(0f32), 1f32);
assert_eq!(1f32.abs_sub(-1f32), 2f32);
assert_eq!(NEG_INFINITY.abs_sub(0f32), 0f32);
assert_eq!(INFINITY.abs_sub(1f32), INFINITY);
assert_eq!(0f32.abs_sub(NEG_INFINITY), INFINITY);
assert_eq!(0f32.abs_sub(INFINITY), 0f32);
}
#[test]
fn test_abs_sub_nowin() {
assert!(NAN.abs_sub(-1f32).is_nan());
assert!(1f32.abs_sub(NAN).is_nan());
}
#[test]
fn test_signum() {
assert_eq!(INFINITY.signum(), 1f32);
assert_eq!(1f32.signum(), 1f32);
assert_eq!(0f32.signum(), 1f32);
assert_eq!((-0f32).signum(), -1f32);
assert_eq!((-1f32).signum(), -1f32);
assert_eq!(NEG_INFINITY.signum(), -1f32);
assert_eq!((1f32/NEG_INFINITY).signum(), -1f32);
assert!(NAN.signum().is_nan());
}
#[test]
fn test_is_positive() {
assert!(INFINITY.is_positive());
assert!(1f32.is_positive());
assert!(0f32.is_positive());
assert!(!(-0f32).is_positive());
assert!(!(-1f32).is_positive());
assert!(!NEG_INFINITY.is_positive());
assert!(!(1f32/NEG_INFINITY).is_positive());
assert!(!NAN.is_positive());
}
#[test]
fn test_is_negative() {
assert!(!INFINITY.is_negative());
assert!(!1f32.is_negative());
assert!(!0f32.is_negative());
assert!((-0f32).is_negative());
assert!((-1f32).is_negative());
assert!(NEG_INFINITY.is_negative());
assert!((1f32/NEG_INFINITY).is_negative());
assert!(!NAN.is_negative());
}
#[test]
fn test_is_normal() {
let nan: f32 = Float::nan();
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let zero: f32 = Float::zero();
let neg_zero: f32 = Float::neg_zero();
assert!(!nan.is_normal());
assert!(!inf.is_normal());
assert!(!neg_inf.is_normal());
assert!(!zero.is_normal());
assert!(!neg_zero.is_normal());
assert!(1f32.is_normal());
assert!(1e-37f32.is_normal());
assert!(!1e-38f32.is_normal());
}
#[test]
fn test_classify() {
let nan: f32 = Float::nan();
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let zero: f32 = Float::zero();
let neg_zero: f32 = Float::neg_zero();
assert_eq!(nan.classify(), Fp::Nan);
assert_eq!(inf.classify(), Fp::Infinite);
assert_eq!(neg_inf.classify(), Fp::Infinite);
assert_eq!(zero.classify(), Fp::Zero);
assert_eq!(neg_zero.classify(), Fp::Zero);
assert_eq!(1f32.classify(), Fp::Normal);
assert_eq!(1e-37f32.classify(), Fp::Normal);
assert_eq!(1e-38f32.classify(), Fp::Subnormal);
}
#[test]
fn test_ldexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f32 = FromStrRadix::from_str_radix("1p-123", 16).unwrap();
let f2: f32 = FromStrRadix::from_str_radix("1p-111", 16).unwrap();
assert_eq!(Float::ldexp(1f32, -123), f1);
assert_eq!(Float::ldexp(1f32, -111), f2);
assert_eq!(Float::ldexp(0f32, -123), 0f32);
assert_eq!(Float::ldexp(-0f32, -123), -0f32);
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(Float::ldexp(inf, -123), inf);
assert_eq!(Float::ldexp(neg_inf, -123), neg_inf);
assert!(Float::ldexp(nan, -123).is_nan());
}
#[test]
fn test_frexp() {
// We have to use from_str until base-2 exponents
// are supported in floating-point literals
let f1: f32 = FromStrRadix::from_str_radix("1p-123", 16).unwrap();
let f2: f32 = FromStrRadix::from_str_radix("1p-111", 16).unwrap();
let (x1, exp1) = f1.frexp();
let (x2, exp2) = f2.frexp();
assert_eq!((x1, exp1), (0.5f32, -122));
assert_eq!((x2, exp2), (0.5f32, -110));
assert_eq!(Float::ldexp(x1, exp1), f1);
assert_eq!(Float::ldexp(x2, exp2), f2);
assert_eq!(0f32.frexp(), (0f32, 0));
assert_eq!((-0f32).frexp(), (-0f32, 0));
}
#[test] #[cfg_attr(windows, ignore)] // FIXME #8755
fn test_frexp_nowin() {
let inf: f32 = Float::infinity();
let neg_inf: f32 = Float::neg_infinity();
let nan: f32 = Float::nan();
assert_eq!(match inf.frexp() { (x, _) => x }, inf);
assert_eq!(match neg_inf.frexp() { (x, _) => x }, neg_inf);
assert!(match nan.frexp() { (x, _) => x.is_nan() })
}
#[test]
fn test_integer_decode() {
assert_eq!(3.14159265359f32.integer_decode(), (13176795u64, -22i16, 1i8));
assert_eq!((-8573.5918555f32).integer_decode(), (8779358u64, -10i16, -1i8));
assert_eq!(2f32.powf(100.0).integer_decode(), (8388608u64, 77i16, 1i8));
assert_eq!(0f32.integer_decode(), (0u64, -150i16, 1i8));
assert_eq!((-0f32).integer_decode(), (0u64, -150i16, -1i8));
assert_eq!(INFINITY.integer_decode(), (8388608u64, 105i16, 1i8));
assert_eq!(NEG_INFINITY.integer_decode(), (8388608u64, 105i16, -1i8));
assert_eq!(NAN.integer_decode(), (12582912u64, 105i16, 1i8));
}
#[test]
fn test_sqrt_domain() {
assert!(NAN.sqrt().is_nan());
assert!(NEG_INFINITY.sqrt().is_nan());
assert!((-1.0f32).sqrt().is_nan());
assert_eq!((-0.0f32).sqrt(), -0.0);
assert_eq!(0.0f32.sqrt(), 0.0);
assert_eq!(1.0f32.sqrt(), 1.0);
assert_eq!(INFINITY.sqrt(), INFINITY);
}
}<|fim▁end|> | #[test] |
<|file_name|>SyncDialog.js<|end_file_name|><|fim▁begin|>dojo.provide("plugins.dijit.SyncDialog");
// HAS A
dojo.require("dijit.Dialog");
dojo.require("dijit.form.Button");
dojo.require("dijit.form.ValidationTextBox");
// INHERITS
dojo.require("plugins.core.Common");
dojo.declare( "plugins.dijit.SyncDialog",
[ dijit._Widget, dijit._Templated, plugins.core.Common ], {
//Path to the template of this widget.
templatePath: dojo.moduleUrl("plugins", "dijit/templates/syncdialog.html"),
// OR USE @import IN HTML TEMPLATE
cssFiles : [
dojo.moduleUrl("plugins", "dijit/css/syncdialog.css")
],
// Calls dijit._Templated.widgetsInTemplate
widgetsInTemplate : true,
// PARENT plugins.workflow.Apps WIDGET
parentWidget : null,
// DISPLAYED MESSAGE
message : null,
//////}}
constructor : function(args) {
console.log("SyncDialog.constructor args:");
console.dir({args:args});
// LOAD CSS
this.loadCSS();
},
postCreate : function() {
//////console.log("SyncDialog.postCreate plugins.dijit.SyncDialog.postCreate()");
this.startup();
},
startup : function () {
////console.log("SyncDialog.startup plugins.dijit.SyncDialog.startup()");
////console.log("SyncDialog.startup this.parentWidget: " + this.parentWidget);
this.inherited(arguments);
// SET UP DIALOG
this.setDialogue();
// SET KEY LISTENER
this.setKeyListener();
// ADD CSS NAMESPACE CLASS FOR TITLE CSS STYLING
this.setNamespaceClass("syncDialog");
},
setKeyListener : function () {
dojo.connect(this.dialog, "onkeypress", dojo.hitch(this, "handleOnKeyPress"));
},
handleOnKeyPress: function (event) {
var key = event.charOrCode;
console.log("SyncDialog.handleOnKeyPress key: " + key);
if ( key == null ) return;
event.stopPropagation();
if ( key == dojo.keys.ESCAPE ) this.hide();
},
setNamespaceClass : function (ccsClass) {
// ADD CSS NAMESPACE CLASS
dojo.addClass(this.dialog.domNode, ccsClass);
dojo.addClass(this.dialog.titleNode, ccsClass);
dojo.addClass(this.dialog.closeButtonNode, ccsClass);
},
show: function () {
// SHOW THE DIALOGUE
this.dialog.show();
this.message.focus();
},
hide: function () {
// HIDE THE DIALOGUE
this.dialog.hide();
},
doEnter : function(type) {
// RUN ENTER CALLBACK IF 'ENTER' CLICKED
console.log("SyncDialog.doEnter plugins.dijit.SyncDialog.doEnter()");
var inputs = this.validateInputs(["message", "details"]);
console.log("SyncDialog.doEnter inputs:");
console.dir({inputs:inputs});
if ( ! inputs ) {
console.log("SyncDialog.doEnter inputs is null. Returning");
return;
}
// RESET
this.message.set('value', "");
this.details.value = "";
// HIDE
this.hide();
// DO CALLBACK
this.dialog.enterCallback(inputs);
},
validateInputs : function (keys) {
console.log("Hub.validateInputs keys: ");
console.dir({keys:keys});
var inputs = new Object;
this.isValid = true;
for ( var i = 0; i < keys.length; i++ ) {
console.log("Hub.validateInputs Doing keys[" + i + "]: " + keys[i]);
inputs[keys[i]] = this.verifyInput(keys[i]);
}
console.log("Hub.validateInputs inputs: ");
console.dir({inputs:inputs});
if ( ! this.isValid ) return null;
return inputs;
},
verifyInput : function (input) {
console.log("Aws.verifyInput input: ");
console.dir({this_input:this[input]});
var value = this[input].value;
console.log("Aws.verifyInput value: " + value);
var className = this.getClassName(this[input]);
console.log("Aws.verifyInput className: " + className);
if ( className ) {
console.log("Aws.verifyInput this[input].isValid(): " + this[input].isValid());
if ( ! value || ! this[input].isValid() ) {
console.log("Aws.verifyInput input " + input + " value is empty. Adding class 'invalid'");
dojo.addClass(this[input].domNode, 'invalid');
this.isValid = false;
}
else {
console.log("SyncDialog.verifyInput value is NOT empty. Removing class 'invalid'");
dojo.removeClass(this[input].domNode, 'invalid');
return value;
}
}
else {
if ( input.match(/;/) || input.match(/`/) ) {
console.log("SyncDialog.verifyInput value is INVALID. Adding class 'invalid'");
dojo.addClass(this[input], 'invalid');<|fim▁hole|> console.log("SyncDialog.verifyInput value is VALID. Removing class 'invalid'");
dojo.removeClass(this[input], 'invalid');
return value;
}
}
return null;
},
doCancel : function() {
// RUN CANCEL CALLBACK IF 'CANCEL' CLICKED
////console.log("SyncDialog.doCancel plugins.dijit.SyncDialog.doCancel()");
this.dialog.cancelCallback();
this.dialog.hide();
},
setDialogue : function () {
// APPEND DIALOG TO DOCUMENT
document.body.appendChild(this.dialog.domNode);
this.dialog.parentWidget = this;
// AVOID this._fadeOutDeferred NOT DEFINED ERROR
this._fadeOutDeferred = function () {};
},
load : function (args) {
console.log("SyncDialog.load args:");
console.dir({args:args});
if ( args.title ) {
console.log("SyncDialog.load SETTING TITLE: " + args.title);
this.dialog.set('title', args.title);
}
this.headerNode.innerHTML = args.header;
if (args.message) this.message.set('value', args.message);
if (args.details) this.details.value = args.details;
//if (args.details) this.details.innerHTML(args.details);
this.dialog.enterCallback = args.enterCallback;
this.show();
}
});<|fim▁end|> | this.isValid = false;
return null;
}
else { |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django import forms
<|fim▁hole|> outputfile = forms.FileField(label='Select the survery output file (output.txt) if available', required=False)
ctsoutputfile = forms.FileField(label='Select the continuous file (ctsout.txt) if available', required=False)
save_to = forms.BooleanField(initial=False, label='Save to My Scenarios', required=False)
scenario_label = forms.CharField(label="Scenario name (optional)", required=False)<|fim▁end|> | class DocumentForm(forms.Form):
xmlfile = forms.FileField(label='Select the input xml file (scenario.xml)') |
<|file_name|>AddOptionsDialog.cpp<|end_file_name|><|fim▁begin|>#include "AddOptionsDialog.h"
#include <wx/tokenzr.h>
#include "ColoursAndFontsManager.h"
#include <wx/sstream.h>
#include <wx/txtstrm.h><|fim▁hole|>
AddOptionsDialog::AddOptionsDialog(wxWindow* parent, const wxString& value)
: AddOptionsDialogBase(parent)
{
LexerConf::Ptr_t lexer = ColoursAndFontsManager::Get().GetLexer("text");
lexer->Apply(m_stc);
wxArrayString lines = ::wxStringTokenize(value, ";");
for(const wxString& line : lines) {
m_stc->AppendText(line + "\n");
}
}
AddOptionsDialog::~AddOptionsDialog() {}
wxString AddOptionsDialog::GetValue() const
{
wxStringInputStream input(m_stc->GetText());
wxTextInputStream text(input);
wxString value;
while(!input.Eof()) {
// Read the next line
value += text.ReadLine();
value += ";";
}
return value.BeforeLast(';');
}<|fim▁end|> | |
<|file_name|>fixture1.js<|end_file_name|><|fim▁begin|>var xmas = {};
(function() {
<|fim▁hole|> xmas.present = {
box: {}
};
}());
(function(global) {
global.xmas.present.box.color = 'Red';
}(this));<|fim▁end|> | |
<|file_name|>UploadContext.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Seven Bridges Genomics, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sevenbridges.apiclient.transfer;
import com.sevenbridges.apiclient.file.File;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**<|fim▁hole|> * calls.
*/
public interface UploadContext {
/**
* <b>BLOCKING CALL</b>
* <p>
* This is a blocking call, similar to invoking get on a {@link java.util.concurrent.Future}
* instance. Current thread will block until the file is fully uploaded to the Seven Bridges
* Platform, or until some exception happen. If the thread is interrupted from sleep, a runtime
* exception wrapping the interrupted exception will be thrown. If some other thread aborts, or
* pauses the upload, another runtime exception will be called, and will wake up the current
* sleeping thread.
* <p>
* If the upload is completed successfully this call will return the {@link File} resource
* instance that is uploaded.
*
* @return File 'file' resource that is uploaded
* @throws PausedUploadException if the current upload the thread is blocked on is paused by
* {@link #pauseTransfer()} call by some other thread
*/
File getFile() throws PausedUploadException;
/**
* <b>BLOCKING CALL</b>
* <p>
* This is a timed blocking call, similar to invoking get(timeValue, timeUnit) on a {@link
* java.util.concurrent.Future} instance. Current thread will wait specified time until the file
* is fully uploaded to the Seven Bridges Platform, or until some exception happen. If the thread
* is interrupted from sleep, a runtime exception wrapping the interrupted exception will be
* thrown. If some other thread aborts, or pauses the upload, another runtime exception will be
* called, and will wake up the current sleeping thread.
* <p>
* If the upload is completed successfully this call will return the {@link File} resource
* instance that is uploaded.
*
* @param timeValue the maximum number of TimeUnits to wait
* @param timeUnit durations of one unit of timeValue
* @return File 'file' resource that is uploaded
* @throws TimeoutException if the wait time times out, and the upload is still not
* completed
* @throws PausedUploadException if the current upload the thread is blocked on is paused by
* {@link #pauseTransfer()} call by some other thread
*/
File getFile(long timeValue, TimeUnit timeUnit) throws TimeoutException, PausedUploadException;
/**
* Checks if the current {@link com.sevenbridges.apiclient.upload.Upload}, managed by this
* UploadContext is finished successfully.
*
* @return Boolean indicator is upload finished
*/
boolean isFinished();
/**
* Aborts the {@link com.sevenbridges.apiclient.upload.Upload} managed by this UploadContext. Any
* thread blocked on the getFile() call on this uploadContext will be woken up by the
* RuntimeException. Upload is aborted totally, and any progress on this upload will be lost.
*/
void abortTransfer();
/**
* Gets the current state of the upload managed by this UploadContext.
*
* @return UploadState current state
*/
UploadState getState();
/**
* Pauses the {@link com.sevenbridges.apiclient.upload.Upload} managed by this UploadContext. Any
* thread blocked on the getFile() call on this uploadContext will be woken up by the
* RuntimeException that indicates that the upload is paused. Paused upload is not aborted on the
* Platform, and your progress (measured in file parts) is saved.
* <p>
* You can use this UploadContext object to resume upload via {@link
* com.sevenbridges.apiclient.user.UserActions#resumeUpload(UploadContext, java.io.File)} call.
* That call will provide a new instance of UploadContext, this one is useless after that call.
* <p>
* Pause action is not instantaneous, the call is not blocking, and it will put upload in the
* PAUSING state. After the first running part upload is finished, the upload state will change to
* PAUSED state.
*/
void pauseTransfer();
/**
* Gets summed number of bytes transferred by the {@link com.sevenbridges.apiclient.upload.Upload}
* managed by this UploadContext. This is a pretty low level byte counter, and it will update much
* more often than the part upload finished event.
*
* @return Current bytes transferred for this upload
*/
long getBytesTransferred();
/**
* Size of the whole upload in bytes.
*
* @return long upload size
*/
long getUploadSize();
/**
* Name of the {@link com.sevenbridges.apiclient.upload.Upload} managed by this UploadContext.
*
* @return String upload name
*/
String getUploadName();
/**
* ID of the {@link com.sevenbridges.apiclient.upload.Upload} managed by this UploadContext.
*
* @return String ID of the upload managed by this upload context
*/
String getUploadId();
}<|fim▁end|> | * This object allows user to take control of upload submitted to internal transfer manager via the
* {@link com.sevenbridges.apiclient.user.UserActions#submitUpload(com.sevenbridges.apiclient.upload.CreateUploadRequest)} |
<|file_name|>Limbgrower.js<|end_file_name|><|fim▁begin|>import { useBackend, useSharedState } from '../backend';
import { Box, Button, Dimmer, Icon, LabeledList, Section, Tabs } from '../components';
import { Window } from '../layouts';
export const Limbgrower = (props, context) => {
const { act, data } = useBackend(context);
const {
reagents = [],
total_reagents,
max_reagents,
categories = [],
busy,
} = data;
const [tab, setTab] = useSharedState(
context, 'category', categories[0]?.name);
const designList = categories
.find(category => category.name === tab)
?.designs || [];
return (
<Window
title="Limb Grower"
width={400}
height={550}>
{!!busy && (
<Dimmer fontSize="32px">
<Icon name="cog" spin={1} />
{' Building...'}
</Dimmer>
)}
<Window.Content scrollable>
<Section title="Reagents">
<Box mb={1}>
{total_reagents} / {max_reagents} reagent capacity used.
</Box>
<LabeledList>
{reagents.map(reagent => (
<LabeledList.Item
key={reagent.reagent_name}
label={reagent.reagent_name}
buttons={(
<Button.Confirm
textAlign="center"
width="120px"<|fim▁hole|> })} />
)}>
{reagent.reagent_amount}u
</LabeledList.Item>
))}
</LabeledList>
</Section>
<Section title="Designs">
<Tabs>
{categories.map(category => (
<Tabs.Tab
fluid
key={category.name}
selected={tab === category.name}
onClick={() => setTab(category.name)}>
{category.name}
</Tabs.Tab>
))}
</Tabs>
<LabeledList>
{designList.map(design => (
<LabeledList.Item
key={design.name}
label={design.name}
buttons={(
<Button
content="Make"
color="good"
onClick={() => act('make_limb', {
design_id: design.id,
active_tab: design.parent_category,
})} />
)}>
{design.needed_reagents.map(reagent => (
<Box key={reagent.name}>
{reagent.name}: {reagent.amount}u
</Box>
))}
</LabeledList.Item>
))}
</LabeledList>
</Section>
</Window.Content>
</Window>
);
};<|fim▁end|> | content="Remove Reagent"
color="bad"
onClick={() => act('empty_reagent', {
reagent_type: reagent.reagent_type, |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>__author__ = 'beau'
import serial
ser = serial.Serial('/dev/tty.usbmodem1422',9600, timeout=1)
import time
import sys
def get_angle():
print "requesting angle"
start = time.time()
ser.write('nofddfp\n')
print "..."
reply = ser.readline()
deltaT = time.time()-start
print reply
print "round-trip time {}".format(deltaT)
sys.stdout.flush()<|fim▁hole|>while True:
get_angle()
print "-------------"
#time.sleep(0.1)<|fim▁end|> | |
<|file_name|>az_circle_counter.js<|end_file_name|><|fim▁begin|>{
base: 'az_circle_counter',
name: Drupal.t('Circle counter'),
icon: 'et et-icon-speedometer',
// description: Drupal.t('Infographic Counter'),
params: [{
type: 'colorpicker',
heading: Drupal.t('Foreground color'),
param_name: 'fgcolor',
value: '#333333',
}, {
type: 'colorpicker',
heading: Drupal.t('Background color'),
param_name: 'bgcolor',
value: '#999999',
}, {
type: 'colorpicker',
heading: Drupal.t('Fill'),
param_name: 'fill',
}, {
type: 'checkbox',
heading: Drupal.t('Half Circle'),
param_name: 'type',
value: {
'half': Drupal.t("Yes"),
},
}, {
type: 'bootstrap_slider',
heading: Drupal.t('Dimension'),
param_name: 'dimension',
max: '500',
value: '250',
}, {
type: 'textfield',
heading: Drupal.t('Text'),
param_name: 'text',
tab: Drupal.t('Circle content'),
}, {
type: 'bootstrap_slider',
heading: Drupal.t('Font size'),
param_name: 'fontsize',
max: '100',
value: '16',
formatter: true,
tab: Drupal.t('Circle content'),
<|fim▁hole|> type: 'textfield',
heading: Drupal.t('Info'),
param_name: 'info',
tab: Drupal.t('Circle content'),
}, {
type: 'bootstrap_slider',
heading: Drupal.t('Width'),
param_name: 'width',
max: '100',
value: '5',
}, {
type: 'bootstrap_slider',
heading: Drupal.t('Percent'),
param_name: 'percent',
max: '100',
value: '50',
}, {
type: 'dropdown',
heading: Drupal.t('Border style'),
param_name: 'border',
value: {
'default': Drupal.t('Default'),
'inline': Drupal.t('Inline'),
'outline': Drupal.t('Outline'),
},
}, {
type: 'icon',
heading: Drupal.t('Icon'),
param_name: 'icon',
tab: Drupal.t('Icon'),
}, {
type: 'bootstrap_slider',
heading: Drupal.t('Icon size'),
param_name: 'icon_size',
max: '100',
description: Drupal.t('Will set the font size of the icon.'),
value: '16',
tab: Drupal.t('Icon'),
}, {
type: 'colorpicker',
heading: Drupal.t('Icon color'),
param_name: 'icon_color',
description: Drupal.t('Will set the font color of the icon.'),
tab: Drupal.t('Icon'),
}, ],
show_settings_on_create: true,
frontend_render: true,
showed: function($) {
this.baseclass.prototype.showed.apply(this, arguments);
var icon_set = this.attrs['icon'].charAt(0);
switch (icon_set) {
case 'e':
this.add_css('vendor/et-line-font/et-line-font.css', 'ETLineFont' in $.fn, function() {});
break;
case 'f':
this.add_css('vendor/font-awesome/css/font-awesome.min.css', 'fontAwesome' in $.fn, function() {});
break;
case 'p':
this.add_css('vendor/pe-icon-7-stroke/css/pe-icon-7-stroke.css', 'PELineFont' in $.fn, function() {});
break;
default:
break;
}
var element = this;
this.add_css('vendor/jquery.circliful/css/jquery.circliful.css', 'circliful' in $.fn, function() {});
this.add_js_list({
paths: ['vendor/jquery.circliful/js/jquery.circliful.min.js',
'vendor/jquery.waypoints/lib/jquery.waypoints.min.js'
],
loaded: 'waypoint' in $.fn && 'circliful' in $.fn,
callback: function() {
$(element.dom_element).waypoint(function(direction) {
$(element.dom_element).find('#' + element.id).once().circliful();
}, {
offset: '100%',
handler: function(direction) {
this.destroy()
},
});
$(document).trigger('scroll');
}
});
},
render: function($) {
if (this.attrs['icon']) {
var circliful_icon = '" data-icon=" ' + this.attrs['icon'] + '" data-iconsize="' + this.attrs[
'icon_size'] + '" data-iconcolor="' + this.attrs['icon_color'];
}
else {
var circliful_icon = '';
}
this.dom_element = $('<div class="az-element az-circle-counter ' + this.attrs['el_class'] + '" style="' +
this.attrs['style'] + '"><div id="' + this.id + '" data-dimension="' + this.attrs['dimension'] +
'" data-text="' + this.attrs['text'] + '" data-info="' + this.attrs['info'] + '" data-width="' + this
.attrs['width'] + '" data-fontsize="' + this.attrs['fontsize'] + '" data-type="' + this.attrs['type'] +
'" data-percent="' + this.attrs['percent'] + '" data-fgcolor="' + this.attrs['fgcolor'] +
'" data-bgcolor="' + this.attrs['bgcolor'] + '" data-fill="' + this.attrs['fill'] + '" data-border="' +
this.attrs['border'] + circliful_icon + '"></div></div>');
this.baseclass.prototype.render.apply(this, arguments);
},
},<|fim▁end|> | }, {
|
<|file_name|>thss_generation_errors.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "dss")]
extern crate rusty_secrets;
use rusty_secrets::dss::thss;
#[test]
#[should_panic(expected = "ThresholdTooBig")]
fn test_generate_invalid_k() {
let secret = b"These programs were never about terrorism: they're about economic spying, \
social control, and diplomatic manipulation. They're about power.";
thss::split_secret(10, 7, secret, &None).unwrap();<|fim▁hole|><|fim▁end|> | } |
<|file_name|>test_byteswap.py<|end_file_name|><|fim▁begin|>import sys, unittest, struct, math, ctypes
from binascii import hexlify
from ctypes import *
def bin(s):
return hexlify(memoryview(s)).decode().upper()
# Each *simple* type that supports different byte orders has an
# __ctype_be__ attribute that specifies the same type in BIG ENDIAN
# byte order, and a __ctype_le__ attribute that is the same type in
# LITTLE ENDIAN byte order.
#
# For Structures and Unions, these types are created on demand.
class Test(unittest.TestCase):
@unittest.skip('test disabled')
def test_X(self):
print(sys.byteorder, file=sys.stderr)
for i in range(32):
bits = BITS()
setattr(bits, "i%s" % i, 1)
dump(bits)
def test_slots(self):
class BigPoint(BigEndianStructure):
__slots__ = ()
_fields_ = [("x", c_int), ("y", c_int)]
class LowPoint(LittleEndianStructure):
__slots__ = ()
_fields_ = [("x", c_int), ("y", c_int)]
big = BigPoint()
little = LowPoint()
big.x = 4
big.y = 2
little.x = 2
little.y = 4
with self.assertRaises(AttributeError):
big.z = 42
with self.assertRaises(AttributeError):
little.z = 24
def test_endian_short(self):
if sys.byteorder == "little":
self.assertIs(c_short.__ctype_le__, c_short)
self.assertIs(c_short.__ctype_be__.__ctype_le__, c_short)
else:
self.assertIs(c_short.__ctype_be__, c_short)
self.assertIs(c_short.__ctype_le__.__ctype_be__, c_short)
s = c_short.__ctype_be__(0x1234)
self.assertEqual(bin(struct.pack(">h", 0x1234)), "1234")
self.assertEqual(bin(s), "1234")
self.assertEqual(s.value, 0x1234)
s = c_short.__ctype_le__(0x1234)
self.assertEqual(bin(struct.pack("<h", 0x1234)), "3412")
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
s = c_ushort.__ctype_be__(0x1234)
self.assertEqual(bin(struct.pack(">h", 0x1234)), "1234")
self.assertEqual(bin(s), "1234")
self.assertEqual(s.value, 0x1234)
s = c_ushort.__ctype_le__(0x1234)
self.assertEqual(bin(struct.pack("<h", 0x1234)), "3412")
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
def test_endian_int(self):
if sys.byteorder == "little":
self.assertIs(c_int.__ctype_le__, c_int)
self.assertIs(c_int.__ctype_be__.__ctype_le__, c_int)
else:
self.assertIs(c_int.__ctype_be__, c_int)
self.assertIs(c_int.__ctype_le__.__ctype_be__, c_int)
s = c_int.__ctype_be__(0x12345678)
self.assertEqual(bin(struct.pack(">i", 0x12345678)), "12345678")
self.assertEqual(bin(s), "12345678")
self.assertEqual(s.value, 0x12345678)
s = c_int.__ctype_le__(0x12345678)
self.assertEqual(bin(struct.pack("<i", 0x12345678)), "78563412")
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
s = c_uint.__ctype_be__(0x12345678)
self.assertEqual(bin(struct.pack(">I", 0x12345678)), "12345678")
self.assertEqual(bin(s), "12345678")
self.assertEqual(s.value, 0x12345678)
s = c_uint.__ctype_le__(0x12345678)
self.assertEqual(bin(struct.pack("<I", 0x12345678)), "78563412")
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
def test_endian_longlong(self):
if sys.byteorder == "little":
self.assertIs(c_longlong.__ctype_le__, c_longlong)
self.assertIs(c_longlong.__ctype_be__.__ctype_le__, c_longlong)
else:
self.assertIs(c_longlong.__ctype_be__, c_longlong)
self.assertIs(c_longlong.__ctype_le__.__ctype_be__, c_longlong)
s = c_longlong.__ctype_be__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack(">q", 0x1234567890ABCDEF)), "1234567890ABCDEF")
self.assertEqual(bin(s), "1234567890ABCDEF")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_longlong.__ctype_le__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack("<q", 0x1234567890ABCDEF)), "EFCDAB9078563412")
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_ulonglong.__ctype_be__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack(">Q", 0x1234567890ABCDEF)), "1234567890ABCDEF")
self.assertEqual(bin(s), "1234567890ABCDEF")
self.assertEqual(s.value, 0x1234567890ABCDEF)
s = c_ulonglong.__ctype_le__(0x1234567890ABCDEF)
self.assertEqual(bin(struct.pack("<Q", 0x1234567890ABCDEF)), "EFCDAB9078563412")
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
def test_endian_float(self):
if sys.byteorder == "little":
self.assertIs(c_float.__ctype_le__, c_float)
self.assertIs(c_float.__ctype_be__.__ctype_le__, c_float)
else:
self.assertIs(c_float.__ctype_be__, c_float)
self.assertIs(c_float.__ctype_le__.__ctype_be__, c_float)
s = c_float(math.pi)
self.assertEqual(bin(struct.pack("f", math.pi)), bin(s))
# Hm, what's the precision of a float compared to a double?
self.assertAlmostEqual(s.value, math.pi, places=6)
s = c_float.__ctype_le__(math.pi)
self.assertAlmostEqual(s.value, math.pi, places=6)
self.assertEqual(bin(struct.pack("<f", math.pi)), bin(s))
s = c_float.__ctype_be__(math.pi)
self.assertAlmostEqual(s.value, math.pi, places=6)
self.assertEqual(bin(struct.pack(">f", math.pi)), bin(s))
def test_endian_double(self):
if sys.byteorder == "little":
self.assertIs(c_double.__ctype_le__, c_double)
self.assertIs(c_double.__ctype_be__.__ctype_le__, c_double)
else:
self.assertIs(c_double.__ctype_be__, c_double)
self.assertIs(c_double.__ctype_le__.__ctype_be__, c_double)
s = c_double(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack("d", math.pi)), bin(s))
s = c_double.__ctype_le__(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack("<d", math.pi)), bin(s))
s = c_double.__ctype_be__(math.pi)
self.assertEqual(s.value, math.pi)
self.assertEqual(bin(struct.pack(">d", math.pi)), bin(s))
def test_endian_other(self):
self.assertIs(c_byte.__ctype_le__, c_byte)
self.assertIs(c_byte.__ctype_be__, c_byte)
self.assertIs(c_ubyte.__ctype_le__, c_ubyte)
self.assertIs(c_ubyte.__ctype_be__, c_ubyte)
self.assertIs(c_char.__ctype_le__, c_char)
self.assertIs(c_char.__ctype_be__, c_char)
def test_struct_fields_1(self):
if sys.byteorder == "little":
base = BigEndianStructure
else:
base = LittleEndianStructure
class T(base):
pass
_fields_ = [("a", c_ubyte),
("b", c_byte),
("c", c_short),
("d", c_ushort),
("e", c_int),
("f", c_uint),
("g", c_long),
("h", c_ulong),
("i", c_longlong),
("k", c_ulonglong),
("l", c_float),
("m", c_double),
("n", c_char),
("b1", c_byte, 3),
("b2", c_byte, 3),
("b3", c_byte, 2),
("a", c_int * 3 * 3 * 3)]
T._fields_ = _fields_
<|fim▁hole|> _fields_.append(("x", typ))
class T(base):
pass
self.assertRaises(TypeError, setattr, T, "_fields_", [("x", typ)])
def test_struct_struct(self):
# nested structures with different byteorders
# create nested structures with given byteorders and set memory to data
for nested, data in (
(BigEndianStructure, b'\0\0\0\1\0\0\0\2'),
(LittleEndianStructure, b'\1\0\0\0\2\0\0\0'),
):
for parent in (
BigEndianStructure,
LittleEndianStructure,
Structure,
):
class NestedStructure(nested):
_fields_ = [("x", c_uint32),
("y", c_uint32)]
class TestStructure(parent):
_fields_ = [("point", NestedStructure)]
self.assertEqual(len(data), sizeof(TestStructure))
ptr = POINTER(TestStructure)
s = cast(data, ptr)[0]
del ctypes._pointer_type_cache[TestStructure]
self.assertEqual(s.point.x, 1)
self.assertEqual(s.point.y, 2)
def test_struct_fields_2(self):
# standard packing in struct uses no alignment.
# So, we have to align using pad bytes.
#
# Unaligned accesses will crash Python (on those platforms that
# don't allow it, like sparc solaris).
if sys.byteorder == "little":
base = BigEndianStructure
fmt = ">bxhid"
else:
base = LittleEndianStructure
fmt = "<bxhid"
class S(base):
_fields_ = [("b", c_byte),
("h", c_short),
("i", c_int),
("d", c_double)]
s1 = S(0x12, 0x1234, 0x12345678, 3.14)
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
    def test_unaligned_nonnative_struct_fields(self):
        """With ``_pack_ = 1`` a non-native-endian structure lays its fields
        out back-to-back; the explicit one-byte ``_1``/``_2`` fields mirror
        the 'x' pad bytes in *fmt*."""
        if sys.byteorder == "little":
            base = BigEndianStructure
            fmt = ">b h xi xd"
        else:
            base = LittleEndianStructure
            fmt = "<b h xi xd"

        class S(base):
            _pack_ = 1
            _fields_ = [("b", c_byte),
                        ("h", c_short),
                        ("_1", c_byte),  # explicit pad, matches 'x' in fmt
                        ("i", c_int),
                        ("_2", c_byte),  # explicit pad, matches 'x' in fmt
                        ("d", c_double)]

        s1 = S()
        s1.b = 0x12
        s1.h = 0x1234
        s1.i = 0x12345678
        s1.d = 3.14
        s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
        self.assertEqual(bin(s1), bin(s2))
def test_unaligned_native_struct_fields(self):
if sys.byteorder == "little":
fmt = "<b h xi xd"
else:
base = LittleEndianStructure
fmt = ">b h xi xd"
class S(Structure):
_pack_ = 1
_fields_ = [("b", c_byte),
("h", c_short),
("_1", c_byte),
("i", c_int),
("_2", c_byte),
("d", c_double)]
s1 = S()
s1.b = 0x12
s1.h = 0x1234
s1.i = 0x12345678
s1.d = 3.14
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
if __name__ == "__main__":
    unittest.main()
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import httplib
import logging
import functools
from modularodm.exceptions import ValidationValueError
from framework.exceptions import HTTPError
from framework.analytics import update_counter
from website.addons.osfstorage import settings
logger = logging.getLogger(__name__)
LOCATION_KEYS = ['service', settings.WATERBUTLER_RESOURCE, 'object']
def update_analytics(node, file_id, version_idx):
    """Record a download event for a file and for one of its versions.

    Increments two counters: one keyed on (node, file) for total downloads
    of the file, and one keyed on (node, file, version) for the specific
    version.

    :param Node node: Root node to update
    :param str file_id: The _id field of a filenode
    :param int version_idx: Zero-based version index
    """
    update_counter(u'download:{0}:{1}'.format(node._id, file_id))
    update_counter(u'download:{0}:{1}:{2}'.format(node._id, file_id, version_idx))
def serialize_revision(node, record, version, index, anon=False):
    """Serialize one file version for display in the revisions table.

    :param Node node: Root node
    :param FileRecord record: Root file record
    :param FileVersion version: The version to serialize
    :param int index: Version index; rendered one-based in the output
    :param bool anon: When True, omit the creator's identity
    """
    creator = None if anon else {
        'name': version.creator.fullname,
        'url': version.creator.url,
    }
    return {
        'user': creator,
        'index': index + 1,
        'date': version.date_created.isoformat(),
        'downloads': record.get_download_count(version=index),
        'md5': version.metadata.get('md5'),
        'sha256': version.metadata.get('sha256'),
    }
# Shared, pre-built 503 error raised when the upload service cannot be
# reached; the long message asks the client to retry shortly.
SIGNED_REQUEST_ERROR = HTTPError(
    httplib.SERVICE_UNAVAILABLE,
    data={
        'message_short': 'Upload service unavailable',
        'message_long': (
            'Upload service is not available; please retry '
            'your upload in a moment'
        ),
    },
)
def get_filename(version_idx, file_version, file_record):
    """Build the name for a downloaded file, appending the version's
    creation date unless the requested version is the latest one.

    :param int version_idx: One-based version index
    :param FileVersion file_version: Version to name
    :param FileRecord file_record: Root file object
    """
    is_latest = version_idx == len(file_record.versions)
    if is_latest:
        return file_record.name
    base, extension = os.path.splitext(file_record.name)
    return u'{0}-{1}{2}'.format(
        base,
        file_version.date_created.isoformat(),
        extension,
    )
def validate_location(value):
    """Validate that *value* contains every key required to describe a
    storage location (see LOCATION_KEYS)."""
    missing = [key for key in LOCATION_KEYS if key not in value]
    if missing:
        raise ValidationValueError
def must_be(_type):
    """A small decorator factory for OsfStorageFileNode. Acts as a poor
    man's polymorphic inheritance: it ensures the decorated method only
    runs on instances whose ``kind`` is *_type* ('folder' or 'file').

    :param str _type: Required value of ``self.kind``
    :raises ValueError: When the method is invoked on the wrong kind
    """
    def _must_be(func):
        @functools.wraps(func)
        def wrapped(self, *args, **kwargs):
            # Idiom fix: `!=` instead of `not ... ==`.
            if self.kind != _type:
                raise ValueError('This instance is not a {}'.format(_type))
            return func(self, *args, **kwargs)
        return wrapped
    return _must_be
def copy_files(src, target_settings, parent=None, name=None):
    """Copy the files from *src* to the target node settings, recursing
    into folders.

    :param OsfStorageFileNode src: The source to copy children from
    :param OsfStorageNodeSettings target_settings: The node settings of the
        project to copy files to
    :param OsfStorageFileNode parent: The parent to attach the clone of src
        to, if applicable
    :param str name: Optional new name for the clone; defaults to the
        source's name
    :return: The cloned node
    """
    cloned = src.clone()
    cloned.parent = parent
    cloned.name = name or cloned.name
    cloned.node_settings = target_settings

    if src.is_file:
        # The clone points at the same version documents as the source;
        # versions themselves are not duplicated.
        cloned.versions = src.versions

    cloned.save()

    if src.is_folder:
        for child in src.children:
            copy_files(child, target_settings, parent=cloned)

    return cloned
<|file_name|>network.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
import requests
from bs4 import BeautifulSoup, UnicodeDammit
import time
import os
import re
import log
import tools
class Get(object):
# timeout, retry_interval -> seconds
def __init__(self, url='', timeout=5, retry=5, retry_interval=2, proxies={}, headers={}, download_file=None, savepath='.'):
self.log = log.Log()
<|fim▁hole|> self.retry = retry
self.retry_interval = retry_interval
self.proxies = proxies
self.headers = headers
if download_file is None:
self.download_file = False
else:
self.download_file = download_file
self.savepath = savepath
self.download_result = None
self.__run()
def config(self, url='', timeout=5, retry=5, retry_interval=2, proxies={}, headers={}, download_file=None, savepath=''):
self.url = url
self.timeout = timeout
self.retry = retry
self.retry_interval = retry_interval
if len(proxies) > 0:
self.proxies = proxies
if not download_file is None:
self.download_file = download_file
if len(headers) > 0:
self.headers = headers
if savepath != '':
self.savepath = savepath
self.__run()
    def __run(self):
        # Dispatch: only act when a URL is configured. A fresh session is
        # created per run; download mode streams the body to disk, fetch
        # mode parses the response into a BeautifulSoup object.
        if self.url != '':
            self.s = requests.Session()
            self.__get()
            if self.download_file:
                self.__download()
            else:
                self.__soup()
def __get(self):
if self.url == '':
return False
self.log.info('start get [%s]'%self.url)
self.r = None
for i in range(self.retry):
try:
self.r = self.s.get(self.url, timeout=self.timeout, proxies=self.proxies, headers=self.headers)
break
except Exception as e:
self.log.error( '[retry %d] get [%s] fail. except [%s]'%(i+1, self.url, str(e)) )
time.sleep(self.retry_interval)
if self.r is None:
self.log.error('get [%s] fail' % self.url)
return False
self.log.info('end, get [%s]' % self.url)
return True
def __soup(self):
if not self.r:
self.log.error('self.r is None, cannot get soup. url [%s]' % self.url)
return False
if self.download_file:
self.log.info('to download url [%s], should not get soup' % self.url)
return False
self.soup = None
try:
self.soup = BeautifulSoup(self.r.content, 'html.parser')
return True
except Exception as e:
log.error('contruct BeautifulSoup fail, url [%s], except [%s]' & (self.url, str(e)))
return False
    def __download(self):
        """Stream ``self.r`` to disk under ``self.savepath``.

        Sets and returns ``self.download_result``, a dict of
        ``{'errno': flag, 'filename': name}`` where flag is
        0 = ok / size unverifiable, 3 = write exception, 4 = size mismatch.
        """
        self.log.info('start download [%s]' % self.url)
        if self.r is None:
            self.log.error('self.r is None. download fail. url [%s]' % self.url)
            return False
        filepath = self.savepath
        tools.mkdir(filepath)
        r = self.r
        url = self.url
        # Read the expected size from the content-length header (0 = unknown,
        # in which case the size check below is skipped).
        tot_size = 0
        try:
            tot_size = int( r.headers['content-length'] )
        except Exception as e:
            self.log.error('cannot get content-length, url [%s], headers [%s]' % (url, str(r.headers)) )
        # get file name
        filename = self.__get_file_name()
        chunk_size = 4096
        flag = 3
        # retry if size is not right.
        for i in range(3):
            now_size = 0
            try:
                #print filename, type(filename)
                with open( os.path.join(self.savepath, filename), 'wb' ) as f:
                    for chunk in r.iter_content(chunk_size):
                        now_size = now_size + len(chunk)
                        f.write(chunk)
            except Exception as e:
                # NOTE(review): ``unicode`` is Python 2 only; this module
                # appears to target Python 2 -- confirm before porting.
                self.log.error(u'something wrong. url [%s], exception [%s], 文件名 [%s], retry [%d]' % (url, unicode(e), filename, i+1) )
                flag = 3
            if tot_size == 0:
                # Size unknown: accept the download without verification.
                self.log.info(u'获取文件size失败,无法校验。 获取的文件大小 [%d], 文件名 [%s], url [%s]' % (now_size, filename, url) )
                flag = 0
                break
            if now_size != tot_size:
                # Size mismatch: keep retrying (up to 3 attempts total).
                self.log.error('文件size不正确. 获取的文件大小 [%d], 正确文件大小 [%d], 文件名 [%s], url[%s], retry[%d]' % (now_size, tot_size, filename.encode('utf8'), url, i+1))
                flag = 4
            else:
                flag = 0
                break
            time.sleep(1)
        self.log.info('end download [%s]' % self.url)
        self.download_result = {'errno': flag, 'filename': filename}
        return self.download_result
    def __get_file_name(self):
        """Derive a unique, filesystem-safe file name for the download.

        Precedence for name/suffix: Content-Disposition header, then the
        last URL path segment (suffix falling back to content-type), then
        a timestamp. A ``(N)`` counter is appended on name collisions.
        """
        # Derive the file suffix from the content-type header.
        r = self.r
        url = self.url
        suf = ''
        try:
            ct = r.headers['content-type']
            ctl = ct.split(';')
            for i in ctl:
                try:
                    # NOTE(review): ``constant`` is not among this module's
                    # imports, so this lookup always raises NameError and is
                    # silently swallowed -- the content-type suffix mapping
                    # never takes effect. Verify the missing import.
                    suf = constant.CONTENT_TYPE_REVERSE[i.strip()]
                except Exception as e:
                    pass
        except Exception as e:
            self.log.error('cannot get suffix, url[%s], headers [%s]' % (url, str(r.headers)))
        # Try Content-Disposition; a file name/suffix found there takes
        # priority over everything else.
        try:
            content_disposition = r.headers['Content-Disposition']
            fntmp = re.findall(r'filename=[\"\'](.*?)[\"\']', content_disposition)[0]
            pos = fntmp.rfind('.')
            if pos > -1:
                fn = fntmp[:pos]
                suf = fntmp[pos:]
            else:
                fn = fntmp
            # NOTE(review): ``filename`` is referenced before assignment
            # here, so this branch raises NameError, is swallowed by the
            # enclosing except, and the Content-Disposition file name is
            # effectively discarded (only ``suf`` survives). Likely a bug.
            if filename is None:
                filename = fn
            dammit = UnicodeDammit(filename, ['utf-8', 'gb2312', 'gbk'])
            filename = dammit.unicode_markup
        except Exception as e:
            pass
        # Fall back to the file name (and suffix) from the URL path.
        pos = url.rfind("/") + 1
        if pos >= len(url) or pos == -1:
            # No path segment: use a timestamp-derived name.
            fn = str(time.time()).replace(".", "")
        else:
            fn = url[pos:]
        pos = fn.rfind('.')
        if pos >= len(fn) or pos == -1:
            pass
        else:
            if suf == '':
                suf = fn[pos:]
            try:
                fn = fn[:pos]
            except Exception as e:
                pass
        filename = fn
        # Normalize the encoding (handles utf-8/gb2312/gbk sources).
        dammit = UnicodeDammit(filename, ['utf-8', 'gb2312', 'gbk'])
        filename = dammit.unicode_markup
        # De-duplicate against existing files by appending "(N)".
        i = 0
        while True:
            if i == 0:
                if not os.path.exists( os.path.join(self.savepath, filename+suf) ):
                    break
            else:
                if not os.path.exists( os.path.join(self.savepath, filename+("(%d)"%i)+suf ) ):
                    filename = filename + ("(%d)"%i)
                    break
            i = i + 1
        filename = filename + suf
        # Strip characters that are illegal in file names (Windows).
        filename = tools.replaceBanCharacter(filename)
        return filename
    def download(self, url, savepath=''):
        """Switch into download mode for *url* and run the download now.

        Returns the ``{'errno', 'filename'}`` dict from ``__download``.
        """
        self.url = url
        self.download_file = True
        if savepath != '':
            self.savepath = savepath
        return self.__download()

    def get(self):
        """Return the raw requests response object (``self.r``)."""
        return self.r

    def soup(self):
        # NOTE(review): ``__soup`` assigns the instance attribute
        # ``self.soup``, which shadows this method on instances after a
        # fetch -- ``obj.soup`` then yields the BeautifulSoup object (or
        # None), not this method. Before any fetch, this returns the bound
        # method itself. Verify intended usage.
        return self.soup

    def getsoup(self):
        """Return the (response, soup) pair."""
        return (self.r, self.soup)

    def clear_headers(self):
        """Forget any configured HTTP headers."""
        self.headers = {}

    def clear_proxies(self):
        """Forget any configured proxies."""
        self.proxies = {}

    def stop(self):
        """Shut down the underlying logger."""
        self.log.stop()
def __del__(self):
self.stop()<|fim▁end|> | self.url = url
self.timeout = timeout |
<|file_name|>gce_instance_template.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gce_instance_template
version_added: "2.3"
short_description: create or destroy instance templates of Compute Engine of GCP.
description:
- Creates or destroy Google instance templates
of Compute Engine of Google Cloud Platform.
options:
state:
description:
- The desired state for the instance template.
default: "present"
choices: ["present", "absent"]
name:
description:
- The name of the GCE instance template.
required: true
default: null
size:
description:
- The desired machine type for the instance template.
default: "f1-micro"
source:
description:
- A source disk to attach to the instance.
Cannot specify both I(image) and I(source).
default: null
image:
description:
- The image to use to create the instance.
Cannot specify both both I(image) and I(source).
default: null
image_family:
description:
- The image family to use to create the instance.
If I(image) has been used I(image_family) is ignored.
Cannot specify both I(image) and I(source).
default: null
disk_type:
description:
- Specify a C(pd-standard) disk or C(pd-ssd)
for an SSD disk.
default: pd-standard
disk_auto_delete:
description:
- Indicate that the boot disk should be
deleted when the Node is deleted.
default: true
network:
description:
- The network to associate with the instance.
default: "default"
subnetwork:
description:
- The Subnetwork resource name for this instance.
default: null
can_ip_forward:
description:
- Set to True to allow instance to
send/receive non-matching src/dst packets.
default: false
external_ip:
description:
- The external IP address to use.
If C(ephemeral), a new non-static address will be
used. If C(None), then no external address will
be used. To use an existing static IP address
specify address name.
default: "ephemeral"
service_account_email:
description:
- service account email
default: null
service_account_permissions:
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
default: null
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
automatic_restart:
description:
- Defines whether the instance should be
automatically restarted when it is
terminated by Compute Engine.
default: null
preemptible:
description:
- Defines whether the instance is preemptible.
default: null
tags:
description:
- a comma-separated list of tags to associate with the instance
default: null
metadata:
description:
- a hash/dictionary of custom data for the instance;
'{"key":"value", ...}'
default: null
description:
description:
- description of instance template
default: null
disks:
description:
- a list of persistent disks to attach to the instance; a string value
gives the name of the disk; alternatively, a dictionary value can
define 'name' and 'mode' ('READ_ONLY' or 'READ_WRITE'). The first entry
will be the boot disk (which must be READ_WRITE).
default: null
nic_gce_struct:
description:
- Support passing in the GCE-specific
formatted networkInterfaces[] structure.
default: null
disks_gce_struct:
description:
- Support passing in the GCE-specific
formatted formatted disks[] structure. Case sensitive.
see U(https://cloud.google.com/compute/docs/reference/latest/instanceTemplates#resource) for detailed information
default: null
version_added: "2.4"
project_id:
description:
- your GCE project ID
default: null
pem_file:
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
default: null
credentials_file:
description:
- path to the JSON file associated with the service account email
default: null
subnetwork_region:
version_added: "2.4"
description:
- Region that subnetwork resides in. (Required for subnetwork to successfully complete)
default: null
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials,
>= 0.20.0 if using preemptible option"
notes:
- JSON credentials strongly preferred.
author: "Gwenael Pellen (@GwenaelPellenArkeup) <[email protected]>"
'''
EXAMPLES = '''
# Usage
- name: create instance template named foo
gce_instance_template:
name: foo
size: n1-standard-1
image_family: ubuntu-1604-lts
state: present
project_id: "your-project-name"
credentials_file: "/path/to/your-key.json"
service_account_email: "[email protected]"
# Example Playbook
- name: Compute Engine Instance Template Examples<|fim▁hole|> project_id: "your-project-name"
tasks:
- name: create instance template
gce_instance_template:
name: my-test-instance-template
size: n1-standard-1
image_family: ubuntu-1604-lts
state: present
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
- name: delete instance template
gce_instance_template:
name: my-test-instance-template
size: n1-standard-1
image_family: ubuntu-1604-lts
state: absent
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
# Example playbook using disks_gce_struct
- name: Compute Engine Instance Template Examples
hosts: localhost
vars:
service_account_email: "[email protected]"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create instance template
gce_instance_template:
name: foo
size: n1-standard-1
state: present
project_id: "{{ project_id }}"
credentials_file: "{{ credentials_file }}"
service_account_email: "{{ service_account_email }}"
disks_gce_struct:
- device_name: /dev/sda
boot: true
autoDelete: true
initializeParams:
diskSizeGb: 30
diskType: pd-ssd
sourceImage: projects/debian-cloud/global/images/family/debian-8
'''
RETURN = '''
'''
import traceback
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
try:
import libcloud
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
from libcloud.compute.drivers.gce import GCEAddress
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gce import gce_connect
from ansible.module_utils._text import to_native
def get_info(inst):
    """Return a serializable summary of an instance template: its name and
    the driver-provided ``extra`` metadata."""
    info = {
        'name': inst.name,
        'extra': inst.extra,
    }
    return info
def create_instance_template(module, gce):
    """Create an instance template (idempotent: a template that already
    exists is returned unchanged).

    module : AnsibleModule object
    gce: authenticated GCE libcloud driver
    Returns:
        (changed, instance template information, name)
    """
    # get info from module
    name = module.params.get('name')
    size = module.params.get('size')
    source = module.params.get('source')
    image = module.params.get('image')
    image_family = module.params.get('image_family')
    disk_type = module.params.get('disk_type')
    disk_auto_delete = module.params.get('disk_auto_delete')
    network = module.params.get('network')
    subnetwork = module.params.get('subnetwork')
    subnetwork_region = module.params.get('subnetwork_region')
    can_ip_forward = module.params.get('can_ip_forward')
    external_ip = module.params.get('external_ip')
    service_account_permissions = module.params.get(
        'service_account_permissions')
    # NOTE(review): 'on_host_maintenance' is not declared in main()'s
    # argument_spec, so this is always None -- confirm whether the option
    # was meant to be exposed.
    on_host_maintenance = module.params.get('on_host_maintenance')
    automatic_restart = module.params.get('automatic_restart')
    preemptible = module.params.get('preemptible')
    tags = module.params.get('tags')
    metadata = module.params.get('metadata')
    description = module.params.get('description')
    disks_gce_struct = module.params.get('disks_gce_struct')
    changed = False

    # Defaults for ex_create_instancetemplate; module parameters override
    # these entries below.
    gce_args = dict(
        name="instance",
        size="f1-micro",
        source=None,
        image=None,
        disk_type='pd-standard',
        disk_auto_delete=True,
        network='default',
        subnetwork=None,
        can_ip_forward=None,
        external_ip='ephemeral',
        service_accounts=None,
        on_host_maintenance=None,
        automatic_restart=None,
        preemptible=None,
        tags=None,
        metadata=None,
        description=None,
        disks_gce_struct=None,
        nic_gce_struct=None
    )

    gce_args['name'] = name
    gce_args['size'] = size

    if source is not None:
        gce_args['source'] = source

    # Image precedence: explicit image > image_family > "debian-8".
    if image:
        gce_args['image'] = image
    else:
        if image_family:
            image = gce.ex_get_image_from_family(image_family)
            gce_args['image'] = image
        else:
            gce_args['image'] = "debian-8"

    gce_args['disk_type'] = disk_type
    gce_args['disk_auto_delete'] = disk_auto_delete

    gce_network = gce.ex_get_network(network)
    gce_args['network'] = gce_network

    if subnetwork is not None:
        gce_args['subnetwork'] = gce.ex_get_subnetwork(subnetwork, region=subnetwork_region)

    if can_ip_forward is not None:
        gce_args['can_ip_forward'] = can_ip_forward

    # external_ip: "ephemeral" passes through, "none" disables the external
    # address, anything else is resolved as a static-address name (falling
    # back to the raw string when lookup fails).
    if external_ip == "ephemeral":
        instance_external_ip = external_ip
    elif external_ip == "none":
        instance_external_ip = None
    else:
        try:
            instance_external_ip = gce.ex_get_address(external_ip)
        except GoogleBaseError as err:
            # external_ip is name ?
            instance_external_ip = external_ip
    gce_args['external_ip'] = instance_external_ip

    # Validate requested scopes against the driver's known scope map and
    # attach them to the default service account.
    ex_sa_perms = []
    bad_perms = []
    if service_account_permissions:
        for perm in service_account_permissions:
            if perm not in gce.SA_SCOPES_MAP:
                bad_perms.append(perm)
        if len(bad_perms) > 0:
            module.fail_json(msg='bad permissions: %s' % str(bad_perms))
        ex_sa_perms.append({'email': "default"})
        ex_sa_perms[0]['scopes'] = service_account_permissions
    gce_args['service_accounts'] = ex_sa_perms

    if on_host_maintenance is not None:
        gce_args['on_host_maintenance'] = on_host_maintenance

    if automatic_restart is not None:
        gce_args['automatic_restart'] = automatic_restart

    if preemptible is not None:
        gce_args['preemptible'] = preemptible

    if tags is not None:
        gce_args['tags'] = tags

    if disks_gce_struct is not None:
        gce_args['disks_gce_struct'] = disks_gce_struct

    # Try to convert the user's metadata value into the format expected
    # by GCE. First try to ensure user has proper quoting of a
    # dictionary-like syntax using 'literal_eval', then convert the python
    # dict into a python list of 'key' / 'value' dicts. Should end up
    # with:
    # [ {'key': key1, 'value': value1}, {'key': key2, 'value': value2}, ...]
    if metadata:
        if isinstance(metadata, dict):
            md = metadata
        else:
            try:
                md = literal_eval(str(metadata))
                if not isinstance(md, dict):
                    raise ValueError('metadata must be a dict')
            except ValueError as e:
                module.fail_json(msg='bad metadata: %s' % str(e))
            except SyntaxError as e:
                module.fail_json(msg='bad metadata syntax')

        # Old libcloud releases expect the items-list wire format.
        if hasattr(libcloud, '__version__') and libcloud.__version__ < '0.15':
            items = []
            for k, v in md.items():
                items.append({"key": k, "value": v})
            metadata = {'items': items}
        else:
            metadata = md
    gce_args['metadata'] = metadata

    if description is not None:
        gce_args['description'] = description

    # Idempotency: only create the template when the lookup says it does
    # not exist yet.
    instance = None
    try:
        instance = gce.ex_get_instancetemplate(name)
    except ResourceNotFoundError:
        try:
            instance = gce.ex_create_instancetemplate(**gce_args)
            changed = True
        except GoogleBaseError as err:
            module.fail_json(
                msg='Unexpected error attempting to create instance {}, error: {}'
                .format(
                    instance,
                    err.value
                )
            )

    if instance:
        json_data = get_info(instance)
    else:
        module.fail_json(msg="no instance template!")

    return (changed, json_data, name)
def delete_instance_template(module, gce):
    """Delete an instance template if it exists.

    module : AnsibleModule object
    gce: authenticated GCE libcloud driver
    Returns:
        (changed, result payload, name). When the template is absent,
        the payload carries the lookup error message and changed is False.
    """
    name = module.params.get('name')
    current_state = "absent"
    changed = False

    # Probe for the template; a driver error here is interpreted as
    # "template does not exist" and recorded in the payload.
    instance = None
    try:
        instance = gce.ex_get_instancetemplate(name)
        current_state = "present"
    except GoogleBaseError as e:
        json_data = dict(msg='instance template not exists: %s' % to_native(e),
                         exception=traceback.format_exc())

    if current_state == "present":
        rc = instance.destroy()
        if rc:
            changed = True
        else:
            module.fail_json(
                msg='instance template destroy failed'
            )
        # Successful deletion returns an empty payload.
        json_data = {}

    return (changed, json_data, name)
def module_controller(module, gce):
    """Control module state parameter.

    module : AnsibleModule object
    gce: authenticated GCE libcloud driver
    Exits the AnsibleModule with the outcome of the create/delete action
    selected by the ``state`` parameter.
    """
    handlers = {
        'present': create_instance_template,
        'absent': delete_instance_template,
    }
    json_output = {}
    state = module.params.get('state')
    handler = handlers.get(state)
    if handler is not None:
        changed, output, _name = handler(module, gce)
        json_output['changed'] = changed
        json_output['msg'] = output
    module.exit_json(**json_output)
def check_if_system_state_would_be_changed(module, gce):
    """Report what running the module would do (check mode).

    module : AnsibleModule object
    gce: authenticated GCE libcloud driver
    Returns:
        (changed, human-readable description of the pending action)
    """
    changed = False
    current_state = "absent"
    state = module.params.get("state")
    name = module.params.get("name")

    try:
        gce.ex_get_instancetemplate(name)
        current_state = "present"
    except ResourceNotFoundError:
        # Bug fix: a missing template simply means the current state is
        # "absent"; the original fell through to fail_json here, aborting
        # check mode instead of reporting "would be created".
        pass
    except GoogleBaseError as e:
        module.fail_json(msg='GCE get instancetemplate problem: %s' % to_native(e),
                         exception=traceback.format_exc())

    if current_state != state:
        changed = True

    if changed:
        if current_state == "absent":
            output = 'instance template {0} will be created'.format(name)
        else:
            output = 'instance template {0} will be destroyed'.format(name)
    else:
        output = 'nothing to do for instance template {0} '.format(name)

    return (changed, output)
def main():
    """Entry point: parse module arguments, connect to GCE, and either
    report (check mode) or apply the requested state."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(choices=['present', 'absent'], default='present'),
            # Bug fix: the AnsibleModule keyword is 'required', not
            # 'require'; the typo made 'name' silently optional.
            name=dict(required=True, aliases=['base_name']),
            size=dict(default='f1-micro'),
            source=dict(),
            image=dict(),
            image_family=dict(default='debian-8'),
            disk_type=dict(choices=['pd-standard', 'pd-ssd'], default='pd-standard', type='str'),
            disk_auto_delete=dict(type='bool', default=True),
            network=dict(default='default'),
            subnetwork=dict(),
            can_ip_forward=dict(type='bool', default=False),
            external_ip=dict(default='ephemeral'),
            service_account_email=dict(),
            service_account_permissions=dict(type='list'),
            automatic_restart=dict(type='bool', default=None),
            preemptible=dict(type='bool', default=None),
            tags=dict(type='list'),
            metadata=dict(),
            description=dict(),
            disks=dict(type='list'),
            nic_gce_struct=dict(type='list'),
            project_id=dict(),
            pem_file=dict(type='path'),
            credentials_file=dict(type='path'),
            subnetwork_region=dict(),
            disks_gce_struct=dict(type='list')
        ),
        mutually_exclusive=[['source', 'image']],
        required_one_of=[['image', 'image_family']],
        supports_check_mode=True
    )

    if not HAS_PYTHON26:
        module.fail_json(
            msg="GCE module requires python's 'ast' module, python v2.6+")
    if not HAS_LIBCLOUD:
        module.fail_json(
            msg='libcloud with GCE support (0.17.0+) required for this module')

    try:
        gce = gce_connect(module)
    except GoogleBaseError as e:
        module.fail_json(msg='GCE Connexion failed %s' % to_native(e), exception=traceback.format_exc())

    if module.check_mode:
        # Check mode: describe what would change, without touching GCE.
        (changed, output) = check_if_system_state_would_be_changed(module, gce)
        module.exit_json(
            changed=changed,
            msg=output
        )
    else:
        module_controller(module, gce)
if __name__ == '__main__':
    main()
<|file_name|>tactic.py<|end_file_name|><|fim▁begin|>"""ThreatConnect TI Email"""
# standard library
from typing import TYPE_CHECKING
# first-party
from tcex.api.tc.v2.threat_intelligence.mappings.group.group import Group
if TYPE_CHECKING:
# first-party
from tcex.api.tc.v2.threat_intelligence.threat_intelligence import ThreatIntelligence
class Tactic(Group):
    """Unique API calls for Tactic API Endpoints

    Args:
        ti (ThreatIntelligence): An instance of the ThreatIntelligence Class.
        name (str, kwargs): [Required for Create] The name for this Group.
        owner (str, kwargs): The name for this Group. Default to default Org when not provided
    """

    def __init__(self, ti: 'ThreatIntelligence', **kwargs):
        """Initialize Class properties."""
        super().__init__(ti, sub_type='Tactic', api_entity='tactic', api_branch='tactics', **kwargs)
<|file_name|>optimize.go<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 Oliver Eilhard. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.
package elastic
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"bosun.org/_third_party/gopkg.in/olivere/elastic.v3/uritemplates"
)
type OptimizeService struct {
client *Client
indices []string<|fim▁hole|> maxNumSegments *int
onlyExpungeDeletes *bool
flush *bool
waitForMerge *bool
force *bool
pretty bool
}
// NewOptimizeService creates a new OptimizeService bound to the given client.
func NewOptimizeService(client *Client) *OptimizeService {
	return &OptimizeService{
		client:  client,
		indices: make([]string, 0),
	}
}
// Index adds one or more indices to be optimized.
func (s *OptimizeService) Index(indices ...string) *OptimizeService {
	s.indices = append(s.indices, indices...)
	return s
}

// MaxNumSegments sets the max_num_segments request parameter.
func (s *OptimizeService) MaxNumSegments(maxNumSegments int) *OptimizeService {
	s.maxNumSegments = &maxNumSegments
	return s
}

// OnlyExpungeDeletes sets the only_expunge_deletes request parameter.
func (s *OptimizeService) OnlyExpungeDeletes(onlyExpungeDeletes bool) *OptimizeService {
	s.onlyExpungeDeletes = &onlyExpungeDeletes
	return s
}

// Flush sets the flush request parameter.
func (s *OptimizeService) Flush(flush bool) *OptimizeService {
	s.flush = &flush
	return s
}

// WaitForMerge sets the wait_for_merge request parameter.
func (s *OptimizeService) WaitForMerge(waitForMerge bool) *OptimizeService {
	s.waitForMerge = &waitForMerge
	return s
}

// Force sets the force request parameter.
func (s *OptimizeService) Force(force bool) *OptimizeService {
	s.force = &force
	return s
}

// Pretty asks Elasticsearch to return indented JSON.
func (s *OptimizeService) Pretty(pretty bool) *OptimizeService {
	s.pretty = pretty
	return s
}
// Do executes the optimize request and decodes the JSON response.
func (s *OptimizeService) Do() (*OptimizeResult, error) {
	// Build url
	path := "/"

	// Indices part: each index name is URL-escaped through the
	// uritemplates expander, then joined into a comma-separated list.
	indexPart := make([]string, 0)
	for _, index := range s.indices {
		index, err := uritemplates.Expand("{index}", map[string]string{
			"index": index,
		})
		if err != nil {
			return nil, err
		}
		indexPart = append(indexPart, index)
	}
	if len(indexPart) > 0 {
		path += strings.Join(indexPart, ",")
	}

	path += "/_optimize"

	// Parameters: only options that were explicitly set (non-nil
	// pointers) are added to the query string.
	params := make(url.Values)
	if s.maxNumSegments != nil {
		params.Set("max_num_segments", fmt.Sprintf("%d", *s.maxNumSegments))
	}
	if s.onlyExpungeDeletes != nil {
		params.Set("only_expunge_deletes", fmt.Sprintf("%v", *s.onlyExpungeDeletes))
	}
	if s.flush != nil {
		params.Set("flush", fmt.Sprintf("%v", *s.flush))
	}
	if s.waitForMerge != nil {
		params.Set("wait_for_merge", fmt.Sprintf("%v", *s.waitForMerge))
	}
	if s.force != nil {
		params.Set("force", fmt.Sprintf("%v", *s.force))
	}
	if s.pretty {
		params.Set("pretty", fmt.Sprintf("%v", s.pretty))
	}

	// Get response
	res, err := s.client.PerformRequest("POST", path, params, nil)
	if err != nil {
		return nil, err
	}

	// Return result
	ret := new(OptimizeResult)
	if err := json.Unmarshal(res.Body, ret); err != nil {
		return nil, err
	}
	return ret, nil
}
// -- Result of an optimize request.
type OptimizeResult struct {
Shards shardsInfo `json:"_shards,omitempty"`
}<|fim▁end|> | |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>"""
Discounts application configuration
"""
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from django.apps import AppConfig
class DiscountsConfig(AppConfig):
    """App configuration for the discounts feature."""

    name = 'openedx.features.discounts'
<|file_name|>acl.go<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//
// Contributor: Julien Vehent [email protected] [:ulfr]
package mig /* import "mig.ninja/mig" */
import (
"fmt"
"strings"
)
// ACL is an ordered list of Permission entries.
type ACL []Permission

// Permission maps a permission name to its signing policy: the minimum
// combined signature weight required, and the investigators (keyed by
// name) whose PGP fingerprint carries a given weight.
type Permission map[string]struct {
	MinimumWeight int
	Investigators map[string]struct {
		Fingerprint string
		Weight      int
	}
}
// verifyPermission controls that the PGP keys, identified by their fingerprints, that
// signed an operation are sufficient to allow this operation to run
func verifyPermission(operation Operation, permName string, perm Permission, fingerprints []string) (err error) {
if perm[permName].MinimumWeight < 1 {
return fmt.Errorf("Invalid permission '%s'. Must require at least 1 signature, has %d",
permName, perm[permName].MinimumWeight)
}
var seenFp []string
signaturesWeight := 0
for _, fp := range fingerprints {
// if the same key is used to sign multiple times, return an error
for _, seen := range seenFp {
if seen == fp {
return fmt.Errorf("Permission violation: key id '%s' used to sign multiple times.", fp)
}
}
for _, signer := range perm[permName].Investigators {
if strings.ToUpper(fp) == strings.ToUpper(signer.Fingerprint) {
signaturesWeight += signer.Weight
}<|fim▁hole|> }
if signaturesWeight < perm[permName].MinimumWeight {
return fmt.Errorf("Permission denied for operation '%s'. Insufficient signatures weight. Need %d, got %d",
operation.Module, perm[permName].MinimumWeight, signaturesWeight)
}
return
}<|fim▁end|> | }
seenFp = append(seenFp, fp) |
<|file_name|>CRLList.tsx<|end_file_name|><|fim▁begin|>import PropTypes from "prop-types";
import React from "react";
import { connect } from "react-redux";
import { loadAllCertificates } from "../../AC";
import accordion from "../../decorators/accordion";
import { filteredCrlsSelector } from "../../selectors/crlsSelectors";
import ProgressBars from "../ProgressBars";
import CRLListItem from "./CRLListItem";
// Props for CRLList, injected by connect() and the accordion decorator.
// NOTE(review): the component also destructures props not declared here
// (crls, selectedCert, operation, toggleOpenItem, isItemOpened) -- the
// interface appears incomplete; verify against the decorators' contracts.
interface ICRLListProps {
    activeCert: (certificate: any) => void;
    certificates: any;
    isLoaded: boolean;
    isLoading: boolean;
    loadAllCertificates: () => void;
    verifyCertificate: (id: number) => void;
}
class CRLList extends React.Component<ICRLListProps, {}> {
static contextTypes = {
locale: PropTypes.string,
localize: PropTypes.func,
};
    componentDidMount() {
        // tslint:disable-next-line:no-shadowed-variable
        const { isLoaded, isLoading, loadAllCertificates } = this.props;

        // Fetch certificates only when no load has started or finished yet.
        if (!isLoading && !isLoaded) {
            loadAllCertificates();
        }

        // Initialize Materialize's accordion behavior on the rendered list.
        $(".collapsible").collapsible();
    }
  render() {
    const { isLoading } = this.props;
    // Show a progress indicator until the CRL data has been fetched.
    if (isLoading) {
      return <ProgressBars />;
    }
    // Materialize accordion markup; items are produced by getCollapsibleBody().
    return (
      <React.Fragment>
        <ul className="collapsible" data-collapsible="accordion">
          {this.getCollapsibleBody()}
        </ul>
      </React.Fragment>
    );
  }
getCollapsibleBody() {
const { crls, activeCert, selectedCert, operation, toggleOpenItem, isItemOpened } = this.props;
if (!crls || crls.length === 0) {
return null;
}
const elements = crls.map((crl: any) =>
<CRLListItem
key={crl.id}
crl={crl}
chooseCert={() => activeCert(crl)}<|fim▁hole|>
return (
<li>
<div className="collapsible-header color">
<i className="material-icons left intermediate" />
Список отзыва сертификатов
</div>
<div className="collapsible-body">
{elements}
</div>
</li>
);
}
}
export default connect((state) => {
return {
crls: filteredCrlsSelector(state),
isLoaded: state.crls.loaded,
isLoading: state.crls.loading,
};
}, { loadAllCertificates })(accordion(CRLList));<|fim▁end|> | operation={operation}
selectedCert={() => selectedCert(crl)}
isOpen={isItemOpened(crl.id.toString())}
toggleOpen={toggleOpenItem(crl.id.toString())} />); |
<|file_name|>json.py<|end_file_name|><|fim▁begin|>try:
import simplejson as json
except ImportError:
import json
import functools
from cgi import parse_header
def wrap_json(func=None, *, encoder=json.JSONEncoder, preserve_raw_body=False):
    """Decode JSON request bodies and encode JSON responses.

    Kept only for backward compatibility: it simply stacks
    ``wrap_json_body`` and ``wrap_json_response``.  The response side is
    limited to ``list``/``dict`` bodies even though JSON allows other
    top-level values, so prefer using the two wrappers directly.
    """
    if func is None:
        # Decorator was invoked with keyword options; defer until the
        # wrapped function is supplied.
        return functools.partial(
            wrap_json,
            encoder=encoder,
            preserve_raw_body=preserve_raw_body,
        )
    return wrap_json_response(
        wrap_json_body(func, preserve_raw_body=preserve_raw_body),
        encoder=encoder,
    )
def wrap_json_body(func=None, *, preserve_raw_body=False):
    """Replace ``request.body`` with the parsed JSON payload.

    Only requests whose Content-Type is ``application/json`` are parsed;
    an empty body becomes ``None``.  When *preserve_raw_body* is true the
    untouched bytes are kept on ``request.raw_body``.
    """
    if func is None:
        # Invoked with options only; wait for the handler.
        return functools.partial(wrap_json_body,
                                 preserve_raw_body=preserve_raw_body)

    @functools.wraps(func)
    def wrapper(request, *args, **kwargs):
        content_type, _params = parse_header(
            request.headers.get('Content-Type', ''))
        if preserve_raw_body:
            request.raw_body = request.body
        if content_type == "application/json":
            raw = request.body
            request.body = json.loads(raw.decode("utf-8")) if raw else None
        return func(request, *args, **kwargs)
    return wrapper
def wrap_json_params(func):
    """Parse a JSON request body and attach it as ``request.params``.

    Unlike :func:`wrap_json_body`, the original ``request.body`` is left
    untouched.  Non-JSON requests pass through without a ``params``
    attribute being set.
    """
    @functools.wraps(func)
    def wrapper(request, *args, **kwargs):
        content_type, _params = parse_header(
            request.headers.get('Content-Type', ''))
        if content_type == "application/json":
            raw = request.body
            request.params = json.loads(raw.decode("utf-8")) if raw else None
        return func(request, *args, **kwargs)
    return wrapper
def wrap_json_response(func=None, *, encoder=json.JSONEncoder):
"""
A middleware that encodes in json the response body in case
of that the "Content-Type" header is "application/json".
This middlware accepts and optional `encoder` parameter, that
allow to the user specify its own json encoder class.
"""
<|fim▁hole|>
@functools.wraps(func)
def wrapper(request, *args, **kwargs):
response = func(request, *args, **kwargs)
if "Content-Type" in response.headers and response.headers['Content-Type'] is not None:
ctype, pdict = parse_header(response.headers.get('Content-Type', ''))
if ctype == "application/json" and (isinstance(response.body, dict) or isinstance(response.body, list)):
response.body = json.dumps(response.body, cls=encoder)
return response
return wrapper<|fim▁end|> | if func is None:
return functools.partial(wrap_json_response, encoder=encoder) |
<|file_name|>operator.py<|end_file_name|><|fim▁begin|>def keysetter(key):
if not isinstance(key, str):
raise TypeError('key name must be a string')
resolve = key.split('.')
head, last = tuple(resolve[:-1]), resolve[-1]
def g(obj,value):
for key in head :
obj = obj[key]
obj[last] = value
return g
def keygetter(key):
    """Return a callable that resolves the dotted *key* path on a mapping.

    Mirrors :func:`keysetter`; traversal is delegated to ``resolve_key``.
    Raises TypeError when *key* is not a string.
    """
    if not isinstance(key, str):
        raise TypeError('key name must be a string')
    return lambda obj : resolve_key(obj, key)
def resolve_key(obj, key):<|fim▁hole|> return obj<|fim▁end|> | for name in key.split('.'):
obj = obj[name] |
<|file_name|>kt_main.js<|end_file_name|><|fim▁begin|>/* Initialize
*/
// User-agent sniffing helpers: each method returns the regex match
// (truthy array) when the browser matches that platform, otherwise null.
var isMobile = {
    Android: function() {
        return navigator.userAgent.match(/Android/i);
    },
    BlackBerry: function() {
        return navigator.userAgent.match(/BlackBerry/i);
    },
    iOS: function() {
        return navigator.userAgent.match(/iPhone|iPad|iPod/i);
    },
    Opera: function() {
        return navigator.userAgent.match(/Opera Mini/i);
    },
    Windows: function() {
        return navigator.userAgent.match(/IEMobile/i);
    },
    // Truthy when any of the known mobile platforms matched.
    any: function() {
        return (isMobile.Android() || isMobile.BlackBerry() || isMobile.iOS() || isMobile.Opera() || isMobile.Windows());
    }
};
jQuery(document).ready(function ($) {
// Bootstrap Init
$("[rel=tooltip]").tooltip();
$('[data-toggle=tooltip]').tooltip();
$("[rel=popover]").popover();
$('#authorTab a').click(function (e) {e.preventDefault(); $(this).tab('show'); });
$('.sc_tabs a').click(function (e) {e.preventDefault(); $(this).tab('show'); });
$(".videofit").fitVids();
$(".embed-youtube").fitVids();
$('.kad-select').customSelect();
$('.woocommerce-ordering select').customSelect();
$('.collapse-next').click(function (e) {
//e.preventDefault();
var $target = $(this).siblings('.sf-dropdown-menu');
if($target.hasClass('in') ) {
$target.collapse('toggle');
$(this).removeClass('toggle-active');
} else {
$target.collapse('toggle');
$(this).addClass('toggle-active');
}
});
// Lightbox
function kt_check_images( index, element ) {
return /(png|jpg|jpeg|gif|tiff|bmp)$/.test(
$( element ).attr( 'href' ).toLowerCase().split( '?' )[0].split( '#' )[0]
);
}
function kt_find_images() {
$( 'a[href]' ).filter( kt_check_images ).attr( 'data-rel', 'lightbox' );
}
kt_find_images();
$.extend(true, $.magnificPopup.defaults, {
tClose: '',
tLoading: light_load, // Text that is displayed during loading. Can contain %curr% and %total% keys
gallery: {
tPrev: '', // Alt text on left arrow
tNext: '', // Alt text on right arrow
tCounter: light_of // Markup for "1 of 7" counter
},
image: {
tError: light_error, // Error message when image could not be loaded
titleSrc: function(item) {
return item.el.find('img').attr('alt');
}
}
});
$("a[rel^='lightbox']").magnificPopup({type:'image'});
$("a[data-rel^='lightbox']").magnificPopup({type:'image'});
$('.kad-light-gallery').each(function(){
$(this).find('a[rel^="lightbox"]').magnificPopup({
type: 'image',
gallery: {
enabled:true
},
image: {
titleSrc: 'title'
}
});
});
$('.kad-light-gallery').each(function(){
$(this).find("a[data-rel^='lightbox']").magnificPopup({
type: 'image',
gallery: {
enabled:true
},
image: {
titleSrc: 'title'
}
});
});
$('.kad-light-wp-gallery').each(function(){
$(this).find('a[rel^="lightbox"]').magnificPopup({
type: 'image',
gallery: {
enabled:true
},
image: {
titleSrc: function(item) {
return item.el.find('img').attr('alt');
}
}
});
});
$('.kad-light-wp-gallery').each(function(){
$(this).find("a[data-rel^='lightbox']").magnificPopup({
type: 'image',
gallery: {
enabled:true
},
image: {
titleSrc: function(item) {
return item.el.find('img').attr('alt');
}
}
});
});
//Superfish Menu
$('ul.sf-menu').superfish({
delay: 200, // one second delay on mouseout
animation: {opacity:'show',height:'show'}, // fade-in and slide-down animation
speed: 'fast' // faster animation speed
});
function kad_fullwidth_panel() {
var margins = $(window).width() - $('#content').width();
$('.panel-row-style-wide-feature').each(function(){
$(this).css({'padding-left': margins/2 + 'px'});
$(this).css({'padding-right': margins/2 + 'px'});
$(this).css({'margin-left': '-' + margins/2 + 'px'});
$(this).css({'margin-right': '-' + margins/2 + 'px'});
$(this).css({'visibility': 'visible'});
});
}
kad_fullwidth_panel();
$(window).on("debouncedresize", function( event ) {kad_fullwidth_panel();});
//init Flexslider
$('.kt-flexslider').each(function(){
var flex_speed = $(this).data('flex-speed'),
flex_animation = $(this).data('flex-animation'),
flex_animation_speed = $(this).data('flex-anim-speed'),
flex_auto = $(this).data('flex-auto');
$(this).flexslider({
animation:flex_animation,
animationSpeed: flex_animation_speed,
slideshow: flex_auto,
slideshowSpeed: flex_speed,
start: function ( slider ) {
slider.removeClass( 'loading' );
}
});
});
//init masonry
$('.init-masonry').each(function(){
var masonrycontainer = $(this),
masonry_selector = $(this).data('masonry-selector');
masonrycontainer.imagesLoadedn( function(){
masonrycontainer.masonry({itemSelector: masonry_selector});
});
});
//init carousel
jQuery('.initcaroufedsel').each(function(){
var container = jQuery(this);
var wcontainerclass = container.data('carousel-container'),
cspeed = container.data('carousel-speed'),
ctransition = container.data('carousel-transition'),
cauto = container.data('carousel-auto'),
carouselid = container.data('carousel-id'),
ss = container.data('carousel-ss'),
xs = container.data('carousel-xs'),
sm = container.data('carousel-sm'),
md = container.data('carousel-md');
var wcontainer = jQuery(wcontainerclass);
function getUnitWidth() {var width;
if(jQuery(window).width() <= 540) {
width = wcontainer.width() / ss;
} else if(jQuery(window).width() <= 768) {
width = wcontainer.width() / xs;
} else if(jQuery(window).width() <= 990) {
width = wcontainer.width() / sm;
} else {
width = wcontainer.width() / md;
}
return width;
}
function setWidths() {
var unitWidth = getUnitWidth() -1;
container.children().css({ width: unitWidth });
}
setWidths();
function initCarousel() {<|fim▁hole|> container.carouFredSel({
scroll: {items:1, easing: "swing", duration: ctransition, pauseOnHover : true},
auto: {play: cauto, timeoutDuration: cspeed},
prev: '#prevport-'+carouselid, next: '#nextport-'+carouselid, pagination: false, swipe: true, items: {visible: null}
});
}
container.imagesLoadedn( function(){
initCarousel();
});
wcontainer.animate({'opacity' : 1});
jQuery(window).on("debouncedresize", function( event ) {
container.trigger("destroy");
setWidths();
initCarousel();
});
});
//init carouselslider
jQuery('.initcarouselslider').each(function(){
var container = jQuery(this);
var wcontainerclass = container.data('carousel-container'),
cspeed = container.data('carousel-speed'),
ctransition = container.data('carousel-transition'),
cauto = container.data('carousel-auto'),
carouselid = container.data('carousel-id'),
carheight = container.data('carousel-height'),
align = 'center';
var wcontainer = jQuery(wcontainerclass);
function setWidths() {
var unitWidth = container.width();
container.children().css({ width: unitWidth });
if(jQuery(window).width() <= 768) {
carheight = null;
container.children().css({ height: 'auto' });
}
}
setWidths();
function initCarouselslider() {
container.carouFredSel({
width: '100%',
height: carheight,
align: align,
auto: {play: cauto, timeoutDuration: cspeed},
scroll: {items : 1,easing: 'quadratic'},
items: {visible: 1,width: 'variable'},
prev: '#prevport-'+carouselid,
next: '#nextport-'+carouselid,
swipe: {onMouse: false,onTouch: true},
});
}
container.imagesLoadedn( function(){
initCarouselslider();
wcontainer.animate({'opacity' : 1});
wcontainer.css({ height: 'auto' });
wcontainer.parent().removeClass('loading');
});
jQuery(window).on("debouncedresize", function( event ) {
container.trigger("destroy");
setWidths();
initCarouselslider();
});
});
});
if( isMobile.any() ) {
jQuery(document).ready(function ($) {
$('.caroufedselclass').tswipe({
excludedElements:"button, input, select, textarea, .noSwipe",
tswipeLeft: function() {
$('.caroufedselclass').trigger('next', 1);
},
tswipeRight: function() {
$('.caroufedselclass').trigger('prev', 1);
},
tap: function(event, target) {
window.open(jQuery(target).closest('.grid_item').find('a').attr('href'), '_self');
}
});
});
}<|fim▁end|> | |
<|file_name|>map-queries-spec.js<|end_file_name|><|fim▁begin|>'use babel';
import MapQueries from '../lib/map-queries';
// Use the command `window:run-package-specs` (cmd-alt-ctrl-p) to run specs.
//
// To run a specific `it` or `describe` block add an `f` to the front (e.g. `fit`
// or `fdescribe`). Remove the `f` to unfocus the block.
describe('MapQueries', () => {
let workspaceElement, activationPromise;
beforeEach(() => {
workspaceElement = atom.views.getView(atom.workspace);
activationPromise = atom.packages.activatePackage('map-queries');
});
describe('when the map-queries:toggle event is triggered', () => {
it('hides and shows the modal panel', () => {<|fim▁hole|> expect(workspaceElement.querySelector('.map-queries')).not.toExist();
// This is an activation event, triggering it will cause the package to be
// activated.
atom.commands.dispatch(workspaceElement, 'map-queries:toggle');
waitsForPromise(() => {
return activationPromise;
});
runs(() => {
expect(workspaceElement.querySelector('.map-queries')).toExist();
let mapQueriesElement = workspaceElement.querySelector('.map-queries');
expect(mapQueriesElement).toExist();
let mapQueriesPanel = atom.workspace.panelForItem(mapQueriesElement);
expect(mapQueriesPanel.isVisible()).toBe(true);
atom.commands.dispatch(workspaceElement, 'map-queries:toggle');
expect(mapQueriesPanel.isVisible()).toBe(false);
});
});
it('hides and shows the view', () => {
// This test shows you an integration test testing at the view level.
// Attaching the workspaceElement to the DOM is required to allow the
// `toBeVisible()` matchers to work. Anything testing visibility or focus
// requires that the workspaceElement is on the DOM. Tests that attach the
// workspaceElement to the DOM are generally slower than those off DOM.
jasmine.attachToDOM(workspaceElement);
expect(workspaceElement.querySelector('.map-queries')).not.toExist();
// This is an activation event, triggering it causes the package to be
// activated.
atom.commands.dispatch(workspaceElement, 'map-queries:toggle');
waitsForPromise(() => {
return activationPromise;
});
runs(() => {
// Now we can test for view visibility
let mapQueriesElement = workspaceElement.querySelector('.map-queries');
expect(mapQueriesElement).toBeVisible();
atom.commands.dispatch(workspaceElement, 'map-queries:toggle');
expect(mapQueriesElement).not.toBeVisible();
});
});
});
});<|fim▁end|> | // Before the activation event the view is not on the DOM, and no panel
// has been created |
<|file_name|>views.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.shortcuts import render_to_response, get_object_or_404, redirect
def in_place(request, slug):
    """Render the projects associated with the Place identified by *slug*.

    404s when no such Place exists.  Note the template receives the
    related manager (``place.project_set``), not an evaluated queryset.
    """
    place = get_object_or_404(pombola.core.models.Place, slug=slug)
    context = {
        'place': place,
        'projects': place.project_set,
    }
    return render_to_response(
        'projects/in_place.html',
        context,
        context_instance=RequestContext(request),
    )
import pombola.core.models
from django.template import RequestContext |
<|file_name|>gc_get_referents.py<|end_file_name|><|fim▁begin|>import gc
import pprint
class Graph:
    """A singly linked node used to demonstrate reference cycles."""

    def __init__(self, name):
        self.name = name
        self.next = None  # the node this one points at, if any

    def set_next(self, next):
        """Link this node to *next*, logging the operation."""
        print('Linking nodes {}.next = {}'.format(self, next))
        self.next = next

    def __repr__(self):
        return f'{self.__class__.__name__}({self.name})'
# Build a three-node cycle: one -> two -> three -> one.
one = Graph('one')
two = Graph('two')
three = Graph('three')
one.set_next(two)
two.set_next(three)
three.set_next(one)
print()
print('three refers to:')
# gc.get_referents lists objects directly reachable from `three`:
# its instance __dict__ (holding name/next) and its class.
for r in gc.get_referents(three):
    pprint.pprint(r)
<|file_name|>pay_period_details.py<|end_file_name|><|fim▁begin|>"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from plaid.model.distribution_breakdown import DistributionBreakdown
globals()['DistributionBreakdown'] = DistributionBreakdown
class PayPeriodDetails(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('pay_frequency',): {
'None': None,
'PAY_FREQUENCY_UNKNOWN': "PAY_FREQUENCY_UNKNOWN",
'PAY_FREQUENCY_WEEKLY': "PAY_FREQUENCY_WEEKLY",
'PAY_FREQUENCY_BIWEEKLY': "PAY_FREQUENCY_BIWEEKLY",
'PAY_FREQUENCY_SEMIMONTHLY': "PAY_FREQUENCY_SEMIMONTHLY",
'PAY_FREQUENCY_MONTHLY': "PAY_FREQUENCY_MONTHLY",
'NULL': "null",
},
}
validations = {
}
    @cached_property
    def additional_properties_type():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded
        """
        lazy_import()
        # Any JSON-compatible value is accepted as an additional property.
        return (bool, date, datetime, dict, float, int, list, str, none_type,)  # noqa: E501
_nullable = False
    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        # Each value is a tuple of accepted Python types; `none_type`
        # marks the field as nullable in the API schema.
        return {
            'check_amount': (float, none_type,),  # noqa: E501
            'distribution_breakdown': ([DistributionBreakdown],),  # noqa: E501
            'end_date': (date, none_type,),  # noqa: E501
            'gross_earnings': (float, none_type,),  # noqa: E501
            'pay_date': (date, none_type,),  # noqa: E501
            'pay_frequency': (str, none_type,),  # noqa: E501
            'pay_day': (date, none_type,),  # noqa: E501
            'start_date': (date, none_type,),  # noqa: E501
        }
    @cached_property
    def discriminator():
        """This model declares no OpenAPI discriminator."""
        return None
attribute_map = {
'check_amount': 'check_amount', # noqa: E501
'distribution_breakdown': 'distribution_breakdown', # noqa: E501
'end_date': 'end_date', # noqa: E501
'gross_earnings': 'gross_earnings', # noqa: E501
'pay_date': 'pay_date', # noqa: E501
'pay_frequency': 'pay_frequency', # noqa: E501
'pay_day': 'pay_day', # noqa: E501
'start_date': 'start_date', # noqa: E501
}
_composed_schemas = {}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""PayPeriodDetails - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the<|fim▁hole|> is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
check_amount (float, none_type): The amount of the paycheck.. [optional] # noqa: E501
distribution_breakdown ([DistributionBreakdown]): [optional] # noqa: E501
end_date (date, none_type): The pay period end date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional] # noqa: E501
gross_earnings (float, none_type): Total earnings before tax/deductions.. [optional] # noqa: E501
pay_date (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional] # noqa: E501
pay_frequency (str, none_type): The frequency at which an individual is paid.. [optional] # noqa: E501
pay_day (date, none_type): The date on which the paystub was issued, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format (\"yyyy-mm-dd\").. [optional] # noqa: E501
start_date (date, none_type): The pay period start date, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format: \"yyyy-mm-dd\".. [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)<|fim▁end|> | composed schema that is |
<|file_name|>enc-utf16-test.js<|end_file_name|><|fim▁begin|>YUI.add('enc-utf16-test', function (Y) {
var C = CryptoJS;
Y.CryptoJSTestSuite.add(new Y.Test.Case({
name: 'Utf16',
testStringify1: function () {
Y.Assert.areEqual('z', C.enc.Utf16.stringify(C.lib.WordArray.create([0x007a0000], 2)));
},
testStringify2: function () {
Y.Assert.areEqual('水', C.enc.Utf16.stringify(C.lib.WordArray.create([0x6c340000], 2)));
},
testStringify3: function () {
Y.Assert.areEqual('𐀀', C.enc.Utf16.stringify(C.lib.WordArray.create([0xd800dc00], 4)));
},
testStringify4: function () {
Y.Assert.areEqual('𝄞', C.enc.Utf16.stringify(C.lib.WordArray.create([0xd834dd1e], 4)));
},
testStringify5: function () {
Y.Assert.areEqual('', C.enc.Utf16.stringify(C.lib.WordArray.create([0xdbffdffd], 4)));
},
testStringifyLE: function () {
Y.Assert.areEqual('', C.enc.Utf16LE.stringify(C.lib.WordArray.create([0xffdbfddf], 4)));
},
testStringifyLEInputIntegrity: function () {
var wordArray = C.lib.WordArray.create([0xffdbfddf], 4);
Y.Assert.areEqual('', C.enc.Utf16LE.stringify(wordArray));
Y.Assert.areEqual('', C.enc.Utf16LE.stringify(wordArray));
},
testParse1: function () {
Y.Assert.areEqual(C.lib.WordArray.create([0x007a0000], 2).toString(), C.enc.Utf16.parse('z').toString());
},
testParse2: function () {
Y.Assert.areEqual(C.lib.WordArray.create([0x6c340000], 2).toString(), C.enc.Utf16.parse('水').toString());
},
testParse3: function () {
Y.Assert.areEqual(C.lib.WordArray.create([0xd800dc00], 4).toString(), C.enc.Utf16.parse('𐀀').toString());
},
<|fim▁hole|> Y.Assert.areEqual(C.lib.WordArray.create([0xd834dd1e], 4).toString(), C.enc.Utf16.parse('𝄞').toString());
},
testParse5: function () {
Y.Assert.areEqual(C.lib.WordArray.create([0xdbffdffd], 4).toString(), C.enc.Utf16.parse('').toString());
},
testParseLE: function () {
Y.Assert.areEqual(C.lib.WordArray.create([0xffdbfddf], 4).toString(), C.enc.Utf16LE.parse('').toString());
}
}));
}, '$Rev$');<|fim▁end|> |
testParse4: function () {
|
<|file_name|>portal_modules.js<|end_file_name|><|fim▁begin|>$('#section').on('click', '[id$="Empty"]', function(event) {
event.preventDefault();
var match = /(.+)Empty/.exec($(event.target).closest('.unwell').attr('id'));
var id = match[1];
var emptyId = match[0];
$('#'+id).trigger('addrow');
$('#'+emptyId).addClass('hidden');
return false;
});
$('#section').on('submit', 'form[name="formItem"]', function(e) {
e.preventDefault();
var form = $(this),
btn = form.find('.btn-primary'),
valid = isFormValid(form);
$('select[name$=".type"]:not(:disabled)').each(function(i,e){
if($(e).val() == "Select an option"){
valid = false;
showPermanentError(form, "Please select a valid action.");
}
});
if (valid) {
btn.button('loading');
resetAlert($('#section'));
$.ajax({
type: 'POST',
url: form.attr('action'),
data: form.serialize()
}).always(function() {
btn.button('reset');
}).done(function(data, textStatus, jqXHR) {
showSuccess(form, "Saved");
window.location.hash = "#config/portal_module/"+form.find('input[name="id"]').val()+"/read"
}).fail(function(jqXHR) {
$("body,html").animate({scrollTop:0}, 'fast');
var status_msg = getStatusMsg(jqXHR);
showPermanentError(form, status_msg);
});
}
});
$('#section').on('click', '.delete-portal-module', function(e){
e.preventDefault();
var button = $(e.target);
button.button('loading');
$.ajax({
type: 'GET',
url: button.attr('href'),
}).always(function() {
}).done(function(data, textStatus, jqXHR) {
showSuccess(button.closest('.table'), "Deleted");
button.closest('tr').remove();
}).fail(function(jqXHR) {
button.button('reset');
$("body,html").animate({scrollTop:0}, 'fast');
var status_msg = getStatusMsg(jqXHR);
showPermanentError(button.closest('.table'), status_msg);
});
return false;
});
$('#section').on('click', '.expand', function(e){<|fim▁hole|> return false;
});
$('#section').on('change', '#actions select[name$=".type"]', function(event) {
var type_input = $(event.currentTarget);
updateActionMatchInput(type_input,false);
});
$('#section').on('click', '#actionsContainer a[href="#add"]', function(event) {
setTimeout(initActionMatchInput, 3000);
});
// Re-initialize the match input of every enabled action-type <select>,
// preserving (keep=true) any value the user has already entered.
function initActionMatchInput() {
    $('select[name$=".type"]:not(:disabled)').each(function(i,e){
        updateActionMatchInput($(e),true);
    });
}
// Swap the "match" input next to an action-type <select> for the widget
// appropriate to the selected type.  `keep` preserves the current value
// when re-initializing existing rows.
function updateActionMatchInput(type_input, keep) {
    // Assumes the match input is the element immediately following the
    // type <select> in the DOM — TODO confirm against the form template.
    var match_input = type_input.next();
    var type_value = type_input.val();
    // Look for a type-specific template (e.g. "#switch_action_match");
    // fall back to the generic template when none is defined.
    var match_input_template_id = '#' + type_value + "_action_match";
    var match_input_template = $(match_input_template_id);
    if ( match_input_template.length == 0 ) {
        match_input_template = $('#default_action_match');
    }
    if ( match_input_template.length ) {
        changeInputFromTemplate(match_input, match_input_template, keep);
        if (type_value == "switch") {
            // "switch" actions get a typeahead fed by the switch search
            // generator; matcher always returns true so the server-side
            // search controls filtering.
            type_input.next().typeahead({
                source: searchSwitchesGenerator($('#section h2')),
                minLength: 2,
                items: 11,
                matcher: function(item) { return true; }
            });
        }
    }
}
$(e.target).hide(function(){
$($(e.target).attr('data-expand')).slideDown();
}); |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.http import Http404, JsonResponse, HttpResponseForbidden
from django.shortcuts import render, redirect, get_object_or_404
from .forms import PlaylistForm
from .models import Playlist
def form_data(user, form):
    """Build the template context shared by the playlist form views."""
    return {
        'owner': user,
        'playlist_form': form,
    }
def playlists(request, user_id):
user = get_object_or_404(User, id=user_id)
if request.method == 'GET':
data = {<|fim▁hole|> }
return render(request, 'playlist/index.html', data)
elif request.method == 'POST':
# Check if user matches URL
if request.user != user:
return HttpResponseForbidden()
form = PlaylistForm(request.POST)
if form.is_valid():
playlist = form.save(commit=False)
playlist.user = request.user
playlist.save()
return redirect('playlist:all', user_id)
else:
data = form_data(user, form)
return render(request, 'playlist/form.html', data)
@login_required
def create_view(request, user_id):
    """Render the playlist-creation form for the user in the URL."""
    user = get_object_or_404(User, id=user_id)
    # Users may only create playlists under their own account; bounce
    # them to their own create page otherwise.
    if request.user != user:
        return redirect('playlist:create', request.user.id)
    return render(request, 'playlist/form.html', form_data(user, PlaylistForm()))
def playlist(request, user_id, playlist_id):
    """Show (GET) or mutate (POST with ?action=delete|update) one playlist.

    404s unless the playlist exists and belongs to the user in the URL;
    POSTs from anyone but the owner get 403.
    """
    owner = get_object_or_404(User, id=user_id)
    target = get_object_or_404(Playlist, id=playlist_id, user=owner)
    if request.method == 'GET':
        return render(request, 'playlist/playlist.html', {
            'owner': owner,
            'playlist': target,
        })
    elif request.method == 'POST':
        # Only the owner may modify the playlist.
        if request.user != target.user:
            return HttpResponseForbidden()
        action = request.GET.get('action', False)
        if action == 'delete':
            target.delete()
        elif action == 'update':
            form = PlaylistForm(request.POST, instance=target)
            if form.is_valid():
                form.save()
            else:
                # Re-render the form with validation errors.
                return render(request, 'playlist/form.html', form_data(owner, form))
        return redirect('playlist:all', user_id)
@login_required
def edit_view(request, user_id, playlist_id):
    """Render the edit form for a playlist owned by the URL's user."""
    owner = get_object_or_404(User, id=user_id)
    target = get_object_or_404(Playlist, id=playlist_id, user=owner)
    # Only the playlist's owner may edit it; others are sent back to
    # the owner's playlist listing.
    if request.user != target.user:
        return redirect('playlist:all', target.user.id)
    return render(request, 'playlist/form.html',
                  form_data(owner, PlaylistForm(instance=target)))
'playlists': Playlist.objects.filter(user=user), |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>class StretchException(Exception):
"""Common base class for all exceptions raised explicitly by stretch.
Exceptions which are subclasses of this type will be handled nicely by
stretch and will not cause the program to exit. Any exceptions raised
which are not a subclass of this type will exit(1) and print a traceback
to stdout.
"""
level = "error"
def __init__(self, message, **kwargs):
Exception.__init__(self, message)
self.message = message<|fim▁hole|> return self.message
def __unicode__(self):
return self.message
def __str__(self):
return self.message.encode('utf-8')<|fim▁end|> | self.kwargs = kwargs
def format_message(self): |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.