prompt (large_string, lengths 70 – 991k) | completion (large_string, lengths 0 – 1.02k)
---|---
<|file_name|>issue-3049.rs<|end_file_name|><|fim▁begin|>// rustfmt-indent_style: Visual
fn main() {
something.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.bench_function(|| {
let x = hello();
});
something.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.bench_function(arg, || {
let x = hello();
});
something.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.aaaaaaaaaaaa()
.bench_function(arg,
|| {
let x = hello();
},<|fim▁hole|> arg);
AAAAAAAAAAA.function(|| {
let _ = ();
});
AAAAAAAAAAA.chain().function(|| {
let _ = ();
})
}<|fim▁end|> | |
<|file_name|>test_cron_triggers.py<|end_file_name|><|fim▁begin|># Copyright 2014 - Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import json
from unittest import mock
import sqlalchemy as sa
from mistral.db.v2 import api as db_api
from mistral.db.v2.sqlalchemy import models
from mistral import exceptions as exc
from mistral.services import security
from mistral.tests.unit.api import base
from mistral.tests.unit import base as unit_base
WF = models.WorkflowDefinition(
spec={
'version': '2.0',
'name': 'my_wf',
'tasks': {
'task1': {
'action': 'std.noop'
}
}
}
)
WF.update({'id': '123e4567-e89b-12d3-a456-426655440000', 'name': 'my_wf'})
TRIGGER = {
'id': '02abb422-55ef-4bb2-8cb9-217a583a6a3f',
'name': 'my_cron_trigger',
'pattern': '* * * * *',
'workflow_name': WF.name,
'workflow_id': '123e4567-e89b-12d3-a456-426655440000',
'workflow_input': '{}',
'workflow_params': '{}',
'scope': 'private',
'remaining_executions': 42
}
trigger_values = copy.deepcopy(TRIGGER)
trigger_values['workflow_input'] = json.loads(
trigger_values['workflow_input'])
trigger_values['workflow_params'] = json.loads(
trigger_values['workflow_params'])
TRIGGER_DB = models.CronTrigger()
TRIGGER_DB.update(trigger_values)
TRIGGER_DB_WITH_PROJECT_ID = TRIGGER_DB.get_clone()
TRIGGER_DB_WITH_PROJECT_ID.project_id = '<default-project>'
MOCK_WF = mock.MagicMock(return_value=WF)
MOCK_TRIGGER = mock.MagicMock(return_value=TRIGGER_DB)
MOCK_TRIGGERS = mock.MagicMock(return_value=[TRIGGER_DB])
MOCK_DELETE = mock.MagicMock(return_value=1)
MOCK_EMPTY = mock.MagicMock(return_value=[])
MOCK_NOT_FOUND = mock.MagicMock(side_effect=exc.DBEntityNotFoundError())
MOCK_DUPLICATE = mock.MagicMock(side_effect=exc.DBDuplicateEntryError())
class TestCronTriggerController(base.APITest):
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
def test_get(self):
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, 'get_cron_trigger')
def test_get_operational_error(self, mocked_get):
mocked_get.side_effect = [
# Emulating DB OperationalError
sa.exc.OperationalError('Mock', 'mock', 'mock'),
TRIGGER_DB # Successful run
]
resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, "get_cron_trigger",
return_value=TRIGGER_DB_WITH_PROJECT_ID)
def test_get_within_project_id(self, mock_get):<|fim▁hole|> resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(200, resp.status_int)
self.assertTrue('project_id' in resp.json)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_NOT_FOUND)
def test_get_not_found(self):
resp = self.app.get(
'/v2/cron_triggers/my_cron_trigger',
expect_errors=True
)
self.assertEqual(404, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
def test_get_by_id(self):
resp = self.app.get(
"/v2/cron_triggers/02abb422-55ef-4bb2-8cb9-217a583a6a3f")
self.assertEqual(200, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger")
def test_post(self, mock_mtd):
mock_mtd.return_value = TRIGGER_DB
resp = self.app.post_json('/v2/cron_triggers', TRIGGER)
self.assertEqual(201, resp.status_int)
self.assertDictEqual(TRIGGER, resp.json)
self.assertEqual(1, mock_mtd.call_count)
values = mock_mtd.call_args[0][0]
self.assertEqual('* * * * *', values['pattern'])
self.assertEqual(42, values['remaining_executions'])
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger", MOCK_DUPLICATE)
@mock.patch.object(security, "delete_trust")
def test_post_dup(self, delete_trust):
resp = self.app.post_json(
'/v2/cron_triggers', TRIGGER, expect_errors=True
)
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(409, resp.status_int)
@mock.patch.object(db_api, "get_workflow_definition", MOCK_WF)
@mock.patch.object(db_api, "create_cron_trigger", MOCK_DUPLICATE)
def test_post_same_wf_and_input(self):
trig = TRIGGER.copy()
trig['name'] = 'some_trigger_name'
resp = self.app.post_json(
'/v2/cron_triggers', trig, expect_errors=True
)
self.assertEqual(409, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_DELETE)
@mock.patch.object(security, "delete_trust")
def test_delete(self, delete_trust):
resp = self.app.delete('/v2/cron_triggers/my_cron_trigger')
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "get_cron_trigger", MOCK_TRIGGER)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_DELETE)
@mock.patch.object(security, "delete_trust")
def test_delete_by_id(self, delete_trust):
resp = self.app.delete(
'/v2/cron_triggers/02abb422-55ef-4bb2-8cb9-217a583a6a3f')
self.assertEqual(1, delete_trust.call_count)
self.assertEqual(204, resp.status_int)
@mock.patch.object(db_api, "delete_cron_trigger", MOCK_NOT_FOUND)
def test_delete_not_found(self):
resp = self.app.delete(
'/v2/cron_triggers/my_cron_trigger',
expect_errors=True
)
self.assertEqual(404, resp.status_int)
@mock.patch.object(db_api, "get_cron_triggers", MOCK_TRIGGERS)
def test_get_all(self):
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(1, len(resp.json['cron_triggers']))
self.assertDictEqual(TRIGGER, resp.json['cron_triggers'][0])
@mock.patch.object(db_api, 'get_cron_triggers')
def test_get_all_operational_error(self, mocked_get_all):
mocked_get_all.side_effect = [
# Emulating DB OperationalError
sa.exc.OperationalError('Mock', 'mock', 'mock'),
[TRIGGER_DB] # Successful run
]
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(1, len(resp.json['cron_triggers']))
self.assertDictEqual(TRIGGER, resp.json['cron_triggers'][0])
@mock.patch.object(db_api, 'get_cron_triggers')
@mock.patch('mistral.context.MistralContext.from_environ')
def test_get_all_projects_admin(self, mock_context, mock_get_triggers):
admin_ctx = unit_base.get_context(admin=True)
mock_context.return_value = admin_ctx
resp = self.app.get('/v2/cron_triggers?all_projects=true')
self.assertEqual(200, resp.status_int)
self.assertTrue(mock_get_triggers.call_args[1].get('insecure', False))
@mock.patch.object(db_api, 'get_cron_triggers')
@mock.patch('mistral.context.MistralContext.from_environ')
def test_get_all_filter_project(self, mock_context, mock_get_triggers):
admin_ctx = unit_base.get_context(admin=True)
mock_context.return_value = admin_ctx
resp = self.app.get(
'/v2/cron_triggers?all_projects=true&'
'project_id=192796e61c174f718d6147b129f3f2ff'
)
self.assertEqual(200, resp.status_int)
self.assertTrue(mock_get_triggers.call_args[1].get('insecure', False))
self.assertEqual(
{'eq': '192796e61c174f718d6147b129f3f2ff'},
mock_get_triggers.call_args[1].get('project_id')
)
@mock.patch.object(db_api, "get_cron_triggers", MOCK_EMPTY)
def test_get_all_empty(self):
resp = self.app.get('/v2/cron_triggers')
self.assertEqual(200, resp.status_int)
self.assertEqual(0, len(resp.json['cron_triggers']))<|fim▁end|> | |
<|file_name|>SimpleServlet.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Adobe Systems Incorporated
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at<|fim▁hole|> * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ${package}.core.servlets;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.servlets.HttpConstants;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.api.servlets.SlingSafeMethodsServlet;
import org.apache.sling.api.resource.ValueMap;
import org.osgi.framework.Constants;
import org.osgi.service.component.annotations.Component;
import javax.servlet.Servlet;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* Servlet that writes some sample content into the response. It is mounted for
* all resources of a specific Sling resource type. The
* {@link SlingSafeMethodsServlet} shall be used for HTTP methods that are
* idempotent. For write operations use the {@link SlingAllMethodsServlet}.
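 *
 * <p>Illustrative request (the content path is hypothetical): with the registration
 * below, {@code GET /content/some-page.txt} against a resource of type
 * {@code ${appsFolderName}/components/structure/page} is answered by {@link #doGet}.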
*/
@Component(service=Servlet.class,
property={
Constants.SERVICE_DESCRIPTION + "=Simple Demo Servlet",
"sling.servlet.methods=" + HttpConstants.METHOD_GET,
"sling.servlet.resourceTypes="+ "${appsFolderName}/components/structure/page",
"sling.servlet.extensions=" + "txt"
})
public class SimpleServlet extends SlingSafeMethodsServlet {
private static final long serialVersionUid = 1L;
@Override
protected void doGet(final SlingHttpServletRequest req,
final SlingHttpServletResponse resp) throws ServletException, IOException {
final Resource resource = req.getResource();
resp.setContentType("text/plain");
resp.getWriter().write("Title = " + resource.adaptTo(ValueMap.class).get("jcr:title"));
}
}<|fim▁end|> | *
* http://www.apache.org/licenses/LICENSE-2.0
* |
<|file_name|>mf6lakeconn.py<|end_file_name|><|fim▁begin|>import os
import numpy as np
import flopy
ml = flopy.modflow.Modflow.load('l2a_2k.nam', version='mf2005', verbose=True)
delx = ml.dis.delr.array
dely = ml.dis.delc.array
# get data from the lst file
f = open('l2a_2k.lst', 'r')
for line in f:
if 'LAYER # ROW # COLUMN # LAKE # INTERFACE TYPE LAKEBED LEAKANCE' in line:
break
cdata = []
for idx, line in enumerate(f):
if (len(line.strip()) < 1):
break
cdata.append(line)
f.close()
tpth = 'mf5.conn.dat'
f = open(tpth, 'w')
for c in cdata:
f.write(c)
f.close()
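# Map LAK interface-type codes from the listing file to MF6 connection direction
# strings (codes 1-4 are treated as horizontal faces, code 6 as vertical).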
dir_dict = {1:'HORIZONTAL',
2:'HORIZONTAL',
3:'HORIZONTAL',
4:'HORIZONTAL',
6:'VERTICAL'}
dtype = [('k', np.int), ('i', np.int), ('j', np.int),
('lake', np.int), ('itype', np.int),
('bedleak', np.float)]
cdata = np.loadtxt(tpth, dtype=dtype)
cdata['k'] -= 1
cdata['i'] -= 1
cdata['j'] -= 1
nlakes = np.unique(cdata['lake'])
print(nlakes)
lake_cnt = {}
for lake in nlakes:
lake_cnt[lake] = 0<|fim▁hole|> ('dx', np.float), ('width', np.float)]
cdata2 = np.zeros((cdata.shape[0]), dtype=dtype2)
# fill cdata2
for idx in range(cdata.shape[0]):
k = cdata['k'][idx]
i = cdata['i'][idx]
j = cdata['j'][idx]
ilak = cdata['lake'][idx]
lake_cnt[ilak] += 1
itype = cdata['itype'][idx]
cdir = dir_dict[itype]
belev = 0.
telev = 0.
if cdir == 'HORIZONTAL':
if itype == 1 or itype == 2:
dx = 0.5 * delx[j]
width = dely[i]
elif itype == 3 or itype == 4:
dx = 0.5 * dely[i]
width = delx[j]
else:
dx = 0.
width = 0.
cdata2['iconn'][idx] = lake_cnt[ilak]
cdata2['belev'][idx] = belev
cdata2['telev'][idx] = telev
cdata2['dx'][idx] = dx
cdata2['width'][idx] = width
#
tpth = 'mf6.conn.dat'
f = open(tpth, 'w')
f.write('begin lakes\n')
c = '# lakeno strt lakeconn boundname'
f.write('{}\n'.format(c))
for lake in nlakes:
f.write(' LAKE {:10d}{:10.3g}{:10d} LAKE_{:03d}\n'.format(lake, 130., lake_cnt[lake], lake))
f.write('end lakes\n\n')
f.write('begin lake_connections\n')
# header
## lakeno iconn layer row column ctype bedleak belev telev dx width
c = '# lakeno iconn layer row ' + \
'column ctype bedleak belev '+ \
'telev dx width'
f.write('{}\n'.format(c))
# data
for idx in range(cdata.shape[0]):
itype = cdata['itype'][idx]
c = ' LAKE'
c += ' {:10d}{:10d}{:10d}{:10d}{:10d}'.format(cdata['lake'][idx],
cdata2['iconn'][idx],
cdata['k'][idx]+1,
cdata['i'][idx]+1,
cdata['j'][idx]+1)
c += '{:>15s} '.format(dir_dict[itype])
c += '{:10.3g}'.format(cdata['bedleak'][idx])
c += '{:10.3g}'.format(cdata2['belev'][idx])
c += '{:10.3g}'.format(cdata2['telev'][idx])
c += '{:10.3g}'.format(cdata2['dx'][idx])
c += '{:10.3g}'.format(cdata2['width'][idx])
f.write('{}\n'.format(c))
f.write('end lake_connections\n\n')
f.close()<|fim▁end|> | print(lake_cnt)
dtype2 = [('iconn', np.int), ('belev', np.float), ('telev', np.float), |
<|file_name|>environment_agents.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import Stream from "mithril/stream";
import {Origin, OriginJSON} from "models/origin";
export interface EnvironmentAgentJSON {
uuid: string;
hostname: string;
origin: OriginJSON;
}
export class AgentWithOrigin {
uuid: Stream<string>;
hostname: Stream<string>;
readonly origin: Stream<Origin>;
constructor(uuid: string, hostname: string, origin: Origin) {
this.uuid = Stream(uuid);
this.hostname = Stream(hostname);
this.origin = Stream(origin);
}
static fromJSON(data: EnvironmentAgentJSON) {
return new AgentWithOrigin(data.uuid, data.hostname, Origin.fromJSON(data.origin));
}
clone() {
return new AgentWithOrigin(this.uuid(), this.hostname(), this.origin().clone());
}
}
export class Agents extends Array<AgentWithOrigin> {
constructor(...agents: AgentWithOrigin[]) {
super(...agents);
Object.setPrototypeOf(this, Object.create(Agents.prototype));
}
static fromJSON(agents: EnvironmentAgentJSON[]) {
if (agents) {
return new Agents(...agents.map(AgentWithOrigin.fromJSON));
} else {
return new Agents();
}<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>import re
import datetime
import logging
log = logging.getLogger(__name__)
class Marker(object):
__slots__ = ['line_start', 'line_end', 'expires']
def __init__(self, lineno):
self.line_start = lineno
self.line_end = lineno
self.expires = None
def __str__(self):
if self.line_start == self.line_end:
line = str(self.line_start)
else:
line = '{0}-{1}'.format(self.line_start, self.line_end)
return 'Marker(line={0}, expires={1})'.format(line, self.expires)
def __repr__(self):
return str(self)
class Parser(object):
re_sunset_begin = re.compile(
r'>>SUNSET'
r'\s+(?P<date>([1-9][0-9]{3})-(1[1-2]|0?[1-9])-([1-2][0-9]|3[0-1]|0?[1-9]))\s*'
r'(?P<end><<)?\s*$')
re_sunset_end = re.compile(r'<<SUNSET')
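    # Illustrative marker lines (hypothetical source comments):
    #   ">>SUNSET 2018-06-01"     opens a marker expiring on that date
    #   ">>SUNSET 2018-06-01 <<"  opens and closes a single-line marker
    #   "<<SUNSET"                closes the currently open marker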
def __init__(self):
self.markers = []
self._open_marker = None
def parse_begin(self, lineno, comment):
match = self.re_sunset_begin.search(comment)<|fim▁hole|> if self._open_marker:
log.warn('Unmatched marker start at line %d', self._open_marker.line_start)
self.markers.append(self._open_marker)
groupdict = match.groupdict()
self._open_marker = Marker(lineno)
self._open_marker.expires = datetime.date(*map(int, groupdict['date'].split('-')))
if groupdict['end']:
self.markers.append(self._open_marker)
self._open_marker = None
return True
return False
def parse_end(self, lineno, comment):
match = self.re_sunset_end.search(comment)
if match:
if self._open_marker:
self._open_marker.line_end = lineno
self.markers.append(self._open_marker)
self._open_marker = None
else:
log.warn('Dangling marker end at line %d', lineno)
def parse(self, lineno, comment):
if not self.parse_begin(lineno, comment):
self.parse_end(lineno, comment)<|fim▁end|> | if match: |
<|file_name|>HttpData.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.common;
import static java.util.Objects.requireNonNull;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Formatter;
import java.util.Locale;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import it.unimi.dsi.fastutil.io.FastByteArrayInputStream;
/**
* HTTP/2 data. Helpers in this class create {@link HttpData} objects that leave the stream open.
* To create a {@link HttpData} that closes the stream, directly instantiate {@link DefaultHttpData}.
*
* <p>Implementations should generally extend {@link AbstractHttpData} to interact with other {@link HttpData}
* implementations.
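 *
 * <p>Illustrative usage, using only factory and accessor methods declared below:
 * <pre>{@code
 * HttpData data = HttpData.ofUtf8("Hello, %s!", "world");
 * String text = data.toStringUtf8(); // "Hello, world!"
 * }</pre>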
*/
public interface HttpData extends HttpObject {
/**
* Empty HTTP/2 data.
*/
HttpData EMPTY_DATA = new DefaultHttpData(new byte[0], 0, 0, false);
/**
* Creates a new instance from the specified byte array. The array is not copied; any changes made in the
* array later will be visible to {@link HttpData}.
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of the specified array is 0.
*/
static HttpData of(byte[] data) {
requireNonNull(data, "data");
if (data.length == 0) {
return EMPTY_DATA;
}
return new DefaultHttpData(data, 0, data.length, false);
}
/**
* Creates a new instance from the specified byte array, {@code offset} and {@code length}.
* The array is not copied; any changes made in the array later will be visible to {@link HttpData}.
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code length} is 0.
*
* @throws ArrayIndexOutOfBoundsException if {@code offset} and {@code length} are out of bounds
*/
static HttpData of(byte[] data, int offset, int length) {
requireNonNull(data);
if (offset < 0 || length < 0 || offset > data.length - length) {
throw new ArrayIndexOutOfBoundsException(
"offset: " + offset + ", length: " + length + ", data.length: " + data.length);
}
if (length == 0) {
return EMPTY_DATA;
}
return new DefaultHttpData(data, offset, length, false);
}
/**
* Converts the specified {@code text} into an {@link HttpData}.
*
* @param charset the {@link Charset} to use for encoding {@code text}
* @param text the {@link String} to convert
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
*/
static HttpData of(Charset charset, String text) {
requireNonNull(charset, "charset");
requireNonNull(text, "text");
if (text.isEmpty()) {
return EMPTY_DATA;
}
return of(text.getBytes(charset));
}
/**
* Converts the specified Netty {@link ByteBuf} into an {@link HttpData}. Unlike {@link #of(byte[])}, this
* method makes a copy of the {@link ByteBuf}.
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if the readable bytes of {@code buf} is 0.
*/
static HttpData of(ByteBuf buf) {
requireNonNull(buf, "buf");
if (!buf.isReadable()) {
return EMPTY_DATA;
}
return of(ByteBufUtil.getBytes(buf));
}
/**
* Converts the specified formatted string into an {@link HttpData}. The string is formatted by
* {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
*
* @param charset the {@link Charset} to use for encoding string
* @param format {@linkplain Formatter the format string} of the response content
* @param args the arguments referenced by the format specifiers in the format string
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
*/
static HttpData of(Charset charset, String format, Object... args) {
requireNonNull(charset, "charset");
requireNonNull(format, "format");
requireNonNull(args, "args");
if (format.isEmpty()) {
return EMPTY_DATA;
}
return of(String.format(Locale.ENGLISH, format, args).getBytes(charset));
}
/**
* Converts the specified {@code text} into a UTF-8 {@link HttpData}.
*
* @param text the {@link String} to convert
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
*/
static HttpData ofUtf8(String text) {
return of(StandardCharsets.UTF_8, text);
}
/**<|fim▁hole|> * @param args the arguments referenced by the format specifiers in the format string
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
*/
static HttpData ofUtf8(String format, Object... args) {
return of(StandardCharsets.UTF_8, format, args);
}
/**
* Converts the specified {@code text} into a US-ASCII {@link HttpData}.
*
* @param text the {@link String} to convert
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if the length of {@code text} is 0.
*/
static HttpData ofAscii(String text) {
return of(StandardCharsets.US_ASCII, text);
}
/**
* Converts the specified formatted string into a US-ASCII {@link HttpData}. The string is formatted by
* {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
*
* @param format {@linkplain Formatter the format string} of the response content
* @param args the arguments referenced by the format specifiers in the format string
*
* @return a new {@link HttpData}. {@link #EMPTY_DATA} if {@code format} is empty.
*/
static HttpData ofAscii(String format, Object... args) {
return of(StandardCharsets.US_ASCII, format, args);
}
/**
* Returns the underlying byte array of this data.
*/
byte[] array();
/**
* Returns the start offset of the {@link #array()}.
*/
int offset();
/**
* Returns the length of this data.
*/
int length();
/**
* Returns whether the {@link #length()} is 0.
*/
default boolean isEmpty() {
return length() == 0;
}
/**
* Decodes this data into a {@link String}.
*
* @param charset the {@link Charset} to use for decoding this data
*
* @return the decoded {@link String}
*/
default String toString(Charset charset) {
requireNonNull(charset, "charset");
return new String(array(), offset(), length(), charset);
}
/**
* Decodes this data into a {@link String} using UTF-8 encoding.
*
* @return the decoded {@link String}
*/
default String toStringUtf8() {
return toString(StandardCharsets.UTF_8);
}
/**
* Decodes this data into a {@link String} using US-ASCII encoding.
*
* @return the decoded {@link String}
*/
default String toStringAscii() {
return toString(StandardCharsets.US_ASCII);
}
/**
* Returns a new {@link InputStream} that is sourced from this data.
*/
default InputStream toInputStream() {
return new FastByteArrayInputStream(array(), offset(), length());
}
/**
* Returns a new {@link Reader} that is sourced from this data and decoded using the specified
* {@link Charset}.
*/
default Reader toReader(Charset charset) {
requireNonNull(charset, "charset");
return new InputStreamReader(toInputStream(), charset);
}
/**
* Returns a new {@link Reader} that is sourced from this data and decoded using
* {@link StandardCharsets#UTF_8}.
*/
default Reader toReaderUtf8() {
return toReader(StandardCharsets.UTF_8);
}
/**
* Returns a new {@link Reader} that is sourced from this data and decoded using
* {@link StandardCharsets#US_ASCII}.
*/
default Reader toReaderAscii() {
return toReader(StandardCharsets.US_ASCII);
}
}<|fim▁end|> | * Converts the specified formatted string into a UTF-8 {@link HttpData}. The string is formatted by
* {@link String#format(Locale, String, Object...)} with {@linkplain Locale#ENGLISH English locale}.
*
* @param format {@linkplain Formatter the format string} of the response content |
<|file_name|>AbstractCloud.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud;
import org.dasein.cloud.admin.AdminServices;
import org.dasein.cloud.ci.CIServices;
import org.dasein.cloud.compute.ComputeServices;
import org.dasein.cloud.identity.IdentityServices;
import org.dasein.cloud.network.NetworkServices;
import org.dasein.cloud.platform.PlatformServices;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* Simple base implementation of a cloud provider bootstrap object that defaults all services to <code>null</code>.
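 *
 * <p>Illustrative use: a concrete provider extends this class and overrides only the
 * services it actually supports, inheriting the {@code null} or compute-cloud-delegating
 * defaults defined here.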
* @author George Reese
* @version 2013.07 added javadoc, fixed annotations on data center services, made it return an NPE
* @since unknown
*/
public abstract class AbstractCloud extends CloudProvider {
/**
* Constructs a cloud provider instance.
*/
public AbstractCloud() { }
@Override
public @Nullable AdminServices getAdminServices() {
return null;
}
@Override
public @Nullable ComputeServices getComputeServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getComputeServices());
}
@Override
public @Nonnull ContextRequirements getContextRequirements() {
return new ContextRequirements(
new ContextRequirements.Field("apiKeys", ContextRequirements.FieldType.KEYPAIR),
new ContextRequirements.Field("x509", ContextRequirements.FieldType.KEYPAIR, false)
);
}
@Override
public @Nullable CIServices getCIServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getCIServices());
}
@Override
public @Nullable IdentityServices getIdentityServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getIdentityServices());
}
@Override
public @Nullable NetworkServices getNetworkServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getNetworkServices());
}
@Override
public @Nullable PlatformServices getPlatformServices() {
CloudProvider compute = getComputeCloud();
return ( compute == null ? null : compute.getPlatformServices() );
}<|fim▁hole|>
}<|fim▁end|> | |
<|file_name|>commands.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016-2018 Boucher, Antoni <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of<|fim▁hole|> *
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
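// How the attributes below are read (a sketch of intent, not verified against the
// `Commands` derive macro): each variant defines one user command, `#[help(text = "...")]`
// supplies its help entry, `#[completion(hidden)]` hides it from completion, and
// `#[special_command(incremental, identifier = "/")]` marks incremental commands such as
// search that are triggered by the given identifier.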
#[derive(Commands)]
pub enum AppCommand {
#[completion(hidden)]
ActivateSelection,
#[help(text="Update the host file used by the adblocker")]
AdblockUpdate,
#[help(text="Add a new user agent")]
AddUserAgent(String),
#[help(text="Go back in the history")]
Back,
#[special_command(incremental, identifier="?")]
BackwardSearch(String),
#[help(text="Add the current page to the bookmarks")]
Bookmark,
#[help(text="Delete the current page from the bookmarks")]
BookmarkDel,
#[help(text="Edit the bookmark tags of the current page")]
BookmarkEditTags,
#[help(text="Clear the browser cache")]
ClearCache,
#[help(text="Try to click link to next page if it exists")]
ClickNextPage,
#[help(text="Try to click link to the previous page if it exists")]
ClickPrevPage,
#[completion(hidden)]
CopyLinkUrl,
#[completion(hidden)]
CopyUrl,
#[help(text="Delete all the cookies")]
DeleteAllCookies,
#[help(text="Delete the cookies for the specified domain")]
DeleteCookies(String),
#[completion(hidden)]
DeleteSelectedBookmark,
#[completion(hidden)]
FinishSearch,
#[completion(hidden)]
FocusInput,
#[completion(hidden)]
Follow,
#[help(text="Go forward in the history")]
Forward,
#[completion(hidden)]
GoMark(String),
#[count]
#[help(text="Go up one directory in url")]
GoParentDir(Option<u32>),
#[help(text="Go to root directory of url")]
GoRootDir,
#[completion(hidden)]
HideHints,
#[completion(hidden)]
Hover,
#[completion(hidden)]
Insert,
#[help(text="Open the web inspector")]
Inspector,
#[help(text="Kill the webview without confirmation")]
KillWin,
#[completion(hidden)]
Mark(String),
#[completion(hidden)]
Normal,
#[help(text="Open an URL")]
Open(String),
#[help(text="Delete the credentials for the current URL")]
PasswordDelete,
#[help(text="Insert a password in the focused text input")]
PasswordInsert,
#[help(text="Insert a password in the focused text input and submit the form")]
PasswordInsertSubmit,
#[help(text="Load the credentials in the login form")]
PasswordLoad,
#[help(text="Save the credentials from the login form")]
PasswordSave,
#[help(text="Load the credentials in the login form and submit the form")]
PasswordSubmit,
#[completion(hidden)]
PasteUrl,
#[help(text="Print the current page")]
Print,
#[help(text="Open an URL in a new private window")]
PrivateWinOpen(String),
#[help(text="Quit the application")]
Quit,
#[help(text="Reload the current page")]
Reload,
#[help(text="Reload the current page without using the cache")]
ReloadBypassCache,
#[help(text="Restore the opened pages after a crash")]
RestoreUrls,
#[completion(hidden)]
SaveLink,
#[completion(hidden)]
SearchEngine(String),
#[completion(hidden)]
Screenshot(String),
#[count]
#[completion(hidden)]
ScrollTo(Option<u32>),
#[completion(hidden)]
ScrollDown,
#[completion(hidden)]
ScrollDownHalf,
#[completion(hidden)]
ScrollDownLine,
#[completion(hidden)]
ScrollLeft,
#[completion(hidden)]
ScrollRight,
#[completion(hidden)]
ScrollTop,
#[completion(hidden)]
ScrollUp,
#[completion(hidden)]
ScrollUpHalf,
#[completion(hidden)]
ScrollUpLine,
#[special_command(incremental, identifier="/")]
Search(String),
#[completion(hidden)]
SearchNext,
#[completion(hidden)]
SearchPrevious,
#[help(text="Select a user agent by name")]
SelectUserAgent(String),
#[help(text="Stop loading the current page")]
Stop,
#[completion(hidden)]
UrlIncrement,
#[completion(hidden)]
UrlDecrement,
#[completion(hidden)]
WinFollow,
#[help(text="Open an URL in a new window")]
WinOpen(String),
#[completion(hidden)]
WinPasteUrl,
#[help(text="Zoom the current page in")]
ZoomIn,
#[help(text="Zoom the current page to 100%")]
ZoomNormal,
#[help(text="Zoom the current page out")]
ZoomOut,
}<|fim▁end|> | * the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions: |
<|file_name|>em-osspec-network.cpp<|end_file_name|><|fim▁begin|>/* -*-c++-*-
Copyright (C) 2003-2015 Runtime Revolution Ltd.
This file is part of LiveCode.
LiveCode is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License v3 as published by the Free
Software Foundation.
LiveCode is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with LiveCode. If not see <http://www.gnu.org/licenses/>. */
#include "em-util.h"<|fim▁hole|>#include "sysdefs.h"
#include "osspec.h"
/* ================================================================
* Socket handling
* ================================================================ */
MCSocket *
MCS_accept(uint16_t p_port,
MCObject *p_object,
MCNameRef p_message,
Boolean p_datagram,
Boolean p_secure,
Boolean p_sslverify,
MCStringRef p_sslcertfile)
{
MCEmscriptenNotImplemented();
return nil;
}
bool
MCS_ha(MCSocket *p_socket,
MCStringRef & r_address)
{
MCEmscriptenNotImplemented();
return false;
}<|fim▁end|> | |
<|file_name|>handlers.py<|end_file_name|><|fim▁begin|>import hashlib
import json
from PIL import Image
from django.http import HttpResponse
from django.shortcuts import render
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
from crits.core.class_mapper import class_from_id
from crits.core.crits_mongoengine import json_handler, create_embedded_source
from crits.core.crits_mongoengine import EmbeddedSource
from crits.core.handlers import build_jtable, jtable_ajax_list,jtable_ajax_delete
from crits.core.user_tools import user_sources
from crits.screenshots.screenshot import Screenshot
def get_screenshots_for_id(type_, _id, analyst, buckets=False):
"""
Get screenshots for a top-level object.
:param type_: The class type.
:type type_: str
:param _id: The ObjectId to lookup.
:type _id: str
:param analyst: The user looking up the screenshots.
:type analyst: str
:param buckets: Use buckets as tag lookups for screenshots.
:type buckets: boolean
:returns: list
"""
result = {'success': False}
sources = user_sources(analyst)
obj = class_from_id(type_, _id)
if not obj:
result['message'] = "No valid top-level object found."
return result
screenshots = Screenshot.objects(id__in=obj.screenshots,
source__name__in=sources)
bucket_shots = Screenshot.objects(tags__in=obj.bucket_list,
source__name__in=sources)
final_shots = []
for s in screenshots:
if s.screenshot and s.thumb and s not in final_shots:
final_shots.append(s)
for b in bucket_shots:
if b not in final_shots:
# since .bucket isn't supported, this will show up in the template
# under unsupported_attrs, which is ok.
b.bucket = True
final_shots.append(b)
result['success'] = True
result['screenshots'] = final_shots
return result<|fim▁hole|>def get_screenshot(_id=None, tag=None, analyst=None, thumb=False):
"""
Get a screenshot.
:param _id: The ObjectId to lookup.
:type _id: str
:param tag: The tag to look for.
:type tag: str
:param analyst: The user looking up the screenshots.
:type analyst: str
:returns: screenshot
"""
if not analyst:
return None
sources = user_sources(analyst)
if _id:
screenshot = Screenshot.objects(id=_id,
source__name__in=sources).first()
if tag:
screenshot = Screenshot.objects(tags=tag,
source__name__in=sources).first()
if not screenshot:
return None
if thumb:
im = Image.open(screenshot.thumb)
else:
im = Image.open(screenshot.screenshot)
response = HttpResponse(content_type="image/png")
im.save(response, "PNG")
return response
def add_screenshot(description, tags, source, method, reference, tlp, analyst,
screenshot, screenshot_ids, oid, otype):
"""
Add a screenshot or screenshots to a top-level object.
:param description: The description of the screenshot.
:type description: str
:param tags: Tags associated with this screenshot.
:type tags: str, list
    :param source: The source that provided the screenshot.
    :type source: str,
:class:`crits.core.crits_mongoengine.EmbeddedSource`,
list of :class:`crits.core.crits_mongoengine.EmbeddedSource`
:param method: The method of acquiring this screenshot.
:type method: str
:param reference: A reference to the source of this screenshot.
:type reference: str
:param tlp: The TLP Sharing of this screenshot.
:type tlp: str
:param analyst: The user adding the screenshot.
:type analyst: str
:param screenshot: The screenshot to add.
:type screenshot: file handle
:param screenshot_ids: A list of ObjectIds of existing screenshots to add.
:type screenshot_ids: str, list
:param oid: The ObjectId of the top-level object to add to.
:type oid: str
:param otype: The top-level object type.
:type otype: str
:returns: dict with keys:
'success' (boolean),
'message' (str),
'id' (str) if successful,
'html' (str) if successful,
"""
result = {'success': False}
if not source:
result['message'] = "Must provide a source"
return result
obj = class_from_id(otype, oid)
if not obj:
result['message'] = "Could not find the top-level object."
return result
final_screenshots = []
if screenshot_ids:
if not isinstance(screenshot_ids, list):
screenshot_list = screenshot_ids.split(',')
else:
screenshot_list = screenshot_ids
for screenshot_id in screenshot_list:
screenshot_id = screenshot_id.strip().lower()
s = Screenshot.objects(id=screenshot_id).first()
if s:
if isinstance(source, basestring) and len(source) > 0:
s_embed = create_embedded_source(source, method=method,
reference=reference,
analyst=analyst,
tlp=tlp)
s.add_source(s_embed)
elif isinstance(source, EmbeddedSource):
s.add_source(source=source, method=method,
reference=reference, analyst=analyst, tlp=tlp)
elif isinstance(source, list) and len(source) > 0:
for x in source:
if isinstance(x, EmbeddedSource):
s.add_source(x, method=method, reference=reference,
analyst=analyst, tlp=tlp)
s.add_tags(tags)
s.save()
obj.screenshots.append(screenshot_id)
obj.save()
final_screenshots.append(s)
else:
md5 = hashlib.md5(screenshot.read()).hexdigest()
check = Screenshot.objects(md5=md5).first()
if check:
s = check
s.add_tags(tags)
else:
s = Screenshot()
s.analyst = analyst
s.description = description
s.md5 = md5
screenshot.seek(0)
s.add_screenshot(screenshot, tags)
if isinstance(source, basestring) and len(source) > 0:
s_embed = create_embedded_source(source, method=method,
reference=reference,
analyst=analyst,
tlp=tlp)
s.add_source(s_embed)
elif isinstance(source, EmbeddedSource):
s.add_source(source, method=method, reference=reference,
analyst=analyst, tlp=tlp)
elif isinstance(source, list) and len(source) > 0:
for x in source:
if isinstance(x, EmbeddedSource):
s.add_source(x, method=method, reference=reference,
analyst=analyst, tlp=tlp)
if not s.screenshot and not s.thumb:
result['message'] = "Problem adding screenshot to GridFS. No screenshot uploaded."
return result
try:
s.save(username=analyst)
final_screenshots.append(s)
except Exception, e:
result['message'] = str(e)
return result
obj.screenshots.append(str(s.id))
obj.save(username=analyst)
result['message'] = "Screenshot(s) successfully uploaded!"
result['id'] = str(s.id)
final_html = ""
for f in final_screenshots:
final_html += create_screenshot_html(f, oid, otype)
result['html'] = final_html
result['success'] = True
return result
def create_screenshot_html(s, oid, otype):
"""
Create HTML for a thumbnail view for the screenshot.
:param s: The screenshot.
:type s: :class:`crits.screenshots.screenshot.Screenshot`
:param oid: The ObjectId of the top-level object it's associating with.
:type oid: str
:param otype: The type of top-level object it's associating with.
:returns: str
"""
if s.tags and s.description:
description = s.description + ": " + ','.join(s.tags)
else:
description = s.md5
description += " (submitted by %s)" % s.analyst
html = '<a href="%s" title="%s" data-id="%s" data-dialog><img class="ss_no_bucket" src="%s">' % \
(reverse('crits-screenshots-views-render_screenshot',
args=[s.id]),
description,
str(s.id),
reverse('crits-screenshots-views-render_screenshot',
args=[s.id, 'thumb']))
html += '<span class="remove_screenshot ui-icon ui-icon-trash" data-id="'
html += '%s" data-obj="%s" data-type="%s" title="Remove from %s">' % (str(s.id),
oid,
otype,
otype)
html += '</span><span class="copy_ss_id ui-icon ui-icon-radio-on" '
html += 'data-id="%s" title="Copy ID to clipboard"></span>' % str(s.id)
return html
def delete_screenshot_from_object(obj, oid, sid, analyst):
"""
Remove a screenshot from a top-level object.
:param obj: The type of top-level object to work with.
:type obj: str
:param oid: The ObjectId of the top-level object to work with.
:type oid: str
:param sid: The ObjectId of the screenshot to remove.
:type sid: str
:param analyst: The user removing the screenshot.
:type analyst: str
:returns: dict with keys "success" (boolean) and "message" (str).
"""
result = {'success': False}
klass = class_from_id(obj, oid)
if not klass:
result['message'] = "Could not find Object to delete screenshot from."
return result
clean = [s for s in klass.screenshots if s != sid]
klass.screenshots = clean
try:
klass.save(username=analyst)
result['success'] = True
return result
except Exception, e:
result['message'] = str(e)
return result
def generate_screenshot_jtable(request, option):
"""
Generate the jtable data for rendering in the list template.
:param request: The request for this jtable.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
obj_type = Screenshot
type_ = "screenshot"
mapper = obj_type._meta['jtable_opts']
if option == "jtlist":
# Sets display url
details_url = mapper['details_url']
details_url_key = mapper['details_url_key']
fields = mapper['fields']
response = jtable_ajax_list(obj_type,
details_url,
details_url_key,
request,
includes=fields)
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
if option == "jtdelete":
response = {"Result": "ERROR"}
if jtable_ajax_delete(obj_type,request):
response = {"Result": "OK"}
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
jtopts = {
'title': "Screenshots",
'default_sort': mapper['default_sort'],
'listurl': reverse('crits-%ss-views-%ss_listing' % (type_,
type_),
args=('jtlist',)),
'deleteurl': reverse('crits-%ss-views-%ss_listing' % (type_,
type_),
args=('jtdelete',)),
'searchurl': reverse(mapper['searchurl']),
'fields': mapper['jtopts_fields'],
'hidden_fields': mapper['hidden_fields'],
'linked_fields': mapper['linked_fields'],
'details_link': mapper['details_link'],
'no_sort': mapper['no_sort']
}
jtable = build_jtable(jtopts,request)
jtable['toolbar'] = []
if option == "inline":
return render(request, "jtable.html",
{'jtable': jtable,
'jtid': '%s_listing' % type_,
'button' : '%ss_tab' % type_},
)
else:
return render(request, "%s_listing.html" % type_,
{'jtable': jtable,
'jtid': '%s_listing' % type_},
)<|fim▁end|> | |
<|file_name|>test_init.py<|end_file_name|><|fim▁begin|>"""The tests for the analytics ."""
from unittest.mock import patch
from homeassistant.components.analytics.const import ANALYTICS_ENDPOINT_URL, DOMAIN
from homeassistant.setup import async_setup_component
MOCK_VERSION = "1970.1.0"
async def test_setup(hass):
"""Test setup of the integration."""
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
assert DOMAIN in hass.data<|fim▁hole|>async def test_websocket(hass, hass_ws_client, aioclient_mock):
"""Test WebSocket commands."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}})
await hass.async_block_till_done()
ws_client = await hass_ws_client(hass)
await ws_client.send_json({"id": 1, "type": "analytics"})
response = await ws_client.receive_json()
assert response["success"]
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await ws_client.send_json(
{"id": 2, "type": "analytics/preferences", "preferences": {"base": True}}
)
response = await ws_client.receive_json()
assert len(aioclient_mock.mock_calls) == 1
assert response["result"]["preferences"]["base"]
await ws_client.send_json({"id": 3, "type": "analytics"})
response = await ws_client.receive_json()
assert response["result"]["preferences"]["base"]<|fim▁end|> | |
<|file_name|>issue-21801.rs<|end_file_name|><|fim▁begin|>// compile-flags: -Cmetadata=aux
pub struct Foo;<|fim▁hole|>
impl Foo {
pub fn new<F>(f: F) -> Foo where F: FnMut() -> i32 {
loop {}
}
}<|fim▁end|> | |
<|file_name|>xml_name.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#<|fim▁hole|># translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
#
class XmlNamespace(object):
def __init__(self, namespace):
self._namespace = namespace
def name(self, tag):
return "{%s}%s" % (self._namespace, tag)
class XmlNamer(object):
"""Initialize me with a DOM node or a DOM document node (the
toplevel node you get when parsing an XML file). Then use me
to generate fully qualified XML names.
    >>> xml = '<office:document-styles xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0"></office:document-styles>'
>>> from lxml import etree
>>> namer = XmlNamer(etree.fromstring(xml))
    >>> namer.name('office', 'blah')
    '{urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah'
    >>> namer.name('office:blah')
    '{urn:oasis:names:tc:opendocument:xmlns:office:1.0}blah'
I can also give you XmlNamespace objects if you give me the abbreviated
namespace name. These are useful if you need to reference a namespace
continuously.
    >>> office_ns = namer.namespace('office')
    >>> office_ns.name('foo')
    '{urn:oasis:names:tc:opendocument:xmlns:office:1.0}foo'
"""
def __init__(self, dom_node):
# Allow the user to pass a dom node of the
# XML document nodle
if hasattr(dom_node, 'nsmap'):
self.nsmap = dom_node.nsmap
else:
self.nsmap = dom_node.getroot().nsmap
def name(self, namespace_shortcut, tag=None):
# If the user doesn't pass an argument into 'tag'
# then namespace_shortcut contains a tag of the form
# 'short-namespace:tag'
if tag is None:
try:
namespace_shortcut, tag = namespace_shortcut.split(':')
except ValueError:
# If there is no namespace in namespace_shortcut.
tag = namespace_shortcut.lstrip("{}")
return tag
return "{%s}%s" % (self.nsmap[namespace_shortcut], tag)
def namespace(self, namespace_shortcut):
return XmlNamespace(self.nsmap[namespace_shortcut])<|fim▁end|> | # Copyright 2008 Zuza Software Foundation
#
# This file is part of translate.
# |
<|file_name|>optout.py<|end_file_name|><|fim▁begin|>"""
Bot wide hook opt-out for channels
"""
import asyncio
from collections import defaultdict
from fnmatch import fnmatch
from functools import total_ordering
from threading import RLock
from sqlalchemy import Table, Column, String, Boolean, PrimaryKeyConstraint, and_
from cloudbot import hook
from cloudbot.hook import Priority
from cloudbot.util import database, web
from cloudbot.util.formatting import gen_markdown_table
optout_table = Table(
'optout',
database.metadata,
Column('network', String),
Column('chan', String),
Column('hook', String),
Column('allow', Boolean, default=False),
PrimaryKeyConstraint('network', 'chan', 'hook')
)
optout_cache = defaultdict(list)
cache_lock = RLock()
@total_ordering
class OptOut:
def __init__(self, channel, hook_pattern, allow):
self.channel = channel.casefold()
self.hook = hook_pattern.casefold()
self.allow = allow
def __lt__(self, other):
if isinstance(other, OptOut):
diff = len(self.channel) - len(other.channel)
if diff:
return diff < 0
return len(self.hook) < len(other.hook)
return NotImplemented
def __str__(self):
return "{} {} {}".format(self.channel, self.hook, self.allow)
def __repr__(self):
return "{}({}, {}, {})".format(self.__class__.__name__, self.channel, self.hook, self.allow)
def match(self, channel, hook_name):
return self.match_chan(channel) and fnmatch(hook_name.casefold(), self.hook)
def match_chan(self, channel):
return fnmatch(channel.casefold(), self.channel)
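# Example (illustrative): OptOut("#chan*", "myplugin.*", False).match("#Channel",
# "myplugin.mycmd") is True, since both patterns are case-folded fnmatch globs;
# the sieve below would therefore block that hook in that channel.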
@asyncio.coroutine
def check_channel_permissions(event, chan, *perms):
old_chan = event.chan
event.chan = chan
allowed = yield from event.check_permissions(*perms)
event.chan = old_chan
return allowed
def get_channel_optouts(conn_name, chan=None):
with cache_lock:
return [opt for opt in optout_cache[conn_name] if not chan or opt.match_chan(chan)]
def format_optout_list(opts):
headers = ("Channel Pattern", "Hook Pattern", "Allowed")
table = [(opt.channel, opt.hook, "true" if opt.allow else "false") for opt in opts]
return gen_markdown_table(headers, table)
def set_optout(db, conn, chan, pattern, allowed):
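    # Upsert on the (network, chan, hook) key: try an UPDATE first and, if no row
    # matched, INSERT a new entry; then rebuild the in-memory optout cache.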
conn_cf = conn.casefold()
chan_cf = chan.casefold()
pattern_cf = pattern.casefold()
clause = and_(optout_table.c.network == conn_cf, optout_table.c.chan == chan_cf, optout_table.c.hook == pattern_cf)
res = db.execute(optout_table.update().values(allow=allowed).where(clause))
if not res.rowcount:
db.execute(optout_table.insert().values(network=conn_cf, chan=chan_cf, hook=pattern_cf, allow=allowed))
db.commit()
load_cache(db)
def del_optout(db, conn, chan, pattern):
conn_cf = conn.casefold()
chan_cf = chan.casefold()
pattern_cf = pattern.casefold()
clause = and_(optout_table.c.network == conn_cf, optout_table.c.chan == chan_cf, optout_table.c.hook == pattern_cf)
res = db.execute(optout_table.delete().where(clause))
db.commit()
load_cache(db)
return res.rowcount > 0
def clear_optout(db, conn, chan=None):
conn_cf = conn.casefold()
if chan:
chan_cf = chan.casefold()
clause = and_(optout_table.c.network == conn_cf, optout_table.c.chan == chan_cf)
else:
clause = optout_table.c.network == conn_cf
<|fim▁hole|> res = db.execute(optout_table.delete().where(clause))
db.commit()
load_cache(db)
return res.rowcount
_STR_TO_BOOL = {
"yes": True,
"y": True,
"no": False,
"n": False,
"on": True,
"off": False,
"enable": True,
"disable": False,
"allow": True,
"deny": False,
}
@hook.onload
def load_cache(db):
with cache_lock:
optout_cache.clear()
for row in db.execute(optout_table.select()):
optout_cache[row["network"]].append(OptOut(row["chan"], row["hook"], row["allow"]))
for opts in optout_cache.values():
opts.sort(reverse=True)
# noinspection PyUnusedLocal
@hook.sieve(priority=Priority.HIGHEST)
def optout_sieve(bot, event, _hook):
if not event.chan or not event.conn:
return event
hook_name = _hook.plugin.title + "." + _hook.function_name
with cache_lock:
optouts = optout_cache[event.conn.name]
for _optout in optouts:
if _optout.match(event.chan, hook_name):
if not _optout.allow:
if _hook.type == "command":
event.notice("Sorry, that command is disabled in this channel.")
return None
break
return event
@hook.command
@asyncio.coroutine
def optout(text, event, chan, db, conn):
"""[chan] <pattern> [allow] - Set the global allow option for hooks matching <pattern> in [chan], or the current channel if not specified
:type text: str
:type event: cloudbot.event.CommandEvent
"""
args = text.split()
if args[0].startswith("#") and len(args) > 1:
chan = args.pop(0)
has_perm = yield from check_channel_permissions(event, chan, "op", "chanop", "snoonetstaff", "botcontrol")
if not has_perm:
event.notice("Sorry, you may not configure optout settings for that channel.")
return
pattern = args.pop(0)
allowed = False
if args:
allow = args.pop(0)
try:
allowed = _STR_TO_BOOL[allow.lower()]
except KeyError:
return "Invalid allow option."
yield from event.async_call(set_optout, db, conn.name, chan, pattern, allowed)
return "{action} hooks matching {pattern} in {channel}.".format(
action="Enabled" if allowed else "Disabled",
pattern=pattern,
channel=chan
)
@hook.command
@asyncio.coroutine
def deloptout(text, event, chan, db, conn):
"""[chan] <pattern> - Delete global optout hooks matching <pattern> in [chan], or the current channel if not specified"""
args = text.split()
if len(args) > 1:
chan = args.pop(0)
has_perm = yield from check_channel_permissions(event, chan, "op", "chanop", "snoonetstaff", "botcontrol")
if not has_perm:
event.notice("Sorry, you may not configure optout settings for that channel.")
return
pattern = args.pop(0)
deleted = yield from event.async_call(del_optout, db, conn, chan, pattern)
if deleted:
return "Deleted optout '{}' in channel '{}'.".format(pattern, chan)
return "No matching optouts in channel '{}'.".format(chan)
@asyncio.coroutine
def check_global_perms(event):
chan = event.chan
text = event.text
if text:
chan = text.split()[0]
can_global = yield from event.check_permissions("snoonetstaff", "botcontrol")
allowed = can_global or (yield from check_channel_permissions(event, chan, "op", "chanop"))
if not allowed:
event.notice("Sorry, you are not allowed to use this command.")
if chan.lower() == "global":
if not can_global:
event.notice("You do not have permission to access global opt outs")
allowed = False
chan = None
return chan, allowed
@hook.command("listoptout", autohelp=False)
@asyncio.coroutine
def list_optout(conn, event, async_call):
"""[channel] - View the opt out data for <channel> or the current channel if not specified. Specify "global" to view all data for this network
:type conn: cloudbot.clients.irc.Client
:type event: cloudbot.event.CommandEvent
"""
chan, allowed = yield from check_global_perms(event)
if not allowed:
return
opts = yield from async_call(get_channel_optouts, conn.name, chan)
table = yield from async_call(format_optout_list, opts)
return web.paste(table, "md", "hastebin")
@hook.command("clearoptout", autohelp=False)
@asyncio.coroutine
def clear(conn, event, db, async_call):
"""[channel] - Clears the optout list for a channel. Specify "global" to clear all data for this network"""
chan, allowed = yield from check_global_perms(event)
if not allowed:
return
count = yield from async_call(clear_optout, db, conn.name, chan)
return "Cleared {} opt outs from the list.".format(count)<|fim▁end|> | |
<|file_name|>math ceiling.ts<|end_file_name|><|fim▁begin|>//<|fim▁hole|><|fim▁end|> | Math.ceil(foo);
// |
<|file_name|>feature_compute2.py<|end_file_name|><|fim▁begin|>''' Computes feature representations '''
from __future__ import division, print_function
from hscom import __common__
(print, print_, print_on, print_off,
rrr, profile) = __common__.init(__name__, '[fc2]')
# scientific
import numpy as np
# python
from os.path import join
# hotspotter
from hscom import helpers as util
from hscom import params
from hscom import fileio as io
from hscom.Parallelize import parallel_compute
import extern_feat
def whiten_features(desc_list):
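    # Stacks all chips' descriptors into one array, whitens them jointly, rescales
    # to byte range, then slices the whitened rows back into per-chip arrays in place.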
import algos
print('[fc2] * Whitening features')
ax2_desc = np.vstack(desc_list)
ax2_desc_white = algos.scale_to_byte(algos.whiten(ax2_desc))
index = 0
offset = 0
for cx in xrange(len(desc_list)):
old_desc = desc_list[cx]
        print('[fc2] * ' + util.info(old_desc, 'old_desc'))
offset = len(old_desc)
new_desc = ax2_desc_white[index:(index + offset)]
desc_list[cx] = new_desc
index += offset
<|fim▁hole|># Main Script
# =======================================
@profile
def bigcache_feat_save(cache_dir, uid, ext, kpts_list, desc_list):
print('[fc2] Caching desc_list and kpts_list')
io.smart_save(kpts_list, cache_dir, 'kpts_list', uid, ext)
io.smart_save(desc_list, cache_dir, 'desc_list', uid, ext)
@profile
def bigcache_feat_load(cache_dir, uid, ext):
#io.debug_smart_load(cache_dir, fname='*', uid=uid, ext='.*')
kpts_list = io.smart_load(cache_dir, 'kpts_list', uid, ext, can_fail=True)
desc_list = io.smart_load(cache_dir, 'desc_list', uid, ext, can_fail=True)
if desc_list is None or kpts_list is None:
return None
desc_list = desc_list.tolist()
kpts_list = kpts_list.tolist()
print('[fc2] Loaded kpts_list and desc_list from big cache')
return kpts_list, desc_list
@profile
def sequential_feat_load(feat_cfg, feat_fpath_list):
kpts_list = []
desc_list = []
# Debug loading (seems to use lots of memory)
print('\n')
try:
nFeats = len(feat_fpath_list)
prog_label = '[fc2] Loading feature: '
mark_progress, end_progress = util.progress_func(nFeats, prog_label)
for count, feat_path in enumerate(feat_fpath_list):
try:
npz = np.load(feat_path, mmap_mode=None)
except IOError:
print('\n')
util.checkpath(feat_path, verbose=True)
print('IOError on feat_path=%r' % feat_path)
raise
kpts = npz['arr_0']
desc = npz['arr_1']
npz.close()
kpts_list.append(kpts)
desc_list.append(desc)
mark_progress(count)
end_progress()
print('[fc2] Finished load of individual kpts and desc')
except MemoryError:
print('\n------------')
print('[fc2] Out of memory')
print('[fc2] Trying to read: %r' % feat_path)
print('[fc2] len(kpts_list) = %d' % len(kpts_list))
print('[fc2] len(desc_list) = %d' % len(desc_list))
raise
if feat_cfg.whiten:
desc_list = whiten_features(desc_list)
return kpts_list, desc_list
# Maps a preference string into a function
feat_type2_precompute = {
'hesaff+sift': extern_feat.precompute_hesaff,
}
@profile
def _load_features_individually(hs, cx_list):
use_cache = not params.args.nocache_feats
feat_cfg = hs.prefs.feat_cfg
feat_dir = hs.dirs.feat_dir
feat_uid = feat_cfg.get_uid()
print('[fc2] Loading ' + feat_uid + ' individually')
# Build feature paths
rchip_fpath_list = [hs.cpaths.cx2_rchip_path[cx] for cx in iter(cx_list)]
cid_list = hs.tables.cx2_cid[cx_list]
feat_fname_fmt = ''.join(('cid%d', feat_uid, '.npz'))
feat_fpath_fmt = join(feat_dir, feat_fname_fmt)
feat_fpath_list = [feat_fpath_fmt % cid for cid in cid_list]
#feat_fname_list = [feat_fname_fmt % cid for cid in cid_list]
# Compute features in parallel, saving them to disk
kwargs_list = [feat_cfg.get_dict_args()] * len(rchip_fpath_list)
pfc_kwargs = {
'func': feat_type2_precompute[feat_cfg.feat_type],
'arg_list': [rchip_fpath_list, feat_fpath_list, kwargs_list],
'num_procs': params.args.num_procs,
'lazy': use_cache,
}
parallel_compute(**pfc_kwargs)
# Load precomputed features sequentially
kpts_list, desc_list = sequential_feat_load(feat_cfg, feat_fpath_list)
return kpts_list, desc_list
@profile
def _load_features_bigcache(hs, cx_list):
# args for smart load/save
feat_cfg = hs.prefs.feat_cfg
feat_uid = feat_cfg.get_uid()
cache_dir = hs.dirs.cache_dir
sample_uid = util.hashstr_arr(cx_list, 'cids')
bigcache_uid = '_'.join((feat_uid, sample_uid))
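    # The big-cache key couples the feature-config uid with a hash of the
    # requested chip ids, so changing either invalidates the cached arrays.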
ext = '.npy'
loaded = bigcache_feat_load(cache_dir, bigcache_uid, ext)
if loaded is not None: # Cache Hit
kpts_list, desc_list = loaded
else: # Cache Miss
        kpts_list, desc_list = _load_features_individually(hs, cx_list)
# Cache all the features
bigcache_feat_save(cache_dir, bigcache_uid, ext, kpts_list, desc_list)
return kpts_list, desc_list
@profile
@util.indent_decor('[fc2]')
def load_features(hs, cx_list=None, **kwargs):
# TODO: There needs to be a fast way to ensure that everything is
# already loaded. Same for cc2.
print('=============================')
print('[fc2] Precomputing and loading features: %r' % hs.get_db_name())
#----------------
# COMPUTE SETUP
#----------------
use_cache = not params.args.nocache_feats
use_big_cache = use_cache and cx_list is None
feat_cfg = hs.prefs.feat_cfg
feat_uid = feat_cfg.get_uid()
if hs.feats.feat_uid != '' and hs.feats.feat_uid != feat_uid:
print('[fc2] Disagreement: OLD_feat_uid = %r' % hs.feats.feat_uid)
print('[fc2] Disagreement: NEW_feat_uid = %r' % feat_uid)
print('[fc2] Unloading all chip information')
hs.unload_all()
hs.load_chips(cx_list=cx_list)
print('[fc2] feat_uid = %r' % feat_uid)
# Get the list of chip features to load
cx_list = hs.get_valid_cxs() if cx_list is None else cx_list
if not np.iterable(cx_list):
cx_list = [cx_list]
    print('[fc2] len(cx_list) = %r' % len(cx_list))
if len(cx_list) == 0:
return # HACK
cx_list = np.array(cx_list) # HACK
if use_big_cache: # use only if all descriptors requested
kpts_list, desc_list = _load_features_bigcache(hs, cx_list)
else:
        kpts_list, desc_list = _load_features_individually(hs, cx_list)
# Extend the datastructure if needed
list_size = max(cx_list) + 1
util.ensure_list_size(hs.feats.cx2_kpts, list_size)
util.ensure_list_size(hs.feats.cx2_desc, list_size)
# Copy the values into the ChipPaths object
for lx, cx in enumerate(cx_list):
hs.feats.cx2_kpts[cx] = kpts_list[lx]
for lx, cx in enumerate(cx_list):
hs.feats.cx2_desc[cx] = desc_list[lx]
hs.feats.feat_uid = feat_uid
print('[fc2]=============================')
def clear_feature_cache(hs):
feat_cfg = hs.prefs.feat_cfg
feat_dir = hs.dirs.feat_dir
cache_dir = hs.dirs.cache_dir
feat_uid = feat_cfg.get_uid()
print('[fc2] clearing feature cache: %r' % feat_dir)
util.remove_files_in_dir(feat_dir, '*' + feat_uid + '*', verbose=True, dryrun=False)
util.remove_files_in_dir(cache_dir, '*' + feat_uid + '*', verbose=True, dryrun=False)
pass<|fim▁end|> |
# ======================================= |
<|file_name|>CalendarLink.java<|end_file_name|><|fim▁begin|>/**
* Copyright © 2002 Instituto Superior Técnico
*
* This file is part of FenixEdu Academic.
*
* FenixEdu Academic is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* FenixEdu Academic is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
*/
package org.fenixedu.academic.ui.faces.components.util;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.struts.util.MessageResources;
import org.fenixedu.academic.domain.Exam;
import org.fenixedu.academic.domain.ExecutionCourse;
import org.fenixedu.academic.domain.Project;
import org.fenixedu.academic.domain.WrittenEvaluation;
import org.fenixedu.academic.domain.WrittenTest;
import org.fenixedu.academic.util.Bundle;
import org.fenixedu.academic.util.DateFormatUtil;
public class CalendarLink {
private Calendar objectOccurrence;
private String objectLinkLabel;
private Map<String, String> linkParameters = new HashMap<String, String>();
private boolean asLink;
public CalendarLink(boolean asLink) {
setAsLink(asLink);
}
public CalendarLink() {
this(true);
}
public CalendarLink(final ExecutionCourse executionCourse, final WrittenEvaluation writtenEvaluation, final Locale locale) {
setObjectOccurrence(writtenEvaluation.getDay());
setObjectLinkLabel(constructCalendarPresentation(executionCourse, writtenEvaluation, locale));
}
public CalendarLink(final ExecutionCourse executionCourse, final Project project, final Date date, final String tail,
final Locale locale) {
setObjectOccurrence(date);
setObjectLinkLabel(constructCalendarPresentation(executionCourse, project, date, tail, locale));
}
public void setObjectOccurrence(Calendar objectOccurrence) {
this.objectOccurrence = objectOccurrence;
}
public void setObjectOccurrence(Date objectOccurrence) {
final Calendar calendar = Calendar.getInstance();
calendar.setTime(objectOccurrence);
this.objectOccurrence = calendar;
}
public Calendar getObjectOccurrence() {
return this.objectOccurrence;
}
public void setObjectLinkLabel(String objectLinkLabel) {
this.objectLinkLabel = objectLinkLabel;
}
public String getObjectLinkLabel() {
return this.objectLinkLabel;
}
public void setLinkParameters(Map<String, String> linkParameters) {
this.linkParameters = linkParameters;
}
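    // Builds the final URL from the given page plus the stored link parameters,
    // appending with '&' when the page already carries a '?' query separator.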
public String giveLink(String editLinkPage) {
final StringBuilder linkParameters = new StringBuilder();
linkParameters.append(editLinkPage);
if (this.linkParameters != null && !this.linkParameters.isEmpty()) {
linkParameters.append(editLinkPage.indexOf('?') > 0 ? '&' : '?');
for (final Iterator<Entry<String, String>> iterator = this.linkParameters.entrySet().iterator(); iterator.hasNext();) {
final Entry<String, String> entry = iterator.next();
linkParameters.append(entry.getKey());
linkParameters.append('=');
linkParameters.append(entry.getValue());
if (iterator.hasNext()) {
linkParameters.append('&');
}
}
}
return linkParameters.toString();
}
public void addLinkParameter(final String key, final String value) {
linkParameters.put(key, value);
}
<|fim▁hole|>
private String constructCalendarPresentation(final ExecutionCourse executionCourse,
final WrittenEvaluation writtenEvaluation, final Locale locale) {
final StringBuilder stringBuilder = new StringBuilder();
if (writtenEvaluation instanceof WrittenTest) {
stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.test"));
} else if (writtenEvaluation instanceof Exam) {
stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.exam"));
}
stringBuilder.append(" ");
stringBuilder.append(executionCourse.getSigla());
stringBuilder.append(" (");
stringBuilder.append(DateFormatUtil.format("HH:mm", writtenEvaluation.getBeginningDate()));
stringBuilder.append("-");
stringBuilder.append(DateFormatUtil.format("HH:mm", writtenEvaluation.getEndDate()));
stringBuilder.append(")");
return stringBuilder.toString();
}
private String constructCalendarPresentation(final ExecutionCourse executionCourse, final Project project, final Date time,
final String tail, final Locale locale) {
final StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append(messages.getMessage(locale, "label.evaluation.shortname.project"));
stringBuilder.append(" ");
stringBuilder.append(executionCourse.getSigla());
stringBuilder.append(" (");
stringBuilder.append(DateFormatUtil.format("HH:mm", time));
stringBuilder.append(") ");
stringBuilder.append(tail);
return stringBuilder.toString();
}
public boolean isAsLink() {
return asLink;
}
public void setAsLink(boolean asLink) {
this.asLink = asLink;
}
}<|fim▁end|> | private static final MessageResources messages = MessageResources.getMessageResources(Bundle.DEGREE); |
<|file_name|>test_client.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import os
import collections
import mock
import accurev.client
import accurev.depot
class TestAccuRevClient(unittest.TestCase):
def setUp(self):
self.client = accurev.client.Client()
def test_cmd(self):
self.client.chdir('somedirectory')
expected = "accurev somecommand"
with mock.patch.object(accurev.utils, "cmd") as mocked:
self.client.cmd('somecommand')
mocked.assert_called_once_with('accurev somecommand', 'somedirectory')
def test_xml_cmd(self):
with mock.patch.object(self.client, "tempfile_cmd") as mocked:
self.client.xml_cmd('somestring')
mocked.assert_called_once_with('xml', 'somestring')
def test_info(self):
string = """Shell: /bin/bash
Principal: automaticTasks
Host: madprdci2
Domain: (none)
Server name: 169.0.0.1
Port: 5050
DB Encoding: Unicode
ACCUREV_BIN: /opt/accurev-5.5/bin
Client time: 2017/05/14 04:29:59 CEST (1494728999)
Server time: 2017/05/14 04:30:00 CEST (1494729000)"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = string, ''
self.assertTrue(isinstance(self.client.info, dict))
expected = [
'Shell',
'Principal',
'Host',
'Domain',
'Server name',
'Port',
'DB Encoding',
'ACCUREV_BIN',
'Client time',
'Server time',
]
self.assertEqual(len(self.client.info.keys()), len(expected))
def test_depot_count(self):
string = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="show depots"
TaskId="12492">
<Element
Number="1"
Name="OFFICE"
Slice="1"
exclusiveLocking="false"
case="insensitive"
locWidth="128"/>
<Element
Number="2"
Name="PROVIDER"
Slice="2"
exclusiveLocking="false"
case="insensitive"
locWidth="128"/>
</AcResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = string, ''
depots = self.client.depots
self.assertEqual(len(depots.keys()), 2)
for d in depots.values():
self.assertTrue(isinstance(d, accurev.depot.Depot))
def test_login_permanent(self):
with mock.patch.object(self.client, "cmd") as mocked:
self.client.login('user', 'pass', permanent=True)
mocked.assert_called_once_with('login -n user pass')
def test_users(self):
xml = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="show users"
TaskId="647018">
<Element
Number="1"
Name="Administrator"
Kind="full"/>
<Element
Number="2"
Name="SomeoneElse"
Kind="full"/>
</AcResponse>"""
with mock.patch.object(self.client, "user_show") as mocked:
mocked.return_value = xml
users = list(self.client.users)
self.assertTrue(len(users), 2)
def test_tempfile_cmd(self):
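        # NamedTemporaryFile is replaced with a real file of a fixed name so
        # that the command string handed to cmd() can be asserted exactly.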
with mock.patch.object(accurev.client.tempfile, "NamedTemporaryFile") as mocktmp:
mocktmp.return_value = open('notrandomfile', 'w')
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = 'stdout', 'stderr'
self.client.tempfile_cmd('xml', 'world')
mocked.assert_called_once_with('xml -l notrandomfile')
if os.path.isfile('notrandomfile'):
os.unlink('notrandomfile')
def test_group_show_no_user(self):
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = '', ''
self.client.group_show()
mocked.assert_called_once_with('show -fx groups')
def test_group_show_with_user(self):
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = '', ''
self.client.group_show('user')
mocked.assert_called_once_with('show -fx -u user groups')
def test_member_show(self):
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = '', ''
self.client.member_show('group')
mocked.assert_called_once_with('show -fx -g group members')
def test_cpkdescribe(self):
query = "<AcRequest>\n"
query += "\t<cpkdescribe>\n"
query += "\t\t<depot>mycompany</depot>\n"
query += "\t\t<stream1>some_stream</stream1>\n"
query += "\t\t<issues>\n"
query += "\t\t\t<issueNum>1010</issueNum>\n"
query += "\t\t</issues>\n"
query += "\t</cpkdescribe>\n"
query += "</AcRequest>"
response = """<?xml version="1.0" encoding="utf-8"?>
<acResponse>
<issues>
<issue ancestry="direct">
<issueNum fid="1">1010</issueNum>
</issue>
</issues>
</acResponse>"""
with mock.patch.object(self.client, "xml_cmd") as mocked:
mocked.return_value = response, ''
issues = self.client.cpkdescribe(['1010'], 'mycompany', 'some_stream')
mocked.assert_called_once_with(query)
def test_schema(self):
response = """<?xml version="1.0" encoding="UTF-8"?>
<template name="default">
<lookupField fid="5"/>
<field name="issueNum" type="internal" label="Issue" reportWidth="10" fid="1"></field>
<field name="transNum" type="internal" label="Transaction" reportWidth="10" fid="2"> </field>
<field name="shortDescription" type="Text" label="Short Description" reportWidth="150" width="60" fid="3"></field>
<field name="state" type="Choose" label="State" reportWidth="10" fid="4">
<value>Open</value>
<value>Cancelled</value>
<value>Closed</value>
</field>
<field name="JIRA" type="Text" label="Jira Issue" reportWidth="10" width="15" fid="5"></field>
</template>"""
with mock.patch.object(self.client, "getconfig") as mocked:
mocked.return_value = response, ''
schema = self.client.schema('mycompany')
mocked.assert_called_once_with('mycompany', 'schema.xml')
def test_element_promote(self):
response = "<elements>\n"
response += """\t<e eid="10" v="1/1"/>\n"""
response += """\t<e eid="11" v="2/2"/>\n"""
response += "</elements>"
class Element:
pass
element_one = Element()
element_one.eid = "10"
element_one.real_version = "1/1"
element_two = Element()
element_two.eid = "11"
        element_two.real_version = "2/2"
element_list = [
element_one,
element_two
]
with mock.patch.object(self.client, "tempfile_cmd") as mocked:
self.client.element_promote(element_list, 'hello', 'world')
mocked.assert_called_once_with('promote -s hello -S world -Fx', response)
def test_issue_query(self):
expected = """<queryIssue issueDB="mycompany" useAltQuery="false">\n"""
expected += "\t<OR>\n"
expected += "\t\t<condition>1 == 10</condition>\n"
expected += "\t\t<condition>1 == 20</condition>\n"
expected += "\t</OR>\n"
expected += "</queryIssue>"
response = """<?something>\n"""
response += """<issueOne/>"""
response += """<issueTwo/>"""
with mock.patch.object(self.client, "xml_cmd") as mocked:
mocked.return_value = response, ''
out, err = self.client.issue_query('mycompany', ['10', '20'])
mocked.assert_called_once_with(expected)
def test_stream_show(self):
response = """<?xml version="1.0" encoding="utf-8"?>
<streams>
<stream
name="trunk"
depotName="OFFICE"
streamNumber="1"
isDynamic="true"
type="normal"
startTime="1197383792"
hasDefaultGroup="false"/>
</streams>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = response, ''
self.client.stream_show('mycompany', 'trunk')
mocked.assert_called_once_with('show -p mycompany -fxg -s trunk streams')
def test_stream_children(self):
response = """<?xml version="1.0" encoding="utf-8"?>
<streams>
<stream
name="trunk"
depotName="OFFICE"
streamNumber="1"
isDynamic="true"
type="normal"
startTime="1197383792"
hasDefaultGroup="false"/>
</streams>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = response, ''
self.client.stream_children('mycompany', 'trunk')
mocked.assert_called_once_with('show -p mycompany -fexvg -1 -s trunk streams')
def test_stream_family(self):
response = """<?xml version="1.0" encoding="utf-8"?>
<streams>
<stream
name="trunk"
depotName="OFFICE"
streamNumber="1"
isDynamic="true"
type="normal"
startTime="1197383792"
hasDefaultGroup="false"/>
</streams>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = response, ''
self.client.stream_family('mycompany', 'trunk')
mocked.assert_called_once_with('show -p mycompany -fexvg -r -s trunk streams')
def test_stream_issues(self):
expected = [
'issuelist -p mycompany -fx -s some_stream',
'issuelist -p mycompany -fx -s some_stream -i',
]
response = """<?xml version="1.0" encoding="utf-8"?>
<acResponse>
<issues>
<issue ancestry="direct">
<issueNum fid="1">101010</issueNum>
<transNum fid="2">4105368</transNum>
<shortDescription fid="3">Some fancy description</shortDescription>
<state fid="4">Open</state>
<JIRA fid="5">JIRA-10</JIRA>
</issue>
<issue ancestry="direct">
<issueNum fid="1">202020</issueNum>
<transNum fid="2">4106525</transNum>
<shortDescription fid="3">Another Fancy Description</shortDescription>
<state fid="4">Closed</state>
<JIRA fid="5">JIRA-20</JIRA>
</issue>
</issues>
</acResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = response, ''
# Ensure we prime the generator, otherwise nosetests won't consider
# the method as executed.
issues = list(self.client.stream_issues('mycompany', 'some_stream'))
for e in expected:
mocked.assert_any_call(e)
def test_stream_stat_default_group(self):
response = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="stat"
Directory="/jenkins/home/jenkins/pruebas/joaogn/pyacc"
TaskId="302012">
<element
location="/./ITQA"
dir="yes"
executable="no"
id="138803"
elemType="dir"
modTime="0"
hierType="parallel"
Virtual="2094/1"
namedVersion="ING_PRO_ITQA/1"
Real="32/1"
status="(backed)"/>
</AcResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = response, ''
elements = list(self.client.stream_stat('some_stream', default_group=True))
mocked.assert_called_once_with('stat -fexv -s some_stream -d')
def test_stream_stat_all(self):
response = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="stat"
Directory="/jenkins/home/jenkins/pruebas/joaogn/pyacc"
TaskId="302012">
<element
location="/./ITQA"
dir="yes"
executable="no"
id="138803"
elemType="dir"
modTime="0"
hierType="parallel"
Virtual="2094/1"
namedVersion="ING_PRO_ITQA/1"
Real="32/1"
status="(backed)"/>
</AcResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = response, ''
elements = list(self.client.stream_stat('some_stream', default_group=False))
mocked.assert_called_once_with('stat -fexv -s some_stream -a')
def test_modify_issue(self):
expected = """<modifyIssue issueDB="mycompany">\n"""
expected += "\t<issue>\n"
expected += """\t\t<one fid="1">1</one>\n"""
expected += """\t\t<two fid="2">2</two>\n"""
expected += "\t</issue>\n"
expected += "</modifyIssue>\n"
properties = collections.OrderedDict()
properties['one'] = {
'fid': '1',
'value': '1',
}
properties['two'] = {
'fid': '2',
'value': '2',
}
with mock.patch.object(self.client, "xml_cmd") as mocked:
mocked.return_value = '', ''
result = self.client.modify_issue(properties, 'mycompany')
mocked.assert_called_once_with(expected)
def test_cpkhist(self):
expected = '<acRequest>\n'
expected += '\t<cpkhist verbose="true">\n'
expected += '\t\t<depot>mycompany</depot>\n'
expected += '\t\t<issues>\n'
expected += '\t\t\t<issue>\n'
expected += '\t\t\t\t<issueNum>1</issueNum>\n'
expected += '\t\t\t</issue>\n'
expected += '\t\t\t<issue>\n'
expected += '\t\t\t\t<issueNum>2</issueNum>\n'
expected += '\t\t\t</issue>\n'
expected += '\t\t</issues>\n'
expected += '\t</cpkhist>\n'
expected += '</acRequest>'
with mock.patch.object(self.client, "xml_cmd") as mocked:
mocked.return_value = '', ''
result = self.client.cpkhist(['1', '2'], 'mycompany')
mocked.assert_called_once_with(expected)
def test_issue_promote(self):
expected = '<issues>\n'
expected += '\t<id>1</id>\n'
expected += '\t<id>2</id>\n'
expected += '</issues>'
with mock.patch.object(self.client, "tempfile_cmd") as mocked:<|fim▁hole|> self.client.issue_promote(['1', '2'], 'source', 'target')
mocked.assert_called_once_with('promote -s source -S target -Fx', expected)
def test_default_group_promote(self):
with mock.patch.object(self.client, "cmd") as mocked:
self.client.default_group_promote('source', 'target')
mocked.assert_called_once_with('promote -s source -S target -d')
def test_refs_show(self):
xml = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="show refs"
TaskId="316705">
<Element
Name="reftree_one"
Storage="E:/RefTree/reftree_one"
Host="hostname"
Type="3"
user_id="1"
Stream="20"
user_name="Administrator"/>
</AcResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = xml, ''
self.client.refs_show()
mocked.assert_called_once_with('show -fexv refs')
def test_hist(self):
xml = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="hist"
TaskId="646546">
<element
id="100">
<transaction
id="2020"
type="promote"
time="1495051170"
user="JohnDoe"
streamName="StreamDestination"
streamNumber="13638"
fromStreamName="StreamOrigin"
fromStreamNumber="13752">
<comment>A nice comment</comment>
<version
path="/some/path"
eid="90"
virtual="13638/2"
real="18125/1"
virtualNamedVersion="StreamDestination/2"
realNamedVersion="UserWorkspace/1"
elem_type="text"
dir="no">
<issueNum>50</issueNum>
</version>
</transaction>
</element>
</AcResponse>"""
with mock.patch.object(self.client, "cmd") as mocked:
mocked.return_value = xml, ''
self.client.hist('100', 'mycompany')
mocked.assert_called_once_with('hist -fexv -p mycompany -e 100')
def test_cpkdepend(self):
xml = """<?xml version="1.0" encoding="utf-8"?>
<AcResponse
Command="cpkdepend"
TaskId="646546">
<issueDependencies>
<issueDependency>
<dependencies>
<issue number="10"/>
</dependencies>
</issueDependency>
</issueDependencies>
</AcResponse>"""
response = """<?something>\n"""
response += """<issueOne/>"""
response += """<issueTwo/>"""
with mock.patch.object(self.client, "cmd") as mocked_cmd:
mocked_cmd.return_value = xml, ''
with mock.patch.object(self.client, "issue_query") as mocked_query:
mocked_query.return_value = response, ''
self.client.cpkdepend(['10', '20'], 'mycompany', 'source', 'target')
mocked_cmd.assert_called_once_with('cpkdepend -fvx -p mycompany -s source -S target -I 10,20')<|fim▁end|> | |
<|file_name|>test_run_CR3BP.py<|end_file_name|><|fim▁begin|>"""
********************************************************************
Test file for implementation check of CR3BP library.
********************************************************************
Last update: 21/01/2022
Description
-----------
Contains a few sample orbit propagations to test the CR3BP library.
The orbits currently found in test file include:
- L2 southern NRHO (9:2 NRHO of Lunar Gateway Station)
- Distant Retrograde Orbit (DRO)
- Butterfly Orbit
- L2 Vertical Orbit
"""
# Testing CR3BP implementation
import matplotlib.pyplot as plt
import numpy as np
from astropy import units as u
from CR3BP import getChar_CR3BP, propagate, propagateSTM
from poliastro.bodies import Earth, Moon
# Earth-Moon system properties
k1 = Earth.k.to(u.km**3 / u.s**2).value
k2 = Moon.k.to(u.km**3 / u.s**2).value
r12 = 384747.99198 # Earth-Moon distance
# Compute CR3BP characterisitic values
mu, kstr, lstr, tstr, vstr, nstr = getChar_CR3BP(k1, k2, r12)
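# mu is the nondimensional mass ratio k2 / (k1 + k2); the remaining values are
# the characteristic scales (GM, length, time, velocity and mean motion,
# inferred from the variable names) used to nondimensionalize states below.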
# -- Lunar Gateway Station Orbit - 9:2 NRHO
"""
The orbit is a Near-Rectilinear Halo Orbit (NRHO) around the L2 Lagrangian
point of the Earth-Moon system. The orbit presented here is a southern
sub-family of the L2 NRHO. This orbit is a 9:2 resonant orbit, currently set
as the candidate orbit for the Lunar Gateway Station (LOP-G). It is called
9:2 resonant since a spacecraft would complete 9 orbits in the NRHO for
every 2 lunar months (slightly different from the lunar orbit period).
<|fim▁hole|>The exact orbital elements presented here are from the author's simulations.
The orbit states were obtained starting from guess solutions given in various
references. A few are provided below:
Ref: White Paper: Gateway Destination Orbit Model: A Continuous 15 Year NRHO
Reference Trajectory - NASA, 2019
Ref: Strategies for Low-Thrust Transfer Design Based on Direct Collocation
Techniques - Park, Howell and Folta
The NRHOs are a subfamily of the Halo orbits. The 'Near-Rectilinear' term comes
from the very elongated shape of the orbit compared to a regular Halo. Halo
orbits occur around all three collinear equilibrium points L1, L2 and L3. They occur
in a pair of variants (northern and southern) due to the symmetry of the CR3BP.
"""
# 9:2 L2 southern NRHO orbit
r0 = np.array([[1.021881345465263, 0, -0.182000000000000]])
v0 = np.array([0, -0.102950816739606, 0])
tf = 1.509263667286943
# number of points to plot
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
# propagate the base trajectory
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
# plotting orbit
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
# plotting the moon
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("L2 Southern NRHO")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "b")
plt.show()
"""
All other orbits in this section are computed from guess solutions available
in Grebow's Master's and PhD theses. He lists a quite detailed set of methods
to compute most of the major periodic orbits I have presented here. All of
them use differential correction methods which are not yet implemented in this
library.
Ref: GENERATING PERIODIC ORBITS IN THE CIRCULAR RESTRICTED THREEBODY PROBLEM
WITH APPLICATIONS TO LUNAR SOUTH POLE COVERAGE
- D.Grebow 2006 (Master thesis)
Ref: TRAJECTORY DESIGN IN THE EARTH-MOON SYSTEM
AND LUNAR SOUTH POLE COVERAGE
- D.Grebow 2010 (PhD dissertation)
"""
# -- DRO orbit
# DRO orbit states
r0 = np.array([0.783390492345344, 0, 0])
v0 = np.array([0, 0.548464515316651, 0])
tf = 3.63052604667440
# number of points to plot
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
# propagate the base trajectory
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
# plotting orbit
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
# plotting the moon
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("Distant Restrograde orbit (DRO)")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "m")
plt.show()
# -- Butterfly orbit
# Butterfly orbit states
r0 = np.array([1.03599510774957, 0, 0.173944812752286])
v0 = np.array([0, -0.0798042160573269, 0])
tf = 2.78676904546834
# number of points to plot
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
# propagate the base trajectory
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
# plotting orbit
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
# plotting the moon
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("Butterfly orbit")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "r")
plt.show()
# -- Vertical orbit
# Vertical orbit states
r0 = np.array([0.504689989562366, 0, 0.836429774762193])
v0 = np.array([0, 0.552722840538063, 0])
tf = 6.18448756121754
# number of points to plot
Nplt = 300
tofs = np.linspace(0, tf, Nplt)
# propagate the base trajectory
rf, vf = propagate(mu, r0, v0, tofs, rtol=1e-11)
# plotting orbit
rf = np.array(rf)
fig = plt.figure()
ax = plt.axes(projection="3d")
ax.set_box_aspect(
(np.ptp(rf[:, 0]), np.ptp(rf[:, 1]), np.ptp(rf[:, 2]))
) # aspect ratio is 1:1:1 in data space
# plotting the moon
ax.plot3D(1 - mu, 0, 0, "ok")
ax.set_title("L2 Vertical orbit")
ax.set_xlabel("x-axis [nd]")
ax.set_ylabel("y-axis [nd]")
ax.set_zlabel("z-axis [nd]")
ax.plot3D(rf[:, 0], rf[:, 1], rf[:, 2], "g")
plt.show()
# -- Propage STM
# propagate base trajectory with state-transition-matrix
STM0 = np.eye(6)
rf, vf, STM = propagateSTM(mu, r0, v0, STM0, tofs, rtol=1e-11)
# STM is a matrix of partial derivatives which are used in Newton-Raphson
# methods for trajectory design<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from cStringIO import StringIO
from captcha.models import CaptchaStore
from django.http import HttpResponse, Http404
from django.shortcuts import get_object_or_404
import Image,ImageDraw,ImageFont,ImageFilter
import random
from captcha.conf import settings
def captcha_image(request,key):
store = get_object_or_404(CaptchaStore,hashkey=key)
text=store.challenge
if settings.CAPTCHA_FONT_PATH.lower().strip().endswith('ttf'):
font = ImageFont.truetype(settings.CAPTCHA_FONT_PATH,settings.CAPTCHA_FONT_SIZE)
else:
font = ImageFont.load(settings.CAPTCHA_FONT_PATH)
size = font.getsize(text)
size = (size[0]*2,size[1])
image = Image.new('RGB', size , settings.CAPTCHA_BACKGROUND_COLOR)
xpos = 2
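    # Each character is drawn in white on its own greyscale mask, rotated by a
    # random angle, then composited onto the image at a sliding x offset so
    # neighbouring glyphs overlap slightly.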
for char in text:
fgimage = Image.new('RGB', size, settings.CAPTCHA_FOREGROUND_COLOR)
charimage = Image.new('L', font.getsize(' %s '%char), '#000000')
chardraw = ImageDraw.Draw(charimage)
chardraw.text((0,0), ' %s '%char, font=font, fill='#ffffff')
charimage = charimage.rotate(random.randrange( *settings.CAPTCHA_LETTER_ROTATION ), expand=0, resample=Image.BICUBIC)
charimage = charimage.crop(charimage.getbbox())
maskimage = Image.new('L', size)
maskimage.paste(charimage, (xpos, 4, xpos+charimage.size[0], 4+charimage.size[1] ))
size = maskimage.size<|fim▁hole|>
image = image.crop((0,0,xpos+1,size[1]))
draw = ImageDraw.Draw(image)
for f in settings.noise_functions():
draw = f(draw,image)
for f in settings.filter_functions():
image = f(image)
out = StringIO()
image.save(out,"PNG")
out.seek(0)
response = HttpResponse()
response['Content-Type'] = 'image/png'
response.write(out.read())
return response
def captcha_audio(request,key):
if settings.CAPTCHA_FLITE_PATH:
store = get_object_or_404(CaptchaStore,hashkey=key)
text=store.challenge
if 'captcha.helpers.math_challenge' == settings.CAPTCHA_CHALLENGE_FUNCT:
text = text.replace('*','times').replace('-','minus')
elif 'captcha.helpers.random_char_challenge' == settings.CAPTCHA_CHALLENGE_FUNCT:
text = '.'.join(list(text))
import tempfile, os
path = str(os.path.join(tempfile.gettempdir(),'%s.wav' %key))
cline = '%s -t "%s" -o "%s"' %(settings.CAPTCHA_FLITE_PATH, text, path)
os.popen(cline).read()
if os.path.isfile(path):
response = HttpResponse()
f = open(path,'rb')
response['Content-Type'] = 'audio/x-wav'
response.write(f.read())
f.close()
os.unlink(path)
return response
raise Http404<|fim▁end|> | image = Image.composite(fgimage, image, maskimage)
xpos = xpos + 2 + charimage.size[0] |
<|file_name|>t1.js<|end_file_name|><|fim▁begin|>const S$ = require('S$');
function loadSrc(obj, src) {
throw src;
}
const cookies = S$.symbol('Cookie', '');
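// S$.symbol introduces a symbolic string (seeded with '') that a symbolic
// execution engine (ExpoSE-style, assumed from the S$ import) can branch on;
// the throw in loadSrc makes each reached resource path a distinct outcome.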
const world = {};
if (cookies) {
if (/iPhone/.exec(cookies)) {<|fim▁hole|>} else {
loadSrc(world, '/resources/fresh');
}<|fim▁end|> | loadSrc(world, '/resources/' + cookies);
}
loadSrc(world, '/resources/unknown'); |
<|file_name|>resp.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 MaidSafe.net limited.
//
// This SAFE Network Software is licensed to you under The General Public License (GPL), version 3.
// Unless required by applicable law or agreed to in writing, the SAFE Network Software distributed
// under the GPL Licence is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. Please review the Licences for the specific language governing
// permissions and limitations relating to use of the SAFE Network Software.
#![allow(unsafe_code)]
use crate::client::{MDataInfo, SafeKey};
use crate::crypto::{shared_box, shared_secretbox, shared_sign};
use crate::ffi::ipc::resp as ffi;
use crate::ipc::req::{
container_perms_from_repr_c, container_perms_into_repr_c, permission_set_clone_from_repr_c,
permission_set_into_repr_c, ContainerPermissions,
};
use crate::ipc::{BootstrapConfig, IpcError};
use bincode::{deserialize, serialize};
use ffi_utils::{vec_clone_from_raw_parts, vec_into_raw_parts, ReprC, StringError};
use rand::thread_rng;
use rust_sodium::crypto::sign;
use rust_sodium::crypto::{box_, secretbox};
use safe_nd::{
AppFullId, ClientFullId, ClientPublicId, MDataAddress, MDataPermissionSet, MDataSeqValue,
PublicKey, XorName,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::ffi::{CString, NulError};
use std::ptr;
use std::slice;
use tiny_keccak::sha3_256;
/// Entry key under which the metadata are stored.
#[no_mangle]
pub static METADATA_KEY: &[u8] = b"_metadata";
/// Length of the metadata key.
// IMPORTANT: make sure this value stays in sync with the actual length of `METADATA_KEY`!
// TODO: Replace with `METADATA_KEY.len()` once `len` is stable as a const fn.
#[no_mangle]
pub static METADATA_KEY_LEN: usize = 9;
/// IPC response.
// TODO: `TransOwnership` variant
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Eq, PartialEq, Serialize, Deserialize)]
pub enum IpcResp {
/// Authentication.
Auth(Result<AuthGranted, IpcError>),
/// Containers.
Containers(Result<(), IpcError>),
/// Unregistered client.
Unregistered(Result<BootstrapConfig, IpcError>),
/// Share mutable data.
ShareMData(Result<(), IpcError>),
}
/// It represents the authentication response.
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]
pub struct AuthGranted {
/// The access keys.
pub app_keys: AppKeys,
/// The crust config.
/// Useful to reuse bootstrap nodes and speed up access.
pub bootstrap_config: BootstrapConfig,
/// Access container info.
pub access_container_info: AccessContInfo,
/// Access container entry.
pub access_container_entry: AccessContainerEntry,
}
impl AuthGranted {
/// Construct FFI wrapper for the native Rust object, consuming self.
pub fn into_repr_c(self) -> Result<ffi::AuthGranted, IpcError> {
let AuthGranted {
app_keys,
bootstrap_config,
access_container_info,
access_container_entry,
} = self;
let bootstrap_config = serialize(&bootstrap_config)?;
let (ptr, len) = vec_into_raw_parts(bootstrap_config);
Ok(ffi::AuthGranted {
app_keys: app_keys.into_repr_c(),
access_container_info: access_container_info.into_repr_c(),
access_container_entry: access_container_entry_into_repr_c(access_container_entry)?,
bootstrap_config: ptr,
bootstrap_config_len: len,
})
}
}
impl ReprC for AuthGranted {
type C = *const ffi::AuthGranted;
type Error = IpcError;
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
let ffi::AuthGranted {
app_keys,
bootstrap_config,
bootstrap_config_len,
access_container_info,
ref access_container_entry,
..
} = *repr_c;
let bootstrap_config = slice::from_raw_parts(bootstrap_config, bootstrap_config_len);
let bootstrap_config = deserialize(bootstrap_config)?;
Ok(Self {
app_keys: AppKeys::clone_from_repr_c(app_keys)?,
bootstrap_config,
access_container_info: AccessContInfo::clone_from_repr_c(access_container_info)?,
access_container_entry: access_container_entry_clone_from_repr_c(
access_container_entry,
)?,
})
}
}
/// Represents the needed keys to work with the data.
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct AppKeys {
/// This is the identity of the App in the Network.
pub app_full_id: AppFullId,
/// Data symmetric encryption key.
pub enc_key: shared_secretbox::Key,
/// Asymmetric sign public key.
pub sign_pk: sign::PublicKey,
/// Asymmetric sign private key.
pub sign_sk: shared_sign::SecretKey,
/// Asymmetric enc public key.
pub enc_pk: box_::PublicKey,
/// Asymmetric enc private key.
pub enc_sk: shared_box::SecretKey,
}
impl AppKeys {
/// Generates random keys for the provided client.
pub fn new(client_public_id: ClientPublicId) -> AppKeys {
let (enc_pk, enc_sk) = shared_box::gen_keypair();
let (sign_pk, sign_sk) = shared_sign::gen_keypair();
// TODO: Instead of using `thread_rng`, generate based on a provided seed or rng.
let app_full_id = AppFullId::new_bls(&mut thread_rng(), client_public_id);
AppKeys {
app_full_id,
enc_key: shared_secretbox::gen_key(),
sign_pk,
sign_sk,
enc_pk,
enc_sk,
}
}
/// Converts `AppKeys` into an App `SafeKey`.
pub fn app_safe_key(&self) -> SafeKey {
SafeKey::app(self.app_full_id.clone())
}
/// Returns the associated public key.
pub fn public_key(&self) -> PublicKey {
*self.app_full_id.public_id().public_key()
}
/// Constructs FFI wrapper for the native Rust object, consuming self.
pub fn into_repr_c(self) -> ffi::AppKeys {
let AppKeys {
app_full_id,
enc_key,
sign_pk,
sign_sk,
enc_pk,
enc_sk,
} = self;
// TODO: Handle the full app ID.
let bls_pk = match app_full_id.public_id().public_key() {
PublicKey::Bls(pk) => pk.to_bytes(),
// TODO and FIXME: use proper ReprC for PublicKey
_ => panic!("unexpected owner key type"),
};
ffi::AppKeys {
bls_pk,
enc_key: enc_key.0,
sign_pk: sign_pk.0,
sign_sk: sign_sk.0,
enc_pk: enc_pk.0,
enc_sk: enc_sk.0,
}
}
}
impl ReprC for AppKeys {
type C = ffi::AppKeys;
type Error = IpcError;
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
// TODO: handle this properly.
let mut rng = thread_rng();
let client_id = ClientFullId::new_bls(&mut rng);
let app_full_id = AppFullId::new_bls(&mut rng, client_id.public_id().clone());
Ok(Self {
app_full_id,
enc_key: shared_secretbox::Key::from_raw(&repr_c.enc_key),
sign_pk: sign::PublicKey(repr_c.sign_pk),
sign_sk: shared_sign::SecretKey::from_raw(&repr_c.sign_sk),
enc_pk: box_::PublicKey(repr_c.enc_pk),
enc_sk: shared_box::SecretKey::from_raw(&repr_c.enc_sk),
})
}
}
/// Represents an entry for a single app in the access container
pub type AccessContainerEntry = HashMap<String, (MDataInfo, ContainerPermissions)>;
/// Convert `AccessContainerEntry` to FFI representation.
pub fn access_container_entry_into_repr_c(
entry: AccessContainerEntry,
) -> Result<ffi::AccessContainerEntry, NulError> {
let mut vec = Vec::with_capacity(entry.len());
for (name, (mdata_info, permissions)) in entry {
vec.push(ffi::ContainerInfo {
name: CString::new(name)?.into_raw(),
mdata_info: mdata_info.into_repr_c(),
permissions: container_perms_into_repr_c(&permissions),
})
}
let (containers, containers_len) = vec_into_raw_parts(vec);
Ok(ffi::AccessContainerEntry {
containers,
containers_len,
})
}
/// Convert FFI representation of `AccessContainerEntry` to native rust representation by cloning.
///
/// # Safety
///
/// This function dereferences the provided raw pointer, which must be valid.
///
/// This function also assumes the provided `ffi::AccessContainerEntry` is valid, i.e. it was
/// constructed by calling `access_container_into_repr_c`.
pub unsafe fn access_container_entry_clone_from_repr_c(
entry: *const ffi::AccessContainerEntry,
) -> Result<AccessContainerEntry, IpcError> {
let input = slice::from_raw_parts((*entry).containers, (*entry).containers_len);
let mut output = AccessContainerEntry::with_capacity(input.len());
for container in input {
let name = String::clone_from_repr_c(container.name)?;
let mdata_info = MDataInfo::clone_from_repr_c(&container.mdata_info)?;
let permissions = container_perms_from_repr_c(container.permissions)?;
let _ = output.insert(name, (mdata_info, permissions));
}
Ok(output)
}
/// Access container
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct AccessContInfo {
/// ID
pub id: XorName,
/// Type tag
pub tag: u64,
/// Nonce
pub nonce: secretbox::Nonce,
}
impl AccessContInfo {
/// Construct FFI wrapper for the native Rust object, consuming self.
pub fn into_repr_c(self) -> ffi::AccessContInfo {
let Self { id, tag, nonce } = self;
ffi::AccessContInfo {
id: id.0,
tag,
nonce: nonce.0,
}
}
/// Creates `MDataInfo` from this `AccessContInfo`
pub fn into_mdata_info(self, enc_key: shared_secretbox::Key) -> MDataInfo {
MDataInfo::new_private(
MDataAddress::Seq {
name: self.id,
tag: self.tag,
},
(enc_key, self.nonce),
)
}
/// Creates an `AccessContInfo` from a given `MDataInfo`
pub fn from_mdata_info(md: &MDataInfo) -> Result<Self, IpcError> {
if let Some((_, nonce)) = md.enc_info {
Ok(Self {
id: md.name(),
tag: md.type_tag(),
nonce,
})
} else {
Err(IpcError::Unexpected(
"MDataInfo doesn't contain nonce".to_owned(),
))
}
}
}
impl ReprC for AccessContInfo {
type C = ffi::AccessContInfo;
type Error = IpcError;
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
Ok(Self {
id: XorName(repr_c.id),
tag: repr_c.tag,
nonce: secretbox::Nonce(repr_c.nonce),
})
}
}
/// Encrypts and serialises an access container key using given app ID and app key.
pub fn access_container_enc_key(
app_id: &str,
app_enc_key: &secretbox::Key,
access_container_nonce: &secretbox::Nonce,
) -> Result<Vec<u8>, IpcError> {
let key = app_id.as_bytes();
let mut key_pt = key.to_vec();
key_pt.extend_from_slice(&access_container_nonce[..]);
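    // Derive the nonce deterministically: SHA3-256 over (app id bytes ||
    // access container nonce), truncated to the secretbox nonce length.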
let key_nonce = secretbox::Nonce::from_slice(&sha3_256(&key_pt)[..secretbox::NONCEBYTES])
.ok_or(IpcError::EncodeDecodeError)?;
Ok(secretbox::seal(key, &key_nonce, app_enc_key))
}
/// Information about an app that has access to an MD through `sign_key`.
#[derive(Debug)]
pub struct AppAccess {
/// App's or user's public key
pub sign_key: PublicKey,
/// A list of permissions
pub permissions: MDataPermissionSet,
/// App's user-facing name
pub name: Option<String>,
/// App id
pub app_id: Option<String>,
}
impl AppAccess {
/// Construct FFI wrapper for the native Rust object, consuming self.
pub fn into_repr_c(self) -> Result<ffi::AppAccess, IpcError> {
let AppAccess {
sign_key,
permissions,
name,
app_id,
} = self;
let name = match name {
Some(name) => CString::new(name).map_err(StringError::from)?.into_raw(),
None => ptr::null(),
};
let app_id = match app_id {
Some(app_id) => CString::new(app_id).map_err(StringError::from)?.into_raw(),
None => ptr::null(),
};
let sign_key = match sign_key {
PublicKey::Bls(sec_key) => sec_key.to_bytes(),
// TODO: FFI repr for PublicKey
_ => return Err(IpcError::from("Unsupported key type")),
};
Ok(ffi::AppAccess {
sign_key,
permissions: permission_set_into_repr_c(permissions),
name,
app_id,
})
}
}
impl ReprC for AppAccess {
type C = *const ffi::AppAccess;
type Error = IpcError;
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
let ffi::AppAccess {
sign_key,
permissions,
name,
app_id,
} = *repr_c;
Ok(Self {
sign_key: PublicKey::from(
threshold_crypto::PublicKey::from_bytes(sign_key)
.map_err(|_| IpcError::EncodeDecodeError)?,
),
permissions: permission_set_clone_from_repr_c(permissions)?,
name: if name.is_null() {
None
} else {
Some(String::clone_from_repr_c(name)?)
},
            app_id: if app_id.is_null() {
None
} else {
Some(String::clone_from_repr_c(app_id)?)
},
})
}
}
/// Metadata for `MutableData`.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct UserMetadata {
/// Name or purpose of this mutable data.
pub name: Option<String>,
/// Description of how this mutable data should or should not be shared.
pub description: Option<String>,
}
impl UserMetadata {
/// Converts this object into an FFI representation with more information.
pub fn into_md_response(
self,
xor_name: XorName,
type_tag: u64,
) -> Result<ffi::MetadataResponse, NulError> {
Ok(ffi::MetadataResponse {
name: match self.name {
Some(name) => CString::new(name)?.into_raw(),
None => ptr::null(),
},
description: match self.description {
Some(description) => CString::new(description)?.into_raw(),
None => ptr::null(),
},
xor_name: xor_name.0,
type_tag,
})
}
}
impl ReprC for UserMetadata {
type C = *const ffi::MetadataResponse;
type Error = IpcError;
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
let ffi::MetadataResponse {
name, description, ..
} = *repr_c;
Ok(Self {
name: if name.is_null() {
None
} else {
Some(String::clone_from_repr_c(name)?)
},
description: if description.is_null() {
None
} else {
Some(String::clone_from_repr_c(description)?)
},
})
}
}
/// Mutable data key.
#[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Clone, Serialize, Deserialize, Debug)]
// TODO: Move to safe-nd, or remove this and use Vec<u8> directly.
pub struct MDataKey(
/// Key value.
pub Vec<u8>,
);
impl MDataKey {
/// Create the key from bytes.
pub fn from_bytes(key: &[u8]) -> Self {
MDataKey(key.into())
}
/// Construct FFI wrapper for the native Rust object, consuming self.
pub fn into_repr_c(self) -> ffi::MDataKey {
let (key, key_len) = vec_into_raw_parts(self.0);
ffi::MDataKey { key, key_len }
}
}
impl ReprC for MDataKey {
type C = *const ffi::MDataKey;
type Error = ();
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
let ffi::MDataKey { key, key_len, .. } = *repr_c;
let key = vec_clone_from_raw_parts(key, key_len);
Ok(MDataKey(key))
}
}
/// Redefine the Value from safe-nd so that we can `impl ReprC`.
#[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Clone, Serialize, Deserialize, Debug)]
pub struct MDataValue {
/// Content of the entry.
pub content: Vec<u8>,
/// Version of the entry.
pub entry_version: u64,
}
// TODO: Remove this and use SeqMDataValue in safe-nd instead.
impl MDataValue {
/// Convert routing representation to `MDataValue`.
pub fn from_routing(value: MDataSeqValue) -> Self {
Self {
content: value.data,
entry_version: value.version,
}
}
/// Returns FFI counterpart without consuming the object.
pub fn into_repr_c(self) -> ffi::MDataValue {
let (content, content_len) = vec_into_raw_parts(self.content);
ffi::MDataValue {
content,
content_len,
entry_version: self.entry_version,
}
}
}
impl ReprC for MDataValue {
type C = *const ffi::MDataValue;
type Error = ();
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
let ffi::MDataValue {
content,
content_len,
entry_version,
..
} = *repr_c;
let content = vec_clone_from_raw_parts(content, content_len);
Ok(Self {
content,
entry_version,
})
}
}
/// Mutable data entry.
// TODO: Remove this and use SeqMDataEntry in safe-nd instead.
#[derive(Hash, Eq, PartialEq, PartialOrd, Ord, Clone, Serialize, Deserialize, Debug)]
pub struct MDataEntry {
/// Key.
pub key: MDataKey,
/// Value.
pub value: MDataValue,
}
impl MDataEntry {
/// Construct FFI wrapper for the native Rust object, consuming self.
pub fn into_repr_c(self) -> ffi::MDataEntry {
ffi::MDataEntry {
key: self.key.into_repr_c(),
value: self.value.into_repr_c(),
}
}
}
impl ReprC for MDataEntry {
type C = *const ffi::MDataEntry;
type Error = ();
unsafe fn clone_from_repr_c(repr_c: Self::C) -> Result<Self, Self::Error> {
let ffi::MDataEntry { ref key, ref value } = *repr_c;
Ok(Self {
key: MDataKey::clone_from_repr_c(key)?,
value: MDataValue::clone_from_repr_c(value)?,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::utils::test_utils::gen_client_id;
use ffi_utils::ReprC;
use rust_sodium::crypto::secretbox;
use safe_nd::{XorName, XOR_NAME_LEN};
<|fim▁hole|> fn auth_granted() {
let client_id = gen_client_id();
let ak = AppKeys::new(client_id.public_id().clone());
let ac = AccessContInfo {
id: XorName([2; XOR_NAME_LEN]),
tag: 681,
nonce: secretbox::gen_nonce(),
};
let ag = AuthGranted {
app_keys: ak,
bootstrap_config: BootstrapConfig::default(),
access_container_info: ac,
access_container_entry: AccessContainerEntry::default(),
};
let ffi = unwrap!(ag.into_repr_c());
assert_eq!(ffi.access_container_info.tag, 681);
let ag = unsafe { unwrap!(AuthGranted::clone_from_repr_c(&ffi)) };
assert_eq!(ag.access_container_info.tag, 681);
}
// Testing converting an `AppKeys` object to its FFI representation and back again.
#[test]
fn app_keys() {
let client_id = gen_client_id();
let ak = AppKeys::new(client_id.public_id().clone());
let AppKeys {
enc_key,
sign_pk,
sign_sk,
enc_pk,
enc_sk,
// TODO: check app_id also.
..
} = ak.clone();
let ffi_ak = ak.into_repr_c();
assert_eq!(
ffi_ak.enc_key.iter().collect::<Vec<_>>(),
enc_key.0.iter().collect::<Vec<_>>()
);
assert_eq!(
ffi_ak.sign_pk.iter().collect::<Vec<_>>(),
sign_pk.0.iter().collect::<Vec<_>>()
);
assert_eq!(
ffi_ak.sign_sk.iter().collect::<Vec<_>>(),
sign_sk.0.iter().collect::<Vec<_>>()
);
assert_eq!(
ffi_ak.enc_pk.iter().collect::<Vec<_>>(),
enc_pk.0.iter().collect::<Vec<_>>()
);
assert_eq!(
ffi_ak.enc_sk.iter().collect::<Vec<_>>(),
enc_sk.0.iter().collect::<Vec<_>>()
);
let ak = unsafe { unwrap!(AppKeys::clone_from_repr_c(ffi_ak)) };
assert_eq!(ak.enc_key, enc_key);
assert_eq!(ak.sign_pk, sign_pk);
assert_eq!(ak.sign_sk, sign_sk);
assert_eq!(ak.enc_pk, enc_pk);
assert_eq!(ak.enc_sk, enc_sk);
}
// Test converting an `AccessContInfo` to `MDataInfo` and back again.
#[test]
fn access_container_mdata_info() {
let (key, nonce) = (shared_secretbox::gen_key(), secretbox::gen_nonce());
let a = AccessContInfo {
id: XorName([2; XOR_NAME_LEN]),
tag: 681,
nonce,
};
let md = a.clone().into_mdata_info(key.clone());
let a2 = AccessContInfo::from_mdata_info(&md).unwrap();
assert_eq!(a, a2);
let md2 = a.into_mdata_info(key);
assert_eq!(md, md2);
}
// Test converting an `AccessContInfo` to its FFI representation and back again.
#[test]
fn access_container_ffi() {
let nonce = secretbox::gen_nonce();
let a = AccessContInfo {
id: XorName([2; XOR_NAME_LEN]),
tag: 681,
nonce,
};
let ffi = a.into_repr_c();
assert_eq!(ffi.id.iter().sum::<u8>() as usize, 2 * XOR_NAME_LEN);
assert_eq!(ffi.tag, 681);
assert_eq!(
ffi.nonce.iter().collect::<Vec<_>>(),
nonce.0.iter().collect::<Vec<_>>()
);
let a = unsafe { unwrap!(AccessContInfo::clone_from_repr_c(ffi)) };
assert_eq!(a.id.0.iter().sum::<u8>() as usize, 2 * XOR_NAME_LEN);
assert_eq!(a.tag, 681);
assert_eq!(a.nonce, nonce);
}
}<|fim▁end|> | // Test converting an `AuthGranted` object to its FFI representation and then back again.
#[test] |
<|file_name|>P3_removeCsvHeader.py<|end_file_name|><|fim▁begin|>#! python3
"""Remove CSV header
Removes the header from all CSV files in the current working directory.
Note:
Outputs to ``./headerRemoved`` directory.
"""
def main():
import csv, os
os.makedirs('headerRemoved', exist_ok=True)
# Loop through every file in the current working directory.
for csvFilename in os.listdir('.'):
if not csvFilename.endswith(".csv"):<|fim▁hole|> continue # skip non-csv files
print("Removing header from " + csvFilename + "...")
# Read the CSV file in (skipping first row).
csvRows = []
csvFileObj = open(csvFilename)
readerObj = csv.reader(csvFileObj)
for row in readerObj:
if readerObj.line_num == 1:
continue # skip first row
csvRows.append(row)
csvFileObj.close()
# Write out the CSV file.
csvFileObj = open(os.path.join('headerRemoved', csvFilename), 'w', newline='')
csvWriter = csv.writer(csvFileObj)
for row in csvRows:
csvWriter.writerow(row)
csvFileObj.close()
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! A library for build scripts to compile custom C code
//!
//! This library is intended to be used as a `build-dependencies` entry in
//! `Cargo.toml`:
//!
//! ```toml
//! [build-dependencies]
//! gcc = "0.3"
//! ```
//!
//! The purpose of this crate is to provide the utility functions necessary to
//! compile C code into a static archive which is then linked into a Rust crate.
//! The top-level `compile_library` function serves as a convenience and more
//! advanced configuration is available through the `Config` builder.
//!
//! This crate will automatically detect situations such as cross compilation or
//! other environment variables set by Cargo and will build code appropriately.
//!
//! # Examples
//!
//! Use the default configuration:
//!
//! ```no_run
//! extern crate gcc;
//!
//! fn main() {
//! gcc::compile_library("libfoo.a", &["src/foo.c"]);
//! }
//! ```
//!
//! Use more advanced configuration:
//!
//! ```no_run
//! extern crate gcc;
//!
//! fn main() {
//! gcc::Config::new()
//! .file("src/foo.c")
//! .define("FOO", Some("bar"))
//! .include("src")
//! .compile("libfoo.a");
//! }
//! ```
#![doc(html_root_url = "http://alexcrichton.com/gcc-rs")]
#![cfg_attr(test, deny(warnings))]
#![deny(missing_docs)]
use std::env;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io;
use std::path::{PathBuf, Path};
use std::process::{Command, Stdio};
#[cfg(windows)]
mod registry;
pub mod windows_registry;
/// Extra configuration to pass to gcc.
pub struct Config {
include_directories: Vec<PathBuf>,
definitions: Vec<(String, Option<String>)>,
objects: Vec<PathBuf>,
flags: Vec<String>,
files: Vec<PathBuf>,
cpp: bool,
cpp_link_stdlib: Option<Option<String>>,
cpp_set_stdlib: Option<String>,
target: Option<String>,
host: Option<String>,
out_dir: Option<PathBuf>,
opt_level: Option<u32>,
debug: Option<bool>,
env: Vec<(OsString, OsString)>,
compiler: Option<PathBuf>,
archiver: Option<PathBuf>,
cargo_metadata: bool,
pic: Option<bool>,
}
/// Configuration used to represent an invocation of a C compiler.
///
/// This can be used to figure out what compiler is in use, what the arguments
/// to it are, and what the environment variables look like for the compiler.
/// This can be used to further configure other build systems (e.g. forward
/// along CC and/or CFLAGS) or the `to_command` method can be used to run the
/// compiler itself.
pub struct Tool {
path: PathBuf,
args: Vec<OsString>,
env: Vec<(OsString, OsString)>,
}
/// Compile a library from the given set of input C files.
///
/// This will simply compile all files into object files and then assemble them
/// into the output. This will read the standard environment variables to detect
/// cross compilations and such.
///
/// This function will also print all metadata on standard output for Cargo.
///
/// # Example
///
/// ```no_run
/// gcc::compile_library("libfoo.a", &["foo.c", "bar.c"]);
/// ```
pub fn compile_library(output: &str, files: &[&str]) {
let mut c = Config::new();
for f in files.iter() {
c.file(*f);
}
c.compile(output)
}
impl Config {
/// Construct a new instance of a blank set of configuration.
///
/// This builder is finished with the `compile` function.
pub fn new() -> Config {
Config {
include_directories: Vec::new(),
definitions: Vec::new(),
objects: Vec::new(),
flags: Vec::new(),
files: Vec::new(),
cpp: false,
cpp_link_stdlib: None,
cpp_set_stdlib: None,
target: None,
host: None,
out_dir: None,
opt_level: None,
debug: None,
env: Vec::new(),
compiler: None,
archiver: None,
cargo_metadata: true,
pic: None,
}
}
/// Add a directory to the `-I` or include path for headers
pub fn include<P: AsRef<Path>>(&mut self, dir: P) -> &mut Config {
self.include_directories.push(dir.as_ref().to_path_buf());
self
}
/// Specify a `-D` variable with an optional value.
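    ///
    /// # Example
    ///
    /// ```no_run
    /// // Adds `-DFOO=bar` (and `-DBAZ` with no value) to every compile invocation.
    /// gcc::Config::new()
    ///     .define("FOO", Some("bar"))
    ///     .define("BAZ", None);
    /// ```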
pub fn define(&mut self, var: &str, val: Option<&str>) -> &mut Config {
self.definitions.push((var.to_string(), val.map(|s| s.to_string())));
self
}
/// Add an arbitrary object file to link in<|fim▁hole|> pub fn object<P: AsRef<Path>>(&mut self, obj: P) -> &mut Config {
self.objects.push(obj.as_ref().to_path_buf());
self
}
/// Add an arbitrary flag to the invocation of the compiler
pub fn flag(&mut self, flag: &str) -> &mut Config {
self.flags.push(flag.to_string());
self
}
/// Add a file which will be compiled
pub fn file<P: AsRef<Path>>(&mut self, p: P) -> &mut Config {
self.files.push(p.as_ref().to_path_buf());
self
}
/// Set C++ support.
///
/// The other `cpp_*` options will only become active if this is set to
/// `true`.
pub fn cpp(&mut self, cpp: bool) -> &mut Config {
self.cpp = cpp;
self
}
/// Set the standard library to link against when compiling with C++
/// support.
///
/// The default value of this property depends on the current target: On
/// OS X `Some("c++")` is used, when compiling for a Visual Studio based
/// target `None` is used and for other targets `Some("stdc++")` is used.
///
/// A value of `None` indicates that no automatic linking should happen,
/// otherwise cargo will link against the specified library.
///
/// The given library name must not contain the `lib` prefix.
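    ///
    /// # Example
    ///
    /// ```no_run
    /// // A sketch assuming a C++ source file; names are illustrative.
    /// gcc::Config::new()
    ///     .file("src/foo.cpp")
    ///     .cpp(true)
    ///     .cpp_link_stdlib(Some("c++")) // emits cargo:rustc-link-lib=c++
    ///     .compile("libfoo.a");
    /// ```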
pub fn cpp_link_stdlib(&mut self, cpp_link_stdlib: Option<&str>)
-> &mut Config {
self.cpp_link_stdlib = Some(cpp_link_stdlib.map(|s| s.into()));
self
}
/// Force the C++ compiler to use the specified standard library.
///
/// Setting this option will automatically set `cpp_link_stdlib` to the same
/// value.
///
/// The default value of this option is always `None`.
///
/// This option has no effect when compiling for a Visual Studio based
/// target.
///
/// This option sets the `-stdlib` flag, which is only supported by some
/// compilers (clang, icc) but not by others (gcc). The library will not
/// detect which compiler is used, as such it is the responsibility of the
    /// caller to ensure that this option is only used in conjunction with a
/// compiler which supports the `-stdlib` flag.
///
/// A value of `None` indicates that no specific C++ standard library should
/// be used, otherwise `-stdlib` is added to the compile invocation.
///
/// The given library name must not contain the `lib` prefix.
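    ///
    /// # Example
    ///
    /// ```no_run
    /// // A sketch; only meaningful for compilers that accept `-stdlib`
    /// // (e.g. clang). The file name is illustrative.
    /// gcc::Config::new()
    ///     .file("src/foo.cpp")
    ///     .cpp(true)
    ///     .cpp_set_stdlib(Some("c++")) // passes -stdlib=libc++
    ///     .compile("libfoo.a");
    /// ```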
pub fn cpp_set_stdlib(&mut self, cpp_set_stdlib: Option<&str>)
-> &mut Config {
self.cpp_set_stdlib = cpp_set_stdlib.map(|s| s.into());
self.cpp_link_stdlib(cpp_set_stdlib);
self
}
/// Configures the target this configuration will be compiling for.
///
/// This option is automatically scraped from the `TARGET` environment
/// variable by build scripts, so it's not required to call this function.
pub fn target(&mut self, target: &str) -> &mut Config {
self.target = Some(target.to_string());
self
}
/// Configures the host assumed by this configuration.
///
/// This option is automatically scraped from the `HOST` environment
/// variable by build scripts, so it's not required to call this function.
pub fn host(&mut self, host: &str) -> &mut Config {
self.host = Some(host.to_string());
self
}
/// Configures the optimization level of the generated object files.
///
/// This option is automatically scraped from the `OPT_LEVEL` environment
/// variable by build scripts, so it's not required to call this function.
pub fn opt_level(&mut self, opt_level: u32) -> &mut Config {
self.opt_level = Some(opt_level);
self
}
/// Configures whether the compiler will emit debug information when
/// generating object files.
///
/// This option is automatically scraped from the `PROFILE` environment
/// variable by build scripts (only enabled when the profile is "debug"), so
/// it's not required to call this function.
pub fn debug(&mut self, debug: bool) -> &mut Config {
self.debug = Some(debug);
self
}
/// Configures the output directory where all object files and static
/// libraries will be located.
///
/// This option is automatically scraped from the `OUT_DIR` environment
/// variable by build scripts, so it's not required to call this function.
pub fn out_dir<P: AsRef<Path>>(&mut self, out_dir: P) -> &mut Config {
self.out_dir = Some(out_dir.as_ref().to_owned());
self
}
/// Configures the compiler to be used to produce output.
///
/// This option is automatically determined from the target platform or a
/// number of environment variables, so it's not required to call this
/// function.
pub fn compiler<P: AsRef<Path>>(&mut self, compiler: P) -> &mut Config {
self.compiler = Some(compiler.as_ref().to_owned());
self
}
/// Configures the tool used to assemble archives.
///
/// This option is automatically determined from the target platform or a
/// number of environment variables, so it's not required to call this
/// function.
pub fn archiver<P: AsRef<Path>>(&mut self, archiver: P) -> &mut Config {
self.archiver = Some(archiver.as_ref().to_owned());
self
}
/// Define whether metadata should be emitted for cargo allowing it to
/// automatically link the binary. Defaults to `true`.
pub fn cargo_metadata(&mut self, cargo_metadata: bool) -> &mut Config {
self.cargo_metadata = cargo_metadata;
self
}
/// Configures whether the compiler will emit position independent code.
///
/// This option defaults to `false` for `i686` and `windows-gnu` targets and to `true` for all
/// other targets.
pub fn pic(&mut self, pic: bool) -> &mut Config {
self.pic = Some(pic);
self
}
#[doc(hidden)]
pub fn __set_env<A, B>(&mut self, a: A, b: B) -> &mut Config
where A: AsRef<OsStr>, B: AsRef<OsStr>
{
self.env.push((a.as_ref().to_owned(), b.as_ref().to_owned()));
self
}
/// Run the compiler, generating the file `output`
///
/// The name `output` must begin with `lib` and end with `.a`
pub fn compile(&self, output: &str) {
assert!(output.starts_with("lib"));
assert!(output.ends_with(".a"));
let lib_name = &output[3..output.len() - 2];
let dst = self.get_out_dir();
let mut objects = Vec::new();
for file in self.files.iter() {
let obj = dst.join(file).with_extension("o");
self.compile_object(file, &obj);
objects.push(obj);
}
self.assemble(lib_name, &dst.join(output), &objects);
        self.print(&format!("cargo:rustc-link-lib=static={}", lib_name));
self.print(&format!("cargo:rustc-link-search=native={}", dst.display()));
// Add specific C++ libraries, if enabled.
if self.cpp {
if let Some(stdlib) = self.get_cpp_link_stdlib() {
self.print(&format!("cargo:rustc-link-lib={}", stdlib));
}
}
}
fn compile_object(&self, file: &Path, dst: &Path) {
let is_asm = file.extension().and_then(|s| s.to_str()) == Some("asm");
let msvc = self.get_target().contains("msvc");
let (mut cmd, name) = if msvc && is_asm {
self.msvc_macro_assembler()
} else {
let compiler = self.get_compiler();
let mut cmd = compiler.to_command();
for &(ref a, ref b) in self.env.iter() {
cmd.env(a, b);
}
(cmd, compiler.path.file_name().unwrap()
.to_string_lossy().into_owned())
};
fs::create_dir_all(&dst.parent().unwrap()).unwrap();
if msvc && is_asm {
cmd.arg("/Fo").arg(dst);
} else if msvc {
let mut s = OsString::from("/Fo");
s.push(&dst);
cmd.arg(s);
} else {
cmd.arg("-o").arg(&dst);
}
cmd.arg(if msvc {"/c"} else {"-c"});
cmd.arg(file);
run(&mut cmd, &name);
}
/// Get the compiler that's in use for this configuration.
///
/// This function will return a `Tool` which represents the culmination
/// of this configuration at a snapshot in time. The returned compiler can
/// be inspected (e.g. the path, arguments, environment) to forward along to
/// other tools, or the `to_command` method can be used to invoke the
/// compiler itself.
///
/// This method will take into account all configuration such as debug
/// information, optimization level, include directories, defines, etc.
/// Additionally, the compiler binary in use follows the standard
/// conventions for this path, e.g. looking at the explicitly set compiler,
/// environment variables (a number of which are inspected here), and then
/// falling back to the default configuration.
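    ///
    /// # Example
    ///
    /// ```no_run
    /// // A sketch of forwarding the resolved compiler to another build
    /// // system; the printed variable names are illustrative.
    /// let tool = gcc::Config::new().file("src/foo.c").get_compiler();
    /// println!("CC={:?}", tool.path());
    /// println!("CFLAGS={:?}", tool.args());
    /// ```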
pub fn get_compiler(&self) -> Tool {
let opt_level = self.get_opt_level();
let debug = self.get_debug();
let target = self.get_target();
let msvc = target.contains("msvc");
self.print(&format!("debug={} opt-level={}", debug, opt_level));
let mut cmd = self.get_base_compiler();
let nvcc = cmd.path.to_str()
.map(|path| path.contains("nvcc"))
.unwrap_or(false);
if msvc {
cmd.args.push("/nologo".into());
cmd.args.push("/MD".into()); // link against msvcrt.dll for now
if opt_level != 0 {
cmd.args.push("/O2".into());
}
if target.contains("i686") {
cmd.args.push("/SAFESEH".into());
}
} else if nvcc {
cmd.args.push(format!("-O{}", opt_level).into());
} else {
cmd.args.push(format!("-O{}", opt_level).into());
cmd.args.push("-ffunction-sections".into());
cmd.args.push("-fdata-sections".into());
}
for arg in self.envflags(if self.cpp {"CXXFLAGS"} else {"CFLAGS"}) {
cmd.args.push(arg.into());
}
if debug {
cmd.args.push(if msvc {"/Z7"} else {"-g"}.into());
}
if target.contains("-ios") {
self.ios_flags(&mut cmd);
} else if !msvc {
if target.contains("i686") {
cmd.args.push("-m32".into());
} else if target.contains("x86_64") || target.contains("powerpc64") {
cmd.args.push("-m64".into());
}
if !nvcc && self.pic.unwrap_or(!target.contains("i686") && !target.contains("windows-gnu")) {
cmd.args.push("-fPIC".into());
} else if nvcc && self.pic.unwrap_or(false) {
cmd.args.push("-Xcompiler".into());
                cmd.args.push("'-fPIC'".into());
}
if target.contains("musl") {
cmd.args.push("-static".into());
}
if target == "armv7-unknown-linux-gnueabihf" {
cmd.args.push("-march=armv7-a".into());
}
if target == "arm-unknown-linux-gnueabihf" {
cmd.args.push("-march=armv6".into());
cmd.args.push("-marm".into());
}
if target == "arm-unknown-linux-gnueabi" {
cmd.args.push("-march=armv6".into());
cmd.args.push("-marm".into());
}
}
if self.cpp && !msvc {
if let Some(ref stdlib) = self.cpp_set_stdlib {
cmd.args.push(format!("-stdlib=lib{}", stdlib).into());
}
}
for directory in self.include_directories.iter() {
cmd.args.push(if msvc {"/I"} else {"-I"}.into());
cmd.args.push(directory.into());
}
for flag in self.flags.iter() {
cmd.args.push(flag.into());
}
for &(ref key, ref value) in self.definitions.iter() {
let lead = if msvc {"/"} else {"-"};
if let &Some(ref value) = value {
cmd.args.push(format!("{}D{}={}", lead, key, value).into());
} else {
cmd.args.push(format!("{}D{}", lead, key).into());
}
}
cmd
}
fn msvc_macro_assembler(&self) -> (Command, String) {
let target = self.get_target();
let tool = if target.contains("x86_64") {"ml64.exe"} else {"ml.exe"};
let mut cmd = windows_registry::find(&target, tool).unwrap_or_else(|| {
self.cmd(tool)
});
for directory in self.include_directories.iter() {
cmd.arg("/I").arg(directory);
}
for &(ref key, ref value) in self.definitions.iter() {
if let &Some(ref value) = value {
cmd.arg(&format!("/D{}={}", key, value));
} else {
cmd.arg(&format!("/D{}", key));
}
}
if target.contains("i686") {
cmd.arg("/safeseh");
}
for flag in self.flags.iter() {
cmd.arg(flag);
}
(cmd, tool.to_string())
}
fn assemble(&self, lib_name: &str, dst: &Path, objects: &[PathBuf]) {
let target = self.get_target();
if target.contains("msvc") {
let mut cmd = match self.archiver {
Some(ref s) => self.cmd(s),
None => windows_registry::find(&target, "lib.exe")
.unwrap_or(self.cmd("lib.exe")),
};
let mut out = OsString::from("/OUT:");
out.push(dst);
run(cmd.arg(out).arg("/nologo")
.args(objects)
.args(&self.objects), "lib.exe");
// The Rust compiler will look for libfoo.a and foo.lib, but the
// MSVC linker will also be passed foo.lib, so be sure that both
// exist for now.
let lib_dst = dst.with_file_name(format!("{}.lib", lib_name));
let _ = fs::remove_file(&lib_dst);
            fs::hard_link(&dst, &lib_dst).or_else(|_| {
                // if hard-linking fails, just copy (ignoring the number of bytes written)
                fs::copy(&dst, &lib_dst).map(|_| ())
            }).ok().expect(&format!("Copying from {:?} to {:?} failed.", dst, lib_dst));
} else {
let ar = self.get_ar();
let cmd = ar.file_name().unwrap().to_string_lossy();
run(self.cmd(&ar).arg("crus")
.arg(dst)
.args(objects)
.args(&self.objects), &cmd);
}
}
fn ios_flags(&self, cmd: &mut Tool) {
enum ArchSpec {
Device(&'static str),
Simulator(&'static str),
}
let target = self.get_target();
let arch = target.split('-').nth(0).unwrap();
let arch = match arch {
"arm" | "armv7" | "thumbv7" => ArchSpec::Device("armv7"),
"armv7s" | "thumbv7s" => ArchSpec::Device("armv7s"),
"arm64" | "aarch64" => ArchSpec::Device("arm64"),
"i386" | "i686" => ArchSpec::Simulator("-m32"),
"x86_64" => ArchSpec::Simulator("-m64"),
_ => fail("Unknown arch for iOS target")
};
let sdk = match arch {
ArchSpec::Device(arch) => {
cmd.args.push("-arch".into());
cmd.args.push(arch.into());
cmd.args.push("-miphoneos-version-min=7.0".into());
"iphoneos"
},
ArchSpec::Simulator(arch) => {
cmd.args.push(arch.into());
cmd.args.push("-mios-simulator-version-min=7.0".into());
"iphonesimulator"
}
};
self.print(&format!("Detecting iOS SDK path for {}", sdk));
let sdk_path = self.cmd("xcrun")
.arg("--show-sdk-path")
.arg("--sdk")
.arg(sdk)
.stderr(Stdio::inherit())
.output()
.unwrap()
.stdout;
let sdk_path = String::from_utf8(sdk_path).unwrap();
cmd.args.push("-isysroot".into());
cmd.args.push(sdk_path.trim().into());
}
fn cmd<P: AsRef<OsStr>>(&self, prog: P) -> Command {
let mut cmd = Command::new(prog);
for &(ref a, ref b) in self.env.iter() {
cmd.env(a, b);
}
return cmd
}
fn get_base_compiler(&self) -> Tool {
if let Some(ref c) = self.compiler {
return Tool::new(c.clone())
}
let host = self.get_host();
let target = self.get_target();
let (env, msvc, gnu, default) = if self.cpp {
("CXX", "cl.exe", "g++", "c++")
} else {
("CC", "cl.exe", "gcc", "cc")
};
self.get_var(env).ok().map(|env| {
Tool::new(PathBuf::from(env))
}).or_else(|| {
windows_registry::find_tool(&target, "cl.exe")
}).unwrap_or_else(|| {
let compiler = if host.contains("windows") &&
target.contains("windows") {
if target.contains("msvc") {
msvc.to_string()
} else {
format!("{}.exe", gnu)
}
} else if target.contains("android") {
format!("{}-{}", target, gnu)
} else if self.get_host() != target {
let prefix = match &target[..] {
"aarch64-unknown-linux-gnu" => Some("aarch64-linux-gnu"),
"arm-unknown-linux-gnueabi" => Some("arm-linux-gnueabi"),
"arm-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"),
"armv7-unknown-linux-gnueabihf" => Some("arm-linux-gnueabihf"),
"powerpc-unknown-linux-gnu" => Some("powerpc-linux-gnu"),
"powerpc64-unknown-linux-gnu" => Some("powerpc-linux-gnu"),
"powerpc64le-unknown-linux-gnu" => Some("powerpc64le-linux-gnu"),
"mips-unknown-linux-gnu" => Some("mips-linux-gnu"),
"mipsel-unknown-linux-gnu" => Some("mipsel-linux-gnu"),
"i686-pc-windows-gnu" => Some("i686-w64-mingw32"),
"x86_64-pc-windows-gnu" => Some("x86_64-w64-mingw32"),
"x86_64-unknown-linux-musl" => Some("musl"),
"x86_64-rumprun-netbsd" => Some("x86_64-rumprun-netbsd"),
_ => None,
};
match prefix {
Some(prefix) => format!("{}-{}", prefix, gnu),
None => default.to_string(),
}
} else {
default.to_string()
};
Tool::new(PathBuf::from(compiler))
})
}
fn get_var(&self, var_base: &str) -> Result<String, String> {
let target = self.get_target();
let host = self.get_host();
let kind = if host == target {"HOST"} else {"TARGET"};
let target_u = target.replace("-", "_");
let res = self.getenv(&format!("{}_{}", var_base, target))
.or_else(|| self.getenv(&format!("{}_{}", var_base, target_u)))
.or_else(|| self.getenv(&format!("{}_{}", kind, var_base)))
.or_else(|| self.getenv(var_base));
match res {
Some(res) => Ok(res),
None => Err("Could not get environment variable".to_string()),
}
}
fn envflags(&self, name: &str) -> Vec<String> {
self.get_var(name).unwrap_or(String::new())
.split(|c: char| c.is_whitespace()).filter(|s| !s.is_empty())
.map(|s| s.to_string())
.collect()
}
    /// Returns the C++ standard library to link against: the value configured
    /// via `cpp_link_stdlib` if any, otherwise `None` for MSVC targets, `c++`
    /// for OS X and `stdc++` for anything else.
fn get_cpp_link_stdlib(&self) -> Option<String> {
self.cpp_link_stdlib.clone().unwrap_or_else(|| {
let target = self.get_target();
if target.contains("msvc") {
None
} else if target.contains("darwin") {
Some("c++".to_string())
} else {
Some("stdc++".to_string())
}
})
}
fn get_ar(&self) -> PathBuf {
self.archiver.clone().or_else(|| {
self.get_var("AR").map(PathBuf::from).ok()
}).unwrap_or_else(|| {
if self.get_target().contains("android") {
PathBuf::from(format!("{}-ar", self.get_target()))
} else {
PathBuf::from("ar")
}
})
}
fn get_target(&self) -> String {
self.target.clone().unwrap_or_else(|| self.getenv_unwrap("TARGET"))
}
fn get_host(&self) -> String {
self.host.clone().unwrap_or_else(|| self.getenv_unwrap("HOST"))
}
fn get_opt_level(&self) -> u32 {
self.opt_level.unwrap_or_else(|| {
self.getenv_unwrap("OPT_LEVEL").parse().unwrap()
})
}
fn get_debug(&self) -> bool {
self.debug.unwrap_or_else(|| self.getenv_unwrap("PROFILE") == "debug")
}
fn get_out_dir(&self) -> PathBuf {
self.out_dir.clone().unwrap_or_else(|| {
env::var_os("OUT_DIR").map(PathBuf::from).unwrap()
})
}
fn getenv(&self, v: &str) -> Option<String> {
let r = env::var(v).ok();
self.print(&format!("{} = {:?}", v, r));
r
}
fn getenv_unwrap(&self, v: &str) -> String {
match self.getenv(v) {
Some(s) => s,
None => fail(&format!("environment variable `{}` not defined", v)),
}
}
fn print(&self, s: &str) {
if self.cargo_metadata {
println!("{}", s);
}
}
}
impl Tool {
fn new(path: PathBuf) -> Tool {
Tool {
path: path,
args: Vec::new(),
env: Vec::new(),
}
}
/// Converts this compiler into a `Command` that's ready to be run.
///
/// This is useful for when the compiler needs to be executed and the
/// command returned will already have the initial arguments and environment
/// variables configured.
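    ///
    /// # Example
    ///
    /// ```no_run
    /// // A sketch; assumes the configured compiler is runnable as-is.
    /// let compiler = gcc::Config::new().file("src/foo.c").get_compiler();
    /// let status = compiler.to_command().status().unwrap();
    /// assert!(status.success());
    /// ```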
pub fn to_command(&self) -> Command {
let mut cmd = Command::new(&self.path);
cmd.args(&self.args);
for &(ref k, ref v) in self.env.iter() {
cmd.env(k, v);
}
return cmd
}
/// Returns the path for this compiler.
///
/// Note that this may not be a path to a file on the filesystem, e.g. "cc",
/// but rather something which will be resolved when a process is spawned.
pub fn path(&self) -> &Path {
&self.path
}
/// Returns the default set of arguments to the compiler needed to produce
/// executables for the target this compiler generates.
pub fn args(&self) -> &[OsString] {
&self.args
}
/// Returns the set of environment variables needed for this compiler to
/// operate.
///
/// This is typically only used for MSVC compilers currently.
pub fn env(&self) -> &[(OsString, OsString)] {
&self.env
}
}
fn run(cmd: &mut Command, program: &str) {
println!("running: {:?}", cmd);
let status = match cmd.status() {
Ok(status) => status,
Err(ref e) if e.kind() == io::ErrorKind::NotFound => {
let extra = if cfg!(windows) {
" (see https://github.com/alexcrichton/gcc-rs#compile-time-requirements \
for help)"
} else {
""
};
fail(&format!("failed to execute command: {}\nIs `{}` \
not installed?{}", e, program, extra));
}
Err(e) => fail(&format!("failed to execute command: {}", e)),
};
println!("{:?}", status);
if !status.success() {
fail(&format!("command did not execute successfully, got: {}", status));
}
}
fn fail(s: &str) -> ! {
println!("\n\n{}\n\n", s);
panic!()
}<|fim▁end|> | |
<|file_name|>test_db_base.cpp<|end_file_name|><|fim▁begin|>#include "test_db_base.h"
#include <cppcms/json.h>
test_db_base::test_db_base(cppcms::service &srv) : cppcms::application(srv) {
db_connection_str = settings().get<std::string>("app.db_connection_string");
}
<|fim▁hole|>
void test_db_base::clear() {
sql.close();
}<|fim▁end|> | void test_db_base::init() {
sql.open(db_connection_str);
} |
<|file_name|>a3.py<|end_file_name|><|fim▁begin|>def p5a():
xs = [-.75, -0.5,-0.25,0]
fxs = [-.0718125, -.02475, .3349375, 1.101]
getdd123(xs,fxs,3)
def getdd123(xs, fxs, n):
    # First, second and third Newton divided differences of (xs[i], fxs[i]);
    # n is the number of first-order differences to compute.
l1stdd = []
l2nddd = []
l3rddd = []
for i in range(0,n):
l1stdd.append((fxs[i+1]-fxs[i])/(xs[i+1]-xs[i]))
for i in range(0,n-1):
l2nddd.append((l1stdd[i+1]-l1stdd[i])/(xs[i+2]-xs[i]))
for i in range(0,n-2):
l3rddd.append((l2nddd[i+1]-l2nddd[i])/(xs[i+3]-xs[i]))
#print [l1stdd,l2nddd,l3rddd]
return [l1stdd,l2nddd,l3rddd]
def p7a():
xs = [-.1, 0,.2,.3]
fxs = [5.3, 2, 3.19, 1]<|fim▁hole|>
def p14():
xs = [0, .25,.5,.75]
fxs = [1, 2, 3.5, 6]
getdd123(xs,fxs,3)<|fim▁end|> | getdd123(xs,fxs,3) |
<|file_name|>toc.service.ts<|end_file_name|><|fim▁begin|>import { Inject, Injectable } from '@angular/core';
import { DOCUMENT, DomSanitizer, SafeHtml } from '@angular/platform-browser';
import { ReplaySubject } from 'rxjs/ReplaySubject';
import { ScrollSpyInfo, ScrollSpyService } from 'app/shared/scroll-spy.service';
export interface TocItem {
content: SafeHtml;
href: string;
isSecondary?: boolean;
level: string;
title: string;
}
@Injectable()
export class TocService {
tocList = new ReplaySubject<TocItem[]>(1);
activeItemIndex = new ReplaySubject<number | null>(1);
private scrollSpyInfo: ScrollSpyInfo | null;
constructor(
@Inject(DOCUMENT) private document: any,
private domSanitizer: DomSanitizer,
private scrollSpyService: ScrollSpyService) { }
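  // A sketch of typical usage from a document-hosting component; the
  // component fields and element names below are illustrative:
  //
  //   this.tocService.genToc(docViewerElement, currentDocId);
  //   this.tocService.tocList.subscribe(items => { /* render the items */ });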
genToc(docElement?: Element, docId = '') {
this.resetScrollSpyInfo();
if (!docElement) {
this.tocList.next([]);
return;
}
const headings = this.findTocHeadings(docElement);
const idMap = new Map<string, number>();
const tocList = headings.map(heading => ({
content: this.extractHeadingSafeHtml(heading),
href: `${docId}#${this.getId(heading, idMap)}`,
level: heading.tagName.toLowerCase(),
title: heading.innerText.trim(),
}));
this.tocList.next(tocList);
this.scrollSpyInfo = this.scrollSpyService.spyOn(headings);
this.scrollSpyInfo.active.subscribe(item => this.activeItemIndex.next(item && item.index));
}
reset() {
this.resetScrollSpyInfo();
this.tocList.next([]);
}
// This bad boy exists only to strip off the anchor link attached to a heading
private extractHeadingSafeHtml(heading: HTMLHeadingElement) {
const a = this.document.createElement('a') as HTMLAnchorElement;
a.innerHTML = heading.innerHTML;
const anchorLink = a.querySelector('a');
if (anchorLink) {
a.removeChild(anchorLink);
}
// security: the document element which provides this heading content
// is always authored by the documentation team and is considered to be safe
return this.domSanitizer.bypassSecurityTrustHtml(a.innerHTML.trim());
}
private findTocHeadings(docElement: Element): HTMLHeadingElement[] {
const headings = docElement.querySelectorAll('h2,h3');
const skipNoTocHeadings = (heading: HTMLHeadingElement) => !/(?:no-toc|notoc)/i.test(heading.className);
return Array.prototype.filter.call(headings, skipNoTocHeadings);
}
private resetScrollSpyInfo() {
if (this.scrollSpyInfo) {
this.scrollSpyInfo.unspy();
this.scrollSpyInfo = null;
}
this.activeItemIndex.next(null);
}
// Extract the id from the heading; create one if necessary
// Is it possible for a heading to lack an id?
private getId(h: HTMLHeadingElement, idMap: Map<string, number>) {
let id = h.id;<|fim▁hole|> addToMap(id);
} else {
id = h.innerText.toLowerCase().replace(/\W+/g, '-');
id = addToMap(id);
h.id = id;
}
return id;
// Map guards against duplicate id creation.
function addToMap(key: string) {
      const count = (idMap.get(key) || 0) + 1;
      idMap.set(key, count);
return count === 1 ? key : `${key}-${count}`;
}
}
}<|fim▁end|> | if (id) { |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
.. module:: genomics.config
:synopsis: library configuration
:noindex:
:copyright: Copyright 2014 by Tiago Antao
:license: GNU Affero, see LICENSE for details
.. moduleauthor:: Tiago Antao <[email protected]>
'''
import os
import configparser as cp
config_file = os.path.expanduser('~/.config/pygenomics/main.conf')
# Override config_file before importing the main module to read another file
class Config(object):
'''Configuration object
:param config_file: The config file to use
The default config file is defined above and can be changed before doing
import genomics
Configuration parameters are separated by section
**Section main**
* **mr_dir** Directory where temporary map_reduce communication is stored
* **grid** Grid type (Local)
**Section grid.local**
The parameters for grid type Local.
Currently limit (see :py:class:`genomics.parallel.executor.Local`)
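
    A minimal example ``main.conf`` (values are illustrative)::

        [main]
        mr_dir = /tmp/pygenomics
        grid = Local

        [grid.local]
        limit = 1.0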
'''
def __init__(self, config_file=config_file):
self.config_file = config_file
def load_config(self):
config = cp.ConfigParser()
config.read(self.config_file)
try:
self.mr_dir = config.get('main', 'mr_dir')
self.grid = config.get('main', 'grid')
if self.grid == 'Local':
self.grid_limit = config.get('grid.local', 'limit')
if self.grid_limit.find('.') > -1:
self.grid_limit = float(self.grid_limit)
else:<|fim▁hole|> self.mr_dir = '/tmp'
self.grid = 'Local'
self.grid_limit = 1.0<|fim▁end|> | self.grid_limit = int(self.grid_limit)
except cp.NoSectionError: |
<|file_name|>softlayer.py<|end_file_name|><|fim▁begin|>from os import getenv
from time import time, sleep
from core import Platform, Instance
from SoftLayer import Client
from SoftLayer.CCI import CCIManager
from paramiko import SSHClient
class _SuppressPolicy(object):
def missing_host_key(self, client, hostname, key):
pass
class CCIPlatform(Platform):
_required_opts = ['cores', 'memory', 'domain',
'datacenter', 'os_code']
def _on_init(self):
self._client = Client(username=getenv('SL_USERNAME'),
api_key=getenv('SL_API_KEY'))
self._manager = CCIManager(self._client)
def find_instance(self, host_name):
instance = None
host_name = host_name.lower()
for ii in self._manager.list_instances():
fqdn = ii.get('fullyQualifiedDomainName', '')
if fqdn.lower() == host_name:
instance = Instance(id=ii.get('id'), name=fqdn)
break
return instance
def get_instance(self, id):
cci = self._manager.get_instance(id)
return self._cci_to_instance(cci)
def create_instance(self, host_name):
host_bits = host_name.split('.', 1)
host_name = host_bits[0]
domain = host_bits[1] if len(host_bits) >= 2 else self.config('domain')
base_options = {'cpus': self.config('cores'),
'memory': self.config('memory'),
'hostname': host_name,
'domain': domain,
'datacenter': self.config('datacenter'),
'os_code': self.config('os_code')}
        self.log_info('creating cci %s/%s' % (host_name, domain))
        self.log_info(str(base_options))
cci = self._manager.create_instance(**base_options)
cci = self._cci_await_ready(cci)
self._cci_install_keys(cci['id'])
return self._cci_to_instance(cci)
def reimage_instance(self, instance):
self._manager.reload_instance(instance.id)
cci = self._manager.get_instance(instance.id)
cci = self._cci_await_transaction_start(cci)
cci = self._cci_await_ready(cci)
self._cci_install_keys(cci['id'])
return self._cci_to_instance(cci)
def delete_instance(self, instance):
self._manager.cancel_instance(instance.id)
self._cci_await_delete(self._manager.get_instance(instance.id))
def instance_ready(self, instance):
cci = self._manager.get_instance(instance.id)
return (cci and 'activeTransaction' not in cci)
def _cci_to_instance(self, cci):
if not cci:
return None
return Instance(id=cci['id'], name=cci['fullyQualifiedDomainName'])
def _cci_await_state(self, cci, state_check, sleep_secs=5):
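        """Poll the CCI until state_check(cci) is falsy, refreshing the
        record every sleep_secs seconds, and return the final record."""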
wait_start = time()
self.log_info('Waiting for %s to change state...' % (cci['id']))
while state_check(cci):
sleep(sleep_secs)
cci = self._manager.get_instance(cci['id'])
self.log_info('...')
self.log_info('Available after %0.3f secs.' % (time() - wait_start))
return cci
def _cci_await_ready(self, cci):
return self._cci_await_state(cci,
lambda c: 'activeTransaction' in c,
sleep_secs=5)
def _cci_await_transaction_start(self, cci):
return self._cci_await_state(cci,
lambda c: 'activeTransaction' not in c,
sleep_secs=2)
def _cci_await_delete(self, cci):
return self._cci_await_state(cci,
lambda c: c and 'id' in c,
sleep_secs=2)
def _get_cci_root_password(self, cci):
passwords = self._manager.get_instance_passwords(cci['id'])
password = None
for p in passwords:
if 'username' in p and p['username'] == 'root':
password = p['password']
break
return password
def _cci_install_keys(self, id):
cci = self._manager.get_instance(id)
password = self._get_cci_root_password(cci)
if not password:
raise Exception('Passwords are not available for instance %s' %
cci['id'])
keys_url = self.config('ssh_key_url')
if not keys_url:
return
client_settings = {'hostname': cci['primaryIpAddress'],
'username': 'root',<|fim▁hole|> client.connect(look_for_keys=False, **client_settings)
client.exec_command('mkdir -p ~/.ssh')
client.exec_command('wget -T 10 -q -O ~/.ssh/authorized_keys %s' %
keys_url)
client.close()<|fim▁end|> | 'password': password}
client = SSHClient()
client.set_missing_host_key_policy(_SuppressPolicy()) |
<|file_name|>DefaultProcedureOperator.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2007-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ymate.platform.persistence.jdbc.base.impl;
import net.ymate.platform.commons.util.ExpressionUtils;
import net.ymate.platform.core.persistence.base.Type;
import net.ymate.platform.persistence.jdbc.IDatabaseConnectionHolder;
import net.ymate.platform.persistence.jdbc.IDatabaseDataSourceConfig;
import net.ymate.platform.persistence.jdbc.base.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.sql.CallableStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Default implementation of the database stored-procedure operator interface.
 *
 * @param <T> element type
 * @author 刘镇 ([email protected]) on 16/12/8 1:04 AM
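 * <p>A hypothetical usage sketch; the procedure name, connection holder and
 * lambda are illustrative (and assume {@code IOutResultProcessor} is a
 * functional interface):</p>
 * <pre>{@code
 * new DefaultProcedureOperator<Object[]>("my_proc", connectionHolder)
 *         .addParameter("input")
 *         .addOutParameter(java.sql.Types.INTEGER)
 *         .execute((idx, type, value) -> LOG.info("OUT[" + idx + "] = " + value));
 * }</pre>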
*/
public class DefaultProcedureOperator<T> extends AbstractOperator implements IProcedureOperator<T> {
private static final Log LOG = LogFactory.getLog(DefaultProcedureOperator.class);
/**
     * SQL types of the stored procedure's OUT parameters.
*/
private final List<Integer> outParams = new ArrayList<>();
private IOutResultProcessor resultProcessor;
private IResultSetHandler<T> resultSetHandler;
private final List<List<T>> resultSets = new ArrayList<>();
public DefaultProcedureOperator(String sql, IDatabaseConnectionHolder connectionHolder) {
super(sql, connectionHolder);
}
public DefaultProcedureOperator(String sql, IDatabaseConnectionHolder connectionHolder, IAccessorConfig accessorConfig) {
super(sql, connectionHolder, accessorConfig);
}
@Override
public void execute() throws Exception {
if (!this.executed) {
StopWatch time = new StopWatch();
time.start();
try {
doExecute();
                // Mark as executed only when no exception occurred, to avoid running twice.
this.executed = true;
} finally {
time.stop();
this.expenseTime = time.getTime();
//
if (LOG.isInfoEnabled()) {
IDatabaseDataSourceConfig dataSourceConfig = this.getConnectionHolder().getDataSourceConfig();
if (dataSourceConfig.isShowSql()) {
String logStr = ExpressionUtils.bind("[${sql}]${param}[${count}][${time}]")
.set("sql", StringUtils.defaultIfBlank(this.sql, "@NULL"))
.set("param", serializeParameters())
.set("count", "N/A")
.set("time", this.expenseTime + "ms").getResult();
if (dataSourceConfig.isStackTraces()) {
StringBuilder stackBuilder = new StringBuilder(logStr);
doAppendStackTraces(dataSourceConfig, stackBuilder);
LOG.info(stackBuilder.toString());
} else {
LOG.info(logStr);
}
}
}
}
}
}
@Override
public IProcedureOperator<T> execute(IResultSetHandler<T> resultSetHandler) throws Exception {
this.resultSetHandler = resultSetHandler;
this.execute();
return this;
}
@Override
public IProcedureOperator<T> execute(IOutResultProcessor resultProcessor) throws Exception {
this.resultProcessor = resultProcessor;
this.execute();
return this;
}
@Override
protected int doExecute() throws Exception {<|fim▁hole|> IAccessor accessor = new BaseAccessor(this.getAccessorConfig());
statement = accessor.getCallableStatement(this.getConnectionHolder().getConnection(), doBuildCallSql());
doSetParameters(statement);
doRegisterOutParams(statement);
if (this.getAccessorConfig() != null) {
eventContext = new AccessorEventContext(statement, Type.OPT.PROCEDURE);
this.getAccessorConfig().beforeStatementExecution(eventContext);
}
boolean flag = statement.execute();
if (flag) {
do {
ResultSet resultSet = statement.getResultSet();
if (resultSet != null) {
resultSets.add(resultSetHandler.handle(resultSet));
resultSet.close();
}
} while (statement.getMoreResults());
} else {
int idx = this.getParameters().size() + 1;
for (Integer paramType : outParams) {
resultProcessor.process(idx, paramType, statement.getObject((idx)));
idx++;
}
}
return -1;
} catch (Exception ex) {
hasEx = true;
throw ex;
} finally {
if (!hasEx && this.getAccessorConfig() != null && eventContext != null) {
this.getAccessorConfig().afterStatementExecution(eventContext);
}
if (statement != null) {
statement.close();
}
}
}
/**
     * Builds the stored procedure CALL statement, e.g. {@code {CALL my_proc(?,?)}}
     * (subclasses may override this for database-specific dialects).
     *
     * @return the CALL statement
*/
protected String doBuildCallSql() {
List<String> params = new ArrayList<>();
for (int i = 0; i < this.getParameters().size() + this.outParams.size(); i++) {
params.add("?");
}
this.sql = String.format("{CALL %s%s}", this.getSQL(), params.isEmpty() ? "()" : String.format("(%s)", StringUtils.join(params, ',')));
return this.sql;
}
/**
     * Registers the stored procedure's OUT parameters, starting after the last
     * input parameter (subclasses may override this for different databases).
     *
     * @param statement CallableStatement
     * @throws SQLException any exception that may occur
*/
protected void doRegisterOutParams(CallableStatement statement) throws SQLException {
int idx = this.getParameters().size() + 1;
for (Integer type : outParams) {
statement.registerOutParameter(idx++, type);
}
}
@Override
@SuppressWarnings("unchecked")
public IProcedureOperator<T> addParameter(SQLParameter parameter) {
return (IProcedureOperator<T>) super.addParameter(parameter);
}
@Override
@SuppressWarnings("unchecked")
public IProcedureOperator<T> addParameter(Object parameter) {
return (IProcedureOperator<T>) super.addParameter(parameter);
}
@Override
public IProcedureOperator<T> addOutParameter(Integer sqlParamType) {
this.outParams.add(sqlParamType);
return this;
}
@Override
public IProcedureOperator<T> setOutResultProcessor(IOutResultProcessor outResultProcessor) {
resultProcessor = outResultProcessor;
return this;
}
@Override
public IProcedureOperator<T> setResultSetHandler(IResultSetHandler<T> resultSetHandler) {
this.resultSetHandler = resultSetHandler;
return this;
}
@Override
public List<List<T>> getResultSets() {
return Collections.unmodifiableList(resultSets);
}
}<|fim▁end|> | CallableStatement statement = null;
AccessorEventContext eventContext = null;
boolean hasEx = false;
try { |
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2014 Davide Corio
# Copyright 2015-2016 Lorenzo Battistini - Agile Business Group
# Copyright 2018 Sergio Zanchetta (Associazione PNLUG - Gruppo Odoo)
# Copyright 2018 Sergio Corato
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl).
{
'name': 'Italian Localization - Fattura elettronica - Emissione',
'version': '10.0.1.5.1',
'category': 'Localization/Italy',
'summary': 'Emissione fatture elettroniche',
'author': 'Davide Corio, Agile Business Group, Innoviu,'
'Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/l10n-italy/tree/10.0/'
'l10n_it_fatturapa_out',
'license': 'LGPL-3',
"depends": [
'l10n_it_fatturapa',
'l10n_it_split_payment',
],
"data": [
'wizard/wizard_export_fatturapa_view.xml',
'views/attachment_view.xml',
'views/account_view.xml',
'security/ir.model.access.csv',
'security/rules.xml',
],
'installable': True,
'external_dependencies': {
'python': [
'unidecode',
'pyxb', # pyxb 1.2.6
],
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>test_app.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Test for: command line arguments
"""
from nose.tools import eq_, assert_raises
from m2bk import app, config, const
import os
def _get_arg_cfg_file_name(arg, filename):
try:
app.init_parsecmdline([arg, filename])
except FileNotFoundError:
pass
return config.get_config_file_name()<|fim▁hole|>
def test_args_config():
# file names
f1 = 'f1.txt'
f2 = 'f2.txt'
f3 = 'f3.txt'
# ---
# Test whether -c works as --config
eq_(_get_arg_cfg_file_name('-c', f1),
_get_arg_cfg_file_name('--config', f1),
msg="-c and --config are not capturing the expected file name")
# ---
# Test -c and --config with more than one value
assert_raises(SystemExit, app.init_parsecmdline, ['-c', f1, f2])
# absolute path is expected for f1
eq_(config.get_config_file_name(), os.path.abspath(f1),
msg="Unexpected file, got '{f}' instead of '{f1}'".format(f=config.get_config_file_name(), f1=os.path.abspath(f1)))
# ---
# test when several config directives are specified
try:
app.init_parsecmdline(['-c', f1, '--config', f2, '-c', f3])
except FileNotFoundError:
pass
# file name should be f3
eq_(config.get_config_file_name(), os.path.abspath(f3),
msg="The last --config/-c argument should be the one whose file name"
"should be captured")
def test_args_noargs():
# Test whether m2bk tries to use default config file
# when no arguments are present
try:
app.init_parsecmdline()
except FileNotFoundError:
pass
eq_(config.get_config_file_name(), config.CONF_DEFAULT_FILE,
msg="CONF_DEFAULT_FILE expected, got '{f}'".format(f=config.get_config_file_name()))<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate cgmath;
#[macro_use]
extern crate glium;
extern crate glutin;
#[macro_use]
extern crate log;
extern crate rand;
extern crate time;
mod main;<|fim▁hole|>mod scene;
fn main() {
main::main();
}<|fim▁end|> | mod prelude;
mod raytrace; |
<|file_name|>u16.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The 16-bit unsigned integer type.
//!
//! *[See also the `u16` primitive type](../primitive.u16.html).*
#![stable(feature = "rust1", since = "1.0.0")]
uint_module! { u16, i16, 16 }<|fim▁end|> | // http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
<|file_name|>web.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
import subprocess
from xml.dom import minidom
import imaplib
from pycious.lib.common import singleton
class Mail:
def __init__(self, username, password,\
server='imap.gmail.com', port=993):
"""
        Store the IMAP credentials. Calling the instance returns -1 if
        there is no connection, otherwise the number of unread mails.
"""
if not username or not password:
raise Exception('Error: You must specify the username and '+\
'password in your config file of pycious.')
self.username, self.password = username, password
self.server, self.port = server, port
# Define the connection object to None
self.M = None
def __connect(self):<|fim▁hole|>
def __call__(self):
"""
It returns -1 if it's not available the information otherwise
returns the number of unread mail.
"""
try:
if not self.M:
self.__connect()
status, counts = self.M.status("Inbox","(MESSAGES UNSEEN)")
unread = counts[0].split()[4][:-1]
return int(unread)
except:
self.M = None
return -1
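# Illustrative usage (credentials are placeholders):
#   unread_mails = Mail('[email protected]', 'app-password')()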
class Grss:
def __init__(self, username, password):
"""
        Store the Google Reader credentials. Calling the instance returns -1
        if there is no connection, otherwise the number of unread news items.
"""
if not username or not password:
raise Exception('Error: You must specify the username and '+\
'password in your config file of pycious.')
self.username, self.password = username, password
def __connect(self):
st, out = subprocess.getstatusoutput('curl -fs '+\
'"https://www.google.com/accounts/ClientLogin?'+\
'service=reader&Email='+self.username+\
'&Passwd='+self.password+'"')
if not out or out=="":
raise Exception()
auth_resp_dict = dict(x.split('=') for x in out.split('\n') if x)
auth_token = auth_resp_dict["Auth"]
auth = 'GoogleLogin auth='+ auth_token
command = 'curl -s -X GET http://www.google.com/reader/api/0/unread-count?all=true --header "Authorization: '+auth+'"'
st, out = subprocess.getstatusoutput(command)
xml_doc = minidom.parseString(str(out))
return xml_doc
def __call__(self):
try:
xml_doc = self.__connect()
list_el = xml_doc.firstChild.getElementsByTagName('list')[0]
if len(list_el.childNodes)==0:
return -1
for obj in list_el.childNodes:
if obj.getElementsByTagName('string')[0].firstChild.data.find('reading-list')!=-1:
for numb in obj.getElementsByTagName('number'):
if numb.attributes['name'].value=='count':
count = int(numb.firstChild.data)
return count
except:
return -1<|fim▁end|> | self.M=imaplib.IMAP4_SSL(self.server , self.port)
#First field is imap login (gmail uses login with
#domain and '@' character), second - password
self.M.login(self.username, self.password) |
<|file_name|>electrum_reorg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2019 The Bitcoin Unlimited developers
"""
Tests to check if basic electrum server integration works
"""
import random
from test_framework.util import waitFor, assert_equal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.loginit import logging
from test_framework.electrumutil import compare, bitcoind_electrum_args
class ElectrumReorgTests(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [bitcoind_electrum_args()]
def run_test(self):
n = self.nodes[0]
n.generate(200)
<|fim▁hole|> # waitFor throws on timeout, failing the test
waitFor(10, lambda: compare(n, "index_height", n.getblockcount()))
waitFor(10, lambda: compare(n, "mempool_count", 0, True))
n.sendtoaddress(n.getnewaddress(), 1)
assert_equal(1, len(n.getrawmempool()))
waitFor(10, lambda: compare(n, "mempool_count", 1, True))
blocks = n.generate(50)
waitFor(10, lambda: compare(n, "index_height", n.getblockcount()))
waitFor(10, lambda: compare(n, "mempool_count", 0, True))
logging.info("invalidating %d blocks", len(blocks))
n.invalidateblock(blocks[0])
# electrum server should trim its chain as well and see our
# transaction go back into mempool
waitFor(10, lambda: compare(n, "index_height", n.getblockcount()))
waitFor(10, lambda: compare(n, "mempool_count", 1, True))
n.generate(50)
waitFor(10, lambda: compare(n, "index_height", n.getblockcount()))
waitFor(10, lambda: compare(n, "mempool_count", 0, True))
def setup_network(self, dummy = None):
self.nodes = self.setup_nodes()
if __name__ == '__main__':
ElectrumReorgTests().main()<|fim▁end|> | |
<|file_name|>NotificationPermissionsModule.native.ts<|end_file_name|><|fim▁begin|>import { NativeModulesProxy } from '@unimodules/core';
<|fim▁hole|><|fim▁end|> | import { NotificationPermissionsModule } from './NotificationPermissionsModule.types';
export default (NativeModulesProxy.ExpoNotificationPermissionsModule as any) as NotificationPermissionsModule; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
import re
import os
import sys
def get_version(package):
"""
Return package version as listed in `__version__` in `init.py`.
"""
init_py = open(os.path.join(package, '__init__.py')).read()
return re.match("__version__ = ['\"]([^'\"]+)['\"]", init_py).group(1)
def get_packages(package):
"""
Return root package and all sub-packages.
"""
return [dirpath
for dirpath, dirnames, filenames in os.walk(package)
if os.path.exists(os.path.join(dirpath, '__init__.py'))]
def get_package_data(package):
"""
Return all files under the root package, that are not in a
package themselves.
"""
walk = [(dirpath.replace(package + os.sep, '', 1), filenames)
for dirpath, dirnames, filenames in os.walk(package)
if not os.path.exists(os.path.join(dirpath, '__init__.py'))]
filepaths = []
for base, filenames in walk:
filepaths.extend([os.path.join(base, filename)
for filename in filenames])
return {package: filepaths}
version = get_version('blog')
<|fim▁hole|> print(" git push --tags")
sys.exit()
setup(
name='django-blog-sl',
version=version,
url='http://github.com/simonluijk/django-blog',
license='BSD',
description='Yet another django blog.',
author='Simon Luijk',
author_email='[email protected]', # SEE NOTE BELOW (*)
packages=get_packages('blog'),
package_data=get_package_data('blog'),
test_suite='blog.runtests.runtests.main',
install_requires=['django-mptt', 'django-markdown-deux'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP',
]
)
# (*) Please direct queries to the discussion group, rather than to me directly
# Doing so helps ensure your question is helpful to other users.
# Queries directly to my email are likely to receive a canned response.
#
# Many thanks for your understanding.<|fim▁end|> | if sys.argv[-1] == 'publish':
os.system("python setup.py sdist upload")
print("You probably want to also tag the version now:")
print(" git tag -a %s -m 'version %s'" % (version, version)) |
<|file_name|>mail.py<|end_file_name|><|fim▁begin|>"""
Tools for sending email.
"""
import mimetypes
import os
import smtplib
import socket
import time
import random
from email import Charset, Encoders
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart
from email.MIMEBase import MIMEBase
from email.Header import Header
from email.Utils import formatdate, parseaddr, formataddr
from django.conf import settings
from django.utils.encoding import smart_str, force_unicode
# Don't BASE64-encode UTF-8 messages so that we avoid unwanted attention from
# some spam filters.
Charset.add_charset('utf-8', Charset.SHORTEST, Charset.QP, 'utf-8')
# Default MIME type to use on attachments (if it is not explicitly given
# and cannot be guessed).
DEFAULT_ATTACHMENT_MIME_TYPE = 'application/octet-stream'
# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
# seconds, which slows down the restart of the server.
class CachedDnsName(object):
def __str__(self):
return self.get_fqdn()
def get_fqdn(self):
if not hasattr(self, '_fqdn'):
self._fqdn = socket.getfqdn()
return self._fqdn
DNS_NAME = CachedDnsName()
# Copied from Python standard library, with the following modifications:
# * Used cached hostname for performance.
# * Added try/except to support lack of getpid() in Jython (#5496).
def make_msgid(idstring=None):
"""Returns a string suitable for RFC 2822 compliant Message-ID, e.g:
<[email protected]>
Optional idstring if given is a string used to strengthen the
uniqueness of the message id.
"""
timeval = time.time()
utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval))
try:
pid = os.getpid()
except AttributeError:
# No getpid() in Jython, for example.
pid = 1
randint = random.randrange(100000)
if idstring is None:
idstring = ''
else:
idstring = '.' + idstring
idhost = DNS_NAME
msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, idhost)
return msgid
class BadHeaderError(ValueError):
pass
def forbid_multi_line_headers(name, val):
"""Forbids multi-line headers, to prevent header injection."""
val = force_unicode(val)
if '\n' in val or '\r' in val:
raise BadHeaderError("Header values can't contain newlines (got %r for header %r)" % (val, name))
try:
val = val.encode('ascii')
except UnicodeEncodeError:<|fim▁hole|> if name.lower() in ('to', 'from', 'cc'):
result = []
for item in val.split(', '):
nm, addr = parseaddr(item)
nm = str(Header(nm, settings.DEFAULT_CHARSET))
result.append(formataddr((nm, str(addr))))
val = ', '.join(result)
else:
val = Header(val, settings.DEFAULT_CHARSET)
else:
if name.lower() == 'subject':
val = Header(val)
return name, val
class SafeMIMEText(MIMEText):
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val)
MIMEText.__setitem__(self, name, val)
class SafeMIMEMultipart(MIMEMultipart):
def __setitem__(self, name, val):
name, val = forbid_multi_line_headers(name, val)
MIMEMultipart.__setitem__(self, name, val)
class SMTPConnection(object):
"""
A wrapper that manages the SMTP network connection.
"""
def __init__(self, host=None, port=None, username=None, password=None,
use_tls=None, fail_silently=False):
self.host = host or settings.EMAIL_HOST
self.port = port or settings.EMAIL_PORT
self.username = username or settings.EMAIL_HOST_USER
self.password = password or settings.EMAIL_HOST_PASSWORD
self.use_tls = (use_tls is not None) and use_tls or settings.EMAIL_USE_TLS
self.fail_silently = fail_silently
self.connection = None
def open(self):
"""
Ensures we have a connection to the email server. Returns whether or
not a new connection was required (True or False).
"""
if self.connection:
# Nothing to do if the connection is already open.
return False
try:
# If local_hostname is not specified, socket.getfqdn() gets used.
# For performance, we use the cached FQDN for local_hostname.
self.connection = smtplib.SMTP(self.host, self.port,
local_hostname=DNS_NAME.get_fqdn())
if self.use_tls:
self.connection.ehlo()
self.connection.starttls()
self.connection.ehlo()
if self.username and self.password:
self.connection.login(self.username, self.password)
return True
except:
if not self.fail_silently:
raise
def close(self):
"""Closes the connection to the email server."""
try:
try:
self.connection.quit()
except socket.sslerror:
# This happens when calling quit() on a TLS connection
# sometimes.
self.connection.close()
except:
if self.fail_silently:
return
raise
finally:
self.connection = None
def send_messages(self, email_messages):
"""
Sends one or more EmailMessage objects and returns the number of email
messages sent.
"""
if not email_messages:
return
new_conn_created = self.open()
if not self.connection:
# We failed silently on open(). Trying to send would be pointless.
return
num_sent = 0
for message in email_messages:
sent = self._send(message)
if sent:
num_sent += 1
if new_conn_created:
self.close()
return num_sent
def _send(self, email_message):
"""A helper method that does the actual sending."""
if not email_message.recipients():
return False
try:
self.connection.sendmail(email_message.from_email,
email_message.recipients(),
email_message.message().as_string())
except:
if not self.fail_silently:
raise
return False
return True
class EmailMessage(object):
"""
A container for email information.
"""
content_subtype = 'plain'
multipart_subtype = 'mixed'
encoding = None # None => use settings default
def __init__(self, subject='', body='', from_email=None, to=None, bcc=None,
connection=None, attachments=None, headers=None):
"""
Initialize a single email message (which can be sent to multiple
recipients).
All strings used to create the message can be unicode strings (or UTF-8
bytestrings). The SafeMIMEText class will handle any necessary encoding
conversions.
"""
if to:
assert not isinstance(to, basestring), '"to" argument must be a list or tuple'
self.to = list(to)
else:
self.to = []
if bcc:
assert not isinstance(bcc, basestring), '"bcc" argument must be a list or tuple'
self.bcc = list(bcc)
else:
self.bcc = []
self.from_email = from_email or settings.DEFAULT_FROM_EMAIL
self.subject = subject
self.body = body
self.attachments = attachments or []
self.extra_headers = headers or {}
self.connection = connection
def get_connection(self, fail_silently=False):
if not self.connection:
self.connection = SMTPConnection(fail_silently=fail_silently)
return self.connection
def message(self):
encoding = self.encoding or settings.DEFAULT_CHARSET
msg = SafeMIMEText(smart_str(self.body, settings.DEFAULT_CHARSET),
self.content_subtype, encoding)
if self.attachments:
body_msg = msg
msg = SafeMIMEMultipart(_subtype=self.multipart_subtype)
if self.body:
msg.attach(body_msg)
for attachment in self.attachments:
if isinstance(attachment, MIMEBase):
msg.attach(attachment)
else:
msg.attach(self._create_attachment(*attachment))
msg['Subject'] = self.subject
msg['From'] = self.from_email
msg['To'] = ', '.join(self.to)
# Email header names are case-insensitive (RFC 2045), so we have to
# accommodate that when doing comparisons.
header_names = [key.lower() for key in self.extra_headers]
if 'date' not in header_names:
msg['Date'] = formatdate()
if 'message-id' not in header_names:
msg['Message-ID'] = make_msgid()
for name, value in self.extra_headers.items():
msg[name] = value
return msg
def recipients(self):
"""
Returns a list of all recipients of the email (includes direct
addressees as well as Bcc entries).
"""
return self.to + self.bcc
def send(self, fail_silently=False):
"""Sends the email message."""
return self.get_connection(fail_silently).send_messages([self])
def attach(self, filename=None, content=None, mimetype=None):
"""
Attaches a file with the given filename and content. The filename can
be omitted (useful for multipart/alternative messages) and the mimetype
is guessed, if not provided.
If the first parameter is a MIMEBase subclass it is inserted directly
into the resulting message attachments.
"""
if isinstance(filename, MIMEBase):
assert content == mimetype == None
self.attachments.append(filename)
else:
assert content is not None
self.attachments.append((filename, content, mimetype))
def attach_file(self, path, mimetype=None):
"""Attaches a file from the filesystem."""
filename = os.path.basename(path)
content = open(path, 'rb').read()
self.attach(filename, content, mimetype)
def _create_attachment(self, filename, content, mimetype=None):
"""
Converts the filename, content, mimetype triple into a MIME attachment
object.
"""
if mimetype is None:
mimetype, _ = mimetypes.guess_type(filename)
if mimetype is None:
mimetype = DEFAULT_ATTACHMENT_MIME_TYPE
basetype, subtype = mimetype.split('/', 1)
if basetype == 'text':
attachment = SafeMIMEText(smart_str(content,
settings.DEFAULT_CHARSET), subtype, settings.DEFAULT_CHARSET)
else:
# Encode non-text attachments with base64.
attachment = MIMEBase(basetype, subtype)
attachment.set_payload(content)
Encoders.encode_base64(attachment)
if filename:
attachment.add_header('Content-Disposition', 'attachment',
filename=filename)
return attachment
class EmailMultiAlternatives(EmailMessage):
"""
A version of EmailMessage that makes it easy to send multipart/alternative
messages. For example, including text and HTML versions of the text is
made easier.
"""
multipart_subtype = 'alternative'
def attach_alternative(self, content, mimetype=None):
"""Attach an alternative content representation."""
self.attach(content=content, mimetype=mimetype)
def send_mail(subject, message, from_email, recipient_list,
fail_silently=False, auth_user=None, auth_password=None):
"""
Easy wrapper for sending a single message to a recipient list. All members
of the recipient list will see the other recipients in the 'To' field.
If auth_user is None, the EMAIL_HOST_USER setting is used.
If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = SMTPConnection(username=auth_user, password=auth_password,
fail_silently=fail_silently)
return EmailMessage(subject, message, from_email, recipient_list,
connection=connection).send()
def send_mass_mail(datatuple, fail_silently=False, auth_user=None,
auth_password=None):
"""
Given a datatuple of (subject, message, from_email, recipient_list), sends
each message to each recipient list. Returns the number of e-mails sent.
If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
If auth_user and auth_password are set, they're used to log in.
If auth_user is None, the EMAIL_HOST_USER setting is used.
If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
Note: The API for this method is frozen. New code wanting to extend the
functionality should use the EmailMessage class directly.
"""
connection = SMTPConnection(username=auth_user, password=auth_password,
fail_silently=fail_silently)
messages = [EmailMessage(subject, message, sender, recipient)
for subject, message, sender, recipient in datatuple]
return connection.send_messages(messages)
def mail_admins(subject, message, fail_silently=False):
"""Sends a message to the admins, as defined by the ADMINS setting."""
EmailMessage(settings.EMAIL_SUBJECT_PREFIX + subject, message,
settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS]
).send(fail_silently=fail_silently)
def mail_managers(subject, message, fail_silently=False):
"""Sends a message to the managers, as defined by the MANAGERS setting."""
EmailMessage(settings.EMAIL_SUBJECT_PREFIX + subject, message,
settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS]
).send(fail_silently=fail_silently)<|fim▁end|> | |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>angular.module('MEANcraftApp', ['ngRoute', 'MEANcraftApp.login', 'MEANcraftApp.overview', 'btford.socket-io'/*,'socket-io', 'flow'*/])
.config(function ($httpProvider, $routeProvider) {<|fim▁hole|> $httpProvider.interceptors.push('TokenInterceptor');
$routeProvider
.when('/login', {
templateUrl: 'app/login/login',
controller: 'loginCtrl',
protect: false
})
.when('/overview', {
templateUrl: 'app/overview/overview',
//controller: 'overviewCtrl',
protect: true,
resolve: {
initialData: function (ServerSocket, FetchData, $q) {
return $q(function (resolve, reject) {
ServerSocket.emit('info');
ServerSocket.once('info', function (data) {
console.log(data);
FetchData = angular.extend(FetchData, data);
resolve();
});
});
}
}
})
.otherwise({
redirectTo: '/overview'
});
})
.run(function ($rootScope, $location, $window, $routeParams, UserAuth) {
if (!UserAuth.isLogged) {
$location.path('/login');
}
$rootScope.$on('$routeChangeStart', function (event, nextRoute, prevRoute) {
console.groupCollapsed('%cAPP.RUN -> ROUTE CHANGE START', 'background: #222; color: #bada55;');
console.log('%cTOKEN -> %c' + $window.sessionStorage.token, 'font-weight: bold', '');
            console.log('%cLOGIN STATUS -> %c' + UserAuth.isLogged, 'font-weight: bold', UserAuth.isLogged ? 'color: green;' : 'color: red;');
console.groupEnd('APP.RUN -> ROUTE CHANGE START');
if (nextRoute.protect && UserAuth.isLogged === false && !$window.sessionStorage.token) {
$location.path('/login');
console.error('Route protected, user not logged in');
} else if (!nextRoute.protect && UserAuth.isLogged) {
$location.path('/overview');
}
});
});<|fim▁end|> | |
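// Hedged sketch (added for illustration): one possible TokenInterceptor
// matching the $httpProvider.interceptors.push('TokenInterceptor') above.
// The Authorization header format is an assumption, not the project's code.
angular.module('MEANcraftApp')
    .factory('TokenInterceptor', function ($window) {
        return {
            request: function (config) {
                config.headers = config.headers || {};
                if ($window.sessionStorage.token) {
                    config.headers.Authorization = 'Bearer ' + $window.sessionStorage.token;
                }
                return config;
            }
        };
    });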
<|file_name|>install-app-device-test.js<|end_file_name|><|fim▁begin|>const td = require('testdouble');
const expect = require('../../../../helpers/expect');
const RSVP = require('rsvp');
const Promise = RSVP.Promise;
const adbPath = 'adbPath';
const deviceUUID = 'uuid';
const apkPath = 'apk-path';
const spawnArgs = [adbPath, ['-s', deviceUUID, 'install', '-r', apkPath]];
describe('Android Install App - Device', () => {
let installAppDevice;
let spawn;<|fim▁hole|>
beforeEach(() => {
let sdkPaths = td.replace('../../../../../lib/targets/android/utils/sdk-paths');
td.when(sdkPaths()).thenReturn({ adb: adbPath });
spawn = td.replace('../../../../../lib/utils/spawn');
td.when(spawn(...spawnArgs)).thenReturn(Promise.resolve({ code: 0 }));
installAppDevice = require('../../../../../lib/targets/android/tasks/install-app-device');
});
afterEach(() => {
td.reset();
});
it('calls spawn with correct arguments', () => {
td.config({ ignoreWarnings: true });
td.when(spawn(), { ignoreExtraArgs: true })
.thenReturn(Promise.resolve({ code: 0 }));
return installAppDevice(deviceUUID, apkPath).then(() => {
td.verify(spawn(...spawnArgs));
td.config({ ignoreWarnings: false });
});
});
it('resolves with object containing exit code from spawned process', () => {
return expect(installAppDevice(deviceUUID, apkPath))
.to.eventually.contain({ code: 0 });
});
it('bubbles up error message when spawn rejects', () => {
td.when(spawn(...spawnArgs)).thenReturn(Promise.reject('spawn error'));
return expect(installAppDevice(deviceUUID, apkPath))
.to.eventually.be.rejectedWith('spawn error');
});
});<|fim▁end|> | |
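// Hedged sketch (added): roughly what the module under test might look like;
// the real implementation and its require paths may differ. Dependencies are
// injected as parameters so this stays self-contained.
function installAppDeviceSketch(sdkPaths, spawn) {
  return function installAppDevice(deviceUUID, apkPath) {
    const adb = sdkPaths().adb;
    return spawn(adb, ['-s', deviceUUID, 'install', '-r', apkPath]);
  };
}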
<|file_name|>limitprocessor_identifier_hint_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Test for the Hint identifier
"""
import datetime
import unittest
from base_test import PschedTestBase
from pscheduler.limitprocessor.identifier.hint import *
DATA = {
"hint": "value",
"match": {
"style": "exact",
"match": "testing",
"case-insensitive": False
}
}
HINTS_HIT = {
"value": "testing"
}
HINTS_MISS = {
"value": "not-testing"
}
class TestLimitprocessorIdentifierHint(PschedTestBase):
"""
Test the Identifier
"""
def test_data_is_valid(self):
"""Limit Processor / Identifier Hint / Data Validation"""
self.assertEqual(data_is_valid(DATA), (True, "OK"))
self.assertEqual(data_is_valid({}), (False, "At /: 'hint' is a required property"))<|fim▁hole|>
def test_identifier(self):
"""Limit Processor / Identifier Hint / Identifier"""
ident = IdentifierHint(DATA)
self.assertEqual(ident.evaluate(HINTS_HIT), True)
self.assertEqual(ident.evaluate(HINTS_MISS), False)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | self.assertRaises(ValueError, data_is_valid, 123)
|
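# Illustrative note (added): with DATA above, the "exact" match style reduces
# to a case-sensitive comparison on the named hint; this helper mirrors that
# behaviour for the two fixtures (an assumption about the implementation).
def _example_exact_match(hints):
    return hints.get(DATA["hint"]) == DATA["match"]["match"]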
<|file_name|>RemoveSubscriptionInfoTest.java<|end_file_name|><|fim▁begin|>/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.openwire.v4;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.activemq.openwire.*;
import org.apache.activemq.command.*;
/**
* Test case for the OpenWire marshalling for RemoveSubscriptionInfo
*
*
* NOTE!: This file is auto generated - do not modify!<|fim▁hole|> *
*/
public class RemoveSubscriptionInfoTest extends BaseCommandTestSupport {
public static RemoveSubscriptionInfoTest SINGLETON = new RemoveSubscriptionInfoTest();
public Object createObject() throws Exception {
RemoveSubscriptionInfo info = new RemoveSubscriptionInfo();
populateObject(info);
return info;
}
protected void populateObject(Object object) throws Exception {
super.populateObject(object);
RemoveSubscriptionInfo info = (RemoveSubscriptionInfo) object;
info.setConnectionId(createConnectionId("ConnectionId:1"));
info.setSubscriptionName("SubcriptionName:2");
info.setClientId("ClientId:3");
}
}<|fim▁end|> | * if you need to make a change, please see the modify the groovy scripts in the
* under src/gram/script and then use maven openwire:generate to regenerate
* this file.
* |
<|file_name|>convert.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2013, Eduard Broecker
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that
# the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.
from __future__ import absolute_import, division, print_function
import logging
import sys
import click
import canmatrix.convert
import canmatrix.log
logger = logging.getLogger(__name__)
def get_formats():
input = ""
output = ""
for suppFormat, features in canmatrix.formats.supportedFormats.items():
if 'load' in features:
input += suppFormat + "\n"
if 'dump' in features:
output += suppFormat + "\n"
return (input, output)
@click.command()
# global switches
@click.option('-v', '--verbose', 'verbosity', count=True, default=1)
@click.option('-s', '--silent/--no-silent', is_flag=True, default=False, help="don't print status messages to stdout. (only errors)")
@click.option('-f', '--force_output', help="enforce output format, ignoring output file extension (e.g., -f csv).\nSupported formats for writing:\n" + get_formats()[1])
@click.option('-i', '--input_format', 'import_type', help="give hint for input format\nSupported formats for reading:\n" + get_formats()[0])
@click.option('--ignoreEncodingErrors/--no-ignoreEncodingErrors', 'ignoreEncodingErrors', default=False, help="ignore character encoding errors during export (dbc,dbf,sym)")
# manipulation and filter switches
@click.option('--deleteObsoleteDefines/--no-deleteObsoleteDefines', 'deleteObsoleteDefines', default=False, help="delete defines from all ECUs, frames and Signals\nExample --deleteObsoleteDefines")
@click.option('--deleteEcu', 'deleteEcu', help="delete Ecu from databases. (comma separated list)\nSyntax: --deleteEcu=myEcu,mySecondEcu")
@click.option('--renameEcu', 'renameEcu', help="rename Ecu from databases. (comma separated list)\nSyntax: --renameEcu=myOldEcu:myNewEcu,mySecondEcu:mySecondNewEcu")
@click.option('--deleteSignal', 'deleteSignal', help="delete Signal from databases. (comma separated list)\nSyntax: --deleteSignal=mySignal1,mySecondSignal")
@click.option('--renameSignal', 'renameSignal', help="rename Signal from databases. (comma separated list)\nSyntax: --renameSignal=myOldSignal:myNewSignal,mySecondSignal:mySecondNewSignal")
@click.option('--deleteZeroSignals/--no-deleteZeroSignals', 'deleteZeroSignals', default=False, help="delete zero length signals (signals with 0 bit length) from matrix\ndefault False")
@click.option('--deleteSignalAttributes', 'deleteSignalAttributes', help="delete attributes from all signals\nExample --deleteSignalAttributes GenMsgSomeVar,CycleTime")
@click.option('--deleteFrame', 'deleteFrame', help="delete Frame from databases. (comma separated list)\nSyntax: --deleteFrame=myFrame1,mySecondFrame")
@click.option('--renameFrame', 'renameFrame', help="rename Frame from databases. (comma separated list)\nSyntax: --renameFrame=myOldFrame:myNewFrame,mySecondFrame:mySecondNewFrame")
@click.option('--frameIdIncrement', 'frameIdIncrement', help="increment each frame.id in database by increment\nSyntax: --frameIdIncrement=increment")
@click.option('--addFrameReceiver', 'addFrameReceiver', help="add receiver Ecu to frame(s) (comma separated list)\nSyntax: --addFrameReceiver=framename:myNewEcu,mySecondEcu:myNEWEcu")
@click.option('--changeFrameId', 'changeFrameId', help="change frame.id in database\nSyntax: --changeFrameId=oldId:newId")
@click.option('--setFrameFd', 'setFrameFd', help="set Frame from database to canfd. (comma separated list)\nSyntax: --setFrameFd=myFrame1,mySecondFrame")
@click.option('--unsetFrameFd', 'unsetFrameFd', help="set Frame from database to normal (not FD). (comma separated list)\nSyntax: --unsetFrameFd=myFrame1,mySecondFrame")
@click.option('--recalcDLC', 'recalcDLC', help="recalculate dlc; max: use maximum of stored and calculated dlc; force: force new calculated dlc")
@click.option('--skipLongDlc', 'skipLongDlc', help="skip all Frames with dlc bigger than given threshold")
@click.option('--cutLongFrames', 'cutLongFrames', help="cut all signals out of Frames with dlc bigger than given threshold")
@click.option('--deleteFrameAttributes', 'deleteFrameAttributes', help="delete attributes from all frames\nExample --deleteFrameAttributes GenMsgSomeVar,CycleTime")
@click.option('--ecus', help="Copy only given ECUs (comma separated list) to target matrix; suffix 'rx' or 'tx' for selection: Example: --ecus FirstEcu:rx,SecondEcu:tx,ThirdEcu")
@click.option('--frames', help="Copy only given Frames (comma separated list) to target matrix")
@click.option('--signals', help="Copy only given Signals (comma separated list) to target matrix just as 'free' signals without containing frame")
@click.option('--merge', help="merge additional can databases.\nSyntax: --merge filename[:ecu=SOMEECU][:frame=FRAME1][:frame=FRAME2],filename2")
# arxml switches
@click.option('--arxmlIgnoreClusterInfo/--no-arxmlIgnoreClusterInfo', 'arxmlIgnoreClusterInfo', default=False, help="Ignore any can cluster info from arxml; Import all frames in one matrix\ndefault False")
@click.option('--arxmlUseXpath/--no-arxmlUseXpath', 'arxmlUseXpath', default=False, help="Use experimental Xpath-Implementation for resolving AR-Paths; \ndefault False")
@click.option('--arxmlExportVersion', 'arVersion', default="3.2.3", help="Set output AUTOSAR version\ncurrently only 3.2.3 and 4.1.0 are supported\ndefault 3.2.3")
# dbc switches
@click.option('--dbcImportEncoding', 'dbcImportEncoding', default="iso-8859-1", help="Import charset of dbc (relevant for units), maybe utf-8\ndefault iso-8859-1")
@click.option('--dbcImportCommentEncoding', 'dbcImportCommentEncoding', default="iso-8859-1", help="Import charset of Comments in dbc\ndefault iso-8859-1")
@click.option('--dbcExportEncoding', 'dbcExportEncoding', default="iso-8859-1", help="Export charset of dbc (relevant for units), maybe utf-8\ndefault iso-8859-1")
@click.option('--dbcExportCommentEncoding', 'dbcExportCommentEncoding', default="iso-8859-1", help="Export charset of comments in dbc\ndefault iso-8859-1")
@click.option('--dbcUniqueSignalNames/--no-dbcUniqueSignalNames', 'dbcUniqueSignalNames', default=True, help="Check if signal names are unique per frame")
# dbf switches
@click.option('--dbfImportEncoding', 'dbfImportEncoding', default="iso-8859-1", help="Import charset of dbf, maybe utf-8\ndefault iso-8859-1")
@click.option('--dbfExportEncoding', 'dbfExportEncoding', default="iso-8859-1", help="Export charset of dbf, maybe utf-8\ndefault iso-8859-1")
# sym switches
@click.option('--symImportEncoding', 'symImportEncoding', default="iso-8859-1", help="Import charset of sym format, maybe utf-8\ndefault iso-8859-1")
@click.option('--symExportEncoding', 'symExportEncoding', default="iso-8859-1", help="Export charset of sym format, maybe utf-8\ndefault iso-8859-1")
# xls/csv switches
@click.option('--xlsMotorolaBitFormat', 'xlsMotorolaBitFormat', default="msbreverse", help="Excel format for startbit of motorola coded signals\nValid values: msb, lsb, msbreverse\n default msbreverse")
@click.option('--additionalFrameAttributes', 'additionalFrameAttributes', default = "", help="append columns to csv/xls(x), example: is_fd")
@click.option('--additionalSignalAttributes', 'additionalSignalAttributes', default = "", help="append columns to csv/xls(x), example: is_signed,attributes[\"GenSigStartValue\"]")
@click.option('--xlsValuesInSeperateLines/--no-xlsValuesInSeperateLines', 'xlsValuesInSeperateLines', default = False, help="Excel format: create separate line for each value of signal value table\tdefault: False")
# json switches
@click.option('--jsonExportCanard/--no-jsonExportCanard', 'jsonExportCanard', default=False, help="Export Canard compatible json format")
@click.option('--jsonExportAll/--no-jsonExportAll', 'jsonExportAll', default=False, help="Export more data to json format")
@click.option('--jsonMotorolaBitFormat', 'jsonMotorolaBitFormat', default="lsb", help="Json format: startbit of motorola signals\nValid values: msb, lsb, msbreverse\n default lsb")
@click.option('--jsonNativeTypes/--no-jsonNativeTypes', 'jsonNativeTypes', default=False, help="Uses native json representation for decimals instead of string.")
# in and out file
@click.argument('infile', required=True)
@click.argument('outfile', required=True)
#
def cli_convert(infile, outfile, silent, verbosity, **options):
"""
canmatrix.cli.convert [options] import-file export-file
import-file: *.dbc|*.dbf|*.kcd|*.arxml|*.json|*.xls(x)|*.sym
export-file: *.dbc|*.dbf|*.kcd|*.arxml|*.json|*.xls(x)|*.sym|*.py
\n"""<|fim▁hole|>
root_logger = canmatrix.log.setup_logger()
if silent is True:
# only print error messages, ignore verbosity flag
verbosity = -1
options["silent"] = True
canmatrix.log.set_log_level(root_logger, verbosity)
if options["ignoreEncodingErrors"]:
options["ignoreEncodingErrors"] = "ignore"
else:
options["ignoreEncodingErrors"] = ""
canmatrix.convert.convert(infile, outfile, **options)
return 0
if __name__ == '__main__':
sys.exit(cli_convert())<|fim▁end|> | |
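# Illustrative invocations (added; file names are placeholders and the command
# name assumes the module is wired up as a console entry point):
#   convert source.dbc target.kcd
#   convert --deleteZeroSignals --ecus GATEWAY:rx in.arxml out.dbc
#   convert -f csv in.dbc out.txt    # force csv output regardless of extension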
<|file_name|>node.py<|end_file_name|><|fim▁begin|>from props.graph_representation.word import Word,NO_INDEX, strip_punctuations
import cgi
from copy import deepcopy
from props.dependency_tree.definitions import time_prep, definite_label,\
adjectival_mod_dependencies
COPULA = "SAME AS" # the textual value of a copula node
PROP = "PROP" # the textual value of a property node
RCMOD_PROP = "PROP" # the textual value of a property for rcmod node
POSSESSIVE = "POSSESS" # the textual value of a possessive node
APPOSITION = "appos" # the textual value of an apposition node
PREP = "PREP" # the textual value of a preposition node
PREP_TYPE = "TYPE" # the textual value of a preposition node's type
COND = "COND" # the textual value of a conditional node
<|fim▁hole|>CONJUNCTION = "CONJ -" # the textual value of a conjunction node
ADVERB = "ADV" # the textual value of an adverb node
EXISTENSIAL = "EXISTS" # the textual value of an existential node
COND_TYPE= PREP_TYPE # the textual value of a conditional node's type
## Node shapes
RECT_NODE_SHAPE = "rect"
DEFAULT_NODE_SHAPE = "ellipse"
PRINT_FEATURES = [("Tense",lambda t:t),
("Determiner",lambda t:"det: "+t["Value"]),
("Time Value",lambda t:"date: "+t),
("Negation", lambda t:"negated"),
("Passive Voice", lambda t:"passive"),
("Modal",lambda t:"modal: "+ " ".join(t["Value"])),
("Definite",lambda t:t),
("Modifier",lambda t:"modifer: "+t)]
global nodeCounter
nodeCounter =0
class Node:
"""
node class
represents a single node in the representation graph.
@type isPredicate: bool
@var isPredicate: denotes if this node is a predicate
@type text: list of Word object
@var text: the text contained within this node
@type features: dict
@var features: syntactic features of this node (e.g., definiteness)
@type propagateTo: list
    @var propagateTo: list of Node objects onto which the properties of this node should propagate
@type span: list
@var span: list of indices in the original sentence which this node spans
@todo think if this is needed, or consumed by Word
@type valid: bool
@var valid: debug variable, indicates if this node should be converted
@type uid: int
@var uid: unique id for this node, to be able to distinguish nodes with identical features
"""
def __init__(self,isPredicate,text,features,valid,orderText = True):
"""
initialize a node object
@type orderText: boolean
@param orderText: defines if text elements should be sorted by indices upon printing in the __str__ function
"""
self.isPredicate = isPredicate
self.text = text
self.features = features
self.valid = valid
global nodeCounter
self.uid = nodeCounter
nodeCounter +=1
self.propagateTo = []
self.orderText = orderText
self.nodeShape = DEFAULT_NODE_SHAPE
self.__str__() # calculate variables in str
def get_text(self,gr):
return self.text
def copy(self):
"""
'copy constructor'
"""
# get proper type and new uid
ret = self.__class__(isPredicate = self.isPredicate,
text = self.text,
features = self.features,
valid = self.valid)
# copy propagations
for curNode in self.propagateTo:
addSymmetricPropogation(ret, curNode)
return ret
def addPropogation(self,node):
"""
Add a node onto which this node's properties should propogate.
@type node: Node
@param node: The node onto which to propogate
"""
if node not in self.propagateTo:
self.propagateTo.append(node)
def minIndex(self):
"""
Minimum index covered by this node
@rtype: int
"""
if not self.text:
            return NO_INDEX # TODO: why is this happening?
return min([w.index for w in self.text])
def maxIndex(self):
"""
        Maximum index covered by this node
@rtype: int
"""
if not self.text:
            return NO_INDEX # TODO: why is this happening?
return max([w.index for w in self.text])
def __str__(self):
ret = '<TABLE BORDER="0" CELLSPACING="0"><TR><TD>'
filtered_spans = []
for feat,_ in PRINT_FEATURES:
if (feat in self.features) and (isinstance(self.features[feat], dict)) and ("Span" in self.features[feat]):
filtered_spans.extend(self.features[feat]["Span"])
if 'Lemma' in self.features and len(self.text)==1:
self.str = [Word(index = self.text[0].index,word=self.features['Lemma'])]
else:
ls = self.text
if self.orderText:
ls = sorted(self.text,key=lambda word:word.index)
# self.str stores the words as displayed in the node
self.str = [w for w in ls if w.index not in filtered_spans]
self.str = strip_punctuations(self.str)
ret+= " ".join([str(x) for x in self.str])
ret+="</TD></TR>"
for feat, printFunc in PRINT_FEATURES:
if feat in self.features:
if self.isPredicate and feat =="Definite":
continue
ret += "<TR><TD>"
ret+= '<FONT POINT-SIZE="10">{0}</FONT>'.format(cgi.escape(str(printFunc(self.features[feat]))))
ret+="</TD></TR>"
ret +="</TABLE>"
return ret
def __hash__(self):
return self.__str__().__hash__()
class CopularNode(Node):
"""
A class representing a copular head "BE" node.
"""
@classmethod
def init(cls,index,features,valid):
if "Lemma" in features:
del(features["Lemma"])
return cls(isPredicate=True,
text=[Word(index,COPULA)],
features=features,
valid=valid)
class PossessiveNode(Node):
"""
A class representing a copular head "HAS" node.
"""
@classmethod
def init(cls,index,features,valid):
return cls(isPredicate=True,
text=[Word(index,POSSESSIVE)],
features=features,
valid=valid)
class PropNode(Node):
"""
A class representing a prop head node
"""
@classmethod
def init(cls,features,valid,index,parent_relation):
if "Lemma" in features:
del(features["Lemma"])
ret = cls(isPredicate=True,
text=[Word(index,PROP)],
features=features,
valid=valid)
ret.parent_relation = parent_relation
return ret
def copy(self):
ret = Node.copy(self)
ret.parent_relation = self.parent_relation
if hasattr(self, 'str'):
ret.str = self.str
return ret
def is_relative(self):
if "relative" not in self.features:
return False
return self.features["relative"]
def is_prenominal(self):
# TODO: this should be a property of the edge and not the node
return (self.parent_relation == "amod")
def get_text(self,gr):
return []
class RCMODPropNode(Node):
"""
A class representing a prop head for rcmod node
"""
@classmethod
def init(cls,features,valid):
return cls(isPredicate=True,
text=[Word(NO_INDEX,RCMOD_PROP)],
features=features,
valid=valid)
def is_prenominal(self):
return False
class TimeNode(Node):
"""
A class representing a time head node
"""
@classmethod
def init(cls,features):
        ret = cls(isPredicate=False,
                  text=[Word(NO_INDEX,TIME)],
                  features=features,
                  valid=True)
        ret.nodeShape = RECT_NODE_SHAPE
        return ret
def get_text(self,gr):
neighbors = gr.neighbors(self)
ret = []
for n in neighbors:
ret.extend(n.get_text(gr))
return sorted(ret,key=lambda x:x.index)
class LocationNode(Node):
"""
A class representing a location head node
"""
@classmethod
def init(cls,features):
return cls(isPredicate=True,
text=[Word(NO_INDEX,LOCATION)],
features=features,
valid=True)
def get_text(self,gr):
neighbors = gr.neighbors(self)
ret = []
for n in neighbors:
ret.extend(n.get_text(gr))
return sorted(ret,key=lambda x:x.index)
class PrepNode(Node):
"""
A class representing a preposition head node
"""
@classmethod
def init(cls,index,prepType,features,valid):
prepType = prepType.lower()
ret = cls(isPredicate=True,
text=[Word(index,"{0}-{1}".format(PREP,prepType))],
features=features,
valid=valid)
ret.prepType = prepType
return ret
def copy(self):
ret = Node.copy(self)
ret.prepType = self.prepType
if hasattr(self, 'str'):
ret.str = self.str
return ret
def get_text(self,gr):
return [Word(index = self.text[0].index,
word = self.prepType)]
def is_time_prep(self):
return self.prepType in time_prep
class CondNode(Node):
"""
A class representing a conditional/temporal head node
"""
@classmethod
def init(cls,index,condType,features,valid):
condType = condType.lower()
ret= cls(isPredicate=True,
text=[Word(index,"{0}-{1}".format(COND,condType))],
features=features,
valid=valid)
ret.condType = condType
ret.nodeShape = RECT_NODE_SHAPE
return ret
def get_text(self,gr):
return [Word(index = self.text[0].index,
word = self.condType)]
def copy(self):
ret = Node.copy(self)
ret.condType = self.condType
return ret
class AppositionNode(Node):
"""
A class representing an apposition head node
"""
@classmethod
def init(cls,index,features):
return cls(isPredicate=False,
text=[Word(index,APPOSITION)],
features=features,
valid=False)
class ConjunctionNode(Node):
"""
A class representing an conjunction head node
"""
@classmethod
def init(cls,text,features):
"""
initialize a conjunction head node
"""
conjType = " ".join([x.word for x in sorted(text,
key=lambda word:word.index)])
text = [Word(NO_INDEX,CONJUNCTION)] + text
ret = cls(isPredicate=True,
text=text,
features=features,
valid=True)
ret.conjType = conjType
ret.__str__()
return ret
def copy(self):
ret = Node.copy(self)
ret.conjType = self.conjType
return ret
def get_text(self,gr):
neighbors = gr.neighbors(self)
ret = []
for n in neighbors:
ret.extend(n.get_text(gr))
return sorted(ret,key=lambda x:x.index)
class advNode(Node):
"""
A class representing an adverb head node
"""
@classmethod
def init(cls,features):
"""
initialize an adverb head node
"""
return cls(isPredicate=True,
text=[Word(NO_INDEX,ADVERB)],
features=features,
valid=True)
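# Illustrative only (added): the specialised node classes above are built via
# their init() classmethods rather than the bare constructor; for example:
def _example_build_prep_node():
    return PrepNode.init(index=3, prepType="in", features={}, valid=True)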
def isCopular(node):
"""
    check if this node is a copular instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is an copular instance
"""
return isinstance(node,CopularNode)
def isApposition(node):
"""
check if this node is an apposition instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is an apposition instance
"""
return isinstance(node,AppositionNode)
def isProp(node):
"""
check if this node is a prop instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is prop node instance
"""
#TODO: efficiency
return isinstance(node,PropNode)
def isRcmodProp(node):
"""
check if this node is a prop instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is prop node instance
"""
#TODO: efficiency
return isinstance(node,RCMODPropNode)
def isConjunction(node):
"""
check if this node is a conjunction instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is conjunction node instance
"""
#TODO: efficiency
return isinstance(node,ConjunctionNode)
def isPreposition(node):
"""
check if this node is a preposition instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is preposition node instance
"""
#TODO: efficiency
return isinstance(node,PrepNode)
def isTime(node):
"""
check if this node is a time instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is time node instance
"""
#TODO: efficiency
return isinstance(node,TimeNode)
def isLocation(node):
"""
check if this node is a location instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is location node instance
"""
#TODO: efficiency
return isinstance(node,LocationNode)
def isAdverb(node):
"""
check if this node is a adverb instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is adverb node instance
"""
#TODO: efficiency
return isinstance(node,advNode)
def isCondition(node):
"""
check if this node is a Cond instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is condition node instance
"""
#TODO: efficiency
return isinstance(node,CondNode)
def isDefinite(node):
return node.features.get("Definite",False) == definite_label
def isNominal(node,gr):
if node.isPredicate: #predicate
return False
if [father for father in gr.incidents(node) if isProp(father)]: #prop
return False
return True
def isPossessive(node):
"""
check if this node is a Possessive instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is possessive node instance
"""
#TODO: efficiency
return isinstance(node,PossessiveNode)
def join(node1,node2,gr):
"""
Returns a node which is the concatenation of two nodes
    Raises an error in case they have contradicting features
@type node1: Node
@param node1: first node to be joined
@type node2: Node
@param node2: second node to be joined
@rtype Node
@return a node representing the union of both nodes
"""
# make sure everything is ok
if node1.isPredicate != node2.isPredicate:
#raise Exception("Contradicting isPredicate value")
print "Contradicting isPredicate value"
if (not node1.valid) or (not node2.valid):
raise Exception("Invalid node cannot be joined")
# join all values
isPredicate = (node1.isPredicate and node2.isPredicate)
text = list(set(node1.get_text(gr)).union(node2.get_text(gr)))
features = {}
features.update(node1.features)
features.update(node2.features)
valid = node1.valid
# remove contradicting features
for k in set(node1.features).intersection(node2.features):
if node1.features[k]!=node2.features[k]:
del(features[k])
print("Contradicting features")
# return new node
return Node(isPredicate = isPredicate,
text = text,
features = features,
valid = valid)
def addSymmetricPropogation(node1,node2):
"""
    Add two nodes onto each other's propagation lists
    @type node1: Node
    @param node1: The node onto which to propagate node2
    @type node2: Node
    @param node2: The node onto which to propagate node1
"""
node1.addPropogation(node2)
node2.addPropogation(node1)
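# Illustrative helper (added; not part of the library): one way propagation
# might be consumed, copying printable features onto propagated nodes. The
# never-overwrite policy is an assumption for the sketch.
def _example_propagate_features(node):
    for other in node.propagateTo:
        for feat, _ in PRINT_FEATURES:
            if feat in node.features and feat not in other.features:
                other.features[feat] = node.features[feat]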
if __name__ == "__main__":
    copNode = CopularNode.init(index = 1,
                               features={"tense":"past"},
                               valid=True)
n = copNode.copy()<|fim▁end|> | TIME = "TIME" # the textual value of a time node
LOCATION = "LOCATION" # the textual value of a location node
|
<|file_name|>csslint.js<|end_file_name|><|fim▁begin|>/*!
CSSLint
Copyright (c) 2011 Nicole Sullivan and Nicholas C. Zakas. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/* Build time: 20-August-2012 10:49:54 */
var CSSLint = (function(){
/*!
Parser-Lib
Copyright (c) 2009-2011 Nicholas C. Zakas. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/* Version v0.1.9, Build time: 23-July-2012 10:52:31 */
var parserlib = {};
(function(){
/**
* A generic base to inherit from for any object
* that needs event handling.
* @class EventTarget
* @constructor
*/
function EventTarget(){
/**
* The array of listeners for various events.
* @type Object
* @property _listeners
* @private
*/
this._listeners = {};
}
EventTarget.prototype = {
//restore constructor
constructor: EventTarget,
/**
* Adds a listener for a given event type.
* @param {String} type The type of event to add a listener for.
* @param {Function} listener The function to call when the event occurs.
* @return {void}
* @method addListener
*/
addListener: function(type, listener){
if (!this._listeners[type]){
this._listeners[type] = [];
}
this._listeners[type].push(listener);
},
/**
* Fires an event based on the passed-in object.
* @param {Object|String} event An object with at least a 'type' attribute
* or a string indicating the event name.
* @return {void}
* @method fire
*/
fire: function(event){
if (typeof event == "string"){
event = { type: event };
}
if (typeof event.target != "undefined"){
event.target = this;
}
if (typeof event.type == "undefined"){
throw new Error("Event object missing 'type' property.");
}
if (this._listeners[event.type]){
            //create a copy of the array and use that so listeners can't change it
var listeners = this._listeners[event.type].concat();
for (var i=0, len=listeners.length; i < len; i++){
listeners[i].call(this, event);
}
}
},
/**
* Removes a listener for a given event type.
* @param {String} type The type of event to remove a listener from.
* @param {Function} listener The function to remove from the event.
* @return {void}
* @method removeListener
*/
removeListener: function(type, listener){
if (this._listeners[type]){
var listeners = this._listeners[type];
for (var i=0, len=listeners.length; i < len; i++){
if (listeners[i] === listener){
listeners.splice(i, 1);
break;
}
}
}
}
};
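// Illustrative only (added): the basic EventTarget flow; defined here for
// documentation and never invoked by the library.
function exampleEventTargetUsage(){
    var target = new EventTarget(),
        handler = function(event){
            // listeners receive the event object passed to fire()
            return event.type + " at line " + event.line;
        };
    target.addListener("warning", handler);
    target.fire({ type: "warning", line: 1, col: 1 });
    target.removeListener("warning", handler);
    return target;
}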
/**
* Convenient way to read through strings.
* @namespace parserlib.util
* @class StringReader
* @constructor
* @param {String} text The text to read.
*/
function StringReader(text){
/**
* The input text with line endings normalized.
* @property _input
* @type String
* @private
*/
this._input = text.replace(/\n\r?/g, "\n");
/**
* The row for the character to be read next.
* @property _line
* @type int
* @private
*/
this._line = 1;
/**
* The column for the character to be read next.
* @property _col
* @type int
* @private
*/
this._col = 1;
/**
* The index of the character in the input to be read next.
* @property _cursor
* @type int
* @private
*/
this._cursor = 0;
}
StringReader.prototype = {
//restore constructor
constructor: StringReader,
//-------------------------------------------------------------------------
// Position info
//-------------------------------------------------------------------------
/**
* Returns the column of the character to be read next.
* @return {int} The column of the character to be read next.
* @method getCol
*/
getCol: function(){
return this._col;
},
/**
* Returns the row of the character to be read next.
* @return {int} The row of the character to be read next.
* @method getLine
*/
getLine: function(){
return this._line ;
},
/**
* Determines if you're at the end of the input.
* @return {Boolean} True if there's no more input, false otherwise.
* @method eof
*/
eof: function(){
return (this._cursor == this._input.length);
},
//-------------------------------------------------------------------------
// Basic reading
//-------------------------------------------------------------------------
/**
* Reads the next character without advancing the cursor.
* @param {int} count How many characters to look ahead (default is 1).
* @return {String} The next character or null if there is no next character.
* @method peek
*/
peek: function(count){
var c = null;
count = (typeof count == "undefined" ? 1 : count);
//if we're not at the end of the input...
if (this._cursor < this._input.length){
            //get the character without advancing the cursor
c = this._input.charAt(this._cursor + count - 1);
}
return c;
},
/**
* Reads the next character from the input and adjusts the row and column
* accordingly.
* @return {String} The next character or null if there is no next character.
* @method read
*/
read: function(){
var c = null;
//if we're not at the end of the input...
if (this._cursor < this._input.length){
//if the last character was a newline, increment row count
//and reset column count
if (this._input.charAt(this._cursor) == "\n"){
this._line++;
this._col=1;
} else {
this._col++;
}
//get character and increment cursor and column
c = this._input.charAt(this._cursor++);
}
return c;
},
//-------------------------------------------------------------------------
// Misc
//-------------------------------------------------------------------------
/**
* Saves the current location so it can be returned to later.
* @method mark
* @return {void}
*/
mark: function(){
this._bookmark = {
cursor: this._cursor,
line: this._line,
col: this._col
};
},
reset: function(){
if (this._bookmark){
this._cursor = this._bookmark.cursor;
this._line = this._bookmark.line;
this._col = this._bookmark.col;
delete this._bookmark;
}
},
//-------------------------------------------------------------------------
// Advanced reading
//-------------------------------------------------------------------------
/**
* Reads up to and including the given string. Throws an error if that
* string is not found.
* @param {String} pattern The string to read.
* @return {String} The string when it is found.
* @throws Error when the string pattern is not found.
* @method readTo
*/
readTo: function(pattern){
var buffer = "",
c;
/*
* First, buffer must be the same length as the pattern.
* Then, buffer must end with the pattern or else reach the
* end of the input.
*/
while (buffer.length < pattern.length || buffer.lastIndexOf(pattern) != buffer.length - pattern.length){
c = this.read();
if (c){
buffer += c;
} else {
throw new Error("Expected \"" + pattern + "\" at line " + this._line + ", col " + this._col + ".");
}
}
return buffer;
},
/**
* Reads characters while each character causes the given
* filter function to return true. The function is passed
* in each character and either returns true to continue
* reading or false to stop.
* @param {Function} filter The function to read on each character.
* @return {String} The string made up of all characters that passed the
* filter check.
* @method readWhile
*/
readWhile: function(filter){
var buffer = "",
c = this.read();
while(c !== null && filter(c)){
buffer += c;
c = this.read();
}
return buffer;
},
/**
* Reads characters that match either text or a regular expression and
* returns those characters. If a match is found, the row and column
* are adjusted; if no match is found, the reader's state is unchanged.
     * @param {String|RegExp} matcher If a string, then the literal string
     * value is searched for. If a regular expression, then any string
     * matching the pattern is searched for.
* @return {String} The string made up of all characters that matched or
* null if there was no match.
* @method readMatch
*/
readMatch: function(matcher){
var source = this._input.substring(this._cursor),
value = null;
//if it's a string, just do a straight match
if (typeof matcher == "string"){
if (source.indexOf(matcher) === 0){
value = this.readCount(matcher.length);
}
} else if (matcher instanceof RegExp){
if (matcher.test(source)){
value = this.readCount(RegExp.lastMatch.length);
}
}
return value;
},
/**
* Reads a given number of characters. If the end of the input is reached,
* it reads only the remaining characters and does not throw an error.
* @param {int} count The number of characters to read.
     * @return {String} The string made up of the read characters.
* @method readCount
*/
readCount: function(count){
var buffer = "";
while(count--){
buffer += this.read();
}
return buffer;
}
};
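// Illustrative only (added): typical StringReader usage; defined for
// documentation and never invoked by the library.
function exampleStringReaderUsage(){
    var reader = new StringReader("a{color:red}");
    reader.mark();                      // bookmark the start
    var first = reader.read();          // "a"
    reader.reset();                     // rewind to the bookmark
    var upToBrace = reader.readTo("{"); // "a{" (includes the target)
    return first + upToBrace + reader.getLine();
}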
/**
* Type to use when a syntax error occurs.
* @class SyntaxError
* @namespace parserlib.util
* @constructor
* @param {String} message The error message.
* @param {int} line The line at which the error occurred.
* @param {int} col The column at which the error occurred.
*/
function SyntaxError(message, line, col){
/**
* The column at which the error occurred.
* @type int
* @property col
*/
this.col = col;
/**
* The line at which the error occurred.
* @type int
* @property line
*/
this.line = line;
/**
* The text representation of the unit.
* @type String
* @property text
*/
this.message = message;
}
//inherit from Error
SyntaxError.prototype = new Error();
/**
* Base type to represent a single syntactic unit.
* @class SyntaxUnit
* @namespace parserlib.util
* @constructor
* @param {String} text The text of the unit.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function SyntaxUnit(text, line, col, type){
/**
* The column of text on which the unit resides.
* @type int
* @property col
*/
this.col = col;
/**
* The line of text on which the unit resides.
* @type int
* @property line
*/
this.line = line;
/**
* The text representation of the unit.
* @type String
* @property text
*/
this.text = text;
/**
* The type of syntax unit.
* @type int
* @property type
*/
this.type = type;
}
/**
* Create a new syntax unit based solely on the given token.
* Convenience method for creating a new syntax unit when
* it represents a single token instead of multiple.
* @param {Object} token The token object to represent.
* @return {parserlib.util.SyntaxUnit} The object representing the token.
* @static
* @method fromToken
*/
SyntaxUnit.fromToken = function(token){
return new SyntaxUnit(token.value, token.startLine, token.startCol);
};
SyntaxUnit.prototype = {
//restore constructor
constructor: SyntaxUnit,
/**
* Returns the text representation of the unit.
* @return {String} The text representation of the unit.
* @method valueOf
*/
valueOf: function(){
return this.toString();
},
/**
* Returns the text representation of the unit.
* @return {String} The text representation of the unit.
* @method toString
*/
toString: function(){
return this.text;
}
};
/*global StringReader, SyntaxError*/
/**
* Generic TokenStream providing base functionality.
* @class TokenStreamBase
* @namespace parserlib.util
* @constructor
* @param {String|StringReader} input The text to tokenize or a reader from
* which to read the input.
*/
function TokenStreamBase(input, tokenData){
/**
* The string reader for easy access to the text.
* @type StringReader
* @property _reader
* @private
*/
this._reader = input ? new StringReader(input.toString()) : null;
/**
* Token object for the last consumed token.
* @type Token
* @property _token
* @private
*/
this._token = null;
/**
* The array of token information.
* @type Array
* @property _tokenData
* @private
*/
this._tokenData = tokenData;
/**
* Lookahead token buffer.
* @type Array
* @property _lt
* @private
*/
this._lt = [];
/**
* Lookahead token buffer index.
* @type int
* @property _ltIndex
* @private
*/
this._ltIndex = 0;
this._ltIndexCache = [];
}
/**
* Accepts an array of token information and outputs
* an array of token data containing key-value mappings
* and matching functions that the TokenStream needs.
* @param {Array} tokens An array of token descriptors.
* @return {Array} An array of processed token data.
* @method createTokenData
* @static
*/
TokenStreamBase.createTokenData = function(tokens){
var nameMap = [],
typeMap = {},
tokenData = tokens.concat([]),
i = 0,
len = tokenData.length+1;
tokenData.UNKNOWN = -1;
tokenData.unshift({name:"EOF"});
for (; i < len; i++){
nameMap.push(tokenData[i].name);
tokenData[tokenData[i].name] = i;
if (tokenData[i].text){
typeMap[tokenData[i].text] = i;
}
}
tokenData.name = function(tt){
return nameMap[tt];
};
tokenData.type = function(c){
return typeMap[c];
};
return tokenData;
};
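// Illustrative only (added): the shape createTokenData() produces; the token
// names here are invented for the example.
function exampleCreateTokenData(){
    var data = TokenStreamBase.createTokenData([
        { name: "IDENT" },
        { name: "LBRACE", text: "{" }
    ]);
    // EOF is unshifted to index 0, so data.IDENT === 1,
    // data.type("{") === 2 and data.name(2) === "LBRACE"
    return data;
}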
TokenStreamBase.prototype = {
//restore constructor
constructor: TokenStreamBase,
//-------------------------------------------------------------------------
// Matching methods
//-------------------------------------------------------------------------
/**
* Determines if the next token matches the given token type.
* If so, that token is consumed; if not, the token is placed
* back onto the token stream. You can pass in any number of
* token types and this will return true if any of the token
* types is found.
* @param {int|int[]} tokenTypes Either a single token type or an array of
* token types that the next token might be. If an array is passed,
* it's assumed that the token can be any of these.
* @param {variant} channel (Optional) The channel to read from. If not
* provided, reads from the default (unnamed) channel.
* @return {Boolean} True if the token type matches, false if not.
* @method match
*/
match: function(tokenTypes, channel){
//always convert to an array, makes things easier
if (!(tokenTypes instanceof Array)){
tokenTypes = [tokenTypes];
}
var tt = this.get(channel),
i = 0,
len = tokenTypes.length;
while(i < len){
if (tt == tokenTypes[i++]){
return true;
}
}
//no match found, put the token back
this.unget();
return false;
},
/**
* Determines if the next token matches the given token type.
* If so, that token is consumed; if not, an error is thrown.
* @param {int|int[]} tokenTypes Either a single token type or an array of
* token types that the next token should be. If an array is passed,
* it's assumed that the token must be one of these.
* @param {variant} channel (Optional) The channel to read from. If not
* provided, reads from the default (unnamed) channel.
* @return {void}
* @method mustMatch
*/
mustMatch: function(tokenTypes, channel){
var token;
//always convert to an array, makes things easier
if (!(tokenTypes instanceof Array)){
tokenTypes = [tokenTypes];
}
if (!this.match.apply(this, arguments)){
token = this.LT(1);
throw new SyntaxError("Expected " + this._tokenData[tokenTypes[0]].name +
" at line " + token.startLine + ", col " + token.startCol + ".", token.startLine, token.startCol);
}
},
//-------------------------------------------------------------------------
// Consuming methods
//-------------------------------------------------------------------------
/**
* Keeps reading from the token stream until either one of the specified
* token types is found or until the end of the input is reached.
* @param {int|int[]} tokenTypes Either a single token type or an array of
* token types that the next token should be. If an array is passed,
* it's assumed that the token must be one of these.
* @param {variant} channel (Optional) The channel to read from. If not
* provided, reads from the default (unnamed) channel.
* @return {void}
* @method advance
*/
advance: function(tokenTypes, channel){
while(this.LA(0) !== 0 && !this.match(tokenTypes, channel)){
this.get();
}
return this.LA(0);
},
/**
* Consumes the next token from the token stream.
* @return {int} The token type of the token that was just consumed.
* @method get
*/
get: function(channel){
var tokenInfo = this._tokenData,
reader = this._reader,
value,
i =0,
len = tokenInfo.length,
found = false,
token,
info;
//check the lookahead buffer first
if (this._lt.length && this._ltIndex >= 0 && this._ltIndex < this._lt.length){
i++;
this._token = this._lt[this._ltIndex++];
info = tokenInfo[this._token.type];
//obey channels logic
while((info.channel !== undefined && channel !== info.channel) &&
this._ltIndex < this._lt.length){
this._token = this._lt[this._ltIndex++];
info = tokenInfo[this._token.type];
i++;
}
//here be dragons
if ((info.channel === undefined || channel === info.channel) &&
this._ltIndex <= this._lt.length){
this._ltIndexCache.push(i);
return this._token.type;
}
}
//call token retriever method
token = this._getToken();
//if it should be hidden, don't save a token
if (token.type > -1 && !tokenInfo[token.type].hide){
//apply token channel
token.channel = tokenInfo[token.type].channel;
//save for later
this._token = token;
this._lt.push(token);
//save space that will be moved (must be done before array is truncated)
this._ltIndexCache.push(this._lt.length - this._ltIndex + i);
//keep the buffer under 5 items
if (this._lt.length > 5){
this._lt.shift();
}
//also keep the shift buffer under 5 items
if (this._ltIndexCache.length > 5){
this._ltIndexCache.shift();
}
//update lookahead index
this._ltIndex = this._lt.length;
}
/*
* Skip to the next token if:
* 1. The token type is marked as hidden.
* 2. The token type has a channel specified and it isn't the current channel.
*/
info = tokenInfo[token.type];
if (info &&
(info.hide ||
(info.channel !== undefined && channel !== info.channel))){
return this.get(channel);
} else {
//return just the type
return token.type;
}
},
/**
* Looks ahead a certain number of tokens and returns the token type at
* that position. This will throw an error if you lookahead past the
* end of input, past the size of the lookahead buffer, or back past
* the first token in the lookahead buffer.
     * @param {int} index The index of the token type to retrieve. 0 for the
     * current token, 1 for the next, -1 for the previous, etc.
* @return {int} The token type of the token in the given position.
* @method LA
*/
LA: function(index){
var total = index,
tt;
if (index > 0){
//TODO: Store 5 somewhere
if (index > 5){
throw new Error("Too much lookahead.");
}
//get all those tokens
while(total){
tt = this.get();
total--;
}
//unget all those tokens
while(total < index){
this.unget();
total++;
}
} else if (index < 0){
if(this._lt[this._ltIndex+index]){
tt = this._lt[this._ltIndex+index].type;
} else {
throw new Error("Too much lookbehind.");
}
} else {
tt = this._token.type;
}
return tt;
},
/**
* Looks ahead a certain number of tokens and returns the token at
* that position. This will throw an error if you lookahead past the
* end of input, past the size of the lookahead buffer, or back past
* the first token in the lookahead buffer.
     * @param {int} index The index of the token to retrieve. 0 for the
     * current token, 1 for the next, -1 for the previous, etc.
     * @return {Object} The token in the given position.
     * @method LT
*/
LT: function(index){
//lookahead first to prime the token buffer
this.LA(index);
//now find the token, subtract one because _ltIndex is already at the next index
return this._lt[this._ltIndex+index-1];
},
/**
* Returns the token type for the next token in the stream without
* consuming it.
* @return {int} The token type of the next token in the stream.
* @method peek
*/
peek: function(){
return this.LA(1);
},
/**
* Returns the actual token object for the last consumed token.
* @return {Token} The token object for the last consumed token.
* @method token
*/
token: function(){
return this._token;
},
/**
* Returns the name of the token for the given token type.
* @param {int} tokenType The type of token to get the name of.
* @return {String} The name of the token or "UNKNOWN_TOKEN" for any
* invalid token type.
* @method tokenName
*/
tokenName: function(tokenType){
if (tokenType < 0 || tokenType > this._tokenData.length){
return "UNKNOWN_TOKEN";
} else {
return this._tokenData[tokenType].name;
}
},
/**
* Returns the token type value for the given token name.
* @param {String} tokenName The name of the token whose value should be returned.
* @return {int} The token type value for the given token name or -1
* for an unknown token.
     * @method tokenType
*/
tokenType: function(tokenName){
return this._tokenData[tokenName] || -1;
},
/**
* Returns the last consumed token to the token stream.
* @method unget
*/
unget: function(){
//if (this._ltIndex > -1){
if (this._ltIndexCache.length){
this._ltIndex -= this._ltIndexCache.pop();//--;
this._token = this._lt[this._ltIndex - 1];
} else {
throw new Error("Too much lookahead.");
}
}
};
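// Illustrative only (added): how a concrete tokenizer built on TokenStreamBase
// is typically driven; the stream instance and token-type table are passed in
// as assumptions for the example.
function exampleTokenStreamUsage(stream, Tokens){
    if (stream.match(Tokens.IDENT)){ // consumed only if it is an identifier
        stream.unget();              // push the consumed token back
    }
    return stream.LA(1);             // peek at the next token type
}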
parserlib.util = {
StringReader: StringReader,
SyntaxError : SyntaxError,
SyntaxUnit : SyntaxUnit,
EventTarget : EventTarget,
TokenStreamBase : TokenStreamBase
};
})();
/*
Parser-Lib
Copyright (c) 2009-2011 Nicholas C. Zakas. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
/* Version v0.1.9, Build time: 23-July-2012 10:52:31 */
(function(){
var EventTarget = parserlib.util.EventTarget,
TokenStreamBase = parserlib.util.TokenStreamBase,
StringReader = parserlib.util.StringReader,
SyntaxError = parserlib.util.SyntaxError,
SyntaxUnit = parserlib.util.SyntaxUnit;
var Colors = {
aliceblue :"#f0f8ff",
antiquewhite :"#faebd7",
aqua :"#00ffff",
aquamarine :"#7fffd4",
azure :"#f0ffff",
beige :"#f5f5dc",
bisque :"#ffe4c4",
black :"#000000",
blanchedalmond :"#ffebcd",
blue :"#0000ff",
blueviolet :"#8a2be2",
brown :"#a52a2a",
burlywood :"#deb887",
cadetblue :"#5f9ea0",
chartreuse :"#7fff00",
chocolate :"#d2691e",
coral :"#ff7f50",
cornflowerblue :"#6495ed",
cornsilk :"#fff8dc",
crimson :"#dc143c",
cyan :"#00ffff",
darkblue :"#00008b",
darkcyan :"#008b8b",
darkgoldenrod :"#b8860b",
darkgray :"#a9a9a9",
darkgreen :"#006400",
darkkhaki :"#bdb76b",
darkmagenta :"#8b008b",
darkolivegreen :"#556b2f",
darkorange :"#ff8c00",
darkorchid :"#9932cc",
darkred :"#8b0000",
darksalmon :"#e9967a",
darkseagreen :"#8fbc8f",
darkslateblue :"#483d8b",
darkslategray :"#2f4f4f",
darkturquoise :"#00ced1",
darkviolet :"#9400d3",
deeppink :"#ff1493",
deepskyblue :"#00bfff",
dimgray :"#696969",
dodgerblue :"#1e90ff",
firebrick :"#b22222",
floralwhite :"#fffaf0",
forestgreen :"#228b22",
fuchsia :"#ff00ff",
gainsboro :"#dcdcdc",
ghostwhite :"#f8f8ff",
gold :"#ffd700",
goldenrod :"#daa520",
gray :"#808080",
green :"#008000",
greenyellow :"#adff2f",
honeydew :"#f0fff0",
hotpink :"#ff69b4",
indianred :"#cd5c5c",
indigo :"#4b0082",
ivory :"#fffff0",
khaki :"#f0e68c",
lavender :"#e6e6fa",
lavenderblush :"#fff0f5",
lawngreen :"#7cfc00",
lemonchiffon :"#fffacd",
lightblue :"#add8e6",
lightcoral :"#f08080",
lightcyan :"#e0ffff",
lightgoldenrodyellow :"#fafad2",
lightgray :"#d3d3d3",
lightgreen :"#90ee90",
lightpink :"#ffb6c1",
lightsalmon :"#ffa07a",
lightseagreen :"#20b2aa",
lightskyblue :"#87cefa",
lightslategray :"#778899",
lightsteelblue :"#b0c4de",
lightyellow :"#ffffe0",
lime :"#00ff00",
limegreen :"#32cd32",
linen :"#faf0e6",
magenta :"#ff00ff",
maroon :"#800000",
mediumaquamarine:"#66cdaa",
mediumblue :"#0000cd",
mediumorchid :"#ba55d3",
mediumpurple :"#9370d8",
mediumseagreen :"#3cb371",
mediumslateblue :"#7b68ee",
mediumspringgreen :"#00fa9a",
mediumturquoise :"#48d1cc",
mediumvioletred :"#c71585",
midnightblue :"#191970",
mintcream :"#f5fffa",
mistyrose :"#ffe4e1",
moccasin :"#ffe4b5",
navajowhite :"#ffdead",
navy :"#000080",
oldlace :"#fdf5e6",
olive :"#808000",
olivedrab :"#6b8e23",
orange :"#ffa500",
orangered :"#ff4500",
orchid :"#da70d6",
palegoldenrod :"#eee8aa",
palegreen :"#98fb98",
paleturquoise :"#afeeee",
palevioletred :"#d87093",
papayawhip :"#ffefd5",
peachpuff :"#ffdab9",
peru :"#cd853f",
pink :"#ffc0cb",
plum :"#dda0dd",
powderblue :"#b0e0e6",
purple :"#800080",
red :"#ff0000",
rosybrown :"#bc8f8f",
royalblue :"#4169e1",
saddlebrown :"#8b4513",
salmon :"#fa8072",
sandybrown :"#f4a460",
seagreen :"#2e8b57",
seashell :"#fff5ee",
sienna :"#a0522d",
silver :"#c0c0c0",
skyblue :"#87ceeb",
slateblue :"#6a5acd",
slategray :"#708090",
snow :"#fffafa",
springgreen :"#00ff7f",
steelblue :"#4682b4",
tan :"#d2b48c",
teal :"#008080",
thistle :"#d8bfd8",
tomato :"#ff6347",
turquoise :"#40e0d0",
violet :"#ee82ee",
wheat :"#f5deb3",
white :"#ffffff",
whitesmoke :"#f5f5f5",
yellow :"#ffff00",
yellowgreen :"#9acd32"
};
/*global SyntaxUnit, Parser*/
/**
 * Represents a selector combinator (whitespace, +, >, ~).
* @namespace parserlib.css
* @class Combinator
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {String} text The text representation of the unit.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function Combinator(text, line, col){
SyntaxUnit.call(this, text, line, col, Parser.COMBINATOR_TYPE);
/**
     * The type of combinator.
* @type String
* @property type
*/
this.type = "unknown";
//pretty simple
if (/^\s+$/.test(text)){
this.type = "descendant";
} else if (text == ">"){
this.type = "child";
} else if (text == "+"){
this.type = "adjacent-sibling";
} else if (text == "~"){
this.type = "sibling";
}
}
Combinator.prototype = new SyntaxUnit();
Combinator.prototype.constructor = Combinator;
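//Illustrative note: in the selector "ul > li + a", the parser produces
//Combinators of type "child" (>) and "adjacent-sibling" (+); the plain
//space in "ul li" yields a "descendant" combinator.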
/*global SyntaxUnit, Parser*/
/**
* Represents a media feature, such as max-width:500.
* @namespace parserlib.css
* @class MediaFeature
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {SyntaxUnit} name The name of the feature.
* @param {SyntaxUnit} value The value of the feature or null if none.
*/
function MediaFeature(name, value){
SyntaxUnit.call(this, "(" + name + (value !== null ? ":" + value : "") + ")", name.startLine, name.startCol, Parser.MEDIA_FEATURE_TYPE);
/**
* The name of the media feature
* @type String
* @property name
*/
this.name = name;
/**
* The value for the feature or null if there is none.
* @type SyntaxUnit
* @property value
*/
this.value = value;
}
MediaFeature.prototype = new SyntaxUnit();
MediaFeature.prototype.constructor = MediaFeature;
/*global SyntaxUnit, Parser*/
/**
* Represents an individual media query.
* @namespace parserlib.css
* @class MediaQuery
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {String} modifier The modifier "not" or "only" (or null).
 * @param {String} mediaType The type of media (e.g., "print").
 * @param {Array} features Array of media features making up this query.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function MediaQuery(modifier, mediaType, features, line, col){
SyntaxUnit.call(this, (modifier ? modifier + " ": "") + (mediaType ? mediaType + " " : "") + features.join(" and "), line, col, Parser.MEDIA_QUERY_TYPE);
/**
* The media modifier ("not" or "only")
* @type String
* @property modifier
*/
this.modifier = modifier;
/**
     * The mediaType (e.g., "print")
* @type String
* @property mediaType
*/
this.mediaType = mediaType;
/**
     * The media features that make up the query.
* @type Array
* @property features
*/
this.features = features;
}
MediaQuery.prototype = new SyntaxUnit();
MediaQuery.prototype.constructor = MediaQuery;
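//Illustrative note: for the query "only screen and (max-width: 500px)",
//modifier is "only", mediaType is "screen", and features holds a single
//MediaFeature whose text is "(max-width:500px)".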
/*global Tokens, TokenStream, SyntaxError, Properties, Validation, ValidationError, SyntaxUnit,
PropertyValue, PropertyValuePart, SelectorPart, SelectorSubPart, Selector,
PropertyName, Combinator, MediaFeature, MediaQuery, EventTarget */
/**
* A CSS3 parser.
* @namespace parserlib.css
* @class Parser
* @constructor
* @param {Object} options (Optional) Various options for the parser:
* starHack (true|false) to allow IE6 star hack as valid,
* underscoreHack (true|false) to interpret leading underscores
* as IE6-7 targeting for known properties, ieFilters (true|false)
* to indicate that IE < 8 filters should be accepted and not throw
* syntax errors.
*/
function Parser(options){
//inherit event functionality
EventTarget.call(this);
this.options = options || {};
this._tokenStream = null;
}
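/*
 * Usage sketch (illustrative, not part of the library): the parser is
 * event-driven, so callers attach listeners for the events fired below
 * ("startstylesheet", "property", "error", ...) via the addListener()
 * API assumed from this library's EventTarget. The CSS input is made up.
 *
 *     var parser = new Parser({ starHack: true, underscoreHack: true });
 *     parser.addListener("property", function(event){
 *         console.log(event.property + ": " + event.value);
 *     });
 *     parser.addListener("error", function(event){
 *         console.log("Parse error: " + event.message);
 *     });
 *     parser.parse("a { color: #ff0000; }");
 */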
//Static constants
Parser.DEFAULT_TYPE = 0;
Parser.COMBINATOR_TYPE = 1;
Parser.MEDIA_FEATURE_TYPE = 2;
Parser.MEDIA_QUERY_TYPE = 3;
Parser.PROPERTY_NAME_TYPE = 4;
Parser.PROPERTY_VALUE_TYPE = 5;
Parser.PROPERTY_VALUE_PART_TYPE = 6;
Parser.SELECTOR_TYPE = 7;
Parser.SELECTOR_PART_TYPE = 8;
Parser.SELECTOR_SUB_PART_TYPE = 9;
Parser.prototype = function(){
var proto = new EventTarget(), //new prototype
prop,
additions = {
//restore constructor
constructor: Parser,
//instance constants - yuck
DEFAULT_TYPE : 0,
COMBINATOR_TYPE : 1,
MEDIA_FEATURE_TYPE : 2,
MEDIA_QUERY_TYPE : 3,
PROPERTY_NAME_TYPE : 4,
PROPERTY_VALUE_TYPE : 5,
PROPERTY_VALUE_PART_TYPE : 6,
SELECTOR_TYPE : 7,
SELECTOR_PART_TYPE : 8,
SELECTOR_SUB_PART_TYPE : 9,
//-----------------------------------------------------------------
// Grammar
//-----------------------------------------------------------------
_stylesheet: function(){
/*
* stylesheet
* : [ CHARSET_SYM S* STRING S* ';' ]?
* [S|CDO|CDC]* [ import [S|CDO|CDC]* ]*
* [ namespace [S|CDO|CDC]* ]*
* [ [ ruleset | media | page | font_face | keyframes ] [S|CDO|CDC]* ]*
* ;
*/
var tokenStream = this._tokenStream,
charset = null,
count,
token,
tt;
this.fire("startstylesheet");
//try to read character set
this._charset();
this._skipCruft();
//try to read imports - may be more than one
while (tokenStream.peek() == Tokens.IMPORT_SYM){
this._import();
this._skipCruft();
}
//try to read namespaces - may be more than one
while (tokenStream.peek() == Tokens.NAMESPACE_SYM){
this._namespace();
this._skipCruft();
}
//get the next token
tt = tokenStream.peek();
//try to read the rest
while(tt > Tokens.EOF){
try {
switch(tt){
case Tokens.MEDIA_SYM:
this._media();
this._skipCruft();
break;
case Tokens.PAGE_SYM:
this._page();
this._skipCruft();
break;
case Tokens.FONT_FACE_SYM:
this._font_face();
this._skipCruft();
break;
case Tokens.KEYFRAMES_SYM:
this._keyframes();
this._skipCruft();
break;
case Tokens.UNKNOWN_SYM: //unknown @ rule
tokenStream.get();
if (!this.options.strict){
//fire error event
this.fire({
type: "error",
error: null,
message: "Unknown @ rule: " + tokenStream.LT(0).value + ".",
line: tokenStream.LT(0).startLine,
col: tokenStream.LT(0).startCol
});
//skip braces
count=0;
while (tokenStream.advance([Tokens.LBRACE, Tokens.RBRACE]) == Tokens.LBRACE){
count++; //keep track of nesting depth
}
while(count){
tokenStream.advance([Tokens.RBRACE]);
count--;
}
} else {
                                        //in strict mode, an unknown @ rule is a syntax error
throw new SyntaxError("Unknown @ rule.", tokenStream.LT(0).startLine, tokenStream.LT(0).startCol);
}
break;
case Tokens.S:
this._readWhitespace();
break;
default:
if(!this._ruleset()){
//error handling for known issues
switch(tt){
case Tokens.CHARSET_SYM:
token = tokenStream.LT(1);
this._charset(false);
throw new SyntaxError("@charset not allowed here.", token.startLine, token.startCol);
case Tokens.IMPORT_SYM:
token = tokenStream.LT(1);
this._import(false);
throw new SyntaxError("@import not allowed here.", token.startLine, token.startCol);
case Tokens.NAMESPACE_SYM:
token = tokenStream.LT(1);
this._namespace(false);
throw new SyntaxError("@namespace not allowed here.", token.startLine, token.startCol);
default:
tokenStream.get(); //get the last token
this._unexpectedToken(tokenStream.token());
}
}
}
} catch(ex) {
if (ex instanceof SyntaxError && !this.options.strict){
this.fire({
type: "error",
error: ex,
message: ex.message,
line: ex.line,
col: ex.col
});
} else {
throw ex;
}
}
tt = tokenStream.peek();
}
if (tt != Tokens.EOF){
this._unexpectedToken(tokenStream.token());
}
this.fire("endstylesheet");
},
_charset: function(emit){
var tokenStream = this._tokenStream,
charset,
token,
line,
col;
if (tokenStream.match(Tokens.CHARSET_SYM)){
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
this._readWhitespace();
tokenStream.mustMatch(Tokens.STRING);
token = tokenStream.token();
charset = token.value;
this._readWhitespace();
tokenStream.mustMatch(Tokens.SEMICOLON);
if (emit !== false){
this.fire({
type: "charset",
charset:charset,
line: line,
col: col
});
}
}
},
_import: function(emit){
/*
* import
* : IMPORT_SYM S*
* [STRING|URI] S* media_query_list? ';' S*
*/
var tokenStream = this._tokenStream,
tt,
uri,
importToken,
mediaList = [];
//read import symbol
tokenStream.mustMatch(Tokens.IMPORT_SYM);
importToken = tokenStream.token();
this._readWhitespace();
tokenStream.mustMatch([Tokens.STRING, Tokens.URI]);
//grab the URI value
uri = tokenStream.token().value.replace(/(?:url\()?["']([^"']+)["']\)?/, "$1");
this._readWhitespace();
mediaList = this._media_query_list();
//must end with a semicolon
tokenStream.mustMatch(Tokens.SEMICOLON);
this._readWhitespace();
if (emit !== false){
this.fire({
type: "import",
uri: uri,
media: mediaList,
line: importToken.startLine,
col: importToken.startCol
});
}
},
_namespace: function(emit){
/*
* namespace
* : NAMESPACE_SYM S* [namespace_prefix S*]? [STRING|URI] S* ';' S*
*/
var tokenStream = this._tokenStream,
line,
col,
prefix,
uri;
//read import symbol
tokenStream.mustMatch(Tokens.NAMESPACE_SYM);
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
this._readWhitespace();
//it's a namespace prefix - no _namespace_prefix() method because it's just an IDENT
if (tokenStream.match(Tokens.IDENT)){
prefix = tokenStream.token().value;
this._readWhitespace();
}
tokenStream.mustMatch([Tokens.STRING, Tokens.URI]);
/*if (!tokenStream.match(Tokens.STRING)){
tokenStream.mustMatch(Tokens.URI);
}*/
//grab the URI value
uri = tokenStream.token().value.replace(/(?:url\()?["']([^"']+)["']\)?/, "$1");
this._readWhitespace();
//must end with a semicolon
tokenStream.mustMatch(Tokens.SEMICOLON);
this._readWhitespace();
if (emit !== false){
this.fire({
type: "namespace",
prefix: prefix,
uri: uri,
line: line,
col: col
});
}
},
_media: function(){
/*
* media
* : MEDIA_SYM S* media_query_list S* '{' S* ruleset* '}' S*
* ;
*/
var tokenStream = this._tokenStream,
line,
col,
mediaList;// = [];
//look for @media
tokenStream.mustMatch(Tokens.MEDIA_SYM);
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
this._readWhitespace();
mediaList = this._media_query_list();
tokenStream.mustMatch(Tokens.LBRACE);
this._readWhitespace();
this.fire({
type: "startmedia",
media: mediaList,
line: line,
col: col
});
while(true) {
if (tokenStream.peek() == Tokens.PAGE_SYM){
this._page();
} else if (!this._ruleset()){
break;
}
}
tokenStream.mustMatch(Tokens.RBRACE);
this._readWhitespace();
this.fire({
type: "endmedia",
media: mediaList,
line: line,
col: col
});
},
//CSS3 Media Queries
_media_query_list: function(){
/*
* media_query_list
* : S* [media_query [ ',' S* media_query ]* ]?
* ;
*/
var tokenStream = this._tokenStream,
mediaList = [];
this._readWhitespace();
if (tokenStream.peek() == Tokens.IDENT || tokenStream.peek() == Tokens.LPAREN){
mediaList.push(this._media_query());
}
while(tokenStream.match(Tokens.COMMA)){
this._readWhitespace();
mediaList.push(this._media_query());
}
return mediaList;
},
/*
* Note: "expression" in the grammar maps to the _media_expression
* method.
*/
_media_query: function(){
/*
* media_query
* : [ONLY | NOT]? S* media_type S* [ AND S* expression ]*
* | expression [ AND S* expression ]*
* ;
*/
var tokenStream = this._tokenStream,
type = null,
ident = null,
token = null,
expressions = [];
if (tokenStream.match(Tokens.IDENT)){
ident = tokenStream.token().value.toLowerCase();
//since there's no custom tokens for these, need to manually check
if (ident != "only" && ident != "not"){
tokenStream.unget();
ident = null;
} else {
token = tokenStream.token();
}
}
this._readWhitespace();
if (tokenStream.peek() == Tokens.IDENT){
type = this._media_type();
if (token === null){
token = tokenStream.token();
}
} else if (tokenStream.peek() == Tokens.LPAREN){
if (token === null){
token = tokenStream.LT(1);
}
expressions.push(this._media_expression());
}
if (type === null && expressions.length === 0){
return null;
} else {
this._readWhitespace();
while (tokenStream.match(Tokens.IDENT)){
if (tokenStream.token().value.toLowerCase() != "and"){
this._unexpectedToken(tokenStream.token());
}
this._readWhitespace();
expressions.push(this._media_expression());
}
}
return new MediaQuery(ident, type, expressions, token.startLine, token.startCol);
},
//CSS3 Media Queries
_media_type: function(){
/*
* media_type
* : IDENT
* ;
*/
return this._media_feature();
},
/**
* Note: in CSS3 Media Queries, this is called "expression".
* Renamed here to avoid conflict with CSS3 Selectors
* definition of "expression". Also note that "expr" in the
* grammar now maps to "expression" from CSS3 selectors.
* @method _media_expression
* @private
*/
_media_expression: function(){
/*
* expression
* : '(' S* media_feature S* [ ':' S* expr ]? ')' S*
* ;
*/
var tokenStream = this._tokenStream,
feature = null,
token,
expression = null;
tokenStream.mustMatch(Tokens.LPAREN);
feature = this._media_feature();
this._readWhitespace();
if (tokenStream.match(Tokens.COLON)){
this._readWhitespace();
token = tokenStream.LT(1);
expression = this._expression();
}
tokenStream.mustMatch(Tokens.RPAREN);
this._readWhitespace();
return new MediaFeature(feature, (expression ? new SyntaxUnit(expression, token.startLine, token.startCol) : null));
},
//CSS3 Media Queries
_media_feature: function(){
/*
* media_feature
* : IDENT
* ;
*/
var tokenStream = this._tokenStream;
tokenStream.mustMatch(Tokens.IDENT);
return SyntaxUnit.fromToken(tokenStream.token());
},
//CSS3 Paged Media
_page: function(){
/*
* page:
* PAGE_SYM S* IDENT? pseudo_page? S*
* '{' S* [ declaration | margin ]? [ ';' S* [ declaration | margin ]? ]* '}' S*
* ;
*/
var tokenStream = this._tokenStream,
line,
col,
identifier = null,
pseudoPage = null;
//look for @page
tokenStream.mustMatch(Tokens.PAGE_SYM);
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
this._readWhitespace();
if (tokenStream.match(Tokens.IDENT)){
identifier = tokenStream.token().value;
//The value 'auto' may not be used as a page name and MUST be treated as a syntax error.
if (identifier.toLowerCase() === "auto"){
this._unexpectedToken(tokenStream.token());
}
}
//see if there's a colon upcoming
if (tokenStream.peek() == Tokens.COLON){
pseudoPage = this._pseudo_page();
}
this._readWhitespace();
this.fire({
type: "startpage",
id: identifier,
pseudo: pseudoPage,
line: line,
col: col
});
this._readDeclarations(true, true);
this.fire({
type: "endpage",
id: identifier,
pseudo: pseudoPage,
line: line,
col: col
});
},
//CSS3 Paged Media
_margin: function(){
/*
* margin :
* margin_sym S* '{' declaration [ ';' S* declaration? ]* '}' S*
* ;
*/
var tokenStream = this._tokenStream,
line,
col,
marginSym = this._margin_sym();
if (marginSym){
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
this.fire({
type: "startpagemargin",
margin: marginSym,
line: line,
col: col
});
this._readDeclarations(true);
this.fire({
type: "endpagemargin",
margin: marginSym,
line: line,
col: col
});
return true;
} else {
return false;
}
},
//CSS3 Paged Media
_margin_sym: function(){
/*
* margin_sym :
* TOPLEFTCORNER_SYM |
* TOPLEFT_SYM |
* TOPCENTER_SYM |
* TOPRIGHT_SYM |
* TOPRIGHTCORNER_SYM |
* BOTTOMLEFTCORNER_SYM |
* BOTTOMLEFT_SYM |
* BOTTOMCENTER_SYM |
* BOTTOMRIGHT_SYM |
* BOTTOMRIGHTCORNER_SYM |
* LEFTTOP_SYM |
* LEFTMIDDLE_SYM |
* LEFTBOTTOM_SYM |
* RIGHTTOP_SYM |
* RIGHTMIDDLE_SYM |
* RIGHTBOTTOM_SYM
* ;
*/
var tokenStream = this._tokenStream;
if(tokenStream.match([Tokens.TOPLEFTCORNER_SYM, Tokens.TOPLEFT_SYM,
Tokens.TOPCENTER_SYM, Tokens.TOPRIGHT_SYM, Tokens.TOPRIGHTCORNER_SYM,
Tokens.BOTTOMLEFTCORNER_SYM, Tokens.BOTTOMLEFT_SYM,
Tokens.BOTTOMCENTER_SYM, Tokens.BOTTOMRIGHT_SYM,
Tokens.BOTTOMRIGHTCORNER_SYM, Tokens.LEFTTOP_SYM,
Tokens.LEFTMIDDLE_SYM, Tokens.LEFTBOTTOM_SYM, Tokens.RIGHTTOP_SYM,
Tokens.RIGHTMIDDLE_SYM, Tokens.RIGHTBOTTOM_SYM]))
{
return SyntaxUnit.fromToken(tokenStream.token());
} else {
return null;
}
},
_pseudo_page: function(){
/*
* pseudo_page
* : ':' IDENT
* ;
*/
var tokenStream = this._tokenStream;
tokenStream.mustMatch(Tokens.COLON);
tokenStream.mustMatch(Tokens.IDENT);
//TODO: CSS3 Paged Media says only "left", "center", and "right" are allowed
return tokenStream.token().value;
},
_font_face: function(){
/*
* font_face
* : FONT_FACE_SYM S*
* '{' S* declaration [ ';' S* declaration ]* '}' S*
* ;
*/
var tokenStream = this._tokenStream,
line,
col;
                //look for @font-face
tokenStream.mustMatch(Tokens.FONT_FACE_SYM);
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
this._readWhitespace();
this.fire({
type: "startfontface",
line: line,
col: col
});
this._readDeclarations(true);
this.fire({
type: "endfontface",
line: line,
col: col
});
},
_operator: function(){
/*
* operator
* : '/' S* | ',' S* | /( empty )/
* ;
*/
var tokenStream = this._tokenStream,
token = null;
if (tokenStream.match([Tokens.SLASH, Tokens.COMMA])){
token = tokenStream.token();
this._readWhitespace();
}
return token ? PropertyValuePart.fromToken(token) : null;
},
_combinator: function(){
/*
* combinator
* : PLUS S* | GREATER S* | TILDE S* | S+
* ;
*/
var tokenStream = this._tokenStream,
value = null,
token;
if(tokenStream.match([Tokens.PLUS, Tokens.GREATER, Tokens.TILDE])){
token = tokenStream.token();
value = new Combinator(token.value, token.startLine, token.startCol);
this._readWhitespace();
}
return value;
},
_unary_operator: function(){
/*
* unary_operator
* : '-' | '+'
* ;
*/
var tokenStream = this._tokenStream;
if (tokenStream.match([Tokens.MINUS, Tokens.PLUS])){
return tokenStream.token().value;
} else {
return null;
}
},
_property: function(){
/*
* property
* : IDENT S*
* ;
*/
var tokenStream = this._tokenStream,
value = null,
hack = null,
tokenValue,
token,
line,
col;
//check for star hack - throws error if not allowed
if (tokenStream.peek() == Tokens.STAR && this.options.starHack){
tokenStream.get();
token = tokenStream.token();
hack = token.value;
line = token.startLine;
col = token.startCol;
}
if(tokenStream.match(Tokens.IDENT)){
token = tokenStream.token();
tokenValue = token.value;
//check for underscore hack - no error if not allowed because it's valid CSS syntax
if (tokenValue.charAt(0) == "_" && this.options.underscoreHack){
hack = "_";
tokenValue = tokenValue.substring(1);
}
value = new PropertyName(tokenValue, hack, (line||token.startLine), (col||token.startCol));
this._readWhitespace();
}
                return value;
},
//Augmented with CSS3 Selectors
_ruleset: function(){
/*
* ruleset
* : selectors_group
* '{' S* declaration? [ ';' S* declaration? ]* '}' S*
* ;
*/
var tokenStream = this._tokenStream,
tt,
selectors;
/*
* Error Recovery: If even a single selector fails to parse,
* then the entire ruleset should be thrown away.
*/
try {
selectors = this._selectors_group();
} catch (ex){
if (ex instanceof SyntaxError && !this.options.strict){
//fire error event
this.fire({
type: "error",
error: ex,
message: ex.message,
line: ex.line,
col: ex.col
});
//skip over everything until closing brace
tt = tokenStream.advance([Tokens.RBRACE]);
if (tt == Tokens.RBRACE){
//if there's a right brace, the rule is finished so don't do anything
} else {
//otherwise, rethrow the error because it wasn't handled properly
throw ex;
}
} else {
//not a syntax error, rethrow it
throw ex;
}
//trigger parser to continue
return true;
}
//if it got here, all selectors parsed
if (selectors){
this.fire({
type: "startrule",
selectors: selectors,
line: selectors[0].line,
col: selectors[0].col
});
this._readDeclarations(true);
this.fire({
type: "endrule",
selectors: selectors,
line: selectors[0].line,
col: selectors[0].col
});
}
return selectors;
},
//CSS3 Selectors
_selectors_group: function(){
/*
* selectors_group
* : selector [ COMMA S* selector ]*
* ;
*/
var tokenStream = this._tokenStream,
selectors = [],
selector;
selector = this._selector();
if (selector !== null){
selectors.push(selector);
while(tokenStream.match(Tokens.COMMA)){
this._readWhitespace();
selector = this._selector();
if (selector !== null){
selectors.push(selector);
} else {
this._unexpectedToken(tokenStream.LT(1));
}
}
}
return selectors.length ? selectors : null;
},
//CSS3 Selectors
_selector: function(){
/*
* selector
* : simple_selector_sequence [ combinator simple_selector_sequence ]*
* ;
*/
var tokenStream = this._tokenStream,
selector = [],
nextSelector = null,
combinator = null,
ws = null;
//if there's no simple selector, then there's no selector
nextSelector = this._simple_selector_sequence();
if (nextSelector === null){
return null;
}
selector.push(nextSelector);
do {
//look for a combinator
combinator = this._combinator();
if (combinator !== null){
selector.push(combinator);
nextSelector = this._simple_selector_sequence();
//there must be a next selector
if (nextSelector === null){
                            this._unexpectedToken(tokenStream.LT(1));
} else {
//nextSelector is an instance of SelectorPart
selector.push(nextSelector);
}
} else {
                        //if there's no whitespace, we're done
if (this._readWhitespace()){
//add whitespace separator
ws = new Combinator(tokenStream.token().value, tokenStream.token().startLine, tokenStream.token().startCol);
//combinator is not required
combinator = this._combinator();
//selector is required if there's a combinator
nextSelector = this._simple_selector_sequence();
if (nextSelector === null){
if (combinator !== null){
this._unexpectedToken(tokenStream.LT(1));
}
} else {
if (combinator !== null){
selector.push(combinator);
} else {
selector.push(ws);
}
selector.push(nextSelector);
}
} else {
break;
}
}
} while(true);
return new Selector(selector, selector[0].line, selector[0].col);
},
//CSS3 Selectors
_simple_selector_sequence: function(){
/*
* simple_selector_sequence
* : [ type_selector | universal ]
* [ HASH | class | attrib | pseudo | negation ]*
* | [ HASH | class | attrib | pseudo | negation ]+
* ;
*/
var tokenStream = this._tokenStream,
//parts of a simple selector
elementName = null,
modifiers = [],
//complete selector text
selectorText= "",
//the different parts after the element name to search for
components = [
//HASH
function(){
return tokenStream.match(Tokens.HASH) ?
new SelectorSubPart(tokenStream.token().value, "id", tokenStream.token().startLine, tokenStream.token().startCol) :
null;
},
this._class,
this._attrib,
this._pseudo,
this._negation
],
i = 0,
len = components.length,
component = null,
found = false,
line,
col;
//get starting line and column for the selector
line = tokenStream.LT(1).startLine;
col = tokenStream.LT(1).startCol;
elementName = this._type_selector();
if (!elementName){
elementName = this._universal();
}
if (elementName !== null){
selectorText += elementName;
}
while(true){
//whitespace means we're done
if (tokenStream.peek() === Tokens.S){
break;
}
//check for each component
while(i < len && component === null){
component = components[i++].call(this);
}
if (component === null){
//we don't have a selector
if (selectorText === ""){
return null;
} else {
break;
}
} else {
i = 0;
modifiers.push(component);
selectorText += component.toString();
component = null;
}
}
return selectorText !== "" ?
new SelectorPart(elementName, modifiers, selectorText, line, col) :
null;
},
//CSS3 Selectors
_type_selector: function(){
/*
* type_selector
* : [ namespace_prefix ]? element_name
* ;
*/
var tokenStream = this._tokenStream,
ns = this._namespace_prefix(),
elementName = this._element_name();
if (!elementName){
/*
* Need to back out the namespace that was read due to both
* type_selector and universal reading namespace_prefix
* first. Kind of hacky, but only way I can figure out
* right now how to not change the grammar.
*/
if (ns){
tokenStream.unget();
if (ns.length > 1){
tokenStream.unget();
}
}
return null;
} else {
if (ns){
elementName.text = ns + elementName.text;
elementName.col -= ns.length;
}
return elementName;
}
},
//CSS3 Selectors
_class: function(){
/*
* class
* : '.' IDENT
* ;
*/
var tokenStream = this._tokenStream,
token;
if (tokenStream.match(Tokens.DOT)){
tokenStream.mustMatch(Tokens.IDENT);
token = tokenStream.token();
return new SelectorSubPart("." + token.value, "class", token.startLine, token.startCol - 1);
} else {
return null;
}
},
//CSS3 Selectors
_element_name: function(){
/*
* element_name
* : IDENT
* ;
*/
var tokenStream = this._tokenStream,
token;
if (tokenStream.match(Tokens.IDENT)){
token = tokenStream.token();
return new SelectorSubPart(token.value, "elementName", token.startLine, token.startCol);
} else {
return null;
}
},
//CSS3 Selectors
_namespace_prefix: function(){
/*
* namespace_prefix
* : [ IDENT | '*' ]? '|'
* ;
*/
var tokenStream = this._tokenStream,
value = "";
//verify that this is a namespace prefix
if (tokenStream.LA(1) === Tokens.PIPE || tokenStream.LA(2) === Tokens.PIPE){
if(tokenStream.match([Tokens.IDENT, Tokens.STAR])){
value += tokenStream.token().value;
}
tokenStream.mustMatch(Tokens.PIPE);
value += "|";
}
return value.length ? value : null;
},
//CSS3 Selectors
_universal: function(){
/*
* universal
* : [ namespace_prefix ]? '*'
* ;
*/
var tokenStream = this._tokenStream,
value = "",
ns;
ns = this._namespace_prefix();
if(ns){
value += ns;
}
if(tokenStream.match(Tokens.STAR)){
value += "*";
}
return value.length ? value : null;
},
//CSS3 Selectors
_attrib: function(){
/*
* attrib
* : '[' S* [ namespace_prefix ]? IDENT S*
* [ [ PREFIXMATCH |
* SUFFIXMATCH |
* SUBSTRINGMATCH |
* '=' |
* INCLUDES |
* DASHMATCH ] S* [ IDENT | STRING ] S*
* ]? ']'
* ;
*/
var tokenStream = this._tokenStream,
value = null,
ns,
token;
if (tokenStream.match(Tokens.LBRACKET)){
token = tokenStream.token();
value = token.value;
value += this._readWhitespace();
ns = this._namespace_prefix();
if (ns){
value += ns;
}
tokenStream.mustMatch(Tokens.IDENT);
value += tokenStream.token().value;
value += this._readWhitespace();
if(tokenStream.match([Tokens.PREFIXMATCH, Tokens.SUFFIXMATCH, Tokens.SUBSTRINGMATCH,
Tokens.EQUALS, Tokens.INCLUDES, Tokens.DASHMATCH])){
value += tokenStream.token().value;
value += this._readWhitespace();
tokenStream.mustMatch([Tokens.IDENT, Tokens.STRING]);
value += tokenStream.token().value;
value += this._readWhitespace();
}
tokenStream.mustMatch(Tokens.RBRACKET);
return new SelectorSubPart(value + "]", "attribute", token.startLine, token.startCol);
} else {
return null;
}
},
//CSS3 Selectors
_pseudo: function(){
/*
* pseudo
* : ':' ':'? [ IDENT | functional_pseudo ]
* ;
*/
var tokenStream = this._tokenStream,
pseudo = null,
colons = ":",
line,
col;
if (tokenStream.match(Tokens.COLON)){
if (tokenStream.match(Tokens.COLON)){
colons += ":";
}
if (tokenStream.match(Tokens.IDENT)){
pseudo = tokenStream.token().value;
line = tokenStream.token().startLine;
col = tokenStream.token().startCol - colons.length;
} else if (tokenStream.peek() == Tokens.FUNCTION){
line = tokenStream.LT(1).startLine;
col = tokenStream.LT(1).startCol - colons.length;
pseudo = this._functional_pseudo();
}
if (pseudo){
pseudo = new SelectorSubPart(colons + pseudo, "pseudo", line, col);
}
}
return pseudo;
},
//CSS3 Selectors
_functional_pseudo: function(){
/*
* functional_pseudo
* : FUNCTION S* expression ')'
* ;
*/
var tokenStream = this._tokenStream,
value = null;
if(tokenStream.match(Tokens.FUNCTION)){
value = tokenStream.token().value;
value += this._readWhitespace();
value += this._expression();
tokenStream.mustMatch(Tokens.RPAREN);
value += ")";
}
return value;
},
//CSS3 Selectors
_expression: function(){
/*
* expression
* : [ [ PLUS | '-' | DIMENSION | NUMBER | STRING | IDENT ] S* ]+
* ;
*/
var tokenStream = this._tokenStream,
value = "";
while(tokenStream.match([Tokens.PLUS, Tokens.MINUS, Tokens.DIMENSION,
Tokens.NUMBER, Tokens.STRING, Tokens.IDENT, Tokens.LENGTH,
Tokens.FREQ, Tokens.ANGLE, Tokens.TIME,
Tokens.RESOLUTION])){
value += tokenStream.token().value;
value += this._readWhitespace();
}
return value.length ? value : null;
},
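            //Illustrative note: for ":nth-child(2n+1)", _functional_pseudo()
            //consumes a FUNCTION token such as "nth-child(" and this method
            //returns the argument text "2n+1" from the token types matched above.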
//CSS3 Selectors
_negation: function(){
/*
* negation
* : NOT S* negation_arg S* ')'
* ;
*/
var tokenStream = this._tokenStream,
line,
col,
value = "",
arg,
subpart = null;
if (tokenStream.match(Tokens.NOT)){
value = tokenStream.token().value;
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
value += this._readWhitespace();
arg = this._negation_arg();
value += arg;
value += this._readWhitespace();
                    tokenStream.mustMatch(Tokens.RPAREN);
value += tokenStream.token().value;
subpart = new SelectorSubPart(value, "not", line, col);
subpart.args.push(arg);
}
return subpart;
},
//CSS3 Selectors
_negation_arg: function(){
/*
* negation_arg
* : type_selector | universal | HASH | class | attrib | pseudo
* ;
*/
var tokenStream = this._tokenStream,
args = [
this._type_selector,
this._universal,
function(){
return tokenStream.match(Tokens.HASH) ?
new SelectorSubPart(tokenStream.token().value, "id", tokenStream.token().startLine, tokenStream.token().startCol) :
null;
},
this._class,
this._attrib,
this._pseudo
],
arg = null,
i = 0,
len = args.length,
elementName,
line,
col,
part;
line = tokenStream.LT(1).startLine;
col = tokenStream.LT(1).startCol;
while(i < len && arg === null){
arg = args[i].call(this);
i++;
}
//must be a negation arg
if (arg === null){
this._unexpectedToken(tokenStream.LT(1));
}
//it's an element name
if (arg.type == "elementName"){
part = new SelectorPart(arg, [], arg.toString(), line, col);
} else {
part = new SelectorPart(null, [arg], arg.toString(), line, col);
}
return part;
},
_declaration: function(){
/*
* declaration
* : property ':' S* expr prio?
* | /( empty )/
* ;
*/
var tokenStream = this._tokenStream,
property = null,
expr = null,
prio = null,
error = null,
invalid = null,
propertyName= "";
property = this._property();
if (property !== null){
tokenStream.mustMatch(Tokens.COLON);
this._readWhitespace();
expr = this._expr();
//if there's no parts for the value, it's an error
if (!expr || expr.length === 0){
this._unexpectedToken(tokenStream.LT(1));
}
prio = this._prio();
/*
* If hacks should be allowed, then only check the root
* property. If hacks should not be allowed, treat
* _property or *property as invalid properties.
*/
propertyName = property.toString();
if (this.options.starHack && property.hack == "*" ||
this.options.underscoreHack && property.hack == "_") {
propertyName = property.text;
}
try {
this._validateProperty(propertyName, expr);
} catch (ex) {
invalid = ex;
}
this.fire({
type: "property",
property: property,
value: expr,
important: prio,
line: property.line,
col: property.col,
invalid: invalid
});
return true;
} else {
return false;
}
},
_prio: function(){
/*
* prio
* : IMPORTANT_SYM S*
* ;
*/
var tokenStream = this._tokenStream,
result = tokenStream.match(Tokens.IMPORTANT_SYM);
this._readWhitespace();
return result;
},
_expr: function(){
/*
* expr
* : term [ operator term ]*
* ;
*/
var tokenStream = this._tokenStream,
values = [],
//valueParts = [],
value = null,
operator = null;
value = this._term();
if (value !== null){
values.push(value);
do {
operator = this._operator();
//if there's an operator, keep building up the value parts
if (operator){
values.push(operator);
} /*else {
//if there's not an operator, you have a full value
values.push(new PropertyValue(valueParts, valueParts[0].line, valueParts[0].col));
valueParts = [];
}*/
value = this._term();
if (value === null){
break;
} else {
values.push(value);
}
} while(true);
}
//cleanup
/*if (valueParts.length){
values.push(new PropertyValue(valueParts, valueParts[0].line, valueParts[0].col));
}*/
return values.length > 0 ? new PropertyValue(values, values[0].line, values[0].col) : null;
},
_term: function(){
/*
* term
* : unary_operator?
* [ NUMBER S* | PERCENTAGE S* | LENGTH S* | ANGLE S* |
* TIME S* | FREQ S* | function | ie_function ]
* | STRING S* | IDENT S* | URI S* | UNICODERANGE S* | hexcolor
* ;
*/
var tokenStream = this._tokenStream,
unary = null,
value = null,
token,
line,
col;
//returns the operator or null
unary = this._unary_operator();
if (unary !== null){
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
}
//exception for IE filters
if (tokenStream.peek() == Tokens.IE_FUNCTION && this.options.ieFilters){
value = this._ie_function();
if (unary === null){
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
}
//see if there's a simple match
} else if (tokenStream.match([Tokens.NUMBER, Tokens.PERCENTAGE, Tokens.LENGTH,
Tokens.ANGLE, Tokens.TIME,
Tokens.FREQ, Tokens.STRING, Tokens.IDENT, Tokens.URI, Tokens.UNICODE_RANGE])){
value = tokenStream.token().value;
if (unary === null){
line = tokenStream.token().startLine;
col = tokenStream.token().startCol;
}
this._readWhitespace();
} else {
//see if it's a color
token = this._hexcolor();
if (token === null){
//if there's no unary, get the start of the next token for line/col info
if (unary === null){
line = tokenStream.LT(1).startLine;
col = tokenStream.LT(1).startCol;
}
//has to be a function
if (value === null){
/*
* This checks for alpha(opacity=0) style of IE
* functions. IE_FUNCTION only presents progid: style.
*/
if (tokenStream.LA(3) == Tokens.EQUALS && this.options.ieFilters){
value = this._ie_function();
} else {
value = this._function();
}
}
/*if (value === null){
return null;
//throw new Error("Expected identifier at line " + tokenStream.token().startLine + ", character " + tokenStream.token().startCol + ".");
}*/
} else {
value = token.value;
if (unary === null){
line = token.startLine;
col = token.startCol;
}
}
}
return value !== null ?
new PropertyValuePart(unary !== null ? unary + value : value, line, col) :
null;
},
_function: function(){
/*
* function
* : FUNCTION S* expr ')' S*
* ;
*/
var tokenStream = this._tokenStream,
functionText = null,
expr = null,
lt;
if (tokenStream.match(Tokens.FUNCTION)){
functionText = tokenStream.token().value;
this._readWhitespace();
expr = this._expr();
functionText += expr;
//START: Horrible hack in case it's an IE filter
if (this.options.ieFilters && tokenStream.peek() == Tokens.EQUALS){
do {
if (this._readWhitespace()){
functionText += tokenStream.token().value;
}
//might be second time in the loop
if (tokenStream.LA(0) == Tokens.COMMA){
functionText += tokenStream.token().value;
}
tokenStream.match(Tokens.IDENT);
functionText += tokenStream.token().value;
tokenStream.match(Tokens.EQUALS);
functionText += tokenStream.token().value;
//functionText += this._term();
lt = tokenStream.peek();
while(lt != Tokens.COMMA && lt != Tokens.S && lt != Tokens.RPAREN){
tokenStream.get();
functionText += tokenStream.token().value;
lt = tokenStream.peek();
}
} while(tokenStream.match([Tokens.COMMA, Tokens.S]));
}
//END: Horrible Hack
tokenStream.match(Tokens.RPAREN);
functionText += ")";
this._readWhitespace();
}
return functionText;
},
_ie_function: function(){
/* (My own extension)
* ie_function
* : IE_FUNCTION S* IDENT '=' term [S* ','? IDENT '=' term]+ ')' S*
* ;
*/
var tokenStream = this._tokenStream,
functionText = null,
expr = null,
lt;
//IE function can begin like a regular function, too
if (tokenStream.match([Tokens.IE_FUNCTION, Tokens.FUNCTION])){
functionText = tokenStream.token().value;
do {
if (this._readWhitespace()){
functionText += tokenStream.token().value;
}
//might be second time in the loop
if (tokenStream.LA(0) == Tokens.COMMA){
functionText += tokenStream.token().value;
}
tokenStream.match(Tokens.IDENT);
functionText += tokenStream.token().value;
tokenStream.match(Tokens.EQUALS);
functionText += tokenStream.token().value;
//functionText += this._term();
lt = tokenStream.peek();
while(lt != Tokens.COMMA && lt != Tokens.S && lt != Tokens.RPAREN){
tokenStream.get();
functionText += tokenStream.token().value;
lt = tokenStream.peek();
}
} while(tokenStream.match([Tokens.COMMA, Tokens.S]));
tokenStream.match(Tokens.RPAREN);
functionText += ")";
this._readWhitespace();
}
return functionText;
},
_hexcolor: function(){
/*
* There is a constraint on the color that it must
* have either 3 or 6 hex-digits (i.e., [0-9a-fA-F])
* after the "#"; e.g., "#000" is OK, but "#abcd" is not.
*
* hexcolor
* : HASH S*
* ;
*/
var tokenStream = this._tokenStream,
token = null,
color;
if(tokenStream.match(Tokens.HASH)){
//need to do some validation here
token = tokenStream.token();
color = token.value;
                    if (!/^#[a-f0-9]{3}(?:[a-f0-9]{3})?$/i.test(color)){  //require exactly 3 or 6 hex digits
throw new SyntaxError("Expected a hex color but found '" + color + "' at line " + token.startLine + ", col " + token.startCol + ".", token.startLine, token.startCol);
}
this._readWhitespace();
}
return token;
},
//-----------------------------------------------------------------
// Animations methods
//-----------------------------------------------------------------
_keyframes: function(){
/*
* keyframes:
             * : KEYFRAMES_SYM S* keyframe_name S* '{' S* keyframe_rule* '}'
* ;
*/
var tokenStream = this._tokenStream,
token,
tt,
name,
prefix = "";
tokenStream.mustMatch(Tokens.KEYFRAMES_SYM);
token = tokenStream.token();
if (/^@\-([^\-]+)\-/.test(token.value)) {
prefix = RegExp.$1;
}
this._readWhitespace();
name = this._keyframe_name();
this._readWhitespace();
tokenStream.mustMatch(Tokens.LBRACE);
this.fire({
type: "startkeyframes",
name: name,
prefix: prefix,
line: token.startLine,
col: token.startCol
});
this._readWhitespace();
tt = tokenStream.peek();
//check for key
while(tt == Tokens.IDENT || tt == Tokens.PERCENTAGE) {
this._keyframe_rule();
this._readWhitespace();
tt = tokenStream.peek();
}
this.fire({
type: "endkeyframes",
name: name,
prefix: prefix,
line: token.startLine,
col: token.startCol
});
this._readWhitespace();
tokenStream.mustMatch(Tokens.RBRACE);
},
_keyframe_name: function(){
/*
* keyframe_name:
* : IDENT
* | STRING
* ;
*/
var tokenStream = this._tokenStream,
token;
tokenStream.mustMatch([Tokens.IDENT, Tokens.STRING]);
return SyntaxUnit.fromToken(tokenStream.token());
},
_keyframe_rule: function(){
/*
* keyframe_rule:
* : key_list S*
* '{' S* declaration [ ';' S* declaration ]* '}' S*
* ;
*/
var tokenStream = this._tokenStream,
token,
keyList = this._key_list();
this.fire({
type: "startkeyframerule",
keys: keyList,
line: keyList[0].line,
col: keyList[0].col
});
this._readDeclarations(true);
this.fire({
type: "endkeyframerule",
keys: keyList,
line: keyList[0].line,
col: keyList[0].col
});
},
_key_list: function(){
/*
* key_list:
* : key [ S* ',' S* key]*
* ;
*/
var tokenStream = this._tokenStream,
token,
key,
keyList = [];
//must be least one key
keyList.push(this._key());
this._readWhitespace();
while(tokenStream.match(Tokens.COMMA)){
this._readWhitespace();
keyList.push(this._key());
this._readWhitespace();
}
return keyList;
},
_key: function(){
/*
* There is a restriction that IDENT can be only "from" or "to".
*
* key
* : PERCENTAGE
* | IDENT
* ;
*/
var tokenStream = this._tokenStream,
token;
if (tokenStream.match(Tokens.PERCENTAGE)){
return SyntaxUnit.fromToken(tokenStream.token());
} else if (tokenStream.match(Tokens.IDENT)){
token = tokenStream.token();
if (/from|to/i.test(token.value)){
return SyntaxUnit.fromToken(token);
}
tokenStream.unget();
}
//if it gets here, there wasn't a valid token, so time to explode
this._unexpectedToken(tokenStream.LT(1));
},
//-----------------------------------------------------------------
// Helper methods
//-----------------------------------------------------------------
/**
* Not part of CSS grammar, but useful for skipping over
* combination of white space and HTML-style comments.
* @return {void}
* @method _skipCruft
* @private
*/
_skipCruft: function(){
while(this._tokenStream.match([Tokens.S, Tokens.CDO, Tokens.CDC])){
//noop
}
},
/**
* Not part of CSS grammar, but this pattern occurs frequently
* in the official CSS grammar. Split out here to eliminate
* duplicate code.
* @param {Boolean} checkStart Indicates if the rule should check
* for the left brace at the beginning.
* @param {Boolean} readMargins Indicates if the rule should check
* for margin patterns.
* @return {void}
* @method _readDeclarations
* @private
*/
_readDeclarations: function(checkStart, readMargins){
/*
* Reads the pattern
* S* '{' S* declaration [ ';' S* declaration ]* '}' S*
* or
* S* '{' S* [ declaration | margin ]? [ ';' S* [ declaration | margin ]? ]* '}' S*
* Note that this is how it is described in CSS3 Paged Media, but is actually incorrect.
             * A semicolon is only necessary following a declaration if there's another declaration
* or margin afterwards.
*/
var tokenStream = this._tokenStream,
tt;
this._readWhitespace();
if (checkStart){
tokenStream.mustMatch(Tokens.LBRACE);
}
this._readWhitespace();
try {
while(true){
if (tokenStream.match(Tokens.SEMICOLON) || (readMargins && this._margin())){
//noop
} else if (this._declaration()){
if (!tokenStream.match(Tokens.SEMICOLON)){
break;
}
} else {
break;
}
//if ((!this._margin() && !this._declaration()) || !tokenStream.match(Tokens.SEMICOLON)){
// break;
//}
this._readWhitespace();
}
tokenStream.mustMatch(Tokens.RBRACE);
this._readWhitespace();
} catch (ex) {
if (ex instanceof SyntaxError && !this.options.strict){
//fire error event
this.fire({
type: "error",
error: ex,
message: ex.message,
line: ex.line,
col: ex.col
});
//see if there's another declaration
tt = tokenStream.advance([Tokens.SEMICOLON, Tokens.RBRACE]);
if (tt == Tokens.SEMICOLON){
//if there's a semicolon, then there might be another declaration
this._readDeclarations(false, readMargins);
} else if (tt != Tokens.RBRACE){
                            //a right brace means the rule is finished, so don't do anything;
                            //anything else means the error wasn't handled properly, so rethrow it
throw ex;
}
} else {
//not a syntax error, rethrow it
throw ex;
}
}
},
/**
* In some cases, you can end up with two white space tokens in a
* row. Instead of making a change in every function that looks for
* white space, this function is used to match as much white space
* as necessary.
* @method _readWhitespace
* @return {String} The white space if found, empty string if not.
* @private
*/
_readWhitespace: function(){
var tokenStream = this._tokenStream,
ws = "";
while(tokenStream.match(Tokens.S)){
ws += tokenStream.token().value;
}
return ws;
},
/**
* Throws an error when an unexpected token is found.
* @param {Object} token The token that was found.
* @method _unexpectedToken
* @return {void}
* @private
*/
_unexpectedToken: function(token){
throw new SyntaxError("Unexpected token '" + token.value + "' at line " + token.startLine + ", col " + token.startCol + ".", token.startLine, token.startCol);
},
/**
* Helper method used for parsing subparts of a style sheet.
* @return {void}
* @method _verifyEnd
* @private
*/
_verifyEnd: function(){
if (this._tokenStream.LA(1) != Tokens.EOF){
this._unexpectedToken(this._tokenStream.LT(1));
}
},
//-----------------------------------------------------------------
// Validation methods
//-----------------------------------------------------------------
_validateProperty: function(property, value){
Validation.validate(property, value);
},
//-----------------------------------------------------------------
// Parsing methods
//-----------------------------------------------------------------
parse: function(input){
this._tokenStream = new TokenStream(input, Tokens);
this._stylesheet();
},
parseStyleSheet: function(input){
//just passthrough
return this.parse(input);
},
parseMediaQuery: function(input){
this._tokenStream = new TokenStream(input, Tokens);
var result = this._media_query();
//if there's anything more, then it's an invalid selector
this._verifyEnd();
//otherwise return result
return result;
},
/**
             * Parses a property value (everything after the colon).
* @return {parserlib.css.PropertyValue} The property value.
* @throws parserlib.util.SyntaxError If an unexpected token is found.
             * @method parsePropertyValue
*/
parsePropertyValue: function(input){
this._tokenStream = new TokenStream(input, Tokens);
this._readWhitespace();
var result = this._expr();
//okay to have a trailing white space
this._readWhitespace();
//if there's anything more, then it's an invalid selector
this._verifyEnd();
//otherwise return result
return result;
},
/**
* Parses a complete CSS rule, including selectors and
* properties.
             * @param {String} input The text to parse.
* @return {Boolean} True if the parse completed successfully, false if not.
* @method parseRule
*/
parseRule: function(input){
this._tokenStream = new TokenStream(input, Tokens);
//skip any leading white space
this._readWhitespace();
var result = this._ruleset();
//skip any trailing white space
this._readWhitespace();
//if there's anything more, then it's an invalid selector
this._verifyEnd();
//otherwise return result
return result;
},
/**
* Parses a single CSS selector (no comma)
* @param {String} input The text to parse as a CSS selector.
* @return {Selector} An object representing the selector.
* @throws parserlib.util.SyntaxError If an unexpected token is found.
* @method parseSelector
*/
parseSelector: function(input){
this._tokenStream = new TokenStream(input, Tokens);
//skip any leading white space
this._readWhitespace();
var result = this._selector();
//skip any trailing white space
this._readWhitespace();
//if there's anything more, then it's an invalid selector
this._verifyEnd();
//otherwise return result
return result;
},
/**
* Parses an HTML style attribute: a set of CSS declarations
* separated by semicolons.
* @param {String} input The text to parse as a style attribute
* @return {void}
* @method parseStyleAttribute
*/
parseStyleAttribute: function(input){
input += "}"; // for error recovery in _readDeclarations()
this._tokenStream = new TokenStream(input, Tokens);
this._readDeclarations();
}
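            /*
             * Usage sketch (illustrative): the parse* helpers above can be used
             * to check fragments of CSS in isolation. The inputs are made-up
             * examples, not part of the library.
             *
             *     var parser = new Parser();
             *     parser.parseSelector("ul > li.item");            //-> Selector
             *     parser.parsePropertyValue("1px solid #ff0000");  //-> PropertyValue
             *     parser.parseStyleAttribute("color: red; margin: 0;");
             */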
};
//copy over onto prototype
for (prop in additions){
if (additions.hasOwnProperty(prop)){
proto[prop] = additions[prop];
}
}
return proto;
}();
/*
nth
: S* [ ['-'|'+']? INTEGER? {N} [ S* ['-'|'+'] S* INTEGER ]? |
['-'|'+']? INTEGER | {O}{D}{D} | {E}{V}{E}{N} ] S*
;
*/
/*global Validation, ValidationTypes, ValidationError*/
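/*
 * Editorial note on the table below: each property maps to one of four value
 * formats used by the validator: a string (a grammar of allowed values), the
 * number 1 (the property is recognized but its value is not validated), an
 * object with multi/comma/max keys (a repeatable, optionally comma-separated,
 * optionally capped value), or a function (a custom validator that throws
 * ValidationError on bad input).
 */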
var Properties = {
//A
"alignment-adjust" : "auto | baseline | before-edge | text-before-edge | middle | central | after-edge | text-after-edge | ideographic | alphabetic | hanging | mathematical | <percentage> | <length>",
"alignment-baseline" : "baseline | use-script | before-edge | text-before-edge | after-edge | text-after-edge | central | middle | ideographic | alphabetic | hanging | mathematical",
"animation" : 1,
"animation-delay" : { multi: "<time>", comma: true },
"animation-direction" : { multi: "normal | alternate", comma: true },
"animation-duration" : { multi: "<time>", comma: true },
"animation-iteration-count" : { multi: "<number> | infinite", comma: true },
"animation-name" : { multi: "none | <ident>", comma: true },
"animation-play-state" : { multi: "running | paused", comma: true },
"animation-timing-function" : 1,
//vendor prefixed
"-moz-animation-delay" : { multi: "<time>", comma: true },
"-moz-animation-direction" : { multi: "normal | alternate", comma: true },
"-moz-animation-duration" : { multi: "<time>", comma: true },
"-moz-animation-iteration-count" : { multi: "<number> | infinite", comma: true },
"-moz-animation-name" : { multi: "none | <ident>", comma: true },
"-moz-animation-play-state" : { multi: "running | paused", comma: true },
"-ms-animation-delay" : { multi: "<time>", comma: true },
"-ms-animation-direction" : { multi: "normal | alternate", comma: true },
"-ms-animation-duration" : { multi: "<time>", comma: true },
"-ms-animation-iteration-count" : { multi: "<number> | infinite", comma: true },
"-ms-animation-name" : { multi: "none | <ident>", comma: true },
"-ms-animation-play-state" : { multi: "running | paused", comma: true },
"-webkit-animation-delay" : { multi: "<time>", comma: true },
"-webkit-animation-direction" : { multi: "normal | alternate", comma: true },
"-webkit-animation-duration" : { multi: "<time>", comma: true },
"-webkit-animation-iteration-count" : { multi: "<number> | infinite", comma: true },
"-webkit-animation-name" : { multi: "none | <ident>", comma: true },
"-webkit-animation-play-state" : { multi: "running | paused", comma: true },
"-o-animation-delay" : { multi: "<time>", comma: true },
"-o-animation-direction" : { multi: "normal | alternate", comma: true },
"-o-animation-duration" : { multi: "<time>", comma: true },
"-o-animation-iteration-count" : { multi: "<number> | infinite", comma: true },
"-o-animation-name" : { multi: "none | <ident>", comma: true },
"-o-animation-play-state" : { multi: "running | paused", comma: true },
"appearance" : "icon | window | desktop | workspace | document | tooltip | dialog | button | push-button | hyperlink | radio-button | checkbox | menu-item | tab | menu | menubar | pull-down-menu | pop-up-menu | list-menu | radio-group | checkbox-group | outline-tree | range | field | combo-box | signature | password | normal | inherit",
"azimuth" : function (expression) {
var simple = "<angle> | leftwards | rightwards | inherit",
direction = "left-side | far-left | left | center-left | center | center-right | right | far-right | right-side",
behind = false,
valid = false,
part;
if (!ValidationTypes.isAny(expression, simple)) {
if (ValidationTypes.isAny(expression, "behind")) {
behind = true;
valid = true;
}
if (ValidationTypes.isAny(expression, direction)) {
valid = true;
if (!behind) {
ValidationTypes.isAny(expression, "behind");
}
}
}
if (expression.hasNext()) {
part = expression.next();
if (valid) {
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
throw new ValidationError("Expected (<'azimuth'>) but found '" + part + "'.", part.line, part.col);
}
}
},
//B
"backface-visibility" : "visible | hidden",
"background" : 1,
"background-attachment" : { multi: "<attachment>", comma: true },
"background-clip" : { multi: "<box>", comma: true },
"background-color" : "<color> | inherit",
"background-image" : { multi: "<bg-image>", comma: true },
"background-origin" : { multi: "<box>", comma: true },
"background-position" : { multi: "<bg-position>", comma: true },
"background-repeat" : { multi: "<repeat-style>" },
"background-size" : { multi: "<bg-size>", comma: true },
"baseline-shift" : "baseline | sub | super | <percentage> | <length>",
"behavior" : 1,
"binding" : 1,
"bleed" : "<length>",
"bookmark-label" : "<content> | <attr> | <string>",
"bookmark-level" : "none | <integer>",
"bookmark-state" : "open | closed",
"bookmark-target" : "none | <uri> | <attr>",
"border" : "<border-width> || <border-style> || <color>",
"border-bottom" : "<border-width> || <border-style> || <color>",
"border-bottom-color" : "<color>",
"border-bottom-left-radius" : "<x-one-radius>",
"border-bottom-right-radius" : "<x-one-radius>",
"border-bottom-style" : "<border-style>",
"border-bottom-width" : "<border-width>",
"border-collapse" : "collapse | separate | inherit",
"border-color" : { multi: "<color> | inherit", max: 4 },
"border-image" : 1,
"border-image-outset" : { multi: "<length> | <number>", max: 4 },
"border-image-repeat" : { multi: "stretch | repeat | round", max: 2 },
"border-image-slice" : function(expression) {
var valid = false,
numeric = "<number> | <percentage>",
fill = false,
count = 0,
max = 4,
part;
if (ValidationTypes.isAny(expression, "fill")) {
fill = true;
valid = true;
}
while (expression.hasNext() && count < max) {
valid = ValidationTypes.isAny(expression, numeric);
if (!valid) {
break;
}
count++;
}
if (!fill) {
ValidationTypes.isAny(expression, "fill");
} else {
valid = true;
}
if (expression.hasNext()) {
part = expression.next();
if (valid) {
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
throw new ValidationError("Expected ([<number> | <percentage>]{1,4} && fill?) but found '" + part + "'.", part.line, part.col);
}
}
},
"border-image-source" : "<image> | none",
"border-image-width" : { multi: "<length> | <percentage> | <number> | auto", max: 4 },
"border-left" : "<border-width> || <border-style> || <color>",
"border-left-color" : "<color> | inherit",
"border-left-style" : "<border-style>",
"border-left-width" : "<border-width>",
"border-radius" : function(expression) {
var valid = false,
numeric = "<length> | <percentage>",
slash = false,
fill = false,
count = 0,
max = 8,
part;
while (expression.hasNext() && count < max) {
valid = ValidationTypes.isAny(expression, numeric);
if (!valid) {
if (expression.peek() == "/" && count > 1 && !slash) {
slash = true;
max = count + 5;
expression.next();
} else {
break;
}
}
count++;
}
if (expression.hasNext()) {
part = expression.next();
if (valid) {
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
throw new ValidationError("Expected (<'border-radius'>) but found '" + part + "'.", part.line, part.col);
}
}
},
"border-right" : "<border-width> || <border-style> || <color>",
"border-right-color" : "<color> | inherit",
"border-right-style" : "<border-style>",
"border-right-width" : "<border-width>",
"border-spacing" : { multi: "<length> | inherit", max: 2 },
"border-style" : { multi: "<border-style>", max: 4 },
"border-top" : "<border-width> || <border-style> || <color>",
"border-top-color" : "<color> | inherit",
"border-top-left-radius" : "<x-one-radius>",
"border-top-right-radius" : "<x-one-radius>",
"border-top-style" : "<border-style>",
"border-top-width" : "<border-width>",
"border-width" : { multi: "<border-width>", max: 4 },
"bottom" : "<margin-width> | inherit",
"box-align" : "start | end | center | baseline | stretch", //http://www.w3.org/TR/2009/WD-css3-flexbox-20090723/
"box-decoration-break" : "slice |clone",
"box-direction" : "normal | reverse | inherit",
"box-flex" : "<number>",
"box-flex-group" : "<integer>",
"box-lines" : "single | multiple",
"box-ordinal-group" : "<integer>",
"box-orient" : "horizontal | vertical | inline-axis | block-axis | inherit",
"box-pack" : "start | end | center | justify",
"box-shadow" : function (expression) {
var result = false,
part;
if (!ValidationTypes.isAny(expression, "none")) {
Validation.multiProperty("<shadow>", expression, true, Infinity);
} else {
if (expression.hasNext()) {
part = expression.next();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
}
}
},
"box-sizing" : "content-box | border-box | inherit",
"break-after" : "auto | always | avoid | left | right | page | column | avoid-page | avoid-column",
"break-before" : "auto | always | avoid | left | right | page | column | avoid-page | avoid-column",
"break-inside" : "auto | avoid | avoid-page | avoid-column",
//C
"caption-side" : "top | bottom | inherit",
"clear" : "none | right | left | both | inherit",
"clip" : 1,
"color" : "<color> | inherit",
"color-profile" : 1,
"column-count" : "<integer> | auto", //http://www.w3.org/TR/css3-multicol/
"column-fill" : "auto | balance",
"column-gap" : "<length> | normal",
"column-rule" : "<border-width> || <border-style> || <color>",
"column-rule-color" : "<color>",
"column-rule-style" : "<border-style>",
"column-rule-width" : "<border-width>",
"column-span" : "none | all",
"column-width" : "<length> | auto",
"columns" : 1,
"content" : 1,
"counter-increment" : 1,
"counter-reset" : 1,
"crop" : "<shape> | auto",
"cue" : "cue-after | cue-before | inherit",
"cue-after" : 1,
"cue-before" : 1,
"cursor" : 1,
//D
"direction" : "ltr | rtl | inherit",
"display" : "inline | block | list-item | inline-block | table | inline-table | table-row-group | table-header-group | table-footer-group | table-row | table-column-group | table-column | table-cell | table-caption | box | inline-box | grid | inline-grid | none | inherit | -webkit-box",
"dominant-baseline" : 1,
"drop-initial-after-adjust" : "central | middle | after-edge | text-after-edge | ideographic | alphabetic | mathematical | <percentage> | <length>",
"drop-initial-after-align" : "baseline | use-script | before-edge | text-before-edge | after-edge | text-after-edge | central | middle | ideographic | alphabetic | hanging | mathematical",
"drop-initial-before-adjust" : "before-edge | text-before-edge | central | middle | hanging | mathematical | <percentage> | <length>",
"drop-initial-before-align" : "caps-height | baseline | use-script | before-edge | text-before-edge | after-edge | text-after-edge | central | middle | ideographic | alphabetic | hanging | mathematical",
"drop-initial-size" : "auto | line | <length> | <percentage>",
"drop-initial-value" : "initial | <integer>",
//E
"elevation" : "<angle> | below | level | above | higher | lower | inherit",
"empty-cells" : "show | hide | inherit",
//F
"filter" : 1,
"fit" : "fill | hidden | meet | slice",
"fit-position" : 1,
"float" : "left | right | none | inherit",
"float-offset" : 1,
"font" : 1,
"font-family" : 1,
"font-size" : "<absolute-size> | <relative-size> | <length> | <percentage> | inherit",
"font-size-adjust" : "<number> | none | inherit",
"font-stretch" : "normal | ultra-condensed | extra-condensed | condensed | semi-condensed | semi-expanded | expanded | extra-expanded | ultra-expanded | inherit",
"font-style" : "normal | italic | oblique | inherit",
"font-variant" : "normal | small-caps | inherit",
"font-weight" : "normal | bold | bolder | lighter | 100 | 200 | 300 | 400 | 500 | 600 | 700 | 800 | 900 | inherit",
//G
"grid-cell-stacking" : "columns | rows | layer",
"grid-column" : 1,
"grid-columns" : 1,
"grid-column-align" : "start | end | center | stretch",
"grid-column-sizing" : 1,
"grid-column-span" : "<integer>",
"grid-flow" : "none | rows | columns",
"grid-layer" : "<integer>",
"grid-row" : 1,
"grid-rows" : 1,
"grid-row-align" : "start | end | center | stretch",
"grid-row-span" : "<integer>",
"grid-row-sizing" : 1,
//H
"hanging-punctuation" : 1,
"height" : "<margin-width> | inherit",
"hyphenate-after" : "<integer> | auto",
"hyphenate-before" : "<integer> | auto",
"hyphenate-character" : "<string> | auto",
"hyphenate-lines" : "no-limit | <integer>",
"hyphenate-resource" : 1,
"hyphens" : "none | manual | auto",
//I
"icon" : 1,
"image-orientation" : "angle | auto",
"image-rendering" : 1,
"image-resolution" : 1,
"inline-box-align" : "initial | last | <integer>",
//L
"left" : "<margin-width> | inherit",
"letter-spacing" : "<length> | normal | inherit",
"line-height" : "<number> | <length> | <percentage> | normal | inherit",
"line-break" : "auto | loose | normal | strict",
"line-stacking" : 1,
"line-stacking-ruby" : "exclude-ruby | include-ruby",
"line-stacking-shift" : "consider-shifts | disregard-shifts",
"line-stacking-strategy" : "inline-line-height | block-line-height | max-height | grid-height",
"list-style" : 1,
"list-style-image" : "<uri> | none | inherit",
"list-style-position" : "inside | outside | inherit",
"list-style-type" : "disc | circle | square | decimal | decimal-leading-zero | lower-roman | upper-roman | lower-greek | lower-latin | upper-latin | armenian | georgian | lower-alpha | upper-alpha | none | inherit",
//M
"margin" : { multi: "<margin-width> | inherit", max: 4 },
"margin-bottom" : "<margin-width> | inherit",
"margin-left" : "<margin-width> | inherit",
"margin-right" : "<margin-width> | inherit",
"margin-top" : "<margin-width> | inherit",
"mark" : 1,
"mark-after" : 1,
"mark-before" : 1,
"marks" : 1,
"marquee-direction" : 1,
"marquee-play-count" : 1,
"marquee-speed" : 1,
"marquee-style" : 1,
"max-height" : "<length> | <percentage> | none | inherit",
"max-width" : "<length> | <percentage> | none | inherit",
"min-height" : "<length> | <percentage> | inherit",
"min-width" : "<length> | <percentage> | inherit",
"move-to" : 1,
//N
"nav-down" : 1,
"nav-index" : 1,
"nav-left" : 1,
"nav-right" : 1,
"nav-up" : 1,
//O
"opacity" : "<number> | inherit",
"orphans" : "<integer> | inherit",
"outline" : 1,
"outline-color" : "<color> | invert | inherit",
"outline-offset" : 1,
"outline-style" : "<border-style> | inherit",
"outline-width" : "<border-width> | inherit",
"overflow" : "visible | hidden | scroll | auto | inherit",
"overflow-style" : 1,
"overflow-x" : 1,
"overflow-y" : 1,
//P
"padding" : { multi: "<padding-width> | inherit", max: 4 },
"padding-bottom" : "<padding-width> | inherit",
"padding-left" : "<padding-width> | inherit",
"padding-right" : "<padding-width> | inherit",
"padding-top" : "<padding-width> | inherit",
"page" : 1,
"page-break-after" : "auto | always | avoid | left | right | inherit",
"page-break-before" : "auto | always | avoid | left | right | inherit",
"page-break-inside" : "auto | avoid | inherit",
"page-policy" : 1,
"pause" : 1,
"pause-after" : 1,
"pause-before" : 1,
"perspective" : 1,
"perspective-origin" : 1,
"phonemes" : 1,
"pitch" : 1,
"pitch-range" : 1,
"play-during" : 1,
"pointer-events" : "auto | none | visiblePainted | visibleFill | visibleStroke | visible | painted | fill | stroke | all | inherit",
"position" : "static | relative | absolute | fixed | inherit",
"presentation-level" : 1,
"punctuation-trim" : 1,
//Q
"quotes" : 1,
//R
"rendering-intent" : 1,
"resize" : 1,
"rest" : 1,
"rest-after" : 1,
"rest-before" : 1,
"richness" : 1,
"right" : "<margin-width> | inherit",
"rotation" : 1,
"rotation-point" : 1,
"ruby-align" : 1,
"ruby-overhang" : 1,
"ruby-position" : 1,
"ruby-span" : 1,
//S
"size" : 1,
"speak" : "normal | none | spell-out | inherit",
"speak-header" : "once | always | inherit",
"speak-numeral" : "digits | continuous | inherit",
"speak-punctuation" : "code | none | inherit",
"speech-rate" : 1,
"src" : 1,
"stress" : 1,
"string-set" : 1,
"table-layout" : "auto | fixed | inherit",
"tab-size" : "<integer> | <length>",
"target" : 1,
"target-name" : 1,
"target-new" : 1,
"target-position" : 1,
"text-align" : "left | right | center | justify | inherit" ,
"text-align-last" : 1,
"text-decoration" : 1,
"text-emphasis" : 1,
"text-height" : 1,
"text-indent" : "<length> | <percentage> | inherit",
"text-justify" : "auto | none | inter-word | inter-ideograph | inter-cluster | distribute | kashida",
"text-outline" : 1,
"text-overflow" : 1,
"text-rendering" : "auto | optimizeSpeed | optimizeLegibility | geometricPrecision | inherit",
"text-shadow" : 1,
"text-transform" : "capitalize | uppercase | lowercase | none | inherit",
"text-wrap" : "normal | none | avoid",
"top" : "<margin-width> | inherit",
"transform" : 1,
"transform-origin" : 1,
"transform-style" : 1,
"transition" : 1,
"transition-delay" : 1,
"transition-duration" : 1,
"transition-property" : 1,
"transition-timing-function" : 1,
//U
"unicode-bidi" : "normal | embed | bidi-override | inherit",
"user-modify" : "read-only | read-write | write-only | inherit",
"user-select" : "none | text | toggle | element | elements | all | inherit",
//V
"vertical-align" : "<percentage> | <length> | baseline | sub | super | top | text-top | middle | bottom | text-bottom | inherit",
"visibility" : "visible | hidden | collapse | inherit",
"voice-balance" : 1,
"voice-duration" : 1,
"voice-family" : 1,
"voice-pitch" : 1,
"voice-pitch-range" : 1,
"voice-rate" : 1,
"voice-stress" : 1,
"voice-volume" : 1,
"volume" : 1,
//W
"white-space" : "normal | pre | nowrap | pre-wrap | pre-line | inherit",
"white-space-collapse" : 1,
"widows" : "<integer> | inherit",
"width" : "<length> | <percentage> | auto | inherit" ,
"word-break" : "normal | keep-all | break-all",
"word-spacing" : "<length> | normal | inherit",
"word-wrap" : 1,
//Z
"z-index" : "<integer> | auto | inherit",
"zoom" : "<number> | <percentage> | normal"
};
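/*
 * A note on the entry formats above (a sketch of how the Validation
 * object later in this file interprets them):
 *   1                        - the property is recognized but its value
 *                              is not validated
 *   "a | b | <type>"         - the value must be a single part matching
 *                              one of the pipe-separated literals/types
 *   "a || b"                 - a group: each alternative may appear at
 *                              most once, in any order
 *   { multi: "...", max: n } - up to n space-separated values, each
 *                              matching the multi grammar (e.g. margin)
 */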
/*global SyntaxUnit, Parser*/
/**
* Represents a property name in a CSS declaration, including any IE hack prefix.
* @namespace parserlib.css
* @class PropertyName
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {String} text The text representation of the unit.
* @param {String} hack The type of IE hack applied ("*", "_", or null).
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function PropertyName(text, hack, line, col){
SyntaxUnit.call(this, text, line, col, Parser.PROPERTY_NAME_TYPE);
/**
* The type of IE hack applied ("*", "_", or null).
* @type String
* @property hack
*/
this.hack = hack;
}
PropertyName.prototype = new SyntaxUnit();
PropertyName.prototype.constructor = PropertyName;
PropertyName.prototype.toString = function(){
return (this.hack ? this.hack : "") + this.text;
};
/*global SyntaxUnit, Parser*/
/**
* Represents a single CSS property value, meaning everything between
* ":" and ";". If there are multiple values separated by commas, this
* type represents just one of those values.
* @param {String[]} parts An array of value parts making up this value.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
* @namespace parserlib.css
* @class PropertyValue
* @extends parserlib.util.SyntaxUnit
* @constructor
*/
function PropertyValue(parts, line, col){
SyntaxUnit.call(this, parts.join(" "), line, col, Parser.PROPERTY_VALUE_TYPE);
/**
* The parts that make up the value.
* @type Array
* @property parts
*/
this.parts = parts;
}
PropertyValue.prototype = new SyntaxUnit();
PropertyValue.prototype.constructor = PropertyValue;
/*global SyntaxUnit, Parser*/
/**
* A utility class that allows for easy iteration over the various parts of a
* property value.
* @param {parserlib.css.PropertyValue} value The property value to iterate over.
* @namespace parserlib.css
* @class PropertyValueIterator
* @constructor
*/
function PropertyValueIterator(value){
/**
* Iterator value
* @type int
* @property _i
* @private
*/
this._i = 0;
/**
* The parts that make up the value.
* @type Array
* @property _parts
* @private
*/
this._parts = value.parts;
/**
* Keeps track of bookmarks along the way.
* @type Array
* @property _marks
* @private
*/
this._marks = [];
/**
* Holds the original property value.
* @type parserlib.css.PropertyValue
* @property value
*/
this.value = value;
}
/**
* Returns the total number of parts in the value.
* @return {int} The total number of parts in the value.
* @method count
*/
PropertyValueIterator.prototype.count = function(){
return this._parts.length;
};
/**
* Indicates if the iterator is positioned at the first item.
* @return {Boolean} True if positioned at first item, false if not.
* @method isFirst
*/
PropertyValueIterator.prototype.isFirst = function(){
return this._i === 0;
};
/**
* Indicates if there are more parts of the property value.
* @return {Boolean} True if there are more parts, false if not.
* @method hasNext
*/
PropertyValueIterator.prototype.hasNext = function(){
return (this._i < this._parts.length);
};
/**
* Marks the current spot in the iteration so it can be restored to
* later on.
* @return {void}
* @method mark
*/
PropertyValueIterator.prototype.mark = function(){
this._marks.push(this._i);
};
/**
* Returns the next part of the property value or null if there is no next
* part. Does not move the internal counter forward.
* @return {parserlib.css.PropertyValuePart} The next part of the property value or null if there is no next
* part.
* @param {int} count (Optional) The number of parts to look ahead; defaults to 0.
* @method peek
*/
PropertyValueIterator.prototype.peek = function(count){
return this.hasNext() ? this._parts[this._i + (count || 0)] : null;
};
/**
* Returns the next part of the property value or null if there is no next
* part.
* @return {parserlib.css.PropertyValuePart} The next part of the property value or null if there is no next
* part.
* @method next
*/
PropertyValueIterator.prototype.next = function(){
return this.hasNext() ? this._parts[this._i++] : null;
};
/**
* Returns the previous part of the property value or null if there is no
* previous part.
* @return {parserlib.css.PropertyValuePart} The previous part of the
* property value or null if there is no next part.
* @method previous
*/
PropertyValueIterator.prototype.previous = function(){
return this._i > 0 ? this._parts[--this._i] : null;
};
/**
* Restores the last saved bookmark.
* @return {void}
* @method restore
*/
PropertyValueIterator.prototype.restore = function(){
if (this._marks.length){
this._i = this._marks.pop();
}
};
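/*
 * Usage sketch (illustrative only; assumes `value` is a
 * parserlib.css.PropertyValue produced by the parser):
 *
 *   var iterator = new PropertyValueIterator(value);
 *   iterator.mark();                    //bookmark the current position
 *   while (iterator.hasNext()){
 *       var part = iterator.next();     //a PropertyValuePart
 *   }
 *   iterator.restore();                 //rewind to the last bookmark
 */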
/*global SyntaxUnit, Parser, Colors*/
/**
* Represents a single part of a CSS property value, meaning that it represents
* just one part of the data between ":" and ";".
* @param {String} text The text representation of the unit.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
* @namespace parserlib.css
* @class PropertyValuePart
* @extends parserlib.util.SyntaxUnit
* @constructor
*/
function PropertyValuePart(text, line, col){
SyntaxUnit.call(this, text, line, col, Parser.PROPERTY_VALUE_PART_TYPE);
/**
* Indicates the type of value unit.
* @type String
* @property type
*/
this.type = "unknown";
//figure out what type of data it is
var temp;
//is it a measurement?
if (/^([+\-]?[\d\.]+)([a-z]+)$/i.test(text)){ //dimension
this.type = "dimension";
this.value = +RegExp.$1;
this.units = RegExp.$2;
//try to narrow down
switch(this.units.toLowerCase()){
case "em":
case "rem":
case "ex":
case "px":
case "cm":
case "mm":
case "in":
case "pt":
case "pc":
case "ch":
this.type = "length";
break;
case "deg":
case "rad":
case "grad":
this.type = "angle";
break;
case "ms":
case "s":
this.type = "time";
break;
case "hz":
case "khz":
this.type = "frequency";
break;
case "dpi":
case "dpcm":
this.type = "resolution";
break;
//default
}
} else if (/^([+\-]?[\d\.]+)%$/i.test(text)){ //percentage
this.type = "percentage";
this.value = +RegExp.$1;
} else if (/^([+\-]?\d+)$/i.test(text)){ //integer
this.type = "integer";
this.value = +RegExp.$1;
} else if (/^([+\-]?[\d\.]+)$/i.test(text)){ //number
this.type = "number";
this.value = +RegExp.$1;
} else if (/^#([a-f0-9]{6}|[a-f0-9]{3})/i.test(text)){ //hexcolor - exactly three or six hex digits
this.type = "color";
temp = RegExp.$1;
if (temp.length == 3){
this.red = parseInt(temp.charAt(0)+temp.charAt(0),16);
this.green = parseInt(temp.charAt(1)+temp.charAt(1),16);
this.blue = parseInt(temp.charAt(2)+temp.charAt(2),16);
} else {
this.red = parseInt(temp.substring(0,2),16);
this.green = parseInt(temp.substring(2,4),16);
this.blue = parseInt(temp.substring(4,6),16);
}
} else if (/^rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)/i.test(text)){ //rgb() color with absolute numbers
this.type = "color";
this.red = +RegExp.$1;
this.green = +RegExp.$2;
this.blue = +RegExp.$3;
} else if (/^rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)/i.test(text)){ //rgb() color with percentages
this.type = "color";
this.red = +RegExp.$1 * 255 / 100;
this.green = +RegExp.$2 * 255 / 100;
this.blue = +RegExp.$3 * 255 / 100;
} else if (/^rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*([\d\.]+)\s*\)/i.test(text)){ //rgba() color with absolute numbers
this.type = "color";
this.red = +RegExp.$1;
this.green = +RegExp.$2;
this.blue = +RegExp.$3;
this.alpha = +RegExp.$4;
} else if (/^rgba\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*,\s*([\d\.]+)\s*\)/i.test(text)){ //rgba() color with percentages
this.type = "color";
this.red = +RegExp.$1 * 255 / 100;
this.green = +RegExp.$2 * 255 / 100;
this.blue = +RegExp.$3 * 255 / 100;
this.alpha = +RegExp.$4;
} else if (/^hsl\(\s*(\d+)\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)/i.test(text)){ //hsl()
this.type = "color";
this.hue = +RegExp.$1;
this.saturation = +RegExp.$2 / 100;
this.lightness = +RegExp.$3 / 100;
} else if (/^hsla\(\s*(\d+)\s*,\s*(\d+)%\s*,\s*(\d+)%\s*,\s*([\d\.]+)\s*\)/i.test(text)){ //hsla() color with percentages
this.type = "color";
this.hue = +RegExp.$1;
this.saturation = +RegExp.$2 / 100;
this.lightness = +RegExp.$3 / 100;
this.alpha = +RegExp.$4;
} else if (/^url\(["']?([^\)"']+)["']?\)/i.test(text)){ //URI
this.type = "uri";
this.uri = RegExp.$1;
} else if (/^([^\(]+)\(/i.test(text)){
this.type = "function";
this.name = RegExp.$1;
this.value = text;
} else if (/^["'][^"']*["']/.test(text)){ //string
this.type = "string";
this.value = text.slice(1, -1).replace(/\\(.)/g, "$1"); //strip quotes and unescape; avoids eval() on untrusted input
} else if (Colors[text.toLowerCase()]){ //named color
this.type = "color";
temp = Colors[text.toLowerCase()].substring(1);
this.red = parseInt(temp.substring(0,2),16);
this.green = parseInt(temp.substring(2,4),16);
this.blue = parseInt(temp.substring(4,6),16);
} else if (/^[\,\/]$/.test(text)){
this.type = "operator";
this.value = text;
} else if (/^[a-z\-\u0080-\uFFFF][a-z0-9\-\u0080-\uFFFF]*$/i.test(text)){
this.type = "identifier";
this.value = text;
}
}
PropertyValuePart.prototype = new SyntaxUnit();
PropertyValuePart.prototype.constructor = PropertyValuePart;
/**
* Create a new syntax unit based solely on the given token.
* Convenience method for creating a new syntax unit when
* it represents a single token instead of multiple.
* @param {Object} token The token object to represent.
* @return {parserlib.css.PropertyValuePart} The object representing the token.
* @static
* @method fromToken
*/
PropertyValuePart.fromToken = function(token){
return new PropertyValuePart(token.value, token.startLine, token.startCol);
};
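/*
 * Usage sketch (the line/column arguments are placeholders):
 *
 *   var part = new PropertyValuePart("10px", 1, 1);
 *   part.type;    //"length"
 *   part.value;   //10
 *   part.units;   //"px"
 *
 *   var color = new PropertyValuePart("#ff0000", 1, 1);
 *   color.type;   //"color"
 *   color.red;    //255
 */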
var Pseudos = {
":first-letter": 1,
":first-line": 1,
":before": 1,
":after": 1
};
Pseudos.ELEMENT = 1;
Pseudos.CLASS = 2;
Pseudos.isElement = function(pseudo){
return pseudo.indexOf("::") === 0 || Pseudos[pseudo.toLowerCase()] == Pseudos.ELEMENT;
};
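/*
 * For example:
 *   Pseudos.isElement("::before");   //true - double-colon syntax
 *   Pseudos.isElement(":before");    //true - legacy single-colon element
 *   Pseudos.isElement(":hover");     //false - pseudo class
 */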
/*global SyntaxUnit, Parser, Specificity*/
/**
* Represents an entire single selector, including all parts but not
* including multiple selectors (those separated by commas).
* @namespace parserlib.css
* @class Selector
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {Array} parts Array of selectors parts making up this selector.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function Selector(parts, line, col){
SyntaxUnit.call(this, parts.join(" "), line, col, Parser.SELECTOR_TYPE);
/**
* The parts that make up the selector.
* @type Array
* @property parts
*/
this.parts = parts;
/**
* The specificity of the selector.
* @type parserlib.css.Specificity
* @property specificity
*/
this.specificity = Specificity.calculate(this);
}
Selector.prototype = new SyntaxUnit();
Selector.prototype.constructor = Selector;
/*global SyntaxUnit, Parser*/
/**
* Represents a single part of a selector string, meaning a single set of
* element name and modifiers. This does not include combinators such as
* spaces, +, >, etc.
* @namespace parserlib.css
* @class SelectorPart
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {String} elementName The element name in the selector or null
* if there is no element name.
* @param {Array} modifiers Array of individual modifiers for the element.
* May be empty if there are none.
* @param {String} text The text representation of the unit.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function SelectorPart(elementName, modifiers, text, line, col){
SyntaxUnit.call(this, text, line, col, Parser.SELECTOR_PART_TYPE);
/**
* The tag name of the element to which this part
* of the selector affects.
* @type String
* @property elementName
*/
this.elementName = elementName;
/**
* The parts that come after the element name, such as class names, IDs,
* pseudo classes/elements, etc.
* @type Array
* @property modifiers
*/
this.modifiers = modifiers;
}
SelectorPart.prototype = new SyntaxUnit();
SelectorPart.prototype.constructor = SelectorPart;
/*global SyntaxUnit, Parser*/
/**
* Represents a selector modifier string, meaning a class name, element name,
* element ID, pseudo rule, etc.
* @namespace parserlib.css
* @class SelectorSubPart
* @extends parserlib.util.SyntaxUnit
* @constructor
* @param {String} text The text representation of the unit.
* @param {String} type The type of selector modifier.
* @param {int} line The line of text on which the unit resides.
* @param {int} col The column of text on which the unit resides.
*/
function SelectorSubPart(text, type, line, col){
SyntaxUnit.call(this, text, line, col, Parser.SELECTOR_SUB_PART_TYPE);
/**
* The type of modifier.
* @type String
* @property type
*/
this.type = type;
/**
* Some subparts have arguments, this represents them.
* @type Array
* @property args
*/
this.args = [];
}
SelectorSubPart.prototype = new SyntaxUnit();
SelectorSubPart.prototype.constructor = SelectorSubPart;
/*global Pseudos, SelectorPart*/
/**
* Represents a selector's specificity.
* @namespace parserlib.css
* @class Specificity
* @constructor
* @param {int} a Should be 1 for inline styles, zero for stylesheet styles
* @param {int} b Number of ID selectors
* @param {int} c Number of classes and pseudo classes
* @param {int} d Number of element names and pseudo elements
*/
function Specificity(a, b, c, d){
this.a = a;
this.b = b;
this.c = c;
this.d = d;
}
Specificity.prototype = {
constructor: Specificity,
/**
* Compare this specificity to another.
* @param {Specificity} other The other specificity to compare to.
* @return {int} -1 if the other specificity is larger, 1 if smaller, 0 if equal.
* @method compare
*/
compare: function(other){
var comps = ["a", "b", "c", "d"],
i, len;
for (i=0, len=comps.length; i < len; i++){
if (this[comps[i]] < other[comps[i]]){
return -1;
} else if (this[comps[i]] > other[comps[i]]){
return 1;
}
}
return 0;
},
/**
* Creates a numeric value for the specificity.
* @return {int} The numeric value for the specificity.
* @method valueOf
*/
valueOf: function(){
return (this.a * 1000) + (this.b * 100) + (this.c * 10) + this.d;
},
/**
* Returns a string representation for specificity.
* @return {String} The string representation of specificity.
* @method toString
*/
toString: function(){
return this.a + "," + this.b + "," + this.c + "," + this.d;
}
};
/**
* Calculates the specificity of the given selector.
* @param {parserlib.css.Selector} selector The selector to calculate specificity for.
* @return {parserlib.css.Specificity} The specificity of the selector.
* @static
* @method calculate
*/
Specificity.calculate = function(selector){
var i, len,
part,
b=0, c=0, d=0;
function updateValues(part){
var i, j, len, num,
elementName = part.elementName ? part.elementName.text : "",
modifier;
if (elementName && elementName.charAt(elementName.length-1) != "*") {
d++;
}
for (i=0, len=part.modifiers.length; i < len; i++){
modifier = part.modifiers[i];
switch(modifier.type){
case "class":
case "attribute":
c++;
break;
case "id":
b++;
break;
case "pseudo":
if (Pseudos.isElement(modifier.text)){
d++;
} else {
c++;
}
break;
case "not":
for (j=0, num=modifier.args.length; j < num; j++){
updateValues(modifier.args[j]);
}
}
}
}
for (i=0, len=selector.parts.length; i < len; i++){
part = selector.parts[i];
if (part instanceof SelectorPart){
updateValues(part);
}
}
return new Specificity(0, b, c, d);
};
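/*
 * Usage sketch: Specificity objects can also be constructed and compared
 * directly (the selectors in the comments are only illustrative):
 *
 *   var a = new Specificity(0, 1, 0, 0),    //e.g. "#nav"
 *       b = new Specificity(0, 0, 1, 1);    //e.g. ".menu li"
 *   a.compare(b);    //1 - a is more specific
 *   a.valueOf();     //100
 *   b.toString();    //"0,0,1,1"
 */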
/*global Tokens, TokenStreamBase*/
var h = /^[0-9a-fA-F]$/,
nonascii = /^[\u0080-\uFFFF]$/,
nl = /\n|\r\n|\r|\f/;
//-----------------------------------------------------------------------------
// Helper functions
//-----------------------------------------------------------------------------
function isHexDigit(c){
return c !== null && h.test(c);
}
function isDigit(c){
return c !== null && /\d/.test(c);
}
function isWhitespace(c){
return c !== null && /\s/.test(c);
}
function isNewLine(c){
return c !== null && nl.test(c);
}
function isNameStart(c){
return c !== null && (/[a-z_\u0080-\uFFFF\\]/i.test(c));
}
function isNameChar(c){
return c !== null && (isNameStart(c) || /[0-9\-\\]/.test(c));
}
function isIdentStart(c){
return c !== null && (isNameStart(c) || /[\-\\]/.test(c));
}
function mix(receiver, supplier){
for (var prop in supplier){
if (supplier.hasOwnProperty(prop)){
receiver[prop] = supplier[prop];
}
}
return receiver;
}
//-----------------------------------------------------------------------------
// CSS Token Stream
//-----------------------------------------------------------------------------
/**
* A token stream that produces CSS tokens.
* @param {String|Reader} input The source of text to tokenize.
* @constructor
* @class TokenStream
* @namespace parserlib.css
*/
function TokenStream(input){
TokenStreamBase.call(this, input, Tokens);
}
TokenStream.prototype = mix(new TokenStreamBase(), {
/**
* Overrides the TokenStreamBase method of the same name
* to produce CSS tokens.
* @param {variant} channel The name of the channel to use
* for the next token.
* @return {Object} A token object representing the next token.
* @method _getToken
* @private
*/
_getToken: function(channel){
var c,
reader = this._reader,
token = null,
startLine = reader.getLine(),
startCol = reader.getCol();
c = reader.read();
while(c){
switch(c){
/*
* Potential tokens:
* - COMMENT
* - SLASH
* - CHAR
*/
case "/":
if(reader.peek() == "*"){
token = this.commentToken(c, startLine, startCol);
} else {
token = this.charToken(c, startLine, startCol);
}
break;
/*
* Potential tokens:
* - DASHMATCH
* - INCLUDES
* - PREFIXMATCH
* - SUFFIXMATCH
* - SUBSTRINGMATCH
* - CHAR
*/
case "|":
case "~":
case "^":
case "$":
case "*":
if(reader.peek() == "="){
token = this.comparisonToken(c, startLine, startCol);
} else {
token = this.charToken(c, startLine, startCol);
}
break;
/*
* Potential tokens:
* - STRING
* - INVALID
*/
case "\"":
case "'":
token = this.stringToken(c, startLine, startCol);
break;
/*
* Potential tokens:
* - HASH
* - CHAR
*/
case "#":
if (isNameChar(reader.peek())){
token = this.hashToken(c, startLine, startCol);
} else {
token = this.charToken(c, startLine, startCol);
}
break;
/*
* Potential tokens:
* - DOT
* - NUMBER
* - DIMENSION
* - PERCENTAGE
*/
case ".":
if (isDigit(reader.peek())){
token = this.numberToken(c, startLine, startCol);
} else {
token = this.charToken(c, startLine, startCol);
}
break;
/*
* Potential tokens:
* - CDC
* - MINUS
* - NUMBER
* - DIMENSION
* - PERCENTAGE
*/
case "-":
if (reader.peek() == "-"){ //could be closing HTML-style comment
token = this.htmlCommentEndToken(c, startLine, startCol);
} else if (isNameStart(reader.peek())){
token = this.identOrFunctionToken(c, startLine, startCol);
} else {
token = this.charToken(c, startLine, startCol);
}
break;
/*
* Potential tokens:
* - IMPORTANT_SYM
* - CHAR
*/
case "!":
token = this.importantToken(c, startLine, startCol);
break;
/*
* Any at-keyword or CHAR
*/
case "@":
token = this.atRuleToken(c, startLine, startCol);
break;
/*
* Potential tokens:
* - NOT
* - CHAR
*/
case ":":
token = this.notToken(c, startLine, startCol);
break;
/*
* Potential tokens:
* - CDO
* - CHAR
*/
case "<":
token = this.htmlCommentStartToken(c, startLine, startCol);
break;
/*
* Potential tokens:
* - UNICODE_RANGE
* - URL
* - CHAR
*/
case "U":
case "u":
if (reader.peek() == "+"){
token = this.unicodeRangeToken(c, startLine, startCol);
break;
}
/* falls through */
default:
/*
* Potential tokens:
* - NUMBER
* - DIMENSION
* - LENGTH
* - FREQ
* - TIME
* - EMS
* - EXS
* - ANGLE
*/
if (isDigit(c)){
token = this.numberToken(c, startLine, startCol);
} else
/*
* Potential tokens:
* - S
*/
if (isWhitespace(c)){
token = this.whitespaceToken(c, startLine, startCol);
} else
/*
* Potential tokens:
* - IDENT
*/
if (isIdentStart(c)){
token = this.identOrFunctionToken(c, startLine, startCol);
} else
/*
* Potential tokens:
* - CHAR
* - PLUS
*/
{
token = this.charToken(c, startLine, startCol);
}
}
//make sure this token is wanted
//TODO: check channel
break;
}
if (!token && c === null){
token = this.createToken(Tokens.EOF,null,startLine,startCol);
}
return token;
},
//-------------------------------------------------------------------------
// Methods to create tokens
//-------------------------------------------------------------------------
/**
* Produces a token based on available data and the current
* reader position information. This method is called by other
* private methods to create tokens and is never called directly.
* @param {int} tt The token type.
* @param {String} value The text value of the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @param {Object} options (Optional) Specifies a channel property
* to indicate that a different channel should be scanned
* and/or a hide property indicating that the token should
* be hidden.
* @return {Object} A token object.
* @method createToken
*/
createToken: function(tt, value, startLine, startCol, options){
var reader = this._reader;
options = options || {};
return {
value: value,
type: tt,
channel: options.channel,
hide: options.hide || false,
startLine: startLine,
startCol: startCol,
endLine: reader.getLine(),
endCol: reader.getCol()
};
},
//-------------------------------------------------------------------------
// Methods to create specific tokens
//-------------------------------------------------------------------------
/**
* Produces a token for any at-rule. If the at-rule is unknown, then
* the token is for a single "@" character.
* @param {String} first The first character for the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method atRuleToken
*/
atRuleToken: function(first, startLine, startCol){
var rule = first,
reader = this._reader,
tt = Tokens.CHAR,
valid = false,
ident,
c;
/*
* First, mark where we are. There are only four @ rules,
* so anything else is really just an invalid token.
* Basically, if this doesn't match one of the known @
* rules, just return '@' as an unknown token and allow
* parsing to continue after that point.
*/
reader.mark();
//try to find the at-keyword
ident = this.readName();
rule = first + ident;
tt = Tokens.type(rule.toLowerCase());
//if it's not valid, use the first character only and reset the reader
if (tt == Tokens.CHAR || tt == Tokens.UNKNOWN){
if (rule.length > 1){
tt = Tokens.UNKNOWN_SYM;
} else {
tt = Tokens.CHAR;
rule = first;
reader.reset();
}
}
return this.createToken(tt, rule, startLine, startCol);
},
/**
* Produces a character token based on the given character
* and location in the stream. If there's a special (non-standard)
* token name, this is used; otherwise CHAR is used.
* @param {String} c The character for the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method charToken
*/
charToken: function(c, startLine, startCol){
var tt = Tokens.type(c);
if (tt == -1){
tt = Tokens.CHAR;
}
return this.createToken(tt, c, startLine, startCol);
},
/**
* Produces a COMMENT token based on the given character
* and location in the stream, reading through to the end
* of the comment.
* @param {String} first The first character for the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method commentToken
*/
commentToken: function(first, startLine, startCol){
var reader = this._reader,
comment = this.readComment(first);
return this.createToken(Tokens.COMMENT, comment, startLine, startCol);
},
/**
* Produces a comparison token based on the given character
* and location in the stream. The next character must be
* read and is already known to be an equals sign.
* @param {String} c The character for the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method comparisonToken
*/
comparisonToken: function(c, startLine, startCol){
var reader = this._reader,
comparison = c + reader.read(),
tt = Tokens.type(comparison) || Tokens.CHAR;
return this.createToken(tt, comparison, startLine, startCol);
},
/**
* Produces a hash token based on the specified information. The
* first character provided is the pound sign (#) and then this
* method reads a name afterward.
* @param {String} first The first character (#) in the hash name.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method hashToken
*/
hashToken: function(first, startLine, startCol){
var reader = this._reader,
name = this.readName(first);
return this.createToken(Tokens.HASH, name, startLine, startCol);
},
/**
* Produces a CDO or CHAR token based on the specified information. The
* first character is provided and the rest is read by the function to determine
* the correct token to create.
* @param {String} first The first character in the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method htmlCommentStartToken
*/
htmlCommentStartToken: function(first, startLine, startCol){
var reader = this._reader,
text = first;
reader.mark();
text += reader.readCount(3);
if (text == "<!--"){
return this.createToken(Tokens.CDO, text, startLine, startCol);
} else {
reader.reset();
return this.charToken(first, startLine, startCol);
}
},
/**
* Produces a CDC or CHAR token based on the specified information. The
* first character is provided and the rest is read by the function to determine
* the correct token to create.
* @param {String} first The first character in the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method htmlCommentEndToken
*/
htmlCommentEndToken: function(first, startLine, startCol){
var reader = this._reader,
text = first;
reader.mark();
text += reader.readCount(2);
if (text == "-->"){
return this.createToken(Tokens.CDC, text, startLine, startCol);
} else {
reader.reset();
return this.charToken(first, startLine, startCol);
}
},
/**
* Produces an IDENT or FUNCTION token based on the specified information. The
* first character is provided and the rest is read by the function to determine
* the correct token to create.
* @param {String} first The first character in the identifier.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method identOrFunctionToken
*/
identOrFunctionToken: function(first, startLine, startCol){
var reader = this._reader,
ident = this.readName(first),
tt = Tokens.IDENT;
//if there's a left paren immediately after, it's a URI or function
if (reader.peek() == "("){
ident += reader.read();
if (ident.toLowerCase() == "url("){
tt = Tokens.URI;
ident = this.readURI(ident);
//didn't find a valid URL or there's no closing paren
if (ident.toLowerCase() == "url("){
tt = Tokens.FUNCTION;
}
} else {
tt = Tokens.FUNCTION;
}
} else if (reader.peek() == ":"){ //might be an IE function
//IE-specific functions always begin with progid:
if (ident.toLowerCase() == "progid"){
ident += reader.readTo("(");
tt = Tokens.IE_FUNCTION;
}
}
return this.createToken(tt, ident, startLine, startCol);
},
/**
* Produces an IMPORTANT_SYM or CHAR token based on the specified information. The
* first character is provided and the rest is read by the function to determine
* the correct token to create.
* @param {String} first The first character in the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method importantToken
*/
importantToken: function(first, startLine, startCol){
var reader = this._reader,
important = first,
tt = Tokens.CHAR,
temp,
c;
reader.mark();
c = reader.read();
while(c){
//there can be a comment in here
if (c == "/"){
//if the next character isn't a star, then this isn't a valid !important token
if (reader.peek() != "*"){
break;
} else {
temp = this.readComment(c);
if (temp === ""){ //broken!
break;
}
}
} else if (isWhitespace(c)){
important += c + this.readWhitespace();
} else if (/i/i.test(c)){
temp = reader.readCount(8);
if (/mportant/i.test(temp)){
important += c + temp;
tt = Tokens.IMPORTANT_SYM;
}
break; //we're done
} else {
break;
}
c = reader.read();
}
if (tt == Tokens.CHAR){
reader.reset();
return this.charToken(first, startLine, startCol);
} else {
return this.createToken(tt, important, startLine, startCol);
}
},
/**
* Produces a NOT or CHAR token based on the specified information. The
* first character is provided and the rest is read by the function to determine
* the correct token to create.
* @param {String} first The first character in the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method notToken
*/
notToken: function(first, startLine, startCol){
var reader = this._reader,
text = first;
reader.mark();
text += reader.readCount(4);
if (text.toLowerCase() == ":not("){
return this.createToken(Tokens.NOT, text, startLine, startCol);
} else {
reader.reset();
return this.charToken(first, startLine, startCol);
}
},
/**
* Produces a number token based on the given character
* and location in the stream. This may return a token of
* NUMBER, EMS, EXS, LENGTH, ANGLE, TIME, FREQ, DIMENSION,
* or PERCENTAGE.
* @param {String} first The first character for the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method numberToken
*/
numberToken: function(first, startLine, startCol){
var reader = this._reader,
value = this.readNumber(first),
ident,
tt = Tokens.NUMBER,
c = reader.peek();
if (isIdentStart(c)){
ident = this.readName(reader.read());
value += ident;
if (/^em$|^ex$|^px$|^gd$|^rem$|^vw$|^vh$|^vm$|^ch$|^cm$|^mm$|^in$|^pt$|^pc$/i.test(ident)){
tt = Tokens.LENGTH;
} else if (/^deg$|^rad$|^grad$/i.test(ident)){
tt = Tokens.ANGLE;
} else if (/^ms$|^s$/i.test(ident)){
tt = Tokens.TIME;
} else if (/^hz$|^khz$/i.test(ident)){
tt = Tokens.FREQ;
} else if (/^dpi$|^dpcm$/i.test(ident)){
tt = Tokens.RESOLUTION;
} else {
tt = Tokens.DIMENSION;
}
} else if (c == "%"){
value += reader.read();
tt = Tokens.PERCENTAGE;
}
return this.createToken(tt, value, startLine, startCol);
},
/**
* Produces a string token based on the given character
* and location in the stream. Since strings may be indicated
* by single or double quotes, a failure to match starting
* and ending quotes results in an INVALID token being generated.
* The first character in the string is passed in and then
* the rest are read up to and including the final quotation mark.
* @param {String} first The first character in the string.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method stringToken
*/
stringToken: function(first, startLine, startCol){
var delim = first,
string = first,
reader = this._reader,
prev = first,
tt = Tokens.STRING,
c = reader.read();
while(c){
string += c;
//if the delimiter is found without an escapement, we're done.
if (c == delim && prev != "\\"){
break;
}
//if there's a newline without an escapement, it's an invalid string
if (isNewLine(reader.peek()) && c != "\\"){
tt = Tokens.INVALID;
break;
}
//save previous and get next
prev = c;
c = reader.read();
}
//if c is null, that means we're out of input and the string was never closed
if (c === null){
tt = Tokens.INVALID;
}
return this.createToken(tt, string, startLine, startCol);
},
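/**
 * Produces a UNICODE_RANGE or CHAR token based on the specified information.
 * A unicode range is u+ followed by up to six hex digits or question marks,
 * optionally followed by a hyphen and a second range part.
 * @param {String} first The first character in the token.
 * @param {int} startLine The beginning line for the character.
 * @param {int} startCol The beginning column for the character.
 * @return {Object} A token object.
 * @method unicodeRangeToken
 */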
unicodeRangeToken: function(first, startLine, startCol){
var reader = this._reader,
value = first,
temp,
tt = Tokens.CHAR;
//then it should be a unicode range
if (reader.peek() == "+"){
reader.mark();
value += reader.read();
value += this.readUnicodeRangePart(true);
//ensure there's an actual unicode range here
if (value.length == 2){
reader.reset();
} else {
tt = Tokens.UNICODE_RANGE;
//if there's a ? in the first part, there can't be a second part
if (value.indexOf("?") == -1){
if (reader.peek() == "-"){
reader.mark();
temp = reader.read();
temp += this.readUnicodeRangePart(false);
//if there's not another value, back up and just take the first
if (temp.length == 1){
reader.reset();
} else {
value += temp;
}
}
}
}
}
return this.createToken(tt, value, startLine, startCol);
},
/**
* Produces an S token based on the specified information. Since whitespace
* may have multiple characters, this consumes all whitespace characters
* into a single token.
* @param {String} first The first character in the token.
* @param {int} startLine The beginning line for the character.
* @param {int} startCol The beginning column for the character.
* @return {Object} A token object.
* @method whitespaceToken
*/
whitespaceToken: function(first, startLine, startCol){
var reader = this._reader,
value = first + this.readWhitespace();
return this.createToken(Tokens.S, value, startLine, startCol);
},
//-------------------------------------------------------------------------
// Methods to read values from the string stream
//-------------------------------------------------------------------------
readUnicodeRangePart: function(allowQuestionMark){
var reader = this._reader,
part = "",
c = reader.peek();
//first read hex digits
while(isHexDigit(c) && part.length < 6){
reader.read();
part += c;
c = reader.peek();
}
//then read question marks if allowed
if (allowQuestionMark){
while(c == "?" && part.length < 6){
reader.read();
part += c;
c = reader.peek();
}
}
//there can't be any other characters after this point
return part;
},
readWhitespace: function(){
var reader = this._reader,
whitespace = "",
c = reader.peek();
while(isWhitespace(c)){
reader.read();
whitespace += c;
c = reader.peek();
}
return whitespace;
},
readNumber: function(first){
var reader = this._reader,
number = first,
hasDot = (first == "."),
c = reader.peek();
while(c){
if (isDigit(c)){
number += reader.read();
} else if (c == "."){
if (hasDot){
break;
} else {
hasDot = true;
number += reader.read();
}
} else {
break;
}
c = reader.peek();
}
return number;
},
readString: function(){
var reader = this._reader,
delim = reader.read(),
string = delim,
prev = delim,
c = reader.peek();
while(c){
c = reader.read();
string += c;
//if the delimiter is found without an escapement, we're done.
if (c == delim && prev != "\\"){
break;
}
//if there's a newline without an escapement, it's an invalid string
if (isNewLine(reader.peek()) && c != "\\"){
string = "";
break;
}
//save previous and get next
prev = c;
c = reader.peek();
}
//if c is null, that means we're out of input and the string was never closed
if (c === null){
string = "";
}
return string;
},
readURI: function(first){
var reader = this._reader,
uri = first,
inner = "",
c = reader.peek();
reader.mark();
//skip whitespace before
while(c && isWhitespace(c)){
reader.read();
c = reader.peek();
}
//it's a string
if (c == "'" || c == "\""){
inner = this.readString();
} else {
inner = this.readURL();
}
c = reader.peek();
//skip whitespace after
while(c && isWhitespace(c)){
reader.read();
c = reader.peek();
}
//if there was no inner value or the next character isn't closing paren, it's not a URI
if (inner === "" || c != ")"){
uri = first;
reader.reset();
} else {
uri += inner + reader.read();
}
return uri;
},
readURL: function(){
var reader = this._reader,
url = "",
c = reader.peek();
//TODO: Check for escape and nonascii
while (/^[!#$%&\\*-~]$/.test(c)){
url += reader.read();
c = reader.peek();
}
return url;
},
readName: function(first){
var reader = this._reader,
ident = first || "",
c = reader.peek();
while(true){
if (c == "\\"){
ident += this.readEscape(reader.read());
c = reader.peek();
} else if(c && isNameChar(c)){
ident += reader.read();
c = reader.peek();
} else {
break;
}
}
return ident;
},
readEscape: function(first){
var reader = this._reader,
cssEscape = first || "",
i = 0,
c = reader.peek();
if (isHexDigit(c)){
do {
cssEscape += reader.read();
c = reader.peek();
} while(c && isHexDigit(c) && ++i < 6);
}
if (cssEscape.length == 3 && /\s/.test(c) ||
cssEscape.length == 7 || cssEscape.length == 1){
reader.read();
} else {
c = "";
}
return cssEscape + c;
},
readComment: function(first){
var reader = this._reader,
comment = first || "",
c = reader.read();
if (c == "*"){
while(c){
comment += c;
//look for end of comment
if (comment.length > 2 && c == "*" && reader.peek() == "/"){
comment += reader.read();
break;
}
c = reader.read();
}
return comment;
} else {
return "";
}
}
});
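/*
 * Usage sketch (illustrative only; assumes the get() and token() methods
 * inherited from TokenStreamBase, which is defined elsewhere):
 *
 *   var stream = new TokenStream("a { color: red; }"),
 *       tt = stream.get();
 *   while (tt !== Tokens.EOF){
 *       //Tokens.name() maps the numeric type back to its name
 *       console.log(Tokens.name(tt) + ": " + stream.token().value);
 *       tt = stream.get();
 *   }
 */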
var Tokens = [
/*
* The following token names are defined in CSS3 Grammar: http://www.w3.org/TR/css3-syntax/#lexical
*/
//HTML-style comments
{ name: "CDO"},
{ name: "CDC"},
//ignorables
{ name: "S", whitespace: true/*, channel: "ws"*/},
{ name: "COMMENT", comment: true, hide: true, channel: "comment" },
//attribute equality
{ name: "INCLUDES", text: "~="},
{ name: "DASHMATCH", text: "|="},
{ name: "PREFIXMATCH", text: "^="},
{ name: "SUFFIXMATCH", text: "$="},
{ name: "SUBSTRINGMATCH", text: "*="},
//identifier types
{ name: "STRING"},
{ name: "IDENT"},
{ name: "HASH"},
//at-keywords
{ name: "IMPORT_SYM", text: "@import"},
{ name: "PAGE_SYM", text: "@page"},
{ name: "MEDIA_SYM", text: "@media"},
{ name: "FONT_FACE_SYM", text: "@font-face"},
{ name: "CHARSET_SYM", text: "@charset"},
{ name: "NAMESPACE_SYM", text: "@namespace"},
{ name: "UNKNOWN_SYM" },
//{ name: "ATKEYWORD"},
//CSS3 animations
{ name: "KEYFRAMES_SYM", text: [ "@keyframes", "@-webkit-keyframes", "@-moz-keyframes", "@-o-keyframes" ] },
//important symbol
{ name: "IMPORTANT_SYM"},
//measurements
{ name: "LENGTH"},
{ name: "ANGLE"},
{ name: "TIME"},
{ name: "FREQ"},
{ name: "DIMENSION"},
{ name: "PERCENTAGE"},
{ name: "NUMBER"},
//functions
{ name: "URI"},
{ name: "FUNCTION"},
//Unicode ranges
{ name: "UNICODE_RANGE"},
/*
* The following token names are defined in CSS3 Selectors: http://www.w3.org/TR/css3-selectors/#selector-syntax
*/
//invalid string
{ name: "INVALID"},
//combinators
{ name: "PLUS", text: "+" },
{ name: "GREATER", text: ">"},
{ name: "COMMA", text: ","},
{ name: "TILDE", text: "~"},
//modifier
{ name: "NOT"},
/*
* Defined in CSS3 Paged Media
*/
{ name: "TOPLEFTCORNER_SYM", text: "@top-left-corner"},
{ name: "TOPLEFT_SYM", text: "@top-left"},
{ name: "TOPCENTER_SYM", text: "@top-center"},
{ name: "TOPRIGHT_SYM", text: "@top-right"},
{ name: "TOPRIGHTCORNER_SYM", text: "@top-right-corner"},
{ name: "BOTTOMLEFTCORNER_SYM", text: "@bottom-left-corner"},
{ name: "BOTTOMLEFT_SYM", text: "@bottom-left"},
{ name: "BOTTOMCENTER_SYM", text: "@bottom-center"},
{ name: "BOTTOMRIGHT_SYM", text: "@bottom-right"},
{ name: "BOTTOMRIGHTCORNER_SYM", text: "@bottom-right-corner"},
{ name: "LEFTTOP_SYM", text: "@left-top"},
{ name: "LEFTMIDDLE_SYM", text: "@left-middle"},
{ name: "LEFTBOTTOM_SYM", text: "@left-bottom"},
{ name: "RIGHTTOP_SYM", text: "@right-top"},
{ name: "RIGHTMIDDLE_SYM", text: "@right-middle"},
{ name: "RIGHTBOTTOM_SYM", text: "@right-bottom"},
/*
* The following token names are defined in CSS3 Media Queries: http://www.w3.org/TR/css3-mediaqueries/#syntax
*/
/*{ name: "MEDIA_ONLY", state: "media"},
{ name: "MEDIA_NOT", state: "media"},
{ name: "MEDIA_AND", state: "media"},*/
{ name: "RESOLUTION", state: "media"},
/*
* The following token names are not defined in any CSS specification but are used by the lexer.
*/
//not a real token, but useful for stupid IE filters
{ name: "IE_FUNCTION" },
//part of CSS3 grammar but not the Flex code
{ name: "CHAR" },
//TODO: Needed?
//Not defined as tokens, but might as well be
{ name: "PIPE", text: "|"},
{ name: "SLASH", text: "/"},
{ name: "MINUS", text: "-"},
{ name: "STAR", text: "*"},
{ name: "LBRACE", text: "{"},
{ name: "RBRACE", text: "}"},
{ name: "LBRACKET", text: "["},
{ name: "RBRACKET", text: "]"},
{ name: "EQUALS", text: "="},
{ name: "COLON", text: ":"},
{ name: "SEMICOLON", text: ";"},
{ name: "LPAREN", text: "("},
{ name: "RPAREN", text: ")"},
{ name: "DOT", text: "."}
];
(function(){
var nameMap = [],
typeMap = {};
Tokens.UNKNOWN = -1;
Tokens.unshift({name:"EOF"});
for (var i=0, len = Tokens.length; i < len; i++){
nameMap.push(Tokens[i].name);
Tokens[Tokens[i].name] = i;
if (Tokens[i].text){
if (Tokens[i].text instanceof Array){
for (var j=0; j < Tokens[i].text.length; j++){
typeMap[Tokens[i].text[j]] = i;
}
} else {
typeMap[Tokens[i].text] = i;
}
}
}
Tokens.name = function(tt){
return nameMap[tt];
};
Tokens.type = function(c){
return typeMap[c] || -1;
};
})();
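/*
 * For example, after the initialization above:
 *   Tokens.type("@import") === Tokens.IMPORT_SYM;   //true
 *   Tokens.type("?");                               //-1 - unknown text
 *   Tokens.name(Tokens.CDO);                        //"CDO"
 */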
//This file will likely change a lot! Very experimental!
/*global Properties, ValidationTypes, ValidationError, PropertyValueIterator */
var Validation = {
validate: function(property, value){
//normalize name
var name = property.toString().toLowerCase(),
parts = value.parts,
expression = new PropertyValueIterator(value),
spec = Properties[name],
part,
valid,
j, count,
msg,
types,
last,
literals,
max, multi, group;
if (!spec) {
if (name.indexOf("-") !== 0){ //vendor prefixed are ok
throw new ValidationError("Unknown property '" + property + "'.", property.line, property.col);
}
} else if (typeof spec != "number"){
//initialization
if (typeof spec == "string"){
if (spec.indexOf("||") > -1) {
this.groupProperty(spec, expression);
} else {
this.singleProperty(spec, expression, 1);
}
} else if (spec.multi) {
this.multiProperty(spec.multi, expression, spec.comma, spec.max || Infinity);
} else if (typeof spec == "function") {
spec(expression);
}
}
},
singleProperty: function(types, expression, max, partial) {
var result = false,
value = expression.value,
count = 0,
part;
while (expression.hasNext() && count < max) {
result = ValidationTypes.isAny(expression, types);
if (!result) {
break;
}
count++;
}
if (!result) {
if (expression.hasNext() && !expression.isFirst()) {
part = expression.peek();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
throw new ValidationError("Expected (" + types + ") but found '" + value + "'.", value.line, value.col);
}
} else if (expression.hasNext()) {
part = expression.next();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
}
},
multiProperty: function (types, expression, comma, max) {
var result = false,
value = expression.value,
count = 0,
sep = false,
part;
while(expression.hasNext() && !result && count < max) {
if (ValidationTypes.isAny(expression, types)) {
count++;
if (!expression.hasNext()) {
result = true;
} else if (comma) {
if (expression.peek() == ",") {
part = expression.next();
} else {
break;
}
}
} else {
break;
}
}
if (!result) {
if (expression.hasNext() && !expression.isFirst()) {
part = expression.peek();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
part = expression.previous();
if (comma && part == ",") {
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
throw new ValidationError("Expected (" + types + ") but found '" + value + "'.", value.line, value.col);
}
}
} else if (expression.hasNext()) {
part = expression.next();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
}
},
groupProperty: function (types, expression, comma) {
var result = false,
value = expression.value,
typeCount = types.split("||").length,
groups = { count: 0 },
partial = false,
name,
part;
while(expression.hasNext() && !result) {
name = ValidationTypes.isAnyOfGroup(expression, types);
if (name) {
//no dupes
if (groups[name]) {
break;
} else {
groups[name] = 1;
groups.count++;
partial = true;
if (groups.count == typeCount || !expression.hasNext()) {
result = true;
}
}
} else {
break;
}
}
if (!result) {
if (partial && expression.hasNext()) {
part = expression.peek();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
} else {
throw new ValidationError("Expected (" + types + ") but found '" + value + "'.", value.line, value.col);
}
} else if (expression.hasNext()) {
part = expression.next();
throw new ValidationError("Expected end of value but found '" + part + "'.", part.line, part.col);
}
}
};
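/*
 * Usage sketch (illustrative only; the parser normally supplies the
 * PropertyName and PropertyValue objects):
 *
 *   var parts = [new PropertyValuePart("#ff0000", 1, 10)],
 *       value = new PropertyValue(parts, 1, 10),
 *       name  = new PropertyName("color", null, 1, 1);
 *   try {
 *       Validation.validate(name, value);   //passes: color accepts <color>
 *   } catch (ex){
 *       //ex is a ValidationError carrying message, line, and col
 *   }
 */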
/**
* Type to use when a validation error occurs.
* @class ValidationError
* @namespace parserlib.util
* @constructor
* @param {String} message The error message.
* @param {int} line The line at which the error occurred.
* @param {int} col The column at which the error occurred.
*/
function ValidationError(message, line, col){
/**
* The column at which the error occurred.
* @type int
* @property col
*/
this.col = col;
/**
* The line at which the error occurred.
* @type int
* @property line
*/
this.line = line;
/**
* The text representation of the unit.
* @type String
* @property text
*/
this.message = message;
}
//inherit from Error
ValidationError.prototype = new Error();
//This file will likely change a lot! Very experimental!
/*global Properties, Validation, ValidationError, PropertyValueIterator, console*/
var ValidationTypes = {
isLiteral: function (part, literals) {
var text = part.text.toString().toLowerCase(),
args = literals.split(" | "),
i, len, found = false;
for (i=0,len=args.length; i < len && !found; i++){
if (text == args[i].toLowerCase()){
found = true;
}
}
return found;
},
isSimple: function(type) {
return !!this.simple[type];
},
isComplex: function(type) {
return !!this.complex[type];
},
/**
* Determines if the next part(s) of the given expression
* are any of the given types.
*/
isAny: function (expression, types) {
var args = types.split(" | "),
i, len, found = false;
for (i=0,len=args.length; i < len && !found && expression.hasNext(); i++){
found = this.isType(expression, args[i]);
}
return found;
},
/**
* Determines if the next part(s) of the given expression
* are one of a group.
*/
isAnyOfGroup: function(expression, types) {
var args = types.split(" || "),
i, len, found = false;
for (i=0,len=args.length; i < len && !found; i++){
found = this.isType(expression, args[i]);
}
return found ? args[i-1] : false;
},
/**
* Determines if the next part(s) of the given expression
* are of a given type.
*/
isType: function (expression, type) {
var part = expression.peek(),
result = false;
if (type.charAt(0) != "<") {
result = this.isLiteral(part, type);
if (result) {
expression.next();
}
} else if (this.simple[type]) {
result = this.simple[type](part);
if (result) {
expression.next();
}
} else {
result = this.complex[type](expression);
}
return result;
},
simple: {
"<absolute-size>": function(part){
return ValidationTypes.isLiteral(part, "xx-small | x-small | small | medium | large | x-large | xx-large");
},
"<attachment>": function(part){
return ValidationTypes.isLiteral(part, "scroll | fixed | local");
},
"<attr>": function(part){
return part.type == "function" && part.name == "attr";
},
"<bg-image>": function(part){
return this["<image>"](part) || this["<gradient>"](part) || part == "none";
},
"<gradient>": function(part) {
return part.type == "function" && /^(?:\-(?:ms|moz|o|webkit)\-)?(?:repeating\-)?(?:radial\-|linear\-)?gradient/i.test(part);
},
"<box>": function(part){
return ValidationTypes.isLiteral(part, "padding-box | border-box | content-box");
},
"<content>": function(part){
return part.type == "function" && part.name == "content";
},
"<relative-size>": function(part){
return ValidationTypes.isLiteral(part, "smaller | larger");
},
//any identifier
"<ident>": function(part){
return part.type == "identifier";
},
"<length>": function(part){
return part.type == "length" || part.type == "number" || part.type == "integer" || part == "0";
},
"<color>": function(part){
return part.type == "color" || part == "transparent";
},
"<number>": function(part){
return part.type == "number" || this["<integer>"](part);
},
"<integer>": function(part){
return part.type == "integer";
},
"<line>": function(part){
return part.type == "integer";
},
"<angle>": function(part){
return part.type == "angle";
},
"<uri>": function(part){
return part.type == "uri";
},
"<image>": function(part){
return this["<uri>"](part);
},
"<percentage>": function(part){
return part.type == "percentage" || part == "0";
},
"<border-width>": function(part){
return this["<length>"](part) || ValidationTypes.isLiteral(part, "thin | medium | thick");
},
"<border-style>": function(part){
return ValidationTypes.isLiteral(part, "none | hidden | dotted | dashed | solid | double | groove | ridge | inset | outset");
},
"<margin-width>": function(part){
return this["<length>"](part) || this["<percentage>"](part) || ValidationTypes.isLiteral(part, "auto");
},
"<padding-width>": function(part){
return this["<length>"](part) || this["<percentage>"](part);
},
"<shape>": function(part){
return part.type == "function" && (part.name == "rect" || part.name == "inset-rect");
},
"<time>": function(part) {
return part.type == "time";
}
},
complex: {
"<bg-position>": function(expression){
var types = this,
result = false,
numeric = "<percentage> | <length>",
xDir = "left | center | right",
yDir = "top | center | bottom",
part,
i, len;
/*
<position> = [
[ left | center | right | top | bottom | <percentage> | <length> ]
|
[ left | center | right | <percentage> | <length> ]
[ top | center | bottom | <percentage> | <length> ]
|
[ center | [ left | right ] [ <percentage> | <length> ]? ] &&
[ center | [ top | bottom ] [ <percentage> | <length> ]? ]
]
*/
if (ValidationTypes.isAny(expression, "top | bottom")) {
result = true;
} else {
//must be two-part
if (ValidationTypes.isAny(expression, numeric)){
if (expression.hasNext()){
result = ValidationTypes.isAny(expression, numeric + " | " + yDir);
}
} else if (ValidationTypes.isAny(expression, xDir)){
if (expression.hasNext()){
//two- or three-part
if (ValidationTypes.isAny(expression, yDir)){
result = true;
ValidationTypes.isAny(expression, numeric);
} else if (ValidationTypes.isAny(expression, numeric)){
//could also be two-part, so check the next part
if (ValidationTypes.isAny(expression, yDir)){
ValidationTypes.isAny(expression, numeric);
}
result = true;
}
}
}
}
return result;
},
"<bg-size>": function(expression){
//<bg-size> = [ <length> | <percentage> | auto ]{1,2} | cover | contain
var types = this,
result = false,
numeric = "<percentage> | <length> | auto",
part,
i, len;
if (ValidationTypes.isAny(expression, "cover | contain")) {
result = true;
} else if (ValidationTypes.isAny(expression, numeric)) {
result = true;
ValidationTypes.isAny(expression, numeric);
}
return result;
},
"<repeat-style>": function(expression){
//repeat-x | repeat-y | [repeat | space | round | no-repeat]{1,2}
var result = false,
values = "repeat | space | round | no-repeat",
part;
if (expression.hasNext()){
part = expression.next();
if (ValidationTypes.isLiteral(part, "repeat-x | repeat-y")) {
result = true;
} else if (ValidationTypes.isLiteral(part, values)) {
result = true;
if (expression.hasNext() && ValidationTypes.isLiteral(expression.peek(), values)) {
expression.next();
}
}
}
return result;
},
"<shadow>": function(expression) {
//inset? && [ <length>{2,4} && <color>? ]
var result = false,
count = 0,
inset = false,
color = false,
part;
if (expression.hasNext()) {
if (ValidationTypes.isAny(expression, "inset")){
inset = true;
}
if (ValidationTypes.isAny(expression, "<color>")) {
color = true;
}
while (ValidationTypes.isAny(expression, "<length>") && count < 4) {
count++;
}
if (expression.hasNext()) {
if (!color) {
ValidationTypes.isAny(expression, "<color>");
}
if (!inset) {
ValidationTypes.isAny(expression, "inset");
}
}
result = (count >= 2 && count <= 4);
}
return result;
},
"<x-one-radius>": function(expression) {
//[ <length> | <percentage> ] [ <length> | <percentage> ]?
var result = false,
count = 0,
numeric = "<length> | <percentage>",
part;
if (ValidationTypes.isAny(expression, numeric)){
result = true;
ValidationTypes.isAny(expression, numeric);
}
return result;
}
}
};
parserlib.css = {
Colors :Colors,
Combinator :Combinator,
Parser :Parser,
PropertyName :PropertyName,
PropertyValue :PropertyValue,
PropertyValuePart :PropertyValuePart,
MediaFeature :MediaFeature,
MediaQuery :MediaQuery,
Selector :Selector,
SelectorPart :SelectorPart,
SelectorSubPart :SelectorSubPart,
Specificity :Specificity,
TokenStream :TokenStream,
Tokens :Tokens,
ValidationError :ValidationError
};
})();
/**
* Main CSSLint object.
* @class CSSLint
* @static
* @extends parserlib.util.EventTarget
*/
/*global parserlib, Reporter*/
var CSSLint = (function(){
var rules = [],
formatters = [],
api = new parserlib.util.EventTarget();
api.version = "0.9.8";
//-------------------------------------------------------------------------
// Rule Management
//-------------------------------------------------------------------------
/**
* Adds a new rule to the engine.
* @param {Object} rule The rule to add.
* @method addRule
*/
api.addRule = function(rule){
rules.push(rule);
rules[rule.id] = rule;
};
/**
* Clears all rules from the engine.
* @method clearRules
*/
api.clearRules = function(){
rules = [];
};
/**
* Returns the rule objects.
* @return An array of rule objects.
* @method getRules
*/
api.getRules = function(){
return [].concat(rules).sort(function(a,b){
//sort by rule id; a well-formed comparator must also return -1
return a.id > b.id ? 1 : (a.id < b.id ? -1 : 0);
});
};
/**
* Returns a ruleset configuration object with all current rules.
* @return A ruleset object.
* @method getRuleset
*/
api.getRuleset = function() {
var ruleset = {},
i = 0,
len = rules.length;
while (i < len){
ruleset[rules[i++].id] = 1; //by default, everything is a warning
}
return ruleset;
};
//-------------------------------------------------------------------------
// Formatters
//-------------------------------------------------------------------------
/**
* Adds a new formatter to the engine.
* @param {Object} formatter The formatter to add.
* @method addFormatter
*/
api.addFormatter = function(formatter) {
// formatters.push(formatter);
formatters[formatter.id] = formatter;
};
/**
* Retrieves a formatter for use.
* @param {String} formatId The name of the format to retrieve.
* @return {Object} The formatter or undefined.
* @method getFormatter
*/
api.getFormatter = function(formatId){
return formatters[formatId];
};
/**
* Formats the results in a particular format for a single file.
* @param {Object} result The results returned from CSSLint.verify().
* @param {String} filename The filename for which the results apply.
* @param {String} formatId The name of the formatter to use.
* @param {Object} options (Optional) for special output handling.
* @return {String} A formatted string for the results.
* @method format
*/
api.format = function(results, filename, formatId, options) {
var formatter = this.getFormatter(formatId),
result = null;
if (formatter){
result = formatter.startFormat();
result += formatter.formatResults(results, filename, options || {});
result += formatter.endFormat();
}
return result;
};
/**
* Indicates if the given format is supported.
* @param {String} formatId The ID of the format to check.
* @return {Boolean} True if the format exists, false if not.
* @method hasFormat
*/
api.hasFormat = function(formatId){
return formatters.hasOwnProperty(formatId);
};
//-------------------------------------------------------------------------
// Verification
//-------------------------------------------------------------------------
/**
* Starts the verification process for the given CSS text.
* @param {String} text The CSS text to verify.
* @param {Object} ruleset (Optional) List of rules to apply. If null, then
* all rules are used. If a rule has a value of 1 then it's a warning,
* a value of 2 means it's an error.
* @return {Object} Results of the verification.
* @method verify
*/
api.verify = function(text, ruleset){
var i = 0,
len = rules.length,
reporter,
lines,
report,
parser = new parserlib.css.Parser({ starHack: true, ieFilters: true,
underscoreHack: true, strict: false });
// normalize line endings
lines = text.replace(/\n\r?/g, "$split$").split('$split$');
if (!ruleset){
ruleset = this.getRuleset();
}
reporter = new Reporter(lines, ruleset);
ruleset.errors = 2; //always report parsing errors as errors
for (i in ruleset){
if(ruleset.hasOwnProperty(i)){
if (rules[i]){
rules[i].init(parser, reporter);
}
}
}
//capture most horrible error type
try {
parser.parse(text);
} catch (ex) {
reporter.error("Fatal error, cannot continue: " + ex.message, ex.line, ex.col, {});
}
report = {
messages : reporter.messages,
stats : reporter.stats
};
//sort by line numbers, rollups at the bottom
report.messages.sort(function (a, b){
if (a.rollup && !b.rollup){
return 1;
} else if (!a.rollup && b.rollup){
return -1;
} else {
return a.line - b.line;
}
});
return report;
};
//-------------------------------------------------------------------------
// Publish the API
//-------------------------------------------------------------------------
return api;
})();
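/*
 * Usage sketch for the API above (illustrative only; the stylesheet string,
 * the ruleset tweak, and the variable names are made up for this example):
 *
 *     var ruleset = CSSLint.getRuleset();     // every registered rule as a warning (1)
 *     ruleset.ids = 2;                        // promote one rule to an error
 *     var results = CSSLint.verify("#nav a { color: red; }", ruleset);
 *     for (var i = 0; i < results.messages.length; i++) {
 *         // each message: { type, line, col, message, evidence, rule }
 *         console.log(results.messages[i].type + ": " + results.messages[i].message);
 *     }
 */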
/*global CSSLint*/
/**
* An instance of Report is used to report results of the
* verification back to the main API.
* @class Reporter
* @constructor
* @param {String[]} lines The text lines of the source.
* @param {Object} ruleset The set of rules to work with, including if
* they are errors or warnings.
*/
function Reporter(lines, ruleset){
/**
* List of messages being reported.
* @property messages
* @type Object[]
*/
this.messages = [];
/**
* Map of statistics being reported, keyed by stat name.
* @property stats
* @type Object
*/
this.stats = [];
/**
* Lines of code being reported on. Used to provide contextual information
* for messages.
* @property lines
* @type String[]
*/
this.lines = lines;
/**
* Information about the rules. Used to determine whether an issue is an
* error or warning.
* @property ruleset
* @type Object
*/
this.ruleset = ruleset;
}
Reporter.prototype = {
//restore constructor
constructor: Reporter,
/**
* Report an error.
* @param {String} message The message to store.
* @param {int} line The line number.
* @param {int} col The column number.
* @param {Object} rule The rule this message relates to.
* @method error
*/
error: function(message, line, col, rule){
this.messages.push({
type : "error",
line : line,
col : col,
message : message,
evidence: this.lines[line-1],
rule : rule || {}
});
},
/**
* Report a warning.
* @param {String} message The message to store.
* @param {int} line The line number.
* @param {int} col The column number.
* @param {Object} rule The rule this message relates to.
* @method warn
* @deprecated Use report instead.
*/
warn: function(message, line, col, rule){
this.report(message, line, col, rule);
},
/**
* Report an issue.
* @param {String} message The message to store.
* @param {int} line The line number.
* @param {int} col The column number.
* @param {Object} rule The rule this message relates to.
* @method report
*/
report: function(message, line, col, rule){
this.messages.push({
type : this.ruleset[rule.id] == 2 ? "error" : "warning",
line : line,
col : col,
message : message,
evidence: this.lines[line-1],
rule : rule
});
},
/**
* Report some informational text.
* @param {String} message The message to store.
* @param {int} line The line number.
* @param {int} col The column number.
* @param {Object} rule The rule this message relates to.
* @method info
*/
info: function(message, line, col, rule){
this.messages.push({
type : "info",
line : line,
col : col,
message : message,
evidence: this.lines[line-1],
rule : rule
});
},
/**
* Report some rollup error information.
* @param {String} message The message to store.
* @param {Object} rule The rule this message relates to.
* @method rollupError
*/
rollupError: function(message, rule){
this.messages.push({
type : "error",
rollup : true,
message : message,
rule : rule
});
},
/**
* Report some rollup warning information.
* @param {String} message The message to store.
* @param {Object} rule The rule this message relates to.
* @method rollupWarn
*/
rollupWarn: function(message, rule){
this.messages.push({
type : "warning",
rollup : true,
message : message,
rule : rule
});
},
/**
* Report a statistic.
* @param {String} name The name of the stat to store.
* @param {Variant} value The value of the stat.
* @method stat
*/
stat: function(name, value){
this.stats[name] = value;
}
};
//expose for testing purposes
CSSLint._Reporter = Reporter;
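/*
 * Sketch of how rule code talks to the Reporter (the message strings are
 * illustrative; `rule` is whatever object was passed to CSSLint.addRule(),
 * and `event` is a parser event as seen in the rules below):
 *
 *     reporter.report("Example issue.", event.line, event.col, rule); // warning or error, per ruleset
 *     reporter.rollupWarn("Example stylesheet-wide warning.", rule);  // no line/col; sorted to the bottom
 *     reporter.stat("example-stat", 42);                             // surfaces in results.stats
 */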
/*global CSSLint*/
/*
* Utility functions that make life easier.
*/
CSSLint.Util = {
/*
* Adds all properties from supplier onto receiver,
* overwriting if the same name already exists on
* receiver.
* @param {Object} receiver The object to receive the properties.
* @param {Object} supplier The object to provide the properties.
* @return {Object} The receiver
*/
mix: function(receiver, supplier){
var prop;
for (prop in supplier){
if (supplier.hasOwnProperty(prop)){
receiver[prop] = supplier[prop];
}
}
return receiver;
},
/*
* Polyfill for array indexOf() method.
* @param {Array} values The array to search.
* @param {Variant} value The value to search for.
* @return {int} The index of the value if found, -1 if not.
*/
indexOf: function(values, value){
if (values.indexOf){
return values.indexOf(value);
} else {
for (var i=0, len=values.length; i < len; i++){
if (values[i] === value){
return i;
}
}
return -1;
}
},
/*
* Polyfill for array forEach() method.
* @param {Array} values The array to operate on.
* @param {Function} func The function to call on each item.
* @return {void}
*/
forEach: function(values, func) {
if (values.forEach){
return values.forEach(func);
} else {
for (var i=0, len=values.length; i < len; i++){
func(values[i], i, values);
}
}
}
};
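/*
 * Usage sketch for the helpers above (the object and array literals are
 * illustrative):
 *
 *     var options = CSSLint.Util.mix({}, { quiet: false });  // shallow copy of defaults
 *     CSSLint.Util.indexOf(["a", "b"], "b");                 // 1
 *     CSSLint.Util.forEach(["a", "b"], function(value, i){
 *         console.log(i + ": " + value);
 *     });
 */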
/*global CSSLint*/
/*
* Rule: Don't use adjoining classes (.foo.bar).
*/
CSSLint.addRule({
//rule information
id: "adjoining-classes",
name: "Disallow adjoining classes",
desc: "Don't use adjoining classes.",
browsers: "IE6",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
modifier,
classCount,
i, j, k;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
for (j=0; j < selector.parts.length; j++){
part = selector.parts[j];
if (part.type == parser.SELECTOR_PART_TYPE){
classCount = 0;
for (k=0; k < part.modifiers.length; k++){
modifier = part.modifiers[k];
if (modifier.type == "class"){
classCount++;
}
if (classCount > 1){
reporter.report("Don't use adjoining classes.", part.line, part.col, rule);
}
}
}
}
}
});
}
});
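/*
 * Minimal check sketch for this rule (the CSS string is illustrative):
 *
 *     var res = CSSLint.verify(".foo.bar { color: red; }", { "adjoining-classes": 1 });
 *     // res.messages[0].message === "Don't use adjoining classes."
 */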
/*global CSSLint*/
/*
* Rule: Don't use width or height when using padding or border.
*/
CSSLint.addRule({
//rule information
id: "box-model",
name: "Beware of broken box size",
desc: "Don't use width or height when using padding or border.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
widthProperties = {
border: 1,
"border-left": 1,
"border-right": 1,
padding: 1,
"padding-left": 1,
"padding-right": 1
},
heightProperties = {
border: 1,
"border-bottom": 1,
"border-top": 1,
padding: 1,
"padding-bottom": 1,
"padding-top": 1
},
properties;
function startRule(){
properties = {};
}
function endRule(){
var prop, value;
if (properties.height){
for (prop in heightProperties){
if (heightProperties.hasOwnProperty(prop) && properties[prop]){
value = properties[prop].value;
//special case for padding
if (!(prop == "padding" && value.parts.length === 2 && value.parts[0].value === 0)){
reporter.report("Using height with " + prop + " can sometimes make elements larger than you expect.", properties[prop].line, properties[prop].col, rule);
}
}
}
}
if (properties.width){
for (prop in widthProperties){
if (widthProperties.hasOwnProperty(prop) && properties[prop]){
value = properties[prop].value;
if (!(prop == "padding" && value.parts.length === 2 && value.parts[1].value === 0)){
reporter.report("Using width with " + prop + " can sometimes make elements larger than you expect.", properties[prop].line, properties[prop].col, rule);
}
}
}
}
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
parser.addListener("startpage", startRule);
parser.addListener("startpagemargin", startRule);
parser.addListener("startkeyframerule", startRule);
parser.addListener("property", function(event){
var name = event.property.text.toLowerCase();
if (heightProperties[name] || widthProperties[name]){
if (!/^0\S*$/.test(event.value) && !(name == "border" && event.value == "none")){
properties[name] = { line: event.property.line, col: event.property.col, value: event.value };
}
} else {
if (/^(width|height)/i.test(name) && /^(length|percentage)/.test(event.value.parts[0].type)){
properties[name] = 1;
}
}
});
parser.addListener("endrule", endRule);
parser.addListener("endfontface", endRule);
parser.addListener("endpage", endRule);
parser.addListener("endpagemargin", endRule);
parser.addListener("endkeyframerule", endRule);
}
});
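/*
 * Sketch of what this rule flags (CSS strings are illustrative):
 *
 *     .box { width: 100px; padding: 10px; }    // flagged: width plus padding changes the rendered size
 *     .box { height: 2em; border: 1px solid; } // flagged: same idea for height plus border
 *     .box { width: 100px; margin: 10px; }     // allowed: margin doesn't affect the box size check
 */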
/*global CSSLint*/
/*
* Rule: box-sizing doesn't work in IE6 and IE7.
*/
CSSLint.addRule({
//rule information
id: "box-sizing",
name: "Disallow use of box-sizing",
desc: "The box-sizing property isn't supported in IE6 and IE7.",
browsers: "IE6, IE7",
tags: ["Compatibility"],
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("property", function(event){
var name = event.property.text.toLowerCase();
if (name == "box-sizing"){
reporter.report("The box-sizing property isn't supported in IE6 and IE7.", event.line, event.col, rule);
}
});
}
});
/*
* Rule: Include all compatible vendor prefixes to reach a wider
* range of users.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "compatible-vendor-prefixes",
name: "Require compatible vendor prefixes",
desc: "Include all compatible vendor prefixes to reach a wider range of users.",
browsers: "All",
//initialization
init: function (parser, reporter) {
var rule = this,
compatiblePrefixes,
properties,
prop,
variations,
prefixed,
i,
len,
inKeyFrame = false,
arrayPush = Array.prototype.push,
applyTo = [];
// See http://peter.sh/experiments/vendor-prefixed-css-property-overview/ for details
compatiblePrefixes = {
"animation" : "webkit moz",
"animation-delay" : "webkit moz",
"animation-direction" : "webkit moz",
"animation-duration" : "webkit moz",
"animation-fill-mode" : "webkit moz",
"animation-iteration-count" : "webkit moz",
"animation-name" : "webkit moz",
"animation-play-state" : "webkit moz",
"animation-timing-function" : "webkit moz",
"appearance" : "webkit moz",
"border-end" : "webkit moz",
"border-end-color" : "webkit moz",
"border-end-style" : "webkit moz",
"border-end-width" : "webkit moz",
"border-image" : "webkit moz o",
"border-radius" : "webkit moz",
"border-start" : "webkit moz",
"border-start-color" : "webkit moz",
"border-start-style" : "webkit moz",
"border-start-width" : "webkit moz",
"box-align" : "webkit moz ms",
"box-direction" : "webkit moz ms",
"box-flex" : "webkit moz ms",
"box-lines" : "webkit ms",
"box-ordinal-group" : "webkit moz ms",
"box-orient" : "webkit moz ms",
"box-pack" : "webkit moz ms",
"box-sizing" : "webkit moz",
"box-shadow" : "webkit moz",
"column-count" : "webkit moz ms",
"column-gap" : "webkit moz ms",
"column-rule" : "webkit moz ms",
"column-rule-color" : "webkit moz ms",
"column-rule-style" : "webkit moz ms",
"column-rule-width" : "webkit moz ms",
"column-width" : "webkit moz ms",
"hyphens" : "epub moz",
"line-break" : "webkit ms",
"margin-end" : "webkit moz",
"margin-start" : "webkit moz",
"marquee-speed" : "webkit wap",
"marquee-style" : "webkit wap",
"padding-end" : "webkit moz",
"padding-start" : "webkit moz",
"tab-size" : "moz o",
"text-size-adjust" : "webkit ms",
"transform" : "webkit moz ms o",
"transform-origin" : "webkit moz ms o",
"transition" : "webkit moz o",
"transition-delay" : "webkit moz o",
"transition-duration" : "webkit moz o",
"transition-property" : "webkit moz o",
"transition-timing-function" : "webkit moz o",
"user-modify" : "webkit moz",
"user-select" : "webkit moz ms",
"word-break" : "epub ms",
"writing-mode" : "epub ms"
};
for (prop in compatiblePrefixes) {
if (compatiblePrefixes.hasOwnProperty(prop)) {
variations = [];
prefixed = compatiblePrefixes[prop].split(' ');
for (i = 0, len = prefixed.length; i < len; i++) {
variations.push('-' + prefixed[i] + '-' + prop);
}
compatiblePrefixes[prop] = variations;
arrayPush.apply(applyTo, variations);
}
}
parser.addListener("startrule", function () {
properties = [];
});
parser.addListener("startkeyframes", function (event) {
inKeyFrame = event.prefix || true;
});
parser.addListener("endkeyframes", function (event) {
inKeyFrame = false;
});
parser.addListener("property", function (event) {
var name = event.property;
if (CSSLint.Util.indexOf(applyTo, name.text) > -1) {
// e.g., -moz-transform is okay to be alone in @-moz-keyframes
if (!inKeyFrame || typeof inKeyFrame != "string" ||
name.text.indexOf("-" + inKeyFrame + "-") !== 0) {
properties.push(name);
}
}
});
parser.addListener("endrule", function (event) {
if (!properties.length) {
return;
}
var propertyGroups = {},
i,
len,
name,
prop,
variations,
value,
full,
actual,
item,
propertiesSpecified;
for (i = 0, len = properties.length; i < len; i++) {
name = properties[i];
for (prop in compatiblePrefixes) {
if (compatiblePrefixes.hasOwnProperty(prop)) {
variations = compatiblePrefixes[prop];
if (CSSLint.Util.indexOf(variations, name.text) > -1) {
if (!propertyGroups[prop]) {
propertyGroups[prop] = {
full : variations.slice(0),
actual : [],
actualNodes: []
};
}
if (CSSLint.Util.indexOf(propertyGroups[prop].actual, name.text) === -1) {
propertyGroups[prop].actual.push(name.text);
propertyGroups[prop].actualNodes.push(name);
}
}
}
}
}
for (prop in propertyGroups) {
if (propertyGroups.hasOwnProperty(prop)) {
value = propertyGroups[prop];
full = value.full;
actual = value.actual;
if (full.length > actual.length) {
for (i = 0, len = full.length; i < len; i++) {
item = full[i];
if (CSSLint.Util.indexOf(actual, item) === -1) {
propertiesSpecified = (actual.length === 1) ? actual[0] : (actual.length == 2) ? actual.join(" and ") : actual.join(", ");
reporter.report("The property " + item + " is compatible with " + propertiesSpecified + " and should be included as well.", value.actualNodes[0].line, value.actualNodes[0].col, rule);
}
}
}
}
}
});
}
});
/*
* Rule: Certain properties don't play well with certain display values.
* - float should not be used with inline-block
* - height, width, margin-top, margin-bottom, float should not be used with inline
* - vertical-align should not be used with block
* - margin, float should not be used with table-*
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "display-property-grouping",
name: "Require properties appropriate for display",
desc: "Certain properties shouldn't be used with certain display property values.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
var propertiesToCheck = {
display: 1,
"float": "none",
height: 1,
width: 1,
margin: 1,
"margin-left": 1,
"margin-right": 1,
"margin-bottom": 1,
"margin-top": 1,
padding: 1,
"padding-left": 1,
"padding-right": 1,
"padding-bottom": 1,
"padding-top": 1,
"vertical-align": 1
},
properties;
function reportProperty(name, display, msg){
if (properties[name]){
if (typeof propertiesToCheck[name] != "string" || properties[name].value.toLowerCase() != propertiesToCheck[name]){
reporter.report(msg || name + " can't be used with display: " + display + ".", properties[name].line, properties[name].col, rule);
}
}
}
function startRule(){
properties = {};
}
function endRule(){
var display = properties.display ? properties.display.value : null;
if (display){
switch(display){
case "inline":
//height, width, margin-top, margin-bottom, float should not be used with inline
reportProperty("height", display);
reportProperty("width", display);
reportProperty("margin", display);
reportProperty("margin-top", display);
reportProperty("margin-bottom", display);
reportProperty("float", display, "display:inline has no effect on floated elements (but may be used to fix the IE6 double-margin bug).");
break;
case "block":
//vertical-align should not be used with block
reportProperty("vertical-align", display);
break;
case "inline-block":
//float should not be used with inline-block
reportProperty("float", display);
break;
default:
//margin, float should not be used with table
if (display.indexOf("table-") === 0){
reportProperty("margin", display);
reportProperty("margin-left", display);
reportProperty("margin-right", display);
reportProperty("margin-top", display);
reportProperty("margin-bottom", display);
reportProperty("float", display);
}
//otherwise do nothing
}
}
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
parser.addListener("startkeyframerule", startRule);
parser.addListener("startpagemargin", startRule);
parser.addListener("startpage", startRule);
parser.addListener("property", function(event){
var name = event.property.text.toLowerCase();
if (propertiesToCheck[name]){
properties[name] = { value: event.value.text, line: event.property.line, col: event.property.col };
}
});
parser.addListener("endrule", endRule);
parser.addListener("endfontface", endRule);
parser.addListener("endkeyframerule", endRule);
parser.addListener("endpagemargin", endRule);
parser.addListener("endpage", endRule);
}
});
/*
* Rule: Disallow duplicate background-images (using url).
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "duplicate-background-images",
name: "Disallow duplicate background images",
desc: "Every background-image should be unique. Use a common class for sprites, for example.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
stack = {};
parser.addListener("property", function(event){
var name = event.property.text,
value = event.value,
i, len;
if (name.match(/background/i)) {
for (i=0, len=value.parts.length; i < len; i++) {
if (value.parts[i].type == 'uri') {
if (typeof stack[value.parts[i].uri] === 'undefined') {
stack[value.parts[i].uri] = event;
}
else {
reporter.report("Background image '" + value.parts[i].uri + "' was used multiple times, first declared at line " + stack[value.parts[i].uri].line + ", col " + stack[value.parts[i].uri].col + ".", event.line, event.col, rule);
}
}
}
}
});
}
});
/*
* Rule: Duplicate properties must appear one after the other. If an already-defined
* property appears somewhere else in the rule, then it's likely an error.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "duplicate-properties",
name: "Disallow duplicate properties",
desc: "Duplicate properties must appear one after the other.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
properties,
lastProperty;
function startRule(event){
properties = {};
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
parser.addListener("startpage", startRule);
parser.addListener("startpagemargin", startRule);
parser.addListener("startkeyframerule", startRule);
parser.addListener("property", function(event){
var property = event.property,
name = property.text.toLowerCase();
if (properties[name] && (lastProperty != name || properties[name] == event.value.text)){
reporter.report("Duplicate property '" + event.property + "' found.", event.line, event.col, rule);
}
properties[name] = event.value.text;
lastProperty = name;
});
}
});
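/*
 * Sketch of what this rule flags (CSS strings are illustrative):
 *
 *     a { color: red; background: #fff; color: blue; }  // flagged: color repeats non-consecutively
 *     a { color: red; color: rgba(255,0,0,0.5); }       // allowed: consecutive duplicates with
 *                                                       // different values are a valid fallback pattern
 */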
/*
* Rule: Style rules without any properties defined should be removed.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "empty-rules",
name: "Disallow empty rules",
desc: "Rules without any properties specified should be removed.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
count = 0;
parser.addListener("startrule", function(){
count=0;
});
parser.addListener("property", function(){
count++;
});
parser.addListener("endrule", function(event){
var selectors = event.selectors;
if (count === 0){
reporter.report("Rule is empty.", selectors[0].line, selectors[0].col, rule);
}
});
}
});
/*
* Rule: There should be no syntax errors. (Duh.)
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "errors",
name: "Parsing Errors",
desc: "This rule looks for recoverable syntax errors.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("error", function(event){
reporter.error(event.message, event.line, event.col, rule);
});
}
});
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "fallback-colors",
name: "Require fallback colors",
desc: "For older browsers that don't support RGBA, HSL, or HSLA, provide a fallback color.",
browsers: "IE6,IE7,IE8",
//initialization
init: function(parser, reporter){
var rule = this,
lastProperty,
propertiesToCheck = {
color: 1,
background: 1,
"background-color": 1
},
properties;
function startRule(event){
properties = {};
lastProperty = null;
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
parser.addListener("startpage", startRule);
parser.addListener("startpagemargin", startRule);
parser.addListener("startkeyframerule", startRule);
parser.addListener("property", function(event){
var property = event.property,
name = property.text.toLowerCase(),
parts = event.value.parts,
i = 0,
colorType = "",
len = parts.length;
if(propertiesToCheck[name]){
while(i < len){
if (parts[i].type == "color"){
if ("alpha" in parts[i] || "hue" in parts[i]){
if (/([^\)]+)\(/.test(parts[i])){
colorType = RegExp.$1.toUpperCase();
}
if (!lastProperty || (lastProperty.property.text.toLowerCase() != name || lastProperty.colorType != "compat")){
reporter.report("Fallback " + name + " (hex or RGB) should precede " + colorType + " " + name + ".", event.line, event.col, rule);
}
} else {
event.colorType = "compat";
}
}
i++;
}
}
lastProperty = event;
});
}
});
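/*
 * Sketch of what this rule flags (CSS strings are illustrative):
 *
 *     a { color: rgba(0,0,0,0.5); }               // flagged: no hex/RGB fallback declared first
 *     a { color: #000; color: rgba(0,0,0,0.5); }  // allowed: compatible fallback precedes RGBA
 */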
/*
* Rule: You shouldn't use more than 10 floats. If you do, there's probably
* room for some abstraction.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "floats",
name: "Disallow too many floats",
desc: "This rule tests if the float property is used too many times",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
var count = 0;
//count how many times "float" is used
parser.addListener("property", function(event){
if (event.property.text.toLowerCase() == "float" &&
event.value.text.toLowerCase() != "none"){
count++;
}
});
//report the results
parser.addListener("endstylesheet", function(){
reporter.stat("floats", count);
if (count >= 10){
reporter.rollupWarn("Too many floats (" + count + "), you're probably using them for layout. Consider using a grid system instead.", rule);
}
});
}
});
/*
* Rule: Avoid too many @font-face declarations in the same stylesheet.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "font-faces",
name: "Don't use too many web fonts",
desc: "Too many different web fonts in the same stylesheet.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
count = 0;
parser.addListener("startfontface", function(){
count++;
});
parser.addListener("endstylesheet", function(){
if (count > 5){
reporter.rollupWarn("Too many @font-face declarations (" + count + ").", rule);
}
});
}
});
/*
* Rule: You shouldn't need more than 9 font-size declarations.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "font-sizes",
name: "Disallow too many font sizes",
desc: "Checks the number of font-size declarations.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
count = 0;
//check for use of "font-size"
parser.addListener("property", function(event){
if (event.property == "font-size"){
count++;
}
});
//report the results
parser.addListener("endstylesheet", function(){
reporter.stat("font-sizes", count);
if (count >= 10){
reporter.rollupWarn("Too many font-size declarations (" + count + "), abstraction needed.", rule);
}
});
}
});
/*
* Rule: When using a vendor-prefixed gradient, make sure to use them all.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "gradients",
name: "Require all gradient definitions",
desc: "When using a vendor-prefixed gradient, make sure to use them all.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
gradients;
parser.addListener("startrule", function(){
gradients = {
moz: 0,
webkit: 0,
oldWebkit: 0,
ms: 0,
o: 0
};
});
parser.addListener("property", function(event){
if (/\-(moz|ms|o|webkit)(?:\-(?:linear|radial))\-gradient/i.test(event.value)){
gradients[RegExp.$1] = 1;
} else if (/\-webkit\-gradient/i.test(event.value)){
gradients.oldWebkit = 1;
}
});
parser.addListener("endrule", function(event){
var missing = [];
if (!gradients.moz){
missing.push("Firefox 3.6+");
}
if (!gradients.webkit){
missing.push("Webkit (Safari 5+, Chrome)");
}
if (!gradients.oldWebkit){
missing.push("Old Webkit (Safari 4+, Chrome)");
}
if (!gradients.ms){
missing.push("Internet Explorer 10+");
}
if (!gradients.o){
missing.push("Opera 11.1+");
}
if (missing.length && missing.length < 5){
reporter.report("Missing vendor-prefixed CSS gradients for " + missing.join(", ") + ".", event.selectors[0].line, event.selectors[0].col, rule);
}
});
}
});
/*
* Rule: Don't use IDs for selectors.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "ids",
name: "Disallow IDs in selectors",
desc: "Selectors should not contain IDs.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
modifier,
idCount,
i, j, k;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
idCount = 0;
for (j=0; j < selector.parts.length; j++){
part = selector.parts[j];
if (part.type == parser.SELECTOR_PART_TYPE){
for (k=0; k < part.modifiers.length; k++){
modifier = part.modifiers[k];
if (modifier.type == "id"){
idCount++;
}
}
}
}
if (idCount == 1){
reporter.report("Don't use IDs in selectors.", selector.line, selector.col, rule);
} else if (idCount > 1){
reporter.report(idCount + " IDs in the selector, really?", selector.line, selector.col, rule);
}
}
});
}
});
/*
* Rule: Don't use @import, use <link> instead.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "import",
name: "Disallow @import",
desc: "Don't use @import, use <link> instead.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("import", function(event){
reporter.report("@import prevents parallel downloads, use <link> instead.", event.line, event.col, rule);
});
}
});
/*
* Rule: Make sure !important is not overused, as this can lead to specificity
* wars. Display a warning on each !important declaration, plus a rollup
* warning if it's used 10 or more times.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "important",
name: "Disallow !important",
desc: "Be careful when using the !important declaration",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
count = 0;
//warn that important is used and increment the declaration counter
parser.addListener("property", function(event){
if (event.important === true){
count++;
reporter.report("Use of !important", event.line, event.col, rule);
}
});
//if there are more than 10, show an error
parser.addListener("endstylesheet", function(){
reporter.stat("important", count);
if (count >= 10){
reporter.rollupWarn("Too many !important declarations (" + count + "), try to use less than 10 to avoid specificity issues.", rule);
}
});
}
});
/*
* Rule: Properties should be known (listed in CSS3 specification) or
* be a vendor-prefixed property.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "known-properties",
name: "Require use of known properties",
desc: "Properties should be known (listed in CSS3 specification) or be a vendor-prefixed property.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("property", function(event){
var name = event.property.text.toLowerCase();
// the check is handled entirely by the parser-lib (https://github.com/nzakas/parser-lib)
if (event.invalid) {
reporter.report(event.invalid.message, event.line, event.col, rule);
}
});
}
});
/*
* Rule: outline: none or outline: 0 should only be used in a :focus rule
* and only if there are other properties in the same rule.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "outline-none",
name: "Disallow outline: none",
desc: "Use of outline: none or outline: 0 should be limited to :focus rules.",
browsers: "All",
tags: ["Accessibility"],
//initialization
init: function(parser, reporter){
var rule = this,
lastRule;
function startRule(event){
if (event.selectors){
lastRule = {
line: event.line,
col: event.col,
selectors: event.selectors,
propCount: 0,
outline: false
};
} else {
lastRule = null;
}
}
function endRule(event){
if (lastRule){
if (lastRule.outline){
if (lastRule.selectors.toString().toLowerCase().indexOf(":focus") == -1){
reporter.report("Outlines should only be modified using :focus.", lastRule.line, lastRule.col, rule);
} else if (lastRule.propCount == 1) {
reporter.report("Outlines shouldn't be hidden unless other visual changes are made.", lastRule.line, lastRule.col, rule);
}
}
}
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
parser.addListener("startpage", startRule);
parser.addListener("startpagemargin", startRule);
parser.addListener("startkeyframerule", startRule);
parser.addListener("property", function(event){
var name = event.property.text.toLowerCase(),
value = event.value;
if (lastRule){
lastRule.propCount++;
if (name == "outline" && (value == "none" || value == "0")){
lastRule.outline = true;
}
}
});
parser.addListener("endrule", endRule);
parser.addListener("endfontface", endRule);
parser.addListener("endpage", endRule);
parser.addListener("endpagemargin", endRule);
parser.addListener("endkeyframerule", endRule);
}
});
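/*
 * Sketch of what this rule flags (CSS strings are illustrative):
 *
 *     a { outline: none; }                            // flagged: outlines should only change on :focus
 *     a:focus { outline: 0; }                         // flagged: hidden with no other visual cue
 *     a:focus { outline: 0; border: 1px solid red; }  // allowed: another visual change is present
 */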
/*
* Rule: Don't use classes or IDs with elements (a.foo or a#foo).
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "overqualified-elements",
name: "Disallow overqualified elements",
desc: "Don't use classes or IDs with elements (a.foo or a#foo).",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
classes = {};
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
modifier,
i, j, k;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
for (j=0; j < selector.parts.length; j++){
part = selector.parts[j];
if (part.type == parser.SELECTOR_PART_TYPE){
for (k=0; k < part.modifiers.length; k++){
modifier = part.modifiers[k];
if (part.elementName && modifier.type == "id"){
reporter.report("Element (" + part + ") is overqualified, just use " + modifier + " without element name.", part.line, part.col, rule);
} else if (modifier.type == "class"){
if (!classes[modifier]){
classes[modifier] = [];
}
classes[modifier].push({ modifier: modifier, part: part });
}
}
}
}
}
});
parser.addListener("endstylesheet", function(){
var prop;
for (prop in classes){
if (classes.hasOwnProperty(prop)){
//one use means that this is overqualified
if (classes[prop].length == 1 && classes[prop][0].part.elementName){
reporter.report("Element (" + classes[prop][0].part + ") is overqualified, just use " + classes[prop][0].modifier + " without element name.", classes[prop][0].part.line, classes[prop][0].part.col, rule);
}
}
}
});
}
});
/*
* Rule: Headings (h1-h6) should not be qualified (namespaced).
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "qualified-headings",
name: "Disallow qualified headings",
desc: "Headings should not be qualified (namespaced).",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
i, j;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
for (j=0; j < selector.parts.length; j++){
part = selector.parts[j];
if (part.type == parser.SELECTOR_PART_TYPE){
if (part.elementName && /h[1-6]/.test(part.elementName.toString()) && j > 0){
reporter.report("Heading (" + part.elementName + ") should not be qualified.", part.line, part.col, rule);
}
}
}
}
});
}
});
/*
* Rule: Selectors that look like regular expressions are slow and should be avoided.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "regex-selectors",
name: "Disallow selectors that look like regexs",
desc: "Selectors that look like regular expressions are slow and should be avoided.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
modifier,
i, j, k;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
for (j=0; j < selector.parts.length; j++){
part = selector.parts[j];
if (part.type == parser.SELECTOR_PART_TYPE){
for (k=0; k < part.modifiers.length; k++){
modifier = part.modifiers[k];
if (modifier.type == "attribute"){
if (/([\~\|\^\$\*]=)/.test(modifier)){
reporter.report("Attribute selectors with " + RegExp.$1 + " are slow!", modifier.line, modifier.col, rule);
}
}
}
}
}
}
});
}
});
/*
* Rule: Total number of rules should not exceed x.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "rules-count",
name: "Rules Count",
desc: "Track how many rules there are.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
count = 0;
//count each rule
parser.addListener("startrule", function(){
count++;
});
parser.addListener("endstylesheet", function(){
reporter.stat("rule-count", count);
});
}
});
/*
* Rule: Use shorthand properties where possible.
*
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "shorthand",
name: "Require shorthand properties",
desc: "Use shorthand properties where possible.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
prop, i, len,
propertiesToCheck = {},
properties,
mapping = {
"margin": [
"margin-top",
"margin-bottom",
"margin-left",
"margin-right"
],
"padding": [
"padding-top",
"padding-bottom",
"padding-left",
"padding-right"
]
};
//initialize propertiesToCheck
for (prop in mapping){
if (mapping.hasOwnProperty(prop)){
for (i=0, len=mapping[prop].length; i < len; i++){
propertiesToCheck[mapping[prop][i]] = prop;
}
}
}
function startRule(event){
properties = {};
}
//event handler for end of rules
function endRule(event){
var prop, i, len, total;
//check which properties this rule has
for (prop in mapping){
if (mapping.hasOwnProperty(prop)){
total=0;
for (i=0, len=mapping[prop].length; i < len; i++){
total += properties[mapping[prop][i]] ? 1 : 0;
}
if (total == mapping[prop].length){
reporter.report("The properties " + mapping[prop].join(", ") + " can be replaced by " + prop + ".", event.line, event.col, rule);
}
}
}
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
//track longhand properties that could be replaced by a shorthand
parser.addListener("property", function(event){
var name = event.property.toString().toLowerCase(),
value = event.value.parts[0].value;
if (propertiesToCheck[name]){
properties[name] = 1;
}
});
parser.addListener("endrule", endRule);
parser.addListener("endfontface", endRule);
}
});
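/*
 * Sketch of what this rule flags (the CSS is illustrative):
 *
 *     .m { margin-top: 1px; margin-bottom: 1px; margin-left: 1px; margin-right: 1px; }
 *     // flagged: all four longhands appear, so the margin shorthand is suggested
 */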
/*
* Rule: Don't use properties with a star prefix.
*
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "star-property-hack",
name: "Disallow properties with a star prefix",
desc: "Checks for the star property hack (targets IE6/7)",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
//check if property name starts with "*"
parser.addListener("property", function(event){
var property = event.property;
if (property.hack == "*") {
reporter.report("Property with star prefix found.", event.property.line, event.property.col, rule);
}
});
}
});
/*
* Rule: Don't use text-indent for image replacement if you need to support rtl.
*
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "text-indent",
name: "Disallow negative text-indent",
desc: "Checks for text indent less than -99px",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
textIndent,
direction;
function startRule(event){
textIndent = false;
direction = "inherit";
}
//event handler for end of rules
function endRule(event){
if (textIndent && direction != "ltr"){
reporter.report("Negative text-indent doesn't work well with RTL. If you use text-indent for image replacement explicitly set direction for that item to ltr.", textIndent.line, textIndent.col, rule);
}
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
//check for use of text-indent and direction
parser.addListener("property", function(event){
var name = event.property.toString().toLowerCase(),
value = event.value;
if (name == "text-indent" && value.parts[0].value < -99){
textIndent = event.property;
} else if (name == "direction" && value == "ltr"){
direction = "ltr";
}
});
parser.addListener("endrule", endRule);
parser.addListener("endfontface", endRule);
}
});
/*
* Rule: Don't use properties with a underscore prefix.
*
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "underscore-property-hack",
name: "Disallow properties with an underscore prefix",
desc: "Checks for the underscore property hack (targets IE6)",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
//check if property name starts with "_"
parser.addListener("property", function(event){
var property = event.property;
if (property.hack == "_") {
reporter.report("Property with underscore prefix found.", event.property.line, event.property.col, rule);
}
});
}
});
/*
* Rule: Headings (h1-h6) should be defined only once.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "unique-headings",
name: "Headings should only be defined once",
desc: "Headings should be defined only once.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
var headings = {
h1: 0,
h2: 0,
h3: 0,
h4: 0,
h5: 0,
h6: 0
};
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
pseudo,
i, j;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
part = selector.parts[selector.parts.length-1];
if (part.elementName && /(h[1-6])/i.test(part.elementName.toString())){
for (j=0; j < part.modifiers.length; j++){
if (part.modifiers[j].type == "pseudo"){
pseudo = true;
break;
}
}
if (!pseudo){
headings[RegExp.$1]++;
if (headings[RegExp.$1] > 1) {
reporter.report("Heading (" + part.elementName + ") has already been defined.", part.line, part.col, rule);
}
}
}
}
});
parser.addListener("endstylesheet", function(event){
var prop,
messages = [];
for (prop in headings){
if (headings.hasOwnProperty(prop)){
if (headings[prop] > 1){
messages.push(headings[prop] + " " + prop + "s");
}
}
}
if (messages.length){
reporter.rollupWarn("You have " + messages.join(", ") + " defined in this stylesheet.", rule);
}
});
}
});
/*
* Rule: Don't use universal selector because it's slow.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "universal-selector",
name: "Disallow universal selector",
desc: "The universal selector (*) is known to be slow.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
modifier,
i, j, k;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
part = selector.parts[selector.parts.length-1];
if (part.elementName == "*"){
reporter.report(rule.desc, part.line, part.col, rule);
}
}
});
}
});
/*
* Rule: Don't use unqualified attribute selectors because they're just like universal selectors.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "unqualified-attributes",
name: "Disallow unqualified attribute selectors",
desc: "Unqualified attribute selectors are known to be slow.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
parser.addListener("startrule", function(event){
var selectors = event.selectors,
selector,
part,
modifier,
i, j, k;
for (i=0; i < selectors.length; i++){
selector = selectors[i];
part = selector.parts[selector.parts.length-1];
if (part.type == parser.SELECTOR_PART_TYPE){
for (k=0; k < part.modifiers.length; k++){
modifier = part.modifiers[k];
if (modifier.type == "attribute" && (!part.elementName || part.elementName == "*")){
reporter.report(rule.desc, part.line, part.col, rule);
}
}
}
}
});
}
});
/*
* Rule: When using a vendor-prefixed property, make sure to
* include the standard one.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "vendor-prefix",
name: "Require standard property with vendor prefix",
desc: "When using a vendor-prefixed property, make sure to include the standard one.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this,
properties,
num,
propertiesToCheck = {
"-webkit-border-radius": "border-radius",
"-webkit-border-top-left-radius": "border-top-left-radius",
"-webkit-border-top-right-radius": "border-top-right-radius",
"-webkit-border-bottom-left-radius": "border-bottom-left-radius",
"-webkit-border-bottom-right-radius": "border-bottom-right-radius",
"-o-border-radius": "border-radius",
"-o-border-top-left-radius": "border-top-left-radius",
"-o-border-top-right-radius": "border-top-right-radius",
"-o-border-bottom-left-radius": "border-bottom-left-radius",
"-o-border-bottom-right-radius": "border-bottom-right-radius",
"-moz-border-radius": "border-radius",
"-moz-border-radius-topleft": "border-top-left-radius",
"-moz-border-radius-topright": "border-top-right-radius",
"-moz-border-radius-bottomleft": "border-bottom-left-radius",
"-moz-border-radius-bottomright": "border-bottom-right-radius",
"-moz-column-count": "column-count",
"-webkit-column-count": "column-count",
"-moz-column-gap": "column-gap",
"-webkit-column-gap": "column-gap",
"-moz-column-rule": "column-rule",
"-webkit-column-rule": "column-rule",
"-moz-column-rule-style": "column-rule-style",
"-webkit-column-rule-style": "column-rule-style",
"-moz-column-rule-color": "column-rule-color",
"-webkit-column-rule-color": "column-rule-color",
"-moz-column-rule-width": "column-rule-width",
"-webkit-column-rule-width": "column-rule-width",
"-moz-column-width": "column-width",
"-webkit-column-width": "column-width",
"-webkit-column-span": "column-span",
"-webkit-columns": "columns",
"-moz-box-shadow": "box-shadow",
"-webkit-box-shadow": "box-shadow",
"-moz-transform" : "transform",
"-webkit-transform" : "transform",
"-o-transform" : "transform",
"-ms-transform" : "transform",
"-moz-transform-origin" : "transform-origin",
"-webkit-transform-origin" : "transform-origin",
"-o-transform-origin" : "transform-origin",
"-ms-transform-origin" : "transform-origin",
"-moz-box-sizing" : "box-sizing",
"-webkit-box-sizing" : "box-sizing",
"-moz-user-select" : "user-select",
"-khtml-user-select" : "user-select",
"-webkit-user-select" : "user-select"
};
//event handler for beginning of rules
function startRule(){
properties = {};
num=1;
}
//event handler for end of rules
function endRule(event){
var prop,
i, len,
standard,
needed,
actual,
needsStandard = [];
for (prop in properties){
if (propertiesToCheck[prop]){
needsStandard.push({ actual: prop, needed: propertiesToCheck[prop]});
}
}
for (i=0, len=needsStandard.length; i < len; i++){
needed = needsStandard[i].needed;
actual = needsStandard[i].actual;
if (!properties[needed]){
reporter.report("Missing standard property '" + needed + "' to go along with '" + actual + "'.", properties[actual][0].name.line, properties[actual][0].name.col, rule);
} else {
//make sure standard property is last
if (properties[needed][0].pos < properties[actual][0].pos){
reporter.report("Standard property '" + needed + "' should come after vendor-prefixed property '" + actual + "'.", properties[actual][0].name.line, properties[actual][0].name.col, rule);
}
}
}
}
parser.addListener("startrule", startRule);
parser.addListener("startfontface", startRule);
parser.addListener("startpage", startRule);
parser.addListener("startpagemargin", startRule);
parser.addListener("startkeyframerule", startRule);
parser.addListener("property", function(event){
var name = event.property.text.toLowerCase();
if (!properties[name]){
properties[name] = [];
}
properties[name].push({ name: event.property, value : event.value, pos:num++ });
});
parser.addListener("endrule", endRule);
parser.addListener("endfontface", endRule);
parser.addListener("endpage", endRule);
parser.addListener("endpagemargin", endRule);
parser.addListener("endkeyframerule", endRule);
}
});
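/*
 * Sketch of what this rule flags (CSS strings are illustrative):
 *
 *     a { -moz-border-radius: 5px; }                      // flagged: standard border-radius missing
 *     a { border-radius: 5px; -moz-border-radius: 5px; }  // flagged: standard property should come last
 *     a { -moz-border-radius: 5px; border-radius: 5px; }  // allowed
 */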
/*
* Rule: You don't need to specify units when a value is 0.
*/
/*global CSSLint*/
CSSLint.addRule({
//rule information
id: "zero-units",
name: "Disallow units for 0 values",
desc: "You don't need to specify units when a value is 0.",
browsers: "All",
//initialization
init: function(parser, reporter){
var rule = this;
//flag any zero value that specifies units or a percentage (time values exempt)
parser.addListener("property", function(event){
var parts = event.value.parts,
i = 0,
len = parts.length;
while(i < len){
if ((parts[i].units || parts[i].type == "percentage") && parts[i].value === 0 && parts[i].type != "time"){
reporter.report("Values of 0 shouldn't have units specified.", parts[i].line, parts[i].col, rule);
}
i++;
}
});
}
});
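/*
 * Sketch of what this rule flags (CSS strings are illustrative):
 *
 *     a { margin: 0px; }              // flagged: 0 needs no unit
 *     a { transition-duration: 0s; }  // allowed: time values are exempt above
 */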
/*global CSSLint*/
(function() {
/**
* Replace special characters before writing to output.
*
* Rules:
*  - &quot; is the escape sequence for "
*  - &amp; is the escape sequence for &
*  - &lt; is the escape sequence for <
*  - &gt; is the escape sequence for >
*
* @param {String} message to escape
* @return escaped message as {String}
*/
var xmlEscape = function(str) {
if (!str || str.constructor !== String) {
return "";
}
return str.replace(/[\"&><]/g, function(match) {
switch (match) {
case "\"":
return "&quot;";
case "&":
return "&amp;";
case "<":
return "&lt;";
case ">":
return "&gt;";
}
});
};
CSSLint.addFormatter({
//format information
id: "checkstyle-xml",
name: "Checkstyle XML format",
/**
* Return opening root XML tag.
* @return {String} to prepend before all results
*/
startFormat: function(){
return "<?xml version=\"1.0\" encoding=\"utf-8\"?><checkstyle>";
},
/**
* Return closing root XML tag.
* @return {String} to append after all results
*/
endFormat: function(){
return "</checkstyle>";
},
/**
* Returns message when there is a file read error.
* @param {String} filename The name of the file that caused the error.
* @param {String} message The error message
* @return {String} The error message.
*/
readError: function(filename, message) {
return "<file name=\"" + xmlEscape(filename) + "\"><error line=\"0\" column=\"0\" severity=\"error\" message=\"" + xmlEscape(message) + "\"></error></file>";
},
/**
* Given CSS Lint results for a file, return output for this format.
* @param results {Object} with error and warning messages
* @param filename {String} relative file path
* @param options {Object} (UNUSED for now) specifies special handling of output
* @return {String} output for results
*/
formatResults: function(results, filename, options) {
var messages = results.messages,
output = [];
/**
* Generate a source string for a rule.
* Checkstyle source strings usually resemble Java class names, e.g.
* net.csslint.SomeRuleName
* @param {Object} rule
* @return rule source as {String}
*/
var generateSource = function(rule) {
if (!rule || !('name' in rule)) {
return "";
}
return 'net.csslint.' + rule.name.replace(/\s/g,'');
};
if (messages.length > 0) {
output.push("<file name=\""+filename+"\">");
CSSLint.Util.forEach(messages, function (message, i) {
//ignore rollups for now
if (!message.rollup) {
output.push("<error line=\"" + message.line + "\" column=\"" + message.col + "\" severity=\"" + message.type + "\"" +
" message=\"" + xmlEscape(message.message) + "\" source=\"" + generateSource(message.rule) +"\"/>");
}
});
output.push("</file>");
}
return output.join("");
}
});
}());
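/*
 * Usage sketch for this formatter (the filename is illustrative):
 *
 *     var results = CSSLint.verify(".foo.bar { color: red; }");
 *     var xml = CSSLint.format(results, "example.css", "checkstyle-xml");
 *     // => "<?xml version=\"1.0\" encoding=\"utf-8\"?><checkstyle><file name=\"example.css\">...</file></checkstyle>"
 */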
/*global CSSLint*/
CSSLint.addFormatter({
//format information
id: "compact",
name: "Compact, 'porcelain' format",
/**
* Return content to be printed before all file results.
* @return {String} to prepend before all results
*/
startFormat: function() {
return "";
},
/**
* Return content to be printed after all file results.
* @return {String} to append after all results
*/
endFormat: function() {
return "";
},
/**
* Given CSS Lint results for a file, return output for this format.
* @param results {Object} with error and warning messages
* @param filename {String} relative file path
* @param options {Object} (Optional) specifies special handling of output
* @return {String} output for results
*/
formatResults: function(results, filename, options) {
var messages = results.messages,
output = "";
options = options || {};
/**
* Capitalize and return given string.
* @param str {String} to capitalize
* @return {String} capitalized
*/
var capitalize = function(str) {
return str.charAt(0).toUpperCase() + str.slice(1);
};
if (messages.length === 0) {
return options.quiet ? "" : filename + ": Lint Free!";
}
CSSLint.Util.forEach(messages, function(message, i) {
if (message.rollup) {
output += filename + ": " + capitalize(message.type) + " - " + message.message + "\n";
} else {
output += filename + ": " + "line " + message.line +
", col " + message.col + ", " + capitalize(message.type) + " - " + message.message + "\n";
}
});
return output;
}
});
/*global CSSLint*/
CSSLint.addFormatter({
//format information
id: "csslint-xml",
name: "CSSLint XML format",
/**
* Return opening root XML tag.
* @return {String} to prepend before all results
*/
startFormat: function(){
return "<?xml version=\"1.0\" encoding=\"utf-8\"?><csslint>";
},
/**
* Return closing root XML tag.
* @return {String} to append after all results
*/
endFormat: function(){
return "</csslint>";
},
/**
* Given CSS Lint results for a file, return output for this format.
* @param results {Object} with error and warning messages
* @param filename {String} relative file path
* @param options {Object} (UNUSED for now) specifies special handling of output
* @return {String} output for results
*/
formatResults: function(results, filename, options) {
var messages = results.messages,
output = [];
/**
* Replace special characters before writing to output.
*
* Rules:
*  - single quotes are the escape sequence for double-quotes
*  - &amp; is the escape sequence for &
*  - &lt; is the escape sequence for <
*  - &gt; is the escape sequence for >
*
* @param {String} message to escape
* @return escaped message as {String}
*/
var escapeSpecialCharacters = function(str) {
if (!str || str.constructor !== String) {
return "";
}
return str.replace(/\"/g, "'").replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;");
};
if (messages.length > 0) {
output.push("<file name=\""+filename+"\">");
CSSLint.Util.forEach(messages, function (message, i) {
if (message.rollup) {
output.push("<issue severity=\"" + message.type + "\" reason=\"" + escapeSpecialCharacters(message.message) + "\" evidence=\"" + escapeSpecialCharacters(message.evidence) + "\"/>");
} else {
output.push("<issue line=\"" + message.line + "\" char=\"" + message.col + "\" severity=\"" + message.type + "\"" +
" reason=\"" + escapeSpecialCharacters(message.message) + "\" evidence=\"" + escapeSpecialCharacters(message.evidence) + "\"/>");
}
});
output.push("</file>");
}
return output.join("");
}
});
/*global CSSLint*/
CSSLint.addFormatter({
//format information
id: "junit-xml",
name: "JUNIT XML format",
/**
* Return opening root XML tag.
* @return {String} to prepend before all results
*/
startFormat: function(){
return "<?xml version=\"1.0\" encoding=\"utf-8\"?><testsuites>";
},
/**
* Return closing root XML tag.
* @return {String} to append after all results
*/
endFormat: function() {
return "</testsuites>";
},
/**
* Given CSS Lint results for a file, return output for this format.
* @param results {Object} with error and warning messages
* @param filename {String} relative file path
* @param options {Object} (UNUSED for now) specifies special handling of output
* @return {String} output for results
*/
formatResults: function(results, filename, options) {
var messages = results.messages,
output = [],
tests = {
'error': 0,
'failure': 0
};
/**
* Generate a source string for a rule.
             * JUNIT source strings usually resemble Java class names, e.g.
* net.csslint.SomeRuleName
* @param {Object} rule
* @return rule source as {String}
*/
var generateSource = function(rule) {
if (!rule || !('name' in rule)) {
return "";
}
return 'net.csslint.' + rule.name.replace(/\s/g,'');
};
/**
             * Replace special characters before writing to output.
             *
             * Rules:
             * - single quotes are the escape sequence for double-quotes
             * - &lt; is the escape sequence for <
             * - &gt; is the escape sequence for >
*
* @param {String} message to escape
* @return escaped message as {String}
*/
var escapeSpecialCharacters = function(str) {
if (!str || str.constructor !== String) {
return "";
}
return str.replace(/\"/g, "'").replace(/</g, "<").replace(/>/g, ">");
};
if (messages.length > 0) {
messages.forEach(function (message, i) {
                    // since junit has no warning class,
                    // report all issues as errors
var type = message.type === 'warning' ? 'error' : message.type;
//ignore rollups for now
if (!message.rollup) {
                        // build the test case separately, once joined
// we'll add it to a custom array filtered by type
output.push("<testcase time=\"0\" name=\"" + generateSource(message.rule) + "\">");
output.push("<" + type + " message=\"" + escapeSpecialCharacters(message.message) + "\"><![CDATA[" + message.line + ':' + message.col + ':' + escapeSpecialCharacters(message.evidence) + "]]></" + type + ">");
output.push("</testcase>");
tests[type] += 1;
}
});
output.unshift("<testsuite time=\"0\" tests=\"" + messages.length + "\" skipped=\"0\" errors=\"" + tests.error + "\" failures=\"" + tests.failure + "\" package=\"net.csslint\" name=\"" + filename + "\">");
output.push("</testsuite>");
}
return output.join("");
}
});
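    /*
     * Illustrative output of the JUnit formatter above for one file with a
     * single warning (values are made up). Warnings are reported through the
     * "error" element because JUnit has no warning category:
     *
     *   <?xml version="1.0" encoding="utf-8"?><testsuites>
     *   <testsuite time="0" tests="1" skipped="0" errors="1" failures="0" package="net.csslint" name="a.css">
     *   <testcase time="0" name="net.csslint.SomeRuleName">
     *   <error message="Rule message"><![CDATA[1:5:body {}]]></error>
     *   </testcase>
     *   </testsuite>
     *   </testsuites>
     */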
/*global CSSLint*/
CSSLint.addFormatter({
//format information
id: "lint-xml",
name: "Lint XML format",
/**
* Return opening root XML tag.
* @return {String} to prepend before all results
*/
startFormat: function(){
return "<?xml version=\"1.0\" encoding=\"utf-8\"?><lint>";
},
/**
* Return closing root XML tag.
* @return {String} to append after all results
*/
endFormat: function(){
return "</lint>";
},
/**
* Given CSS Lint results for a file, return output for this format.
* @param results {Object} with error and warning messages
* @param filename {String} relative file path
* @param options {Object} (UNUSED for now) specifies special handling of output
* @return {String} output for results
*/
formatResults: function(results, filename, options) {
var messages = results.messages,
output = [];
/**
             * Replace special characters before writing to output.
             *
             * Rules:
             * - single quotes are the escape sequence for double-quotes
             * - &amp; is the escape sequence for &
             * - &lt; is the escape sequence for <
             * - &gt; is the escape sequence for >
*
* @param {String} message to escape
* @return escaped message as {String}
*/
var escapeSpecialCharacters = function(str) {
if (!str || str.constructor !== String) {
return "";
}
return str.replace(/\"/g, "'").replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
};
if (messages.length > 0) {
output.push("<file name=\""+filename+"\">");
CSSLint.Util.forEach(messages, function (message, i) {
if (message.rollup) {
output.push("<issue severity=\"" + message.type + "\" reason=\"" + escapeSpecialCharacters(message.message) + "\" evidence=\"" + escapeSpecialCharacters(message.evidence) + "\"/>");
} else {
output.push("<issue line=\"" + message.line + "\" char=\"" + message.col + "\" severity=\"" + message.type + "\"" +
" reason=\"" + escapeSpecialCharacters(message.message) + "\" evidence=\"" + escapeSpecialCharacters(message.evidence) + "\"/>");
}
});
output.push("</file>");
}
return output.join("");
}
});
/*global CSSLint*/
CSSLint.addFormatter({
//format information
id: "text",
name: "Plain Text",
/**
* Return content to be printed before all file results.
* @return {String} to prepend before all results
*/
startFormat: function() {
return "";
},
/**
* Return content to be printed after all file results.
* @return {String} to append after all results
*/
endFormat: function() {
return "";
},
/**
* Given CSS Lint results for a file, return output for this format.
* @param results {Object} with error and warning messages
* @param filename {String} relative file path
* @param options {Object} (Optional) specifies special handling of output
* @return {String} output for results
*/
formatResults: function(results, filename, options) {
var messages = results.messages,
output = "";
options = options || {};
if (messages.length === 0) {
return options.quiet ? "" : "\n\ncsslint: No errors in " + filename + ".";
}
output = "\n\ncsslint: There are " + messages.length + " problems in " + filename + ".";
var pos = filename.lastIndexOf("/"),
shortFilename = filename;
if (pos === -1){
pos = filename.lastIndexOf("\\");
}
if (pos > -1){
shortFilename = filename.substring(pos+1);
}
CSSLint.Util.forEach(messages, function (message, i) {
output = output + "\n\n" + shortFilename;
if (message.rollup) {
output += "\n" + (i+1) + ": " + message.type;
output += "\n" + message.message;
} else {
output += "\n" + (i+1) + ": " + message.type + " at line " + message.line + ", col " + message.col;
output += "\n" + message.message;
output += "\n" + message.evidence;
}
});
return output;
}
});
return CSSLint;
})();<|fim▁end|> | |
<|file_name|>ghost.client.service.js<|end_file_name|><|fim▁begin|>'use strict';
//Ghost service used for communicating with the ghost api
angular.module('ghost').factory('Ghost', ['$http', 'localStorageService',
function($http, localStorageService) {
return {
login: function() {
return $http.get('api/ghost/login').
success(function(data, status, headers, config) {
// this callback will be called asynchronously
// when the response is available
data.authenticator = 'simple-auth-authenticator:oauth2-password-grant';
data.expires_at = data.expires_in + Date.now();
localStorageService.set('ghost-cms:session',data);
}).
error(function(data, status, headers, config) {
// called asynchronously if an error occurs
// or server returns response with an error status.
console.log('ghost login failure');
});
}
};
}
]).factory('GhostPosts', ['$http',
function($http) {
return {
read: function(options) {
return $http.get('api/ghost/posts/slug/' + options.slug).
success(function(data, status, headers, config) {
//console.log(data);
return data;
});
},<|fim▁hole|> return $http.get('api/ghost/posts/tag/' + options.tag).
success(function(data, status, headers, config) {
//console.log(data);
return data;
});
}
};
}
]);<|fim▁end|> | query: function(options) { |
<|file_name|>ExampleUnitTest.java<|end_file_name|><|fim▁begin|>package cn.mutils.app.alipay;
import org.junit.Test;
import static org.junit.Assert.*;
/**<|fim▁hole|> @Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
}<|fim▁end|> | * To work on unit tests, switch the Test Artifact in the Build Variants view.
*/
public class ExampleUnitTest { |
<|file_name|>existing-authority-documents.js<|end_file_name|><|fim▁begin|>'use strict';
const BaseController = require('../controllers/base');
const UploadModel = require('../models/file-upload');
const config = require('../../../config');
const uuid = require('uuid');
const path = require('path');<|fim▁hole|> if (docs.length) {
this.emit('complete', req, res);
}
super.get(req, res, next);
}
process(req, res, next) {
const file = req.files['existing-authority-upload'];
if (file && file.truncated) {
const err = new this.ValidationError('existing-authority-upload', {
type: 'filesize',
arguments: [config.upload.maxfilesize]
}, req, res);
return next({
'existing-authority-upload': err
});
}
if (file && file.data && file.data.length) {
req.form.values['existing-authority-filename'] = file.name;
const model = new UploadModel(file);
return model.save()
.then(result => {
req.form.values['existing-authority-upload'] = result.url;
req.form.values['existing-authority-type'] = file.mimetype;
})
.then(() => next())
.catch(e => {
if (e.code === 'FileExtensionNotAllowed') {
const err = new this.ValidationError('existing-authority-upload', {
type: 'filetype',
arguments: [path.extname(file.name)]
}, req, res);
return next({
'existing-authority-upload': err
});
}
return next(e);
});
}
return next();
}
saveValues(req, res, next) {
const files = req.sessionModel.get('existing-authority-documents') || [];
files.push({
id: uuid.v1(),
url: req.form.values['existing-authority-upload'],
description:
req.form.values['existing-authority-description'] || req.form.values['existing-authority-filename'],
type: req.form.values['existing-authority-type']
});
req.sessionModel.set('existing-authority-documents', files);
const existingAuthorityAttrs = [
'existing-authority-add-another',
'existing-authority-description',
'existing-authority-filename',
'existing-authority-upload',
'existing-authority-type'
];
super.saveValues(req, res, err => {
req.sessionModel.unset(existingAuthorityAttrs);
next(err);
});
}
};<|fim▁end|> |
module.exports = class UploadController extends BaseController {
get(req, res, next) {
const docs = req.sessionModel.get('existing-authority-documents') || []; |
<|file_name|>IssueState.java<|end_file_name|><|fim▁begin|>package com.fastaccess.data.dao.types;
import androidx.annotation.StringRes;
import com.fastaccess.R;
public enum IssueState {
open(R.string.opened),
closed(R.string.closed),
all(R.string.all);
int status;
IssueState(@StringRes int status) {
this.status = status;<|fim▁hole|> }
}<|fim▁end|> | }
@StringRes public int getStatus() {
return status; |
<|file_name|>mail.py<|end_file_name|><|fim▁begin|>#
# Sending emails in combination
# with Motion surveillance software
#
# (c) Dr. Yves J. Hilpisch
# The Python Quants GmbH
#
import smtplib
from datetime import datetime<|fim▁hole|>
def prompt(prompt):
return raw_input(prompt).strip()
fromaddr = '[email protected]' # prompt("From: ")
toaddrs = '[email protected]' # prompt("To: ")
subject = 'Security Alert.' # prompt("Subject: ")
msg = MIMEMultipart()
msg['From'] = fromaddr
msg['To'] = toaddrs
msg['Subject'] = subject
# Add the From: and To: headers at the start!
# msg = ("From: %s\r\nTo: %s\r\n\r\nSubject: %s\r\n"
# % (fromaddr, ", ".join(toaddrs), subject))
# print "Enter message, end with ^D (Unix) or ^Z (Windows):"
# body = ''
#while 1:
# try:
# line = raw_input()
# except EOFError:
# break
# if not line:
# break
# body = body + line
body = 'A motion has been detected.\nTime: %s' % str(datetime.now())
msg.attach(MIMEText(body, 'plain'))
print "Message length is " + repr(len(msg))
smtp = smtplib.SMTP()
# smtp.starttls()
smtp.set_debuglevel(1)
smtp.connect('smtp.hilpisch.com', 587)
smtp.login('hilpisch13', 'henrynikolaus06')
text = msg.as_string()
smtp.sendmail(fromaddr, toaddrs, text)
smtp.quit()
print text<|fim▁end|> | from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText |
<|file_name|>sbs_simple_ops.py<|end_file_name|><|fim▁begin|># Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
from common import *
import testdata
class oldstyle:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle(%s)" % self.value
def __add__(self, other): return self.value + other
def __sub__(self, other): return self.value - other
def __mul__(self, other): return self.value * other
def __div__(self, other): return self.value / other
def __floordiv__(self, other): return self.value // other
def __mod__(self, other): return self.value % other
def __divmod__(self, other): return divmod(self.value, other)
def __pow__(self, other): return self.value ** other
def __lshift__(self, other): return self.value << other
def __rshift__(self, other): return self.value >> other
def __and__(self, other): return self.value & other
def __xor__(self, other): return self.value ^ other
def __or__(self, other): return self.value | other
class oldstyle_reflect:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_reflect(%s)" % self.value
def __radd__(self, other): return other + self.value
def __rsub__(self, other): return other - self.value
def __rmul__(self, other):
print("\toldstyle_reflect.__rmul__")
return other * self.value
def __rdiv__(self, other): return other / self.value
def __rfloordiv__(self, other): return other // self.value
def __rmod__(self, other): return other % self.value
def __rdivmod__(self, other): return divmod(other, self.value)
def __rpow__(self, other): return other ** self.value
def __rlshift__(self, other): return other << self.value
def __rrshift__(self, other): return other >> self.value
def __rand__(self, other): return self.value & other
def __rxor__(self, other): return self.value ^ other
def __ror__(self, other): return self.value | other
class oldstyle_inplace:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_inplace(%s)" % self.value
def __iadd__(self, other): return self.value + other
def __isub__(self, other): return self.value - other
def __imul__(self, other): return self.value * other
def __idiv__(self, other): return self.value / other
def __ifloordiv__(self, other): return self.value // other
def __imod__(self, other): return self.value % other
def __idivmod__(self, other): return divmod(self.value, other)
def __ipow__(self, other): return self.value ** other
def __ilshift__(self, other): return self.value << other
def __irshift__(self, other): return self.value >> other
def __iand__(self, other): return self.value & other
def __ixor__(self, other): return self.value ^ other
def __ior__(self, other): return self.value | other
class oldstyle_notdefined:
def __init__(self, value): self.value = value
def __repr__(self): return "oldstyle_notdefined(%s)" % self.value
class newstyle(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle(%s, %r)" % (self.value, type(self.value))
def __add__(self, other): return self.value + other
def __sub__(self, other): return self.value - other
def __mul__(self, other): return self.value * other
def __div__(self, other): return self.value / other
def __floordiv__(self, other): return self.value // other
def __mod__(self, other): return self.value % other
def __divmod__(self, other): return divmod(self.value, other)
def __pow__(self, other): return self.value ** other
def __lshift__(self, other): return self.value << other
def __rshift__(self, other): return self.value >> other
def __and__(self, other): return self.value & other
def __xor__(self, other): return self.value ^ other
def __or__(self, other): return self.value | other
class newstyle_reflect(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_reflect(%s, %r)" % (self.value, type(self.value))
def __radd__(self, other): return other + self.value
def __rsub__(self, other): return other - self.value
def __rmul__(self, other):
print("\tnewstyle_reflect.__rmul__")
return other * self.value
def __rdiv__(self, other): return other / self.value
def __rfloordiv__(self, other): return other // self.value
def __rmod__(self, other): return other % self.value
def __rdivmod__(self, other): return divmod(other, self.value)
def __rpow__(self, other): return other ** self.value
def __rlshift__(self, other): return other << self.value
def __rrshift__(self, other): return other >> self.value
def __rand__(self, other): return self.value & other
def __rxor__(self, other): return self.value ^ other
def __ror__(self, other): return self.value | other
class newstyle_inplace(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_inplace(%s, %r)" % (self.value, type(self.value))
def __iadd__(self, other): return self.value + other
def __isub__(self, other): return self.value - other
def __imul__(self, other): return self.value * other
def __idiv__(self, other): return self.value / other
def __ifloordiv__(self, other): return self.value // other
def __imod__(self, other): return self.value % other
def __idivmod__(self, other): return divmod(self.value, other)
def __ipow__(self, other): return self.value ** other
def __ilshift__(self, other): return self.value << other
def __irshift__(self, other): return self.value >> other
def __iand__(self, other): return self.value & other
def __ixor__(self, other): return self.value ^ other
def __ior__(self, other): return self.value | other
class newstyle_notdefined(object):
def __init__(self, value): self.value = value
def __repr__(self): return "newstyle_notdefined(%s, %r)" % (self.value, type(self.value))
import sys
class common(object):
def normal(self, leftc, rightc):
for a in leftc:
for b in rightc:
try:
printwith("case", a, "+", b, type(a), type(b))
printwithtype(a + b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "-", b, type(a), type(b))
printwithtype(a - b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "*", b, type(a), type(b))
printwithtype(a * b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "/", b, type(a), type(b))
printwithtype(a / b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "//", b, type(a), type(b))
printwithtype(a // b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "%", b, type(a), type(b))
printwithtype(a % b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "**", b, type(a), type(b))
printwithtype(a ** b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "<<", b, type(a), type(b))
printwithtype(a << b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, ">>", b, type(a), type(b))
printwithtype(a >> b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "&", b, type(a), type(b))
printwithtype(a & b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "^", b, type(a), type(b))
printwithtype(a ^ b)
except:
printwith("same", sys.exc_info()[0])
try:
printwith("case", a, "|", b, type(a), type(b))
printwithtype(a | b)
except:
printwith("same", sys.exc_info()[0])
def clone_list(self, l):
l2 = []
for x in l:
if x is newstyle_inplace:
l2.append(newstyle_inplace(x.value))
elif x is oldstyle_inplace:
l2.append(oldstyle_inplace(x.value))
else :
l2.append(x)
return l2
def inplace(self, leftc, rightc):
rc = self.clone_list(rightc)
for b in rc:
lc = self.clone_list(leftc)
for a in lc:
try:
op = "+"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a += b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "-"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a -= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "*"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a *= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "//"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a //= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "%"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a %= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "**"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a **= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "<<"<|fim▁hole|> printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = ">>"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a >>= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "&"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a &= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "^"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a ^= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
lc = self.clone_list(leftc)
for a in lc:
try:
op = "|"
printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a |= b
printwithtype(a)
except:
printwith("same", sys.exc_info()[0])
class ops_simple(common):
def __init__(self):
self.collection = testdata.merge_lists(
[None],
testdata.list_bool,
testdata.list_int,
testdata.list_float,
testdata.list_long[:-1], # the last number is very long
testdata.list_complex,
testdata.list_myint,
testdata.list_myfloat,
testdata.list_mylong,
testdata.list_mycomplex,
testdata.get_Int64_Byte(),
)
self.collection_oldstyle = [oldstyle(x) for x in self.collection]
self.collection_oldstyle_reflect = [oldstyle_reflect(x) for x in self.collection]
self.collection_oldstyle_notdefined = [oldstyle_notdefined(x) for x in self.collection]
self.collection_newstyle = [newstyle(x) for x in self.collection]
self.collection_newstyle_reflect = [newstyle_reflect(x) for x in self.collection]
self.collection_newstyle_notdefined = [newstyle_notdefined(x) for x in self.collection]
self.collection_oldstyle_inplace = [oldstyle_inplace(x) for x in self.collection]
self.collection_newstyle_inplace = [newstyle_inplace(x) for x in self.collection]
def test_normal(self): super(ops_simple, self).normal(self.collection, self.collection)
def test_normal_oc_left(self): super(ops_simple, self).normal(self.collection_oldstyle, self.collection)
def test_normal_oc_right(self): super(ops_simple, self).normal(self.collection, self.collection_oldstyle)
def test_normal_nc_left(self): super(ops_simple, self).normal(self.collection_newstyle, self.collection)
def test_normal_nc_right(self): super(ops_simple, self).normal(self.collection, self.collection_newstyle)
def test_reflect_oc_right(self): super(ops_simple, self).normal(self.collection, self.collection_oldstyle_reflect)
def test_reflect_nc_right(self): super(ops_simple, self).normal(self.collection, self.collection_newstyle_reflect)
def test_oc_notdefined(self): super(ops_simple, self).normal(self.collection_oldstyle_notdefined, self.collection)
def test_nc_notdefined(self): super(ops_simple, self).normal(self.collection_newstyle_notdefined, self.collection)
def test_oc_notdefined_oc_reflect(self): super(ops_simple, self).normal(self.collection_oldstyle_notdefined, self.collection_oldstyle_reflect)
def test_nc_notdefined_nc_reflect(self): super(ops_simple, self).normal(self.collection_newstyle_notdefined, self.collection_newstyle_reflect)
def test_inplace(self): super(ops_simple, self).inplace(self.collection, self.collection)
def test_inplace_ol(self): super(ops_simple, self).inplace(self.collection_oldstyle_inplace, self.collection)
def test_inplace_nl(self): super(ops_simple, self).inplace(self.collection_newstyle_inplace, self.collection)
runtests(ops_simple)<|fim▁end|> | printwith("case", "%s %s= %s" % (a, op, b), type(a), type(b))
a <<= b |
<|file_name|>types.go<|end_file_name|><|fim▁begin|>/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package api
import (
"k8s.io/client-go/1.4/pkg/api/resource"
"k8s.io/client-go/1.4/pkg/api/unversioned"
"k8s.io/client-go/1.4/pkg/fields"
"k8s.io/client-go/1.4/pkg/labels"
"k8s.io/client-go/1.4/pkg/runtime"
"k8s.io/client-go/1.4/pkg/types"
"k8s.io/client-go/1.4/pkg/util/intstr"
)
// Common string formats
// ---------------------
// Many fields in this API have formatting requirements. The commonly used
// formats are defined here.
//
// C_IDENTIFIER: This is a string that conforms to the definition of an "identifier"
// in the C language. This is captured by the following regex:
// [A-Za-z_][A-Za-z0-9_]*
// This defines the format, but not the length restriction, which should be
// specified at the definition of any field of this type.
//
// DNS_LABEL: This is a string, no more than 63 characters long, that conforms
// to the definition of a "label" in RFCs 1035 and 1123. This is captured
// by the following regex:
// [a-z0-9]([-a-z0-9]*[a-z0-9])?
//
// DNS_SUBDOMAIN: This is a string, no more than 253 characters long, that conforms
// to the definition of a "subdomain" in RFCs 1035 and 1123. This is captured
// by the following regex:
// [a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*
// or more simply:
// DNS_LABEL(\.DNS_LABEL)*
//
// IANA_SVC_NAME: This is a string, no more than 15 characters long, that
// conforms to the definition of IANA service name in RFC 6335.
// It must contain at least one letter [a-z] and it must contain only [a-z0-9-].
// Hyphens ('-') cannot be the leading or trailing character of the string
// and cannot be adjacent to other hyphens.
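// Illustrative sketch (not part of the API types): checking a string against
// the DNS_LABEL format documented above. The regular expression and the
// 63-character limit are taken from the comment; the helper name and the
// "regexp" usage are illustrative assumptions.
//
//	var dnsLabel = regexp.MustCompile(`^[a-z0-9]([-a-z0-9]*[a-z0-9])?$`)
//
//	func isDNSLabel(s string) bool {
//		return len(s) <= 63 && dnsLabel.MatchString(s)
//	}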
// ObjectMeta is metadata that all persisted resources must have, which includes all objects
// users must create.
type ObjectMeta struct {
// Name is unique within a namespace. Name is required when creating resources, although
// some resources may allow a client to request the generation of an appropriate name
// automatically. Name is primarily intended for creation idempotence and configuration
// definition.
Name string `json:"name,omitempty"`
// GenerateName indicates that the name should be made unique by the server prior to persisting
// it. A non-empty value for the field indicates the name will be made unique (and the name
// returned to the client will be different than the name passed). The value of this field will
// be combined with a unique suffix on the server if the Name field has not been provided.
// The provided value must be valid within the rules for Name, and may be truncated by the length
// of the suffix required to make the value unique on the server.
//
// If this field is specified, and Name is not present, the server will NOT return a 409 if the
// generated name exists - instead, it will either return 201 Created or 500 with Reason
// ServerTimeout indicating a unique name could not be found in the time allotted, and the client
// should retry (optionally after the time indicated in the Retry-After header).
GenerateName string `json:"generateName,omitempty"`
// Namespace defines the space within which name must be unique. An empty namespace is
// equivalent to the "default" namespace, but "default" is the canonical representation.
// Not all objects are required to be scoped to a namespace - the value of this field for
// those objects will be empty.
Namespace string `json:"namespace,omitempty"`
// SelfLink is a URL representing this object.
SelfLink string `json:"selfLink,omitempty"`
// UID is the unique in time and space value for this object. It is typically generated by
// the server on successful creation of a resource and is not allowed to change on PUT
// operations.
UID types.UID `json:"uid,omitempty"`
// An opaque value that represents the version of this resource. May be used for optimistic
// concurrency, change detection, and the watch operation on a resource or set of resources.
// Clients must treat these values as opaque and values may only be valid for a particular
// resource or set of resources. Only servers will generate resource versions.
ResourceVersion string `json:"resourceVersion,omitempty"`
// A sequence number representing a specific generation of the desired state.
// Populated by the system. Read-only.
Generation int64 `json:"generation,omitempty"`
// CreationTimestamp is a timestamp representing the server time when this object was
// created. It is not guaranteed to be set in happens-before order across separate operations.
// Clients may not set this value. It is represented in RFC3339 form and is in UTC.
CreationTimestamp unversioned.Time `json:"creationTimestamp,omitempty"`
// DeletionTimestamp is the time after which this resource will be deleted. This
// field is set by the server when a graceful deletion is requested by the user, and is not
// directly settable by a client. The resource will be deleted (no longer visible from
// resource lists, and not reachable by name) after the time in this field. Once set, this
// value may not be unset or be set further into the future, although it may be shortened
// or the resource may be deleted prior to this time. For example, a user may request that
// a pod is deleted in 30 seconds. The Kubelet will react by sending a graceful termination
// signal to the containers in the pod. Once the resource is deleted in the API, the Kubelet
// will send a hard termination signal to the container.
DeletionTimestamp *unversioned.Time `json:"deletionTimestamp,omitempty"`
// DeletionGracePeriodSeconds records the graceful deletion value set when graceful deletion
// was requested. Represents the most recent grace period, and may only be shortened once set.
DeletionGracePeriodSeconds *int64 `json:"deletionGracePeriodSeconds,omitempty"`
// Labels are key value pairs that may be used to scope and select individual resources.
// Label keys are of the form:
// label-key ::= prefixed-name | name
// prefixed-name ::= prefix '/' name
// prefix ::= DNS_SUBDOMAIN
// name ::= DNS_LABEL
// The prefix is optional. If the prefix is not specified, the key is assumed to be private
// to the user. Other system components that wish to use labels must specify a prefix. The
// "kubernetes.io/" prefix is reserved for use by kubernetes components.
// TODO: replace map[string]string with labels.LabelSet type
Labels map[string]string `json:"labels,omitempty"`
// Annotations are unstructured key value data stored with a resource that may be set by
// external tooling. They are not queryable and should be preserved when modifying
// objects. Annotation keys have the same formatting restrictions as Label keys. See the
// comments on Labels for details.
Annotations map[string]string `json:"annotations,omitempty"`
// List of objects depended by this object. If ALL objects in the list have
// been deleted, this object will be garbage collected. If this object is managed by a controller,
// then an entry in this list will point to this controller, with the controller field set to true.
// There cannot be more than one managing controller.
OwnerReferences []OwnerReference `json:"ownerReferences,omitempty"`
// Must be empty before the object is deleted from the registry. Each entry
// is an identifier for the responsible component that will remove the entry
// from the list. If the deletionTimestamp of the object is non-nil, entries
// in this list can only be removed.
Finalizers []string `json:"finalizers,omitempty"`
}
const (
// NamespaceDefault means the object is in the default namespace which is applied when not specified by clients
NamespaceDefault string = "default"
// NamespaceAll is the default argument to specify on a context when you want to list or filter resources across all namespaces
NamespaceAll string = ""
// NamespaceNone is the argument for a context when there is no namespace.
NamespaceNone string = ""
// NamespaceSystem is the system namespace where we place system components.
NamespaceSystem string = "kube-system"
// TerminationMessagePathDefault means the default path to capture the application termination message running in a container
TerminationMessagePathDefault string = "/dev/termination-log"
)
// Volume represents a named volume in a pod that may be accessed by any containers in the pod.
type Volume struct {
// Required: This must be a DNS_LABEL. Each volume in a pod must have
// a unique name.
Name string `json:"name"`
// The VolumeSource represents the location and type of a volume to mount.
// This is optional for now. If not specified, the Volume is implied to be an EmptyDir.
// This implied behavior is deprecated and will be removed in a future version.
VolumeSource `json:",inline,omitempty"`
}
// VolumeSource represents the source location of a volume to mount.
// Only one of its members may be specified.
type VolumeSource struct {
// HostPath represents file or directory on the host machine that is
// directly exposed to the container. This is generally used for system
// agents or other privileged things that are allowed to see the host
// machine. Most containers will NOT need this.
// ---
// TODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not
// mount host directories as read/write.
HostPath *HostPathVolumeSource `json:"hostPath,omitempty"`
// EmptyDir represents a temporary directory that shares a pod's lifetime.
EmptyDir *EmptyDirVolumeSource `json:"emptyDir,omitempty"`
// GCEPersistentDisk represents a GCE Disk resource that is attached to a
// kubelet's host machine and then exposed to the pod.
GCEPersistentDisk *GCEPersistentDiskVolumeSource `json:"gcePersistentDisk,omitempty"`
// AWSElasticBlockStore represents an AWS EBS disk that is attached to a
// kubelet's host machine and then exposed to the pod.
AWSElasticBlockStore *AWSElasticBlockStoreVolumeSource `json:"awsElasticBlockStore,omitempty"`
// GitRepo represents a git repository at a particular revision.
GitRepo *GitRepoVolumeSource `json:"gitRepo,omitempty"`
// Secret represents a secret that should populate this volume.
Secret *SecretVolumeSource `json:"secret,omitempty"`
// NFS represents an NFS mount on the host that shares a pod's lifetime
NFS *NFSVolumeSource `json:"nfs,omitempty"`
// ISCSIVolumeSource represents an ISCSI Disk resource that is attached to a
// kubelet's host machine and then exposed to the pod.
ISCSI *ISCSIVolumeSource `json:"iscsi,omitempty"`
// Glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime
Glusterfs *GlusterfsVolumeSource `json:"glusterfs,omitempty"`
// PersistentVolumeClaimVolumeSource represents a reference to a PersistentVolumeClaim in the same namespace
PersistentVolumeClaim *PersistentVolumeClaimVolumeSource `json:"persistentVolumeClaim,omitempty"`
// RBD represents a Rados Block Device mount on the host that shares a pod's lifetime
RBD *RBDVolumeSource `json:"rbd,omitempty"`
// FlexVolume represents a generic volume resource that is
	// provisioned/attached using an exec based plugin. This is an alpha feature and may change in the future.
FlexVolume *FlexVolumeSource `json:"flexVolume,omitempty"`
	// Cinder represents a cinder volume attached and mounted on the kubelet's host machine
Cinder *CinderVolumeSource `json:"cinder,omitempty"`
// CephFS represents a Cephfs mount on the host that shares a pod's lifetime
CephFS *CephFSVolumeSource `json:"cephfs,omitempty"`
// Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running
Flocker *FlockerVolumeSource `json:"flocker,omitempty"`
// DownwardAPI represents metadata about the pod that should populate this volume
DownwardAPI *DownwardAPIVolumeSource `json:"downwardAPI,omitempty"`
// FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.
FC *FCVolumeSource `json:"fc,omitempty"`
// AzureFile represents an Azure File Service mount on the host and bind mount to the pod.
AzureFile *AzureFileVolumeSource `json:"azureFile,omitempty"`
// ConfigMap represents a configMap that should populate this volume
ConfigMap *ConfigMapVolumeSource `json:"configMap,omitempty"`
	// VsphereVolume represents a vSphere volume attached and mounted on the kubelet's host machine
VsphereVolume *VsphereVirtualDiskVolumeSource `json:"vsphereVolume,omitempty"`
}
// Similar to VolumeSource but meant for the administrator who creates PVs.
// Exactly one of its members must be set.
type PersistentVolumeSource struct {
// GCEPersistentDisk represents a GCE Disk resource that is attached to a
// kubelet's host machine and then exposed to the pod.
GCEPersistentDisk *GCEPersistentDiskVolumeSource `json:"gcePersistentDisk,omitempty"`
// AWSElasticBlockStore represents an AWS EBS disk that is attached to a
// kubelet's host machine and then exposed to the pod.
AWSElasticBlockStore *AWSElasticBlockStoreVolumeSource `json:"awsElasticBlockStore,omitempty"`
// HostPath represents a directory on the host.
// Provisioned by a developer or tester.
// This is useful for single-node development and testing only!
// On-host storage is not supported in any way and WILL NOT WORK in a multi-node cluster.
HostPath *HostPathVolumeSource `json:"hostPath,omitempty"`
// Glusterfs represents a Glusterfs volume that is attached to a host and exposed to the pod
Glusterfs *GlusterfsVolumeSource `json:"glusterfs,omitempty"`
// NFS represents an NFS mount on the host that shares a pod's lifetime
NFS *NFSVolumeSource `json:"nfs,omitempty"`
// RBD represents a Rados Block Device mount on the host that shares a pod's lifetime
RBD *RBDVolumeSource `json:"rbd,omitempty"`
// ISCSIVolumeSource represents an ISCSI resource that is attached to a
// kubelet's host machine and then exposed to the pod.
ISCSI *ISCSIVolumeSource `json:"iscsi,omitempty"`
// FlexVolume represents a generic volume resource that is
	// provisioned/attached using an exec based plugin. This is an alpha feature and may change in the future.
FlexVolume *FlexVolumeSource `json:"flexVolume,omitempty"`
	// Cinder represents a cinder volume attached and mounted on the kubelet's host machine
Cinder *CinderVolumeSource `json:"cinder,omitempty"`
// CephFS represents a Ceph FS mount on the host that shares a pod's lifetime
CephFS *CephFSVolumeSource `json:"cephfs,omitempty"`
// FC represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.
FC *FCVolumeSource `json:"fc,omitempty"`
// Flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running
Flocker *FlockerVolumeSource `json:"flocker,omitempty"`
// AzureFile represents an Azure File Service mount on the host and bind mount to the pod.
AzureFile *AzureFileVolumeSource `json:"azureFile,omitempty"`
	// VsphereVolume represents a vSphere volume attached and mounted on the kubelet's host machine
VsphereVolume *VsphereVirtualDiskVolumeSource `json:"vsphereVolume,omitempty"`
}
type PersistentVolumeClaimVolumeSource struct {
// ClaimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume
ClaimName string `json:"claimName"`
// Optional: Defaults to false (read/write). ReadOnly here
// will force the ReadOnly setting in VolumeMounts
ReadOnly bool `json:"readOnly,omitempty"`
}
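// Illustrative sketch (hypothetical values): a pod mounts an existing claim
// by setting the PersistentVolumeClaim member of the VolumeSource union
// defined earlier in this file.
//
//	vol := Volume{
//		Name: "data",
//		VolumeSource: VolumeSource{
//			PersistentVolumeClaim: &PersistentVolumeClaimVolumeSource{
//				ClaimName: "my-claim",
//			},
//		},
//	}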
// +genclient=true
// +nonNamespaced=true
type PersistentVolume struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
//Spec defines a persistent volume owned by the cluster
Spec PersistentVolumeSpec `json:"spec,omitempty"`
// Status represents the current information about persistent volume.
Status PersistentVolumeStatus `json:"status,omitempty"`
}
type PersistentVolumeSpec struct {
// Resources represents the actual resources of the volume
Capacity ResourceList `json:"capacity"`
// Source represents the location and type of a volume to mount.
PersistentVolumeSource `json:",inline"`
// AccessModes contains all ways the volume can be mounted
AccessModes []PersistentVolumeAccessMode `json:"accessModes,omitempty"`
// ClaimRef is part of a bi-directional binding between PersistentVolume and PersistentVolumeClaim.
// ClaimRef is expected to be non-nil when bound.
// claim.VolumeName is the authoritative bind between PV and PVC.
// When set to non-nil value, PVC.Spec.Selector of the referenced PVC is
// ignored, i.e. labels of this PV do not need to match PVC selector.
ClaimRef *ObjectReference `json:"claimRef,omitempty"`
// Optional: what happens to a persistent volume when released from its claim.
PersistentVolumeReclaimPolicy PersistentVolumeReclaimPolicy `json:"persistentVolumeReclaimPolicy,omitempty"`
}
// PersistentVolumeReclaimPolicy describes a policy for end-of-life maintenance of persistent volumes
type PersistentVolumeReclaimPolicy string
const (
// PersistentVolumeReclaimRecycle means the volume will be recycled back into the pool of unbound persistent volumes on release from its claim.
// The volume plugin must support Recycling.
PersistentVolumeReclaimRecycle PersistentVolumeReclaimPolicy = "Recycle"
// PersistentVolumeReclaimDelete means the volume will be deleted from Kubernetes on release from its claim.
// The volume plugin must support Deletion.
PersistentVolumeReclaimDelete PersistentVolumeReclaimPolicy = "Delete"
// PersistentVolumeReclaimRetain means the volume will be left in its current phase (Released) for manual reclamation by the administrator.
// The default policy is Retain.
PersistentVolumeReclaimRetain PersistentVolumeReclaimPolicy = "Retain"
)
type PersistentVolumeStatus struct {
// Phase indicates if a volume is available, bound to a claim, or released by a claim
Phase PersistentVolumePhase `json:"phase,omitempty"`
// A human-readable message indicating details about why the volume is in this state.
Message string `json:"message,omitempty"`
// Reason is a brief CamelCase string that describes any failure and is meant for machine parsing and tidy display in the CLI
Reason string `json:"reason,omitempty"`
}
type PersistentVolumeList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []PersistentVolume `json:"items"`
}
// +genclient=true
// PersistentVolumeClaim is a user's request for and claim to a persistent volume
type PersistentVolumeClaim struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the volume requested by a pod author
Spec PersistentVolumeClaimSpec `json:"spec,omitempty"`
// Status represents the current information about a claim
Status PersistentVolumeClaimStatus `json:"status,omitempty"`
}
type PersistentVolumeClaimList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []PersistentVolumeClaim `json:"items"`
}
// PersistentVolumeClaimSpec describes the common attributes of storage devices
// and allows a Source for provider-specific attributes
type PersistentVolumeClaimSpec struct {
// Contains the types of access modes required
AccessModes []PersistentVolumeAccessMode `json:"accessModes,omitempty"`
// A label query over volumes to consider for binding. This selector is
// ignored when VolumeName is set
Selector *unversioned.LabelSelector `json:"selector,omitempty"`
// Resources represents the minimum resources required
Resources ResourceRequirements `json:"resources,omitempty"`
// VolumeName is the binding reference to the PersistentVolume backing this
// claim. When set to non-empty value Selector is not evaluated
VolumeName string `json:"volumeName,omitempty"`
}
type PersistentVolumeClaimStatus struct {
// Phase represents the current phase of PersistentVolumeClaim
Phase PersistentVolumeClaimPhase `json:"phase,omitempty"`
// AccessModes contains all ways the volume backing the PVC can be mounted
AccessModes []PersistentVolumeAccessMode `json:"accessModes,omitempty"`
// Represents the actual resources of the underlying volume
Capacity ResourceList `json:"capacity,omitempty"`
}
type PersistentVolumeAccessMode string
const (
	// can be mounted in read/write mode to exactly 1 host
ReadWriteOnce PersistentVolumeAccessMode = "ReadWriteOnce"
// can be mounted in read-only mode to many hosts
ReadOnlyMany PersistentVolumeAccessMode = "ReadOnlyMany"
// can be mounted in read/write mode to many hosts
ReadWriteMany PersistentVolumeAccessMode = "ReadWriteMany"
)
type PersistentVolumePhase string
const (
// used for PersistentVolumes that are not available
VolumePending PersistentVolumePhase = "Pending"
// used for PersistentVolumes that are not yet bound
// Available volumes are held by the binder and matched to PersistentVolumeClaims
VolumeAvailable PersistentVolumePhase = "Available"
// used for PersistentVolumes that are bound
VolumeBound PersistentVolumePhase = "Bound"
// used for PersistentVolumes where the bound PersistentVolumeClaim was deleted
// released volumes must be recycled before becoming available again
// this phase is used by the persistent volume claim binder to signal to another process to reclaim the resource
VolumeReleased PersistentVolumePhase = "Released"
// used for PersistentVolumes that failed to be correctly recycled or deleted after being released from a claim
VolumeFailed PersistentVolumePhase = "Failed"
)
type PersistentVolumeClaimPhase string
const (
// used for PersistentVolumeClaims that are not yet bound
ClaimPending PersistentVolumeClaimPhase = "Pending"
// used for PersistentVolumeClaims that are bound
ClaimBound PersistentVolumeClaimPhase = "Bound"
// used for PersistentVolumeClaims that lost their underlying
// PersistentVolume. The claim was bound to a PersistentVolume and this
// volume does not exist any longer and all data on it was lost.
ClaimLost PersistentVolumeClaimPhase = "Lost"
)
// Represents a host path mapped into a pod.
// Host path volumes do not support ownership management or SELinux relabeling.
type HostPathVolumeSource struct {
Path string `json:"path"`
}
// Represents an empty directory for a pod.
// Empty directory volumes support ownership management and SELinux relabeling.
type EmptyDirVolumeSource struct {
// TODO: Longer term we want to represent the selection of underlying
// media more like a scheduling problem - user says what traits they
// need, we give them a backing store that satisfies that. For now
// this will cover the most common needs.
// Optional: what type of storage medium should back this directory.
// The default is "" which means to use the node's default medium.
Medium StorageMedium `json:"medium,omitempty"`
}
// StorageMedium defines ways that storage can be allocated to a volume.
type StorageMedium string
const (
StorageMediumDefault StorageMedium = "" // use whatever the default is for the node
StorageMediumMemory StorageMedium = "Memory" // use memory (tmpfs)
)
// Protocol defines network protocols supported for things like container ports.
type Protocol string
const (
// ProtocolTCP is the TCP protocol.
ProtocolTCP Protocol = "TCP"
// ProtocolUDP is the UDP protocol.
ProtocolUDP Protocol = "UDP"
)
// Represents a Persistent Disk resource in Google Compute Engine.
//
// A GCE PD must exist before mounting to a container. The disk must
// also be in the same GCE project and zone as the kubelet. A GCE PD
// can only be mounted as read/write once or read-only many times. GCE
// PDs support ownership management and SELinux relabeling.
type GCEPersistentDiskVolumeSource struct {
// Unique name of the PD resource. Used to identify the disk in GCE
PDName string `json:"pdName"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
// TODO: how do we prevent errors in the filesystem from compromising the machine
FSType string `json:"fsType,omitempty"`
// Optional: Partition on the disk to mount.
// If omitted, kubelet will attempt to mount the device name.
// Ex. For /dev/sda1, this field is "1", for /dev/sda, this field is 0 or empty.
Partition int32 `json:"partition,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents an ISCSI disk.
// ISCSI volumes can only be mounted as read/write once.
// ISCSI volumes support ownership management and SELinux relabeling.
type ISCSIVolumeSource struct {
// Required: iSCSI target portal
// the portal is either an IP or ip_addr:port if port is other than default (typically TCP ports 860 and 3260)
TargetPortal string `json:"targetPortal,omitempty"`
// Required: target iSCSI Qualified Name
IQN string `json:"iqn,omitempty"`
// Required: iSCSI target lun number
Lun int32 `json:"lun,omitempty"`
// Optional: Defaults to 'default' (tcp). iSCSI interface name that uses an iSCSI transport.
ISCSIInterface string `json:"iscsiInterface,omitempty"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
// TODO: how do we prevent errors in the filesystem from compromising the machine
FSType string `json:"fsType,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a Fibre Channel volume.
// Fibre Channel volumes can only be mounted as read/write once.
// Fibre Channel volumes support ownership management and SELinux relabeling.
type FCVolumeSource struct {
// Required: FC target world wide names (WWNs)
TargetWWNs []string `json:"targetWWNs"`
// Required: FC target lun number
Lun *int32 `json:"lun"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
// TODO: how do we prevent errors in the filesystem from compromising the machine
FSType string `json:"fsType,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// FlexVolume represents a generic volume resource that is
// provisioned/attached using an exec based plugin. This is an alpha feature and may change in the future.
type FlexVolumeSource struct {
// Driver is the name of the driver to use for this volume.
Driver string `json:"driver"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script.
FSType string `json:"fsType,omitempty"`
// Optional: SecretRef is reference to the secret object containing
// sensitive information to pass to the plugin scripts. This may be
// empty if no secret object is specified. If the secret object
// contains more than one secret, all secrets are passed to the plugin
// scripts.
SecretRef *LocalObjectReference `json:"secretRef,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
// Optional: Extra driver options if any.
Options map[string]string `json:"options,omitempty"`
}
// Represents a Persistent Disk resource in AWS.
//
// An AWS EBS disk must exist before mounting to a container. The disk
// must also be in the same AWS zone as the kubelet. A AWS EBS disk
// can only be mounted as read/write once. AWS EBS volumes support
// ownership management and SELinux relabeling.
type AWSElasticBlockStoreVolumeSource struct {
// Unique id of the persistent disk resource. Used to identify the disk in AWS
VolumeID string `json:"volumeID"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
// TODO: how do we prevent errors in the filesystem from compromising the machine
FSType string `json:"fsType,omitempty"`
// Optional: Partition on the disk to mount.
// If omitted, kubelet will attempt to mount the device name.
// Ex. For /dev/sda1, this field is "1", for /dev/sda, this field is 0 or empty.
Partition int32 `json:"partition,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a volume that is populated with the contents of a git repository.
// Git repo volumes do not support ownership management.
// Git repo volumes support SELinux relabeling.
type GitRepoVolumeSource struct {
// Repository URL
Repository string `json:"repository"`
// Commit hash, this is optional
Revision string `json:"revision,omitempty"`
// Clone target, this is optional
// Must not contain or start with '..'. If '.' is supplied, the volume directory will be the
// git repository. Otherwise, if specified, the volume will contain the git repository in
// the subdirectory with the given name.
Directory string `json:"directory,omitempty"`
// TODO: Consider credentials here.
}
// Adapts a Secret into a volume.
//
// The contents of the target Secret's Data field will be presented in a volume
// as files using the keys in the Data field as the file names.
// Secret volumes support ownership management and SELinux relabeling.
type SecretVolumeSource struct {
// Name of the secret in the pod's namespace to use.
SecretName string `json:"secretName,omitempty"`
// If unspecified, each key-value pair in the Data field of the referenced
// Secret will be projected into the volume as a file whose name is the
// key and content is the value. If specified, the listed keys will be
// projected into the specified paths, and unlisted keys will not be
// present. If a key is specified which is not present in the Secret,
// the volume setup will error. Paths must be relative and may not contain
// the '..' path or start with '..'.
Items []KeyToPath `json:"items,omitempty"`
}
// Represents an NFS mount that lasts the lifetime of a pod.
// NFS volumes do not support ownership management or SELinux relabeling.
type NFSVolumeSource struct {
// Server is the hostname or IP address of the NFS server
Server string `json:"server"`
// Path is the exported NFS share
Path string `json:"path"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the NFS export to be mounted with read-only permissions
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a Glusterfs mount that lasts the lifetime of a pod.
// Glusterfs volumes do not support ownership management or SELinux relabeling.
type GlusterfsVolumeSource struct {
// Required: EndpointsName is the endpoint name that details Glusterfs topology
EndpointsName string `json:"endpoints"`
// Required: Path is the Glusterfs volume path
Path string `json:"path"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the Glusterfs to be mounted with read-only permissions
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a Rados Block Device mount that lasts the lifetime of a pod.
// RBD volumes support ownership management and SELinux relabeling.
type RBDVolumeSource struct {
// Required: CephMonitors is a collection of Ceph monitors
CephMonitors []string `json:"monitors"`
// Required: RBDImage is the rados image name
RBDImage string `json:"image"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
// TODO: how do we prevent errors in the filesystem from compromising the machine
FSType string `json:"fsType,omitempty"`
	// Optional: RadosPool is the rados pool name, default is rbd
RBDPool string `json:"pool,omitempty"`
// Optional: RBDUser is the rados user name, default is admin
RadosUser string `json:"user,omitempty"`
// Optional: Keyring is the path to key ring for RBDUser, default is /etc/ceph/keyring
Keyring string `json:"keyring,omitempty"`
	// Optional: SecretRef is the name of the authentication secret for RBDUser, default is nil.
SecretRef *LocalObjectReference `json:"secretRef,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a cinder volume resource in Openstack. A Cinder volume
// must exist before mounting to a container. The volume must also be
// in the same region as the kubelet. Cinder volumes support ownership
// management and SELinux relabeling.
type CinderVolumeSource struct {
// Unique id of the volume used to identify the cinder volume
VolumeID string `json:"volumeID"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
FSType string `json:"fsType,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a Ceph Filesystem mount that lasts the lifetime of a pod.
// Cephfs volumes do not support ownership management or SELinux relabeling.
type CephFSVolumeSource struct {
// Required: Monitors is a collection of Ceph monitors
Monitors []string `json:"monitors"`
// Optional: Used as the mounted root, rather than the full Ceph tree, default is /
Path string `json:"path,omitempty"`
// Optional: User is the rados user name, default is admin
User string `json:"user,omitempty"`
// Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret
SecretFile string `json:"secretFile,omitempty"`
// Optional: SecretRef is reference to the authentication secret for User, default is empty.
SecretRef *LocalObjectReference `json:"secretRef,omitempty"`
// Optional: Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a Flocker volume mounted by the Flocker agent.
// Flocker volumes do not support ownership management or SELinux relabeling.
type FlockerVolumeSource struct {
// Required: the volume name. This is stored as metadata -> name in the payload sent to Flocker.
DatasetName string `json:"datasetName"`
}
// Represents a volume containing downward API info.
// Downward API volumes support ownership management and SELinux relabeling.
type DownwardAPIVolumeSource struct {
// Items is a list of DownwardAPIVolumeFile entries.
Items []DownwardAPIVolumeFile `json:"items,omitempty"`
}
// Represents a single file containing information from the downward API
type DownwardAPIVolumeFile struct {
// Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'
Path string `json:"path"`
// Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.
FieldRef *ObjectFieldSelector `json:"fieldRef,omitempty"`
// Selects a resource of the container: only resources limits and requests
// (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.
ResourceFieldRef *ResourceFieldSelector `json:"resourceFieldRef,omitempty"`
}
// AzureFile represents an Azure File Service mount on the host and bind mount to the pod.
type AzureFileVolumeSource struct {
// the name of secret that contains Azure Storage Account Name and Key
SecretName string `json:"secretName"`
// Share Name
ShareName string `json:"shareName"`
// Defaults to false (read/write). ReadOnly here will force
// the ReadOnly setting in VolumeMounts.
ReadOnly bool `json:"readOnly,omitempty"`
}
// Represents a vSphere volume resource.
type VsphereVirtualDiskVolumeSource struct {
// Path that identifies vSphere volume vmdk
VolumePath string `json:"volumePath"`
// Filesystem type to mount.
// Must be a filesystem type supported by the host operating system.
// Ex. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.
FSType string `json:"fsType,omitempty"`
}
// Adapts a ConfigMap into a volume.
//
// The contents of the target ConfigMap's Data field will be presented in a
// volume as files using the keys in the Data field as the file names, unless
// the items element is populated with specific mappings of keys to paths.
// ConfigMap volumes support ownership management and SELinux relabeling.
type ConfigMapVolumeSource struct {
LocalObjectReference `json:",inline"`
// If unspecified, each key-value pair in the Data field of the referenced
// ConfigMap will be projected into the volume as a file whose name is the
// key and content is the value. If specified, the listed keys will be
// projected into the specified paths, and unlisted keys will not be
// present. If a key is specified which is not present in the ConfigMap,
// the volume setup will error. Paths must be relative and may not contain
// the '..' path or start with '..'.
Items []KeyToPath `json:"items,omitempty"`
}
// Maps a string key to a path within a volume.
type KeyToPath struct {
// The key to project.
Key string `json:"key"`
// The relative path of the file to map the key to.
// May not be an absolute path.
// May not contain the path element '..'.
// May not start with the string '..'.
Path string `json:"path"`
}
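// Illustrative sketch (not part of the API): projecting a single key of a
// hypothetical "app-tls" secret to a relative path inside the volume. Keys
// not listed in Items would not appear in the volume.
var exampleSecretVolume = SecretVolumeSource{
	SecretName: "app-tls", // hypothetical secret name
	Items: []KeyToPath{
		{Key: "tls.crt", Path: "certs/server.crt"}, // relative path, no '..'
	},
}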
// ContainerPort represents a network port in a single container
type ContainerPort struct {
// Optional: If specified, this must be an IANA_SVC_NAME. Each named port
// in a pod must have a unique name.
Name string `json:"name,omitempty"`
// Optional: If specified, this must be a valid port number, 0 < x < 65536.
// If HostNetwork is specified, this must match ContainerPort.
HostPort int32 `json:"hostPort,omitempty"`
// Required: This must be a valid port number, 0 < x < 65536.
ContainerPort int32 `json:"containerPort"`
// Required: Supports "TCP" and "UDP".
Protocol Protocol `json:"protocol,omitempty"`
// Optional: What host IP to bind the external port to.
HostIP string `json:"hostIP,omitempty"`
}
// VolumeMount describes a mounting of a Volume within a container.
type VolumeMount struct {
// Required: This must match the Name of a Volume [above].
Name string `json:"name"`
// Optional: Defaults to false (read-write).
ReadOnly bool `json:"readOnly,omitempty"`
// Required. Must not contain ':'.
MountPath string `json:"mountPath"`
// Path within the volume from which the container's volume should be mounted.
// Defaults to "" (volume's root).
SubPath string `json:"subPath,omitempty"`
}
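// Illustrative sketch (not part of the API): mounting a volume named
// "config" read-only at a hypothetical path, exposing only one
// sub-directory of the volume via SubPath.
var exampleVolumeMount = VolumeMount{
	Name:      "config", // must match the name of a Volume in the enclosing PodSpec
	ReadOnly:  true,
	MountPath: "/etc/myapp", // hypothetical mount path; must not contain ':'
	SubPath:   "production", // mount this sub-directory instead of the volume root
}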
// EnvVar represents an environment variable present in a Container.
type EnvVar struct {
// Required: This must be a C_IDENTIFIER.
Name string `json:"name"`
// Optional: no more than one of the following may be specified.
// Optional: Defaults to ""; variable references $(VAR_NAME) are expanded
// using the previously defined environment variables in the container and
// any service environment variables. If a variable cannot be resolved,
// the reference in the input string will be unchanged. The $(VAR_NAME)
// syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped
// references will never be expanded, regardless of whether the variable
// exists or not.
Value string `json:"value,omitempty"`
// Optional: Specifies a source the value of this var should come from.
ValueFrom *EnvVarSource `json:"valueFrom,omitempty"`
}
// EnvVarSource represents a source for the value of an EnvVar.
// Only one of its fields may be set.
type EnvVarSource struct {
// Selects a field of the pod; only name and namespace are supported.
FieldRef *ObjectFieldSelector `json:"fieldRef,omitempty"`
// Selects a resource of the container: only resources limits and requests
// (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.
ResourceFieldRef *ResourceFieldSelector `json:"resourceFieldRef,omitempty"`
// Selects a key of a ConfigMap.
ConfigMapKeyRef *ConfigMapKeySelector `json:"configMapKeyRef,omitempty"`
// Selects a key of a secret in the pod's namespace.
SecretKeyRef *SecretKeySelector `json:"secretKeyRef,omitempty"`
}
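// Illustrative sketch (not part of the API): an environment variable whose
// value is filled in from the downward API (the pod's own name). Exactly one
// EnvVarSource field is set, per the comment above.
var examplePodNameEnv = EnvVar{
	Name: "MY_POD_NAME", // hypothetical variable name; must be a C_IDENTIFIER
	ValueFrom: &EnvVarSource{
		FieldRef: &ObjectFieldSelector{
			APIVersion: "v1",
			FieldPath:  "metadata.name",
		},
	},
}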
// ObjectFieldSelector selects an APIVersioned field of an object.
type ObjectFieldSelector struct {
// Required: Version of the schema the FieldPath is written in terms of.
// If no value is specified, it will be defaulted to the APIVersion of the
// enclosing object.
APIVersion string `json:"apiVersion"`
// Required: Path of the field to select in the specified API version
FieldPath string `json:"fieldPath"`
}
// ResourceFieldSelector represents container resources (cpu, memory) and their output format
type ResourceFieldSelector struct {
// Container name: required for volumes, optional for env vars
ContainerName string `json:"containerName,omitempty"`
// Required: resource to select
Resource string `json:"resource"`
// Specifies the output format of the exposed resources, defaults to "1"
Divisor resource.Quantity `json:"divisor,omitempty"`
}
// Selects a key from a ConfigMap.
type ConfigMapKeySelector struct {
// The ConfigMap to select from.
LocalObjectReference `json:",inline"`
// The key to select.
Key string `json:"key"`
}
// SecretKeySelector selects a key of a Secret.
type SecretKeySelector struct {
// The name of the secret in the pod's namespace to select from.
LocalObjectReference `json:",inline"`
// The key of the secret to select from. Must be a valid secret key.
Key string `json:"key"`
}
// HTTPHeader describes a custom header to be used in HTTP probes
type HTTPHeader struct {
// The header field name
Name string `json:"name"`
// The header field value
Value string `json:"value"`
}
// HTTPGetAction describes an action based on HTTP Get requests.
type HTTPGetAction struct {
// Optional: Path to access on the HTTP server.
Path string `json:"path,omitempty"`
// Required: Name or number of the port to access on the container.
Port intstr.IntOrString `json:"port,omitempty"`
// Optional: Host name to connect to, defaults to the pod IP. You
// probably want to set "Host" in httpHeaders instead.
Host string `json:"host,omitempty"`
// Optional: Scheme to use for connecting to the host, defaults to HTTP.
Scheme URIScheme `json:"scheme,omitempty"`
// Optional: Custom headers to set in the request. HTTP allows repeated headers.
HTTPHeaders []HTTPHeader `json:"httpHeaders,omitempty"`
}
// URIScheme identifies the scheme used for connection to a host for Get actions
type URIScheme string
const (
// URISchemeHTTP means that the scheme used will be http://
URISchemeHTTP URIScheme = "HTTP"
// URISchemeHTTPS means that the scheme used will be https://
URISchemeHTTPS URIScheme = "HTTPS"
)
// TCPSocketAction describes an action based on opening a socket
type TCPSocketAction struct {
// Required: Port to connect to.
Port intstr.IntOrString `json:"port,omitempty"`
}
// ExecAction describes a "run in container" action.
type ExecAction struct {
// Command is the command line to execute inside the container, the working directory for the
// command is root ('/') in the container's filesystem. The command is simply exec'd, it is
// not run inside a shell, so traditional shell instructions ('|', etc) won't work. To use
// a shell, you need to explicitly call out to that shell.
Command []string `json:"command,omitempty"`
}
// Probe describes a health check to be performed against a container to determine whether it is
// alive or ready to receive traffic.
type Probe struct {
// The action taken to determine the health of a container
Handler `json:",inline"`
// Length of time before health checking is activated. In seconds.
InitialDelaySeconds int32 `json:"initialDelaySeconds,omitempty"`
// Length of time before health checking times out. In seconds.
TimeoutSeconds int32 `json:"timeoutSeconds,omitempty"`
// How often (in seconds) to perform the probe.
PeriodSeconds int32 `json:"periodSeconds,omitempty"`
// Minimum consecutive successes for the probe to be considered successful after having failed.
// Must be 1 for liveness.
SuccessThreshold int32 `json:"successThreshold,omitempty"`
// Minimum consecutive failures for the probe to be considered failed after having succeeded.
FailureThreshold int32 `json:"failureThreshold,omitempty"`
}
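// Illustrative sketch (not part of the API): a liveness probe that issues an
// HTTP GET against a hypothetical /healthz endpoint. The numeric thresholds
// are example values, not defaults.
var exampleLivenessProbe = Probe{
	Handler: Handler{
		HTTPGet: &HTTPGetAction{
			Path: "/healthz",
			Port: intstr.FromInt(8080), // hypothetical container port
		},
	},
	InitialDelaySeconds: 15, // give the container time to start
	TimeoutSeconds:      1,
	PeriodSeconds:       10,
	SuccessThreshold:    1, // must be 1 for liveness, per the comment above
	FailureThreshold:    3,
}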
// PullPolicy describes a policy for if/when to pull a container image
type PullPolicy string
const (
// PullAlways means that kubelet always attempts to pull the latest image. Container will fail if the pull fails.
PullAlways PullPolicy = "Always"
// PullNever means that kubelet never pulls an image, but only uses a local image. Container will fail if the image isn't present
PullNever PullPolicy = "Never"
// PullIfNotPresent means that kubelet pulls if the image isn't present on disk. Container will fail if the image isn't present and the pull fails.
PullIfNotPresent PullPolicy = "IfNotPresent"
)
// Capability represent POSIX capabilities type
type Capability string
// Capabilities represent POSIX capabilities that can be added or removed to a running container.
type Capabilities struct {
// Added capabilities
Add []Capability `json:"add,omitempty"`
// Removed capabilities
Drop []Capability `json:"drop,omitempty"`
}
// ResourceRequirements describes the compute resource requirements.
type ResourceRequirements struct {
// Limits describes the maximum amount of compute resources allowed.
Limits ResourceList `json:"limits,omitempty"`
// Requests describes the minimum amount of compute resources required.
// If Requests is omitted for a container, it defaults to Limits if that is explicitly specified,
// otherwise to an implementation-defined value
Requests ResourceList `json:"requests,omitempty"`
}
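// Illustrative sketch (not part of the API): requests below limits, so the
// container is guaranteed the requested amount and may burst up to the limit.
// ResourceCPU and ResourceMemory are the ResourceName constants defined
// elsewhere in this package; the quantities are hypothetical.
var exampleResources = ResourceRequirements{
	Requests: ResourceList{
		ResourceCPU:    resource.MustParse("250m"),
		ResourceMemory: resource.MustParse("64Mi"),
	},
	Limits: ResourceList{
		ResourceCPU:    resource.MustParse("500m"),
		ResourceMemory: resource.MustParse("128Mi"),
	},
}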
// Container represents a single container that is expected to be run on the host.
type Container struct {
// Required: This must be a DNS_LABEL. Each container in a pod must
// have a unique name.
Name string `json:"name"`
// Required.
Image string `json:"image"`
// Optional: The docker image's entrypoint is used if this is not provided; cannot be updated.
// Variable references $(VAR_NAME) are expanded using the container's environment. If a variable
// cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax
// can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded,
// regardless of whether the variable exists or not.
Command []string `json:"command,omitempty"`
// Optional: The docker image's cmd is used if this is not provided; cannot be updated.
// Variable references $(VAR_NAME) are expanded using the container's environment. If a variable
// cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax
// can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded,
// regardless of whether the variable exists or not.
Args []string `json:"args,omitempty"`
// Optional: Defaults to Docker's default.
WorkingDir string `json:"workingDir,omitempty"`
Ports []ContainerPort `json:"ports,omitempty"`
Env []EnvVar `json:"env,omitempty"`
// Compute resource requirements.
Resources ResourceRequirements `json:"resources,omitempty"`
VolumeMounts []VolumeMount `json:"volumeMounts,omitempty"`
LivenessProbe *Probe `json:"livenessProbe,omitempty"`
ReadinessProbe *Probe `json:"readinessProbe,omitempty"`
Lifecycle *Lifecycle `json:"lifecycle,omitempty"`
// Required.
TerminationMessagePath string `json:"terminationMessagePath,omitempty"`
// Required: Policy for pulling images for this container
ImagePullPolicy PullPolicy `json:"imagePullPolicy"`
// Optional: SecurityContext defines the security options the container should be run with.
// If set, the fields of SecurityContext override the equivalent fields of PodSecurityContext.
SecurityContext *SecurityContext `json:"securityContext,omitempty"`
// Variables for interactive containers, these have very specialized use-cases (e.g. debugging)
// and shouldn't be used for general purpose containers.
Stdin bool `json:"stdin,omitempty"`
StdinOnce bool `json:"stdinOnce,omitempty"`
TTY bool `json:"tty,omitempty"`
}
// Handler defines a specific action that should be taken
// TODO: pass structured data to these actions, and document that data here.
type Handler struct {
// One and only one of the following should be specified.
// Exec specifies the action to take.
Exec *ExecAction `json:"exec,omitempty"`
// HTTPGet specifies the http request to perform.
HTTPGet *HTTPGetAction `json:"httpGet,omitempty"`
// TCPSocket specifies an action involving a TCP port.
// TODO: implement a realistic TCP lifecycle hook
TCPSocket *TCPSocketAction `json:"tcpSocket,omitempty"`
}
// Lifecycle describes actions that the management system should take in response to container lifecycle
// events. For the PostStart and PreStop lifecycle handlers, management of the container blocks
// until the action is complete, unless the container process fails, in which case the handler is aborted.
type Lifecycle struct {
// PostStart is called immediately after a container is created. If the handler fails, the container
// is terminated and restarted.
PostStart *Handler `json:"postStart,omitempty"`
// PreStop is called immediately before a container is terminated. The reason for termination is
// passed to the handler. Regardless of the outcome of the handler, the container is eventually terminated.
PreStop *Handler `json:"preStop,omitempty"`
}
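// Illustrative sketch (not part of the API): a PreStop hook that asks a
// hypothetical nginx process to shut down gracefully before the container is
// terminated. The command is exec'd directly, not run in a shell.
var exampleLifecycle = Lifecycle{
	PreStop: &Handler{
		Exec: &ExecAction{
			Command: []string{"/usr/sbin/nginx", "-s", "quit"}, // hypothetical binary path
		},
	},
}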
// The below types are used by kube_client and api_server.
type ConditionStatus string
// These are valid condition statuses. "ConditionTrue" means a resource is in the condition;
// "ConditionFalse" means a resource is not in the condition; "ConditionUnknown" means kubernetes
// can't decide if a resource is in the condition or not. In the future, we could add other
// intermediate conditions, e.g. ConditionDegraded.
const (
ConditionTrue ConditionStatus = "True"
ConditionFalse ConditionStatus = "False"
ConditionUnknown ConditionStatus = "Unknown"
)
type ContainerStateWaiting struct {
// A brief CamelCase string indicating details about why the container is in waiting state.
Reason string `json:"reason,omitempty"`
// A human-readable message indicating details about why the container is in waiting state.
Message string `json:"message,omitempty"`
}
type ContainerStateRunning struct {
StartedAt unversioned.Time `json:"startedAt,omitempty"`
}
type ContainerStateTerminated struct {
ExitCode int32 `json:"exitCode"`
Signal int32 `json:"signal,omitempty"`
Reason string `json:"reason,omitempty"`
Message string `json:"message,omitempty"`
StartedAt unversioned.Time `json:"startedAt,omitempty"`
FinishedAt unversioned.Time `json:"finishedAt,omitempty"`
ContainerID string `json:"containerID,omitempty"`
}
// ContainerState holds a possible state of container.
// Only one of its members may be specified.
// If none of them is specified, the default one is ContainerStateWaiting.
type ContainerState struct {
Waiting *ContainerStateWaiting `json:"waiting,omitempty"`
Running *ContainerStateRunning `json:"running,omitempty"`
Terminated *ContainerStateTerminated `json:"terminated,omitempty"`
}
type ContainerStatus struct {
// Each container in a pod must have a unique name.
Name string `json:"name"`
State ContainerState `json:"state,omitempty"`
LastTerminationState ContainerState `json:"lastState,omitempty"`
// Ready specifies whether the container has passed its readiness check.
Ready bool `json:"ready"`
// Note that this is calculated from dead containers. But those containers are subject to
// garbage collection. This value will get capped at 5 by GC.
RestartCount int32 `json:"restartCount"`
Image string `json:"image"`
ImageID string `json:"imageID"`
ContainerID string `json:"containerID,omitempty"`
}
// PodPhase is a label for the condition of a pod at the current time.
type PodPhase string
// These are the valid statuses of pods.
const (
// PodPending means the pod has been accepted by the system, but one or more of the containers
// has not been started. This includes time before being bound to a node, as well as time spent
// pulling images onto the host.
PodPending PodPhase = "Pending"
// PodRunning means the pod has been bound to a node and all of the containers have been started.
// At least one container is still running or is in the process of being restarted.
PodRunning PodPhase = "Running"
// PodSucceeded means that all containers in the pod have voluntarily terminated
// with a container exit code of 0, and the system is not going to restart any of these containers.
PodSucceeded PodPhase = "Succeeded"
// PodFailed means that all containers in the pod have terminated, and at least one container has
// terminated in a failure (exited with a non-zero exit code or was stopped by the system).
PodFailed PodPhase = "Failed"
// PodUnknown means that for some reason the state of the pod could not be obtained, typically due
// to an error in communicating with the host of the pod.
PodUnknown PodPhase = "Unknown"
)
type PodConditionType string
// These are valid conditions of pod.
const (
// PodScheduled represents status of the scheduling process for this pod.
PodScheduled PodConditionType = "PodScheduled"
// PodReady means the pod is able to service requests and should be added to the
// load balancing pools of all matching services.
PodReady PodConditionType = "Ready"
// PodInitialized means that all init containers in the pod have started successfully.
PodInitialized PodConditionType = "Initialized"
)
type PodCondition struct {
Type PodConditionType `json:"type"`
Status ConditionStatus `json:"status"`
LastProbeTime unversioned.Time `json:"lastProbeTime,omitempty"`
LastTransitionTime unversioned.Time `json:"lastTransitionTime,omitempty"`
Reason string `json:"reason,omitempty"`
Message string `json:"message,omitempty"`
}
// RestartPolicy describes how the container should be restarted.
// Only one of the following restart policies may be specified.
// If none of the following policies is specified, the default one
// is RestartPolicyAlways.
type RestartPolicy string
const (
RestartPolicyAlways RestartPolicy = "Always"
RestartPolicyOnFailure RestartPolicy = "OnFailure"
RestartPolicyNever RestartPolicy = "Never"
)
// PodList is a list of Pods.
type PodList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Pod `json:"items"`
}
// DNSPolicy defines how a pod's DNS will be configured.
type DNSPolicy string
const (
// DNSClusterFirst indicates that the pod should use cluster DNS
// first, if it is available, then fall back on the default (as
// determined by kubelet) DNS settings.
DNSClusterFirst DNSPolicy = "ClusterFirst"
// DNSDefault indicates that the pod should use the default (as
// determined by kubelet) DNS settings.
DNSDefault DNSPolicy = "Default"
)
// A node selector represents the union of the results of one or more label queries
// over a set of nodes; that is, it represents the OR of the selectors represented
// by the node selector terms.
type NodeSelector struct {
// Required. A list of node selector terms. The terms are ORed.
NodeSelectorTerms []NodeSelectorTerm `json:"nodeSelectorTerms"`
}
// A null or empty node selector term matches no objects.
type NodeSelectorTerm struct {
// Required. A list of node selector requirements. The requirements are ANDed.
MatchExpressions []NodeSelectorRequirement `json:"matchExpressions"`
}
// A node selector requirement is a selector that contains values, a key, and an operator
// that relates the key and values.
type NodeSelectorRequirement struct {
// The label key that the selector applies to.
Key string `json:"key" patchStrategy:"merge" patchMergeKey:"key"`
// Represents a key's relationship to a set of values.
// Valid operators are In, NotIn, Exists, DoesNotExist, Gt, and Lt.
Operator NodeSelectorOperator `json:"operator"`
// An array of string values. If the operator is In or NotIn,
// the values array must be non-empty. If the operator is Exists or DoesNotExist,
// the values array must be empty. If the operator is Gt or Lt, the values
// array must have a single element, which will be interpreted as an integer.
// This array is replaced during a strategic merge patch.
Values []string `json:"values,omitempty"`
}
// A node selector operator is the set of operators that can be used in
// a node selector requirement.
type NodeSelectorOperator string
const (
NodeSelectorOpIn NodeSelectorOperator = "In"
NodeSelectorOpNotIn NodeSelectorOperator = "NotIn"
NodeSelectorOpExists NodeSelectorOperator = "Exists"
NodeSelectorOpDoesNotExist NodeSelectorOperator = "DoesNotExist"
NodeSelectorOpGt NodeSelectorOperator = "Gt"
NodeSelectorOpLt NodeSelectorOperator = "Lt"
)
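// Illustrative sketch (not part of the API): a node selector with a single
// term requiring a hypothetical "disktype" node label. Multiple terms would
// be ORed; multiple requirements within one term would be ANDed.
var exampleNodeSelector = NodeSelector{
	NodeSelectorTerms: []NodeSelectorTerm{
		{
			MatchExpressions: []NodeSelectorRequirement{
				{Key: "disktype", Operator: NodeSelectorOpIn, Values: []string{"ssd"}},
			},
		},
	},
}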
// Affinity is a group of affinity scheduling rules.
type Affinity struct {
// Describes node affinity scheduling rules for the pod.
NodeAffinity *NodeAffinity `json:"nodeAffinity,omitempty"`
// Describes pod affinity scheduling rules (e.g. co-locate this pod in the same node, zone, etc. as some other pod(s)).
PodAffinity *PodAffinity `json:"podAffinity,omitempty"`
// Describes pod anti-affinity scheduling rules (e.g. avoid putting this pod in the same node, zone, etc. as some other pod(s)).
PodAntiAffinity *PodAntiAffinity `json:"podAntiAffinity,omitempty"`
}
// Pod affinity is a group of inter pod affinity scheduling rules.
type PodAffinity struct {
// NOT YET IMPLEMENTED. TODO: Uncomment field once it is implemented.
// If the affinity requirements specified by this field are not met at
// scheduling time, the pod will not be scheduled onto the node.
// If the affinity requirements specified by this field cease to be met
// at some point during pod execution (e.g. due to a pod label update), the
// system will try to eventually evict the pod from its node.
// When there are multiple elements, the lists of nodes corresponding to each
// podAffinityTerm are intersected, i.e. all terms must be satisfied.
// RequiredDuringSchedulingRequiredDuringExecution []PodAffinityTerm `json:"requiredDuringSchedulingRequiredDuringExecution,omitempty"`
// If the affinity requirements specified by this field are not met at
// scheduling time, the pod will not be scheduled onto the node.
// If the affinity requirements specified by this field cease to be met
// at some point during pod execution (e.g. due to a pod label update), the
// system may or may not try to eventually evict the pod from its node.
// When there are multiple elements, the lists of nodes corresponding to each
// podAffinityTerm are intersected, i.e. all terms must be satisfied.
RequiredDuringSchedulingIgnoredDuringExecution []PodAffinityTerm `json:"requiredDuringSchedulingIgnoredDuringExecution,omitempty"`
// The scheduler will prefer to schedule pods to nodes that satisfy
// the affinity expressions specified by this field, but it may choose
// a node that violates one or more of the expressions. The node that is
// most preferred is the one with the greatest sum of weights, i.e.
// for each node that meets all of the scheduling requirements (resource
// request, requiredDuringScheduling affinity expressions, etc.),
// compute a sum by iterating through the elements of this field and adding
// "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the
// node(s) with the highest sum are the most preferred.
PreferredDuringSchedulingIgnoredDuringExecution []WeightedPodAffinityTerm `json:"preferredDuringSchedulingIgnoredDuringExecution,omitempty"`
}
// Pod anti affinity is a group of inter pod anti affinity scheduling rules.
type PodAntiAffinity struct {
// NOT YET IMPLEMENTED. TODO: Uncomment field once it is implemented.
// If the anti-affinity requirements specified by this field are not met at
// scheduling time, the pod will not be scheduled onto the node.
// If the anti-affinity requirements specified by this field cease to be met
// at some point during pod execution (e.g. due to a pod label update), the
// system will try to eventually evict the pod from its node.
// When there are multiple elements, the lists of nodes corresponding to each
// podAffinityTerm are intersected, i.e. all terms must be satisfied.
// RequiredDuringSchedulingRequiredDuringExecution []PodAffinityTerm `json:"requiredDuringSchedulingRequiredDuringExecution,omitempty"`
// If the anti-affinity requirements specified by this field are not met at
// scheduling time, the pod will not be scheduled onto the node.
// If the anti-affinity requirements specified by this field cease to be met
// at some point during pod execution (e.g. due to a pod label update), the
// system may or may not try to eventually evict the pod from its node.
// When there are multiple elements, the lists of nodes corresponding to each
// podAffinityTerm are intersected, i.e. all terms must be satisfied.
RequiredDuringSchedulingIgnoredDuringExecution []PodAffinityTerm `json:"requiredDuringSchedulingIgnoredDuringExecution,omitempty"`
// The scheduler will prefer to schedule pods to nodes that satisfy
// the anti-affinity expressions specified by this field, but it may choose
// a node that violates one or more of the expressions. The node that is
// most preferred is the one with the greatest sum of weights, i.e.
// for each node that meets all of the scheduling requirements (resource
// request, requiredDuringScheduling anti-affinity expressions, etc.),
// compute a sum by iterating through the elements of this field and adding
// "weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the
// node(s) with the highest sum are the most preferred.
PreferredDuringSchedulingIgnoredDuringExecution []WeightedPodAffinityTerm `json:"preferredDuringSchedulingIgnoredDuringExecution,omitempty"`
}
// The weights of all of the matched WeightedPodAffinityTerm fields are added per-node to find the most preferred node(s)
type WeightedPodAffinityTerm struct {
// weight associated with matching the corresponding podAffinityTerm,
// in the range 1-100.
Weight int `json:"weight"`
// Required. A pod affinity term, associated with the corresponding weight.
PodAffinityTerm PodAffinityTerm `json:"podAffinityTerm"`
}
// Defines a set of pods (namely those matching the labelSelector
// relative to the given namespace(s)) that this pod should be
// co-located (affinity) or not co-located (anti-affinity) with,
// where co-located is defined as running on a node whose value of
// the label with key <topologyKey> matches that of any node on which
// a pod of the set of pods is running.
type PodAffinityTerm struct {
// A label query over a set of resources, in this case pods.
LabelSelector *unversioned.LabelSelector `json:"labelSelector,omitempty"`
// namespaces specifies which namespaces the labelSelector applies to (matches against);
// nil list means "this pod's namespace," empty list means "all namespaces"
// The json tag here is not "omitempty" since we need to distinguish nil and empty.
// See https://golang.org/pkg/encoding/json/#Marshal for more details.
Namespaces []string `json:"namespaces"`
// This pod should be co-located (affinity) or not co-located (anti-affinity) with the pods matching
// the labelSelector in the specified namespaces, where co-located is defined as running on a node
// whose value of the label with key topologyKey matches that of any node on which any of the
// selected pods is running.
// For PreferredDuringScheduling pod anti-affinity, empty topologyKey is interpreted as "all topologies"
// ("all topologies" here means all the topologyKeys indicated by scheduler command-line argument --failure-domains);
// for affinity and for RequiredDuringScheduling pod anti-affinity, empty topologyKey is not allowed.
TopologyKey string `json:"topologyKey,omitempty"`
}
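// Illustrative sketch (not part of the API): an anti-affinity term that, when
// used in PodAntiAffinity, spreads pods carrying a hypothetical app=web label
// across nodes by keying co-location on the node hostname label.
var exampleAntiAffinityTerm = PodAffinityTerm{
	LabelSelector: &unversioned.LabelSelector{
		MatchLabels: map[string]string{"app": "web"}, // hypothetical pod label
	},
	// Namespaces is left nil, meaning "this pod's namespace" (see above).
	TopologyKey: "kubernetes.io/hostname",
}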
// Node affinity is a group of node affinity scheduling rules.
type NodeAffinity struct {
// NOT YET IMPLEMENTED. TODO: Uncomment field once it is implemented.
// If the affinity requirements specified by this field are not met at
// scheduling time, the pod will not be scheduled onto the node.
// If the affinity requirements specified by this field cease to be met
// at some point during pod execution (e.g. due to an update), the system
// will try to eventually evict the pod from its node.
// RequiredDuringSchedulingRequiredDuringExecution *NodeSelector `json:"requiredDuringSchedulingRequiredDuringExecution,omitempty"`
// If the affinity requirements specified by this field are not met at
// scheduling time, the pod will not be scheduled onto the node.
// If the affinity requirements specified by this field cease to be met
// at some point during pod execution (e.g. due to an update), the system
// may or may not try to eventually evict the pod from its node.
RequiredDuringSchedulingIgnoredDuringExecution *NodeSelector `json:"requiredDuringSchedulingIgnoredDuringExecution,omitempty"`
// The scheduler will prefer to schedule pods to nodes that satisfy
// the affinity expressions specified by this field, but it may choose
// a node that violates one or more of the expressions. The node that is
// most preferred is the one with the greatest sum of weights, i.e.
// for each node that meets all of the scheduling requirements (resource
// request, requiredDuringScheduling affinity expressions, etc.),
// compute a sum by iterating through the elements of this field and adding
// "weight" to the sum if the node matches the corresponding matchExpressions; the
// node(s) with the highest sum are the most preferred.
PreferredDuringSchedulingIgnoredDuringExecution []PreferredSchedulingTerm `json:"preferredDuringSchedulingIgnoredDuringExecution,omitempty"`
}
// An empty preferred scheduling term matches all objects with implicit weight 0
// (i.e. it's a no-op). A null preferred scheduling term matches no objects (i.e. is also a no-op).
type PreferredSchedulingTerm struct {
// Weight associated with matching the corresponding nodeSelectorTerm, in the range 1-100.
Weight int32 `json:"weight"`
// A node selector term, associated with the corresponding weight.
Preference NodeSelectorTerm `json:"preference"`
}
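// Illustrative sketch (not part of the API): a soft preference for nodes in a
// hypothetical "zone-a" failure domain. The scheduler adds Weight to a node's
// score when the term matches but may still pick a non-matching node.
var exampleZonePreference = PreferredSchedulingTerm{
	Weight: 50, // relative weight in the 1-100 range
	Preference: NodeSelectorTerm{
		MatchExpressions: []NodeSelectorRequirement{
			{Key: "zone", Operator: NodeSelectorOpIn, Values: []string{"zone-a"}}, // hypothetical node label
		},
	},
}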
// The node this Taint is attached to has the effect "effect" on
// any pod that does not tolerate the Taint.
type Taint struct {
// Required. The taint key to be applied to a node.
Key string `json:"key" patchStrategy:"merge" patchMergeKey:"key"`
// Required. The taint value corresponding to the taint key.
Value string `json:"value,omitempty"`
// Required. The effect of the taint on pods
// that do not tolerate the taint.
// Valid effects are NoSchedule and PreferNoSchedule.
Effect TaintEffect `json:"effect"`
}
type TaintEffect string
const (
// Do not allow new pods to schedule onto the node unless they tolerate the taint,
// but allow all pods submitted to Kubelet without going through the scheduler
// to start, and allow all already-running pods to continue running.
// Enforced by the scheduler.
TaintEffectNoSchedule TaintEffect = "NoSchedule"
// Like TaintEffectNoSchedule, but the scheduler tries not to schedule
// new pods onto the node, rather than prohibiting new pods from scheduling
// onto the node entirely. Enforced by the scheduler.
TaintEffectPreferNoSchedule TaintEffect = "PreferNoSchedule"
// NOT YET IMPLEMENTED. TODO: Uncomment field once it is implemented.
// Do not allow new pods to schedule onto the node unless they tolerate the taint,
// do not allow pods to start on Kubelet unless they tolerate the taint,
// but allow all already-running pods to continue running.
// Enforced by the scheduler and Kubelet.
// TaintEffectNoScheduleNoAdmit TaintEffect = "NoScheduleNoAdmit"
// NOT YET IMPLEMENTED. TODO: Uncomment field once it is implemented.
// Do not allow new pods to schedule onto the node unless they tolerate the taint,
// do not allow pods to start on Kubelet unless they tolerate the taint,
// and evict any already-running pods that do not tolerate the taint.
// Enforced by the scheduler and Kubelet.
// TaintEffectNoScheduleNoAdmitNoExecute = "NoScheduleNoAdmitNoExecute"
)
// The pod this Toleration is attached to tolerates any taint that matches
// the triple <key,value,effect> using the matching operator <operator>.
type Toleration struct {
// Required. Key is the taint key that the toleration applies to.
Key string `json:"key,omitempty" patchStrategy:"merge" patchMergeKey:"key"`
// operator represents a key's relationship to the value.
// Valid operators are Exists and Equal. Defaults to Equal.
// Exists is equivalent to wildcard for value, so that a pod can
// tolerate all taints of a particular category.
Operator TolerationOperator `json:"operator,omitempty"`
// Value is the taint value the toleration matches to.
// If the operator is Exists, the value should be empty, otherwise just a regular string.
Value string `json:"value,omitempty"`
// Effect indicates the taint effect to match. Empty means match all taint effects.
// When specified, allowed values are NoSchedule and PreferNoSchedule.
Effect TaintEffect `json:"effect,omitempty"`
// TODO: For forgiveness (#1574), we'd eventually add at least a grace period
// here, and possibly an occurrence threshold and period.
}
// A toleration operator is the set of operators that can be used in a toleration.
type TolerationOperator string
const (
TolerationOpExists TolerationOperator = "Exists"
TolerationOpEqual TolerationOperator = "Equal"
)
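// Illustrative sketch (not part of the API): a toleration that lets a pod
// schedule onto nodes tainted with a hypothetical dedicated=database taint
// whose effect is NoSchedule.
var exampleToleration = Toleration{
	Key:      "dedicated", // hypothetical taint key
	Operator: TolerationOpEqual,
	Value:    "database",
	Effect:   TaintEffectNoSchedule,
}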
// PodSpec is a description of a pod
type PodSpec struct {
Volumes []Volume `json:"volumes"`
// List of initialization containers belonging to the pod.
InitContainers []Container `json:"-"`
// List of containers belonging to the pod.
Containers []Container `json:"containers"`
RestartPolicy RestartPolicy `json:"restartPolicy,omitempty"`
// Optional duration in seconds the pod needs to terminate gracefully. May be decreased in delete request.
// Value must be non-negative integer. The value zero indicates delete immediately.
// If this value is nil, the default grace period will be used instead.
// The grace period is the duration in seconds after the processes running in the pod are sent
// a termination signal and the time when the processes are forcibly halted with a kill signal.
// Set this value longer than the expected cleanup time for your process.
TerminationGracePeriodSeconds *int64 `json:"terminationGracePeriodSeconds,omitempty"`
// Optional duration in seconds relative to the StartTime that the pod may be active on a node
// before the system actively tries to terminate the pod; value must be a positive integer
ActiveDeadlineSeconds *int64 `json:"activeDeadlineSeconds,omitempty"`
// Required: Set DNS policy.
DNSPolicy DNSPolicy `json:"dnsPolicy,omitempty"`
// NodeSelector is a selector which must be true for the pod to fit on a node
NodeSelector map[string]string `json:"nodeSelector,omitempty"`
// ServiceAccountName is the name of the ServiceAccount to use to run this pod
// The pod will be allowed to use secrets referenced by the ServiceAccount
ServiceAccountName string `json:"serviceAccountName"`
// NodeName is a request to schedule this pod onto a specific node. If it is non-empty,
// the scheduler simply schedules this pod onto that node, assuming that it fits resource
// requirements.
NodeName string `json:"nodeName,omitempty"`
// SecurityContext holds pod-level security attributes and common container settings.
// Optional: Defaults to empty. See type description for default values of each field.
SecurityContext *PodSecurityContext `json:"securityContext,omitempty"`
// ImagePullSecrets is an optional list of references to secrets in the same namespace to use for pulling any of the images used by this PodSpec.
// If specified, these secrets will be passed to individual puller implementations for them to use. For example,
// in the case of docker, only DockerConfig type secrets are honored.
ImagePullSecrets []LocalObjectReference `json:"imagePullSecrets,omitempty"`
// Specifies the hostname of the Pod.
// If not specified, the pod's hostname will be set to a system-defined value.
Hostname string `json:"hostname,omitempty"`
// If specified, the fully qualified Pod hostname will be "<hostname>.<subdomain>.<pod namespace>.svc.<cluster domain>".
// If not specified, the pod will not have a domainname at all.
Subdomain string `json:"subdomain,omitempty"`
}
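// Illustrative sketch (not part of the API): a minimal PodSpec with a single
// container. The image, port, and policies are hypothetical example values;
// ProtocolTCP is the Protocol constant defined elsewhere in this package.
var examplePodSpec = PodSpec{
	Containers: []Container{
		{
			Name:            "web", // DNS_LABEL, unique within the pod
			Image:           "nginx:1.9",
			ImagePullPolicy: PullIfNotPresent,
			Ports:           []ContainerPort{{ContainerPort: 80, Protocol: ProtocolTCP}},
		},
	},
	RestartPolicy: RestartPolicyAlways,
	DNSPolicy:     DNSClusterFirst,
}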
// PodSecurityContext holds pod-level security attributes and common container settings.
// Some fields are also present in container.securityContext. Field values of
// container.securityContext take precedence over field values of PodSecurityContext.
type PodSecurityContext struct {
// Use the host's network namespace. If this option is set, the ports that will be
// used must be specified.
// Optional: Default to false
// +k8s:conversion-gen=false
HostNetwork bool `json:"hostNetwork,omitempty"`
// Use the host's pid namespace.
// Optional: Default to false.
// +k8s:conversion-gen=false
HostPID bool `json:"hostPID,omitempty"`
// Use the host's ipc namespace.
// Optional: Default to false.
// +k8s:conversion-gen=false
HostIPC bool `json:"hostIPC,omitempty"`
// The SELinux context to be applied to all containers.
// If unspecified, the container runtime will allocate a random SELinux context for each
// container. May also be set in SecurityContext. If set in
// both SecurityContext and PodSecurityContext, the value specified in SecurityContext
// takes precedence for that container.
SELinuxOptions *SELinuxOptions `json:"seLinuxOptions,omitempty"`
// The UID to run the entrypoint of the container process.
// Defaults to user specified in image metadata if unspecified.
// May also be set in SecurityContext. If set in both SecurityContext and
// PodSecurityContext, the value specified in SecurityContext takes precedence
// for that container.
RunAsUser *int64 `json:"runAsUser,omitempty"`
// Indicates that the container must run as a non-root user.
// If true, the Kubelet will validate the image at runtime to ensure that it
// does not run as UID 0 (root) and fail to start the container if it does.
// If unset or false, no such validation will be performed.
// May also be set in SecurityContext. If set in both SecurityContext and
// PodSecurityContext, the value specified in SecurityContext takes precedence.
RunAsNonRoot *bool `json:"runAsNonRoot,omitempty"`
// A list of groups applied to the first process run in each container, in addition
// to the container's primary GID. If unspecified, no groups will be added to
// any container.
SupplementalGroups []int64 `json:"supplementalGroups,omitempty"`
// A special supplemental group that applies to all containers in a pod.
// Some volume types allow the Kubelet to change the ownership of that volume
// to be owned by the pod:
//
// 1. The owning GID will be the FSGroup
// 2. The setgid bit is set (new files created in the volume will be owned by FSGroup)
// 3. The permission bits are OR'd with rw-rw----
//
// If unset, the Kubelet will not modify the ownership and permissions of any volume.
FSGroup *int64 `json:"fsGroup,omitempty"`
}
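// Illustrative sketch (not part of the API): running all containers as a
// hypothetical non-root UID and letting the kubelet chown supported volumes
// to a matching fsGroup.
var (
	exampleUID     int64 = 1000
	exampleNonRoot       = true

	examplePodSecurityContext = PodSecurityContext{
		RunAsUser:    &exampleUID,
		RunAsNonRoot: &exampleNonRoot,
		FSGroup:      &exampleUID,
	}
)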
// PodStatus represents information about the status of a pod. Status may trail the actual
// state of a system.
type PodStatus struct {
Phase PodPhase `json:"phase,omitempty"`
Conditions []PodCondition `json:"conditions,omitempty"`
// A human readable message indicating details about why the pod is in this state.
Message string `json:"message,omitempty"`
// A brief CamelCase message indicating details about why the pod is in this state. e.g. 'OutOfDisk'
Reason string `json:"reason,omitempty"`
HostIP string `json:"hostIP,omitempty"`
PodIP string `json:"podIP,omitempty"`
// Date and time at which the object was acknowledged by the Kubelet.
// This is before the Kubelet pulled the container image(s) for the pod.
StartTime *unversioned.Time `json:"startTime,omitempty"`
// The list has one entry per init container in the manifest. The most recent successful
// init container will have ready = true, the most recently started container will have
// startTime set.
// More info: http://releases.k8s.io/HEAD/docs/user-guide/pod-states.md#container-statuses
InitContainerStatuses []ContainerStatus `json:"-"`
// The list has one entry per container in the manifest. Each entry is
// currently the output of `docker inspect`. This output format is *not*
// final and should not be relied upon.
// TODO: Make real decisions about what our info should look like. Re-enable fuzz test
// when we have done this.
ContainerStatuses []ContainerStatus `json:"containerStatuses,omitempty"`
}
// PodStatusResult is a wrapper for PodStatus returned by kubelet that can be encoded/decoded
type PodStatusResult struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Status represents the current information about a pod. This data may not be up
// to date.
Status PodStatus `json:"status,omitempty"`
}
// +genclient=true
// Pod is a collection of containers, used as either input (create, update) or as output (list, get).
type Pod struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the behavior of a pod.
Spec PodSpec `json:"spec,omitempty"`
// Status represents the current information about a pod. This data may not be up
// to date.
Status PodStatus `json:"status,omitempty"`
}
// PodTemplateSpec describes the data a pod should have when created from a template
type PodTemplateSpec struct {
// Metadata of the pods created from this template.
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the behavior of a pod.
Spec PodSpec `json:"spec,omitempty"`
}
// +genclient=true
// PodTemplate describes a template for creating copies of a predefined pod.
type PodTemplate struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Template defines the pods that will be created from this pod template
Template PodTemplateSpec `json:"template,omitempty"`
}
// PodTemplateList is a list of PodTemplates.
type PodTemplateList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []PodTemplate `json:"items"`
}
// ReplicationControllerSpec is the specification of a replication controller.
// As the internal representation of a replication controller, it may have either
// a TemplateRef or a Template set.
type ReplicationControllerSpec struct {
// Replicas is the number of desired replicas.
Replicas int32 `json:"replicas"`
// Selector is a label query over pods that should match the Replicas count.
Selector map[string]string `json:"selector"`
// TemplateRef is a reference to an object that describes the pod that will be created if
// insufficient replicas are detected. This reference is ignored if a Template is set.
// Must be set before converting to a versioned API object
//TemplateRef *ObjectReference `json:"templateRef,omitempty"`
// Template is the object that describes the pod that will be created if
// insufficient replicas are detected. Internally, this takes precedence over a
// TemplateRef.
Template *PodTemplateSpec `json:"template,omitempty"`
}
// ReplicationControllerStatus represents the current status of a replication
// controller.
type ReplicationControllerStatus struct {
// Replicas is the number of actual replicas.
Replicas int32 `json:"replicas"`
// The number of pods that have labels matching the labels of the pod template of the replication controller.
FullyLabeledReplicas int32 `json:"fullyLabeledReplicas,omitempty"`
// ObservedGeneration is the most recent generation observed by the controller.
ObservedGeneration int64 `json:"observedGeneration,omitempty"`
}
// +genclient=true
// ReplicationController represents the configuration of a replication controller.
type ReplicationController struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the desired behavior of this replication controller.
Spec ReplicationControllerSpec `json:"spec,omitempty"`
// Status is the current status of this replication controller. This data may be
// out of date by some window of time.
Status ReplicationControllerStatus `json:"status,omitempty"`
}
// ReplicationControllerList is a collection of replication controllers.
type ReplicationControllerList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []ReplicationController `json:"items"`
}
const (
// ClusterIPNone - do not assign a cluster IP
// no proxying required and no environment variables should be created for pods
ClusterIPNone = "None"
)
// ServiceList holds a list of services.
type ServiceList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Service `json:"items"`
}
// Session Affinity Type string
type ServiceAffinity string
const (
// ServiceAffinityClientIP is the Client IP based.
ServiceAffinityClientIP ServiceAffinity = "ClientIP"
// ServiceAffinityNone - no session affinity.
ServiceAffinityNone ServiceAffinity = "None"
)
// Service Type string describes ingress methods for a service
type ServiceType string
const (
// ServiceTypeClusterIP means a service will only be accessible inside the
// cluster, via the ClusterIP.
ServiceTypeClusterIP ServiceType = "ClusterIP"
// ServiceTypeNodePort means a service will be exposed on one port of
// every node, in addition to 'ClusterIP' type.
ServiceTypeNodePort ServiceType = "NodePort"
// ServiceTypeLoadBalancer means a service will be exposed via an
// external load balancer (if the cloud provider supports it), in addition
// to 'NodePort' type.
ServiceTypeLoadBalancer ServiceType = "LoadBalancer"
)
// ServiceStatus represents the current status of a service
type ServiceStatus struct {
// LoadBalancer contains the current status of the load-balancer,
// if one is present.
LoadBalancer LoadBalancerStatus `json:"loadBalancer,omitempty"`
}
// LoadBalancerStatus represents the status of a load-balancer
type LoadBalancerStatus struct {
// Ingress is a list containing ingress points for the load-balancer;
// traffic intended for the service should be sent to these ingress points.
Ingress []LoadBalancerIngress `json:"ingress,omitempty"`
}
// LoadBalancerIngress represents the status of a load-balancer ingress point:
// traffic intended for the service should be sent to an ingress point.
type LoadBalancerIngress struct {
// IP is set for load-balancer ingress points that are IP based
// (typically GCE or OpenStack load-balancers)
IP string `json:"ip,omitempty"`
// Hostname is set for load-balancer ingress points that are DNS based
// (typically AWS load-balancers)
Hostname string `json:"hostname,omitempty"`
}
// ServiceSpec describes the attributes that a user creates on a service
type ServiceSpec struct {
// Type determines how the service will be exposed. Valid options: ClusterIP, NodePort, LoadBalancer
Type ServiceType `json:"type,omitempty"`
// Required: The list of ports that are exposed by this service.
Ports []ServicePort `json:"ports"`
// This service will route traffic to pods having labels matching this selector. If empty or not present,
// the service is assumed to have endpoints set by an external process and Kubernetes will not modify
// those endpoints.
Selector map[string]string `json:"selector"`
// ClusterIP is usually assigned by the master. If specified by the user
// we will try to respect it or else fail the request. This field can
// not be changed by updates.
// Valid values are None, empty string (""), or a valid IP address
// None can be specified for headless services when proxying is not required
ClusterIP string `json:"clusterIP,omitempty"`
// ExternalIPs are used by external load balancers, or can be set by
// users to handle external traffic that arrives at a node.
ExternalIPs []string `json:"externalIPs,omitempty"`
// Only applies to Service Type: LoadBalancer
// LoadBalancer will get created with the IP specified in this field.
// This feature depends on whether the underlying cloud-provider supports specifying
// the loadBalancerIP when a load balancer is created.
// This field will be ignored if the cloud-provider does not support the feature.
LoadBalancerIP string `json:"loadBalancerIP,omitempty"`
// Optional: Supports "ClientIP" and "None". Used to maintain session affinity.
SessionAffinity ServiceAffinity `json:"sessionAffinity,omitempty"`
// Optional: If specified and supported by the platform, traffic through the
// cloud-provider load-balancer will be restricted to the specified client IPs.
// This field will be ignored if the cloud-provider does not support the feature.
LoadBalancerSourceRanges []string `json:"loadBalancerSourceRanges,omitempty"`
}
type ServicePort struct {
// Optional if only one ServicePort is defined on this service: The
// name of this port within the service. This must be a DNS_LABEL.
// All ports within a ServiceSpec must have unique names. This maps to
// the 'Name' field in EndpointPort objects.
Name string `json:"name"`
// The IP protocol for this port. Supports "TCP" and "UDP".
Protocol Protocol `json:"protocol"`
// The port that will be exposed on the service.
Port int32 `json:"port"`
// Optional: The target port on pods selected by this service. If this
// is a string, it will be looked up as a named port in the target
// Pod's container ports. If this is not specified, the value
// of the 'port' field is used (an identity map).
// This field is ignored for services with clusterIP=None, and should be
// omitted or set equal to the 'port' field.
TargetPort intstr.IntOrString `json:"targetPort"`
// The port on each node on which this service is exposed.
// Default is to auto-allocate a port if the ServiceType of this Service requires one.
NodePort int32 `json:"nodePort"`
}
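// Illustrative sketch (not part of the API): a service port that forwards
// port 80 to the container port named "web" on the selected pods. The names
// are hypothetical; ProtocolTCP is defined elsewhere in this package.
var exampleServicePort = ServicePort{
	Name:       "http",
	Protocol:   ProtocolTCP,
	Port:       80,
	TargetPort: intstr.FromString("web"), // looked up as a named container port
}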
// +genclient=true
// Service is a named abstraction of software service (for example, mysql) consisting of local port
// (for example 3306) that the proxy listens on, and the selector that determines which pods
// will answer requests sent through the proxy.
type Service struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the behavior of a service.
Spec ServiceSpec `json:"spec,omitempty"`
// Status represents the current status of a service.
Status ServiceStatus `json:"status,omitempty"`
}
// +genclient=true
// ServiceAccount binds together:
// * a name, understood by users, and perhaps by peripheral systems, for an identity
// * a principal that can be authenticated and authorized
// * a set of secrets
type ServiceAccount struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Secrets is the list of secrets allowed to be used by pods running using this ServiceAccount
Secrets []ObjectReference `json:"secrets"`
// ImagePullSecrets is a list of references to secrets in the same namespace to use for pulling any images
// in pods that reference this ServiceAccount. ImagePullSecrets are distinct from Secrets because Secrets
// can be mounted in the pod, but ImagePullSecrets are only accessed by the kubelet.
ImagePullSecrets []LocalObjectReference `json:"imagePullSecrets,omitempty"`
}
// ServiceAccountList is a list of ServiceAccount objects
type ServiceAccountList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []ServiceAccount `json:"items"`
}
// +genclient=true
// Endpoints is a collection of endpoints that implement the actual service. Example:
// Name: "mysvc",
// Subsets: [
// {
// Addresses: [{"ip": "10.10.1.1"}, {"ip": "10.10.2.2"}],
// Ports: [{"name": "a", "port": 8675}, {"name": "b", "port": 309}]
// },
// {
// Addresses: [{"ip": "10.10.3.3"}],
// Ports: [{"name": "a", "port": 93}, {"name": "b", "port": 76}]
// },
// ]
type Endpoints struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// The set of all endpoints is the union of all subsets.
Subsets []EndpointSubset
}
// EndpointSubset is a group of addresses with a common set of ports. The
// expanded set of endpoints is the Cartesian product of Addresses x Ports.
// For example, given:
// {
// Addresses: [{"ip": "10.10.1.1"}, {"ip": "10.10.2.2"}],
// Ports: [{"name": "a", "port": 8675}, {"name": "b", "port": 309}]
// }
// The resulting set of endpoints can be viewed as:
// a: [ 10.10.1.1:8675, 10.10.2.2:8675 ],
// b: [ 10.10.1.1:309, 10.10.2.2:309 ]
type EndpointSubset struct {
Addresses []EndpointAddress
NotReadyAddresses []EndpointAddress
Ports []EndpointPort
}
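// Illustrative sketch (not part of the API): expanding a subset into the
// Cartesian product of ready addresses and ports described above. The pair
// type and helper are hypothetical, for explanation only.
type endpointPair struct {
	Address EndpointAddress
	Port    EndpointPort
}

func expandSubset(s EndpointSubset) []endpointPair {
	pairs := make([]endpointPair, 0, len(s.Addresses)*len(s.Ports))
	for _, a := range s.Addresses {
		for _, p := range s.Ports {
			pairs = append(pairs, endpointPair{Address: a, Port: p})
		}
	}
	return pairs
}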
// EndpointAddress is a tuple that describes a single IP address.
type EndpointAddress struct {
// The IP of this endpoint.
// IPv6 is also accepted but not fully supported on all platforms. Also, certain
// kubernetes components, like kube-proxy, are not IPv6 ready.
// TODO: This should allow hostname or IP, see #4447.
IP string
// Optional: Hostname of this endpoint
// Meant to be used by DNS servers etc.
Hostname string `json:"hostname,omitempty"`
// Optional: The kubernetes object related to the entry point.
TargetRef *ObjectReference
}
// EndpointPort is a tuple that describes a single port.
type EndpointPort struct {
// The name of this port (corresponds to ServicePort.Name). Optional
// if only one port is defined. Must be a DNS_LABEL.
Name string
// The port number.
Port int32
// The IP protocol for this port.
Protocol Protocol
}
// EndpointsList is a list of endpoints.
type EndpointsList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Endpoints `json:"items"`
}
// NodeSpec describes the attributes that a node is created with.
type NodeSpec struct {
// PodCIDR represents the pod IP range assigned to the node
// Note: assigning IP ranges to nodes might need to be revisited when we support migratable IPs.
PodCIDR string `json:"podCIDR,omitempty"`
// External ID of the node assigned by some machine database (e.g. a cloud provider)
ExternalID string `json:"externalID,omitempty"`
// ID of the node assigned by the cloud provider
// Note: format is "<ProviderName>://<ProviderSpecificNodeID>"
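	// e.g. (hypothetical value, for illustration only) "gce://my-project/us-central1-a/my-node"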
ProviderID string `json:"providerID,omitempty"`
// Unschedulable controls node schedulability of new pods. By default node is schedulable.
Unschedulable bool `json:"unschedulable,omitempty"`
}
// DaemonEndpoint contains information about a single Daemon endpoint.
type DaemonEndpoint struct {
/*
The port tag was not properly in quotes in earlier releases, so it must be
uppercased for backwards compat (since it was falling back to var name of
'Port').
*/
// Port number of the given endpoint.
Port int32 `json:"Port"`
}
// NodeDaemonEndpoints lists ports opened by daemons running on the Node.
type NodeDaemonEndpoints struct {
// Endpoint on which Kubelet is listening.
KubeletEndpoint DaemonEndpoint `json:"kubeletEndpoint,omitempty"`
}
// NodeSystemInfo is a set of ids/uuids to uniquely identify the node.
type NodeSystemInfo struct {
// Machine ID reported by the node.
MachineID string `json:"machineID"`
// System UUID reported by the node.
SystemUUID string `json:"systemUUID"`
// Boot ID reported by the node.
BootID string `json:"bootID"`
// Kernel Version reported by the node.
KernelVersion string `json:"kernelVersion"`
// OS Image reported by the node.
OSImage string `json:"osImage"`
// ContainerRuntime Version reported by the node.
ContainerRuntimeVersion string `json:"containerRuntimeVersion"`
// Kubelet Version reported by the node.
KubeletVersion string `json:"kubeletVersion"`
// KubeProxy Version reported by the node.
KubeProxyVersion string `json:"kubeProxyVersion"`
// The Operating System reported by the node
OperatingSystem string `json:"operatingSystem"`
// The Architecture reported by the node
Architecture string `json:"architecture"`
}
// NodeStatus is information about the current status of a node.
type NodeStatus struct {
// Capacity represents the total resources of a node.
Capacity ResourceList `json:"capacity,omitempty"`
// Allocatable represents the resources of a node that are available for scheduling.
Allocatable ResourceList `json:"allocatable,omitempty"`
// NodePhase is the current lifecycle phase of the node.
Phase NodePhase `json:"phase,omitempty"`
// Conditions is an array of current node conditions.
Conditions []NodeCondition `json:"conditions,omitempty"`
// Queried from cloud provider, if available.
Addresses []NodeAddress `json:"addresses,omitempty"`
// Endpoints of daemons running on the Node.
DaemonEndpoints NodeDaemonEndpoints `json:"daemonEndpoints,omitempty"`
// Set of ids/uuids to uniquely identify the node.
NodeInfo NodeSystemInfo `json:"nodeInfo,omitempty"`
// List of container images on this node
Images []ContainerImage `json:"images,omitempty"`
// List of attachable volumes in use (mounted) by the node.
VolumesInUse []UniqueVolumeName `json:"volumesInUse,omitempty"`
// List of volumes that are attached to the node.
VolumesAttached []AttachedVolume `json:"volumesAttached,omitempty"`
}
type UniqueVolumeName string
// AttachedVolume describes a volume attached to a node
type AttachedVolume struct {
// Name of the attached volume
Name UniqueVolumeName `json:"name"`
// DevicePath represents the device path where the volume should be available
DevicePath string `json:"devicePath"`
}
// AvoidPods describes pods that should avoid this node. This is the value for a
// Node annotation with key scheduler.alpha.kubernetes.io/preferAvoidPods and
// will eventually become a field of NodeStatus.
type AvoidPods struct {
// Bounded-sized list of signatures of pods that should avoid this node, sorted
// in timestamp order from oldest to newest. Size of the slice is unspecified.
PreferAvoidPods []PreferAvoidPodsEntry `json:"preferAvoidPods,omitempty"`
}
// Describes a class of pods that should avoid this node.
type PreferAvoidPodsEntry struct {
// The class of pods.
PodSignature PodSignature `json:"podSignature"`
// Time at which this entry was added to the list.
EvictionTime unversioned.Time `json:"evictionTime,omitempty"`
// (brief) reason why this entry was added to the list.
Reason string `json:"reason,omitempty"`
// Human readable message indicating why this entry was added to the list.
Message string `json:"message,omitempty"`
}
// Describes the class of pods that should avoid this node.
// Exactly one field should be set.
type PodSignature struct {
// Reference to controller whose pods should avoid this node.
PodController *OwnerReference `json:"podController,omitempty"`
}
// Describe a container image
type ContainerImage struct {
// Names by which this image is known.
Names []string `json:"names"`
// The size of the image in bytes.
SizeBytes int64 `json:"sizeBytes,omitempty"`
}
type NodePhase string
// These are the valid phases of node.
const (
// NodePending means the node has been created/added by the system, but not configured.
NodePending NodePhase = "Pending"
// NodeRunning means the node has been configured and has Kubernetes components running.
NodeRunning NodePhase = "Running"
// NodeTerminated means the node has been removed from the cluster.
NodeTerminated NodePhase = "Terminated"
)
type NodeConditionType string
// These are valid conditions of a node. Currently, we don't have enough information to decide
// node condition. In the future, we will add more. The proposed set of conditions is:
// NodeReady, NodeReachable
const (
// NodeReady means kubelet is healthy and ready to accept pods.
NodeReady NodeConditionType = "Ready"
// NodeOutOfDisk means the kubelet will not accept new pods due to insufficient free disk
// space on the node.
NodeOutOfDisk NodeConditionType = "OutOfDisk"
// NodeMemoryPressure means the kubelet is under pressure due to insufficient available memory.
NodeMemoryPressure NodeConditionType = "MemoryPressure"
// NodeDiskPressure means the kubelet is under pressure due to insufficient available disk.
NodeDiskPressure NodeConditionType = "DiskPressure"
// NodeNetworkUnavailable means that network for the node is not correctly configured.
NodeNetworkUnavailable NodeConditionType = "NetworkUnavailable"
)
type NodeCondition struct {
Type NodeConditionType `json:"type"`
Status ConditionStatus `json:"status"`
LastHeartbeatTime unversioned.Time `json:"lastHeartbeatTime,omitempty"`
LastTransitionTime unversioned.Time `json:"lastTransitionTime,omitempty"`
Reason string `json:"reason,omitempty"`
Message string `json:"message,omitempty"`
}
type NodeAddressType string
// These are valid address types of a node. NodeLegacyHostIP is used to
// transition from the outdated HostIP field to NodeAddress.
const (
NodeLegacyHostIP NodeAddressType = "LegacyHostIP"
NodeHostName NodeAddressType = "Hostname"
NodeExternalIP NodeAddressType = "ExternalIP"
NodeInternalIP NodeAddressType = "InternalIP"
)
type NodeAddress struct {
Type NodeAddressType `json:"type"`
Address string `json:"address"`
}
// NodeResources is an object for conveying resource information about a node.
// see http://releases.k8s.io/HEAD/docs/design/resources.md for more details.
type NodeResources struct {
// Capacity represents the available resources of a node
Capacity ResourceList `json:"capacity,omitempty"`
}
// ResourceName is the name identifying various resources in a ResourceList.
type ResourceName string
// Resource names must be no more than 63 characters, consisting of upper- or lower-case alphanumeric characters,
// with the -, _, and . characters allowed anywhere, except the first or last character.
// The default convention, matching that for annotations, is to use lower-case names, with dashes, rather than
// camel case, separating compound words.
// Fully-qualified resource typenames are constructed from a DNS-style subdomain, followed by a slash `/` and a name.
const (
// CPU, in cores. (500m = .5 cores)
ResourceCPU ResourceName = "cpu"
// Memory, in bytes. (500Gi = 500GiB = 500 * 1024 * 1024 * 1024)
ResourceMemory ResourceName = "memory"
	// Volume size, in bytes (e.g. 5Gi = 5GiB = 5 * 1024 * 1024 * 1024)
ResourceStorage ResourceName = "storage"
	// NVIDIA GPU, in devices. Alpha; may change: although the quantity is fractional and may exceed 1, only one whole device per node is assigned.
ResourceNvidiaGPU ResourceName = "alpha.kubernetes.io/nvidia-gpu"
// Number of Pods that may be running on this Node: see ResourcePods
)
// ResourceList is a set of (resource name, quantity) pairs.
type ResourceList map[ResourceName]resource.Quantity
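// A sketch for illustration (hypothetical values; assumes the Quantity parser
// from pkg/api/resource): a ResourceList with half a core and 256MiB of memory:
//
//	limits := ResourceList{
//		ResourceCPU:    resource.MustParse("500m"),  // half a core
//		ResourceMemory: resource.MustParse("256Mi"), // 256 MiB
//	}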
// +genclient=true
// +nonNamespaced=true
// Node is a worker node in Kubernetes
// The name of the node according to etcd is in ObjectMeta.Name.
type Node struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the behavior of a node.
Spec NodeSpec `json:"spec,omitempty"`
// Status describes the current status of a Node
Status NodeStatus `json:"status,omitempty"`
}
// NodeList is a list of nodes.
type NodeList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Node `json:"items"`
}
// NamespaceSpec describes the attributes on a Namespace
type NamespaceSpec struct {
// Finalizers is an opaque list of values that must be empty to permanently remove object from storage
Finalizers []FinalizerName
}
type FinalizerName string
// These are internal finalizer values to Kubernetes, must be qualified name unless defined here
const (
FinalizerKubernetes FinalizerName = "kubernetes"
FinalizerOrphan string = "orphan"
)
// NamespaceStatus is information about the current status of a Namespace.
type NamespaceStatus struct {
// Phase is the current lifecycle phase of the namespace.
Phase NamespacePhase `json:"phase,omitempty"`
}
type NamespacePhase string
// These are the valid phases of a namespace.
const (
// NamespaceActive means the namespace is available for use in the system
NamespaceActive NamespacePhase = "Active"
// NamespaceTerminating means the namespace is undergoing graceful termination
NamespaceTerminating NamespacePhase = "Terminating"
)
// +genclient=true
// +nonNamespaced=true
// A namespace provides a scope for Names.
// Use of multiple namespaces is optional.
type Namespace struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the behavior of the Namespace.
Spec NamespaceSpec `json:"spec,omitempty"`
// Status describes the current status of a Namespace
Status NamespaceStatus `json:"status,omitempty"`
}
// NamespaceList is a list of Namespaces.
type NamespaceList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Namespace `json:"items"`
}
// Binding ties one object to another - for example, a pod is bound to a node by a scheduler.
type Binding struct {
unversioned.TypeMeta `json:",inline"`
// ObjectMeta describes the object that is being bound.
ObjectMeta `json:"metadata,omitempty"`
// Target is the object to bind to.
Target ObjectReference `json:"target"`
}
// Preconditions must be fulfilled before an operation (update, delete, etc.) is carried out.
type Preconditions struct {
// Specifies the target UID.
UID *types.UID `json:"uid,omitempty"`
}
// DeleteOptions may be provided when deleting an API object
type DeleteOptions struct {
unversioned.TypeMeta `json:",inline"`
// Optional duration in seconds before the object should be deleted. Value must be non-negative integer.
// The value zero indicates delete immediately. If this value is nil, the default grace period for the
// specified type will be used.
GracePeriodSeconds *int64 `json:"gracePeriodSeconds,omitempty"`
// Must be fulfilled before a deletion is carried out. If not possible, a 409 Conflict status will be
// returned.
Preconditions *Preconditions `json:"preconditions,omitempty"`
// Should the dependent objects be orphaned. If true/false, the "orphan"
// finalizer will be added to/removed from the object's finalizers list.
OrphanDependents *bool `json:"orphanDependents,omitempty"`
}
// ExportOptions is the query options to the standard REST get call.
type ExportOptions struct {
unversioned.TypeMeta `json:",inline"`
// Should this value be exported. Export strips fields that a user can not specify.
Export bool `json:"export"`
// Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'
Exact bool `json:"exact"`
}
// ListOptions is the query options to a standard REST list call, and has future support for
// watch calls.
type ListOptions struct {
unversioned.TypeMeta `json:",inline"`
// A selector based on labels
LabelSelector labels.Selector
// A selector based on fields
FieldSelector fields.Selector
// If true, watch for changes to this list
Watch bool
// For watch, it's the resource version to watch.
// For list,
// - if unset, then the result is returned from remote storage based on quorum-read flag;
// - if it's 0, then we simply return what we currently have in cache, no guarantee;
// - if set to non zero, then the result is as fresh as given rv.
ResourceVersion string
// Timeout for the list/watch call.
TimeoutSeconds *int64
}
// PodLogOptions is the query options for a Pod's logs REST call
type PodLogOptions struct {
unversioned.TypeMeta
// Container for which to return logs
Container string
// If true, follow the logs for the pod
Follow bool
// If true, return previous terminated container logs
Previous bool
// A relative time in seconds before the current time from which to show logs. If this value
// precedes the time a pod was started, only logs since the pod start will be returned.
// If this value is in the future, no logs will be returned.
// Only one of sinceSeconds or sinceTime may be specified.
SinceSeconds *int64
// An RFC3339 timestamp from which to show logs. If this value
// precedes the time a pod was started, only logs since the pod start will be returned.
// If this value is in the future, no logs will be returned.
// Only one of sinceSeconds or sinceTime may be specified.
SinceTime *unversioned.Time
// If true, add an RFC3339 or RFC3339Nano timestamp at the beginning of every line
// of log output.
Timestamps bool
// If set, the number of lines from the end of the logs to show. If not specified,
// logs are shown from the creation of the container or sinceSeconds or sinceTime
TailLines *int64
// If set, the number of bytes to read from the server before terminating the
// log output. This may not display a complete final line of logging, and may return
// slightly more or slightly less than the specified limit.
LimitBytes *int64
}
// PodAttachOptions is the query options to a Pod's remote attach call
// TODO: merge w/ PodExecOptions below for stdin, stdout, etc
type PodAttachOptions struct {
unversioned.TypeMeta `json:",inline"`
// Stdin if true indicates that stdin is to be redirected for the attach call
Stdin bool `json:"stdin,omitempty"`
// Stdout if true indicates that stdout is to be redirected for the attach call
Stdout bool `json:"stdout,omitempty"`
// Stderr if true indicates that stderr is to be redirected for the attach call
Stderr bool `json:"stderr,omitempty"`
// TTY if true indicates that a tty will be allocated for the attach call
TTY bool `json:"tty,omitempty"`
// Container to attach to.
Container string `json:"container,omitempty"`
}
// PodExecOptions is the query options to a Pod's remote exec call
type PodExecOptions struct {
unversioned.TypeMeta
// Stdin if true indicates that stdin is to be redirected for the exec call
Stdin bool
// Stdout if true indicates that stdout is to be redirected for the exec call
Stdout bool
// Stderr if true indicates that stderr is to be redirected for the exec call
Stderr bool
// TTY if true indicates that a tty will be allocated for the exec call
TTY bool
// Container in which to execute the command.
Container string
// Command is the remote command to execute; argv array; not executed within a shell.
Command []string
}
// PodProxyOptions is the query options to a Pod's proxy call
type PodProxyOptions struct {
unversioned.TypeMeta
// Path is the URL path to use for the current proxy request
Path string
}
// NodeProxyOptions is the query options to a Node's proxy call
type NodeProxyOptions struct {
unversioned.TypeMeta
// Path is the URL path to use for the current proxy request
Path string
}
// ServiceProxyOptions is the query options to a Service's proxy call.
type ServiceProxyOptions struct {
unversioned.TypeMeta
// Path is the part of URLs that include service endpoints, suffixes,
// and parameters to use for the current proxy request to service.
// For example, the whole request URL is
// http://localhost/api/v1/namespaces/kube-system/services/elasticsearch-logging/_search?q=user:kimchy.
// Path is _search?q=user:kimchy.
Path string
}
// OwnerReference contains enough information to let you identify an owning
// object. Currently, an owning object must be in the same namespace, so there
// is no namespace field.
type OwnerReference struct {
// API version of the referent.
APIVersion string `json:"apiVersion"`
// Kind of the referent.
// More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
Kind string `json:"kind"`
// Name of the referent.
// More info: http://releases.k8s.io/HEAD/docs/user-guide/identifiers.md#names
Name string `json:"name"`
// UID of the referent.
// More info: http://releases.k8s.io/HEAD/docs/user-guide/identifiers.md#uids
UID types.UID `json:"uid"`
// If true, this reference points to the managing controller.
Controller *bool `json:"controller,omitempty"`
}
// ObjectReference contains enough information to let you inspect or modify the referred object.
type ObjectReference struct {
Kind string `json:"kind,omitempty"`
Namespace string `json:"namespace,omitempty"`
Name string `json:"name,omitempty"`
UID types.UID `json:"uid,omitempty"`
APIVersion string `json:"apiVersion,omitempty"`
ResourceVersion string `json:"resourceVersion,omitempty"`
// Optional. If referring to a piece of an object instead of an entire object, this string
// should contain information to identify the sub-object. For example, if the object
// reference is to a container within a pod, this would take on a value like:
// "spec.containers{name}" (where "name" refers to the name of the container that triggered
// the event) or if no container name is specified "spec.containers[2]" (container with
// index 2 in this pod). This syntax is chosen only to have some well-defined way of
// referencing a part of an object.
// TODO: this design is not final and this field is subject to change in the future.
FieldPath string `json:"fieldPath,omitempty"`
}
// LocalObjectReference contains enough information to let you locate the referenced object inside the same namespace.
type LocalObjectReference struct {
//TODO: Add other useful fields. apiVersion, kind, uid?
Name string
}
type SerializedReference struct {
unversioned.TypeMeta `json:",inline"`
Reference ObjectReference `json:"reference,omitempty"`
}
type EventSource struct {
// Component from which the event is generated.
Component string `json:"component,omitempty"`
// Host name on which the event is generated.
Host string `json:"host,omitempty"`
}
// Valid values for event types (new types could be added in future)
const (
// Information only and will not cause any problems
EventTypeNormal string = "Normal"
// These events are to warn that something might go wrong
EventTypeWarning string = "Warning"
)
// +genclient=true
// Event is a report of an event somewhere in the cluster.
// TODO: Decide whether to store these separately or with the object they apply to.
type Event struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Required. The object that this event is about.
InvolvedObject ObjectReference `json:"involvedObject,omitempty"`
// Optional; this should be a short, machine understandable string that gives the reason
// for this event being generated. For example, if the event is reporting that a container
// can't start, the Reason might be "ImageNotFound".
// TODO: provide exact specification for format.
Reason string `json:"reason,omitempty"`
// Optional. A human-readable description of the status of this operation.
// TODO: decide on maximum length.
Message string `json:"message,omitempty"`
// Optional. The component reporting this event. Should be a short machine understandable string.
Source EventSource `json:"source,omitempty"`
// The time at which the event was first recorded. (Time of server receipt is in TypeMeta.)
FirstTimestamp unversioned.Time `json:"firstTimestamp,omitempty"`
// The time at which the most recent occurrence of this event was recorded.
LastTimestamp unversioned.Time `json:"lastTimestamp,omitempty"`
// The number of times this event has occurred.
Count int32 `json:"count,omitempty"`
// Type of this event (Normal, Warning), new types could be added in the future.
Type string `json:"type,omitempty"`
}
// EventList is a list of events.
type EventList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Event `json:"items"`
}
// List holds a list of objects, which may not be known by the server.
type List struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []runtime.Object `json:"items"`
}
// A type of object that is limited
type LimitType string
const (
// Limit that applies to all pods in a namespace
LimitTypePod LimitType = "Pod"
// Limit that applies to all containers in a namespace
LimitTypeContainer LimitType = "Container"
)
// LimitRangeItem defines a min/max usage limit for any resource that matches on kind
type LimitRangeItem struct {
// Type of resource that this limit applies to
Type LimitType `json:"type,omitempty"`
// Max usage constraints on this kind by resource name
Max ResourceList `json:"max,omitempty"`
// Min usage constraints on this kind by resource name
Min ResourceList `json:"min,omitempty"`
// Default resource requirement limit value by resource name.
Default ResourceList `json:"default,omitempty"`
// DefaultRequest resource requirement request value by resource name.
DefaultRequest ResourceList `json:"defaultRequest,omitempty"`
// MaxLimitRequestRatio represents the max burst value for the named resource
MaxLimitRequestRatio ResourceList `json:"maxLimitRequestRatio,omitempty"`
}
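// A sketch for illustration (hypothetical values): a per-Container rule that
// defaults CPU requests to 100m and caps CPU usage at one core:
//
//	item := LimitRangeItem{
//		Type:           LimitTypeContainer,
//		DefaultRequest: ResourceList{ResourceCPU: resource.MustParse("100m")},
//		Max:            ResourceList{ResourceCPU: resource.MustParse("1")},
//	}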
// LimitRangeSpec defines a min/max usage limit for resources that match on kind
type LimitRangeSpec struct {
// Limits is the list of LimitRangeItem objects that are enforced
Limits []LimitRangeItem `json:"limits"`
}
// +genclient=true
// LimitRange sets resource usage limits for each kind of resource in a Namespace
type LimitRange struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the limits enforced
Spec LimitRangeSpec `json:"spec,omitempty"`
}
// LimitRangeList is a list of LimitRange items.
type LimitRangeList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
// Items is a list of LimitRange objects
Items []LimitRange `json:"items"`
}
// The following identify resource constants for Kubernetes object types
const (
// Pods, number
ResourcePods ResourceName = "pods"
// Services, number
ResourceServices ResourceName = "services"
// ReplicationControllers, number
ResourceReplicationControllers ResourceName = "replicationcontrollers"
// ResourceQuotas, number
ResourceQuotas ResourceName = "resourcequotas"
// ResourceSecrets, number
ResourceSecrets ResourceName = "secrets"
// ResourceConfigMaps, number
ResourceConfigMaps ResourceName = "configmaps"
// ResourcePersistentVolumeClaims, number
ResourcePersistentVolumeClaims ResourceName = "persistentvolumeclaims"
// ResourceServicesNodePorts, number
ResourceServicesNodePorts ResourceName = "services.nodeports"
// ResourceServicesLoadBalancers, number
ResourceServicesLoadBalancers ResourceName = "services.loadbalancers"
// CPU request, in cores. (500m = .5 cores)
ResourceRequestsCPU ResourceName = "requests.cpu"
// Memory request, in bytes. (500Gi = 500GiB = 500 * 1024 * 1024 * 1024)
ResourceRequestsMemory ResourceName = "requests.memory"
// Storage request, in bytes
ResourceRequestsStorage ResourceName = "requests.storage"
// CPU limit, in cores. (500m = .5 cores)
ResourceLimitsCPU ResourceName = "limits.cpu"
// Memory limit, in bytes. (500Gi = 500GiB = 500 * 1024 * 1024 * 1024)
ResourceLimitsMemory ResourceName = "limits.memory"
)
// A ResourceQuotaScope defines a filter that must match each object tracked by a quota
type ResourceQuotaScope string
const (
	// Match all pod objects where spec.activeDeadlineSeconds >= 0
ResourceQuotaScopeTerminating ResourceQuotaScope = "Terminating"
	// Match all pod objects where spec.activeDeadlineSeconds is nil
ResourceQuotaScopeNotTerminating ResourceQuotaScope = "NotTerminating"
// Match all pod objects that have best effort quality of service
ResourceQuotaScopeBestEffort ResourceQuotaScope = "BestEffort"
// Match all pod objects that do not have best effort quality of service
ResourceQuotaScopeNotBestEffort ResourceQuotaScope = "NotBestEffort"
)
// ResourceQuotaSpec defines the desired hard limits to enforce for Quota
type ResourceQuotaSpec struct {
// Hard is the set of desired hard limits for each named resource
Hard ResourceList `json:"hard,omitempty"`
// A collection of filters that must match each object tracked by a quota.
// If not specified, the quota matches all objects.
Scopes []ResourceQuotaScope `json:"scopes,omitempty"`
}
// ResourceQuotaStatus defines the enforced hard limits and observed use
type ResourceQuotaStatus struct {
// Hard is the set of enforced hard limits for each named resource
Hard ResourceList `json:"hard,omitempty"`
// Used is the current observed total usage of the resource in the namespace
Used ResourceList `json:"used,omitempty"`
}
// +genclient=true
// ResourceQuota sets aggregate quota restrictions enforced per namespace
type ResourceQuota struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Spec defines the desired quota
Spec ResourceQuotaSpec `json:"spec,omitempty"`
// Status defines the actual enforced quota and its current usage
Status ResourceQuotaStatus `json:"status,omitempty"`
}
// ResourceQuotaList is a list of ResourceQuota items
type ResourceQuotaList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
// Items is a list of ResourceQuota objects
Items []ResourceQuota `json:"items"`
}
// +genclient=true
// Secret holds secret data of a certain type. The total bytes of the values in
// the Data field must be less than MaxSecretSize bytes.
type Secret struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Data contains the secret data. Each key must be a valid DNS_SUBDOMAIN
	// or a leading dot followed by a valid DNS_SUBDOMAIN.
// The serialized form of the secret data is a base64 encoded string,
// representing the arbitrary (possibly non-string) data value here.
Data map[string][]byte `json:"data,omitempty"`
// Used to facilitate programmatic handling of secret data.
Type SecretType `json:"type,omitempty"`
}
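// A sketch for illustration (hypothetical values): an Opaque secret holding a
// single key; in serialized (JSON/YAML) form the byte value appears base64 encoded:
//
//	s := Secret{
//		ObjectMeta: ObjectMeta{Name: "db-password"},
//		Type:       SecretTypeOpaque,
//		Data:       map[string][]byte{"password": []byte("s3cr3t")},
//	}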
const MaxSecretSize = 1 * 1024 * 1024
type SecretType string
const (
// SecretTypeOpaque is the default; arbitrary user-defined data
SecretTypeOpaque SecretType = "Opaque"
// SecretTypeServiceAccountToken contains a token that identifies a service account to the API
//
// Required fields:
// - Secret.Annotations["kubernetes.io/service-account.name"] - the name of the ServiceAccount the token identifies
// - Secret.Annotations["kubernetes.io/service-account.uid"] - the UID of the ServiceAccount the token identifies
// - Secret.Data["token"] - a token that identifies the service account to the API
SecretTypeServiceAccountToken SecretType = "kubernetes.io/service-account-token"
// ServiceAccountNameKey is the key of the required annotation for SecretTypeServiceAccountToken secrets
ServiceAccountNameKey = "kubernetes.io/service-account.name"
// ServiceAccountUIDKey is the key of the required annotation for SecretTypeServiceAccountToken secrets
ServiceAccountUIDKey = "kubernetes.io/service-account.uid"
// ServiceAccountTokenKey is the key of the required data for SecretTypeServiceAccountToken secrets
ServiceAccountTokenKey = "token"
// ServiceAccountKubeconfigKey is the key of the optional kubeconfig data for SecretTypeServiceAccountToken secrets
ServiceAccountKubeconfigKey = "kubernetes.kubeconfig"
// ServiceAccountRootCAKey is the key of the optional root certificate authority for SecretTypeServiceAccountToken secrets
ServiceAccountRootCAKey = "ca.crt"
// ServiceAccountNamespaceKey is the key of the optional namespace to use as the default for namespaced API calls
ServiceAccountNamespaceKey = "namespace"
// SecretTypeDockercfg contains a dockercfg file that follows the same format rules as ~/.dockercfg
//
// Required fields:
// - Secret.Data[".dockercfg"] - a serialized ~/.dockercfg file
SecretTypeDockercfg SecretType = "kubernetes.io/dockercfg"
// DockerConfigKey is the key of the required data for SecretTypeDockercfg secrets
DockerConfigKey = ".dockercfg"
// SecretTypeDockerConfigJson contains a dockercfg file that follows the same format rules as ~/.docker/config.json
//
// Required fields:
// - Secret.Data[".dockerconfigjson"] - a serialized ~/.docker/config.json file
SecretTypeDockerConfigJson SecretType = "kubernetes.io/dockerconfigjson"
// DockerConfigJsonKey is the key of the required data for SecretTypeDockerConfigJson secrets
DockerConfigJsonKey = ".dockerconfigjson"
// SecretTypeBasicAuth contains data needed for basic authentication.
//
	// At least one of the following fields is required:
// - Secret.Data["username"] - username used for authentication
// - Secret.Data["password"] - password or token needed for authentication
SecretTypeBasicAuth SecretType = "kubernetes.io/basic-auth"
// BasicAuthUsernameKey is the key of the username for SecretTypeBasicAuth secrets
BasicAuthUsernameKey = "username"
// BasicAuthPasswordKey is the key of the password or token for SecretTypeBasicAuth secrets
BasicAuthPasswordKey = "password"
	// SecretTypeSSHAuth contains data needed for SSH authentication.
//
// Required field:
// - Secret.Data["ssh-privatekey"] - private SSH key needed for authentication
SecretTypeSSHAuth SecretType = "kubernetes.io/ssh-auth"
// SSHAuthPrivateKey is the key of the required SSH private key for SecretTypeSSHAuth secrets
SSHAuthPrivateKey = "ssh-privatekey"
// SecretTypeTLS contains information about a TLS client or server secret. It
// is primarily used with TLS termination of the Ingress resource, but may be<|fim▁hole|> // used in other types.
//
// Required fields:
// - Secret.Data["tls.key"] - TLS private key.
// Secret.Data["tls.crt"] - TLS certificate.
// TODO: Consider supporting different formats, specifying CA/destinationCA.
SecretTypeTLS SecretType = "kubernetes.io/tls"
// TLSCertKey is the key for tls certificates in a TLS secret.
TLSCertKey = "tls.crt"
// TLSPrivateKeyKey is the key for the private key field in a TLS secret.
TLSPrivateKeyKey = "tls.key"
)
type SecretList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []Secret `json:"items"`
}
// +genclient=true
// ConfigMap holds configuration data for components or applications to consume.
type ConfigMap struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// Data contains the configuration data.
// Each key must be a valid DNS_SUBDOMAIN with an optional leading dot.
Data map[string]string `json:"data,omitempty"`
}
// ConfigMapList is a resource containing a list of ConfigMap objects.
type ConfigMapList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
// Items is the list of ConfigMaps.
Items []ConfigMap `json:"items"`
}
// These constants are for remote command execution and port forwarding and are
// used by both the client side and server side components.
//
// This is probably not the ideal place for them, but it didn't seem worth it
// to create pkg/exec and pkg/portforward just to contain a single file with
// constants in it. Suggestions for more appropriate alternatives are
// definitely welcome!
const (
// Enable stdin for remote command execution
ExecStdinParam = "input"
// Enable stdout for remote command execution
ExecStdoutParam = "output"
// Enable stderr for remote command execution
ExecStderrParam = "error"
// Enable TTY for remote command execution
ExecTTYParam = "tty"
// Command to run for remote command execution
ExecCommandParamm = "command"
// Name of header that specifies stream type
StreamType = "streamType"
// Value for streamType header for stdin stream
StreamTypeStdin = "stdin"
// Value for streamType header for stdout stream
StreamTypeStdout = "stdout"
// Value for streamType header for stderr stream
StreamTypeStderr = "stderr"
// Value for streamType header for data stream
StreamTypeData = "data"
// Value for streamType header for error stream
StreamTypeError = "error"
// Value for streamType header for terminal resize stream
StreamTypeResize = "resize"
// Name of header that specifies the port being forwarded
PortHeader = "port"
// Name of header that specifies a request ID used to associate the error
// and data streams for a single forwarded connection
PortForwardRequestIDHeader = "requestID"
)
// Similarly to above, these are constants to support HTTP PATCH utilized by
// both the client and server that didn't make sense for a whole package to be
// dedicated to.
type PatchType string
const (
JSONPatchType PatchType = "application/json-patch+json"
MergePatchType PatchType = "application/merge-patch+json"
StrategicMergePatchType PatchType = "application/strategic-merge-patch+json"
)
// Type and constants for component health validation.
type ComponentConditionType string
// These are the valid conditions for the component.
const (
ComponentHealthy ComponentConditionType = "Healthy"
)
type ComponentCondition struct {
Type ComponentConditionType `json:"type"`
Status ConditionStatus `json:"status"`
Message string `json:"message,omitempty"`
Error string `json:"error,omitempty"`
}
// +genclient=true
// +nonNamespaced=true
// ComponentStatus (and ComponentStatusList) holds the cluster validation info.
type ComponentStatus struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
Conditions []ComponentCondition `json:"conditions,omitempty"`
}
type ComponentStatusList struct {
unversioned.TypeMeta `json:",inline"`
unversioned.ListMeta `json:"metadata,omitempty"`
Items []ComponentStatus `json:"items"`
}
// SecurityContext holds security configuration that will be applied to a container.
// Some fields are present in both SecurityContext and PodSecurityContext. When both
// are set, the values in SecurityContext take precedence.
type SecurityContext struct {
// The capabilities to add/drop when running containers.
// Defaults to the default set of capabilities granted by the container runtime.
Capabilities *Capabilities `json:"capabilities,omitempty"`
// Run container in privileged mode.
// Processes in privileged containers are essentially equivalent to root on the host.
// Defaults to false.
Privileged *bool `json:"privileged,omitempty"`
// The SELinux context to be applied to the container.
// If unspecified, the container runtime will allocate a random SELinux context for each
// container. May also be set in PodSecurityContext. If set in both SecurityContext and
// PodSecurityContext, the value specified in SecurityContext takes precedence.
SELinuxOptions *SELinuxOptions `json:"seLinuxOptions,omitempty"`
// The UID to run the entrypoint of the container process.
// Defaults to user specified in image metadata if unspecified.
// May also be set in PodSecurityContext. If set in both SecurityContext and
// PodSecurityContext, the value specified in SecurityContext takes precedence.
RunAsUser *int64 `json:"runAsUser,omitempty"`
// Indicates that the container must run as a non-root user.
// If true, the Kubelet will validate the image at runtime to ensure that it
// does not run as UID 0 (root) and fail to start the container if it does.
// If unset or false, no such validation will be performed.
// May also be set in PodSecurityContext. If set in both SecurityContext and
// PodSecurityContext, the value specified in SecurityContext takes precedence.
RunAsNonRoot *bool `json:"runAsNonRoot,omitempty"`
// The read-only root filesystem allows you to restrict the locations that an application can write
// files to, ensuring the persistent data can only be written to mounts.
ReadOnlyRootFilesystem *bool `json:"readOnlyRootFilesystem,omitempty"`
}
// SELinuxOptions are the labels to be applied to the container.
type SELinuxOptions struct {
// SELinux user label
User string `json:"user,omitempty"`
// SELinux role label
Role string `json:"role,omitempty"`
// SELinux type label
Type string `json:"type,omitempty"`
// SELinux level label.
Level string `json:"level,omitempty"`
}
// RangeAllocation is an opaque API object (not exposed to end users) that can be persisted to record
// the global allocation state of the cluster. The schema of Range and Data is generic, in that Range
// should be a string representation of the inputs to a range (for instance, for IP allocation it
// might be a CIDR) and Data is an opaque blob understood by an allocator which is typically a
// binary range. Consumers should use annotations to record additional information (schema version,
// data encoding hints). A range allocation should *ALWAYS* be recreatable at any time by observation
// of the cluster, thus the object is less strongly typed than most.
type RangeAllocation struct {
unversioned.TypeMeta `json:",inline"`
ObjectMeta `json:"metadata,omitempty"`
// A string representing a unique label for a range of resources, such as a CIDR "10.0.0.0/8" or
// port range "10000-30000". Range is not strongly schema'd here. The Range is expected to define
// a start and end unless there is an implicit end.
Range string `json:"range"`
// A byte array representing the serialized state of a range allocation. Additional clarifiers on
// the type or format of data should be represented with annotations. For IP allocations, this is
// represented as a bit array starting at the base IP of the CIDR in Range, with each bit representing
// a single allocated address (the fifth bit on CIDR 10.0.0.0/8 is 10.0.0.4).
Data []byte `json:"data"`
}
const (
// "default-scheduler" is the name of default scheduler.
DefaultSchedulerName = "default-scheduler"
// RequiredDuringScheduling affinity is not symmetric, but there is an implicit PreferredDuringScheduling affinity rule
// corresponding to every RequiredDuringScheduling affinity rule.
// When the --hard-pod-affinity-weight scheduler flag is not specified,
// DefaultHardPodAffinityWeight defines the weight of the implicit PreferredDuringScheduling affinity rule.
DefaultHardPodAffinitySymmetricWeight int = 1
// When the --failure-domains scheduler flag is not specified,
// DefaultFailureDomains defines the set of label keys used when TopologyKey is empty in PreferredDuringScheduling anti-affinity.
DefaultFailureDomains string = unversioned.LabelHostname + "," + unversioned.LabelZoneFailureDomain + "," + unversioned.LabelZoneRegion
)<|fim▁end|> | |
<|file_name|>battlelog.mohw.inject.js<|end_file_name|><|fim▁begin|>/// <reference path="windowbutton/battlelog.windowbutton.js" />
/// <reference path="playbar/battlelog.bf3.playbar.js" />
/// <reference path="dialog/battlelog.bf3.dialog.js" />
/// <reference path="stats/battlelog.bf3.stats.js" />
var baseurl = 'http://battlelogium.github.io/Battlelogium/Battlelogium.Core/Javascript';
function injectOnce() {
if (document.getElementById('_windowbutton') == null) {
injectScript('_windowbutton', baseurl+'/windowbutton/battlelog.windowbutton.min.js');
}
if (document.getElementById('css_windowbutton') == null) {
injectCSS('css_windowbutton', baseurl + '/windowbutton/battlelog.windowbutton.min.css');
}
if (document.getElementById('css_misc') == null) {
injectCSS('css_misc', baseurl + '/misc/battlelog.misc.min.css');<|fim▁hole|> }
if (document.getElementById('_battlelogplaybar') == null) {
injectScript('_battlelogplaybar', baseurl + '/playbar/battlelog.mohw.playbar.min.js');
}
if (document.getElementById('_battlelogstats') == null) {
injectScript('_battlelogstats', baseurl + '/stats/battlelog.mohw.stats.min.js');
}
if (document.getElementById('_battlelogsettings') == null) {
injectScript('_battlelogsettings', baseurl + '/settings/battlelog.mohw.settings.min.js');
}
}
function runCustomJS() {
try {
battlelogplaybar.fixQuickMatchButtons();
windowbutton.addWindowButtons();
windowbutton.updateMaximizeButton();
battlelogplaybar.fixEAPlaybarButtons();
battlelogplaybar.addPlaybarButton(battlelogplaybar.createPlaybarButton('btnServers', 'SERVERS', 'location.href = "http://battlelog.battlefield.com/mohw/servers/"'));
$("#base-header-secondary-nav>ul>li>a:contains('Buy Battlefield 4')").remove();
} catch (error) {
}
if (window.location.href.match(/\/soldier\//) != null) {
battlelogstats.overview();
}
if (window.location.href == 'http://battlelog.battlefield.com/mohw/profile/edit/') {
battlelogsettings.addSettingsSection();
}
}
function injectScript(id, url) {
var script = document.createElement('script');
script.setAttribute('src', url);
script.setAttribute('id', id);
document.getElementsByTagName('head')[0].appendChild(script);
}
function injectCSS(id, url) {
var script = document.createElement('link');
script.setAttribute('rel', 'stylesheet');
script.setAttribute('type', 'text/css');
script.setAttribute('href', url);
script.setAttribute('id', id);
document.getElementsByTagName('head')[0].appendChild(script);
}
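// Both helpers above append to <head> with a caller-supplied element id; the
// getElementById checks in injectOnce() rely on those ids to keep each script
// and stylesheet from being injected twice.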
injectOnce();<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils.translation import ugettext as _
from .models import AbuseReport, SearchTermRecord
admin.site.register(AbuseReport)
<|fim▁hole|> search_fields = ('term', )
def get_user_full_name(self, obj):
if obj.user is None:
return "(%s)" % _(u"None")
return obj.user.get_full_name()
get_user_full_name.short_description = "user"
admin.site.register(SearchTermRecord, SearchTermAdmin)<|fim▁end|> |
class SearchTermAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'ip_address', 'get_user_full_name', ) |
<|file_name|>test_analyzers.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from socorro.lib import datetimeutil
from socorro.unittest.external.es.base import (
ElasticsearchTestCase,
SuperSearchWithFields,
minimum_es_version,
)
# Uncomment these lines to decrease verbosity of the elasticsearch library
# while running unit tests.<|fim▁hole|># import logging
# logging.getLogger('elasticsearch').setLevel(logging.ERROR)
# logging.getLogger('requests').setLevel(logging.ERROR)
class IntegrationTestAnalyzers(ElasticsearchTestCase):
"""Test the custom analyzers we create in our indices. """
def setUp(self):
super(IntegrationTestAnalyzers, self).setUp()
self.api = SuperSearchWithFields(config=self.config)
self.now = datetimeutil.utc_now()
@minimum_es_version('1.0')
def test_semicolon_keywords(self):
"""Test the analyzer called `semicolon_keywords`.
That analyzer creates tokens (terms) by splitting the input on
semicolons (;) only.
"""
self.index_crash({
'date_processed': self.now,
'app_init_dlls': '/path/to/dll;;foo;C:\\bar\\boo',
})
self.index_crash({
'date_processed': self.now,
'app_init_dlls': '/path/to/dll;D:\\bar\\boo',
})
self.refresh_index()
res = self.api.get(
app_init_dlls='/path/to/dll',
_facets=['app_init_dlls'],
)
assert res['total'] == 2
assert 'app_init_dlls' in res['facets']
facet_terms = [x['term'] for x in res['facets']['app_init_dlls']]
assert '/path/to/dll' in facet_terms
assert 'c:\\bar\\boo' in facet_terms
assert 'foo' in facet_terms<|fim▁end|> | |
<|file_name|>demo.js<|end_file_name|><|fim▁begin|>export async function getNodeSummary(nodeId, nodeType) {
const bioentityUrl = `${biolink}bioentity/${nodeType}/${nodeId}`;
console.log('getNodeSummary bioentityUrl', nodeId, nodeType, bioentityUrl);
const params = {
fetch_objects: true,
unselect_evidence: false,
exclude_automatic_assertions: false,
use_compact_associations: false,
rows: 100,
};
const resp = await axios.get(bioentityUrl, { params });
const responseData = resp.data;
const graphUrl = `${biolink}graph/node/${nodeId}`;
const graphResponse = await axios.get(graphUrl);
const graphResponseData = graphResponse.data;
responseData.edges = graphResponseData.edges;
responseData.nodes = graphResponseData.nodes;<|fim▁hole|> return responseData;
}<|fim▁end|> | |
<|file_name|>eq.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::{MetaItem, Item, Expr};
use codemap::Span;
use ext::base::ExtCtxt;
use ext::build::AstBuilder;
use ext::deriving::generic::*;
use ext::deriving::generic::ty::*;
use parse::token::InternedString;
pub fn expand_deriving_eq(cx: &mut ExtCtxt,
span: Span,
mitem: @MetaItem,
item: @Item,
push: |@Item|) {
// structures are equal if all fields are equal, and non equal, if
// any fields are not equal or if the enum variants are different
fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
cs_and(|cx, span, _, _| cx.expr_bool(span, false),
cx, span, substr)
}
fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> @Expr {
cs_or(|cx, span, _, _| cx.expr_bool(span, true),
cx, span, substr)
}
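    // For illustration (hypothetical type): deriving on
    //     #[deriving(PartialEq)] struct Point { x: int, y: int }
    // makes cs_eq fold the fields into `self.x == other.x && self.y == other.y`,
    // while cs_ne produces the corresponding `||` chain of `!=` comparisons.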
macro_rules! md (
($name:expr, $f:ident) => { {
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec!(borrowed_self()),
ret_ty: Literal(Path::new(vec!("bool"))),
attributes: attrs,
const_nonmatching: true,
combine_substructure: combine_substructure(|a, b, c| {
$f(a, b, c)
})
}
} }
);
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: Path::new(vec!("std", "cmp", "PartialEq")),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
methods: vec!(
md!("eq", cs_eq),
md!("ne", cs_ne)
)
};
trait_def.expand(cx, mitem, item, push)
}<|fim▁end|> | // |
<|file_name|>classes.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class Class(Document):
name = StringField(required = True)
meta = {
"allow_inheritance": False
}<|fim▁end|> | from mongoengine import *
|
<|file_name|>emf_export.py<|end_file_name|><|fim▁begin|># Copyright (C) 2009 Jeremy S. Sanders
# Email: Jeremy Sanders <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
"""A paint engine to produce EMF exports.
Requires: PyQt-x11-gpl-4.6-snapshot-20090906.tar.gz
sip-4.9-snapshot-20090906.tar.gz
pyemf
"""
import struct
import pyemf
from .. import qtall as qt
inch_mm = 25.4
scale = 100
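# All coordinates below are Qt points multiplied by `scale`, so the EMF is
# built at dpi*scale logical units per inch and keeps sub-point precision
# despite pyemf's integer-only API.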
def isStockObject(obj):
"""Is this a stock windows object."""
return (obj & 0x80000000) != 0
class _EXTCREATEPEN(pyemf._EMR._EXTCREATEPEN):
"""Extended pen creation record with custom line style."""
typedef = [
('i','handle',0),
('i','offBmi',0),
('i','cbBmi',0),
('i','offBits',0),
('i','cbBits',0),
('i','style'),
('i','penwidth'),
('i','brushstyle'),
('i','color'),
('i','brushhatch',0),
('i','numstyleentries')
]
def __init__(self, style=pyemf.PS_SOLID, width=1, color=0,
styleentries=[]):
"""Create pen.
styleentries is a list of dash and space lengths."""
pyemf._EMR._EXTCREATEPEN.__init__(self)
self.style = style
self.penwidth = width
self.color = pyemf._normalizeColor(color)
self.brushstyle = 0x0 # solid
if style & pyemf.PS_STYLE_MASK != pyemf.PS_USERSTYLE:
styleentries = []
self.numstyleentries = len(styleentries)
if styleentries:
self.unhandleddata = struct.pack(
"i"*self.numstyleentries, *styleentries)
def hasHandle(self):
return True
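    # A sketch for illustration (hypothetical values): a geometric user-style
    # pen, 1pt wide with a 3pt dash / 1pt gap pattern (all pre-multiplied by
    # scale), would be built roughly as
    #   _EXTCREATEPEN(pyemf.PS_USERSTYLE | pyemf.PS_GEOMETRIC,
    #                 width=100, color=(255, 0, 0),
    #                 styleentries=[300, 100])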
class EMFPaintEngine(qt.QPaintEngine):
"""Custom EMF paint engine."""
def __init__(self, width_in, height_in, dpi=75):
qt.QPaintEngine.__init__(
self,
qt.QPaintEngine.Antialiasing |
qt.QPaintEngine.PainterPaths |
qt.QPaintEngine.PrimitiveTransform |
qt.QPaintEngine.PaintOutsidePaintEvent |
qt.QPaintEngine.PatternBrush
)
self.width = width_in
self.height = height_in
self.dpi = dpi
def begin(self, paintdevice):
self.emf = pyemf.EMF(self.width, self.height, int(self.dpi*scale))
self.pen = self.emf.GetStockObject(pyemf.BLACK_PEN)
self.pencolor = (0, 0, 0)
self.brush = self.emf.GetStockObject(pyemf.NULL_BRUSH)
self.paintdevice = paintdevice
return True
def drawLines(self, lines):
"""Draw lines to emf output."""
for line in lines:
self.emf.Polyline(
[ (int(line.x1()*scale), int(line.y1()*scale)),
(int(line.x2()*scale), int(line.y2()*scale)) ] )
def drawPolygon(self, points, mode):
"""Draw polygon on output."""
# print "Polygon"
pts = [(int(p.x()*scale), int(p.y()*scale)) for p in points]
if mode == qt.QPaintEngine.PolylineMode:
self.emf.Polyline(pts)
else:
            # map Qt's fill rule onto the EMF equivalent for this polygon
            self.emf.SetPolyFillMode({
                qt.QPaintEngine.WindingMode: pyemf.WINDING,
                qt.QPaintEngine.OddEvenMode: pyemf.ALTERNATE,
                qt.QPaintEngine.ConvexMode: pyemf.WINDING,
            }[mode])
self.emf.Polygon(pts)
def drawEllipse(self, rect):
"""Draw an ellipse."""
# print "ellipse"
args = (
int(rect.left()*scale), int(rect.top()*scale),
int(rect.right()*scale), int(rect.bottom()*scale),
int(rect.left()*scale), int(rect.top()*scale),
int(rect.left()*scale), int(rect.top()*scale),
)
self.emf.Pie(*args)
self.emf.Arc(*args)
def drawPoints(self, points):
"""Draw points."""
# print "points"
for pt in points:
x, y = (pt.x()-0.5)*scale, (pt.y()-0.5)*scale
self.emf.Pie(
int(x), int(y),
int((pt.x()+0.5)*scale), int((pt.y()+0.5)*scale),
int(x), int(y), int(x), int(y) )
def drawPixmap(self, r, pixmap, sr):
"""Draw pixmap to display."""
# convert pixmap to BMP format
bytearr = qt.QByteArray()
buf = qt.QBuffer(bytearr)
buf.open(qt.QIODevice.WriteOnly)
pixmap.save(buf, "BMP")
# chop off bmp header to get DIB
bmp = bytes(buf.data())
dib = bmp[0xe:]
hdrsize, = struct.unpack('<i', bmp[0xe:0x12])
dataindex, = struct.unpack('<i', bmp[0xa:0xe])
datasize, = struct.unpack('<i', bmp[0x22:0x26])
epix = pyemf._EMR._STRETCHDIBITS()
epix.rclBounds_left = int(r.left()*scale)
epix.rclBounds_top = int(r.top()*scale)
epix.rclBounds_right = int(r.right()*scale)
epix.rclBounds_bottom = int(r.bottom()*scale)
epix.xDest = int(r.left()*scale)
epix.yDest = int(r.top()*scale)
epix.cxDest = int(r.width()*scale)
epix.cyDest = int(r.height()*scale)
epix.xSrc = int(sr.left())
epix.ySrc = int(sr.top())
epix.cxSrc = int(sr.width())
epix.cySrc = int(sr.height())
epix.dwRop = 0xcc0020 # SRCCOPY
offset = epix.format.minstructsize + 8
epix.offBmiSrc = offset
epix.cbBmiSrc = hdrsize
epix.offBitsSrc = offset + dataindex - 0xe
epix.cbBitsSrc = datasize<|fim▁hole|> epix.unhandleddata = dib
self.emf._append(epix)
def _createPath(self, path):
"""Convert qt path to emf path"""
self.emf.BeginPath()
count = path.elementCount()
i = 0
#print "Start path"
while i < count:
e = path.elementAt(i)
if e.type == qt.QPainterPath.MoveToElement:
self.emf.MoveTo( int(e.x*scale), int(e.y*scale) )
#print "M", e.x*scale, e.y*scale
elif e.type == qt.QPainterPath.LineToElement:
self.emf.LineTo( int(e.x*scale), int(e.y*scale) )
#print "L", e.x*scale, e.y*scale
elif e.type == qt.QPainterPath.CurveToElement:
e1 = path.elementAt(i+1)
e2 = path.elementAt(i+2)
params = (
( int(e.x*scale), int(e.y*scale) ),
( int(e1.x*scale), int(e1.y*scale) ),
( int(e2.x*scale), int(e2.y*scale) ),
)
self.emf.PolyBezierTo(params)
#print "C", params
i += 2
else:
assert False
i += 1
ef = path.elementAt(0)
el = path.elementAt(count-1)
if ef.x == el.x and ef.y == el.y:
self.emf.CloseFigure()
#print "closing"
self.emf.EndPath()
def drawPath(self, path):
"""Draw a path on the output."""
# print "path"
self._createPath(path)
self.emf.StrokeAndFillPath()
def drawTextItem(self, pt, textitem):
"""Convert text to a path and draw it.
"""
# print "text", pt, textitem.text()
path = qt.QPainterPath()
path.addText(pt, textitem.font(), textitem.text())
fill = self.emf.CreateSolidBrush(self.pencolor)
self.emf.SelectObject(fill)
self._createPath(path)
self.emf.FillPath()
self.emf.SelectObject(self.brush)
self.emf.DeleteObject(fill)
def end(self):
return True
def saveFile(self, filename):
self.emf.save(filename)
def _updatePen(self, pen):
"""Update the pen to the currently selected one."""
# line style
style = {
qt.Qt.NoPen: pyemf.PS_NULL,
qt.Qt.SolidLine: pyemf.PS_SOLID,
qt.Qt.DashLine: pyemf.PS_DASH,
qt.Qt.DotLine: pyemf.PS_DOT,
qt.Qt.DashDotLine: pyemf.PS_DASHDOT,
qt.Qt.DashDotDotLine: pyemf.PS_DASHDOTDOT,
qt.Qt.CustomDashLine: pyemf.PS_USERSTYLE,
}[pen.style()]
if style != pyemf.PS_NULL:
# set cap style
style |= {
qt.Qt.FlatCap: pyemf.PS_ENDCAP_FLAT,
qt.Qt.SquareCap: pyemf.PS_ENDCAP_SQUARE,
qt.Qt.RoundCap: pyemf.PS_ENDCAP_ROUND,
}[pen.capStyle()]
# set join style
style |= {
qt.Qt.MiterJoin: pyemf.PS_JOIN_MITER,
qt.Qt.BevelJoin: pyemf.PS_JOIN_BEVEL,
qt.Qt.RoundJoin: pyemf.PS_JOIN_ROUND,
qt.Qt.SvgMiterJoin: pyemf.PS_JOIN_MITER,
}[pen.joinStyle()]
# use proper widths of lines
style |= pyemf.PS_GEOMETRIC
width = int(pen.widthF()*scale)
qc = pen.color()
color = (qc.red(), qc.green(), qc.blue())
self.pencolor = color
if pen.style() == qt.Qt.CustomDashLine:
# make an extended pen if we need a custom dash pattern
dash = [int(pen.widthF()*scale*f) for f in pen.dashPattern()]
newpen = self.emf._appendHandle( _EXTCREATEPEN(
style, width=width, color=color, styleentries=dash))
else:
# use a standard create pen
newpen = self.emf.CreatePen(style, width, color)
self.emf.SelectObject(newpen)
# delete old pen if it is not a stock object
if not isStockObject(self.pen):
self.emf.DeleteObject(self.pen)
self.pen = newpen
def _updateBrush(self, brush):
"""Update to selected brush."""
style = brush.style()
qc = brush.color()
color = (qc.red(), qc.green(), qc.blue())
# print "brush", color
if style == qt.Qt.SolidPattern:
newbrush = self.emf.CreateSolidBrush(color)
elif style == qt.Qt.NoBrush:
newbrush = self.emf.GetStockObject(pyemf.NULL_BRUSH)
else:
try:
hatch = {
qt.Qt.HorPattern: pyemf.HS_HORIZONTAL,
qt.Qt.VerPattern: pyemf.HS_VERTICAL,
qt.Qt.CrossPattern: pyemf.HS_CROSS,
qt.Qt.BDiagPattern: pyemf.HS_BDIAGONAL,
qt.Qt.FDiagPattern: pyemf.HS_FDIAGONAL,
qt.Qt.DiagCrossPattern: pyemf.HS_DIAGCROSS
}[brush.style()]
except KeyError:
newbrush = self.emf.CreateSolidBrush(color)
else:
newbrush = self.emf.CreateHatchBrush(hatch, color)
self.emf.SelectObject(newbrush)
if not isStockObject(self.brush):
self.emf.DeleteObject(self.brush)
self.brush = newbrush
def _updateClipPath(self, path, operation):
"""Update clipping path."""
# print "clip"
if operation != qt.Qt.NoClip:
self._createPath(path)
clipmode = {
qt.Qt.ReplaceClip: pyemf.RGN_COPY,
qt.Qt.IntersectClip: pyemf.RGN_AND,
}[operation]
else:
            # is this the only way to get rid of clipping?
self.emf.BeginPath()
self.emf.MoveTo(0,0)
w = int(self.width*self.dpi*scale)
h = int(self.height*self.dpi*scale)
self.emf.LineTo(w, 0)
self.emf.LineTo(w, h)
self.emf.LineTo(0, h)
self.emf.CloseFigure()
self.emf.EndPath()
clipmode = pyemf.RGN_COPY
self.emf.SelectClipPath(mode=clipmode)
def _updateTransform(self, m):
"""Update transformation."""
self.emf.SetWorldTransform(
m.m11(), m.m12(),
m.m21(), m.m22(),
m.dx()*scale, m.dy()*scale)
def updateState(self, state):
"""Examine what has changed in state and call apropriate function."""
ss = state.state()
if ss & qt.QPaintEngine.DirtyPen:
self._updatePen(state.pen())
if ss & qt.QPaintEngine.DirtyBrush:
self._updateBrush(state.brush())
if ss & qt.QPaintEngine.DirtyTransform:
self._updateTransform(state.transform())
if ss & qt.QPaintEngine.DirtyClipPath:
self._updateClipPath(state.clipPath(), state.clipOperation())
if ss & qt.QPaintEngine.DirtyClipRegion:
path = qt.QPainterPath()
path.addRegion(state.clipRegion())
self._updateClipPath(path, state.clipOperation())
def type(self):
return qt.QPaintEngine.PostScript
class EMFPaintDevice(qt.QPaintDevice):
"""Paint device for EMF paint engine."""
def __init__(self, width_in, height_in, dpi=75):
qt.QPaintDevice.__init__(self)
self.engine = EMFPaintEngine(width_in, height_in, dpi=dpi)
def paintEngine(self):
return self.engine
def metric(self, m):
"""Return the metrics of the painter."""
if m == qt.QPaintDevice.PdmWidth:
return int(self.engine.width * self.engine.dpi)
elif m == qt.QPaintDevice.PdmHeight:
return int(self.engine.height * self.engine.dpi)
elif m == qt.QPaintDevice.PdmWidthMM:
return int(self.engine.width * inch_mm)
elif m == qt.QPaintDevice.PdmHeightMM:
return int(self.engine.height * inch_mm)
elif m == qt.QPaintDevice.PdmNumColors:
return 2147483647
elif m == qt.QPaintDevice.PdmDepth:
return 24
elif m == qt.QPaintDevice.PdmDpiX:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmDpiY:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmPhysicalDpiX:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmPhysicalDpiY:
return int(self.engine.dpi)
elif m == qt.QPaintDevice.PdmDevicePixelRatio:
return 1
# Qt >= 5.6
elif m == getattr(qt.QPaintDevice, 'PdmDevicePixelRatioScaled', -1):
return 1
else:
# fall back
return qt.QPaintDevice.metric(self, m)<|fim▁end|> | epix.iUsageSrc = 0x0 # DIB_RGB_COLORS
|
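For orientation, the engine above follows Qt's custom paint-device pattern: Qt drives `updateState` and the draw methods, and the engine translates each call into EMF records. A minimal usage sketch (assuming the same `qt` wrapper module used throughout the file):

    dev = EMFPaintDevice(4.0, 3.0, dpi=90)   # page size in inches
    painter = qt.QPainter(dev)
    painter.drawLine(qt.QPointF(0, 0), qt.QPointF(100, 100))
    painter.end()
    dev.paintEngine().saveFile('out.emf')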
<|file_name|>test_copy.py<|end_file_name|><|fim▁begin|>import filecmp
from transfert import Resource
from transfert.actions import copy
def estimate_nb_cycles(len_data, chunk_size):
return (len_data // chunk_size) + [0, 1][(len_data % chunk_size) > 0]
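# The `[0, 1][cond]` indexing above is an old-style conditional expression: it
# adds one extra cycle whenever a partial trailing chunk remains. For example,
# 9 bytes read in 2-byte chunks take 9 // 2 + 1 == 5 cycles, while 8 bytes
# take exactly 8 // 2 == 4.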
def test_simple_local_copy(tmpdir):
src = tmpdir.join('alpha')
dst = tmpdir.join('beta')
<|fim▁hole|> assert src.check()
assert not dst.check()
copy(Resource('file://' + src.strpath),
Resource('file://' + dst.strpath))
assert src.check()
assert dst.check()
assert filecmp.cmp(src.strpath, dst.strpath)
def test_simple_local_copy_with_callback(tmpdir):
def wrapper(size):
nonlocal count
count += 1
count = 0
src = tmpdir.join('alpha')
dst = tmpdir.join('beta')
data = b'some data'
src.write(data)
chunk_size = 1
assert src.check()
assert not dst.check()
copy(Resource('file://' + src.strpath),
Resource('file://' + dst.strpath,),
size=chunk_size,
callback_freq=1,
callback=wrapper)
assert src.check()
assert dst.check()
assert filecmp.cmp(src.strpath, dst.strpath)
assert count == estimate_nb_cycles(len(data), chunk_size)
dst.remove()
count = 0
chunk_size = 2
assert src.check()
assert not dst.check()
copy(Resource('file://' + src.strpath),
Resource('file://' + dst.strpath,),
size=chunk_size,
callback_freq=1,
callback=wrapper)
assert src.check()
assert dst.check()
assert filecmp.cmp(src.strpath, dst.strpath)
assert count == estimate_nb_cycles(len(data), chunk_size)<|fim▁end|> | src.write('some data') |
<|file_name|>sale_report.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import fields, osv
class sale_report(osv.osv):
_name = "sale.report"
_description = "Sales Orders Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.datetime('Date Order', readonly=True),
'date_confirm': fields.date('Date Confirm', readonly=True),
'product_id': fields.many2one('product.product', 'Product', readonly=True),
'product_uom': fields.many2one('product.uom', 'Unit of Measure', readonly=True),
'product_uom_qty': fields.float('# of Qty', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Price', readonly=True),
'delay': fields.float('Commitment Delay', digits=(16,2), readonly=True),
'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
'nbr': fields.integer('# of Lines', readonly=True),
'state': fields.selection([
('draft', 'Quotation'),
('waiting_date', 'Waiting Schedule'),
('manual', 'Manual In Progress'),
('progress', 'In Progress'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
('cancel', 'Cancelled')
], 'Order Status', readonly=True),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', readonly=True),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
}
_order = 'date desc'
def _select(self):
select_str = """
SELECT min(l.id) as id,
l.product_id as product_id,
t.uom_id as product_uom,
sum(l.product_uom_qty / u.factor * u2.factor) as product_uom_qty,
sum(l.product_uom_qty * l.price_unit * (100.0-l.discount) / 100.0) as price_total,
count(*) as nbr,
s.date_order as date,
s.date_confirm as date_confirm,
s.partner_id as partner_id,
s.user_id as user_id,
s.company_id as company_id,
extract(epoch from avg(date_trunc('day',s.date_confirm)-date_trunc('day',s.create_date)))/(24*60*60)::decimal(16,2) as delay,
s.state,
t.categ_id as categ_id,
s.pricelist_id as pricelist_id,
s.project_id as analytic_account_id,
s.section_id as section_id
"""
return select_str
def _from(self):
from_str = """
sale_order_line l
join sale_order s on (l.order_id=s.id)
left join product_product p on (l.product_id=p.id)<|fim▁hole|> left join product_template t on (p.product_tmpl_id=t.id)
left join product_uom u on (u.id=l.product_uom)
left join product_uom u2 on (u2.id=t.uom_id)
"""
return from_str
def _group_by(self):
group_by_str = """
GROUP BY l.product_id,
l.order_id,
t.uom_id,
t.categ_id,
s.date_order,
s.date_confirm,
s.partner_id,
s.user_id,
s.company_id,
s.state,
s.pricelist_id,
s.project_id,
s.section_id
"""
return group_by_str
def init(self, cr):
# self._table = sale_report
tools.drop_view_if_exists(cr, self._table)
cr.execute("""CREATE or REPLACE VIEW %s as (
%s
FROM ( %s )
%s
)""" % (self._table, self._select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | |
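This is the usual Odoo pattern for SQL-backed statistics models: the model is a read-only database view rather than a table, assembled from the fragments above. A standalone sketch of the same composition (`sale_report` is the table name Odoo derives from `_name = "sale.report"`):

    def build_view_sql(table, select, from_, group_by):
        # Mirrors init() above: splice the three fragments into one view.
        return """CREATE or REPLACE VIEW %s as (
            %s
            FROM ( %s )
            %s
        )""" % (table, select, from_, group_by)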
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import { Increment, Decrement } from '../actions'
export default (state = 0, action) => {
switch (action.constructor) {
case Increment:
return state + 1
case Decrement:
return state - 1
default:<|fim▁hole|>}<|fim▁end|> | return state
} |
<|file_name|>topBarSection.ts<|end_file_name|><|fim▁begin|>module ngFoundation.directives {
//@NgDirective('topBarSection')
class TopBarSectionDirective implements ng.IDirective {
template = '<section class="top-bar-section" ng-transclude></section>';
restrict = "E";
transclude = true;
replace = true;
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>verify.py<|end_file_name|><|fim▁begin|>import calendar
import json
from datetime import datetime
from time import gmtime, time
from urlparse import parse_qsl, urlparse
from wsgiref.handlers import format_date_time
import jwt
from browserid.errors import ExpiredSignatureError
from django_statsd.clients import statsd
from receipts import certs
from lib.cef_loggers import receipt_cef
from lib.crypto.receipt import sign
from lib.utils import static_url
from services.utils import settings
from utils import (CONTRIB_CHARGEBACK, CONTRIB_NO_CHARGE, CONTRIB_PURCHASE,
CONTRIB_REFUND, log_configure, log_exception, log_info,
mypool)
# Go configure the log.
log_configure()
# This has to be imported after the settings (utils).
import receipts # NOQA, used for patching in the tests
status_codes = {
200: '200 OK',
405: '405 Method Not Allowed',
500: '500 Internal Server Error',
}
class VerificationError(Exception):
pass
class InvalidReceipt(Exception):
"""
InvalidReceipt takes a message, which is then displayed back to the app so
they can understand the failure.
"""
pass
class RefundedReceipt(Exception):
pass
class Verify:
def __init__(self, receipt, environ):
self.receipt = receipt
self.environ = environ
# This is so the unit tests can override the connection.
self.conn, self.cursor = None, None
def check_full(self):
"""
        This is the default check that verify will use; it runs
        the entire stack of checks.
"""
receipt_domain = urlparse(static_url('WEBAPPS_RECEIPT_URL')).netloc
try:
self.decoded = self.decode()
self.check_type('purchase-receipt')
self.check_url(receipt_domain)
self.check_purchase()
except InvalidReceipt, err:
return self.invalid(str(err))
except RefundedReceipt:
return self.refund()
return self.ok_or_expired()
def check_without_purchase(self):
"""
        This is what the developer and reviewer receipts do: we aren't
expecting a purchase, but require a specific type and install.
"""
try:
self.decoded = self.decode()
self.check_type('developer-receipt', 'reviewer-receipt')
self.check_url(settings.DOMAIN)
except InvalidReceipt, err:
return self.invalid(str(err))
return self.ok_or_expired()
def check_without_db(self, status):
"""
This is what test receipts do, no purchase or install check.
In this case the return is custom to the caller.
"""
assert status in ['ok', 'expired', 'invalid', 'refunded']
try:
self.decoded = self.decode()
self.check_type('test-receipt')
self.check_url(settings.DOMAIN)
except InvalidReceipt, err:
return self.invalid(str(err))
return getattr(self, status)()
def decode(self):
"""
Verifies that the receipt can be decoded and that the initial
contents of the receipt are correct.
        If it's invalid, then just return invalid rather than give out any
information.
"""
try:
receipt = decode_receipt(self.receipt)
except:
log_exception({'receipt': '%s...' % self.receipt[:10],
'app': self.get_app_id(raise_exception=False)})
log_info('Error decoding receipt')
raise InvalidReceipt('ERROR_DECODING')
try:
assert receipt['user']['type'] == 'directed-identifier'
except (AssertionError, KeyError):
log_info('No directed-identifier supplied')
raise InvalidReceipt('NO_DIRECTED_IDENTIFIER')
return receipt
def check_type(self, *types):
"""
Verifies that the type of receipt is what we expect.
"""
if self.decoded.get('typ', '') not in types:
log_info('Receipt type not in %s' % ','.join(types))
raise InvalidReceipt('WRONG_TYPE')
def check_url(self, domain):
"""
Verifies that the URL of the verification is what we expect.
:param domain: the domain you expect the receipt to be verified at,
note that "real" receipts are verified at a different domain
from the main marketplace domain.
"""
path = self.environ['PATH_INFO']
parsed = urlparse(self.decoded.get('verify', ''))
if parsed.netloc != domain:
log_info('Receipt had invalid domain')
raise InvalidReceipt('WRONG_DOMAIN')
if parsed.path != path:
log_info('Receipt had the wrong path')
raise InvalidReceipt('WRONG_PATH')
def get_user(self):
"""
Attempt to retrieve the user information from the receipt.
"""
try:
return self.decoded['user']['value']
except KeyError:
# If somehow we got a valid receipt without a uuid
# that's a problem. Log here.
log_info('No user in receipt')
raise InvalidReceipt('NO_USER')
def get_storedata(self):
"""
Attempt to retrieve the storedata information from the receipt.
"""
try:
storedata = self.decoded['product']['storedata']
return dict(parse_qsl(storedata))
except Exception, e:
log_info('Invalid store data: {err}'.format(err=e))
raise InvalidReceipt('WRONG_STOREDATA')
def get_app_id(self, raise_exception=True):
"""
Attempt to retrieve the app id from the storedata in the receipt.
"""
try:
return int(self.get_storedata()['id'])
except Exception, e:
if raise_exception:
# There was some value for storedata but it was invalid.
log_info('Invalid store data for app id: {err}'.format(
err=e))
raise InvalidReceipt('WRONG_STOREDATA')
def get_contribution_id(self):
"""
Attempt to retrieve the contribution id
from the storedata in the receipt.
"""
try:
return int(self.get_storedata()['contrib'])
except Exception, e:
# There was some value for storedata but it was invalid.
log_info('Invalid store data for contrib id: {err}'.format(
err=e))
raise InvalidReceipt('WRONG_STOREDATA')
def get_inapp_id(self):
"""
Attempt to retrieve the inapp id
from the storedata in the receipt.
"""
return self.get_storedata()['inapp_id']
def setup_db(self):
"""
Establish a connection to the database.
All database calls are done at a low level and avoid the
Django ORM.
"""
if not self.cursor:
self.conn = mypool.connect()
self.cursor = self.conn.cursor()
def check_purchase(self):
"""
Verifies that the app or inapp has been purchased.
"""
storedata = self.get_storedata()
if 'contrib' in storedata:
self.check_purchase_inapp()
else:
self.check_purchase_app()
def check_purchase_inapp(self):
"""
Verifies that the inapp has been purchased.
"""
self.setup_db()
sql = """SELECT i.guid, c.type FROM stats_contributions c
JOIN inapp_products i ON i.id=c.inapp_product_id
WHERE c.id = %(contribution_id)s LIMIT 1;"""
self.cursor.execute(
sql,
{'contribution_id': self.get_contribution_id()}
)
result = self.cursor.fetchone()
if not result:
log_info('Invalid in-app receipt, no purchase')
raise InvalidReceipt('NO_PURCHASE')
contribution_inapp_id, purchase_type = result
self.check_purchase_type(purchase_type)
self.check_inapp_product(contribution_inapp_id)
def check_inapp_product(self, contribution_inapp_id):
if contribution_inapp_id != self.get_inapp_id():
log_info('Invalid receipt, inapp_id does not match')
raise InvalidReceipt('NO_PURCHASE')
def check_purchase_app(self):
"""
Verifies that the app has been purchased by the user.
"""
self.setup_db()
sql = """SELECT type FROM addon_purchase
WHERE addon_id = %(app_id)s
AND uuid = %(uuid)s LIMIT 1;"""
self.cursor.execute(sql, {'app_id': self.get_app_id(),
'uuid': self.get_user()})
result = self.cursor.fetchone()
if not result:
log_info('Invalid app receipt, no purchase')
raise InvalidReceipt('NO_PURCHASE')
self.check_purchase_type(result[0])
def check_purchase_type(self, purchase_type):
"""
Verifies that the purchase type is of a valid type.
"""
if purchase_type in (CONTRIB_REFUND, CONTRIB_CHARGEBACK):
log_info('Valid receipt, but refunded')
raise RefundedReceipt
elif purchase_type in (CONTRIB_PURCHASE, CONTRIB_NO_CHARGE):
log_info('Valid receipt')
return
else:
log_info('Valid receipt, but invalid contribution')
raise InvalidReceipt('WRONG_PURCHASE')
def invalid(self, reason=''):
receipt_cef.log(
self.environ,
self.get_app_id(raise_exception=False),
'verify',
'Invalid receipt'<|fim▁hole|> )
return {'status': 'invalid', 'reason': reason}
def ok_or_expired(self):
# This receipt is ok now let's check it's expiry.
# If it's expired, we'll have to return a new receipt
try:
expire = int(self.decoded.get('exp', 0))
except ValueError:
log_info('Error with expiry in the receipt')
return self.expired()
now = calendar.timegm(gmtime()) + 10 # For any clock skew.
if now > expire:
log_info('This receipt has expired: %s UTC < %s UTC'
% (datetime.utcfromtimestamp(expire),
datetime.utcfromtimestamp(now)))
return self.expired()
return self.ok()
def ok(self):
return {'status': 'ok'}
def refund(self):
receipt_cef.log(
self.environ,
self.get_app_id(raise_exception=False),
'verify',
'Refunded receipt'
)
return {'status': 'refunded'}
def expired(self):
receipt_cef.log(
self.environ,
self.get_app_id(raise_exception=False),
'verify',
'Expired receipt'
)
if settings.WEBAPPS_RECEIPT_EXPIRED_SEND:
self.decoded['exp'] = (calendar.timegm(gmtime()) +
settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS)
# Log that we are signing a new receipt as well.
receipt_cef.log(
self.environ,
self.get_app_id(raise_exception=False),
'sign',
'Expired signing request'
)
return {'status': 'expired',
'receipt': sign(self.decoded)}
return {'status': 'expired'}
def get_headers(length):
return [('Access-Control-Allow-Origin', '*'),
('Access-Control-Allow-Methods', 'POST'),
('Content-Type', 'application/json'),
('Content-Length', str(length)),
('Cache-Control', 'no-cache'),
('Last-Modified', format_date_time(time()))]
def decode_receipt(receipt):
"""
Cracks the receipt using the private key. This will probably change
to using the cert at some point, especially when we get the HSM.
"""
with statsd.timer('services.decode'):
if settings.SIGNING_SERVER_ACTIVE:
verifier = certs.ReceiptVerifier(valid_issuers=
settings.SIGNING_VALID_ISSUERS)
try:
result = verifier.verify(receipt)
except ExpiredSignatureError:
# Until we can do something meaningful with this, just ignore.
return jwt.decode(receipt.split('~')[1], verify=False)
if not result:
raise VerificationError()
return jwt.decode(receipt.split('~')[1], verify=False)
else:
key = jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
raw = jwt.decode(receipt, key)
return raw
def status_check(environ):
output = ''
# Check we can read from the users_install table, should be nice and
# fast. Anything that fails here, connecting to db, accessing table
# will be an error we need to know about.
if not settings.SIGNING_SERVER_ACTIVE:
return 500, 'SIGNING_SERVER_ACTIVE is not set'
try:
conn = mypool.connect()
cursor = conn.cursor()
cursor.execute('SELECT id FROM users_install ORDER BY id DESC LIMIT 1')
except Exception, err:
return 500, str(err)
return 200, output
def receipt_check(environ):
output = ''
with statsd.timer('services.verify'):
data = environ['wsgi.input'].read()
try:
verify = Verify(data, environ)
return 200, json.dumps(verify.check_full())
except:
log_exception('<none>')
return 500, ''
return output
def application(environ, start_response):
body = ''
path = environ.get('PATH_INFO', '')
if path == '/services/status/':
status, body = status_check(environ)
else:
# Only allow POST through as per spec.
if environ.get('REQUEST_METHOD') != 'POST':
status = 405
else:
status, body = receipt_check(environ)
start_response(status_codes[status], get_headers(len(body)))
return [body]<|fim▁end|> | |
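Since `application` is a plain WSGI callable, the verifier can be exercised without a server. A hedged sketch (the receipt bytes and path are placeholders, not a real signed receipt):

    import io

    def start_response(status, headers):
        pass  # a real WSGI server records the status line and headers

    environ = {
        'REQUEST_METHOD': 'POST',
        'PATH_INFO': '/verify/123',
        'wsgi.input': io.BytesIO(b'not~a~real~receipt'),
    }
    body = application(environ, start_response)[0]
    # -> a JSON string such as '{"status": "invalid", "reason": "ERROR_DECODING"}'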
<|file_name|>lookups.py<|end_file_name|><|fim▁begin|>bg_image_modes = ('stretch', 'tile', 'center', 'right', 'left')
transitions_jquery_ui = (
'blind', 'bounce', 'clip', 'drop', 'explode', 'fade', 'fold',
'highlight', 'puff', 'pulsate', 'scale', 'shake', 'size', 'slide'
)
transitions_animatecss = (
'bounceIn',
'bounceInDown',
'bounceInLeft',
'bounceInRight',
'bounceInUp',
'fadeIn',
'fadeInDown',
'fadeInDownBig',
'fadeInLeft',
'fadeInLeftBig',
'fadeInRight',
'fadeInRightBig',
'fadeInUp',
'fadeInUpBig',
'flipInX',
'flipInY',
'lightSpeedIn',
'rotateIn',
'rotateInDownLeft',<|fim▁hole|> 'rollIn',
'zoomIn',
'zoomInDown',
'zoomInLeft',
'zoomInRight',
'zoomInUp',
'slideInDown',
'slideInLeft',
'slideInRight',
'slideInUp',
)<|fim▁end|> | 'rotateInDownRight',
'rotateInUpLeft',
'rotateInUpRight', |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import collections
import difflib
import inspect
import logging
import os.path
import warnings
import os
import importlib
import cherrypy
import yaml
from yaml import load
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
json = None
for pkg in ['ujson', 'yajl', 'simplejson', 'cjson', 'json']:
try:
json = importlib.import_module(pkg)
except:
pass
else:
break
from blueberrypy.email import Mailer
from blueberrypy.exc import (BlueberryPyNotConfiguredError,
BlueberryPyConfigurationError)
logger = logging.getLogger(__name__)
class BlueberryPyConfiguration(object):
class _YAMLLoader(Loader):
"""YAML loader supporting additional tags."""
def __init__(self, *args, **kwargs):
super(BlueberryPyConfiguration._YAMLLoader, self).__init__(*args, **kwargs)
self._setup_loader()
def register_tag(self, tag, callback):
yaml.add_constructor(tag, callback, Loader=self.__class__)
def _tag_env_var(self, loader, node):
env_var_name = loader.construct_scalar(node)
return os.getenv(env_var_name)
def _tag_first_of(self, loader, node):
seq = loader.construct_sequence(node)
for v in seq:
if v is not None:
return v
raise yaml.YAMLError('At least one of values passed to !FirstOf tag must be not None')
def _setup_loader(self):
self.register_tag('!EnvVar', self._tag_env_var)
self.register_tag('!FirstOf', self._tag_first_of)
def __init__(self, config_dir=None, app_config=None, logging_config=None,
webassets_env=None, environment=None,
env_var_name='BLUEBERRYPY_CONFIG'):
"""Loads BlueberryPy configuration from `config_dir` if supplied.
If `app_config` or `logging_config` or `webassets_env` are given, they
will be used instead of the configuration files found from `config_dir`.
If `environment` is given, it must be an existing CherryPy environment.
If `environment` is `production`, and `config_dir` is given, the `prod`
subdirectory will be searched for configuration files, otherwise the
        `dev` subdirectory will be searched.
        If `env_var_name` is given, it must be an existing environment
        variable; its value will override values from the YAML config.
Upon initialization of this configuration object, all the configuration
will be validated for sanity and either BlueberryPyConfigurationError or
BlueberryPyNotConfiguredError will be thrown if insane. For less severe
configuration insanity cases, a warning will be emitted instead.
:arg config_dir: a path, str
:arg app_config: a CherryPy config, dict
:arg logging_config: a logging config, dict
:arg webassets_env: a webassets environment, webassets.Environment
:arg environment: a CherryPy configuration environment, str
:arg env_var_name: an environment variable name for configuration, str
"""
ENV_CONFIG = self.__class__._load_env_var(env_var_name)
CWD = os.getcwdu() if getattr(os, "getcwdu", None) else os.getcwd()
if ENV_CONFIG.get('global', {}).get('CWD') and \
os.path.isdir(
os.path.join(ENV_CONFIG['global']['CWD'], 'src')):
CWD = ENV_CONFIG['global']['CWD']
if config_dir is None:
self.config_dir = config_dir = os.path.join(CWD, "config")
else:
self.config_dir = config_dir = os.path.abspath(config_dir)
if environment == "production":
self.config_dir = config_dir = os.path.join(config_dir, "prod")
elif environment == "test_suite" and os.path.exists(os.path.join(config_dir, "test")):
self.config_dir = config_dir = os.path.join(config_dir, "test")
else:
self.config_dir = config_dir = os.path.join(config_dir, "dev")
config_file_paths = {}
app_yml_path = os.path.join(config_dir, "app.yml")
logging_yml_path = os.path.join(config_dir, "logging.yml")
bundles_yml_path = os.path.join(config_dir, "bundles.yml")
# A local-only config, which overrides the app.yml values
app_override_yml_path = os.path.join(config_dir, "app.override.yml")
if os.path.exists(app_yml_path):
config_file_paths["app_yml"] = app_yml_path
if os.path.exists(logging_yml_path):
config_file_paths["logging_yml"] = logging_yml_path
if os.path.exists(bundles_yml_path):
config_file_paths["bundles_yml"] = bundles_yml_path
if os.path.exists(app_override_yml_path):
config_file_paths["app_override_yml"] = app_override_yml_path
self._config_file_paths = config_file_paths
if "app_yml" in config_file_paths and not app_config:
with open(config_file_paths["app_yml"]) as app_yml:
self._app_config = load(app_yml, self._YAMLLoader)
# If the overrides file exists, override the app config values
# with ones from app.override.yml
if "app_override_yml" in config_file_paths:
app_override_config = {}
with open(config_file_paths["app_override_yml"]) as app_override_yml:
app_override_config = load(app_override_yml, self._YAMLLoader)
self._app_config = self.__class__.merge_dicts(
self._app_config,
app_override_config
)
if "logging_yml" in config_file_paths and not logging_config:
with open(config_file_paths["logging_yml"]) as logging_yml:<|fim▁hole|> from webassets.loaders import YAMLLoader
self._webassets_env = YAMLLoader(config_file_paths["bundles_yml"]).load_environment()
if app_config:
self._app_config = dict(app_config)
try:
# Merge JSON from environment variable
self._app_config = self.__class__.merge_dicts(self._app_config, ENV_CONFIG)
except AttributeError:
if ENV_CONFIG: # not an empty dict
self._app_config = ENV_CONFIG
# Don't re-raise exception, self.validate() will do this later
if logging_config:
self._logging_config = dict(logging_config)
if webassets_env is not None:
self._webassets_env = webassets_env
self.validate() # Checks that all attributes are pre-populated
# Convert relative paths to absolute where needed
# self.validate() will fail if there's no app_config['controllers']
for _ in self._app_config['controllers']:
section = self._app_config['controllers'][_]
for r in section:
if isinstance(section[r], dict):
for __ in ['tools.staticdir.root',
'tools.staticfile.root']:
pth = section[r].get(__)
if pth is not None and not pth.startswith('/'):
self._app_config['controllers'][_][r][__] = \
os.path.join(CWD, pth)
# Convert relative paths of logs in handlers
# self.validate() will fail if there's no self._logging_config
for handler_name, handler_config in (getattr(self, '_logging_config', {}) or {}).get('handlers', {}).viewitems():
pth = handler_config.get('filename')
if pth is not None and not pth.startswith('/'):
self._logging_config['handlers'][handler_name]['filename'] = \
os.path.join(CWD, pth)
if environment == "backlash":
self.setup_backlash_environment()
@property
def config_file_paths(self):
if self._config_file_paths:
sorted_kv_pairs = tuple(((k, self._config_file_paths[k])
for k in sorted(self._config_file_paths.viewkeys())))
paths = collections.namedtuple("config_file_paths", [e[0] for e in sorted_kv_pairs])
return paths(*[e[1] for e in sorted_kv_pairs])
@property
def project_metadata(self):
return self.app_config["project_metadata"]
@property
def use_logging(self):
return self.app_config.get("global", {}).get("engine.logging.on", False)
@property
def use_redis(self):
if self.controllers_config:
for _, controller_config in self.controllers_config.viewitems():
controller_config = controller_config.copy()
controller_config.pop("controller")
for path_config in controller_config.viewvalues():
if path_config.get("tools.sessions.storage_type") == "redis":
return True
return False
@property
def use_sqlalchemy(self):
return self.app_config.get("global", {}).get("engine.sqlalchemy.on", False)
@property
def use_jinja2(self):
return "jinja2" in self.app_config
@property
def use_webassets(self):
return self.use_jinja2 and self.app_config["jinja2"].get("use_webassets", False)
@property
def use_email(self):
return "email" in self.app_config
@property
def controllers_config(self):
return self.app_config.get("controllers")
@property
def app_config(self):
return self._app_config
@property
def logging_config(self):
return getattr(self, "_logging_config", None)
@property
def webassets_env(self):
return getattr(self, "_webassets_env", None)
@property
def jinja2_config(self):
if self.use_jinja2:
conf = self.app_config["jinja2"].copy()
conf.pop("use_webassets", None)
return conf
@property
def sqlalchemy_config(self):
if self.use_sqlalchemy:
if "sqlalchemy_engine" in self.app_config:
saconf = self.app_config["sqlalchemy_engine"].copy()
return {"sqlalchemy_engine": saconf}
else:
return dict([(k, v) for k, v in self.app_config.viewitems()
if k.startswith("sqlalchemy_engine")])
@property
def email_config(self):
return self.app_config.get("email")
def setup_backlash_environment(self):
"""
        Configures this object to run under the backlash debugger
        environment and registers that environment with CherryPy's
        config.
"""
try:
from backlash import DebuggedApplication
except ImportError:
warnings.warn("backlash not installed")
return
cherrypy._cpconfig.environments["backlash"] = {
"log.wsgi": True,
"request.throw_errors": True,
"log.screen": False,
"engine.autoreload_on": False
}
def remove_error_options(section):
section.pop("request.handler_error", None)
section.pop("request.error_response", None)
section.pop("tools.err_redirect.on", None)
section.pop("tools.log_headers.on", None)
section.pop("tools.log_tracebacks.on", None)
for k in section.copy().viewkeys():
if k.startswith("error_page.") or \
k.startswith("request.error_page."):
section.pop(k)
for section_name, section in self.app_config.viewitems():
if section_name.startswith("/") or section_name == "global":
remove_error_options(section)
wsgi_pipeline = []
if "/" in self.app_config:
wsgi_pipeline = self.app_config["/"].get("wsgi.pipeline", [])
else:
self.app_config["/"] = {}
wsgi_pipeline.insert(0, ("backlash", DebuggedApplication))
self.app_config["/"]["wsgi.pipeline"] = wsgi_pipeline
def validate(self):
# no need to check for cp config, which will be checked on startup
if not hasattr(self, "_app_config") or not self.app_config:
raise BlueberryPyNotConfiguredError("BlueberryPy application configuration not found.")
if self.use_sqlalchemy and not self.sqlalchemy_config:
raise BlueberryPyNotConfiguredError("SQLAlchemy configuration not found.")
if self.use_webassets:
if self.webassets_env is None:
raise BlueberryPyNotConfiguredError("Webassets configuration not found.")
elif len(self.webassets_env) == 0:
raise BlueberryPyNotConfiguredError("No bundles found in webassets env.")
if self.use_jinja2 and not self.jinja2_config:
raise BlueberryPyNotConfiguredError("Jinja2 configuration not found.")
if self.use_logging and not self.logging_config:
warnings.warn("BlueberryPy application-specific logging "
"configuration not found. Continuing without "
"BlueberryPy's logging plugin.")
if self.use_email:
if not self.email_config:
warnings.warn("BlueberryPy email configuration is empty.")
else:
try:
signature = inspect.signature(Mailer.__init__)
                    argnames = frozenset(list(signature.parameters)[1:])
except AttributeError:
mailer_ctor_argspec = inspect.getargspec(Mailer.__init__)
argnames = frozenset(mailer_ctor_argspec.args[1:])
for key in self.email_config.viewkeys():
if key not in argnames:
closest_match = difflib.get_close_matches(key, argnames, 1)
closest_match = ((closest_match and " Did you mean %r?" % closest_match[0])
or "")
warnings.warn(("Unknown key %r found for [email]." % key) + closest_match)
if not self.controllers_config:
raise BlueberryPyConfigurationError("You must declare at least one controller.")
else:
for script_name, section in self.controllers_config.viewitems():
controller = section.get("controller")
if controller is None:
raise BlueberryPyConfigurationError("You must define a controller in the "
"[controllers][%s] section." % script_name)
elif isinstance(controller, cherrypy.dispatch.RoutesDispatcher):
if not controller.controllers:
warnings.warn("Controller %r has no connected routes." % script_name)
else:
for member_name, member_obj in inspect.getmembers(controller):
if member_name == "exposed" and member_obj:
break
elif (hasattr(member_obj, "exposed") and
member_obj.exposed is True):
break
else:
warnings.warn("Controller %r has no exposed method." % script_name)
@classmethod
def _load_env_var(cls, env_var_name):
env_conf = {}
try:
env_conf = json.loads(os.getenv(env_var_name),
object_hook=cls._callable_json_loader)
except ValueError:
# Don't use simplejson.JSONDecodeError, since it only exists in
# simplejson implementation and is a subclass of ValueError
# See: https://github.com/Yelp/mrjob/issues/544
logger.error('${} is not a valid JSON string!'
.format(env_var_name))
except TypeError:
logger.warning('${} environment variable is not set!'
.format(env_var_name))
except:
logger.exception('Could not parse ${} environment variable for an '
'unknown reason!'.format(env_var_name))
return env_conf
@staticmethod
def get_callable_from_str(s):
python_module, python_name = s.rsplit('.', 1)
return getattr(importlib.import_module(python_module), python_name)
@classmethod
def _callable_json_loader(cls, obj):
if isinstance(obj, str):
if obj.startswith('!!python/name:'):
cllbl = cls.get_callable_from_str(obj.split(':', 1)[-1])
return cllbl if callable(cllbl) else obj
if isinstance(obj, dict):
keys = tuple(filter(lambda _: _.startswith('!!python/object:'),
obj.keys()))
for k in keys:
cllbl = cls.get_callable_from_str(k.split(':', 1)[-1])
return cllbl(**obj[k]) if callable(cllbl) else obj
return obj
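    # Example (illustrative module path): with the environment variable set to
    #   BLUEBERRYPY_CONFIG='{"global": {"handler": "!!python/name:myapp.hooks.on_error"}}'
    # the loader above resolves the tagged string to the callable
    # myapp.hooks.on_error before the JSON is merged into the app config.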
@classmethod
def merge_dicts(cls, base, overrides):
'''Recursive helper for merging of two dicts'''
for k in overrides.keys():
if k in base:
if isinstance(base[k], dict) and isinstance(overrides[k], dict):
base[k] = cls.merge_dicts(base[k], overrides[k])
elif isinstance(overrides[k], list) and \
not isinstance(base[k], list):
base[k] = [base[k]] + overrides[k]
elif isinstance(base[k], list) and \
not isinstance(overrides[k], list):
base[k] = base[k] + [overrides[k]]
elif not isinstance(base[k], dict):
base[k] = overrides[k]
else:
base[k].update(overrides[k])
else:
base[k] = overrides[k]
return base<|fim▁end|> | self._logging_config = load(logging_yml, self._YAMLLoader)
if "bundles_yml" in config_file_paths and not webassets_env: |
<|file_name|>ProbabilitisticModels.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# A search engine based on probabilistic models of information retrieval.
# Author - Janu Verma
# email - [email protected]
# http://januverma.wordpress.com/
# @januverma
import sys
from pydoc import help
import os
from collections import defaultdict
from math import log, sqrt
import operator
class ProbModel:
"""
    Implements probabilistic models for information retrieval.
"""
def __init__(self, directory):
"""
Arguments:
directory - Directory of documents to be searched.
"""
self.corpus = os.listdir(directory)
self.text = {}
for f in self.corpus:
f = os.path.join(directory,f)
with open(f) as doc:
info = doc.read()
self.text[f] = info
def words(self, document):
"""
All the words in a document.
Arguments:
document : A textual document.
Returns:
A list containing all the words in the document.
"""
words = document.split()
words = [x.lower() for x in words]
        words = [x for x in words if len(x) >= 2 and not x.isdigit()]
return words
def word_freq(self, wordlist):
"""
        Build a dictionary of words with the frequencies of their occurrence in the document.
        Arguments:
            wordlist : A list of all the words in a document.
Returns:
A dictionary containing all the words in the document with their frequencies.
"""
wordFreq = defaultdict(int)
for w in wordlist:
wordFreq[w] += 1
return wordFreq
    def vocabulary(self):
"""
All the words in the corpus.
Returns:
A list of all the words in the corpus.
"""
allWords = []
allDocs = self.text
for d in allDocs.keys():
d = allDocs[d]
docWords = self.words(d)
allWords.extend(docWords)
return allWords
def doc_freq(self):
"""
Compute the document frequency of all the terms in the corpus.
Returns:
A dictionary of all the terms in the corpus with their document frequency.
"""
        allWords = self.vocabulary()
allWords = set(allWords)
allDocs = self.text
docFreq = defaultdict(int)
for x in allWords:
for d in allDocs.keys():
d = allDocs[d]
docTerms = self.words(d)
if (x in docTerms):
docFreq[x] += 1
return docFreq
def docScore(self, document, query, k, b):
"""
Compute the log odds ratio of the document being relevant to the query.
Arguments:
document : A textual document.
query : The search query.
k : tuning parameter for term frequency.
            b : tuning parameter for document length.
        Returns:
            A floating point score.
"""
# total number of docs
n = len(self.corpus)
# words in the document
docText = self.words(document)
# length of the document
l = len(docText)
# average length of a document
        l_av = float(len(self.vocabulary()))/n
# document frequency dict
df = self.doc_freq()
# words in the document
tokens = self.words(document)
#term frequency dict
tf = self.word_freq(tokens)
<|fim▁hole|> score = 0
# query
queryWords = self.words(query)
for x in queryWords:
try:
tf_x = tf[x]
except:
continue
try:
df_x = df[x]
except:
continue
# inverse document frequency of the term.
            idf = log(float(n)/df_x)  # float() avoids integer floor division in Python 2
# correction factor
correction = float((k + 1)*(tf_x))/(k*(1-b) + b*(l/(l_av)) + (tf_x))
# total contribution
contribution = idf * correction
score += contribution
return score
def ranking(self, query, k, b):
"""
Ranking of the documents based on their relevance to the query.
Arguments:
query: The search query
Returns:
A dictionary of all the documents in the corpus with their corresponding relevance odds ratio.
"""
        if k is None:
            k = 0
        if b is None:
            b = 0
documents = self.text
rankingDict = defaultdict(float)
for d in documents.keys():
docText = documents[d]
score = self.docScore(docText, query, k, b)
rankingDict[d] = score
return rankingDict
def search(self, query, n_docs, k=None, b=None):
"""
        Returns documents which are most relevant to the query.
Ranking is done by decreasing odds ratio for the document to be relevant for the query.
Arguments:
String query : Search query
            Integer n_docs : Number of matching documents retrieved.
            Float k : tuning parameter for term frequency, (0<=k<=1).
                A value of 0 corresponds to a binary model (no term frequency),
                and a large value corresponds to using raw term frequency
            Float b : tuning parameter for document length, (0<=b<=1).
b = 1 corresponds to fully scaling the term weight by the document length,
while b = 0 corresponds to no length normalization.
Returns:
A list of length n_docs containing documents most relevant to the search query.
            The list is sorted in descending order.
"""
if (n_docs > len(self.corpus)):
n_docs = len(self.corpus)
relevantDocs = []
rankings = self.ranking(query, k, b)
rankings = sorted(rankings.iteritems(), key=operator.itemgetter(1), reverse=True)
for i in range(n_docs):
u,v = rankings[i]
relevantDocs.append(u)
        return relevantDocs<|fim▁end|> | # initialize the score for the document |
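The scoring in docScore is the classic Okapi BM25 weighting. One detail worth flagging: textbook BM25 scales the length term by k as well, i.e. the denominator is k*(1 - b + b*l/l_av) + tf, whereas the code above adds b*(l/l_av) unscaled. A sketch of the per-term contribution as actually implemented:

    from math import log

    def bm25_term(tf, df, n_docs, l, l_av, k, b):
        # idf times a saturating term-frequency correction, mirroring docScore
        idf = log(float(n_docs) / df)
        return idf * ((k + 1.0) * tf) / (k * (1.0 - b) + b * (l / l_av) + tf)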
<|file_name|>utils.go<|end_file_name|><|fim▁begin|>package provision
import (
"bytes"
"fmt"
"io/ioutil"
"net/url"
"path"
"path/filepath"
"strconv"
"strings"
log "github.com/Sirupsen/logrus"
"github.com/docker/machine/libmachine/auth"
"github.com/docker/machine/libmachine/provision/pkgaction"
"github.com/docker/machine/libmachine/swarm"
"github.com/docker/machine/utils"
)
type DockerOptions struct {
EngineOptions string
EngineOptionsPath string
}
func installDockerGeneric(p Provisioner) error {
// install docker - until cloudinit we use ubuntu everywhere so we
// just install it using the docker repos
if output, err := p.SSHCommand("if ! type docker; then curl -sSL https://get.docker.com | sh -; fi"); err != nil {
var buf bytes.Buffer
if _, err := buf.ReadFrom(output.Stderr); err != nil {
return err
}
return fmt.Errorf("error installing docker: %s\n", buf.String())
}
return nil
}
func ConfigureAuth(p Provisioner, authOptions auth.AuthOptions) error {
var (
err error
)
machineName := p.GetDriver().GetMachineName()
org := machineName
bits := 2048
ip, err := p.GetDriver().GetIP()
if err != nil {
return err
}
// copy certs to client dir for docker client
machineDir := filepath.Join(utils.GetMachineDir(), machineName)
if err := utils.CopyFile(authOptions.CaCertPath, filepath.Join(machineDir, "ca.pem")); err != nil {
log.Fatalf("Error copying ca.pem to machine dir: %s", err)
}
if err := utils.CopyFile(authOptions.ClientCertPath, filepath.Join(machineDir, "cert.pem")); err != nil {
log.Fatalf("Error copying cert.pem to machine dir: %s", err)
}
if err := utils.CopyFile(authOptions.ClientKeyPath, filepath.Join(machineDir, "key.pem")); err != nil {
log.Fatalf("Error copying key.pem to machine dir: %s", err)
}
log.Debugf("generating server cert: %s ca-key=%s private-key=%s org=%s",
authOptions.ServerCertPath,
authOptions.CaCertPath,
authOptions.PrivateKeyPath,
org,
)
// TODO: Switch to passing just authOptions to this func
// instead of all these individual fields
err = utils.GenerateCert(
[]string{ip},
authOptions.ServerCertPath,
authOptions.ServerKeyPath,
authOptions.CaCertPath,
authOptions.PrivateKeyPath,
org,
bits,
)
if err != nil {
return fmt.Errorf("error generating server cert: %s", err)
}
if err := p.Service("docker", pkgaction.Stop); err != nil {
return err
}
dockerDir := p.GetDockerOptionsDir()
if _, err := p.SSHCommand(fmt.Sprintf("sudo mkdir -p %s", dockerDir)); err != nil {
return err
}
// upload certs and configure TLS auth
caCert, err := ioutil.ReadFile(authOptions.CaCertPath)
if err != nil {
return err
}
// due to windows clients, we cannot use filepath.Join as the paths
// will be mucked on the linux hosts
machineCaCertPath := path.Join(dockerDir, "ca.pem")
authOptions.CaCertRemotePath = machineCaCertPath
serverCert, err := ioutil.ReadFile(authOptions.ServerCertPath)
if err != nil {
return err
}
machineServerCertPath := path.Join(dockerDir, "server.pem")
authOptions.ServerCertRemotePath = machineServerCertPath
serverKey, err := ioutil.ReadFile(authOptions.ServerKeyPath)
if err != nil {
return err<|fim▁hole|> if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee %s", string(caCert), machineCaCertPath)); err != nil {
return err
}
if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee %s", string(serverKey), machineServerKeyPath)); err != nil {
return err
}
if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee %s", string(serverCert), machineServerCertPath)); err != nil {
return err
}
dockerUrl, err := p.GetDriver().GetURL()
if err != nil {
return err
}
u, err := url.Parse(dockerUrl)
if err != nil {
return err
}
dockerPort := 2376
parts := strings.Split(u.Host, ":")
if len(parts) == 2 {
dPort, err := strconv.Atoi(parts[1])
if err != nil {
return err
}
dockerPort = dPort
}
dkrcfg, err := p.GenerateDockerOptions(dockerPort, authOptions)
if err != nil {
return err
}
if _, err = p.SSHCommand(fmt.Sprintf("echo \"%s\" | sudo tee -a %s", dkrcfg.EngineOptions, dkrcfg.EngineOptionsPath)); err != nil {
return err
}
if err := p.Service("docker", pkgaction.Start); err != nil {
return err
}
return nil
}
func getDefaultDaemonOpts(driverName string, authOptions auth.AuthOptions) string {
return fmt.Sprintf(`--tlsverify --tlscacert=%s --tlskey=%s --tlscert=%s %s`,
authOptions.CaCertRemotePath,
authOptions.ServerKeyRemotePath,
authOptions.ServerCertRemotePath,
fmt.Sprintf("--label=provider=%s", driverName),
)
}
func configureSwarm(p Provisioner, swarmOptions swarm.SwarmOptions) error {
if !swarmOptions.IsSwarm {
return nil
}
basePath := p.GetDockerOptionsDir()
ip, err := p.GetDriver().GetIP()
if err != nil {
return err
}
tlsCaCert := path.Join(basePath, "ca.pem")
tlsCert := path.Join(basePath, "server.pem")
tlsKey := path.Join(basePath, "server-key.pem")
masterArgs := fmt.Sprintf("--tlsverify --tlscacert=%s --tlscert=%s --tlskey=%s -H %s %s",
tlsCaCert, tlsCert, tlsKey, swarmOptions.Host, swarmOptions.Discovery)
nodeArgs := fmt.Sprintf("--addr %s:2376 %s", ip, swarmOptions.Discovery)
u, err := url.Parse(swarmOptions.Host)
if err != nil {
return err
}
parts := strings.Split(u.Host, ":")
port := parts[1]
// TODO: Do not hardcode daemon port, ask the driver
if err := utils.WaitForDocker(ip, 2376); err != nil {
return err
}
if _, err := p.SSHCommand(fmt.Sprintf("sudo docker pull %s", swarm.DockerImage)); err != nil {
return err
}
dockerDir := p.GetDockerOptionsDir()
// if master start master agent
if swarmOptions.Master {
log.Debug("launching swarm master")
log.Debugf("master args: %s", masterArgs)
if _, err = p.SSHCommand(fmt.Sprintf("sudo docker run -d -p %s:%s --restart=always --name swarm-agent-master -v %s:%s %s manage %s",
port, port, dockerDir, dockerDir, swarm.DockerImage, masterArgs)); err != nil {
return err
}
}
// start node agent
log.Debug("launching swarm node")
log.Debugf("node args: %s", nodeArgs)
if _, err = p.SSHCommand(fmt.Sprintf("sudo docker run -d --restart=always --name swarm-agent -v %s:%s %s join %s",
dockerDir, dockerDir, swarm.DockerImage, nodeArgs)); err != nil {
return err
}
return nil
}<|fim▁end|> | }
machineServerKeyPath := path.Join(dockerDir, "server-key.pem")
authOptions.ServerKeyRemotePath = machineServerKeyPath
|
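For reference, the options string produced by getDefaultDaemonOpts expands to something like the following (a sketch; the remote paths come from GetDockerOptionsDir, commonly /etc/docker, and the driver name is illustrative):

    --tlsverify --tlscacert=/etc/docker/ca.pem --tlskey=/etc/docker/server-key.pem --tlscert=/etc/docker/server.pem --label=provider=virtualbox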
<|file_name|>PreCastEvent.java<|end_file_name|><|fim▁begin|>package com.elmakers.mine.bukkit.api.event;
import org.bukkit.event.Cancellable;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import com.elmakers.mine.bukkit.api.magic.Mage;
import com.elmakers.mine.bukkit.api.spell.Spell;
/**
 * A custom event that the Magic plugin fires before a
 * Mage casts a Spell; cancelling it prevents the cast.
*/
public class PreCastEvent extends Event implements Cancellable {
private boolean cancelled;
<|fim▁hole|>
private static final HandlerList handlers = new HandlerList();
public PreCastEvent(Mage mage, Spell spell) {
this.mage = mage;
this.spell = spell;
}
@Override
public HandlerList getHandlers() {
return handlers;
}
public static HandlerList getHandlerList() {
return handlers;
}
@Override
public boolean isCancelled() {
return cancelled;
}
@Override
public void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
public Mage getMage() {
return mage;
}
public Spell getSpell() {
return spell;
}
}<|fim▁end|> | private final Mage mage;
private final Spell spell; |
<|file_name|>file.py<|end_file_name|><|fim▁begin|>import os
import re
import mimetypes
from collections import namedtuple<|fim▁hole|>
from app.lib.helpers import (
http400,
http404,
)
DEFAULT_CONTENT_TYPE = 'application/octet-stream'
FILENAME_PARSER = re.compile(r"(?P<id>\d+)_(?P<name>.+)\.(?P<format>.+)")
ParsedFile = namedtuple('ParsedFile', [
'original_name', 'id', 'name'
])
def parse_filename(filename: str):
if not filename:
return
match = FILENAME_PARSER.match(filename)
if match is not None:
id, name, frmt = match.groups()
return ParsedFile(
"{}_{}.{}".format(id, name, frmt),
id,
"{}.{}".format(name, frmt),
)
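# Worked example: parse_filename("42_report.pdf") yields
# ParsedFile(original_name='42_report.pdf', id='42', name='report.pdf');
# a filename that does not match the <id>_<name>.<format> shape returns None.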
def get_cache_headers():
return {
"Cache-Control": "public, max-age=31536000",
"Etag": "'CacheForever'",
"Last-Modified": "Wed, 21 Oct 2015 07:28:00 GMT"
}
def get_no_cache_headers():
return {
"Cache-Control": "no-store, no-cache, max-age=0",
"Pragma": "no-cache",
}
def get_file_headers(file_name):
type, _ = mimetypes.guess_type(file_name)
headers = {
"Content-Type": "{}"
.format(type),
"Content-Disposition": 'attachment;filename="{}"'
.format(file_name),
}
return headers
def get_non_cached_file_headers(file_name):
headers = get_file_headers(file_name)
headers.update(**get_no_cache_headers())
return headers
def get_cached_file_headers(file_name):
headers = get_file_headers(file_name)
headers.update(**get_cache_headers())
return headers
def allowed_file(filename):
return (
'.' in filename
        and filename.rsplit('.', 1)[1] in {
            'bmp', 'eps', 'icns', 'im', 'msp', 'pcx', 'ppm',
            'png', 'tiff', 'ico', 'jpg', 'jpeg', 'gif',
        }
)
def file_response(name, body, source):
if body is None:
return http404()
return web.Response(
body=body,
headers={
'X-File-Source': source,
**get_file_headers(name)
}
)
async def store_mp3_handler(request):
reader = await request.multipart()
# /!\ Don't forget to validate your inputs /!\
file = await reader.next()
filename = file.filename
if not allowed_file(filename):
return http400()
# You cannot rely on Content-Length if transfer is chunked.
size = 0
with open(os.path.join('/media/', filename), 'wb') as f:
while True:
chunk = await file.read_chunk() # 8192 bytes by default.
if not chunk:
break
size += len(chunk)
f.write(chunk)
return web.Response(
text='{} sized of {} successfully stored'.format(filename, size)
)<|fim▁end|> |
from aiohttp import web |
<|file_name|>text.rs<|end_file_name|><|fim▁begin|>use std::borrow::ToOwned;
use std::any::Any;
use { GenericEvent, TEXT };
/// When receiving text from user, such as typing a character
pub trait TextEvent {
/// Creates a text event.
fn from_text(text: &str, old_event: &Self) -> Option<Self>;
/// Calls closure if this is a text event.
fn text<U, F>(&self, f: F) -> Option<U>
where F: FnMut(&str) -> U;
/// Returns text arguments.
fn text_args(&self) -> Option<String> {
self.text(|text| text.to_owned())
}
}
impl<T: GenericEvent> TextEvent for T {
fn from_text(text: &str, old_event: &Self) -> Option<Self> {
GenericEvent::from_args(TEXT, &text.to_owned() as &Any, old_event)
}
fn text<U, F>(&self, mut f: F) -> Option<U>
where F: FnMut(&str) -> U<|fim▁hole|> return None;
}
self.with_args(|any| {
if let Some(text) = any.downcast_ref::<String>() {
Some(f(&text))
} else {
panic!("Expected &str")
}
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use test::Bencher;
#[test]
fn test_input_text() {
use input::Input;
let e = Input::Text("".to_string());
let x: Option<Input> = TextEvent::from_text("hello", &e);
let y: Option<Input> = x.clone().unwrap().text(|text|
TextEvent::from_text(text, x.as_ref().unwrap())).unwrap();
assert_eq!(x, y);
}
#[bench]
fn bench_input_text(bencher: &mut Bencher) {
use input::Input;
let e = Input::Text("".to_string());
bencher.iter(|| {
let _: Option<Input> = TextEvent::from_text("hello", &e);
});
}
#[test]
fn test_event_text() {
use Event;
use input::Input;
let e = Event::Input(Input::Text("".to_string()));
let x: Option<Event> = TextEvent::from_text("hello", &e);
let y: Option<Event> = x.clone().unwrap().text(|text|
TextEvent::from_text(text, x.as_ref().unwrap())).unwrap();
assert_eq!(x, y);
}
#[bench]
fn bench_event_text(bencher: &mut Bencher) {
use Event;
use input::Input;
let e = Event::Input(Input::Text("".to_string()));
bencher.iter(|| {
let _: Option<Event> = TextEvent::from_text("hello", &e);
});
}
}<|fim▁end|> | {
if self.event_id() != TEXT { |
<|file_name|>signals.js<|end_file_name|><|fim▁begin|>// This is not the set of all possible signals.
//
// It IS, however, the set of all signals that trigger
// an exit on either Linux or BSD systems. Linux is a
// superset of the signal names supported on BSD, and
// the unknown signals just fail to register, so we can
// catch that easily enough.
//
// Don't bother with SIGKILL. It's uncatchable, which
// means that we can't fire any callbacks anyway.
//
// If a user does happen to register a handler on a non-
// fatal signal like SIGWINCH or something, and then
// exit, it'll end up firing `process.emit('exit')`, so
// the handler will be fired anyway.
//
// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised
// artificially, inherently leave the process in a
// state from which it is not safe to try and enter JS
// listeners.
module.exports = [
'SIGABRT',
'SIGALRM',
'SIGHUP',
'SIGINT',
'SIGTERM'
]
if (process.platform !== 'win32') {
module.exports.push(
'SIGVTALRM',
'SIGXCPU',
'SIGXFSZ',
'SIGUSR2',
'SIGTRAP',
'SIGSYS',
'SIGQUIT',
'SIGIOT'
// should detect profiler and enable/disable accordingly.
// see #21
// 'SIGPROF'
)
}
if (process.platform === 'linux') {
module.exports.push(
'SIGIO',
'SIGPOLL',
'SIGPWR',
'SIGSTKFLT',
'SIGUNUSED'
<|fim▁hole|><|fim▁end|> | )
} |
<|file_name|>interface.go<|end_file_name|><|fim▁begin|>package autonat
import (
"context"
"github.com/libp2p/go-libp2p-core/network"
"github.com/libp2p/go-libp2p-core/peer"
ma "github.com/multiformats/go-multiaddr"
)
// AutoNAT is the interface for NAT autodiscovery
type AutoNAT interface {
// Status returns the current NAT status
Status() network.Reachability
// PublicAddr returns the public dial address when NAT status is public and an
// error otherwise
PublicAddr() (ma.Multiaddr, error)
}
// Client is a stateless client interface to AutoNAT peers
type Client interface {
// DialBack requests from a peer providing AutoNAT services to test dial back
// and report the address on a successful connection.
DialBack(ctx context.Context, p peer.ID) (ma.Multiaddr, error)
}
// AddrFunc is a function returning the candidate addresses for the local host.
type AddrFunc func() []ma.Multiaddr<|fim▁hole|>// Option is an Autonat option for configuration
type Option func(*config) error<|fim▁end|> | |
<|file_name|>gone-exception.js<|end_file_name|><|fim▁begin|>/**
* MIT License
*
* Copyright (c) 2017 ITGorillaz
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/<|fim▁hole|>
'use strict';
const HttpException = require('./http-exception');
const HttpStatus = require('http-status-codes');
/**
* The GoneException class.
*
 * This is a concrete subclass of the HttpException class.
 * It represents an HTTP 410 Gone response.
*
* @author tommelo
*/
class GoneException extends HttpException {
/**
* The class constructor
*
* @param {Object} entity The object to be serialized as response body
* @param {Error} cause The error cause
*/
constructor(entity, cause) {
cause = cause || new Error('GoneException');
super(entity, HttpStatus.GONE, cause);
}
}
module.exports = GoneException;<|fim▁end|> | |
<|file_name|>test_user_has_group.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.tests.common import TransactionCase
from odoo.exceptions import ValidationError
class TestHasGroup(TransactionCase):
def setUp(self):
super(TestHasGroup, self).setUp()
self.group0 = 'test_user_has_group.group0'
self.group1 = 'test_user_has_group.group1'
group0, group1 = self.env['res.groups']._load_records([
dict(xml_id=self.group0, values={'name': 'group0'}),
dict(xml_id=self.group1, values={'name': 'group1'}),
])
self.test_user = self.env['res.users'].create({
'login': 'testuser',
'partner_id': self.env['res.partner'].create({
'name': "Strawman Test User"
}).id,
'groups_id': [(6, 0, [group0.id])]
})
self.grp_internal_xml_id = 'base.group_user'
self.grp_internal = self.env.ref(self.grp_internal_xml_id)
self.grp_portal_xml_id = 'base.group_portal'
self.grp_portal = self.env.ref(self.grp_portal_xml_id)
self.grp_public_xml_id = 'base.group_public'
self.grp_public = self.env.ref(self.grp_public_xml_id)
def test_env_uid(self):
Users = self.env['res.users'].with_user(self.test_user)
self.assertTrue(
Users.has_group(self.group0),
"the test user should belong to group0"
)
self.assertFalse(
Users.has_group(self.group1),
"the test user should *not* belong to group1"
)
def test_record(self):
self.assertTrue(
self.test_user.has_group(self.group0),
"the test user should belong to group0",
)
self.assertFalse(
self.test_user.has_group(self.group1),
"the test user shoudl not belong to group1"
)
def test_portal_creation(self):
"""Here we check that portal user creation fails if it tries to create a user
who would also have group_user by implied_group.
Otherwise, it succeeds with the groups we asked for.
"""
grp_public = self.env.ref('base.group_public')
grp_test_portal_xml_id = 'test_user_has_group.portal_implied_group'
grp_test_portal = self.env['res.groups']._load_records([
dict(xml_id=grp_test_portal_xml_id, values={'name': 'Test Group Portal'})
])
grp_test_internal1 = self.env['res.groups']._load_records([
            dict(xml_id='test_user_has_group.internal_implied_group1', values={'name': 'Test Group Internal 1'})
])
grp_test_internal2_xml_id = 'test_user_has_group.internal_implied_group2'
grp_test_internal2 = self.env['res.groups']._load_records([
dict(xml_id=grp_test_internal2_xml_id, values={'name': 'Test Group Internal 2'})
])
self.grp_portal.implied_ids = grp_test_portal
grp_test_internal1.implied_ids = False
grp_test_internal2.implied_ids = False
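        # The 'sel_groups_<ids>' keys below are the pseudo-fields Odoo
        # generates on res.users for mutually exclusive group sets; the value
        # selects which group of the set the user receives.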
portal_user = self.env['res.users'].create({
'login': 'portalTest',
'name': 'Portal test',
'sel_groups_%s_%s_%s' % (self.grp_internal.id, self.grp_portal.id, grp_public.id): self.grp_portal.id,
'sel_groups_%s_%s' % (grp_test_internal1.id, grp_test_internal2.id): grp_test_internal2.id,
})
self.assertTrue(
portal_user.has_group(self.grp_portal_xml_id),
"The portal user should belong to '%s'" % self.grp_portal_xml_id,
)
self.assertTrue(
portal_user.has_group(grp_test_portal_xml_id),
"The portal user should belong to '%s'" % grp_test_portal_xml_id,
)
self.assertTrue(
portal_user.has_group(grp_test_internal2_xml_id),
"The portal user should belong to '%s'" % grp_test_internal2_xml_id
)
self.assertFalse(
portal_user.has_group(self.grp_internal_xml_id),
"The portal user should not belong to '%s'" % self.grp_internal_xml_id
)
portal_user.unlink() # otherwise, badly modifying the implication would raise
grp_test_internal1.implied_ids = self.grp_internal
grp_test_internal2.implied_ids = self.grp_internal
with self.assertRaises(ValidationError): # current group implications forbid to create a portal user
portal_user = self.env['res.users'].create({
'login': 'portalFail',
'name': 'Portal fail',
'sel_groups_%s_%s_%s' % (self.grp_internal.id, self.grp_portal.id, grp_public.id): self.grp_portal.id,
'sel_groups_%s_%s' % (grp_test_internal1.id, grp_test_internal2.id): grp_test_internal2.id,
})
def test_portal_write(self):
"""Check that adding a new group to a portal user works as expected,
        except if it implies group_user/public, in which case it should raise.
"""
grp_test_portal = self.env["res.groups"].create({"name": "implied by portal"})
self.grp_portal.implied_ids = grp_test_portal
portal_user = self.env['res.users'].create({
'login': 'portalTest2',
'name': 'Portal test 2',
'groups_id': [(6, 0, [self.grp_portal.id])],
})
self.assertEqual(
portal_user.groups_id, (self.grp_portal + grp_test_portal),
"The portal user should have the implied group.",
)
grp_fail = self.env["res.groups"].create(
{"name": "fail", "implied_ids": [(6, 0, [self.grp_internal.id])]})
with self.assertRaises(ValidationError):
portal_user.write({'groups_id': [(4, grp_fail.id)]})
def test_two_user_types(self):
        # Create a user with two user-type groups (Internal and Portal)
grp_test = self.env['res.groups']._load_records([
dict(xml_id='test_two_user_types.implied_groups', values={'name': 'Test Group'})
])
grp_test.implied_ids += self.grp_internal
grp_test.implied_ids += self.grp_portal
with self.assertRaises(ValidationError):
self.env['res.users'].create({
'login': 'test_two_user_types',
'name': "Test User with two user types",
'groups_id': [(6, 0, [grp_test.id])]
})
        # Add a portal user to the Internal group
test_user = self.env['res.users'].create({
'login': 'test_user_portal',
'name': "Test User with two user types",
'groups_id': [(6, 0, [self.grp_portal.id])]
})
with self.assertRaises(ValidationError):
self.grp_internal.users = [(4, test_user.id)]
def test_two_user_types_implied_groups(self):
"""Contrarily to test_two_user_types, we simply add an implied_id to a group.
This will trigger the addition of the relevant users to the relevant groups;
if, say, this was done in SQL and thus bypassing the ORM, it would bypass the constraints
and thus give us a case uncovered by the aforementioned test.
"""
grp_test = self.env["res.groups"].create(
{"name": "test", "implied_ids": [(6, 0, [self.grp_internal.id])]})
test_user = self.env['res.users'].create({
'login': 'test_user_portal',
'name': "Test User with one user types",
'groups_id': [(6, 0, [grp_test.id])]
})
with self.assertRaises(ValidationError):
grp_test.write({'implied_ids': [(4, self.grp_portal.id)]})
def test_demote_user(self):
"""When a user is demoted to the status of portal/public,
we should strip him of all his (previous) rights
"""
group_0 = self.env.ref(self.group0) # the group to which test_user already belongs
group_U = self.env["res.groups"].create({"name": "U", "implied_ids": [(6, 0, [self.grp_internal.id])]})
self.grp_internal.implied_ids = False # only there to simplify the test by not having to care about its trans_implied_ids
self.test_user.write({'groups_id': [(4, group_U.id)]})<|fim▁hole|> )
# Now we demote him. The JS framework sends 3 and 4 commands,
# which is what we write here, but it should work even with a 5 command or whatever.
self.test_user.write({'groups_id': [
(3, self.grp_internal.id),
(3, self.grp_public.id),
(4, self.grp_portal.id),
]})
# if we screw up the removing groups/adding the implied ids, we could end up in two situations:
# 1. we have a portal user with way too much rights (e.g. 'Contact Creation', which does not imply any other group)
# 2. because a group may be (transitively) implying group_user, then it would raise an exception
# so as a compromise we remove all groups when demoting a user
# (even technical display groups, e.g. TaxB2B, which could be re-added later)
self.assertEqual(
self.test_user.groups_id, (self.grp_portal),
"Here the portal group does not imply any other group, so we should only have this group.",
)
def test_implied_groups(self):
""" We check that the adding of implied ids works correctly for normal users and portal users.
        In the second case, working normally means raising if a group implies giving 'group_user'
rights to a portal user.
"""
U = self.env["res.users"]
G = self.env["res.groups"]
group_user = self.env.ref('base.group_user')
group_portal = self.env.ref('base.group_portal')
group_no_one = self.env.ref('base.group_no_one')
group_A = G.create({"name": "A"})
group_AA = G.create({"name": "AA", "implied_ids": [(6, 0, [group_A.id])]})
group_B = G.create({"name": "B"})
group_BB = G.create({"name": "BB", "implied_ids": [(6, 0, [group_B.id])]})
# user_a is a normal user, so we expect groups to be added when we add them,
# as well as 'implied_groups'; otherwise nothing else should happen.
# By contrast, for a portal user we want implied groups not to be added
# if and only if it would not give group_user (or group_public) privileges
user_a = U.create({"name": "a", "login": "a", "groups_id": [(6, 0, [group_AA.id, group_user.id])]})
self.assertEqual(user_a.groups_id, (group_AA + group_A + group_user + group_no_one))
user_b = U.create({"name": "b", "login": "b", "groups_id": [(6, 0, [group_portal.id, group_AA.id])]})
self.assertEqual(user_b.groups_id, (group_AA + group_A + group_portal))
        # user_b is not an internal user, but adding a new group (and its implied groups) should still work
(user_a + user_b).write({"groups_id": [(4, group_BB.id)]})
self.assertEqual(user_a.groups_id, (group_AA + group_A + group_BB + group_B + group_user + group_no_one))
self.assertEqual(user_b.groups_id, (group_AA + group_A + group_BB + group_B + group_portal))
# now we create a group that implies the group_user
# adding it to a user should work normally, whereas adding it to a portal user should raise
group_C = G.create({"name": "C", "implied_ids": [(6, 0, [group_user.id])]})
user_a.write({"groups_id": [(4, group_C.id)]})
self.assertEqual(user_a.groups_id, (group_AA + group_A + group_BB + group_B + group_C + group_user + group_no_one))
with self.assertRaises(ValidationError):
user_b.write({"groups_id": [(4, group_C.id)]})<|fim▁end|> | self.assertEqual(
self.test_user.groups_id, (group_0 + group_U + self.grp_internal),
"We should have our 2 groups and the implied user group", |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.md') as readme_file:
readme = readme_file.read()
with open('HISTORY.md') as history_file:
history = history_file.read()
requirements = [
'requests'
]
test_requirements = [
# TODO: put package test requirements here
]
setup(
name='python_wpapi',
version='0.3.4',
description="Simple wrapper around the Wordpress REST API",
long_description=readme + '\n\n' + history,
author="Lucas Lobosque",
author_email='[email protected]',
url='https://github.com/Lobosque/python_wpapi',
packages=[
'python_wpapi',
],
    package_dir={'python_wpapi': 'python_wpapi'},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,<|fim▁hole|> keywords='python_wpapi',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements
)<|fim▁end|> | |
<|file_name|>OptionsApp.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../all.d.ts" />
module OptionsApp {<|fim▁hole|> .run(Helpers.setCurrentLanguage)
.run(Helpers.trackPage)
.controller('OptionsCtrl', OptionsCtrl)
.directive('checkboxOption', () => {
return {
template: `<input ng-model="param" type="checkbox"/>
<span class="fa-stack">
<i class="fa fa-square fa-stack-2x"></i>
<i ng-if="param" class="fa fa-check fa-stack-1x fa-inverse"></i>
</span>`,
scope: {
param: '=?'
}
}
})
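        // The filter below formats large numbers with metric suffixes.
        // Illustrative results (example values only):
        //   kFilter(950, 1)     -> 950
        //   kFilter(1500, 1)    -> "1.5K"
        //   kFilter(2500000, 2) -> "2.50M"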
.filter('kFilter', function () {
return function (input: any, decimals: any) {
const suffixes = ['K', 'M', 'G', 'T', 'P', 'E'];
if(isNaN(input)) return null;
if(input < 1000) return input;
const exp = Math.floor(Math.log(input) / Math.log(1000));
return (input / Math.pow(1000, exp)).toFixed(decimals) + suffixes[exp - 1];
};
});
}<|fim▁end|> | angular.module('OptionsApp', ['HeaderApp', 'gettext', 'angular-google-analytics'])
.config(Helpers.setAnaliticSetting) |
<|file_name|>slo.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Middleware that will provide Static Large Object (SLO) support.
This feature is very similar to Dynamic Large Object (DLO) support in that
it allows the user to upload many objects concurrently and afterwards
download them as a single object. It is different in that it does not rely
on eventually consistent container listings to do so. Instead, a user
defined manifest of the object segments is used.
----------------------
Uploading the Manifest
----------------------
After the user has uploaded the objects to be concatenated a manifest is
uploaded. The request must be a PUT with the query parameter::
?multipart-manifest=put
The body of this request will be an ordered list of files in
json data format. The data to be supplied for each segment is::
path: the path to the segment (not including account)
/container/object_name
etag: the etag given back when the segment was PUT
size_bytes: the size of the segment in bytes
The format of the list will be::
json:
[{"path": "/cont/object",
"etag": "etagoftheobjectsegment",
"size_bytes": 1048576}, ...]
The number of object segments is limited to a configurable amount, default
1000. Each segment, except for the final one, must be at least 1 megabyte
(configurable). On upload, the middleware will head every segment passed in and
verify the size and etag of each. If any of the objects do not match (not
found, size/etag mismatch, below minimum size) then the user will receive a 4xx
error response. If everything does match, the user will receive a 2xx response
and the SLO object is ready for downloading.
Behind the scenes, on success, a json manifest generated from the user input is
sent to object servers with an extra "X-Static-Large-Object: True" header
and a modified Content-Type. The parameter: swift_bytes=$total_size will be
appended to the existing Content-Type, where total_size is the sum of all
the included segments' size_bytes. This extra parameter will be hidden from
the user.
Manifest files can reference objects in separate containers, which will improve
concurrent upload speed. Objects can be referenced by multiple manifests. The
segments of a SLO manifest can even be other SLO manifests. Treat them as any
other object i.e., use the Etag and Content-Length given on the PUT of the
sub-SLO in the manifest to the parent SLO.
-------------------------
Retrieving a Large Object
-------------------------
A GET request to the manifest object will return the concatenation of the
objects from the manifest much like DLO. If any of the segments from the
manifest are not found or their Etag/Content-Length no longer match, the
connection will drop. In this case a 409 Conflict will be logged in the proxy
logs and the user will receive incomplete results.
The headers from this GET or HEAD request will return the metadata attached
to the manifest object itself with some exceptions::
Content-Length: the total size of the SLO (the sum of the sizes of
the segments in the manifest)
X-Static-Large-Object: True
Etag: the etag of the SLO (generated the same way as DLO)
A GET request with the query parameter::
?multipart-manifest=get
Will return the actual manifest file itself. This is generated json and does
not match the data sent from the original multipart-manifest=put. This call's
main purpose is for debugging.
When the manifest object is uploaded you are more or less guaranteed that
every segment in the manifest exists and matched the specifications.
However, there is nothing that prevents the user from breaking the
SLO download by deleting/replacing a segment referenced in the manifest. It is
left to the user to exercise caution in handling the segments.
-----------------------
Deleting a Large Object
-----------------------
A DELETE request will just delete the manifest object itself.
A DELETE with a query parameter::
?multipart-manifest=delete
will delete all the segments referenced in the manifest and then the manifest
itself. The failure response will be similar to the bulk delete middleware.
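
For instance (illustrative request line; the names are made up)::

    DELETE /v1/AUTH_test/cont/my_slo?multipart-manifest=delete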
------------------------
Modifying a Large Object
------------------------
PUTs / POSTs will work as expected, PUTs will just overwrite the manifest
object for example.
------------------
Container Listings
------------------
In a container listing the size listed for SLO manifest objects will be the
total_size of the concatenated segments in the manifest. The overall
X-Container-Bytes-Used for the container (and subsequently for the account)
will not reflect total_size of the manifest but the actual size of the json
data stored. The reason for this somewhat confusing discrepancy is we want the
container listing to reflect the size of the manifest object when it is
downloaded. We do not, however, want to count the bytes-used twice (for both
the manifest and the segments it's referring to) in the container and account
metadata which can be used for stats purposes.
"""
from urllib import quote
from cStringIO import StringIO
from datetime import datetime
import mimetypes
from hashlib import md5
from swift.common.swob import Request, HTTPBadRequest, HTTPServerError, \
HTTPMethodNotAllowed, HTTPRequestEntityTooLarge, HTTPLengthRequired, \
HTTPOk, HTTPPreconditionFailed, HTTPException, HTTPNotFound, \
HTTPUnauthorized
from swift.common.utils import json, get_logger, config_true_value
from swift.common.constraints import check_utf8, MAX_BUFFERED_SLO_SEGMENTS
from swift.common.http import HTTP_NOT_FOUND, HTTP_UNAUTHORIZED
from swift.common.wsgi import WSGIContext
from swift.common.middleware.bulk import get_response_body, \
ACCEPTABLE_FORMATS, Bulk
def parse_input(raw_data):
"""
Given a request will parse the body and return a list of dictionaries
:raises: HTTPException on parse errors
:returns: a list of dictionaries on success
"""
try:
parsed_data = json.loads(raw_data)
except ValueError:
raise HTTPBadRequest("Manifest must be valid json.")
req_keys = set(['path', 'etag', 'size_bytes'])
try:
for seg_dict in parsed_data:
if (set(seg_dict) != req_keys or
'/' not in seg_dict['path'].lstrip('/')):
raise HTTPBadRequest('Invalid SLO Manifest File')
except (AttributeError, TypeError):
raise HTTPBadRequest('Invalid SLO Manifest File')
return parsed_data
class SloContext(WSGIContext):
def __init__(self, slo, slo_etag):
WSGIContext.__init__(self, slo.app)
self.slo_etag = '"' + slo_etag.hexdigest() + '"'
def handle_slo_put(self, req, start_response):
app_resp = self._app_call(req.environ)
for i in xrange(len(self._response_headers)):
if self._response_headers[i][0].lower() == 'etag':
self._response_headers[i] = ('Etag', self.slo_etag)
break
start_response(self._response_status,
self._response_headers,
self._response_exc_info)
return app_resp
class StaticLargeObject(object):
"""
StaticLargeObject Middleware
See above for a full description.
The proxy logs created for any subrequests made will have swift.source set
to "SLO".
:param app: The next WSGI filter or app in the paste.deploy chain.
:param conf: The configuration dict for the middleware.
"""
def __init__(self, app, conf):
self.conf = conf
self.app = app
self.logger = get_logger(conf, log_route='slo')
self.max_manifest_segments = int(self.conf.get('max_manifest_segments',
1000))
self.max_manifest_size = int(self.conf.get('max_manifest_size',
1024 * 1024 * 2))
self.min_segment_size = int(self.conf.get('min_segment_size',
1024 * 1024))
self.bulk_deleter = Bulk(app, {})
def handle_multipart_put(self, req, start_response):
"""
Will handle the PUT of a SLO manifest.
Heads every object in manifest to check if is valid and if so will
save a manifest generated from the user input. Uses WSGIContext to
call self.app and start_response and returns a WSGI iterator.
:params req: a swob.Request with an obj in path
:raises: HttpException on errors
"""
try:
vrs, account, container, obj = req.split_path(1, 4, True)
except ValueError:
return self.app(req.environ, start_response)
if req.content_length > self.max_manifest_size:
raise HTTPRequestEntityTooLarge(
"Manifest File > %d bytes" % self.max_manifest_size)
if req.headers.get('X-Copy-From'):
raise HTTPMethodNotAllowed(
'Multipart Manifest PUTs cannot be Copy requests')
if req.content_length is None and \
req.headers.get('transfer-encoding', '').lower() != 'chunked':
raise HTTPLengthRequired(request=req)
parsed_data = parse_input(req.body_file.read(self.max_manifest_size))
problem_segments = []
if len(parsed_data) > self.max_manifest_segments:
raise HTTPRequestEntityTooLarge(
                'Number of segments must be <= %d' % self.max_manifest_segments)
total_size = 0
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if not out_content_type:
out_content_type = 'text/plain'
data_for_storage = []
slo_etag = md5()
for index, seg_dict in enumerate(parsed_data):
obj_name = seg_dict['path']
if isinstance(obj_name, unicode):
obj_name = obj_name.encode('utf-8')
obj_path = '/'.join(['', vrs, account, obj_name.lstrip('/')])
try:
seg_size = int(seg_dict['size_bytes'])
except (ValueError, TypeError):
raise HTTPBadRequest('Invalid Manifest File')
if seg_size < self.min_segment_size and \
(index == 0 or index < len(parsed_data) - 1):
raise HTTPBadRequest(
'Each segment, except the last, must be larger than '
'%d bytes.' % self.min_segment_size)
new_env = req.environ.copy()
new_env['PATH_INFO'] = obj_path
new_env['REQUEST_METHOD'] = 'HEAD'
new_env['swift.source'] = 'SLO'
del(new_env['wsgi.input'])
del(new_env['QUERY_STRING'])
new_env['CONTENT_LENGTH'] = 0
new_env['HTTP_USER_AGENT'] = \
'%s MultipartPUT' % req.environ.get('HTTP_USER_AGENT')
            headers = {'fingerprint': seg_dict['etag']}
            head_seg_resp = \
                Request.blank(obj_path, headers=headers,
                              environ=new_env).get_response(self.app)
if head_seg_resp.is_success:
total_size += seg_size
if seg_size != head_seg_resp.content_length:
problem_segments.append([quote(obj_name), 'Size Mismatch'])
if seg_dict['etag'] == head_seg_resp.etag:
slo_etag.update(seg_dict['etag'])
else:
problem_segments.append([quote(obj_name), 'Etag Mismatch'])
if head_seg_resp.last_modified:
last_modified = head_seg_resp.last_modified
else:
# shouldn't happen
last_modified = datetime.now()
last_modified_formatted = \
last_modified.strftime('%Y-%m-%dT%H:%M:%S.%f')
seg_data = {'name': '/' + seg_dict['path'].lstrip('/'),
'bytes': seg_size,
'hash': seg_dict['etag'],
'content_type': head_seg_resp.content_type,
'last_modified': last_modified_formatted}
if config_true_value(
head_seg_resp.headers.get('X-Static-Large-Object')):
seg_data['sub_slo'] = True
data_for_storage.append(seg_data)
else:
problem_segments.append([quote(obj_name),
head_seg_resp.status])
if problem_segments:
resp_body = get_response_body(
out_content_type, {}, problem_segments)
raise HTTPBadRequest(resp_body, content_type=out_content_type)
env = req.environ
if not env.get('CONTENT_TYPE'):
guessed_type, _junk = mimetypes.guess_type(req.path_info)
env['CONTENT_TYPE'] = guessed_type or 'application/octet-stream'
env['swift.content_type_overriden'] = True
env['CONTENT_TYPE'] += ";swift_bytes=%d" % total_size
env['HTTP_X_STATIC_LARGE_OBJECT'] = 'True'
json_data = json.dumps(data_for_storage)
env['CONTENT_LENGTH'] = str(len(json_data))
env['wsgi.input'] = StringIO(json_data)
slo_context = SloContext(self, slo_etag)
return slo_context.handle_slo_put(req, start_response)
def get_segments_to_delete_iter(self, req):
"""
A generator function to be used to delete all the segments and
sub-segments referenced in a manifest.
:params req: a swob.Request with an SLO manifest in path
:raises HTTPPreconditionFailed: on invalid UTF8 in request path
:raises HTTPBadRequest: on too many buffered sub segments and
on invalid SLO manifest path
"""
if not check_utf8(req.path_info):
raise HTTPPreconditionFailed(
request=req, body='Invalid UTF8 or contains NULL')
try:
vrs, account, container, obj = req.split_path(4, 4, True)
except ValueError:
            raise HTTPBadRequest('Invalid SLO manifest path')
segments = [{
'sub_slo': True,
'name': ('/%s/%s' % (container, obj)).decode('utf-8')}]
while segments:
if len(segments) > MAX_BUFFERED_SLO_SEGMENTS:
raise HTTPBadRequest(
'Too many buffered slo segments to delete.')
seg_data = segments.pop(0)
if seg_data.get('sub_slo'):
try:
segments.extend(
self.get_slo_segments(seg_data['name'], req))
except HTTPException as err:
# allow bulk delete response to report errors
seg_data['error'] = {'code': err.status_int,
'message': err.body}
# add manifest back to be deleted after segments
seg_data['sub_slo'] = False
segments.append(seg_data)
else:
seg_data['name'] = seg_data['name'].encode('utf-8')
yield seg_data
def get_slo_segments(self, obj_name, req):
"""
Performs a swob.Request and returns the SLO manifest's segments.
:raises HTTPServerError: on unable to load obj_name or
on unable to load the SLO manifest data.
:raises HTTPBadRequest: on not an SLO manifest<|fim▁hole|> """
vrs, account, _junk = req.split_path(2, 3, True)
new_env = req.environ.copy()
new_env['REQUEST_METHOD'] = 'GET'
del(new_env['wsgi.input'])
new_env['QUERY_STRING'] = 'multipart-manifest=get'
new_env['CONTENT_LENGTH'] = 0
new_env['HTTP_USER_AGENT'] = \
'%s MultipartDELETE' % new_env.get('HTTP_USER_AGENT')
new_env['swift.source'] = 'SLO'
new_env['PATH_INFO'] = (
'/%s/%s/%s' % (
vrs, account,
obj_name.lstrip('/'))).encode('utf-8')
resp = Request.blank('', new_env).get_response(self.app)
if resp.is_success:
if config_true_value(resp.headers.get('X-Static-Large-Object')):
try:
return json.loads(resp.body)
except ValueError:
raise HTTPServerError('Unable to load SLO manifest')
else:
raise HTTPBadRequest('Not an SLO manifest')
elif resp.status_int == HTTP_NOT_FOUND:
raise HTTPNotFound('SLO manifest not found')
elif resp.status_int == HTTP_UNAUTHORIZED:
raise HTTPUnauthorized('401 Unauthorized')
else:
raise HTTPServerError('Unable to load SLO manifest or segment.')
def handle_multipart_delete(self, req):
"""
Will delete all the segments in the SLO manifest and then, if
successful, will delete the manifest file.
:params req: a swob.Request with an obj in path
:returns: swob.Response whose app_iter set to Bulk.handle_delete_iter
"""
resp = HTTPOk(request=req)
out_content_type = req.accept.best_match(ACCEPTABLE_FORMATS)
if out_content_type:
resp.content_type = out_content_type
resp.app_iter = self.bulk_deleter.handle_delete_iter(
req, objs_to_delete=self.get_segments_to_delete_iter(req),
user_agent='MultipartDELETE', swift_source='SLO',
out_content_type=out_content_type)
return resp
def __call__(self, env, start_response):
"""
WSGI entry point
"""
req = Request(env)
try:
vrs, account, container, obj = req.split_path(1, 4, True)
except ValueError:
return self.app(env, start_response)
try:
if obj:
if req.method == 'PUT' and \
req.params.get('multipart-manifest') == 'put':
return self.handle_multipart_put(req, start_response)
if req.method == 'DELETE' and \
req.params.get('multipart-manifest') == 'delete':
return self.handle_multipart_delete(req)(env,
start_response)
if 'X-Static-Large-Object' in req.headers:
raise HTTPBadRequest(
request=req,
body='X-Static-Large-Object is a reserved header. '
'To create a static large object add query param '
'multipart-manifest=put.')
except HTTPException as err_resp:
return err_resp(env, start_response)
return self.app(env, start_response)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def slo_filter(app):
return StaticLargeObject(app, conf)
return slo_filter<|fim▁end|> | :raises HTTPNotFound: on SLO manifest not found
:returns: SLO manifest's segments |
<|file_name|>espree.js<|end_file_name|><|fim▁begin|>/**
* @fileoverview Main Espree file that converts Acorn into Esprima output.
*
* This file contains code from the following MIT-licensed projects:
* 1. Acorn
* 2. Babylon
* 3. Babel-ESLint
*
* This file also contains code from Esprima, which is BSD licensed.
*
* Acorn is Copyright 2012-2015 Acorn Contributors (https://github.com/marijnh/acorn/blob/master/AUTHORS)
* Babylon is Copyright 2014-2015 various contributors (https://github.com/babel/babel/blob/master/packages/babylon/AUTHORS)
* Babel-ESLint is Copyright 2014-2015 Sebastian McKenzie <[email protected]><|fim▁hole|> *
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Esprima is Copyright (c) jQuery Foundation, Inc. and Contributors, All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* eslint no-undefined:0, no-use-before-define: 0 */
import * as acorn from "acorn";
import jsx from "acorn-jsx";
import espree from "./lib/espree.js";
import espreeVersion from "./lib/version.js";
import * as visitorKeys from "eslint-visitor-keys";
import { getLatestEcmaVersion, getSupportedEcmaVersions } from "./lib/options.js";
// To initialize lazily.
const parsers = {
_regular: null,
_jsx: null,
get regular() {
if (this._regular === null) {
this._regular = acorn.Parser.extend(espree());
}
return this._regular;
},
get jsx() {
if (this._jsx === null) {
this._jsx = acorn.Parser.extend(jsx(), espree());
}
return this._jsx;
},
get(options) {
const useJsx = Boolean(
options &&
options.ecmaFeatures &&
options.ecmaFeatures.jsx
);
return useJsx ? this.jsx : this.regular;
}
};
//------------------------------------------------------------------------------
// Tokenizer
//------------------------------------------------------------------------------
/**
* Tokenizes the given code.
* @param {string} code The code to tokenize.
* @param {Object} options Options defining how to tokenize.
* @returns {Token[]} An array of tokens.
* @throws {SyntaxError} If the input code is invalid.
* @private
*/
export function tokenize(code, options) {
const Parser = parsers.get(options);
// Ensure to collect tokens.
if (!options || options.tokens !== true) {
options = Object.assign({}, options, { tokens: true }); // eslint-disable-line no-param-reassign
}
return new Parser(options, code).tokenize();
}
//------------------------------------------------------------------------------
// Parser
//------------------------------------------------------------------------------
/**
* Parses the given code.
* @param {string} code The code to tokenize.
* @param {Object} options Options defining how to tokenize.
* @returns {ASTNode} The "Program" AST node.
* @throws {SyntaxError} If the input code is invalid.
*/
export function parse(code, options) {
const Parser = parsers.get(options);
return new Parser(options, code).parse();
}
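
// Illustrative usage (option values are examples only):
//   const ast = parse("let x = 1;", { ecmaVersion: 2020 });
//   const tokens = tokenize("let x = 1;", { ecmaVersion: 2020 });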
//------------------------------------------------------------------------------
// Public
//------------------------------------------------------------------------------
export const version = espreeVersion;
/* istanbul ignore next */
export const VisitorKeys = (function() {
return visitorKeys.KEYS;
}());
// Derive node types from VisitorKeys
/* istanbul ignore next */
export const Syntax = (function() {
let name,
types = {};
if (typeof Object.create === "function") {
types = Object.create(null);
}
for (name in VisitorKeys) {
if (Object.hasOwnProperty.call(VisitorKeys, name)) {
types[name] = name;
}
}
if (typeof Object.freeze === "function") {
Object.freeze(types);
}
return types;
}());
export const latestEcmaVersion = getLatestEcmaVersion();
export const supportedEcmaVersions = getSupportedEcmaVersions();<|fim▁end|> | |
<|file_name|>CommentsAdapter.java<|end_file_name|><|fim▁begin|>package org.shikimori.library.adapters;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import com.nostra13.universalimageloader.core.ImageLoader;
import org.shikimori.library.R;
import org.shikimori.library.adapters.base.BaseListAdapter;
import org.shikimori.library.adapters.holder.SettingsHolder;
import org.shikimori.library.objects.ActionQuote;
import org.shikimori.library.objects.one.ItemCommentsShiki;
import org.shikimori.library.tool.ProjectTool;
import org.shikimori.library.tool.hs;
import java.util.Date;
import java.util.List;
/**
* Created by LeshiyGS on 1.04.2015.
*/
public class CommentsAdapter extends BaseListAdapter<ItemCommentsShiki, SettingsHolder> implements View.OnClickListener {
private View.OnClickListener clickListener;
public CommentsAdapter(Context context, List<ItemCommentsShiki> list) {
super(context, list, R.layout.item_shiki_comments_list, SettingsHolder.class);
}
String formatDate(long date, String format) {
return hs.getStringDate(format, new Date(date));
}
@Override
public void setListeners(SettingsHolder holder) {
super.setListeners(holder);
if (clickListener != null)
holder.ivSettings.setOnClickListener(clickListener);
holder.ivPoster.setOnClickListener(this);
}
@Override
public SettingsHolder getViewHolder(View v) {
SettingsHolder hol = super.getViewHolder(v);
hol.ivSettings = find(v, R.id.icSettings);
return hol;
}
<|fim▁hole|> @Override
public void setValues(SettingsHolder holder, ItemCommentsShiki item, int position) {
holder.tvName.setText(item.nickname);
Date date = hs.getDateFromString("yyyy-MM-dd'T'HH:mm:ss.SSSZ", item.created_at);
String sdate = formatDate(date.getTime(), "dd MMMM yyyy HH:mm");
holder.tvDate.setText(sdate);
// HtmlText text = new HtmlText(getContext(), false);
// text.setText(item.html_body, holder.tvText);
holder.llBodyHtml.removeAllViews();
holder.llBodyHtml.setTag(R.id.icQuote, new ActionQuote(item.user_id, item.nickname, item.id));
// initDescription(item, holder.llBodyHtml);
if (item.parsedContent.getParent() != null)
((ViewGroup) item.parsedContent.getParent()).removeAllViews();
holder.llBodyHtml.addView(item.parsedContent);
holder.ivSettings.setTag(position);
// очищаем картинку перед загрузкой чтобы она при прокрутке не мигала
holder.ivPoster.setImageDrawable(null);
holder.ivPoster.setTag(position);
ImageLoader.getInstance().displayImage(item.image_x160, holder.ivPoster);
}
@Override
public void onClick(View v) {
        // the poster image was clicked: open that user's profile
if (v.getId() == R.id.ivPoster) {
ItemCommentsShiki item = getItem((int) v.getTag());
ProjectTool.goToUser(getContext(), item.user_id);
}
}
public ItemCommentsShiki getItemById(String id){
for (int i = 0; i < getCount(); i++) {
ItemCommentsShiki item = getItem(i);
if(item.id.equals(id))
return item;
}
return null;
}
}<|fim▁end|> | public void setOnSettingsListener(View.OnClickListener clickListener) {
this.clickListener = clickListener;
}
|
<|file_name|>StatsDateLib.py<|end_file_name|><|fim▁begin|>"""
##############################################################################
##
##
## @name : StatsDateLib.py
##
## @license : MetPX Copyright (C) 2004-2006 Environment Canada
## MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file
## named COPYING in the root of the source directory tree.
##
## @author : Nicholas Lemay
##
## @since : 29-05-2006 , last updated on 08-04-2008
##
##
##  @summary: Contains many useful date manipulation methods which are
## to be used throughout the stats library.
##
##############################################################################
"""
import time, sys, os
sys.path.insert(1, os.path.dirname( os.path.abspath(__file__) ) + '/../../')
from pxStats.lib.StatsPaths import StatsPaths
from pxStats.lib.LanguageTools import LanguageTools
CURRENT_MODULE_ABS_PATH = os.path.abspath(__file__).replace( ".pyc", ".py" )
"""
- Small function that adds pxLib to sys path.
"""
STATSPATHS = StatsPaths( )
STATSPATHS.setPaths( LanguageTools.getMainApplicationLanguage() )
sys.path.append( STATSPATHS.PXLIB )
"""
Globals
"""
MINUTE = 60
HOUR = 60 * MINUTE<|fim▁hole|>
class StatsDateLib:
global _
_ = LanguageTools.getTranslatorForModule( CURRENT_MODULE_ABS_PATH )
#Constants can be removed once we add methods to the datelibrary and include it
MINUTE = 60
HOUR = 60 * MINUTE
DAY = 24 * HOUR
MINUTES_PER_DAY = 24*60
LIST_OF_MONTHS_3LETTER_FORMAT = [ _("Jan"), _("Feb"), _("Mar"), _("Apr"), _("May"), _("Jun"), _("Jul"), _("Aug"), _("Sep"), _("Oct"), _("Nov"), _("Dec") ]
LIST_OF_MONTHS=[ _("January"), _("February"), _("March"), _("April"), _("May"), _("June"), _("July"), _("August"), _("September"), _("October"), _("November"), _("December") ]
def setLanguage( language ):
"""
@summary : sets specified language as the
language used for translations
throughout the entire class.
"""
if language in LanguageTools.getSupportedLanguages() :
global _
_ = LanguageTools.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, language )
setLanguage = staticmethod( setLanguage )
def addMonthsToIsoDate( isodate, monthstoAdd ):
"""
@summary : Add a certain number of months to a date.
@param isodate: Date in iso format to which to add months.
@param monthstoAdd: Number of months to add.( 0 or bigger)
@return : The resulting date. Will return the date received as a parameter
if error occurs
"""
monthsWith30Days = [4,6,9,11]
validDate = True
resultingDate = isodate
try :
StatsDateLib.getSecondsSinceEpoch( isodate )
except:
validDate = False
if validDate == True :
dayFromDate = int(isodate.split( "-" )[2].split( " " )[0])
monthFromDate = int(isodate.split( "-" )[1])
yearFromDate = int(isodate.split( "-" )[0])
hourFromDate = isodate.split( " " )[1]
yearsToAdd , resultingMonth = divmod( ( monthFromDate + monthstoAdd ), 12 )
if resultingMonth == 0:
resultingMonth = 12
yearsToAdd = yearsToAdd -1
resultingYear = yearFromDate + yearsToAdd
if resultingMonth in monthsWith30Days and dayFromDate == 31 :
resultingDay = 30
elif resultingMonth == 2 and (dayFromDate == 30 or dayFromDate == 31):
if ( ( resultingYear%4 == 0 and resultingYear%100 !=0 ) or resultingYear%400 == 0 ):
resultingDay = 29
else:
resultingDay = 28
else:
resultingDay = dayFromDate
if len(str(resultingDay)) < 2:
resultingDay = '0' + str(resultingDay)
if len(str(resultingMonth)) < 2:
resultingMonth = '0' + str(resultingMonth)
resultingDate = str( resultingYear ) + '-' + str( resultingMonth ) + '-' + str( resultingDay ) + ' ' + str( hourFromDate )
return resultingDate
addMonthsToIsoDate = staticmethod( addMonthsToIsoDate )
def getCurrentTimeInIsoformat():
"""
@summary : Returns current system time in iso format.
@return : Returns current system time in iso format.
"""
currentTimeInEpochFormat = time.time()
return StatsDateLib.getIsoFromEpoch( currentTimeInEpochFormat )
getCurrentTimeInIsoformat = staticmethod( getCurrentTimeInIsoformat )
def isValidIsoDate( isoDate ):
"""
@summary : Verifies whether or not the received
date is a valid iso format date.
@return : Returns whether or not the received
date is a valid iso format date.
"""
isValid = True
try:
StatsDateLib.getSecondsSinceEpoch( isoDate )
except:
isValid = False
return isValid
isValidIsoDate = staticmethod(isValidIsoDate)
def getYearMonthDayInStrfTime( timeInEpochFormat ):
"""
@summary : Return the year month day in strftime
based on an epoch date.
@param timeInEpochFormat : Time, in seconds since epoch format
from which you want to get the year month day.
@return : a three item tuple containing the following :
- year
- month
- day
"""
global _
months = { "January": _("January"), "February": _("February"), "March":_("March"), "April":_("April"),\
"May":_("May"), "June":_("June"), "July":_("July"), "August":_("August"), "September":_("September"),\
"October":_("October"), "November":_("November"), "December":_("December") }
year = time.strftime( '%Y', time.gmtime(timeInEpochFormat) )
month = time.strftime( '%B', time.gmtime(timeInEpochFormat) )
day = time.strftime( '%d', time.gmtime(timeInEpochFormat) )
month = months[month]
return year, month, day
getYearMonthDayInStrfTime = staticmethod(getYearMonthDayInStrfTime)
def getDayOfTheWeek( timeInEpochFormat ):
"""
@summary : Return the year month day in strftime
based on an epoch date.
@Note : The returned day of the week will be written in the language
that has currently been set.
@param : Time, in seconds since epoch format
from which you want to get the day of the week.
"""
global _
days = { "Mon": _("Mon"), "Tue": _("Tue"), "Wed": _("Wed"), "Thu": _("Thu"),\
"Fri": _("Fri"),"Sat": _("Sat"),"Sun": _("Sun"), "Monday": _("Monday"),\
"Tuesday": _("Tuesday"), "Wednesday": _("Wednesday"), "Thursday": _("Thursday"),\
"Friday": _("Friday"),"Saturday": _("Saturday"),"Sunday":_("Sunday") }
day = time.strftime( "%a", time.gmtime( timeInEpochFormat ) )
day = days[day]
return day
getDayOfTheWeek = staticmethod( getDayOfTheWeek )
def getStartEndFromPreviousDay( currentTime, nbDays = 1 ):
"""
Returns the start and end time of
the day prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
end = StatsDateLib.getIsoTodaysMidnight( currentTime )
yesterday = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) - (24*60*60) )
start = StatsDateLib.getIsoTodaysMidnight( yesterday )
return start, end
getStartEndFromPreviousDay = staticmethod( getStartEndFromPreviousDay )
def getStartEndFromPreviousWeek( currentTime, nbWeeks = 1 ):
"""
Returns the start and end time of
the week prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
currentTimeInSecs = StatsDateLib.getSecondsSinceEpoch( currentTime )
weekDay = int(time.strftime( "%w", time.gmtime( currentTimeInSecs ) ))
endInSecs = currentTimeInSecs - ( weekDay*24*60*60 )
startInSecs = endInSecs - ( 7*24*60*60 )
start = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( startInSecs ) )
end = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( endInSecs ) )
return start, end
getStartEndFromPreviousWeek = staticmethod( getStartEndFromPreviousWeek )
def getStartEndFromPreviousMonth( currentTime ):
"""
Returns the start and end time of
the month prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
date = currentTime.split()[0]
splitDate = date.split("-")
end = splitDate[0] + "-" + splitDate[1] + "-" + "01 00:00:00"
splitTime = currentTime.split()
date = splitTime[0]
splitDate = date.split("-")
if int( splitDate[1] ) != 1 :
month = int( splitDate[1] ) - 1
if month < 10 :
month = "0" + str( month )
splitDate[1] = month
else:
year = int( splitDate[0] ) - 1
splitDate[0] = str(year)
splitDate[1] = "01"
firstDayOfPreviousMonth = str( splitDate[0] ) + "-" + str( splitDate[1] ) + "-01"
start = firstDayOfPreviousMonth + " 00:00:00"
return start, end
getStartEndFromPreviousMonth = staticmethod( getStartEndFromPreviousMonth )
def getStartEndFromPreviousYear( currentTime ):
"""
Returns the start and end time of
the day prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
year = currentTime.split("-")[0]
year = str( int(year)-1 )
start = year + "-01-01 00:00:00"
year = currentTime.split("-")[0]
end = year + "-01-01 00:00:00"
return start, end
getStartEndFromPreviousYear = staticmethod( getStartEndFromPreviousYear )
def getStartEndFromCurrentDay( currentTime ):
"""
Returns the start and end time of
the current day.
currentTime must be in iso format.
start and end are returned in iso format.
"""
start = StatsDateLib.getIsoTodaysMidnight( currentTime )
tomorrow = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) + 24*60*60 )
end = StatsDateLib.getIsoTodaysMidnight( tomorrow )
return start, end
getStartEndFromCurrentDay = staticmethod( getStartEndFromCurrentDay )
def getStartEndFromCurrentWeek( currentTime ):
"""
Returns the start and end time of
the currentweek.
currentTime must be in iso format.
start and end are returned in iso format.
"""
currentTimeInSecs = StatsDateLib.getSecondsSinceEpoch( currentTime )
weekDay = int(time.strftime( "%w", time.gmtime( currentTimeInSecs ) ))
endInSecs = currentTimeInSecs + ( ( 7 - weekDay)*24*60*60 )
end = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( endInSecs ) )
startInSecs = currentTimeInSecs - ( weekDay*24*60*60 )
start = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( startInSecs ) )
return start, end
getStartEndFromCurrentWeek = staticmethod( getStartEndFromCurrentWeek )
def getStartEndFromCurrentMonth( currentTime ):
"""
Returns the start and end time of
the currentDay.
currentTime must be in iso format.
start and end are returned in iso format.
"""
splitTime = currentTime.split()
date = splitTime[0]
splitDate = date.split( "-" )
start = splitDate[0] + "-" + splitDate[1] + "-01 00:00:00"
if int( splitDate[1] ) != 12 :
month = int( splitDate[1] ) + 1
if month < 10:
month = "0" + str( month )
splitDate[1] = month
else:
year = int( splitDate[0] ) + 1
splitDate[0] = str(year)
splitDate[1] = "01"
firstDayOfMonth = str( splitDate[0] ) + "-" + str( splitDate[1] ) + "-01"
end = firstDayOfMonth + " 00:00:00"
return start, end
getStartEndFromCurrentMonth = staticmethod( getStartEndFromCurrentMonth )
def getStartEndFromCurrentYear( currentTime ):
"""
Returns the start and end time of
the currentDay.
currentTime must be in iso format.
start and end are returned in iso format.
"""
year = currentTime.split("-")[0]
start = year + "-01-01 00:00:00"
year = currentTime.split("-")[0]
year = str( int(year)+1 )
end = year + "-01-01 00:00:00"
return start, end
getStartEndFromCurrentYear = staticmethod( getStartEndFromCurrentYear )
def getHoursFromIso( iso = '2005-08-30 20:06:59' ):
"""
Returns the hours field from a iso format date.
"""
iso = iso.split(" ")[1]
hours, minutes, seconds = iso.split(':')
return hours
getHoursFromIso = staticmethod( getHoursFromIso )
def getMinutesFromIso( iso = '2005-08-30 20:06:59' ):
"""
Returns the minute field from a iso format date.
"""
hours, minutes, seconds = iso.split(':')
return minutes
getMinutesFromIso = staticmethod( getMinutesFromIso )
def rewindXDays( date = '2005-08-30 20:06:59' , x = 0 ):
"""
        Takes an iso format date and subtracts the number
of days specified by x.
"""
seconds = StatsDateLib.getSecondsSinceEpoch( date )
seconds = seconds - ( x * 24*60*60 )
rewindedDate = StatsDateLib.getIsoFromEpoch( seconds )
return rewindedDate
rewindXDays = staticmethod( rewindXDays )
def getNumberOfDaysBetween( date1 = '2005-08-30 20:06:59', date2 = '2005-08-30 20:06:59' ):
"""
Takes two iso format dates and returns the number of days between them
"""
seconds1 = StatsDateLib.getSecondsSinceEpoch( date1 ) - StatsDateLib.getSecondsSinceStartOfDay( date1 )
seconds2 = StatsDateLib.getSecondsSinceEpoch( date2 ) - StatsDateLib.getSecondsSinceStartOfDay( date2 )
numberOfDays = abs( float( (seconds1-seconds2) /( 24*60*60 ) ) )
numberOfDays = int( numberOfDays )
return numberOfDays
getNumberOfDaysBetween = staticmethod( getNumberOfDaysBetween )
def areDifferentDays( date1 = '2005-08-30 20:06:59', date2 = '2005-08-30 20:06:59' ):
"""
Takes two iso format dates and returns whether or not both date are on different days.
"""
day1 = date1.split( " " )[0]
day2 = date2.split( " " )[0]
return day1 != day2
areDifferentDays = staticmethod( areDifferentDays )
def getSecondsSinceEpoch(date='2005-08-30 20:06:59', format='%Y-%m-%d %H:%M:%S'):
try:
timeStruct = time.strptime(date, format)
except:
print "date tried : %s" %date
return time.mktime(timeStruct)
getSecondsSinceEpoch = staticmethod( getSecondsSinceEpoch )
def getIsoLastMinuteOfDay( iso = '2005-08-30 20:06:59' ):
"""
Takes an iso format date like 2005-08-30 20:06:59.
Replaces hour, minutes and seconds by last minute of day.
Returns 2005-08-30 23:59:59.
"""
iso = iso.split( " " )
iso = iso[0]
iso = iso + " 23:59:59"
return iso
getIsoLastMinuteOfDay = staticmethod( getIsoLastMinuteOfDay )
def getIsoTodaysMidnight( iso ):
"""
Takes an iso format date like 2005-08-30 20:06:59.
Replaces hour, minutes and seconds by 00.
Returns 2005-08-30 00:00:00.
"""
iso = iso.split( " " )
iso = iso[0]
iso = iso + " 00:00:00"
return iso
getIsoTodaysMidnight = staticmethod( getIsoTodaysMidnight )
def getIsoWithRoundedHours( iso ):
"""
Takes an iso format date like 2005-08-30 20:06:59.
Replaces minutes and seconds by 00.
Returns 2005-08-30 20:00:00.
"""
iso = iso.split( ":" )
iso = iso[0]
iso = iso + ":00:00"
return iso
getIsoWithRoundedHours = staticmethod( getIsoWithRoundedHours )
def getIsoWithRoundedSeconds( iso ):
"""
        Takes an iso format date like 2005-08-30 20:06:59.
        Replaces the seconds field by 00, thus returning
        2005-08-30 20:06:00.
"""
#print "iso before modif : %s" %iso
iso = iso.split( ":" )
iso = iso[0] + ":" + iso[1] + ":00"
return iso
getIsoWithRoundedSeconds = staticmethod( getIsoWithRoundedSeconds )
def getSeconds(string):
# Should be used with string of following format: hh:mm:ss
hours, minutes, seconds = string.split(':')
return int(hours) * HOUR + int(minutes) * MINUTE + int(seconds)
getSeconds = staticmethod( getSeconds )
def getHoursSinceStartOfDay( date='2005-08-30 20:06:59' ):
"""
This method takes an iso style date and returns the number
of hours that have passed since 00:00:00 of the same day.
"""
try:
splitDate = date.split( " " )
splitDate = splitDate[1]
splitDate = splitDate.split( ":" )
hoursSinceStartOfDay = int( splitDate[0] )
return hoursSinceStartOfDay
except:
print "Cannot convert %s in getMinutesSinceStartOfDay. " %date
sys.exit()
getHoursSinceStartOfDay = staticmethod(getHoursSinceStartOfDay)
def isoDateDashed( date = "20060613162653" ):
"""
This method takes in parameter a non dashed iso date and
returns the date dashed and the time with : as seperator.
"""
dashedDate = '%Y-%m-%d %H:%M:%S' %date
return dashedDate
isoDateDashed = staticmethod( isoDateDashed )
def getMinutesSinceStartOfDay( date='2005-08-30 20:06:59' ):
"""
This method receives an iso date as parameter and returns the number of minutes
        which have passed since the start of that day.
"""
try:
splitDate = date.split( " " )
splitDate = splitDate[1]
splitDate = splitDate.split( ":" )
minutesSinceStartOfDay = int( splitDate[0] ) * 60 + int( splitDate[1] )
return minutesSinceStartOfDay
except:
print "Cannot convert %s in getMinutesSinceStartOfDay. " %date
sys.exit()
getMinutesSinceStartOfDay = staticmethod( getMinutesSinceStartOfDay )
def getSecondsSinceStartOfDay( date='2005-08-30 20:06:59' ):
"""
This method receives an iso date as parameter and returns the number of seconds
        which have passed since the start of that day.
"""
try:
splitDate = date.split( " " )
splitDate = splitDate[1]
splitDate = splitDate.split( ":" )
            secondsSinceStartOfDay = ( int( splitDate[0] ) * 60 * 60 ) + ( int( splitDate[1] ) * 60 ) + int( splitDate[2] )
            return secondsSinceStartOfDay
        except:
            print "Cannot convert %s in getSecondsSinceStartOfDay. " %date
sys.exit()
getSecondsSinceStartOfDay = staticmethod( getSecondsSinceStartOfDay )
def getNumericMonthFromString( month ) :
"""
This method takes a month in the string format and returns the month.
Returns 00 if month is unknown.
"""
value = '00'
if month == 'Jan' :
value = '01'
elif month == 'Feb' :
value = '02'
elif month == 'Mar' :
value = '03'
elif month == 'Apr' :
value = '04'
elif month == 'May' :
value = '05'
elif month == 'Jun' :
value = '06'
elif month == 'Jul' :
value = '07'
elif month == 'Aug' :
value = '08'
elif month == 'Sep' :
value = '09'
elif month == 'Oct' :
value = '10'
elif month == 'Nov' :
value = '11'
elif month == 'Dec' :
value = '12'
return value
getNumericMonthFromString = staticmethod( getNumericMonthFromString )
def getIsoFromEpoch( seconds ):
"""
        Takes a number of seconds built with getSecondsSinceEpoch
        and returns a date in the format '2005-08-30 20:06:59'.
        time.ctime() yields strings like 'Thu May 18 13:00:00 2006',
        which are reparsed below.
"""
timeString = time.ctime( seconds )
        timeString = timeString.replace( "  ", " " ) # in special cases there may be two spaces
splitTimeString = timeString.split( " " )
if int(splitTimeString[2]) < 10 :
splitTimeString[2] = "0" + splitTimeString[2]
originalDate = splitTimeString[4] + '-' + StatsDateLib.getNumericMonthFromString ( splitTimeString[1] ) + '-' + splitTimeString[2] + ' ' + splitTimeString[3]
return originalDate
getIsoFromEpoch = staticmethod ( getIsoFromEpoch )
def getOriginalHour( seconds ):
"""
        Takes a number of seconds built with getSecondsSinceEpoch
        and returns the hour field of the corresponding date
        (e.g. '20' for '2005-08-30 20:06:59').
"""
timeString = time.ctime( seconds )
splitTimeString = timeString.split( " " )
originalHour = splitTimeString[3]
originalHour = originalHour.split( ":" )
originalHour = originalHour[0]
return originalHour
getOriginalHour = staticmethod ( getOriginalHour )
def getSeparators( width=DAY, interval = 20*MINUTE ):
separators = []
for value in range( interval, width+interval, interval ):
separators.append( value )
return separators
getSeparators = staticmethod( getSeparators )
def getSeparatorsWithStartTime( startTime = "2006-06-06 00:00:00", width=DAY, interval=60*MINUTE ):
"""
This method works exactly like getSeparators but it uses a start time to set
the separators
"""
separators = []
startTime = StatsDateLib.getSecondsSinceEpoch(startTime)
if interval <= width :
for value in range( int(interval+startTime), int( width+interval+startTime ), int( interval ) ):
separators.append( StatsDateLib.getIsoFromEpoch(value) )
if separators[ len(separators)-1 ] > width+startTime :
separators[ len(separators)-1 ] = StatsDateLib.getIsoFromEpoch(width+startTime)
return separators
getSeparatorsWithStartTime = staticmethod( getSeparatorsWithStartTime )
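    # Illustrative example (derived from the code above): with
    # startTime "2006-06-06 00:00:00", width=DAY and interval=60*MINUTE,
    # the separators run hourly from "2006-06-06 01:00:00" up to
    # "2006-06-07 00:00:00".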
def getStartEndInIsoFormat( timeOfTheCall, span, spanType = "", fixedCurrent = False, fixedPrevious = False ):
"""
@summary : Calculates the start and end of a timespan based on specified parameters.
@param timeOfTheCall: Time at which these graphics were requested. In format.
@param spanOfTheGraphics: Span in hours of the graphics.
@param graphicType : daily | weekly | monthly | yearly
@param fixedCurrent: Whether to use the fixedCurrent day, week month or year.
@param fixedPrevious: Whether to use the fixedPrevious day week month or year.
"""
global _
#TODO :fixStartEnd method???
if fixedPrevious :
if spanType == _("daily") :
start, end = StatsDateLib.getStartEndFromPreviousDay( timeOfTheCall )
elif spanType == _("weekly"):
start, end = StatsDateLib.getStartEndFromPreviousWeek( timeOfTheCall )
elif spanType == _("monthly"):
start, end = StatsDateLib.getStartEndFromPreviousMonth( timeOfTheCall )
elif spanType == _("yearly") :
start, end = StatsDateLib.getStartEndFromPreviousYear( timeOfTheCall )
elif fixedCurrent:
if spanType == _("daily") :
start, end = StatsDateLib.getStartEndFromCurrentDay( timeOfTheCall )
elif spanType ==_("weekly"):
start, end = StatsDateLib.getStartEndFromCurrentWeek( timeOfTheCall )
elif spanType == _("monthly"):
start, end = StatsDateLib.getStartEndFromCurrentMonth( timeOfTheCall )
elif spanType == _("yearly"):
start, end = StatsDateLib.getStartEndFromCurrentYear( timeOfTheCall )
else:
if spanType == _("daily") :
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - StatsDateLib.DAY )
elif spanType == _("weekly"):
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - ( 7 * StatsDateLib.DAY ) )
elif spanType == _("monthly"):
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - ( 30 * StatsDateLib.DAY ) )
elif spanType == _("yearly") :
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - ( 365 * StatsDateLib.DAY ) )
else:
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - span*60*60 )
end = timeOfTheCall
return start, end
getStartEndInIsoFormat = staticmethod( getStartEndInIsoFormat )
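    # Illustrative example (values are made up): with no fixed flags,
    #   getStartEndInIsoFormat("2007-06-15 12:00:00", 24, "daily")
    # returns ("2007-06-14 12:00:00", "2007-06-15 12:00:00").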
if __name__ == "__main__":
print ""
print ""
print "getIsoFromEpoch test #1 : "
print ""
print "StatsDateLib.getIsoFromEpoch(0) : "
print "Expected result : %s " %("1970-01-01 00:00:00")
print "Obtained result : %s " %StatsDateLib.getIsoFromEpoch(0)
if not StatsDateLib.getIsoFromEpoch(0) == "1970-01-01 00:00:00" : raise AssertionError("getIsoFromEpoch test #1 is broken.")
print ""
print ""
print "getNumberOfDaysBetween test #1 : "
print ""
print "StatsDateLib.getNumberOfDaysBetween( '2005-08-31 00:00:01','2005-08-30 23:59:59' ) : "
print "Expected result : %s " %("1")
print "Obtained result : %s " %StatsDateLib.getNumberOfDaysBetween( '2005-08-31 00:00:01','2005-08-30 23:59:59' )
if not StatsDateLib.getNumberOfDaysBetween( '2005-08-31 00:00:01','2005-08-30 23:59:59' ) == 1 : raise AssertionError("getNumberOfDaysBetween test #1 is broken.")
print ""
print ""
print "addMonthsToIsoDate test #1(basic test) : "
print ""
print """StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 1) : """
print "Expected result : %s " %("2007-11-15 12:00:00")
print "Obtained result : %s " %StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 1)
if not StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 1) == "2007-11-15 12:00:00" : raise AssertionError("addMonthsToIsoDate test #1 is broken.")
print ""
print ""
print "addMonthsToIsoDate test #2(test year increment): "
print ""
print """StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 15) : """
print "Expected result : %s " %("2009-01-15 12:00:00")
print "Obtained result : %s " %StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 15)
if not StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 15) == "2009-01-15 12:00:00" : raise AssertionError("addMonthsToIsoDate test #2 is broken.")
print ""
print ""
print "addMonthsToIsoDate test #3 (test day number too high in bissextile year): "
print ""
print """StatsDateLib.addMonthsToIsoDate( "2008-01-31 12:00:00", 1) : """
print "Expected result : %s " %("2008-02-29 12:00:00")
print "Obtained result : %s " %StatsDateLib.addMonthsToIsoDate( "2008-01-31 12:00:00", 1)
if not StatsDateLib.addMonthsToIsoDate( "2008-01-31 12:00:00", 1) == "2008-02-29 12:00:00" : raise AssertionError("addMonthsToIsoDate test #3 is broken.")<|fim▁end|> | DAY = 24 * HOUR
MINUTES_PER_DAY = 24*60
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import makeParser from "./parser";
<|fim▁hole|><|fim▁end|> | export { makeParser }; |
<|file_name|>publish.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|>function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _Subject = require('../Subject');
var _Subject2 = _interopRequireDefault(_Subject);
var _multicast = require('./multicast');
var _multicast2 = _interopRequireDefault(_multicast);
function subjectFactory() {
return new _Subject2['default']();
}
function publish() {
return _multicast2['default'].call(this, subjectFactory);
}
//# sourceMappingURL=publish.js.map
module.exports = exports['default'];
//# sourceMappingURL=publish.js.map<|fim▁end|> | exports.__esModule = true;
exports['default'] = publish;
|
<|file_name|>tvec.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
use back::abi;
use llvm;
use llvm::ValueRef;
use trans::base::*;
use trans::base;
use trans::build::*;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::debuginfo::DebugLoc;
use trans::expr::{Dest, Ignore, SaveIn};
use trans::expr;
use trans::machine::llsize_of_alloc;
use trans::type_::Type;
use trans::type_of;
use middle::ty::{self, Ty};
use util::ppaux::ty_to_string;
use syntax::ast;
use syntax::parse::token::InternedString;
#[derive(Copy, Clone)]
struct VecTypes<'tcx> {
unit_ty: Ty<'tcx>,
llunit_ty: Type
}
impl<'tcx> VecTypes<'tcx> {
pub fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String {
format!("VecTypes {{unit_ty={}, llunit_ty={}}}",
ty_to_string(ccx.tcx(), self.unit_ty),
ccx.tn().type_to_string(self.llunit_ty))
}
}
pub fn trans_fixed_vstore<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
dest: expr::Dest)
-> Block<'blk, 'tcx> {
//!
//
// [...] allocates a fixed-size array and moves it around "by value".
// In this case, it means that the caller has already given us a location
// to store the array of the suitable size, so all we have to do is
// generate the content.
debug!("trans_fixed_vstore(expr={}, dest={})",
bcx.expr_to_string(expr), dest.to_string(bcx.ccx()));
let vt = vec_types_from_expr(bcx, expr);
return match dest {
Ignore => write_content(bcx, &vt, expr, expr, dest),
SaveIn(lldest) => {
// lldest will have type *[T x N], but we want the type *T,
// so use GEP to convert:
let lldest = GEPi(bcx, lldest, &[0, 0]);
write_content(bcx, &vt, expr, expr, SaveIn(lldest))
}
};
}
/// &[...] allocates memory on the stack and writes the values into it, returning the vector (the
/// caller must make the reference). "..." is similar except that the memory can be statically
/// allocated and we return a reference (strings are always by-ref).
pub fn trans_slice_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
slice_expr: &ast::Expr,
content_expr: &ast::Expr)
-> DatumBlock<'blk, 'tcx, Expr> {
let fcx = bcx.fcx;
let ccx = fcx.ccx;
let mut bcx = bcx;
debug!("trans_slice_vec(slice_expr={})",
bcx.expr_to_string(slice_expr));
let vec_ty = node_id_type(bcx, slice_expr.id);
// Handle the "..." case (returns a slice since strings are always unsized):
if let ast::ExprLit(ref lit) = content_expr.node {
if let ast::LitStr(ref s, _) = lit.node {
let scratch = rvalue_scratch_datum(bcx, vec_ty, "");
bcx = trans_lit_str(bcx,
content_expr,
s.clone(),
SaveIn(scratch.val));
return DatumBlock::new(bcx, scratch.to_expr_datum());
}
}
// Handle the &[...] case:
let vt = vec_types_from_expr(bcx, content_expr);
let count = elements_required(bcx, content_expr);
debug!(" vt={}, count={}", vt.to_string(ccx), count);
let fixed_ty = ty::mk_vec(bcx.tcx(),
vt.unit_ty,
Some(count));
let llfixed_ty = type_of::type_of(bcx.ccx(), fixed_ty);
// Always create an alloca even if zero-sized, to preserve
// the non-null invariant of the inner slice ptr
let llfixed = base::alloca(bcx, llfixed_ty, "");
if count > 0 {
// Arrange for the backing array to be cleaned up.
let cleanup_scope = cleanup::temporary_scope(bcx.tcx(), content_expr.id);
fcx.schedule_lifetime_end(cleanup_scope, llfixed);
fcx.schedule_drop_mem(cleanup_scope, llfixed, fixed_ty);
// Generate the content into the backing array.
// llfixed has type *[T x N], but we want the type *T,
// so use GEP to convert
bcx = write_content(bcx, &vt, slice_expr, content_expr,
SaveIn(GEPi(bcx, llfixed, &[0, 0])));
};
immediate_rvalue_bcx(bcx, llfixed, vec_ty).to_expr_datumblock()
}
/// Literal strings translate to slices into static memory. This is different from
/// trans_slice_vstore() above because it doesn't need to copy the content anywhere.
pub fn trans_lit_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
lit_expr: &ast::Expr,
str_lit: InternedString,
dest: Dest)
-> Block<'blk, 'tcx> {
debug!("trans_lit_str(lit_expr={}, dest={})",
bcx.expr_to_string(lit_expr),
dest.to_string(bcx.ccx()));
match dest {
Ignore => bcx,
SaveIn(lldest) => {
let bytes = str_lit.len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), str_lit, false);
let llcstr = consts::ptrcast(llcstr, Type::i8p(bcx.ccx()));
Store(bcx, llcstr, GEPi(bcx, lldest, &[0, abi::FAT_PTR_ADDR]));
Store(bcx, llbytes, GEPi(bcx, lldest, &[0, abi::FAT_PTR_EXTRA]));
bcx
}
}
}
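// Layout note (informal sketch): the two stores above fill in a fat pointer,
// i.e. a (data ptr, length) pair, so for "abc" the destination ends up as
// roughly (ptr to the static bytes, 3); FAT_PTR_ADDR and FAT_PTR_EXTRA are
// the indices of those two fields.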
fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
vt: &VecTypes<'tcx>,
vstore_expr: &ast::Expr,
content_expr: &ast::Expr,
dest: Dest)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("tvec::write_content");
let fcx = bcx.fcx;
let mut bcx = bcx;
debug!("write_content(vt={}, dest={}, vstore_expr={})",
vt.to_string(bcx.ccx()),
dest.to_string(bcx.ccx()),
bcx.expr_to_string(vstore_expr));
match content_expr.node {
ast::ExprLit(ref lit) => {
match lit.node {
ast::LitStr(ref s, _) => {
match dest {
Ignore => return bcx,
SaveIn(lldest) => {
let bytes = s.len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), (*s).clone(), false);
base::call_memcpy(bcx,
lldest,
llcstr,
llbytes,
1);
return bcx;
}
}
}
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected evec content");
}
}
}
ast::ExprVec(ref elements) => {
match dest {
Ignore => {
for element in elements {
bcx = expr::trans_into(bcx, &**element, Ignore);
}
}
SaveIn(lldest) => {
let temp_scope = fcx.push_custom_cleanup_scope();
for (i, element) in elements.iter().enumerate() {
let lleltptr = GEPi(bcx, lldest, &[i]);
debug!("writing index {} with lleltptr={}",
i, bcx.val_to_string(lleltptr));
bcx = expr::trans_into(bcx, &**element,
SaveIn(lleltptr));
let scope = cleanup::CustomScope(temp_scope);<|fim▁hole|> }
}
return bcx;
}
ast::ExprRepeat(ref element, ref count_expr) => {
match dest {
Ignore => {
return expr::trans_into(bcx, &**element, Ignore);
}
SaveIn(lldest) => {
match ty::eval_repeat_count(bcx.tcx(), &**count_expr) {
0 => expr::trans_into(bcx, &**element, Ignore),
1 => expr::trans_into(bcx, &**element, SaveIn(lldest)),
count => {
let elem = unpack_datum!(bcx, expr::trans(bcx, &**element));
let bcx = iter_vec_loop(bcx, lldest, vt,
C_uint(bcx.ccx(), count),
|set_bcx, lleltptr, _| {
elem.shallow_copy(set_bcx, lleltptr)
});
bcx
}
}
}
}
}
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected vec content");
}
}
}
fn vec_types_from_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, vec_expr: &ast::Expr)
-> VecTypes<'tcx> {
let vec_ty = node_id_type(bcx, vec_expr.id);
vec_types(bcx, ty::sequence_element_type(bcx.tcx(), vec_ty))
}
fn vec_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, unit_ty: Ty<'tcx>)
-> VecTypes<'tcx> {
VecTypes {
unit_ty: unit_ty,
llunit_ty: type_of::type_of(bcx.ccx(), unit_ty)
}
}
fn elements_required(bcx: Block, content_expr: &ast::Expr) -> usize {
//! Figure out the number of elements we need to store this content
match content_expr.node {
ast::ExprLit(ref lit) => {
match lit.node {
ast::LitStr(ref s, _) => s.len(),
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected evec content")
}
}
},
ast::ExprVec(ref es) => es.len(),
ast::ExprRepeat(_, ref count_expr) => {
ty::eval_repeat_count(bcx.tcx(), &**count_expr)
}
_ => bcx.tcx().sess.span_bug(content_expr.span,
"unexpected vec content")
}
}
/// Converts a fixed-length vector into the slice pair. The vector should be stored in `llval`
/// which should be by ref.
pub fn get_fixed_base_and_len(bcx: Block,
llval: ValueRef,
vec_length: usize)
-> (ValueRef, ValueRef) {
let ccx = bcx.ccx();
let base = expr::get_dataptr(bcx, llval);
let len = C_uint(ccx, vec_length);
(base, len)
}
/// Converts a vector into the slice pair. The vector should be stored in `llval` which should be
/// by-reference. If you have a datum, you would probably prefer to call
/// `Datum::get_base_and_len()` which will handle any conversions for you.
pub fn get_base_and_len<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
llval: ValueRef,
vec_ty: Ty<'tcx>)
-> (ValueRef, ValueRef) {
let ccx = bcx.ccx();
match vec_ty.sty {
ty::ty_vec(_, Some(n)) => get_fixed_base_and_len(bcx, llval, n),
ty::ty_vec(_, None) | ty::ty_str => {
let base = Load(bcx, expr::get_dataptr(bcx, llval));
let len = Load(bcx, expr::get_len(bcx, llval));
(base, len)
}
// Only used for pattern matching.
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) => {
let inner = if type_is_sized(bcx.tcx(), ty) {
Load(bcx, llval)
} else {
llval
};
get_base_and_len(bcx, inner, ty)
},
_ => ccx.sess().bug("unexpected type in get_base_and_len"),
}
}
fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
data_ptr: ValueRef,
vt: &VecTypes<'tcx>,
count: ValueRef,
f: F)
-> Block<'blk, 'tcx> where
F: FnOnce(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>,
{
let _icx = push_ctxt("tvec::iter_vec_loop");
if bcx.unreachable.get() {
return bcx;
}
let fcx = bcx.fcx;
let loop_bcx = fcx.new_temp_block("expr_repeat");
let next_bcx = fcx.new_temp_block("expr_repeat: next");
Br(bcx, loop_bcx.llbb, DebugLoc::None);
let loop_counter = Phi(loop_bcx, bcx.ccx().int_type(),
&[C_uint(bcx.ccx(), 0 as usize)], &[bcx.llbb]);
let bcx = loop_bcx;
let lleltptr = if llsize_of_alloc(bcx.ccx(), vt.llunit_ty) == 0 {
data_ptr
} else {
InBoundsGEP(bcx, data_ptr, &[loop_counter])
};
let bcx = f(bcx, lleltptr, vt.unit_ty);
let plusone = Add(bcx, loop_counter, C_uint(bcx.ccx(), 1us), DebugLoc::None);
AddIncomingToPhi(loop_counter, plusone, bcx.llbb);
let cond_val = ICmp(bcx, llvm::IntULT, plusone, count, DebugLoc::None);
CondBr(bcx, cond_val, loop_bcx.llbb, next_bcx.llbb, DebugLoc::None);
next_bcx
}
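// Control-flow shape emitted by iter_vec_loop above (illustrative sketch):
//   entry:       br expr_repeat
//   expr_repeat: i = phi [0, entry], [i+1, expr_repeat]
//                f(data_ptr + i); if i+1 < count { br expr_repeat } else { br next }
// Zero-sized element types reuse data_ptr directly, since every element
// shares the same address.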
pub fn iter_vec_raw<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
data_ptr: ValueRef,
unit_ty: Ty<'tcx>,
len: ValueRef,
f: F)
-> Block<'blk, 'tcx> where
F: FnOnce(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>,
{
let _icx = push_ctxt("tvec::iter_vec_raw");
let fcx = bcx.fcx;
let vt = vec_types(bcx, unit_ty);
if llsize_of_alloc(bcx.ccx(), vt.llunit_ty) == 0 {
// Special-case vectors with elements of size 0 so they don't go out of bounds (#9890)
iter_vec_loop(bcx, data_ptr, &vt, len, f)
} else {
// Calculate the last pointer address we want to handle.
let data_end_ptr = InBoundsGEP(bcx, data_ptr, &[len]);
// Now perform the iteration.
let header_bcx = fcx.new_temp_block("iter_vec_loop_header");
Br(bcx, header_bcx.llbb, DebugLoc::None);
let data_ptr =
Phi(header_bcx, val_ty(data_ptr), &[data_ptr], &[bcx.llbb]);
let not_yet_at_end =
ICmp(header_bcx, llvm::IntULT, data_ptr, data_end_ptr, DebugLoc::None);
let body_bcx = fcx.new_temp_block("iter_vec_loop_body");
let next_bcx = fcx.new_temp_block("iter_vec_next");
CondBr(header_bcx, not_yet_at_end, body_bcx.llbb, next_bcx.llbb, DebugLoc::None);
let body_bcx = f(body_bcx, data_ptr, unit_ty);
AddIncomingToPhi(data_ptr, InBoundsGEP(body_bcx, data_ptr,
&[C_int(bcx.ccx(), 1)]),
body_bcx.llbb);
Br(body_bcx, header_bcx.llbb, DebugLoc::None);
next_bcx
}
}<|fim▁end|> | fcx.schedule_lifetime_end(scope, lleltptr);
fcx.schedule_drop_mem(scope, lleltptr, vt.unit_ty);
}
fcx.pop_custom_cleanup_scope(temp_scope); |
<|file_name|>imperative_grad.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#<|fim▁hole|># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for backpropagation using the tape utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.ops.unconnected_gradients import UnconnectedGradients
from tensorflow.python.util import compat
VSpace = collections.namedtuple("VSpace", [
"aggregate_fn", "num_elements_fn", "zeros_fn", "ones_fn",
"zeros_like_fn", "ones_like_fn", "graph_shape_fn"
])
def imperative_grad(tape,
target,
sources,
output_gradients=None,
sources_raw=None,
unconnected_gradients=UnconnectedGradients.NONE):
"""Computes gradients from the imperatively defined tape on top of the stack.
Works by filtering the tape, computing how many downstream usages are of each
tensor and entry, and repeatedly applying backward functions until we have
gradients for all sources.
Args:
tape: the gradient tape which stores the trace.
target: either a Tensor or list of Tensors to be differentiated.
sources: list of Tensors for which we want gradients
    output_gradients: if not None, a list of gradients provided for each target,
      or None if we are to use the target's computed downstream gradient.
sources_raw: if not None, a list of the source python objects from which the
sources were generated. Should have the same length as sources. Only needs
to be populated if unconnected_gradients is 'zero'.
unconnected_gradients: determines the value returned if the target and
      sources are unconnected. When 'none', the value returned is None, whereas when
      'zero' a zero tensor in the same shape as the sources is returned.
Returns:
the gradient wrt each of the sources.
Raises:
ValueError: if the arguments are invalid.
RuntimeError: if something goes wrong.
"""
try:
unconnected_gradients = UnconnectedGradients(unconnected_gradients)
except ValueError:
raise ValueError(
"Unknown value for unconnected_gradients: %r" % unconnected_gradients)
return pywrap_tensorflow.TFE_Py_TapeGradient(
tape._tape, # pylint: disable=protected-access
target,
sources,
output_gradients,
sources_raw,
compat.as_str(unconnected_gradients.value))<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. |
<|file_name|>tvec.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(non_camel_case_types)]
use back::abi;
use llvm;
use llvm::{ValueRef};
use trans::base::*;
use trans::base;
use trans::build::*;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::expr::{Dest, Ignore, SaveIn};
use trans::expr;
use trans::glue;
use trans::machine;
use trans::machine::llsize_of_alloc;
use trans::type_::Type;
use trans::type_of;
use middle::ty::{self, Ty};
use util::ppaux::ty_to_string;
use syntax::ast;
use syntax::parse::token::InternedString;
fn get_len(bcx: Block, vptr: ValueRef) -> ValueRef {
let _icx = push_ctxt("tvec::get_lenl");
Load(bcx, expr::get_len(bcx, vptr))
}
fn get_dataptr(bcx: Block, vptr: ValueRef) -> ValueRef {
let _icx = push_ctxt("tvec::get_dataptr");
Load(bcx, expr::get_dataptr(bcx, vptr))
}
pub fn make_drop_glue_unboxed<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
vptr: ValueRef,
unit_ty: Ty<'tcx>,
should_deallocate: bool)
-> Block<'blk, 'tcx> {
let not_null = IsNotNull(bcx, vptr);
with_cond(bcx, not_null, |bcx| {
let ccx = bcx.ccx();
let tcx = bcx.tcx();
let _icx = push_ctxt("tvec::make_drop_glue_unboxed");
let dataptr = get_dataptr(bcx, vptr);
let bcx = if type_needs_drop(tcx, unit_ty) {
let len = get_len(bcx, vptr);
iter_vec_raw(bcx, dataptr, unit_ty, len, |bb, vv, tt| glue::drop_ty(bb, vv, tt, None))
} else {
bcx
};
if should_deallocate {
let llty = type_of::type_of(ccx, unit_ty);
let unit_size = llsize_of_alloc(ccx, llty);
if unit_size != 0 {
let len = get_len(bcx, vptr);
let not_empty = ICmp(bcx, llvm::IntNE, len, C_uint(ccx, 0u));
with_cond(bcx, not_empty, |bcx| {
let llalign = C_uint(ccx, machine::llalign_of_min(ccx, llty));
let size = Mul(bcx, C_uint(ccx, unit_size), len);
glue::trans_exchange_free_dyn(bcx, dataptr, size, llalign)
})
} else {
bcx
}
} else {
bcx
}
})
}
#[derive(Copy)]
pub struct VecTypes<'tcx> {
pub unit_ty: Ty<'tcx>,
pub llunit_ty: Type,
pub llunit_alloc_size: u64
}
impl<'tcx> VecTypes<'tcx> {
pub fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String {
format!("VecTypes {{unit_ty={}, llunit_ty={}, llunit_alloc_size={}}}",
ty_to_string(ccx.tcx(), self.unit_ty),
ccx.tn().type_to_string(self.llunit_ty),
self.llunit_alloc_size)
}
}
pub fn trans_fixed_vstore<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
dest: expr::Dest)
-> Block<'blk, 'tcx> {
//!
//
// [...] allocates a fixed-size array and moves it around "by value".
// In this case, it means that the caller has already given us a location
// to store the array of the suitable size, so all we have to do is
// generate the content.
debug!("trans_fixed_vstore(expr={}, dest={})",
bcx.expr_to_string(expr), dest.to_string(bcx.ccx()));
let vt = vec_types_from_expr(bcx, expr);
return match dest {
Ignore => write_content(bcx, &vt, expr, expr, dest),
SaveIn(lldest) => {
// lldest will have type *[T x N], but we want the type *T,
// so use GEP to convert:
let lldest = GEPi(bcx, lldest, &[0, 0]);
write_content(bcx, &vt, expr, expr, SaveIn(lldest))
}
};
}
/// &[...] allocates memory on the stack and writes the values into it, returning the vector (the
/// caller must make the reference). "..." is similar except that the memory can be statically
/// allocated and we return a reference (strings are always by-ref).
pub fn trans_slice_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
slice_expr: &ast::Expr,
content_expr: &ast::Expr)
-> DatumBlock<'blk, 'tcx, Expr> {
let fcx = bcx.fcx;
let ccx = fcx.ccx;
let mut bcx = bcx;
debug!("trans_slice_vec(slice_expr={})",
bcx.expr_to_string(slice_expr));
let vec_ty = node_id_type(bcx, slice_expr.id);
// Handle the "..." case (returns a slice since strings are always unsized):
if let ast::ExprLit(ref lit) = content_expr.node {
if let ast::LitStr(ref s, _) = lit.node {
let scratch = rvalue_scratch_datum(bcx, vec_ty, "");
bcx = trans_lit_str(bcx,
content_expr,
s.clone(),
SaveIn(scratch.val));
return DatumBlock::new(bcx, scratch.to_expr_datum());
}
}
// Handle the &[...] case:
let vt = vec_types_from_expr(bcx, content_expr);
let count = elements_required(bcx, content_expr);
debug!(" vt={}, count={}", vt.to_string(ccx), count);
let llcount = C_uint(ccx, count);
let fixed_ty = ty::mk_vec(bcx.tcx(),
vt.unit_ty,
Some(count));
let llfixed_ty = type_of::type_of(bcx.ccx(), fixed_ty).ptr_to();
let llfixed = if count == 0 {
// Just create a zero-sized alloca to preserve
// the non-null invariant of the inner slice ptr
let llfixed = base::arrayalloca(bcx, vt.llunit_ty, llcount);
BitCast(bcx, llfixed, llfixed_ty)
} else {
// Make a fixed-length backing array and allocate it on the stack.
let llfixed = base::arrayalloca(bcx, vt.llunit_ty, llcount);
// Arrange for the backing array to be cleaned up.
let llfixed_casted = BitCast(bcx, llfixed, llfixed_ty);
let cleanup_scope = cleanup::temporary_scope(bcx.tcx(), content_expr.id);
fcx.schedule_lifetime_end(cleanup_scope, llfixed_casted);
fcx.schedule_drop_mem(cleanup_scope, llfixed_casted, fixed_ty);
// Generate the content into the backing array.
bcx = write_content(bcx, &vt, slice_expr,
content_expr, SaveIn(llfixed));
llfixed_casted
};
immediate_rvalue_bcx(bcx, llfixed, vec_ty).to_expr_datumblock()
}
/// Literal strings translate to slices into static memory. This is different from
/// trans_slice_vstore() above because it doesn't need to copy the content anywhere.
pub fn trans_lit_str<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
lit_expr: &ast::Expr,
str_lit: InternedString,
dest: Dest)
-> Block<'blk, 'tcx> {
debug!("trans_lit_str(lit_expr={}, dest={})",
bcx.expr_to_string(lit_expr),
dest.to_string(bcx.ccx()));
match dest {
Ignore => bcx,
SaveIn(lldest) => {
let bytes = str_lit.get().len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), str_lit, false);
let llcstr = consts::ptrcast(llcstr, Type::i8p(bcx.ccx()));
Store(bcx, llcstr, GEPi(bcx, lldest, &[0u, abi::FAT_PTR_ADDR]));
Store(bcx, llbytes, GEPi(bcx, lldest, &[0u, abi::FAT_PTR_EXTRA]));
bcx
}
}
}
pub fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
vt: &VecTypes<'tcx>,
vstore_expr: &ast::Expr,
content_expr: &ast::Expr,
dest: Dest)
-> Block<'blk, 'tcx> {
let _icx = push_ctxt("tvec::write_content");
let fcx = bcx.fcx;
let mut bcx = bcx;
debug!("write_content(vt={}, dest={}, vstore_expr={})",
vt.to_string(bcx.ccx()),
dest.to_string(bcx.ccx()),
bcx.expr_to_string(vstore_expr));
match content_expr.node {
ast::ExprLit(ref lit) => {
match lit.node {
ast::LitStr(ref s, _) => {
match dest {
Ignore => return bcx,
SaveIn(lldest) => {
let bytes = s.get().len();
let llbytes = C_uint(bcx.ccx(), bytes);
let llcstr = C_cstr(bcx.ccx(), (*s).clone(), false);
base::call_memcpy(bcx,
lldest,
llcstr,
llbytes,
1);
return bcx;
}
}
}
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected evec content");
}
}
}
ast::ExprVec(ref elements) => {
match dest {
Ignore => {
for element in elements.iter() {
bcx = expr::trans_into(bcx, &**element, Ignore);
}
}
SaveIn(lldest) => {
let temp_scope = fcx.push_custom_cleanup_scope();
for (i, element) in elements.iter().enumerate() {
let lleltptr = GEPi(bcx, lldest, &[i]);<|fim▁hole|> SaveIn(lleltptr));
let scope = cleanup::CustomScope(temp_scope);
fcx.schedule_lifetime_end(scope, lleltptr);
fcx.schedule_drop_mem(scope, lleltptr, vt.unit_ty);
}
fcx.pop_custom_cleanup_scope(temp_scope);
}
}
return bcx;
}
ast::ExprRepeat(ref element, ref count_expr) => {
match dest {
Ignore => {
return expr::trans_into(bcx, &**element, Ignore);
}
SaveIn(lldest) => {
match ty::eval_repeat_count(bcx.tcx(), &**count_expr) {
0 => bcx,
1 => expr::trans_into(bcx, &**element, SaveIn(lldest)),
count => {
let elem = unpack_datum!(bcx, expr::trans(bcx, &**element));
let bcx = iter_vec_loop(bcx, lldest, vt,
C_uint(bcx.ccx(), count),
|set_bcx, lleltptr, _| {
elem.shallow_copy(set_bcx, lleltptr)
});
elem.add_clean_if_rvalue(bcx, element.id);
bcx
}
}
}
}
}
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected vec content");
}
}
}
pub fn vec_types_from_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
vec_expr: &ast::Expr)
-> VecTypes<'tcx> {
let vec_ty = node_id_type(bcx, vec_expr.id);
vec_types(bcx, ty::sequence_element_type(bcx.tcx(), vec_ty))
}
pub fn vec_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
unit_ty: Ty<'tcx>)
-> VecTypes<'tcx> {
let ccx = bcx.ccx();
let llunit_ty = type_of::type_of(ccx, unit_ty);
let llunit_alloc_size = llsize_of_alloc(ccx, llunit_ty);
VecTypes {
unit_ty: unit_ty,
llunit_ty: llunit_ty,
llunit_alloc_size: llunit_alloc_size
}
}
pub fn elements_required(bcx: Block, content_expr: &ast::Expr) -> uint {
//! Figure out the number of elements we need to store this content
match content_expr.node {
ast::ExprLit(ref lit) => {
match lit.node {
ast::LitStr(ref s, _) => s.get().len(),
_ => {
bcx.tcx().sess.span_bug(content_expr.span,
"unexpected evec content")
}
}
},
ast::ExprVec(ref es) => es.len(),
ast::ExprRepeat(_, ref count_expr) => {
ty::eval_repeat_count(bcx.tcx(), &**count_expr)
}
_ => bcx.tcx().sess.span_bug(content_expr.span,
"unexpected vec content")
}
}
/// Converts a fixed-length vector into the slice pair. The vector should be stored in `llval`
/// which should be by ref.
pub fn get_fixed_base_and_len(bcx: Block,
llval: ValueRef,
vec_length: uint)
-> (ValueRef, ValueRef) {
let ccx = bcx.ccx();
let base = expr::get_dataptr(bcx, llval);
let len = C_uint(ccx, vec_length);
(base, len)
}
fn get_slice_base_and_len(bcx: Block,
llval: ValueRef)
-> (ValueRef, ValueRef) {
let base = Load(bcx, GEPi(bcx, llval, &[0u, abi::FAT_PTR_ADDR]));
let len = Load(bcx, GEPi(bcx, llval, &[0u, abi::FAT_PTR_EXTRA]));
(base, len)
}
/// Converts a vector into the slice pair. The vector should be stored in `llval` which should be
/// by-reference. If you have a datum, you would probably prefer to call
/// `Datum::get_base_and_len()` which will handle any conversions for you.
pub fn get_base_and_len(bcx: Block,
llval: ValueRef,
vec_ty: Ty)
-> (ValueRef, ValueRef) {
let ccx = bcx.ccx();
match vec_ty.sty {
ty::ty_vec(_, Some(n)) => get_fixed_base_and_len(bcx, llval, n),
ty::ty_open(ty) => match ty.sty {
ty::ty_vec(_, None) | ty::ty_str => get_slice_base_and_len(bcx, llval),
_ => ccx.sess().bug("unexpected type in get_base_and_len")
},
// Only used for pattern matching.
ty::ty_uniq(ty) | ty::ty_rptr(_, ty::mt{ty, ..}) => match ty.sty {
ty::ty_vec(_, None) | ty::ty_str => get_slice_base_and_len(bcx, llval),
ty::ty_vec(_, Some(n)) => {
let base = GEPi(bcx, Load(bcx, llval), &[0u, 0u]);
(base, C_uint(ccx, n))
}
_ => ccx.sess().bug("unexpected type in get_base_and_len"),
},
_ => ccx.sess().bug("unexpected type in get_base_and_len"),
}
}
pub fn iter_vec_loop<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
data_ptr: ValueRef,
vt: &VecTypes<'tcx>,
count: ValueRef,
f: F)
-> Block<'blk, 'tcx> where
F: FnOnce(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>,
{
let _icx = push_ctxt("tvec::iter_vec_loop");
let fcx = bcx.fcx;
let next_bcx = fcx.new_temp_block("expr_repeat: while next");
let loop_bcx = fcx.new_temp_block("expr_repeat");
let cond_bcx = fcx.new_temp_block("expr_repeat: loop cond");
let body_bcx = fcx.new_temp_block("expr_repeat: body: set");
let inc_bcx = fcx.new_temp_block("expr_repeat: body: inc");
Br(bcx, loop_bcx.llbb);
let loop_counter = {
// i = 0
let i = alloca(loop_bcx, bcx.ccx().int_type(), "__i");
Store(loop_bcx, C_uint(bcx.ccx(), 0u), i);
Br(loop_bcx, cond_bcx.llbb);
i
};
{ // i < count
let lhs = Load(cond_bcx, loop_counter);
let rhs = count;
let cond_val = ICmp(cond_bcx, llvm::IntULT, lhs, rhs);
CondBr(cond_bcx, cond_val, body_bcx.llbb, next_bcx.llbb);
}
{ // loop body
let i = Load(body_bcx, loop_counter);
let lleltptr = if vt.llunit_alloc_size == 0 {
data_ptr
} else {
InBoundsGEP(body_bcx, data_ptr, &[i])
};
let body_bcx = f(body_bcx, lleltptr, vt.unit_ty);
Br(body_bcx, inc_bcx.llbb);
}
{ // i += 1
let i = Load(inc_bcx, loop_counter);
let plusone = Add(inc_bcx, i, C_uint(bcx.ccx(), 1u));
Store(inc_bcx, plusone, loop_counter);
Br(inc_bcx, cond_bcx.llbb);
}
next_bcx
}
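// Note (informal): unlike a phi-based loop, this version keeps the counter
// `__i` in a stack slot (alloca) and routes entry -> cond -> body -> inc ->
// cond, leaving via `next` once i >= count; LLVM's mem2reg pass typically
// promotes `__i` back into a register afterwards.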
pub fn iter_vec_raw<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
data_ptr: ValueRef,
unit_ty: Ty<'tcx>,
len: ValueRef,
f: F)
-> Block<'blk, 'tcx> where
F: FnOnce(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>,
{
let _icx = push_ctxt("tvec::iter_vec_raw");
let fcx = bcx.fcx;
let vt = vec_types(bcx, unit_ty);
if vt.llunit_alloc_size == 0 {
// Special-case vectors with elements of size 0 so they don't go out of bounds (#9890)
iter_vec_loop(bcx, data_ptr, &vt, len, f)
} else {
// Calculate the last pointer address we want to handle.
let data_end_ptr = InBoundsGEP(bcx, data_ptr, &[len]);
// Now perform the iteration.
let header_bcx = fcx.new_temp_block("iter_vec_loop_header");
Br(bcx, header_bcx.llbb);
let data_ptr =
Phi(header_bcx, val_ty(data_ptr), &[data_ptr], &[bcx.llbb]);
let not_yet_at_end =
ICmp(header_bcx, llvm::IntULT, data_ptr, data_end_ptr);
let body_bcx = fcx.new_temp_block("iter_vec_loop_body");
let next_bcx = fcx.new_temp_block("iter_vec_next");
CondBr(header_bcx, not_yet_at_end, body_bcx.llbb, next_bcx.llbb);
let body_bcx = f(body_bcx, data_ptr, vt.unit_ty);
AddIncomingToPhi(data_ptr, InBoundsGEP(body_bcx, data_ptr,
&[C_int(bcx.ccx(), 1i)]),
body_bcx.llbb);
Br(body_bcx, header_bcx.llbb);
next_bcx
}
}<|fim▁end|> | debug!("writing index {} with lleltptr={}",
i, bcx.val_to_string(lleltptr));
bcx = expr::trans_into(bcx, &**element, |
<|file_name|>Print.cpp<|end_file_name|><|fim▁begin|>/*
Print.cpp - Base class that provides print() and println()
Copyright (c) 2008 David A. Mellis. All right reserved.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
Modified 23 November 2006 by David A. Mellis
Modified 20 Aug 2014 by MediaTek Inc.
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#include "Arduino.h"
#include "Print.h"
// Public Methods //////////////////////////////////////////////////////////////
/* default implementation: may be overridden */
size_t Print::write(const uint8_t *buffer, size_t size)
{
size_t n = 0;
while (size--) {
n += write(*buffer++);
}
return n;
}
size_t Print::print(const __FlashStringHelper *ifsh)
{
return print(reinterpret_cast<const char *>(ifsh));
}
size_t Print::print(const String &s)
{
return write(s.c_str(), s.length());
}
size_t Print::print(const char str[])
{
return write(str);
}
size_t Print::print(char c)
{
return write(c);
}
size_t Print::print(unsigned char b, int base)
{
return print((unsigned long) b, base);
}
size_t Print::print(int n, int base)
{
return print((long) n, base);
}
size_t Print::print(unsigned int n, int base)
{
return print((unsigned long) n, base);
}
size_t Print::print(long n, int base)
{
if (base == 0) {
return write(n);
} else if (base == 10) {
if (n < 0) {
int t = print('-');
n = -n;
return printNumber(n, 10) + t;
}
return printNumber(n, 10);
} else {
return printNumber(n, base);
}
}
size_t Print::print(unsigned long n, int base)
{
if (base == 0) return write(n);
else return printNumber(n, base);
}
size_t Print::print(double n, int digits)
{
return printFloat(n, digits);
}
size_t Print::println(const __FlashStringHelper *ifsh)
{
size_t n = print(ifsh);
n += println();
return n;
}
size_t Print::print(const Printable& x)
{
return x.printTo(*this);
}
size_t Print::println(void)
{
size_t n = print('\r');
n += print('\n');
return n;
}
size_t Print::println(const String &s)
{
size_t n = print(s);
n += println();
return n;
}
size_t Print::println(const char c[])
{
size_t n = print(c);
n += println();
return n;
}
size_t Print::println(char c)
{
size_t n = print(c);
n += println();
return n;
}
size_t Print::println(unsigned char b, int base)
{
size_t n = print(b, base);
n += println();
return n;
}
size_t Print::println(int num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(unsigned int num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(long num, int base)<|fim▁hole|>}
size_t Print::println(unsigned long num, int base)
{
size_t n = print(num, base);
n += println();
return n;
}
size_t Print::println(double num, int digits)
{
size_t n = print(num, digits);
n += println();
return n;
}
size_t Print::println(const Printable& x)
{
size_t n = print(x);
n += println();
return n;
}
#include <stdarg.h>
size_t Print::printf(const char *fmt, ...)
{
va_list args;
char buf[256] = {0};
va_start(args, fmt);
	vsnprintf(buf, sizeof(buf), fmt, args); // bounded write; vsprintf could overflow the fixed 256-byte buffer
va_end(args);
return write(buf, strlen(buf));
}
// Private Methods /////////////////////////////////////////////////////////////
size_t Print::printNumber(unsigned long n, uint8_t base) {
char buf[8 * sizeof(long) + 1]; // Assumes 8-bit chars plus zero byte.
char *str = &buf[sizeof(buf) - 1];
*str = '\0';
// prevent crash if called with base == 1
if (base < 2) base = 10;
do {
unsigned long m = n;
n /= base;
char c = m - base * n;
*--str = c < 10 ? c + '0' : c + 'A' - 10;
} while(n);
return write(str);
}
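// Worked example (for reference): printNumber(255, 16) peels digits from the
// least-significant end -- 255 % 16 = 15 -> 'F', then 15 % 16 = 15 -> 'F' --
// and fills the buffer back-to-front, producing "FF".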
size_t Print::printFloat(double number, uint8_t digits)
{
size_t n = 0;
if (isnan(number)) return print("nan");
if (isinf(number)) return print("inf");
if (number > 4294967040.0) return print ("ovf"); // constant determined empirically
  if (number < -4294967040.0) return print ("ovf"); // constant determined empirically
// Handle negative numbers
if (number < 0.0)
{
n += print('-');
number = -number;
}
// Round correctly so that print(1.999, 2) prints as "2.00"
double rounding = 0.5;
for (uint8_t i=0; i<digits; ++i)
rounding /= 10.0;
number += rounding;
// Extract the integer part of the number and print it
unsigned long int_part = (unsigned long)number;
double remainder = number - (double)int_part;
n += print(int_part);
// Print the decimal point, but only if there are digits beyond
if (digits > 0) {
n += print(".");
}
// Extract digits from the remainder one at a time
while (digits-- > 0)
{
remainder *= 10.0;
int toPrint = int(remainder);
n += print(toPrint);
remainder -= toPrint;
}
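  // Worked example: printFloat(3.14159, 2) adds rounding 0.005 giving 3.14659,
  // prints the integer part "3" and a ".", then extracts digits 1 and 4 in the
  // loop above, producing "3.14".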
return n;
}<|fim▁end|> | {
size_t n = print(num, base);
n += println();
return n; |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url<|fim▁hole|> url(r'^$', 'webinterface.view.dashboard.main'),
url(r'^dashboard/$', 'webinterface.view.dashboard.main'),
url(r'^login/$', 'webinterface.view.login.main'),
url(r'^login/ajax/$', 'webinterface.view.login.ajax'),
url(r'^settings/$', 'webinterface.view.settings.main'),
url(r'^settings/ajax/$', 'webinterface.view.settings.ajax'),
url(r'^orders/$', 'webinterface.view.orders.main'),
url(r'^orders/ajax/$', 'webinterface.view.orders.ajax'),
)<|fim▁end|> |
urlpatterns = patterns('', |
<|file_name|>sse-c.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2015-2021 MinIO, Inc.
//
// This file is part of MinIO Object Storage stack
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
package crypto
import (
"bytes"
"context"
"crypto/md5"
"encoding/base64"
"net/http"
xhttp "github.com/minio/minio/internal/http"
"github.com/minio/minio/internal/logger"
)
type ssec struct{}
var (
// SSEC represents AWS SSE-C. It provides functionality to handle
// SSE-C requests.
SSEC = ssec{}
_ Type = SSEC
)
// String returns the SSE domain as string. For SSE-C the
// domain is "SSE-C".
func (ssec) String() string { return "SSE-C" }
// IsRequested returns true if the HTTP headers contains
// at least one SSE-C header. SSE-C copy headers are ignored.
func (ssec) IsRequested(h http.Header) bool {
if _, ok := h[xhttp.AmzServerSideEncryptionCustomerAlgorithm]; ok {
return true
}
if _, ok := h[xhttp.AmzServerSideEncryptionCustomerKey]; ok {
return true
}
if _, ok := h[xhttp.AmzServerSideEncryptionCustomerKeyMD5]; ok {
return true
}
return false
}
// IsEncrypted returns true if the metadata contains an SSE-C
// entry inidicating that the object has been encrypted using
// SSE-C.
func (ssec) IsEncrypted(metadata map[string]string) bool {
if _, ok := metadata[MetaSealedKeySSEC]; ok {
return true
}
return false
}
// ParseHTTP parses the SSE-C headers and returns the SSE-C client key
// on success. SSE-C copy headers are ignored.
func (ssec) ParseHTTP(h http.Header) (key [32]byte, err error) {
if h.Get(xhttp.AmzServerSideEncryptionCustomerAlgorithm) != xhttp.AmzEncryptionAES {
return key, ErrInvalidCustomerAlgorithm
}
if h.Get(xhttp.AmzServerSideEncryptionCustomerKey) == "" {
return key, ErrMissingCustomerKey
}
if h.Get(xhttp.AmzServerSideEncryptionCustomerKeyMD5) == "" {
return key, ErrMissingCustomerKeyMD5
}
clientKey, err := base64.StdEncoding.DecodeString(h.Get(xhttp.AmzServerSideEncryptionCustomerKey))
if err != nil || len(clientKey) != 32 { // The client key must be 256 bits long
return key, ErrInvalidCustomerKey
}
keyMD5, err := base64.StdEncoding.DecodeString(h.Get(xhttp.AmzServerSideEncryptionCustomerKeyMD5))
if md5Sum := md5.Sum(clientKey); err != nil || !bytes.Equal(md5Sum[:], keyMD5) {
return key, ErrCustomerKeyMD5Mismatch
}
copy(key[:], clientKey)
return key, nil
}
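// Illustrative request headers accepted by ParseHTTP (example values, not
// taken from this repository):
//
//	X-Amz-Server-Side-Encryption-Customer-Algorithm: AES256
//	X-Amz-Server-Side-Encryption-Customer-Key: <base64 of the 256-bit client key>
//	X-Amz-Server-Side-Encryption-Customer-Key-MD5: <base64 MD5 digest of the raw key>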
// UnsealObjectKey extracts and decrypts the sealed object key
// from the metadata using the SSE-C client key of the HTTP headers
// and returns the decrypted object key.
func (s3 ssec) UnsealObjectKey(h http.Header, metadata map[string]string, bucket, object string) (key ObjectKey, err error) {
clientKey, err := s3.ParseHTTP(h)
if err != nil {
return
}
return unsealObjectKey(clientKey[:], metadata, bucket, object)
}
// CreateMetadata encodes the sealed key into the metadata
// and returns the modified metadata. It allocates a new
// metadata map if metadata is nil.
func (ssec) CreateMetadata(metadata map[string]string, sealedKey SealedKey) map[string]string {
if sealedKey.Algorithm != SealAlgorithm {
logger.CriticalIf(context.Background(), Errorf("The seal algorithm '%s' is invalid for SSE-C", sealedKey.Algorithm))
}
if metadata == nil {
metadata = make(map[string]string, 3)
}
metadata[MetaAlgorithm] = SealAlgorithm
metadata[MetaIV] = base64.StdEncoding.EncodeToString(sealedKey.IV[:])
metadata[MetaSealedKeySSEC] = base64.StdEncoding.EncodeToString(sealedKey.Key[:])
return metadata
}
// ParseMetadata extracts all SSE-C related values from the object metadata
// and checks whether they are well-formed. It returns the sealed object key
// on success.
func (ssec) ParseMetadata(metadata map[string]string) (sealedKey SealedKey, err error) {
// Extract all required values from object metadata
b64IV, ok := metadata[MetaIV]
if !ok {
return sealedKey, errMissingInternalIV
}
algorithm, ok := metadata[MetaAlgorithm]
if !ok {
return sealedKey, errMissingInternalSealAlgorithm
}
b64SealedKey, ok := metadata[MetaSealedKeySSEC]
if !ok {
return sealedKey, Errorf("The object metadata is missing the internal sealed key for SSE-C")
}
// Check whether all extracted values are well-formed
iv, err := base64.StdEncoding.DecodeString(b64IV)
if err != nil || len(iv) != 32 {<|fim▁hole|> return sealedKey, errInvalidInternalSealAlgorithm
}
encryptedKey, err := base64.StdEncoding.DecodeString(b64SealedKey)
if err != nil || len(encryptedKey) != 64 {
return sealedKey, Errorf("The internal sealed key for SSE-C is invalid")
}
sealedKey.Algorithm = algorithm
copy(sealedKey.IV[:], iv)
copy(sealedKey.Key[:], encryptedKey)
return sealedKey, nil
}<|fim▁end|> | return sealedKey, errInvalidInternalIV
}
if algorithm != SealAlgorithm && algorithm != InsecureSealAlgorithm { |
<|file_name|>PetitionsHandler.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2010 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include "Common.h"
#include "Language.h"
#include "WorldPacket.h"
#include "WorldSession.h"
#include "World.h"
#include "ObjectMgr.h"
#include "Log.h"
#include "Opcodes.h"
#include "Guild.h"
#include "ArenaTeam.h"
#include "GossipDef.h"
#include "SocialMgr.h"
/*enum PetitionType // dbc data
{
PETITION_TYPE_GUILD = 1,
PETITION_TYPE_ARENA_TEAM = 3
};*/
// Charters ID in item_template
#define GUILD_CHARTER 5863
#define GUILD_CHARTER_COST 1000 // 10 S
#define ARENA_TEAM_CHARTER_2v2 23560
#define ARENA_TEAM_CHARTER_2v2_COST 800000 // 80 G
#define ARENA_TEAM_CHARTER_3v3 23561
#define ARENA_TEAM_CHARTER_3v3_COST 1200000 // 120 G
#define ARENA_TEAM_CHARTER_5v5 23562
#define ARENA_TEAM_CHARTER_5v5_COST 2000000 // 200 G
#define CHARTER_DISPLAY_ID 16161
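// Note: the charter costs above are expressed in copper coins
// (1 silver = 100 copper, 1 gold = 10000 copper), hence
// GUILD_CHARTER_COST 1000 == 10 silver and ARENA_TEAM_CHARTER_5v5_COST 2000000 == 200 gold.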
void WorldSession::HandlePetitionBuyOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode CMSG_PETITION_BUY");
recv_data.hexlike();
uint64 guidNPC;
uint32 unk2;
std::string name;
recv_data >> guidNPC; // NPC GUID
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint64>(); // 0
recv_data >> name; // name
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint32>(); // 0
recv_data.read_skip<uint16>(); // 0
recv_data.read_skip<uint8>(); // 0
recv_data >> unk2; // index
recv_data.read_skip<uint32>(); // 0
DEBUG_LOG("Petitioner with GUID %u tried sell petition: name %s", GUID_LOPART(guidNPC), name.c_str());
// prevent cheating
Creature *pCreature = GetPlayer()->GetNPCIfCanInteractWith(guidNPC,UNIT_NPC_FLAG_PETITIONER);
if (!pCreature)
{
DEBUG_LOG("WORLD: HandlePetitionBuyOpcode - Unit (GUID: %u) not found or you can't interact with him.", GUID_LOPART(guidNPC));
return;
}
// remove fake death
if(GetPlayer()->hasUnitState(UNIT_STAT_DIED))
GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH);
uint32 charterid = 0;
uint32 cost = 0;
uint32 type = 0;
if(pCreature->isTabardDesigner())
{
// if tabard designer, then trying to buy a guild charter.
// do not let if already in guild.
if(_player->GetGuildId())
return;
charterid = GUILD_CHARTER;
cost = GUILD_CHARTER_COST;
type = 9;
}
else
{
// TODO: find correct opcode
if(_player->getLevel() < sWorld.getConfig(CONFIG_UINT32_MAX_PLAYER_LEVEL))
{
SendNotification(LANG_ARENA_ONE_TOOLOW, sWorld.getConfig(CONFIG_UINT32_MAX_PLAYER_LEVEL));
return;
}
switch(unk2)
{
case 1:
charterid = ARENA_TEAM_CHARTER_2v2;
cost = ARENA_TEAM_CHARTER_2v2_COST;
type = 2; // 2v2
break;
case 2:
charterid = ARENA_TEAM_CHARTER_3v3;
cost = ARENA_TEAM_CHARTER_3v3_COST;
type = 3; // 3v3
break;
case 3:
charterid = ARENA_TEAM_CHARTER_5v5;
cost = ARENA_TEAM_CHARTER_5v5_COST;
type = 5; // 5v5
break;
default:
DEBUG_LOG("unknown selection at buy petition: %u", unk2);
return;
}
if(_player->GetArenaTeamId(unk2 - 1))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, name, "", ERR_ALREADY_IN_ARENA_TEAM);
return;
}
}
if(type == 9)
{
if(sObjectMgr.GetGuildByName(name))
{
SendGuildCommandResult(GUILD_CREATE_S, name, ERR_GUILD_NAME_EXISTS_S);
return;
}
if(sObjectMgr.IsReservedName(name) || !ObjectMgr::IsValidCharterName(name))
{
SendGuildCommandResult(GUILD_CREATE_S, name, ERR_GUILD_NAME_INVALID);
return;
}
}
else
{
if(sObjectMgr.GetArenaTeamByName(name))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, name, "", ERR_ARENA_TEAM_NAME_EXISTS_S);
return;
}
if(sObjectMgr.IsReservedName(name) || !ObjectMgr::IsValidCharterName(name))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, name, "", ERR_ARENA_TEAM_NAME_INVALID);
return;
}
}
ItemPrototype const *pProto = ObjectMgr::GetItemPrototype(charterid);
if(!pProto)
{
_player->SendBuyError(BUY_ERR_CANT_FIND_ITEM, NULL, charterid, 0);
return;
}
if(_player->GetMoney() < cost)
{ //player hasn't got enough money
_player->SendBuyError(BUY_ERR_NOT_ENOUGHT_MONEY, pCreature, charterid, 0);
return;
}
ItemPosCountVec dest;
uint8 msg = _player->CanStoreNewItem(NULL_BAG, NULL_SLOT, dest, charterid, pProto->BuyCount );
if(msg != EQUIP_ERR_OK)
{
_player->SendBuyError(msg, pCreature, charterid, 0);
return;
}
_player->ModifyMoney(-(int32)cost);
Item *charter = _player->StoreNewItem(dest, charterid, true);
if(!charter)
return;
charter->SetUInt32Value(ITEM_FIELD_ENCHANTMENT_1_1, charter->GetGUIDLow());
// ITEM_FIELD_ENCHANTMENT_1_1 is guild/arenateam id
// ITEM_FIELD_ENCHANTMENT_1_1+1 is current signatures count (showed on item)
charter->SetState(ITEM_CHANGED, _player);
_player->SendNewItem(charter, 1, true, false);
    // a petition is invalid if both the owner and the type match;
    // we checked above whether this player is in an arena team, so this must be data corruption
QueryResult *result = CharacterDatabase.PQuery("SELECT petitionguid FROM petition WHERE ownerguid = '%u' AND type = '%u'", _player->GetGUIDLow(), type);
std::ostringstream ssInvalidPetitionGUIDs;
if (result)
{
do
{
Field *fields = result->Fetch();
ssInvalidPetitionGUIDs << "'" << fields[0].GetUInt32() << "' , ";
} while (result->NextRow());
delete result;
}
// delete petitions with the same guid as this one
ssInvalidPetitionGUIDs << "'" << charter->GetGUIDLow() << "'";
DEBUG_LOG("Invalid petition GUIDs: %s", ssInvalidPetitionGUIDs.str().c_str());
CharacterDatabase.escape_string(name);
CharacterDatabase.BeginTransaction();
CharacterDatabase.PExecute("DELETE FROM petition WHERE petitionguid IN ( %s )", ssInvalidPetitionGUIDs.str().c_str());
CharacterDatabase.PExecute("DELETE FROM petition_sign WHERE petitionguid IN ( %s )", ssInvalidPetitionGUIDs.str().c_str());
CharacterDatabase.PExecute("INSERT INTO petition (ownerguid, petitionguid, name, type) VALUES ('%u', '%u', '%s', '%u')",
_player->GetGUIDLow(), charter->GetGUIDLow(), name.c_str(), type);
CharacterDatabase.CommitTransaction();
}
void WorldSession::HandlePetitionShowSignOpcode(WorldPacket & recv_data)
{
// ok
DEBUG_LOG("Received opcode CMSG_PETITION_SHOW_SIGNATURES");
//recv_data.hexlike();
uint8 signs = 0;
uint64 petitionguid;
recv_data >> petitionguid; // petition guid
    // work around possible miscompilation (wrong code optimization?) on some GCC versions by explicitly using GUID_LOPART(petitionguid)
uint32 petitionguid_low = GUID_LOPART(petitionguid);
QueryResult *result = CharacterDatabase.PQuery("SELECT type FROM petition WHERE petitionguid = '%u'", petitionguid_low);
if(!result)
{
sLog.outError("any petition on server...");
return;
}
Field *fields = result->Fetch();
uint32 type = fields[0].GetUInt32();
delete result;
// if guild petition and has guild => error, return;
if(type == 9 && _player->GetGuildId())
return;
result = CharacterDatabase.PQuery("SELECT playerguid FROM petition_sign WHERE petitionguid = '%u'", petitionguid_low);
// result==NULL also correct in case no sign yet
if(result)
signs = (uint8)result->GetRowCount();
DEBUG_LOG("CMSG_PETITION_SHOW_SIGNATURES petition entry: '%u'", petitionguid_low);
WorldPacket data(SMSG_PETITION_SHOW_SIGNATURES, (8+8+4+1+signs*12));
data << uint64(petitionguid); // petition guid
data << _player->GetObjectGuid(); // owner guid
    data << uint32(petitionguid_low);                   // guild guid (in mangos always same as GUID_LOPART(petitionguid))
data << uint8(signs); // sign's count
for(uint8 i = 1; i <= signs; ++i)
{
Field *fields2 = result->Fetch();
uint64 plguid = fields2[0].GetUInt64();
data << uint64(plguid); // Player GUID
data << uint32(0); // there 0 ...
result->NextRow();
}
delete result;
SendPacket(&data);
}
void WorldSession::HandlePetitionQueryOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode CMSG_PETITION_QUERY");
//recv_data.hexlike();
uint32 guildguid;
ObjectGuid petitionguid;
recv_data >> guildguid; // in mangos always same as GUID_LOPART(petitionguid)
recv_data >> petitionguid; // petition guid
DEBUG_LOG("CMSG_PETITION_QUERY Petition %s Guild GUID %u", petitionguid.GetString().c_str(), guildguid);
SendPetitionQueryOpcode(petitionguid);
}
void WorldSession::SendPetitionQueryOpcode(ObjectGuid petitionguid)
{
uint32 petitionLowGuid = petitionguid.GetCounter();
ObjectGuid ownerguid;
uint32 type;
std::string name = "NO_NAME_FOR_GUID";
uint8 signs = 0;
QueryResult *result = CharacterDatabase.PQuery(
"SELECT ownerguid, name, "
" (SELECT COUNT(playerguid) FROM petition_sign WHERE petition_sign.petitionguid = '%u') AS signs, "
" type "
"FROM petition WHERE petitionguid = '%u'", petitionLowGuid, petitionLowGuid);
if (result)
{
Field* fields = result->Fetch();
ownerguid = ObjectGuid(HIGHGUID_PLAYER, fields[0].GetUInt32());
name = fields[1].GetCppString();
signs = fields[2].GetUInt8();
type = fields[3].GetUInt32();
delete result;
}
else
{
DEBUG_LOG("CMSG_PETITION_QUERY failed for petition (GUID: %u)", petitionLowGuid);
return;
}
WorldPacket data(SMSG_PETITION_QUERY_RESPONSE, (4+8+name.size()+1+1+4*13));
    data << uint32(petitionLowGuid);                    // guild/team guid (in mangos always same as GUID_LOPART(petitionguid))
data << ownerguid; // charter owner guid
data << name; // name (guild/arena team)
data << uint8(0); // 1
if (type == 9)
{
data << uint32(9);
data << uint32(9);
        data << uint32(0);                              // bypass client-side limitation, a different value is needed here for each petition
}
else
{
data << type-1;
data << type-1;
        data << type;                                   // bypass client-side limitation, a different value is needed here for each petition
}
data << uint32(0); // 5
data << uint32(0); // 6
data << uint32(0); // 7
data << uint32(0); // 8
data << uint16(0); // 9 2 bytes field
data << uint32(0); // 10
data << uint32(0); // 11
data << uint32(0); // 13 count of next strings?
data << uint32(0); // 14
if (type == 9)
data << uint32(0); // 15 0 - guild, 1 - arena team
else
data << uint32(1);
SendPacket(&data);
}
void WorldSession::HandlePetitionRenameOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode MSG_PETITION_RENAME"); // ok
//recv_data.hexlike();
uint64 petitionguid;
uint32 type;
std::string newname;
recv_data >> petitionguid; // guid
recv_data >> newname; // new name
Item *item = _player->GetItemByGuid(petitionguid);
if(!item)
return;
QueryResult *result = CharacterDatabase.PQuery("SELECT type FROM petition WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
if(result)
{
Field* fields = result->Fetch();
type = fields[0].GetUInt32();
delete result;
}
else
{
DEBUG_LOG("CMSG_PETITION_QUERY failed for petition (GUID: %u)", GUID_LOPART(petitionguid));
return;
}
if(type == 9)
{
if(sObjectMgr.GetGuildByName(newname))
{
SendGuildCommandResult(GUILD_CREATE_S, newname, ERR_GUILD_NAME_EXISTS_S);
return;
}
if(sObjectMgr.IsReservedName(newname) || !ObjectMgr::IsValidCharterName(newname))
{
SendGuildCommandResult(GUILD_CREATE_S, newname, ERR_GUILD_NAME_INVALID);
return;
}
}
else
{
if(sObjectMgr.GetArenaTeamByName(newname))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, newname, "", ERR_ARENA_TEAM_NAME_EXISTS_S);
return;
}
if(sObjectMgr.IsReservedName(newname) || !ObjectMgr::IsValidCharterName(newname))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, newname, "", ERR_ARENA_TEAM_NAME_INVALID);
return;
}
}
std::string db_newname = newname;
CharacterDatabase.escape_string(db_newname);
CharacterDatabase.PExecute("UPDATE petition SET name = '%s' WHERE petitionguid = '%u'",
db_newname.c_str(), GUID_LOPART(petitionguid));
DEBUG_LOG("Petition (GUID: %u) renamed to '%s'", GUID_LOPART(petitionguid), newname.c_str());
WorldPacket data(MSG_PETITION_RENAME, (8+newname.size()+1));
data << uint64(petitionguid);
data << newname;
SendPacket(&data);
}
void WorldSession::HandlePetitionSignOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode CMSG_PETITION_SIGN"); // ok
//recv_data.hexlike();
Field *fields;
ObjectGuid petitionGuid;
uint8 unk;
recv_data >> petitionGuid; // petition guid
recv_data >> unk;
uint32 petitionLowGuid = petitionGuid.GetCounter();
QueryResult *result = CharacterDatabase.PQuery(
"SELECT ownerguid, "
" (SELECT COUNT(playerguid) FROM petition_sign WHERE petition_sign.petitionguid = '%u') AS signs, "
" type "
"FROM petition WHERE petitionguid = '%u'", petitionLowGuid, petitionLowGuid);
if(!result)
{
sLog.outError("any petition on server...");
return;
}
fields = result->Fetch();
uint32 ownerLowGuid = fields[0].GetUInt32();
ObjectGuid ownerguid = ObjectGuid(HIGHGUID_PLAYER, ownerLowGuid);
uint8 signs = fields[1].GetUInt8();
uint32 type = fields[2].GetUInt32();
delete result;
if (ownerguid == _player->GetObjectGuid())
return;
    // do not let enemies sign the guild charter
if (!sWorld.getConfig(CONFIG_BOOL_ALLOW_TWO_SIDE_INTERACTION_GUILD) &&
GetPlayer()->GetTeam() != sObjectMgr.GetPlayerTeamByGUID(ownerguid))
{
if(type != 9)
SendArenaTeamCommandResult(ERR_ARENA_TEAM_INVITE_SS, "", "", ERR_ARENA_TEAM_NOT_ALLIED);
else
SendGuildCommandResult(GUILD_CREATE_S, "", ERR_GUILD_NOT_ALLIED);
return;
}
if(type != 9)
{
if(_player->getLevel() < sWorld.getConfig(CONFIG_UINT32_MAX_PLAYER_LEVEL))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, "", _player->GetName(), ERR_ARENA_TEAM_TARGET_TOO_LOW_S);
return;
}
uint8 slot = ArenaTeam::GetSlotByType(type);
if(slot >= MAX_ARENA_SLOT)
return;
if(_player->GetArenaTeamId(slot))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_INVITE_SS, "", _player->GetName(), ERR_ALREADY_IN_ARENA_TEAM_S);
return;
}
if(_player->GetArenaTeamIdInvited())
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_INVITE_SS, "", _player->GetName(), ERR_ALREADY_INVITED_TO_ARENA_TEAM_S);
return;
}
}
else
{
if(_player->GetGuildId())
{
SendGuildCommandResult(GUILD_INVITE_S, _player->GetName(), ERR_ALREADY_IN_GUILD_S);
return;
}
if(_player->GetGuildIdInvited())
{
SendGuildCommandResult(GUILD_INVITE_S, _player->GetName(), ERR_ALREADY_INVITED_TO_GUILD_S);
return;
}
}
if(++signs > type) // client signs maximum
return;
    // the client prevents a character from signing the same petition twice, but does not check signatures by other characters on the same account,
    // so do not allow a signature from an account that has already signed
result = CharacterDatabase.PQuery("SELECT playerguid FROM petition_sign WHERE player_account = '%u' AND petitionguid = '%u'", GetAccountId(), petitionLowGuid);
if(result)
{
delete result;
WorldPacket data(SMSG_PETITION_SIGN_RESULTS, (8+8+4));
data << petitionGuid;
data << _player->GetObjectGuid();
data << uint32(PETITION_SIGN_ALREADY_SIGNED);
// close at signer side
SendPacket(&data);
// update for owner if online
if(Player *owner = sObjectMgr.GetPlayer(ownerguid))
owner->GetSession()->SendPacket(&data);
return;
}
CharacterDatabase.PExecute("INSERT INTO petition_sign (ownerguid,petitionguid, playerguid, player_account) VALUES ('%u', '%u', '%u','%u')",
ownerLowGuid, petitionLowGuid, _player->GetGUIDLow(), GetAccountId());
DEBUG_LOG("PETITION SIGN: GUID %u by player: %s (GUID: %u Account: %u)",
petitionLowGuid, _player->GetName(), _player->GetGUIDLow(), GetAccountId());
WorldPacket data(SMSG_PETITION_SIGN_RESULTS, (8+8+4));
data << petitionGuid;
data << _player->GetObjectGuid();
data << uint32(PETITION_SIGN_OK);
// close at signer side
SendPacket(&data);
// update the signature count on the charter item, needs testing...
//Item *item = _player->GetItemByGuid(petitionguid));
//if(item)
// item->SetUInt32Value(ITEM_FIELD_ENCHANTMENT_1_1+1, signs);
// update for owner if online
if(Player *owner = sObjectMgr.GetPlayer(ownerguid))
owner->GetSession()->SendPacket(&data);
}
void WorldSession::HandlePetitionDeclineOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode MSG_PETITION_DECLINE"); // ok
//recv_data.hexlike();
ObjectGuid petitionGuid;
recv_data >> petitionGuid; // petition guid
DEBUG_LOG("Petition %s declined by %s", petitionGuid.GetString().c_str(), _player->GetObjectGuid().GetString().c_str());
uint32 petitionLowGuid = petitionGuid.GetCounter();
QueryResult *result = CharacterDatabase.PQuery("SELECT ownerguid FROM petition WHERE petitionguid = '%u'", petitionLowGuid);
if (!result)
return;
Field *fields = result->Fetch();
ObjectGuid ownerguid = ObjectGuid(HIGHGUID_PLAYER, fields[0].GetUInt32());
delete result;
Player *owner = sObjectMgr.GetPlayer(ownerguid);
if(owner) // petition owner online
{
WorldPacket data(MSG_PETITION_DECLINE, 8);
data << _player->GetObjectGuid();
owner->GetSession()->SendPacket(&data);
}
}
void WorldSession::HandleOfferPetitionOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode CMSG_OFFER_PETITION"); // ok
//recv_data.hexlike();
uint8 signs = 0;
uint64 petitionguid, plguid;
uint32 type, junk;
Player *player;
recv_data >> junk; // this is not petition type!
recv_data >> petitionguid; // petition guid
recv_data >> plguid; // player guid
player = ObjectAccessor::FindPlayer(plguid);
if (!player)
return;
QueryResult *result = CharacterDatabase.PQuery("SELECT type FROM petition WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
if (!result)
return;
Field *fields = result->Fetch();
type = fields[0].GetUInt32();
delete result;
DEBUG_LOG("OFFER PETITION: type %u, GUID1 %u, to player id: %u", type, GUID_LOPART(petitionguid), GUID_LOPART(plguid));
if (!sWorld.getConfig(CONFIG_BOOL_ALLOW_TWO_SIDE_INTERACTION_GUILD) && GetPlayer()->GetTeam() != player->GetTeam() )
{
if(type != 9)
SendArenaTeamCommandResult(ERR_ARENA_TEAM_INVITE_SS, "", "", ERR_ARENA_TEAM_NOT_ALLIED);
else
SendGuildCommandResult(GUILD_CREATE_S, "", ERR_GUILD_NOT_ALLIED);
return;
}
if(type != 9)
{
if(player->getLevel() < sWorld.getConfig(CONFIG_UINT32_MAX_PLAYER_LEVEL))
{
// player is too low level to join an arena team
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, "", player->GetName(), ERR_ARENA_TEAM_TARGET_TOO_LOW_S);
return;
}
uint8 slot = ArenaTeam::GetSlotByType(type);
if(slot >= MAX_ARENA_SLOT)
return;
if(player->GetArenaTeamId(slot))
{
// player is already in an arena team
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, "", player->GetName(), ERR_ALREADY_IN_ARENA_TEAM_S);
return;
}
if(player->GetArenaTeamIdInvited())
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_INVITE_SS, "", _player->GetName(), ERR_ALREADY_INVITED_TO_ARENA_TEAM_S);
return;
}
}
else
{
if(player->GetGuildId())
{
SendGuildCommandResult(GUILD_INVITE_S, _player->GetName(), ERR_ALREADY_IN_GUILD_S);
return;
}
if(player->GetGuildIdInvited())
{
SendGuildCommandResult(GUILD_INVITE_S, _player->GetName(), ERR_ALREADY_INVITED_TO_GUILD_S);
return;
}
}
result = CharacterDatabase.PQuery("SELECT playerguid FROM petition_sign WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
// result == NULL is also valid - the charter simply has no signatures yet
if(result)
signs = (uint8)result->GetRowCount();
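// each row below appends a player guid (8 bytes) plus a zero dword to the packet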
WorldPacket data(SMSG_PETITION_SHOW_SIGNATURES, (8+8+4+signs+signs*12));
data << uint64(petitionguid); // petition guid
data << _player->GetObjectGuid(); // owner guid
data << uint32(GUID_LOPART(petitionguid)); // guild guid (in mangos always the same as GUID_LOPART(petitionguid))
data << uint8(signs); // sign's count
for(uint8 i = 1; i <= signs; ++i)
{
Field *fields2 = result->Fetch();
plguid = fields2[0].GetUInt64();
data << uint64(plguid); // Player GUID
data << uint32(0); // always 0 here
result->NextRow();
}<|fim▁hole|> player->GetSession()->SendPacket(&data);
}
void WorldSession::HandleTurnInPetitionOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received opcode CMSG_TURN_IN_PETITION"); // ok
//recv_data.hexlike();
WorldPacket data;
uint64 petitionguid;
uint32 ownerguidlo;
uint32 type;
std::string name;
recv_data >> petitionguid;
DEBUG_LOG("Petition %u turned in by %u", GUID_LOPART(petitionguid), _player->GetGUIDLow());
// data
QueryResult *result = CharacterDatabase.PQuery("SELECT ownerguid, name, type FROM petition WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
if(result)
{
Field *fields = result->Fetch();
ownerguidlo = fields[0].GetUInt32();
name = fields[1].GetCppString();
type = fields[2].GetUInt32();
delete result;
}
else
{
sLog.outError("petition table has broken data!");
return;
}
if(type == 9)
{
if(_player->GetGuildId())
{
data.Initialize(SMSG_TURN_IN_PETITION_RESULTS, 4);
data << uint32(PETITION_TURN_ALREADY_IN_GUILD); // already in guild
_player->GetSession()->SendPacket(&data);
return;
}
}
else
{
uint8 slot = ArenaTeam::GetSlotByType(type);
if(slot >= MAX_ARENA_SLOT)
return;
if(_player->GetArenaTeamId(slot))
{
//data.Initialize(SMSG_TURN_IN_PETITION_RESULTS, 4);
//data << (uint32)PETITION_TURN_ALREADY_IN_GUILD; // already in guild
//_player->GetSession()->SendPacket(&data);
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, name, "", ERR_ALREADY_IN_ARENA_TEAM);
return;
}
}
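// only the petition owner may turn in the charter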
if(_player->GetGUIDLow() != ownerguidlo)
return;
// signs
uint8 signs;
result = CharacterDatabase.PQuery("SELECT playerguid FROM petition_sign WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
if(result)
signs = (uint8)result->GetRowCount();
else
signs = 0;
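// required signatures: guild charters use the configured minimum,
// arena charters need (team size - 1) signers besides the owner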
uint32 count;
//if(signs < sWorld.getConfig(CONFIG_UINT32_MIN_PETITION_SIGNS))
if(type == 9)
count = sWorld.getConfig(CONFIG_UINT32_MIN_PETITION_SIGNS);
else
count = type - 1;
if(signs < count)
{
data.Initialize(SMSG_TURN_IN_PETITION_RESULTS, 4);
data << uint32(PETITION_TURN_NEED_MORE_SIGNATURES); // need more signatures...
SendPacket(&data);
delete result;
return;
}
if(type == 9)
{
if(sObjectMgr.GetGuildByName(name))
{
SendGuildCommandResult(GUILD_CREATE_S, name, ERR_GUILD_NAME_EXISTS_S);
delete result;
return;
}
}
else
{
if(sObjectMgr.GetArenaTeamByName(name))
{
SendArenaTeamCommandResult(ERR_ARENA_TEAM_CREATE_S, name, "", ERR_ARENA_TEAM_NAME_EXISTS_S);
delete result;
return;
}
}
// and at last charter item check
Item *item = _player->GetItemByGuid(petitionguid);
if(!item)
{
delete result;
return;
}
// OK!
// delete charter item
_player->DestroyItem(item->GetBagSlot(), item->GetSlot(), true);
if(type == 9) // create guild
{
Guild* guild = new Guild;
if(!guild->Create(_player, name))
{
delete guild;
delete result;
return;
}
// register guild and add guildmaster
sObjectMgr.AddGuild(guild);
// add members
for(uint8 i = 0; i < signs; ++i)
{
Field* fields = result->Fetch();
ObjectGuid signguid = ObjectGuid(HIGHGUID_PLAYER, fields[0].GetUInt32());
if (signguid.IsEmpty())
continue;
guild->AddMember(signguid, guild->GetLowestRank());
result->NextRow();
}
}
else // or arena team
{
ArenaTeam* at = new ArenaTeam;
if (!at->Create(_player->GetObjectGuid(), type, name))
{
sLog.outError("PetitionsHandler: arena team create failed.");
delete at;
delete result;
return;
}
uint32 icon, iconcolor, border, bordercolor, background;
recv_data >> background >> icon >> iconcolor >> border >> bordercolor;
at->SetEmblem(background, icon, iconcolor, border, bordercolor);
// register team and add captain
sObjectMgr.AddArenaTeam(at);
DEBUG_LOG("PetitonsHandler: arena team added to objmrg");
// add members
for(uint8 i = 0; i < signs; ++i)
{
Field* fields = result->Fetch();
ObjectGuid memberGUID = ObjectGuid(HIGHGUID_PLAYER, fields[0].GetUInt32());
if (memberGUID.IsEmpty())
continue;
DEBUG_LOG("PetitionsHandler: adding arena member %s", memberGUID.GetString().c_str());
at->AddMember(memberGUID);
result->NextRow();
}
}
delete result;
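// remove the charter and all of its signatures atomically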
CharacterDatabase.BeginTransaction();
CharacterDatabase.PExecute("DELETE FROM petition WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
CharacterDatabase.PExecute("DELETE FROM petition_sign WHERE petitionguid = '%u'", GUID_LOPART(petitionguid));
CharacterDatabase.CommitTransaction();
// created
DEBUG_LOG("TURN IN PETITION GUID %u", GUID_LOPART(petitionguid));
data.Initialize(SMSG_TURN_IN_PETITION_RESULTS, 4);
data << uint32(PETITION_TURN_OK);
SendPacket(&data);
}
void WorldSession::HandlePetitionShowListOpcode(WorldPacket & recv_data)
{
DEBUG_LOG("Received CMSG_PETITION_SHOWLIST"); // ok
//recv_data.hexlike();
uint64 guid;
recv_data >> guid;
SendPetitionShowList(guid);
}
void WorldSession::SendPetitionShowList(uint64 guid)
{
Creature *pCreature = GetPlayer()->GetNPCIfCanInteractWith(guid, UNIT_NPC_FLAG_PETITIONER);
if (!pCreature)
{
DEBUG_LOG("WORLD: HandlePetitionShowListOpcode - Unit (GUID: %u) not found or you can't interact with him.", uint32(GUID_LOPART(guid)));
return;
}
// remove fake death
if(GetPlayer()->hasUnitState(UNIT_STAT_DIED))
GetPlayer()->RemoveSpellsCausingAura(SPELL_AURA_FEIGN_DEATH);
uint8 count = 0;
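// tabard designers sell only the guild charter; arena petitioners offer the three arena charters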
if(pCreature->isTabardDesigner())
count = 1;
else
count = 3;
WorldPacket data(SMSG_PETITION_SHOWLIST, 8+1+4*6);
data << uint64(guid); // npc guid
data << uint8(count); // count
if(count == 1)
{
data << uint32(1); // index
data << uint32(GUILD_CHARTER); // charter entry
data << uint32(CHARTER_DISPLAY_ID); // charter display id
data << uint32(GUILD_CHARTER_COST); // charter cost
data << uint32(0); // unknown
data << uint32(9); // required signs?
}
else
{
// 2v2
data << uint32(1); // index
data << uint32(ARENA_TEAM_CHARTER_2v2); // charter entry
data << uint32(CHARTER_DISPLAY_ID); // charter display id
data << uint32(ARENA_TEAM_CHARTER_2v2_COST); // charter cost
data << uint32(2); // unknown
data << uint32(2); // required signs?
// 3v3
data << uint32(2); // index
data << uint32(ARENA_TEAM_CHARTER_3v3); // charter entry
data << uint32(CHARTER_DISPLAY_ID); // charter display id
data << uint32(ARENA_TEAM_CHARTER_3v3_COST); // charter cost
data << uint32(3); // unknown
data << uint32(3); // required signs?
// 5v5
data << uint32(3); // index
data << uint32(ARENA_TEAM_CHARTER_5v5); // charter entry
data << uint32(CHARTER_DISPLAY_ID); // charter display id
data << uint32(ARENA_TEAM_CHARTER_5v5_COST); // charter cost
data << uint32(5); // unknown
data << uint32(5); // required signs?
}
//for(uint8 i = 0; i < count; ++i)
//{
// data << uint32(i); // index
// data << uint32(GUILD_CHARTER); // charter entry
// data << uint32(CHARTER_DISPLAY_ID); // charter display id
// data << uint32(GUILD_CHARTER_COST+i); // charter cost
// data << uint32(0); // unknown
// data << uint32(9); // required signs?
//}
SendPacket(&data);
DEBUG_LOG("Sent SMSG_PETITION_SHOWLIST");
}<|fim▁end|> |
delete result; |
<|file_name|>rabid_mott.py<|end_file_name|><|fim▁begin|>import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options<|fim▁hole|>from java.util import Vector
def addTemplate(core):
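# Assemble the rabid mott's stats, harvestable resources, visuals and attacks,
# then register the template with the spawn service.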
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('rabid_mott')
mobileTemplate.setLevel(6)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Carnivore Meat")
mobileTemplate.setMeatAmount(15)
mobileTemplate.setHideType("Bristly Hide")
mobileTemplate.setHideAmount(15)
mobileTemplate.setBoneType("Animal Bones")
mobileTemplate.setBoneAmount(5)
mobileTemplate.setSocialGroup("self")
mobileTemplate.setAssistRange(2)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_mott.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_charge_2')
attacks.add('bm_slash_2')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('rabid_mott', mobileTemplate)
return<|fim▁end|> | |
<|file_name|>debug-words.rs<|end_file_name|><|fim▁begin|>use textwrap::WordSeparator;<|fim▁hole|>fn main() {
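// Select the word splitter at compile time: Unicode line-breaking rules when
// the "unicode-linebreak" feature is enabled, plain ASCII spaces otherwise.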
#[cfg(feature = "unicode-linebreak")]
let word_separator = WordSeparator::UnicodeBreakProperties;
#[cfg(not(feature = "unicode-linebreak"))]
let word_separator = WordSeparator::AsciiSpace;
let args = std::env::args().skip(1).collect::<Vec<_>>();
let text = args.join(" ");
let words = word_separator.find_words(&text).collect::<Vec<_>>();
println!("word_separator = {:?}", word_separator);
println!("text = {:?}", text);
println!("words = {:#?}", words);
}<|fim▁end|> | |
<|file_name|>portal-directives.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {
ComponentFactoryResolver,
ComponentRef,
Directive,
EmbeddedViewRef,
EventEmitter,
NgModule,
OnDestroy,
OnInit,
Output,
TemplateRef,
ViewContainerRef,
Inject,
} from '@angular/core';
import {DOCUMENT} from '@angular/common';
import {BasePortalOutlet, ComponentPortal, Portal, TemplatePortal, DomPortal} from './portal';
/**
* Directive version of a `TemplatePortal`. Because the directive *is* a TemplatePortal,
* the directive instance itself can be attached to a host, enabling declarative use of portals.
*/
@Directive({
selector: '[cdkPortal]',
exportAs: 'cdkPortal',
})
export class CdkPortal extends TemplatePortal {
constructor(templateRef: TemplateRef<any>, viewContainerRef: ViewContainerRef) {
super(templateRef, viewContainerRef);
}
}
/**
* @deprecated Use `CdkPortal` instead.
* @breaking-change 9.0.0
*/
@Directive({
selector: '[cdk-portal], [portal]',
exportAs: 'cdkPortal',
providers: [{
provide: CdkPortal,
useExisting: TemplatePortalDirective
}]
})
export class TemplatePortalDirective extends CdkPortal {}
/**
* Possible attached references to the CdkPortalOutlet.
*/
export type CdkPortalOutletAttachedRef = ComponentRef<any> | EmbeddedViewRef<any> | null;
/**
* Directive version of a PortalOutlet. Because the directive *is* a PortalOutlet, portals can be
* directly attached to it, enabling declarative use.
*
* Usage:
* `<ng-template [cdkPortalOutlet]="greeting"></ng-template>`
*/
@Directive({
selector: '[cdkPortalOutlet]',
exportAs: 'cdkPortalOutlet',
inputs: ['portal: cdkPortalOutlet']
})
export class CdkPortalOutlet extends BasePortalOutlet implements OnInit, OnDestroy {
private _document: Document;
/** Whether the portal component is initialized. */
private _isInitialized = false;
/** Reference to the currently-attached component/view ref. */
private _attachedRef: CdkPortalOutletAttachedRef;
constructor(
private _componentFactoryResolver: ComponentFactoryResolver,
private _viewContainerRef: ViewContainerRef,
/**
* @deprecated `_document` parameter to be made required.
* @breaking-change 9.0.0
*/
@Inject(DOCUMENT) _document?: any) {
super();
this._document = _document;
}
/** Portal associated with the Portal outlet. */
get portal(): Portal<any> | null {
return this._attachedPortal;
}
set portal(portal: Portal<any> | null) {
// Ignore the cases where the `portal` is set to a falsy value before the lifecycle hooks have
// run. This handles the cases where the user might do something like `<div cdkPortalOutlet>`
// and attach a portal programmatically in the parent component. When Angular does the first CD
// round, it will fire the setter with empty string, causing the user's content to be cleared.
if (this.hasAttached() && !portal && !this._isInitialized) {
return;
}
if (this.hasAttached()) {
super.detach();
}
if (portal) {
super.attach(portal);
}
this._attachedPortal = portal;
}
/** Emits when a portal is attached to the outlet. */
@Output() attached: EventEmitter<CdkPortalOutletAttachedRef> =
new EventEmitter<CdkPortalOutletAttachedRef>();
/** Component or view reference that is attached to the portal. */
get attachedRef(): CdkPortalOutletAttachedRef {
return this._attachedRef;
}
ngOnInit() {
this._isInitialized = true;
}
ngOnDestroy() {
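// Dispose of whatever is attached and drop the references so the outlet
// does not retain the destroyed view.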
super.dispose();
this._attachedPortal = null;
this._attachedRef = null;
}
/**
* Attach the given ComponentPortal to this PortalOutlet using the ComponentFactoryResolver.
*
* @param portal Portal to be attached to the portal outlet.
* @returns Reference to the created component.
*/
attachComponentPortal<T>(portal: ComponentPortal<T>): ComponentRef<T> {
portal.setAttachedHost(this);
// If the portal specifies an origin, use that as the logical location of the component
// in the application tree. Otherwise use the location of this PortalOutlet.
const viewContainerRef = portal.viewContainerRef != null ?
portal.viewContainerRef :
this._viewContainerRef;
const resolver = portal.componentFactoryResolver || this._componentFactoryResolver;
const componentFactory = resolver.resolveComponentFactory(portal.component);
const ref = viewContainerRef.createComponent(
componentFactory, viewContainerRef.length,
portal.injector || viewContainerRef.injector);
// If we're using a view container that's different from the injected one (e.g. when the portal
// specifies its own) we need to move the component into the outlet, otherwise it'll be rendered
// inside of the alternate view container.
if (viewContainerRef !== this._viewContainerRef) {
this._getRootNode().appendChild((ref.hostView as EmbeddedViewRef<any>).rootNodes[0]);
}
super.setDisposeFn(() => ref.destroy());
this._attachedPortal = portal;
this._attachedRef = ref;
this.attached.emit(ref);
return ref;
}
/**
* Attach the given TemplatePortal to this PortalHost as an embedded View.
* @param portal Portal to be attached.
* @returns Reference to the created embedded view.
*/
attachTemplatePortal<C>(portal: TemplatePortal<C>): EmbeddedViewRef<C> {
portal.setAttachedHost(this);
const viewRef = this._viewContainerRef.createEmbeddedView(portal.templateRef, portal.context);
super.setDisposeFn(() => this._viewContainerRef.clear());
this._attachedPortal = portal;
this._attachedRef = viewRef;
this.attached.emit(viewRef);
return viewRef;
}
/**
* Attaches the given DomPortal to this PortalHost by moving all of the portal content into it.
* @param portal Portal to be attached.
* @deprecated To be turned into a method.
* @breaking-change 10.0.0
*/
attachDomPortal = (portal: DomPortal) => {
// @breaking-change 9.0.0 Remove check and error once the
// `_document` constructor parameter is required.
if (!this._document && (typeof ngDevMode === 'undefined' || ngDevMode)) {
throw Error('Cannot attach DOM portal without _document constructor parameter');
}
const element = portal.element;
if (!element.parentNode && (typeof ngDevMode === 'undefined' || ngDevMode)) {
throw Error('DOM portal content must be attached to a parent node.');
}
// Anchor used to save the element's previous position so
// that we can restore it when the portal is detached.
const anchorNode = this._document.createComment('dom-portal');
portal.setAttachedHost(this);
element.parentNode!.insertBefore(anchorNode, element);
this._getRootNode().appendChild(element);
super.setDisposeFn(() => {
if (anchorNode.parentNode) {
anchorNode.parentNode!.replaceChild(element, anchorNode);
}
});
}
/** Gets the root node of the portal outlet. */
private _getRootNode(): HTMLElement {
const nativeElement: Node = this._viewContainerRef.element.nativeElement;
// The directive could be set on a template which will result in a comment
// node being the root. Use the comment's parent node if that is the case.
return (nativeElement.nodeType === nativeElement.ELEMENT_NODE ?
nativeElement : nativeElement.parentNode!) as HTMLElement;
}
static ngAcceptInputType_portal: Portal<any> | null | undefined | '';
}
/**
* @deprecated Use `CdkPortalOutlet` instead.
* @breaking-change 9.0.0
*/
@Directive({
selector: '[cdkPortalHost], [portalHost]',
exportAs: 'cdkPortalHost',<|fim▁hole|> }]
})
export class PortalHostDirective extends CdkPortalOutlet {}
@NgModule({
exports: [CdkPortal, CdkPortalOutlet, TemplatePortalDirective, PortalHostDirective],
declarations: [CdkPortal, CdkPortalOutlet, TemplatePortalDirective, PortalHostDirective],
})
export class PortalModule {}<|fim▁end|> | inputs: ['portal: cdkPortalHost'],
providers: [{
provide: CdkPortalOutlet,
useExisting: PortalHostDirective |
<|file_name|>ownershipmovessource0.rs<|end_file_name|><|fim▁begin|>// This function takes ownership of an integer allocated on the heap.
fn destroy_box(c: Box<i32>) {
println!("Destroying a box that contains {}", c);
// `c` is destroyed here, and the memory is freed.
}<|fim▁hole|> let x = 5u32;
// Copy `x` into `y` - no resource is moved
// (ownership is not transferred).
let y = x;
// Both values can be used independently.
println!("x is {}, and y is {}", x, y);
// `a` is a pointer to an integer allocated on the heap.
let a = Box::new(5i32);
println!("a contains: {}", a);
// Move `a` into `b`.
let b = a;
// The pointer address of `a` is copied (not the data) into `b`.
// Both are now pointers to the same heap-allocated data, but
// `b` now owns it.
// Error! `a` can no longer access the data, because it no longer owns
// the heap memory.
// println!("a contains: {}", a);
// TODO ^ Try uncommenting this line.
// This function takes ownership of the heap-allocated memory
// previously owned by `b`.
destroy_box(b);
// Since the heap memory has already been freed at this point,
// this action would amount to dereferencing freed memory,
// which is forbidden by the compiler.
// Error! `b` can no longer access the data, because it no longer owns
// the heap memory.
// println!("b contains: {}", b);
// TODO ^ Try uncommenting this line.
}<|fim▁end|> |
fn main() {
// Integer allocated on the stack.
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|># Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT<|fim▁hole|>
import mock
from jacket.compute import test
from jacket.compute.virt.disk.mount import api
from jacket.compute.virt.disk.mount import block
from jacket.compute.virt.disk.mount import loop
from jacket.compute.virt.disk.mount import nbd
from jacket.compute.virt.image import model as imgmodel
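# Fixture constants: a fake source device plus the automapped and
# device-mapper partition paths the mount code is expected to probe.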
PARTITION = 77
ORIG_DEVICE = "/dev/null"
AUTOMAP_PARTITION = "/dev/nullp77"
MAP_PARTITION = "/dev/mapper/nullp77"
class MountTestCase(test.NoDBTestCase):
def setUp(self):
super(MountTestCase, self).setUp()
def _test_map_dev(self, partition):
mount = api.Mount(mock.sentinel.image, mock.sentinel.mount_dir)
mount.device = ORIG_DEVICE
mount.partition = partition
mount.map_dev()
return mount
@mock.patch('compute.utils.trycmd')
def _test_map_dev_with_trycmd(self, partition, trycmd):
trycmd.return_value = [None, None]
mount = self._test_map_dev(partition)
self.assertEqual(1, trycmd.call_count) # don't care about args
return mount
def _exists_effect(self, data):
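# Build an os.path.exists stand-in that replays canned answers per path;
# a list value yields successive answers across calls, and an unexpected
# path fails the test.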
def exists_effect(filename):
try:
v = data[filename]
if isinstance(v, list):
if len(v) > 0:
return v.pop(0)
self.fail("Out of items for: %s" % filename)
return v
except KeyError:
self.fail("Unexpected call with: %s" % filename)
return exists_effect
def _check_calls(self, exists, filenames):
self.assertEqual([mock.call(x) for x in filenames],
exists.call_args_list)
@mock.patch('os.path.exists')
def test_map_dev_partition_search(self, exists):
exists.side_effect = self._exists_effect({
ORIG_DEVICE: True})
mount = self._test_map_dev(-1)
self._check_calls(exists, [ORIG_DEVICE])
self.assertNotEqual("", mount.error)
self.assertFalse(mount.mapped)
@mock.patch('os.path.exists')
def test_map_dev_good(self, exists):
exists.side_effect = self._exists_effect({
ORIG_DEVICE: True,
AUTOMAP_PARTITION: False,
MAP_PARTITION: [False, True]})
mount = self._test_map_dev_with_trycmd(PARTITION)
self._check_calls(exists,
[ORIG_DEVICE, AUTOMAP_PARTITION, MAP_PARTITION, MAP_PARTITION])
self.assertEqual("", mount.error)
self.assertTrue(mount.mapped)
@mock.patch('os.path.exists')
def test_map_dev_error(self, exists):
exists.side_effect = self._exists_effect({
ORIG_DEVICE: True,
AUTOMAP_PARTITION: False,
MAP_PARTITION: False})
mount = self._test_map_dev_with_trycmd(PARTITION)
self._check_calls(exists,
[ORIG_DEVICE, AUTOMAP_PARTITION, MAP_PARTITION, MAP_PARTITION])
self.assertNotEqual("", mount.error)
self.assertFalse(mount.mapped)
@mock.patch('os.path.exists')
def test_map_dev_automap(self, exists):
exists.side_effect = self._exists_effect({
ORIG_DEVICE: True,
AUTOMAP_PARTITION: True})
mount = self._test_map_dev(PARTITION)
self._check_calls(exists,
[ORIG_DEVICE, AUTOMAP_PARTITION, AUTOMAP_PARTITION])
self.assertEqual(AUTOMAP_PARTITION, mount.mapped_device)
self.assertTrue(mount.automapped)
self.assertTrue(mount.mapped)
@mock.patch('os.path.exists')
def test_map_dev_else(self, exists):
exists.side_effect = self._exists_effect({
ORIG_DEVICE: True,
AUTOMAP_PARTITION: True})
mount = self._test_map_dev(None)
self._check_calls(exists, [ORIG_DEVICE])
self.assertEqual(ORIG_DEVICE, mount.mapped_device)
self.assertFalse(mount.automapped)
self.assertTrue(mount.mapped)
def test_instance_for_format_raw(self):
image = imgmodel.LocalFileImage("/some/file.raw",
imgmodel.FORMAT_RAW)
mount_dir = '/mount/dir'
partition = -1
inst = api.Mount.instance_for_format(image, mount_dir, partition)
self.assertIsInstance(inst, loop.LoopMount)
def test_instance_for_format_qcow2(self):
image = imgmodel.LocalFileImage("/some/file.qcows",
imgmodel.FORMAT_QCOW2)
mount_dir = '/mount/dir'
partition = -1
inst = api.Mount.instance_for_format(image, mount_dir, partition)
self.assertIsInstance(inst, nbd.NbdMount)
def test_instance_for_format_block(self):
image = imgmodel.LocalBlockImage(
"/dev/mapper/instances--instance-0000001_disk",)
mount_dir = '/mount/dir'
partition = -1
inst = api.Mount.instance_for_format(image, mount_dir, partition)
self.assertIsInstance(inst, block.BlockMount)
def test_instance_for_device_loop(self):
image = mock.MagicMock()
mount_dir = '/mount/dir'
partition = -1
device = '/dev/loop0'
inst = api.Mount.instance_for_device(image, mount_dir, partition,
device)
self.assertIsInstance(inst, loop.LoopMount)
def test_instance_for_device_loop_partition(self):
image = mock.MagicMock()
mount_dir = '/mount/dir'
partition = 1
device = '/dev/mapper/loop0p1'
inst = api.Mount.instance_for_device(image, mount_dir, partition,
device)
self.assertIsInstance(inst, loop.LoopMount)
def test_instance_for_device_nbd(self):
image = mock.MagicMock()
mount_dir = '/mount/dir'
partition = -1
device = '/dev/nbd0'
inst = api.Mount.instance_for_device(image, mount_dir, partition,
device)
self.assertIsInstance(inst, nbd.NbdMount)
def test_instance_for_device_nbd_partition(self):
image = mock.MagicMock()
mount_dir = '/mount/dir'
partition = 1
device = '/dev/mapper/nbd0p1'
inst = api.Mount.instance_for_device(image, mount_dir, partition,
device)
self.assertIsInstance(inst, nbd.NbdMount)
def test_instance_for_device_block(self):
image = mock.MagicMock()
mount_dir = '/mount/dir'
partition = -1
device = '/dev/mapper/instances--instance-0000001_disk'
inst = api.Mount.instance_for_device(image, mount_dir, partition,
device)
self.assertIsInstance(inst, block.BlockMount)
def test_instance_for_device_block_partition(self):
image = mock.MagicMock()
mount_dir = '/mount/dir'
partition = 1
device = '/dev/mapper/instances--instance-0000001_diskp1'
inst = api.Mount.instance_for_device(image, mount_dir, partition,
device)
self.assertIsInstance(inst, block.BlockMount)<|fim▁end|> | # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License. |
<|file_name|>font.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Specified values for font properties
#[cfg(feature = "gecko")]
use crate::gecko_bindings::bindings;
use crate::parser::{Parse, ParserContext};
use crate::properties::longhands::system_font::SystemFont;
use crate::values::computed::font::{FamilyName, FontFamilyList, FontStyleAngle, SingleFontFamily};
use crate::values::computed::{font as computed, Length, NonNegativeLength};
use crate::values::computed::{Angle as ComputedAngle, Percentage as ComputedPercentage};
use crate::values::computed::{CSSPixelLength, Context, ToComputedValue};
use crate::values::generics::font::VariationValue;
use crate::values::generics::font::{self as generics, FeatureTagValue, FontSettings, FontTag};
use crate::values::generics::NonNegative;
use crate::values::specified::length::{FontBaseSize, AU_PER_PT, AU_PER_PX};
use crate::values::specified::{AllowQuirks, Angle, Integer, LengthPercentage};
use crate::values::specified::{NoCalcLength, NonNegativeNumber, Number, Percentage};
use crate::values::CustomIdent;
use crate::Atom;
use cssparser::{Parser, Token};
#[cfg(feature = "gecko")]
use malloc_size_of::{MallocSizeOf, MallocSizeOfOps};
use std::fmt::{self, Write};
use style_traits::values::SequenceWriter;
use style_traits::{CssWriter, KeywordsCollectFn, ParseError};
use style_traits::{SpecifiedValueInfo, StyleParseErrorKind, ToCss};
// FIXME(emilio): The system font code is copy-pasta, and should be cleaned up.
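// Generates the shared System(..) helpers for a font longhand; the two-argument
// form also emits compute_system(), which reads the given field from the cached
// system font (Gecko only).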
macro_rules! system_font_methods {
($ty:ident, $field:ident) => {
system_font_methods!($ty);
fn compute_system(&self, _context: &Context) -> <$ty as ToComputedValue>::ComputedValue {
debug_assert!(matches!(*self, $ty::System(..)));
#[cfg(feature = "gecko")]
{
_context.cached_system_font.as_ref().unwrap().$field.clone()
}
#[cfg(feature = "servo")]
{
unreachable!()
}
}
};
($ty:ident) => {
/// Get a specified value that represents a system font.
pub fn system_font(f: SystemFont) -> Self {
$ty::System(f)
}
/// Retrieve a SystemFont from the specified value.
pub fn get_system(&self) -> Option<SystemFont> {
if let $ty::System(s) = *self {
Some(s)
} else {
None
}
}
};
}
const DEFAULT_SCRIPT_MIN_SIZE_PT: u32 = 8;
const DEFAULT_SCRIPT_SIZE_MULTIPLIER: f64 = 0.71;
/// The minimum font-weight value per:
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values
pub const MIN_FONT_WEIGHT: f32 = 1.;
/// The maximum font-weight value per:
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values
pub const MAX_FONT_WEIGHT: f32 = 1000.;
/// A specified font-weight value.
///
/// https://drafts.csswg.org/css-fonts-4/#propdef-font-weight
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub enum FontWeight {
/// `<font-weight-absolute>`
Absolute(AbsoluteFontWeight),
/// Bolder variant
Bolder,
/// Lighter variant
Lighter,
/// System font variant.
#[css(skip)]
System(SystemFont),
}
impl FontWeight {
system_font_methods!(FontWeight, font_weight);
/// `normal`
#[inline]
pub fn normal() -> Self {
FontWeight::Absolute(AbsoluteFontWeight::Normal)
}
/// Get a specified FontWeight from a gecko keyword
pub fn from_gecko_keyword(kw: u32) -> Self {
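// Gecko passes one of the CSS2 keyword weights here, so the value is expected
// to be a multiple of 100 within the valid font-weight range.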
debug_assert!(kw % 100 == 0);
debug_assert!(kw as f32 <= MAX_FONT_WEIGHT);
FontWeight::Absolute(AbsoluteFontWeight::Weight(Number::new(kw as f32)))
}
}
impl ToComputedValue for FontWeight {
type ComputedValue = computed::FontWeight;
#[inline]
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontWeight::Absolute(ref abs) => abs.compute(),
FontWeight::Bolder => context
.builder
.get_parent_font()
.clone_font_weight()
.bolder(),
FontWeight::Lighter => context
.builder
.get_parent_font()
.clone_font_weight()
.lighter(),
FontWeight::System(_) => self.compute_system(context),
}
}
#[inline]
fn from_computed_value(computed: &computed::FontWeight) -> Self {
FontWeight::Absolute(AbsoluteFontWeight::Weight(Number::from_computed_value(
&computed.0,
)))
}
}
/// An absolute font-weight value for a @font-face rule.
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-absolute-values
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub enum AbsoluteFontWeight {
/// A `<number>`, with the additional constraints specified in:
///
/// https://drafts.csswg.org/css-fonts-4/#font-weight-numeric-values
Weight(Number),
/// Normal font weight. Same as 400.
Normal,
/// Bold font weight. Same as 700.
Bold,
}
impl AbsoluteFontWeight {
/// Returns the computed value for this absolute font weight.
pub fn compute(&self) -> computed::FontWeight {
match *self {
AbsoluteFontWeight::Weight(weight) => {
computed::FontWeight(weight.get().max(MIN_FONT_WEIGHT).min(MAX_FONT_WEIGHT))
},
AbsoluteFontWeight::Normal => computed::FontWeight::normal(),
AbsoluteFontWeight::Bold => computed::FontWeight::bold(),
}
}
}
impl Parse for AbsoluteFontWeight {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
if let Ok(number) = input.try(|input| Number::parse(context, input)) {
// We could add another AllowedNumericType value, but it doesn't
// seem worth it just for a single property with such a weird range,
// so we do the clamping here manually.
if !number.was_calc() &&
(number.get() < MIN_FONT_WEIGHT || number.get() > MAX_FONT_WEIGHT)
{
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
return Ok(AbsoluteFontWeight::Weight(number));
}
Ok(try_match_ident_ignore_ascii_case! { input,
"normal" => AbsoluteFontWeight::Normal,
"bold" => AbsoluteFontWeight::Bold,
})
}
}
/// The specified value of the `font-style` property, without the system font
/// crap.
pub type SpecifiedFontStyle = generics::FontStyle<Angle>;
impl ToCss for SpecifiedFontStyle {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
match *self {
generics::FontStyle::Normal => dest.write_str("normal"),
generics::FontStyle::Italic => dest.write_str("italic"),
generics::FontStyle::Oblique(ref angle) => {
dest.write_str("oblique")?;
if *angle != Self::default_angle() {
dest.write_char(' ')?;
angle.to_css(dest)?;
}
Ok(())
},
}
}
}
impl Parse for SpecifiedFontStyle {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
Ok(try_match_ident_ignore_ascii_case! { input,
"normal" => generics::FontStyle::Normal,
"italic" => generics::FontStyle::Italic,
"oblique" => {
let angle = input.try(|input| Self::parse_angle(context, input))
.unwrap_or_else(|_| Self::default_angle());
generics::FontStyle::Oblique(angle)
},
})
}
}
impl ToComputedValue for SpecifiedFontStyle {
type ComputedValue = computed::FontStyle;
fn to_computed_value(&self, _: &Context) -> Self::ComputedValue {
match *self {
generics::FontStyle::Normal => generics::FontStyle::Normal,
generics::FontStyle::Italic => generics::FontStyle::Italic,
generics::FontStyle::Oblique(ref angle) => {
generics::FontStyle::Oblique(FontStyleAngle(Self::compute_angle(angle)))
},
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
match *computed {
generics::FontStyle::Normal => generics::FontStyle::Normal,
generics::FontStyle::Italic => generics::FontStyle::Italic,
generics::FontStyle::Oblique(ref angle) => {
generics::FontStyle::Oblique(Angle::from_computed_value(&angle.0))
},
}
}
}
/// The default angle for `font-style: oblique`.
///
/// NOTE(emilio): As of right now this diverges from the spec, which specifies
/// 20, because it's not updated yet to account for the resolution in:
///
/// https://github.com/w3c/csswg-drafts/issues/2295
pub const DEFAULT_FONT_STYLE_OBLIQUE_ANGLE_DEGREES: f32 = 14.;
/// From https://drafts.csswg.org/css-fonts-4/#valdef-font-style-oblique-angle:
///
/// Values less than -90deg or values greater than 90deg are
/// invalid and are treated as parse errors.
///
/// The maximum angle value that `font-style: oblique` should compute to.
pub const FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES: f32 = 90.;
/// The minimum angle value that `font-style: oblique` should compute to.
pub const FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES: f32 = -90.;
impl SpecifiedFontStyle {
/// Gets a clamped angle in degrees from a specified Angle.
pub fn compute_angle_degrees(angle: &Angle) -> f32 {
angle
.degrees()
.max(FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES)
.min(FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES)
}
fn compute_angle(angle: &Angle) -> ComputedAngle {
ComputedAngle::from_degrees(Self::compute_angle_degrees(angle))
}
/// Parse a suitable angle for font-style: oblique.
pub fn parse_angle<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Angle, ParseError<'i>> {
let angle = Angle::parse(context, input)?;
if angle.was_calc() {
return Ok(angle);
}
let degrees = angle.degrees();
if degrees < FONT_STYLE_OBLIQUE_MIN_ANGLE_DEGREES ||
degrees > FONT_STYLE_OBLIQUE_MAX_ANGLE_DEGREES
{
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
return Ok(angle);
}
/// The default angle for `font-style: oblique`.
pub fn default_angle() -> Angle {
Angle::from_degrees(
DEFAULT_FONT_STYLE_OBLIQUE_ANGLE_DEGREES,
/* was_calc = */ false,
)
}
}
/// The specified value of the `font-style` property.
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[allow(missing_docs)]
pub enum FontStyle {
Specified(SpecifiedFontStyle),
#[css(skip)]
System(SystemFont),
}
impl FontStyle {
/// Return the `normal` value.
#[inline]
pub fn normal() -> Self {
FontStyle::Specified(generics::FontStyle::Normal)
}
system_font_methods!(FontStyle, font_style);
}
impl ToComputedValue for FontStyle {
type ComputedValue = computed::FontStyle;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontStyle::Specified(ref specified) => specified.to_computed_value(context),
FontStyle::System(..) => self.compute_system(context),
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
FontStyle::Specified(SpecifiedFontStyle::from_computed_value(computed))
}
}
/// A value for the `font-stretch` property.
///
/// https://drafts.csswg.org/css-fonts-4/#font-stretch-prop
///
/// TODO(emilio): We could derive Parse if we had NonNegativePercentage.
#[allow(missing_docs)]
#[derive(Clone, Copy, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[repr(u8)]
pub enum FontStretch {
Stretch(Percentage),
Keyword(FontStretchKeyword),
#[css(skip)]
System(SystemFont),
}
/// A keyword value for `font-stretch`.
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
#[allow(missing_docs)]
pub enum FontStretchKeyword {
Normal,
Condensed,
UltraCondensed,
ExtraCondensed,
SemiCondensed,
SemiExpanded,
Expanded,
ExtraExpanded,
UltraExpanded,
}
impl FontStretchKeyword {
/// Resolves the value of the keyword as specified in:
///
/// https://drafts.csswg.org/css-fonts-4/#font-stretch-prop
pub fn compute(&self) -> ComputedPercentage {
use self::FontStretchKeyword::*;
ComputedPercentage(match *self {
UltraCondensed => 0.5,
ExtraCondensed => 0.625,
Condensed => 0.75,
SemiCondensed => 0.875,
Normal => 1.,
SemiExpanded => 1.125,
Expanded => 1.25,
ExtraExpanded => 1.5,
UltraExpanded => 2.,
})
}
/// Does the opposite operation to `compute`, in order to serialize keywords
/// if possible.
pub fn from_percentage(percentage: f32) -> Option<Self> {
use self::FontStretchKeyword::*;
// NOTE(emilio): Can't use `match` because of rust-lang/rust#41620.
if percentage == 0.5 {
return Some(UltraCondensed);
}
if percentage == 0.625 {
return Some(ExtraCondensed);
}
if percentage == 0.75 {
return Some(Condensed);
}
if percentage == 0.875 {
return Some(SemiCondensed);
}
if percentage == 1. {
return Some(Normal);
}
if percentage == 1.125 {
return Some(SemiExpanded);
}
if percentage == 1.25 {
return Some(Expanded);
}
if percentage == 1.5 {
return Some(ExtraExpanded);
}
if percentage == 2. {
return Some(UltraExpanded);
}
None
}
}
impl FontStretch {
/// `normal`.
pub fn normal() -> Self {
FontStretch::Keyword(FontStretchKeyword::Normal)
}
system_font_methods!(FontStretch, font_stretch);
}
impl Parse for FontStretch {
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
// From https://drafts.csswg.org/css-fonts-4/#font-stretch-prop:
//
// Values less than 0% are not allowed and are treated as parse
// errors.
if let Ok(percentage) = input.try(|input| Percentage::parse_non_negative(context, input)) {
return Ok(FontStretch::Stretch(percentage));
}
Ok(FontStretch::Keyword(FontStretchKeyword::parse(input)?))
}
}
impl ToComputedValue for FontStretch {
type ComputedValue = computed::FontStretch;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontStretch::Stretch(ref percentage) => {
computed::FontStretch(NonNegative(percentage.to_computed_value(context)))
},
FontStretch::Keyword(ref kw) => computed::FontStretch(NonNegative(kw.compute())),
FontStretch::System(_) => self.compute_system(context),
}
}
fn from_computed_value(computed: &Self::ComputedValue) -> Self {
FontStretch::Stretch(Percentage::from_computed_value(&(computed.0).0))
}
}
/// CSS font keywords
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToAnimatedValue,
ToAnimatedZero,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[allow(missing_docs)]
pub enum KeywordSize {
#[css(keyword = "xx-small")]
XXSmall,
XSmall,
Small,
Medium,
Large,
XLarge,
#[css(keyword = "xx-large")]
XXLarge,
#[css(keyword = "xxx-large")]
XXXLarge,
}
impl KeywordSize {
/// Convert to an HTML <font size> value
#[inline]
pub fn html_size(self) -> u8 {
self as u8
}
}
impl Default for KeywordSize {
fn default() -> Self {
KeywordSize::Medium
}
}
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
ToAnimatedValue,
ToAnimatedZero,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
/// Additional information for keyword-derived font sizes.
pub struct KeywordInfo {
/// The keyword used
pub kw: KeywordSize,
/// A factor to be multiplied by the computed size of the keyword
#[css(skip)]
pub factor: f32,
/// An additional fixed offset to add to the kw * factor in the case of
/// `calc()`.
#[css(skip)]
pub offset: CSSPixelLength,
}
impl KeywordInfo {
/// KeywordInfo value for font-size: medium
pub fn medium() -> Self {
Self::new(KeywordSize::Medium)
}
fn new(kw: KeywordSize) -> Self {
KeywordInfo {
kw,
factor: 1.,
offset: CSSPixelLength::new(0.),
}
}
/// Computes the final size for this font-size keyword, accounting for
/// text-zoom.
fn to_computed_value(&self, context: &Context) -> CSSPixelLength {
let base = context.maybe_zoom_text(self.kw.to_length(context).0);
base * self.factor + context.maybe_zoom_text(self.offset)
}
/// Given a parent keyword info (self), apply an additional factor/offset to
/// it.
fn compose(self, factor: f32) -> Self {
KeywordInfo {
kw: self.kw,
factor: self.factor * factor,
offset: self.offset * factor,
}
}
}
impl SpecifiedValueInfo for KeywordInfo {
fn collect_completion_keywords(f: KeywordsCollectFn) {
<KeywordSize as SpecifiedValueInfo>::collect_completion_keywords(f);
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// A specified font-size value
pub enum FontSize {
/// A length; e.g. 10px.
Length(LengthPercentage),
/// A keyword value, along with a ratio and absolute offset.
/// The ratio in any specified keyword value
/// will be 1 (with offset 0), but we cascade keywordness even
/// after font-relative (percent and em) values
/// have been applied, which is where the ratio
/// comes in. The offset comes in if we cascaded a calc value,
/// where the font-relative portion (em and percentage) will
/// go into the ratio, and the remaining units all computed together
/// will go into the offset.
/// See bug 1355707.
Keyword(KeywordInfo),
/// font-size: smaller
Smaller,
/// font-size: larger
Larger,
/// Derived from a specified system font.
#[css(skip)]
System(SystemFont),
}
/// Specifies a prioritized list of font family names or generic family names.
#[derive(Clone, Debug, Eq, PartialEq, ToCss, ToShmem)]
#[cfg_attr(feature = "servo", derive(Hash))]
pub enum FontFamily {
/// List of `font-family`
#[css(comma)]
Values(#[css(iterable)] FontFamilyList),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontFamily {
system_font_methods!(FontFamily, font_family);
/// Parse a specified font-family value
pub fn parse_specified<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i>> {
let values = input.parse_comma_separated(SingleFontFamily::parse)?;
Ok(FontFamily::Values(FontFamilyList::new(
values.into_boxed_slice(),
)))
}
}
impl ToComputedValue for FontFamily {
type ComputedValue = computed::FontFamily;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontFamily::Values(ref v) => computed::FontFamily {
families: v.clone(),
is_system_font: false,
},
FontFamily::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontFamily) -> Self {
FontFamily::Values(other.families.clone())
}
}
#[cfg(feature = "gecko")]
impl MallocSizeOf for FontFamily {
fn size_of(&self, _ops: &mut MallocSizeOfOps) -> usize {
match *self {
FontFamily::Values(ref v) => {
// Although a SharedFontList object is refcounted, we always
// attribute its size to the specified value, as long as it's
// not a value in SharedFontList::sSingleGenerics.
if matches!(v, FontFamilyList::SharedFontList(_)) {
let ptr = v.shared_font_list().get();
unsafe { bindings::Gecko_SharedFontList_SizeOfIncludingThis(ptr) }
} else {
0
}
},
FontFamily::System(_) => 0,
}
}
}
impl Parse for FontFamily {
/// <family-name>#
/// <family-name> = <string> | [ <ident>+ ]
/// TODO: <generic-family>
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontFamily, ParseError<'i>> {
FontFamily::parse_specified(input)
}
}
impl SpecifiedValueInfo for FontFamily {}
/// `FamilyName::parse` is based on `SingleFontFamily::parse` and not the other way around
/// because we want the former to exclude generic family keywords.
impl Parse for FamilyName {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
match SingleFontFamily::parse(input) {
Ok(SingleFontFamily::FamilyName(name)) => Ok(name),
Ok(SingleFontFamily::Generic(_)) => {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
},
Err(e) => Err(e),
}
}
}
#[derive(Clone, Copy, Debug, MallocSizeOf, Parse, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Preserve the readability of text when font fallback occurs
pub enum FontSizeAdjust {
/// None variant
None,
/// Number variant
Number(NonNegativeNumber),
/// system font
#[css(skip)]
System(SystemFont),
}
impl FontSizeAdjust {
#[inline]
/// Default value of font-size-adjust
pub fn none() -> Self {
FontSizeAdjust::None
}
system_font_methods!(FontSizeAdjust, font_size_adjust);
}
impl ToComputedValue for FontSizeAdjust {
type ComputedValue = computed::FontSizeAdjust;
fn to_computed_value(&self, context: &Context) -> Self::ComputedValue {
match *self {
FontSizeAdjust::None => computed::FontSizeAdjust::None,
FontSizeAdjust::Number(ref n) => {
// The computed version handles clamping of animated values
// itself.
computed::FontSizeAdjust::Number(n.to_computed_value(context).0)
},
FontSizeAdjust::System(_) => self.compute_system(context),
}
}
fn from_computed_value(computed: &computed::FontSizeAdjust) -> Self {
match *computed {
computed::FontSizeAdjust::None => FontSizeAdjust::None,
computed::FontSizeAdjust::Number(v) => {
FontSizeAdjust::Number(NonNegativeNumber::from_computed_value(&v.into()))
},
}
}
}
/// This is the ratio applied for font-size: larger
/// and smaller by both Firefox and Chrome
const LARGER_FONT_SIZE_RATIO: f32 = 1.2;
/// The default font size.
pub const FONT_MEDIUM_PX: i32 = 16;
impl KeywordSize {
#[inline]
#[cfg(feature = "servo")]
fn to_length(&self, _: &Context) -> NonNegativeLength {
let medium = Length::new(FONT_MEDIUM_PX as f32);
// https://drafts.csswg.org/css-fonts-3/#font-size-prop
NonNegative(match *self {
KeywordSize::XXSmall => medium * 3.0 / 5.0,
KeywordSize::XSmall => medium * 3.0 / 4.0,
KeywordSize::Small => medium * 8.0 / 9.0,
KeywordSize::Medium => medium,
KeywordSize::Large => medium * 6.0 / 5.0,
KeywordSize::XLarge => medium * 3.0 / 2.0,
KeywordSize::XXLarge => medium * 2.0,
KeywordSize::XXXLarge => medium * 3.0,
})
}
#[cfg(feature = "gecko")]
#[inline]
fn to_length(&self, cx: &Context) -> NonNegativeLength {
use crate::context::QuirksMode;
// The tables in this function are originally from
// nsRuleNode::CalcFontPointSize in Gecko:
//
// https://searchfox.org/mozilla-central/rev/c05d9d61188d32b8/layout/style/nsRuleNode.cpp#3150
//
// Mapping from base size and HTML size to pixels
// The first index is (base_size - 9), the second is the
// HTML size. "0" is CSS keyword xx-small, not HTML size 0,
// since HTML size 0 is the same as 1.
//
// xxs xs s m l xl xxl -
// - 0/1 2 3 4 5 6 7
static FONT_SIZE_MAPPING: [[i32; 8]; 8] = [
[9, 9, 9, 9, 11, 14, 18, 27],
[9, 9, 9, 10, 12, 15, 20, 30],
[9, 9, 10, 11, 13, 17, 22, 33],
[9, 9, 10, 12, 14, 18, 24, 36],
[9, 10, 12, 13, 16, 20, 26, 39],
[9, 10, 12, 14, 17, 21, 28, 42],
[9, 10, 13, 15, 18, 23, 30, 45],
[9, 10, 13, 16, 18, 24, 32, 48],
];
// This table gives us compatibility with WinNav4 for the default fonts only.
// In WinNav4, the default fonts were:
//
// Times/12pt == Times/16px at 96ppi
// Courier/10pt == Courier/13px at 96ppi
//
// xxs xs s m l xl xxl -
// - 1 2 3 4 5 6 7
static QUIRKS_FONT_SIZE_MAPPING: [[i32; 8]; 8] = [
[9, 9, 9, 9, 11, 14, 18, 28],
[9, 9, 9, 10, 12, 15, 20, 31],
[9, 9, 9, 11, 13, 17, 22, 34],
[9, 9, 10, 12, 14, 18, 24, 37],
[9, 9, 10, 13, 16, 20, 26, 40],
[9, 9, 11, 14, 17, 21, 28, 42],
[9, 10, 12, 15, 17, 23, 30, 45],
[9, 10, 13, 16, 18, 24, 32, 48],
];
static FONT_SIZE_FACTORS: [i32; 8] = [60, 75, 89, 100, 120, 150, 200, 300];
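// Percent factors applied to the base size whenever it falls outside the
// 9px..16px range covered by the tables above.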
let ref gecko_font = cx.style().get_font().gecko();
let base_size = unsafe {
Atom::with(gecko_font.mLanguage.mRawPtr, |atom| {
cx.font_metrics_provider
.get_size(atom, gecko_font.mGenericID)
})
};
let base_size_px = base_size.px().round() as i32;
let html_size = self.html_size() as usize;
NonNegative(if base_size_px >= 9 && base_size_px <= 16 {
let mapping = if cx.quirks_mode == QuirksMode::Quirks {
QUIRKS_FONT_SIZE_MAPPING
} else {
FONT_SIZE_MAPPING
};
Length::new(mapping[(base_size_px - 9) as usize][html_size] as f32)
} else {
base_size * FONT_SIZE_FACTORS[html_size] as f32 / 100.0
})
}
}
impl FontSize {
/// <https://html.spec.whatwg.org/multipage/#rules-for-parsing-a-legacy-font-size>
pub fn from_html_size(size: u8) -> Self {
FontSize::Keyword(KeywordInfo::new(match size {
// If value is less than 1, let it be 1.
0 | 1 => KeywordSize::XSmall,
2 => KeywordSize::Small,
3 => KeywordSize::Medium,
4 => KeywordSize::Large,
5 => KeywordSize::XLarge,
6 => KeywordSize::XXLarge,
// If value is greater than 7, let it be 7.
_ => KeywordSize::XXXLarge,
}))
}
/// Compute it against a given base font size
pub fn to_computed_value_against(
&self,
context: &Context,
base_size: FontBaseSize,
) -> computed::FontSize {
use crate::values::specified::length::FontRelativeLength;
let compose_keyword = |factor| {
context
.style()
.get_parent_font()
.clone_font_size()
.keyword_info
.map(|i| i.compose(factor))
};
let mut info = None;
let size = match *self {
FontSize::Length(LengthPercentage::Length(NoCalcLength::FontRelative(value))) => {
if let FontRelativeLength::Em(em) = value {
// If the parent font was keyword-derived, this is too.
// Tack the em unit onto the factor
info = compose_keyword(em);
}
value.to_computed_value(context, base_size)
},
FontSize::Length(LengthPercentage::Length(NoCalcLength::ServoCharacterWidth(
value,
))) => value.to_computed_value(base_size.resolve(context)),
FontSize::Length(LengthPercentage::Length(NoCalcLength::Absolute(ref l))) => {
context.maybe_zoom_text(l.to_computed_value(context))
},
FontSize::Length(LengthPercentage::Length(ref l)) => l.to_computed_value(context),
FontSize::Length(LengthPercentage::Percentage(pc)) => {
// If the parent font was keyword-derived, this is too.
// Tack the % onto the factor
info = compose_keyword(pc.0);
base_size.resolve(context) * pc.0
},
FontSize::Length(LengthPercentage::Calc(ref calc)) => {
let calc = calc.to_computed_value_zoomed(context, base_size);
calc.resolve(base_size.resolve(context))
},
FontSize::Keyword(i) => {
// As a specified keyword, this is keyword derived
info = Some(i);
i.to_computed_value(context).clamp_to_non_negative()
},
FontSize::Smaller => {
info = compose_keyword(1. / LARGER_FONT_SIZE_RATIO);
FontRelativeLength::Em(1. / LARGER_FONT_SIZE_RATIO)
.to_computed_value(context, base_size)
},
FontSize::Larger => {
info = compose_keyword(LARGER_FONT_SIZE_RATIO);
FontRelativeLength::Em(LARGER_FONT_SIZE_RATIO).to_computed_value(context, base_size)
},
FontSize::System(_) => {
#[cfg(feature = "servo")]
{
unreachable!()
}
#[cfg(feature = "gecko")]
{
context
.cached_system_font
.as_ref()
.unwrap()
.font_size
.size
.0
}
},
};
computed::FontSize {
size: NonNegative(size),
keyword_info: info,
}
}
}
impl ToComputedValue for FontSize {
type ComputedValue = computed::FontSize;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed::FontSize {
self.to_computed_value_against(context, FontBaseSize::InheritedStyle)
}
#[inline]
fn from_computed_value(computed: &computed::FontSize) -> Self {
FontSize::Length(LengthPercentage::Length(
ToComputedValue::from_computed_value(&computed.size.0),
))
}
}
impl FontSize {
system_font_methods!(FontSize);
/// Get initial value for specified font size.
#[inline]
pub fn medium() -> Self {
FontSize::Keyword(KeywordInfo::medium())
}
/// Parses a font-size, with quirks.
pub fn parse_quirky<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
allow_quirks: AllowQuirks,
) -> Result<FontSize, ParseError<'i>> {
if let Ok(lp) =
input.try(|i| LengthPercentage::parse_non_negative_quirky(context, i, allow_quirks))
{
return Ok(FontSize::Length(lp));
}
if let Ok(kw) = input.try(KeywordSize::parse) {
return Ok(FontSize::Keyword(KeywordInfo::new(kw)));
}
try_match_ident_ignore_ascii_case! { input,
"smaller" => Ok(FontSize::Smaller),
"larger" => Ok(FontSize::Larger),
}
}
}
impl Parse for FontSize {
/// <length> | <percentage> | <absolute-size> | <relative-size>
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontSize, ParseError<'i>> {
FontSize::parse_quirky(context, input, AllowQuirks::No)
}
}
bitflags! {
#[cfg_attr(feature = "servo", derive(MallocSizeOf))]
/// Flags of variant alternates in bit
struct VariantAlternatesParsingFlags: u8 {
/// None of variant alternates enabled
const NORMAL = 0;
/// Historical forms
const HISTORICAL_FORMS = 0x01;
/// Stylistic Alternates
const STYLISTIC = 0x02;
/// Stylistic Sets
const STYLESET = 0x04;
/// Character Variant
const CHARACTER_VARIANT = 0x08;
/// Swash glyphs
const SWASH = 0x10;
/// Ornaments glyphs
const ORNAMENTS = 0x20;
/// Annotation forms
const ANNOTATION = 0x40;
}
}
#[derive(
Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToResolvedValue, ToShmem,
)]
#[repr(C, u8)]
/// Set of variant alternates
pub enum VariantAlternates {
/// Enables display of stylistic alternates
#[css(function)]
Stylistic(CustomIdent),
/// Enables display with stylistic sets
#[css(comma, function)]
Styleset(#[css(iterable)] crate::OwnedSlice<CustomIdent>),
/// Enables display of specific character variants
#[css(comma, function)]
CharacterVariant(#[css(iterable)] crate::OwnedSlice<CustomIdent>),
/// Enables display of swash glyphs
#[css(function)]
Swash(CustomIdent),
/// Enables replacement of default glyphs with ornaments
#[css(function)]
Ornaments(CustomIdent),
/// Enables display of alternate annotation forms
#[css(function)]
Annotation(CustomIdent),
/// Enables display of historical forms
HistoricalForms,
}
#[derive(
Clone,
Debug,
Default,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(transparent)]
/// List of Variant Alternates
pub struct VariantAlternatesList(
#[css(if_empty = "normal", iterable)] crate::OwnedSlice<VariantAlternates>,
);
impl VariantAlternatesList {
/// Returns the length of all variant alternates.
pub fn len(&self) -> usize {
self.0.iter().fold(0, |acc, alternate| match *alternate {
VariantAlternates::Swash(_) |
VariantAlternates::Stylistic(_) |
VariantAlternates::Ornaments(_) |
VariantAlternates::Annotation(_) => acc + 1,
VariantAlternates::Styleset(ref slice) |
VariantAlternates::CharacterVariant(ref slice) => acc + slice.len(),
_ => acc,
})
}
}
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Control over the selection of these alternate glyphs
pub enum FontVariantAlternates {
/// Use alternative glyph from value
Value(VariantAlternatesList),
/// Use system font glyph
#[css(skip)]
System(SystemFont),
}
impl FontVariantAlternates {
#[inline]
/// Get initial specified value with VariantAlternatesList
pub fn get_initial_specified_value() -> Self {
FontVariantAlternates::Value(Default::default())
}
system_font_methods!(FontVariantAlternates, font_variant_alternates);
}
impl ToComputedValue for FontVariantAlternates {
type ComputedValue = computed::FontVariantAlternates;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantAlternates {
match *self {
FontVariantAlternates::Value(ref v) => v.clone(),
FontVariantAlternates::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantAlternates) -> Self {
FontVariantAlternates::Value(other.clone())
}
}
impl Parse for FontVariantAlternates {
/// normal |
/// [ stylistic(<feature-value-name>) ||
/// historical-forms ||
/// styleset(<feature-value-name> #) ||
/// character-variant(<feature-value-name> #) ||
/// swash(<feature-value-name>) ||
/// ornaments(<feature-value-name>) ||
/// annotation(<feature-value-name>) ]
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantAlternates, ParseError<'i>> {
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantAlternates::Value(Default::default()));
}
let mut alternates = Vec::new();
let mut parsed_alternates = VariantAlternatesParsingFlags::empty();
macro_rules! check_if_parsed(
($input:expr, $flag:path) => (
if parsed_alternates.contains($flag) {
return Err($input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
parsed_alternates |= $flag;
)
);
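// Accept the alternates in any order, but at most once each:
// `check_if_parsed!` fails the parse on a repeated keyword or function.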
while let Ok(_) = input.try(|input| match *input.next()? {
Token::Ident(ref value) if value.eq_ignore_ascii_case("historical-forms") => {
check_if_parsed!(input, VariantAlternatesParsingFlags::HISTORICAL_FORMS);
alternates.push(VariantAlternates::HistoricalForms);
Ok(())
},
Token::Function(ref name) => {
let name = name.clone();
input.parse_nested_block(|i| {
match_ignore_ascii_case! { &name,
"swash" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::SWASH);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Swash(ident));
Ok(())
},
"stylistic" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::STYLISTIC);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Stylistic(ident));
Ok(())
},
"ornaments" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::ORNAMENTS);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Ornaments(ident));
Ok(())
},
"annotation" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::ANNOTATION);
let location = i.current_source_location();
let ident = CustomIdent::from_ident(location, i.expect_ident()?, &[])?;
alternates.push(VariantAlternates::Annotation(ident));
Ok(())
},
"styleset" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::STYLESET);
let idents = i.parse_comma_separated(|i| {
let location = i.current_source_location();
CustomIdent::from_ident(location, i.expect_ident()?, &[])
})?;
alternates.push(VariantAlternates::Styleset(idents.into()));
Ok(())
},
"character-variant" => {
check_if_parsed!(i, VariantAlternatesParsingFlags::CHARACTER_VARIANT);
let idents = i.parse_comma_separated(|i| {
let location = i.current_source_location();
CustomIdent::from_ident(location, i.expect_ident()?, &[])
})?;
alternates.push(VariantAlternates::CharacterVariant(idents.into()));
Ok(())
},
_ => return Err(i.new_custom_error(StyleParseErrorKind::UnspecifiedError)),
}
})
},
_ => Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)),
}) {}
if parsed_alternates.is_empty() {
return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError));
}
Ok(FontVariantAlternates::Value(VariantAlternatesList(
alternates.into(),
)))
}
}
macro_rules! impl_variant_east_asian {
{
$(
$(#[$($meta:tt)+])*
$ident:ident / $css:expr => $gecko:ident = $value:expr,
)+
} => {
bitflags! {
#[derive(MallocSizeOf, ToResolvedValue, ToShmem)]
/// Variants for east asian variant
pub struct VariantEastAsian: u16 {
/// None of the features
const NORMAL = 0;
$(
$(#[$($meta)+])*
const $ident = $value;
)+
}
}
impl ToCss for VariantEastAsian {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.is_empty() {
return dest.write_str("normal");
}
let mut writer = SequenceWriter::new(dest, " ");
$(
if self.intersects(VariantEastAsian::$ident) {
writer.raw_item($css)?;
}
)+
Ok(())
}
}
/// Asserts that all variant-east-asian matches its NS_FONT_VARIANT_EAST_ASIAN_* value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_variant_east_asian_matches() {
use crate::gecko_bindings::structs;
$(
debug_assert_eq!(structs::$gecko as u16, VariantEastAsian::$ident.bits());
)+
}
impl SpecifiedValueInfo for VariantEastAsian {
fn collect_completion_keywords(f: KeywordsCollectFn) {
f(&["normal", $($css,)+]);
}
}
}
}
impl_variant_east_asian! {
/// Enables rendering of JIS78 forms (OpenType feature: jp78)
JIS78 / "jis78" => NS_FONT_VARIANT_EAST_ASIAN_JIS78 = 0x01,
/// Enables rendering of JIS83 forms (OpenType feature: jp83).
JIS83 / "jis83" => NS_FONT_VARIANT_EAST_ASIAN_JIS83 = 0x02,
/// Enables rendering of JIS90 forms (OpenType feature: jp90).
JIS90 / "jis90" => NS_FONT_VARIANT_EAST_ASIAN_JIS90 = 0x04,
/// Enables rendering of JIS2004 forms (OpenType feature: jp04).
JIS04 / "jis04" => NS_FONT_VARIANT_EAST_ASIAN_JIS04 = 0x08,
/// Enables rendering of simplified forms (OpenType feature: smpl).
SIMPLIFIED / "simplified" => NS_FONT_VARIANT_EAST_ASIAN_SIMPLIFIED = 0x10,
/// Enables rendering of traditional forms (OpenType feature: trad).
TRADITIONAL / "traditional" => NS_FONT_VARIANT_EAST_ASIAN_TRADITIONAL = 0x20,
/// Enables rendering of full-width variants (OpenType feature: fwid).
FULL_WIDTH / "full-width" => NS_FONT_VARIANT_EAST_ASIAN_FULL_WIDTH = 0x40,
/// Enables rendering of proportionally-spaced variants (OpenType feature: pwid).
PROPORTIONAL_WIDTH / "proportional-width" => NS_FONT_VARIANT_EAST_ASIAN_PROP_WIDTH = 0x80,
/// Enables display of ruby variant glyphs (OpenType feature: ruby).
RUBY / "ruby" => NS_FONT_VARIANT_EAST_ASIAN_RUBY = 0x100,
}
#[cfg(feature = "gecko")]
impl VariantEastAsian {
/// Obtain a specified value from a Gecko keyword value
///
/// Intended for use with presentation attributes, not style structs
pub fn from_gecko_keyword(kw: u16) -> Self {
Self::from_bits_truncate(kw)
}
/// Transform into gecko keyword
pub fn to_gecko_keyword(self) -> u16 {
self.bits()
}
}
#[cfg(feature = "gecko")]
impl_gecko_keyword_conversions!(VariantEastAsian, u16);
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Allows control of glyph substitution and sizing in East Asian text.
pub enum FontVariantEastAsian {
/// Value variant with `variant-east-asian`
Value(VariantEastAsian),
/// System font variant
#[css(skip)]
System(SystemFont),
}
impl FontVariantEastAsian {
#[inline]
/// Get default `font-variant-east-asian` with `empty` variant
pub fn empty() -> Self {
FontVariantEastAsian::Value(VariantEastAsian::empty())
}
system_font_methods!(FontVariantEastAsian, font_variant_east_asian);
}
impl ToComputedValue for FontVariantEastAsian {
type ComputedValue = computed::FontVariantEastAsian;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantEastAsian {
match *self {
FontVariantEastAsian::Value(ref v) => v.clone(),
FontVariantEastAsian::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantEastAsian) -> Self {
FontVariantEastAsian::Value(other.clone())
}
}
impl Parse for FontVariantEastAsian {
/// normal | [ <east-asian-variant-values> || <east-asian-width-values> || ruby ]
/// <east-asian-variant-values> = [ jis78 | jis83 | jis90 | jis04 | simplified | traditional ]
/// <east-asian-width-values> = [ full-width | proportional-width ]
fn parse<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantEastAsian, ParseError<'i>> {
let mut result = VariantEastAsian::empty();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantEastAsian::Value(result));<|fim▁hole|> match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?,
"jis78" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS78),
"jis83" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS83),
"jis90" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS90),
"jis04" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::JIS04),
"simplified" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::SIMPLIFIED),
"traditional" =>
exclusive_value!((result, VariantEastAsian::JIS78 | VariantEastAsian::JIS83 |
VariantEastAsian::JIS90 | VariantEastAsian::JIS04 |
VariantEastAsian::SIMPLIFIED | VariantEastAsian::TRADITIONAL
) => VariantEastAsian::TRADITIONAL),
"full-width" =>
exclusive_value!((result, VariantEastAsian::FULL_WIDTH |
VariantEastAsian::PROPORTIONAL_WIDTH
) => VariantEastAsian::FULL_WIDTH),
"proportional-width" =>
exclusive_value!((result, VariantEastAsian::FULL_WIDTH |
VariantEastAsian::PROPORTIONAL_WIDTH
) => VariantEastAsian::PROPORTIONAL_WIDTH),
"ruby" =>
exclusive_value!((result, VariantEastAsian::RUBY) => VariantEastAsian::RUBY),
_ => return Err(()),
},
)
}) {
result.insert(flag);
}
if !result.is_empty() {
Ok(FontVariantEastAsian::Value(result))
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
macro_rules! impl_variant_ligatures {
{
$(
$(#[$($meta:tt)+])*
$ident:ident / $css:expr => $gecko:ident = $value:expr,
)+
} => {
bitflags! {
#[derive(MallocSizeOf, ToResolvedValue, ToShmem)]
/// Variants of ligatures
pub struct VariantLigatures: u16 {
/// Specifies that common default features are enabled
const NORMAL = 0;
$(
$(#[$($meta)+])*
const $ident = $value;
)+
}
}
impl ToCss for VariantLigatures {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.is_empty() {
return dest.write_str("normal");
}
if self.contains(VariantLigatures::NONE) {
return dest.write_str("none");
}
let mut writer = SequenceWriter::new(dest, " ");
$(
if self.intersects(VariantLigatures::$ident) {
writer.raw_item($css)?;
}
)+
Ok(())
}
}
/// Asserts that all variant-ligatures matches its NS_FONT_VARIANT_LIGATURES_* value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_variant_ligatures_matches() {
use crate::gecko_bindings::structs;
$(
debug_assert_eq!(structs::$gecko as u16, VariantLigatures::$ident.bits());
)+
}
impl SpecifiedValueInfo for VariantLigatures {
fn collect_completion_keywords(f: KeywordsCollectFn) {
f(&["normal", $($css,)+]);
}
}
}
}
impl_variant_ligatures! {
/// Specifies that all types of ligatures and contextual forms
/// covered by this property are explicitly disabled
NONE / "none" => NS_FONT_VARIANT_LIGATURES_NONE = 0x01,
/// Enables display of common ligatures
COMMON_LIGATURES / "common-ligatures" => NS_FONT_VARIANT_LIGATURES_COMMON = 0x02,
/// Disables display of common ligatures
NO_COMMON_LIGATURES / "no-common-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_COMMON = 0x04,
/// Enables display of discretionary ligatures
DISCRETIONARY_LIGATURES / "discretionary-ligatures" => NS_FONT_VARIANT_LIGATURES_DISCRETIONARY = 0x08,
/// Disables display of discretionary ligatures
NO_DISCRETIONARY_LIGATURES / "no-discretionary-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_DISCRETIONARY = 0x10,
/// Enables display of historical ligatures
HISTORICAL_LIGATURES / "historical-ligatures" => NS_FONT_VARIANT_LIGATURES_HISTORICAL = 0x20,
/// Disables display of historical ligatures
NO_HISTORICAL_LIGATURES / "no-historical-ligatures" => NS_FONT_VARIANT_LIGATURES_NO_HISTORICAL = 0x40,
/// Enables display of contextual alternates
CONTEXTUAL / "contextual" => NS_FONT_VARIANT_LIGATURES_CONTEXTUAL = 0x80,
/// Disables display of contextual alternates
NO_CONTEXTUAL / "no-contextual" => NS_FONT_VARIANT_LIGATURES_NO_CONTEXTUAL = 0x100,
}
#[cfg(feature = "gecko")]
impl VariantLigatures {
/// Obtain a specified value from a Gecko keyword value
///
/// Intended for use with presentation attributes, not style structs
pub fn from_gecko_keyword(kw: u16) -> Self {
Self::from_bits_truncate(kw)
}
/// Transform into gecko keyword
pub fn to_gecko_keyword(self) -> u16 {
self.bits()
}
}
#[cfg(feature = "gecko")]
impl_gecko_keyword_conversions!(VariantLigatures, u16);
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Ligatures and contextual forms are ways of combining glyphs
/// to produce more harmonized forms
pub enum FontVariantLigatures {
/// Value variant with `variant-ligatures`
Value(VariantLigatures),
/// System font variant
#[css(skip)]
System(SystemFont),
}
impl FontVariantLigatures {
system_font_methods!(FontVariantLigatures, font_variant_ligatures);
/// Default value of `font-variant-ligatures` as `empty`
#[inline]
pub fn empty() -> FontVariantLigatures {
FontVariantLigatures::Value(VariantLigatures::empty())
}
#[inline]
/// Get `none` variant of `font-variant-ligatures`
pub fn none() -> FontVariantLigatures {
FontVariantLigatures::Value(VariantLigatures::NONE)
}
}
impl ToComputedValue for FontVariantLigatures {
type ComputedValue = computed::FontVariantLigatures;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantLigatures {
match *self {
FontVariantLigatures::Value(ref v) => v.clone(),
FontVariantLigatures::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantLigatures) -> Self {
FontVariantLigatures::Value(other.clone())
}
}
impl Parse for FontVariantLigatures {
/// normal | none |
/// [ <common-lig-values> ||
/// <discretionary-lig-values> ||
/// <historical-lig-values> ||
/// <contextual-alt-values> ]
/// <common-lig-values> = [ common-ligatures | no-common-ligatures ]
/// <discretionary-lig-values> = [ discretionary-ligatures | no-discretionary-ligatures ]
/// <historical-lig-values> = [ historical-ligatures | no-historical-ligatures ]
/// <contextual-alt-values> = [ contextual | no-contextual ]
fn parse<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantLigatures, ParseError<'i>> {
let mut result = VariantLigatures::empty();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantLigatures::Value(result));
}
if input
.try(|input| input.expect_ident_matching("none"))
.is_ok()
{
return Ok(FontVariantLigatures::Value(VariantLigatures::NONE));
}
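// `exclusive_value!` only yields a flag when none of the listed
// conflicting bits are already set, so opposed keywords such as
// `common-ligatures` and `no-common-ligatures` cannot combine.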
while let Ok(flag) = input.try(|input| {
Ok(
match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?,
"common-ligatures" =>
exclusive_value!((result, VariantLigatures::COMMON_LIGATURES |
VariantLigatures::NO_COMMON_LIGATURES
) => VariantLigatures::COMMON_LIGATURES),
"no-common-ligatures" =>
exclusive_value!((result, VariantLigatures::COMMON_LIGATURES |
VariantLigatures::NO_COMMON_LIGATURES
) => VariantLigatures::NO_COMMON_LIGATURES),
"discretionary-ligatures" =>
exclusive_value!((result, VariantLigatures::DISCRETIONARY_LIGATURES |
VariantLigatures::NO_DISCRETIONARY_LIGATURES
) => VariantLigatures::DISCRETIONARY_LIGATURES),
"no-discretionary-ligatures" =>
exclusive_value!((result, VariantLigatures::DISCRETIONARY_LIGATURES |
VariantLigatures::NO_DISCRETIONARY_LIGATURES
) => VariantLigatures::NO_DISCRETIONARY_LIGATURES),
"historical-ligatures" =>
exclusive_value!((result, VariantLigatures::HISTORICAL_LIGATURES |
VariantLigatures::NO_HISTORICAL_LIGATURES
) => VariantLigatures::HISTORICAL_LIGATURES),
"no-historical-ligatures" =>
exclusive_value!((result, VariantLigatures::HISTORICAL_LIGATURES |
VariantLigatures::NO_HISTORICAL_LIGATURES
) => VariantLigatures::NO_HISTORICAL_LIGATURES),
"contextual" =>
exclusive_value!((result, VariantLigatures::CONTEXTUAL |
VariantLigatures::NO_CONTEXTUAL
) => VariantLigatures::CONTEXTUAL),
"no-contextual" =>
exclusive_value!((result, VariantLigatures::CONTEXTUAL |
VariantLigatures::NO_CONTEXTUAL
) => VariantLigatures::NO_CONTEXTUAL),
_ => return Err(()),
},
)
}) {
result.insert(flag);
}
if !result.is_empty() {
Ok(FontVariantLigatures::Value(result))
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
macro_rules! impl_variant_numeric {
{
$(
$(#[$($meta:tt)+])*
$ident:ident / $css:expr => $gecko:ident = $value:expr,
)+
} => {
bitflags! {
#[derive(MallocSizeOf, ToResolvedValue, ToShmem)]
/// Variants of numeric values
pub struct VariantNumeric: u8 {
/// None of other variants are enabled.
const NORMAL = 0;
$(
$(#[$($meta)+])*
const $ident = $value;
)+
}
}
impl ToCss for VariantNumeric {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.is_empty() {
return dest.write_str("normal");
}
let mut writer = SequenceWriter::new(dest, " ");
$(
if self.intersects(VariantNumeric::$ident) {
writer.raw_item($css)?;
}
)+
Ok(())
}
}
/// Asserts that all variant-numeric matches its NS_FONT_VARIANT_NUMERIC_* value.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_variant_numeric_matches() {
use crate::gecko_bindings::structs;
$(
debug_assert_eq!(structs::$gecko as u8, VariantNumeric::$ident.bits());
)+
}
impl SpecifiedValueInfo for VariantNumeric {
fn collect_completion_keywords(f: KeywordsCollectFn) {
f(&["normal", $($css,)+]);
}
}
}
}
impl_variant_numeric! {
/// Enables display of lining numerals.
LINING_NUMS / "lining-nums" => NS_FONT_VARIANT_NUMERIC_LINING = 0x01,
/// Enables display of old-style numerals.
OLDSTYLE_NUMS / "oldstyle-nums" => NS_FONT_VARIANT_NUMERIC_OLDSTYLE = 0x02,
/// Enables display of proportional numerals.
PROPORTIONAL_NUMS / "proportional-nums" => NS_FONT_VARIANT_NUMERIC_PROPORTIONAL = 0x04,
/// Enables display of tabular numerals.
TABULAR_NUMS / "tabular-nums" => NS_FONT_VARIANT_NUMERIC_TABULAR = 0x08,
/// Enables display of lining diagonal fractions.
DIAGONAL_FRACTIONS / "diagonal-fractions" => NS_FONT_VARIANT_NUMERIC_DIAGONAL_FRACTIONS = 0x10,
/// Enables display of lining stacked fractions.
STACKED_FRACTIONS / "stacked-fractions" => NS_FONT_VARIANT_NUMERIC_STACKED_FRACTIONS = 0x20,
/// Enables display of letter forms used with ordinal numbers.
ORDINAL / "ordinal" => NS_FONT_VARIANT_NUMERIC_ORDINAL = 0x80,
/// Enables display of slashed zeros.
SLASHED_ZERO / "slashed-zero" => NS_FONT_VARIANT_NUMERIC_SLASHZERO = 0x40,
}
#[cfg(feature = "gecko")]
impl VariantNumeric {
/// Obtain a specified value from a Gecko keyword value
///
/// Intended for use with presentation attributes, not style structs
pub fn from_gecko_keyword(kw: u8) -> Self {
Self::from_bits_truncate(kw)
}
/// Transform into gecko keyword
pub fn to_gecko_keyword(self) -> u8 {
self.bits()
}
}
#[cfg(feature = "gecko")]
impl_gecko_keyword_conversions!(VariantNumeric, u8);
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Specifies control over numerical forms.
pub enum FontVariantNumeric {
/// Value variant with `variant-numeric`
Value(VariantNumeric),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontVariantNumeric {
#[inline]
/// Default value of `font-variant-numeric` as `empty`
pub fn empty() -> FontVariantNumeric {
FontVariantNumeric::Value(VariantNumeric::empty())
}
system_font_methods!(FontVariantNumeric, font_variant_numeric);
}
impl ToComputedValue for FontVariantNumeric {
type ComputedValue = computed::FontVariantNumeric;
fn to_computed_value(&self, context: &Context) -> computed::FontVariantNumeric {
match *self {
FontVariantNumeric::Value(ref v) => v.clone(),
FontVariantNumeric::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariantNumeric) -> Self {
FontVariantNumeric::Value(other.clone())
}
}
impl Parse for FontVariantNumeric {
/// normal |
/// [ <numeric-figure-values> ||
/// <numeric-spacing-values> ||
/// <numeric-fraction-values> ||
/// ordinal ||
/// slashed-zero ]
/// <numeric-figure-values> = [ lining-nums | oldstyle-nums ]
/// <numeric-spacing-values> = [ proportional-nums | tabular-nums ]
/// <numeric-fraction-values> = [ diagonal-fractions | stacked-fractions ]
fn parse<'i, 't>(
_context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariantNumeric, ParseError<'i>> {
let mut result = VariantNumeric::empty();
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontVariantNumeric::Value(result));
}
while let Ok(flag) = input.try(|input| {
Ok(
match_ignore_ascii_case! { &input.expect_ident().map_err(|_| ())?,
"ordinal" =>
exclusive_value!((result, VariantNumeric::ORDINAL) => VariantNumeric::ORDINAL),
"slashed-zero" =>
exclusive_value!((result, VariantNumeric::SLASHED_ZERO) => VariantNumeric::SLASHED_ZERO),
"lining-nums" =>
exclusive_value!((result, VariantNumeric::LINING_NUMS |
VariantNumeric::OLDSTYLE_NUMS
) => VariantNumeric::LINING_NUMS),
"oldstyle-nums" =>
exclusive_value!((result, VariantNumeric::LINING_NUMS |
VariantNumeric::OLDSTYLE_NUMS
) => VariantNumeric::OLDSTYLE_NUMS),
"proportional-nums" =>
exclusive_value!((result, VariantNumeric::PROPORTIONAL_NUMS |
VariantNumeric::TABULAR_NUMS
) => VariantNumeric::PROPORTIONAL_NUMS),
"tabular-nums" =>
exclusive_value!((result, VariantNumeric::PROPORTIONAL_NUMS |
VariantNumeric::TABULAR_NUMS
) => VariantNumeric::TABULAR_NUMS),
"diagonal-fractions" =>
exclusive_value!((result, VariantNumeric::DIAGONAL_FRACTIONS |
VariantNumeric::STACKED_FRACTIONS
) => VariantNumeric::DIAGONAL_FRACTIONS),
"stacked-fractions" =>
exclusive_value!((result, VariantNumeric::DIAGONAL_FRACTIONS |
VariantNumeric::STACKED_FRACTIONS
) => VariantNumeric::STACKED_FRACTIONS),
_ => return Err(()),
},
)
}) {
result.insert(flag);
}
if !result.is_empty() {
Ok(FontVariantNumeric::Value(result))
} else {
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
}
/// This property provides low-level control over OpenType or TrueType font features.
pub type SpecifiedFontFeatureSettings = FontSettings<FeatureTagValue<Integer>>;
/// Define initial settings that apply when the font defined by an @font-face
/// rule is rendered.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub enum FontFeatureSettings {
/// Value of `FontSettings`
Value(SpecifiedFontFeatureSettings),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontFeatureSettings {
#[inline]
/// Get default value of `font-feature-settings` as normal
pub fn normal() -> FontFeatureSettings {
FontFeatureSettings::Value(FontSettings::normal())
}
system_font_methods!(FontFeatureSettings, font_feature_settings);
}
impl ToComputedValue for FontFeatureSettings {
type ComputedValue = computed::FontFeatureSettings;
fn to_computed_value(&self, context: &Context) -> computed::FontFeatureSettings {
match *self {
FontFeatureSettings::Value(ref v) => v.to_computed_value(context),
FontFeatureSettings::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontFeatureSettings) -> Self {
FontFeatureSettings::Value(ToComputedValue::from_computed_value(other))
}
}
impl Parse for FontFeatureSettings {
/// normal | <feature-tag-value>#
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontFeatureSettings, ParseError<'i>> {
SpecifiedFontFeatureSettings::parse(context, input).map(FontFeatureSettings::Value)
}
}
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToResolvedValue,
ToShmem,
)]
/// Whether user agents are allowed to synthesize bold or oblique font faces
/// when a font family lacks bold or italic faces
pub struct FontSynthesis {
/// If a `font-weight` is requested that the font family does not contain,
/// the user agent may synthesize the requested weight from the weights
/// that do exist in the font family.
#[css(represents_keyword)]
pub weight: bool,
/// If a font-style is requested that the font family does not contain,
/// the user agent may synthesize the requested style from the normal face in the font family.
#[css(represents_keyword)]
pub style: bool,
}
impl FontSynthesis {
#[inline]
/// Get the default value of font-synthesis
pub fn get_initial_value() -> Self {
FontSynthesis {
weight: true,
style: true,
}
}
}
impl Parse for FontSynthesis {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontSynthesis, ParseError<'i>> {
let mut result = FontSynthesis {
weight: false,
style: false,
};
try_match_ident_ignore_ascii_case! { input,
"none" => Ok(result),
"weight" => {
result.weight = true;
if input.try(|input| input.expect_ident_matching("style")).is_ok() {
result.style = true;
}
Ok(result)
},
"style" => {
result.style = true;
if input.try(|input| input.expect_ident_matching("weight")).is_ok() {
result.weight = true;
}
Ok(result)
},
}
}
}
impl ToCss for FontSynthesis {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
if self.weight && self.style {
dest.write_str("weight style")
} else if self.style {
dest.write_str("style")
} else if self.weight {
dest.write_str("weight")
} else {
dest.write_str("none")
}
}
}
#[cfg(feature = "gecko")]
impl From<u8> for FontSynthesis {
fn from(bits: u8) -> FontSynthesis {
use crate::gecko_bindings::structs;
FontSynthesis {
weight: bits & structs::NS_FONT_SYNTHESIS_WEIGHT as u8 != 0,
style: bits & structs::NS_FONT_SYNTHESIS_STYLE as u8 != 0,
}
}
}
#[cfg(feature = "gecko")]
impl From<FontSynthesis> for u8 {
fn from(v: FontSynthesis) -> u8 {
use crate::gecko_bindings::structs;
let mut bits: u8 = 0;
if v.weight {
bits |= structs::NS_FONT_SYNTHESIS_WEIGHT as u8;
}
if v.style {
bits |= structs::NS_FONT_SYNTHESIS_STYLE as u8;
}
bits
}
}
#[derive(Clone, Debug, Eq, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Allows authors to explicitly specify the language system of the font,
/// overriding the language system implied by the content language
pub enum FontLanguageOverride {
/// When rendering with OpenType fonts,
/// the content language of the element is
/// used to infer the OpenType language system
Normal,
/// Single three-letter case-sensitive OpenType language system tag,
/// specifies the OpenType language system to be used instead of
/// the language system implied by the language of the element
Override(Box<str>),
/// Use system font
#[css(skip)]
System(SystemFont),
}
impl FontLanguageOverride {
#[inline]
/// Get default value with `normal`
pub fn normal() -> FontLanguageOverride {
FontLanguageOverride::Normal
}
/// The ToComputedValue implementation for non-system-font
/// FontLanguageOverride, used for @font-face descriptors.
#[inline]
pub fn compute_non_system(&self) -> computed::FontLanguageOverride {
match *self {
FontLanguageOverride::Normal => computed::FontLanguageOverride::zero(),
FontLanguageOverride::Override(ref lang) => {
computed::FontLanguageOverride::from_str(lang)
},
FontLanguageOverride::System(..) => unreachable!(),
}
}
system_font_methods!(FontLanguageOverride, font_language_override);
}
impl ToComputedValue for FontLanguageOverride {
type ComputedValue = computed::FontLanguageOverride;
#[inline]
fn to_computed_value(&self, context: &Context) -> computed::FontLanguageOverride {
match *self {
FontLanguageOverride::System(_) => self.compute_system(context),
_ => self.compute_non_system(),
}
}
#[inline]
fn from_computed_value(computed: &computed::FontLanguageOverride) -> Self {
if *computed == computed::FontLanguageOverride::zero() {
return FontLanguageOverride::Normal;
}
FontLanguageOverride::Override(computed.to_str(&mut [0; 4]).into())
}
}
impl Parse for FontLanguageOverride {
/// normal | <string>
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontLanguageOverride, ParseError<'i>> {
if input
.try(|input| input.expect_ident_matching("normal"))
.is_ok()
{
return Ok(FontLanguageOverride::Normal);
}
let string = input.expect_string()?;
Ok(FontLanguageOverride::Override(
string.as_ref().to_owned().into_boxed_str(),
))
}
}
/// This property provides low-level control over OpenType or TrueType font
/// variations.
pub type SpecifiedFontVariationSettings = FontSettings<VariationValue<Number>>;
/// Define initial settings that apply when the font defined by an @font-face
/// rule is rendered.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
pub enum FontVariationSettings {
/// Value of `FontSettings`
Value(SpecifiedFontVariationSettings),
/// System font
#[css(skip)]
System(SystemFont),
}
impl FontVariationSettings {
#[inline]
/// Get default value of `font-variation-settings` as normal
pub fn normal() -> FontVariationSettings {
FontVariationSettings::Value(FontSettings::normal())
}
system_font_methods!(FontVariationSettings, font_variation_settings);
}
impl ToComputedValue for FontVariationSettings {
type ComputedValue = computed::FontVariationSettings;
fn to_computed_value(&self, context: &Context) -> computed::FontVariationSettings {
match *self {
FontVariationSettings::Value(ref v) => v.to_computed_value(context),
FontVariationSettings::System(_) => self.compute_system(context),
}
}
fn from_computed_value(other: &computed::FontVariationSettings) -> Self {
FontVariationSettings::Value(ToComputedValue::from_computed_value(other))
}
}
impl Parse for FontVariationSettings {
/// normal | <variation-tag-value>#
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<FontVariationSettings, ParseError<'i>> {
SpecifiedFontVariationSettings::parse(context, input).map(FontVariationSettings::Value)
}
}
fn parse_one_feature_value<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Integer, ParseError<'i>> {
if let Ok(integer) = input.try(|i| Integer::parse_non_negative(context, i)) {
return Ok(integer);
}
try_match_ident_ignore_ascii_case! { input,
"on" => Ok(Integer::new(1)),
"off" => Ok(Integer::new(0)),
}
}
impl Parse for FeatureTagValue<Integer> {
/// https://drafts.csswg.org/css-fonts-4/#feature-tag-value
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let tag = FontTag::parse(context, input)?;
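// Per css-fonts-4, an omitted value defaults to 1 (feature enabled);
// `on`/`off` map to 1/0 in parse_one_feature_value above.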
let value = input
.try(|i| parse_one_feature_value(context, i))
.unwrap_or_else(|_| Integer::new(1));
Ok(Self { tag, value })
}
}
impl Parse for VariationValue<Number> {
/// This is the `<string> <number>` part of the font-variation-settings
/// syntax.
fn parse<'i, 't>(
context: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<Self, ParseError<'i>> {
let tag = FontTag::parse(context, input)?;
let value = Number::parse(context, input)?;
Ok(Self { tag, value })
}
}
#[derive(
Clone,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
/// text-zoom. Enable if true, disable if false
pub struct XTextZoom(#[css(skip)] pub bool);
impl Parse for XTextZoom {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<XTextZoom, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
#[derive(
Clone,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
/// Internal property that reflects the lang attribute
pub struct XLang(#[css(skip)] pub Atom);
impl XLang {
#[inline]
/// Get default value for `-x-lang`
pub fn get_initial_value() -> XLang {
XLang(atom!(""))
}
}
impl Parse for XLang {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<XLang, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Specifies the minimum font size allowed due to changes in scriptlevel.
/// Ref: https://wiki.mozilla.org/MathML:mstyle
pub struct MozScriptMinSize(pub NoCalcLength);
impl MozScriptMinSize {
#[inline]
/// Calculate initial value of -moz-script-min-size.
pub fn get_initial_value() -> Length {
Length::new(DEFAULT_SCRIPT_MIN_SIZE_PT as f32 * (AU_PER_PT / AU_PER_PX))
}
}
impl Parse for MozScriptMinSize {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<MozScriptMinSize, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(Clone, Copy, Debug, PartialEq, SpecifiedValueInfo, ToCss, ToShmem)]
/// Changes the scriptlevel in effect for the children.
/// Ref: https://wiki.mozilla.org/MathML:mstyle
///
/// The main effect of scriptlevel is to control the font size.
/// https://www.w3.org/TR/MathML3/chapter3.html#presm.scriptlevel
pub enum MozScriptLevel {
/// Change `font-size` relatively.
Relative(i32),
/// Change `font-size` absolutely.
///
/// Should only be serialized by presentation attributes, so even though
/// serialization for this would look the same as for the `Relative`
/// variant, it is unexposed, so no big deal.
#[css(function)]
MozAbsolute(i32),
/// Change `font-size` automatically.
Auto,
}
impl Parse for MozScriptLevel {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<MozScriptLevel, ParseError<'i>> {
// We don't bother to handle calc here.
if let Ok(i) = input.try(|i| i.expect_integer()) {
return Ok(MozScriptLevel::Relative(i));
}
input.expect_ident_matching("auto")?;
Ok(MozScriptLevel::Auto)
}
}
#[cfg_attr(feature = "gecko", derive(MallocSizeOf))]
#[derive(
Clone,
Copy,
Debug,
PartialEq,
SpecifiedValueInfo,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
/// Specifies the multiplier to be used to adjust font size
/// due to changes in scriptlevel.
///
/// Ref: https://www.w3.org/TR/MathML3/chapter3.html#presm.mstyle.attrs
pub struct MozScriptSizeMultiplier(pub f32);
impl MozScriptSizeMultiplier {
#[inline]
/// Get default value of `-moz-script-size-multiplier`
pub fn get_initial_value() -> MozScriptSizeMultiplier {
MozScriptSizeMultiplier(DEFAULT_SCRIPT_SIZE_MULTIPLIER as f32)
}
}
impl Parse for MozScriptSizeMultiplier {
fn parse<'i, 't>(
_: &ParserContext,
input: &mut Parser<'i, 't>,
) -> Result<MozScriptSizeMultiplier, ParseError<'i>> {
debug_assert!(
false,
"Should be set directly by presentation attributes only."
);
Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError))
}
}
impl From<f32> for MozScriptSizeMultiplier {
fn from(v: f32) -> Self {
MozScriptSizeMultiplier(v)
}
}
impl From<MozScriptSizeMultiplier> for f32 {
fn from(v: MozScriptSizeMultiplier) -> f32 {
v.0
}
}<|fim▁end|> | }
while let Ok(flag) = input.try(|input| {
Ok( |
<|file_name|>JmsEventManager.java<|end_file_name|><|fim▁begin|>package it.breex.bus.impl.jms;
import it.breex.bus.event.AbstractResponseEvent;
import it.breex.bus.event.EventData;
import it.breex.bus.event.EventHandler;
import it.breex.bus.event.RequestEvent;
import it.breex.bus.impl.AbstractEventManager;
import java.util.UUID;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.ObjectMessage;
import javax.jms.Queue;
import javax.jms.Session;
public class JmsEventManager extends AbstractEventManager {
private final static String DEFAULT_REQUEST_QUEUE = "breexDefaulRequestQueue";
private final String nodeId = UUID.randomUUID().toString();
private final boolean transacted = false;
private final int acknowledgeMode = Session.AUTO_ACKNOWLEDGE;
private final Connection jmsConnection;
private final Session session;
private final Queue requestQueue;
private final MessageProducer requestMessageProducer;
private final Queue responseQueue;
private final MessageProducer responseMessageProducer;
public JmsEventManager(ConnectionFactory jmsConnectionFactory) {
try {
jmsConnection = jmsConnectionFactory.createConnection();
jmsConnection.start();
session = jmsConnection.createSession(transacted, acknowledgeMode);
requestQueue = session.createQueue(DEFAULT_REQUEST_QUEUE);
requestMessageProducer = session.createProducer(requestQueue);
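// Responses come back on a per-node temporary queue; the response
// producer is created without a destination so each send() can target
// whatever reply-to queue the incoming request carried.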
responseQueue = session.createTemporaryQueue();
responseMessageProducer = session.createProducer(null);
session.createConsumer(responseQueue).setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
try {
EventData<?> eventData = (EventData<?>) ((ObjectMessage) message).getObject();
getLogger().debug("Event Response received. Event name: [{}], sender id: [{}]", eventData.getName(),
eventData.getSenderId());
AbstractResponseEvent responseEvent = new AbstractResponseEvent(eventData) {
};
processResponse(responseEvent, getResponseHandlers().remove(eventData.getId()));
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
});
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
@Override
public String getLocalNodeId() {
return nodeId;
}
@Override
protected <I, O> void prepareResponse(EventData<I> requestEventData, EventData<O> responseEventData) {
try {
Message responseMessage = session.createObjectMessage(responseEventData);
responseMessageProducer.send((Destination) requestEventData.getTransportData(), responseMessage);
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
@Override
protected <I, O> void registerCallback(String eventName, EventHandler<RequestEvent<I, O>> eventHandler) {
getLogger().debug("Registering event. Event name: [{}]", eventName);
MessageConsumer eventConsumer;
try {
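// A JMS message selector narrows the shared request queue to messages
// whose correlation ID matches this event name (the sender sets it in
// prepareRequest).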
eventConsumer = session.createConsumer(requestQueue, "JMSCorrelationID='" + eventName + "'");
eventConsumer.setMessageListener(new MessageListener() {
@Override
public void onMessage(Message message) {
EventData<I> requestEventData;
try {
requestEventData = (EventData<I>) ((ObjectMessage) message).getObject();
getLogger().debug("Received event. Event name: [{}] CorrelationID: [{}]", requestEventData.getName(),
message.getJMSCorrelationID());
processRequest(requestEventData);
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
});
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
@Override
protected <I> void prepareRequest(EventData<I> eventData) {
try {<|fim▁hole|> requestMessageProducer.send(message);
} catch (JMSException e) {
throw new RuntimeException(e);
}
}
}<|fim▁end|> | eventData.setTransportData(responseQueue);
ObjectMessage message = session.createObjectMessage(eventData);
message.setJMSCorrelationID(eventData.getName());
message.setJMSReplyTo(responseQueue); |
<|file_name|>diffservers1.js<|end_file_name|><|fim▁begin|>s = new ShardingTest( "diffservers1" , 2 );
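// With two shards, both register in the config collection, but only
// the first mongod hosts the config database.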
assert.eq( 2 , s.config.shards.count() , "server count wrong" );
assert.eq( 2 , s._connections[0].getDB( "config" ).shards.count() , "where are servers!" );
assert.eq( 0 , s._connections[1].getDB( "config" ).shards.count() , "shouldn't be here" );
test1 = s.getDB( "test1" ).foo;<|fim▁hole|>
assert( ! s.admin.runCommand( { addshard: "sdd$%" } ).ok , "bad hostname" );
assert( ! s.admin.runCommand( { addshard: "127.0.0.1:43415" } ).ok , "host not up" );
assert( ! s.admin.runCommand( { addshard: "10.0.0.1:43415" } ).ok , "allowed shard in IP when config is localhost" );
s.stop();<|fim▁end|> | test1.save( { a : 1 } );
test1.save( { a : 2 } );
test1.save( { a : 3 } );
assert.eq( 3 , test1.count() , "count wrong" );