prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>client.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import datetime
import jwt
import re
import logging
from six.moves.urllib.parse import parse_qs, urlparse, urlsplit
from sentry.integrations.atlassian_connect import get_query_hash
from sentry.shared_integrations.exceptions import ApiError
from sentry.integrations.client import ApiClient
from sentry.utils.http import absolute_uri
logger = logging.getLogger("sentry.integrations.jira")
JIRA_KEY = "%s.jira" % (urlparse(absolute_uri()).hostname,)
ISSUE_KEY_RE = re.compile(r"^[A-Za-z][A-Za-z0-9]*-\d+$")
class JiraCloud(object):
    """
    Jira-cloud specific behavior (JWT authentication, cache prefixing,
    GDPR-compliant user fields) needed by JiraApiClient to talk to a
    Jira Cloud instance.
    """

    def __init__(self, shared_secret):
        # Atlassian Connect shared secret used to sign outgoing JWTs.
        self.shared_secret = shared_secret

    @property
    def cache_prefix(self):
        return "sentry-jira-2:"

    def request_hook(self, method, path, data, params, **kwargs):
        """
        Used by Jira Client to apply the jira-cloud authentication.
        Returns a request spec dict with a signed `jwt` query parameter.
        """
        # Params may already be embedded in the path's query string;
        # merge them with the explicitly-passed params.
        query_params = dict(parse_qs(urlsplit(path).query))
        query_params.update(params or {})
        path = path.split("?")[0]

        claims = {
            "iss": JIRA_KEY,
            "iat": datetime.datetime.utcnow(),
            # Token valid for 5 minutes.
            "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=5 * 60),
            # qsh (query string hash) is required by Atlassian Connect.
            "qsh": get_query_hash(path, method.upper(), query_params),
        }
        token = jwt.encode(claims, self.shared_secret)

        request_spec = kwargs.copy()
        request_spec.update(
            method=method,
            path=path,
            data=data,
            params=dict(jwt=token, **(query_params or {})),
        )
        return request_spec

    def user_id_field(self):
        """
        Jira-Cloud requires GDPR compliant API usage so we have to use accountId
        """
        return "accountId"

    def user_query_param(self):
        """
        Jira-Cloud requires GDPR compliant API usage so we have to use query
        """
        return "query"

    def user_id_get_param(self):
        """
        Jira-Cloud requires GDPR compliant API usage so we have to use accountId
        """
        return "accountId"
class JiraApiClient(ApiClient):
# TODO: Update to v3 endpoints
COMMENTS_URL = "/rest/api/2/issue/%s/comment"
COMMENT_URL = "/rest/api/2/issue/%s/comment/%s"
STATUS_URL = "/rest/api/2/status"
CREATE_URL = "/rest/api/2/issue"
ISSUE_URL = "/rest/api/2/issue/%s"
META_URL = "/rest/api/2/issue/createmeta"
PRIORITIES_URL = "/rest/api/2/priority"
PROJECT_URL = "/rest/api/2/project"
SEARCH_URL = "/rest/api/2/search/"
VERSIONS_URL = "/rest/api/2/project/%s/versions"
USERS_URL = "/rest/api/2/user/assignable/search"
USER_URL = "/rest/api/2/user"
SERVER_INFO_URL = "/rest/api/2/serverInfo"
ASSIGN_URL = "/rest/api/2/issue/%s/assignee"
TRANSITION_URL = "/rest/api/2/issue/%s/transitions"
EMAIL_URL = "/rest/api/3/user/email"
integration_name = "jira"
# This timeout is completely arbitrary. Jira doesn't give us any
# caching headers to work with. Ideally we want a duration that
# lets the user make their second jira issue with cached data.<|fim▁hole|> self.base_url = base_url
# `jira_style` encapsulates differences between jira server & jira cloud.
# We only support one API version for Jira, but server/cloud require different
# authentication mechanisms and caching.
self.jira_style = jira_style
super(JiraApiClient, self).__init__(verify_ssl, logging_context)
def get_cache_prefix(self):
    """Delegate cache-key prefixing to the jira-style implementation."""
    return self.jira_style.cache_prefix
def request(self, method, path, data=None, params=None, **kwargs):
    """
    Use the request_hook method for our specific style of Jira to
    add authentication data and transform parameters, then dispatch.
    """
    spec = self.jira_style.request_hook(method, path, data, params, **kwargs)
    headers = spec.setdefault("headers", {})
    # Force adherence to the GDPR compliant API conventions.
    # See
    # https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide
    headers["x-atlassian-force-account-id"] = "true"
    return self._request(**spec)
def user_id_get_param(self):
    """Query-param name used to look a user up by id (style-specific)."""
    return self.jira_style.user_id_get_param()

def user_id_field(self):
    """Field name that carries a user's identifier (style-specific)."""
    return self.jira_style.user_id_field()

def user_query_param(self):
    """Query-param name used for user searches (style-specific)."""
    return self.jira_style.user_query_param()
def get_issue(self, issue_id):
    """Fetch a single issue by its key or numeric id."""
    return self.get(self.ISSUE_URL % (issue_id,))
def search_issues(self, query):
    """
    Run a JQL search. If the query looks like an issue key (e.g. PROJ-123)
    do an exact id lookup, otherwise a full-text search.
    """
    escaped = query.replace('"', '\\"')
    if ISSUE_KEY_RE.match(query):
        jql = 'id="%s"' % escaped
    else:
        jql = 'text ~ "%s"' % escaped
    return self.get(self.SEARCH_URL, params={"jql": jql})
def create_comment(self, issue_key, comment):
    """Add a new comment to an issue."""
    return self.post(self.COMMENTS_URL % issue_key, data={"body": comment})

def update_comment(self, issue_key, comment_id, comment):
    """Replace the body of an existing comment on an issue."""
    return self.put(self.COMMENT_URL % (issue_key, comment_id), data={"body": comment})
def get_projects_list(self):
    """Return the (cached) list of all visible projects."""
    return self.get_cached(self.PROJECT_URL)
def get_project_key_for_id(self, project_id):
    """
    Map a numeric project id to its project key.
    Returns "" when the id is falsy or no project matches.
    """
    if not project_id:
        return ""
    for project in self.get_projects_list():
        if project["id"] == project_id:
            return project["key"].encode("utf-8")
    return ""
def get_create_meta_for_project(self, project):
    """
    Fetch createmeta (issue types and their fields) for one project.
    Returns the single matching project dict, or None when Jira
    responds with nothing usable.
    """
    response = self.get_cached(
        self.META_URL,
        params={"expand": "projects.issuetypes.fields", "projectIds": project},
    )
    # We saw an empty JSON response come back from the API :(
    if not response:
        logger.info(
            "jira.get-create-meta.empty-response",
            extra={"base_url": self.base_url, "project": project},
        )
        return None
    # XXX(dcramer): document how this is possible, if it even is
    if len(response["projects"]) > 1:
        raise ApiError(u"More than one project found matching {}.".format(project))
    try:
        return response["projects"][0]
    except IndexError:
        logger.info(
            "jira.get-create-meta.key-error",
            extra={"base_url": self.base_url, "project": project},
        )
        return None
def get_versions(self, project):
    """Cached list of versions for a project."""
    return self.get_cached(self.VERSIONS_URL % project)

def get_priorities(self):
    """Cached list of issue priorities."""
    return self.get_cached(self.PRIORITIES_URL)
def get_users_for_project(self, project):
    """Assignable users for a project."""
    # Jira Server wants a project key, while cloud is indifferent.
    key = self.get_project_key_for_id(project)
    return self.get_cached(self.USERS_URL, params={"project": key})

def search_users_for_project(self, project, username):
    """Assignable users for a project, filtered by the style's query param."""
    # Jira Server wants a project key, while cloud is indifferent.
    key = self.get_project_key_for_id(project)
    return self.get_cached(
        self.USERS_URL, params={"project": key, self.user_query_param(): username}
    )

def search_users_for_issue(self, issue_key, email):
    """Assignable users for a specific issue, filtered by the style's query param."""
    return self.get_cached(
        self.USERS_URL, params={"issueKey": issue_key, self.user_query_param(): email}
    )
def get_user(self, user_id):
    """Look a user up by their style-specific identifier."""
    return self.get_cached(self.USER_URL, params={self.user_id_get_param(): user_id})
def create_issue(self, raw_form_data):
    """Create a new issue from raw field data."""
    return self.post(self.CREATE_URL, data={"fields": raw_form_data})

def get_server_info(self):
    """Fetch Jira server metadata (uncached)."""
    return self.get(self.SERVER_INFO_URL)

def get_valid_statuses(self):
    """Cached list of workflow statuses."""
    return self.get_cached(self.STATUS_URL)
def get_transitions(self, issue_key):
    """Available workflow transitions for an issue."""
    return self.get_cached(self.TRANSITION_URL % issue_key)["transitions"]

def transition_issue(self, issue_key, transition_id):
    """Apply a workflow transition to an issue."""
    return self.post(self.TRANSITION_URL % issue_key, {"transition": {"id": transition_id}})
def assign_issue(self, key, name_or_account_id):
    """Assign an issue; the style decides whether the id field is accountId or name."""
    return self.put(self.ASSIGN_URL % key, data={self.user_id_field(): name_or_account_id})
def get_email(self, account_id):
    """Fetch a user's email address by account id; None when absent."""
    account = self.get_cached(self.EMAIL_URL, params={"accountId": account_id})
    return account.get("email")
def __init__(self, base_url, jira_style, verify_ssl, logging_context=None): |
<|file_name|>log-knows-the-names-of-variants-in-std.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test fixture: an enum with two tuple variants carrying different
// payload types, used to verify that debug ({:?}) formatting includes
// the variant names.
#[derive(Clone, Show)]
enum foo {
a(uint),
b(String),
}
// Assert that the debug formatting of `v` equals the expected string `exp`.
fn check_log<T: std::fmt::Show>(exp: String, v: T) {
assert_eq!(exp, format!("{:?}", v));
}
pub fn main() {
// A `Some` wrapping a variant should format with the variant name visible.
let mut x = Some(foo::a(22u));
let exp = "Some(a(22u))".to_string();
let act = format!("{:?}", x);
assert_eq!(act, exp);
check_log(exp, x);
// `None` formats as just "None".
x = None;
let exp = "None".to_string();
let act = format!("{:?}", x);
assert_eq!(act, exp);
check_log(exp, x);
}
<|file_name|>IPoint.java<|end_file_name|><|fim▁begin|>/*---
iGeo - http://igeo.jp
Copyright (c) 2002-2013 Satoru Sugihara
This file is part of iGeo.
iGeo is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, version 3.
iGeo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with iGeo. If not, see <http://www.gnu.org/licenses/>.
---*/
package igeo;
import java.awt.*;
import igeo.gui.*;
/**
Class of point object.
@author Satoru Sugihara
*/
public class IPoint extends IGeometry implements IVecI{
//public IVecI pos;
public IVec pos;
// --- Constructors -------------------------------------------------------
// Every constructor initializes the position vector `pos` and then calls
// initPoint(); overloads taking IServerI attach the point to that server.
public IPoint(){ pos = new IVec(); initPoint(null); }
public IPoint(IVec v){ pos = v; initPoint(null); }
public IPoint(IVecI v){ pos = v.get(); initPoint(null); }
public IPoint(double x, double y, double z){ pos = new IVec(x,y,z); initPoint(null); }
public IPoint(double x, double y){ pos = new IVec(x,y); initPoint(null); }
public IPoint(IServerI s){ super(s); pos = new IVec(0,0,0); initPoint(s); }
public IPoint(IServerI s, IVec v){ super(s); pos = v; initPoint(s); }
public IPoint(IServerI s, IVecI v){ super(s); pos = v.get(); initPoint(s); }
public IPoint(IServerI s, double x, double y, double z){
super(s); pos = new IVec(x,y,z); initPoint(s);
}
public IPoint(IServerI s, double x, double y){
super(s); pos = new IVec(x,y); initPoint(s);
}
// Copy constructors: duplicate the source point's position vector.
public IPoint(IPoint p){
super(p);
pos = p.pos.dup();
initPoint(p.server);
//setColor(p.getColor());
}
public IPoint(IServerI s, IPoint p){
super(s,p);
pos = p.pos.dup();
initPoint(s);
//setColor(p.getColor());
}
// Validate `pos` and create graphics; skipped when graphics already exist
// (i.e. when invoked from a copy constructor).
public /*protected*/ void initPoint(IServerI s){
if(pos==null){
IOut.err("null value is set in IPoint"); //
return;
}
// // costly to use instanceof?
//if(pos instanceof IVec) parameter = (IVec)pos;
//else if(pos instanceof IVecR) parameter = (IVecR)pos;
//else if(pos instanceof IVec4) parameter = (IVec4)pos;
//else if(pos instanceof IVec4R) parameter = (IVec4R)pos;
//addGraphic(new IPointGraphic(this));
if(graphics==null) initGraphic(s); // not init when using copy constructor
}
// Factory for this point's graphic representation; null when the mode
// draws nothing.
public IGraphicObject createGraphic(IGraphicMode m){
if(m.isNone()) return null;
return new IPointGraphic(this);
}
// --- Coordinate accessors ----------------------------------------------
// Getters return the coordinate; setters delegate to `pos` and return
// `this` for method chaining. ISwitchE/ISwitchR overloads select the
// primitive vs. boxed (IDouble) return form.
synchronized public double x(){ return pos.x(); }
synchronized public double y(){ return pos.y(); }
synchronized public double z(){ return pos.z(); }
synchronized public IPoint x(double vx){ pos.x(vx); return this; }
synchronized public IPoint y(double vy){ pos.y(vy); return this; }
synchronized public IPoint z(double vz){ pos.z(vz); return this; }
synchronized public IPoint x(IDoubleI vx){ pos.x(vx); return this; }
synchronized public IPoint y(IDoubleI vy){ pos.y(vy); return this; }
synchronized public IPoint z(IDoubleI vz){ pos.z(vz); return this; }
synchronized public IPoint x(IVecI vx){ pos.x(vx); return this; }
synchronized public IPoint y(IVecI vy){ pos.y(vy); return this; }
synchronized public IPoint z(IVecI vz){ pos.z(vz); return this; }
synchronized public IPoint x(IVec2I vx){ pos.x(vx); return this; }
synchronized public IPoint y(IVec2I vy){ pos.y(vy); return this; }
synchronized public double x(ISwitchE e){ return pos.x(e); }
synchronized public double y(ISwitchE e){ return pos.y(e); }
synchronized public double z(ISwitchE e){ return pos.z(e); }
synchronized public IDouble x(ISwitchR r){ return pos.x(r); }
synchronized public IDouble y(ISwitchR r){ return pos.y(r); }
synchronized public IDouble z(ISwitchR r){ return pos.z(r); }
// --- Conversions and raw access ----------------------------------------
// get()/pos() expose the underlying IVec directly (no copy); dup() makes
// a full copy; to2d/to4d delegate dimensional conversion to `pos`.
//synchronized public IVec get(){ return pos.get(); } // when pos is IVecI
synchronized public IVec get(){ return pos; }
/** passing position field */
synchronized public IVec pos(){ return pos; }
/** center is same with position */
synchronized public IVec center(){ return pos(); }
synchronized public IPoint dup(){ return new IPoint(this); }
synchronized public IVec2 to2d(){ return pos.to2d(); }
synchronized public IVec2 to2d(IVecI projectionDir){ return pos.to2d(projectionDir); }
synchronized public IVec2 to2d(IVecI xaxis, IVecI yaxis){ return pos.to2d(xaxis,yaxis); }
synchronized public IVec2 to2d(IVecI xaxis, IVecI yaxis, IVecI origin){ return pos.to2d(xaxis,yaxis,origin); }
synchronized public IVec4 to4d(){ return pos.to4d(); }
synchronized public IVec4 to4d(double w){ return pos.to4d(w); }
synchronized public IVec4 to4d(IDoubleI w){ return pos.to4d(w); }
synchronized public IDouble getX(){ return pos.getX(); }
synchronized public IDouble getY(){ return pos.getY(); }
synchronized public IDouble getZ(){ return pos.getZ(); }
// --- In-place vector arithmetic ----------------------------------------
// All operations mutate `pos` and return `this` for chaining.
synchronized public IPoint set(IVecI v){ pos.set(v); return this; }
synchronized public IPoint set(double x, double y, double z){ pos.set(x,y,z); return this;}
synchronized public IPoint set(IDoubleI x, IDoubleI y, IDoubleI z){ pos.set(x,y,z); return this; }
synchronized public IPoint add(double x, double y, double z){ pos.add(x,y,z); return this; }
synchronized public IPoint add(IDoubleI x, IDoubleI y, IDoubleI z){ pos.add(x,y,z); return this; }
synchronized public IPoint add(IVecI v){ pos.add(v); return this; }
synchronized public IPoint sub(double x, double y, double z){ pos.sub(x,y,z); return this; }
synchronized public IPoint sub(IDoubleI x, IDoubleI y, IDoubleI z){ pos.sub(x,y,z); return this; }
synchronized public IPoint sub(IVecI v){ pos.sub(v); return this; }
synchronized public IPoint mul(IDoubleI v){ pos.mul(v); return this; }
synchronized public IPoint mul(double v){ pos.mul(v); return this; }
synchronized public IPoint div(IDoubleI v){ pos.div(v); return this; }
synchronized public IPoint div(double v){ pos.div(v); return this; }
synchronized public IPoint neg(){ pos.neg(); return this; }
synchronized public IPoint rev(){ return neg(); }
synchronized public IPoint flip(){ return neg(); }
synchronized public IPoint zero(){ pos.zero(); return this; }
/** scale add */
synchronized public IPoint add(IVecI v, double f){ pos.add(v,f); return this; }
synchronized public IPoint add(IVecI v, IDoubleI f){ pos.add(v,f); return this; }
/** scale add alias */
synchronized public IPoint add(double f, IVecI v){ return add(v,f); }
synchronized public IPoint add(IDoubleI f, IVecI v){ return add(v,f); }
// --- Products, length and distance -------------------------------------
// dot/cross/len/dist/dist2 all delegate to `pos`; len(l)/unit() mutate
// in place and return `this`.
synchronized public double dot(IVecI v){ return pos.dot(v); }
synchronized public double dot(double vx, double vy, double vz){ return pos.dot(vx,vy,vz); }
synchronized public double dot(ISwitchE e, IVecI v){ return pos.dot(e,v); }
synchronized public IDouble dot(ISwitchR r, IVecI v){ return pos.dot(r,v); }
// creating IPoint is too much (in terms of memory occupancy)
//synchronized public IPoint cross(IVecI v){ return dup().set(pos.cross(v)); }
synchronized public IVec cross(IVecI v){ return pos.cross(v); }
synchronized public IVec cross(double vx, double vy, double vz){ return pos.cross(vx,vy,vz); }
synchronized public double len(){ return pos.len(); }
synchronized public double len(ISwitchE e){ return pos.len(e); }
synchronized public IDouble len(ISwitchR r){ return pos.len(r); }
synchronized public double len2(){ return pos.len2(); }
synchronized public double len2(ISwitchE e){ return pos.len2(e); }
synchronized public IDouble len2(ISwitchR r){ return pos.len2(r); }
synchronized public IPoint len(IDoubleI l){ pos.len(l); return this; }
synchronized public IPoint len(double l){ pos.len(l); return this; }
synchronized public IPoint unit(){ pos.unit(); return this; }
synchronized public double dist(IVecI v){ return pos.dist(v); }
synchronized public double dist(double vx, double vy, double vz){ return pos.dist(vx,vy,vz); }
synchronized public double dist(ISwitchE e, IVecI v){ return pos.dist(e,v); }
synchronized public IDouble dist(ISwitchR r, IVecI v){ return pos.dist(r,v); }
synchronized public double dist2(IVecI v){ return pos.dist2(v); }
synchronized public double dist2(double vx, double vy, double vz){ return pos.dist2(vx,vy,vz); }
synchronized public double dist2(ISwitchE e, IVecI v){ return pos.dist2(e,v); }
synchronized public IDouble dist2(ISwitchR r, IVecI v){ return pos.dist2(r,v); }
// --- Equality tests ----------------------------------------------------
// eq compares full position; eqX/eqY/eqZ compare one coordinate.
// Overloads with `tolerance` allow approximate comparison; all delegate
// to `pos`.
synchronized public boolean eq(IVecI v){ return pos.eq(v); }
synchronized public boolean eq(double vx, double vy, double vz){ return pos.eq(vx,vy,vz); }
synchronized public boolean eq(ISwitchE e, IVecI v){ return pos.eq(e,v); }
synchronized public IBool eq(ISwitchR r, IVecI v){ return pos.eq(r,v); }
synchronized public boolean eq(IVecI v, double tolerance){ return pos.eq(v,tolerance); }
synchronized public boolean eq(double vx, double vy, double vz, double tolerance){ return pos.eq(vx,vy,vz,tolerance); }
synchronized public boolean eq(ISwitchE e, IVecI v, double tolerance){ return pos.eq(e,v,tolerance); }
synchronized public IBool eq(ISwitchR r, IVecI v, IDoubleI tolerance){ return pos.eq(r,v,tolerance); }
synchronized public boolean eqX(IVecI v){ return pos.eqX(v); }
synchronized public boolean eqY(IVecI v){ return pos.eqY(v); }
synchronized public boolean eqZ(IVecI v){ return pos.eqZ(v); }
synchronized public boolean eqX(double vx){ return pos.eqX(vx); }
synchronized public boolean eqY(double vy){ return pos.eqY(vy); }
synchronized public boolean eqZ(double vz){ return pos.eqZ(vz); }
synchronized public boolean eqX(ISwitchE e, IVecI v){ return pos.eqX(e,v); }
synchronized public boolean eqY(ISwitchE e, IVecI v){ return pos.eqY(e,v); }
synchronized public boolean eqZ(ISwitchE e, IVecI v){ return pos.eqZ(e,v); }
synchronized public IBool eqX(ISwitchR r, IVecI v){ return pos.eqX(r,v); }
synchronized public IBool eqY(ISwitchR r, IVecI v){ return pos.eqY(r,v); }
synchronized public IBool eqZ(ISwitchR r, IVecI v){ return pos.eqZ(r,v); }
synchronized public boolean eqX(IVecI v, double tolerance){ return pos.eqX(v,tolerance); }
synchronized public boolean eqY(IVecI v, double tolerance){ return pos.eqY(v,tolerance); }
synchronized public boolean eqZ(IVecI v, double tolerance){ return pos.eqZ(v,tolerance); }
synchronized public boolean eqX(double vx, double tolerance){ return pos.eqX(vx,tolerance); }
synchronized public boolean eqY(double vy, double tolerance){ return pos.eqY(vy,tolerance); }
synchronized public boolean eqZ(double vz, double tolerance){ return pos.eqZ(vz,tolerance); }
synchronized public boolean eqX(ISwitchE e, IVecI v, double tolerance){ return pos.eqX(e,v,tolerance); }
synchronized public boolean eqY(ISwitchE e, IVecI v, double tolerance){ return pos.eqY(e,v,tolerance); }
synchronized public boolean eqZ(ISwitchE e, IVecI v, double tolerance){ return pos.eqZ(e,v,tolerance); }
synchronized public IBool eqX(ISwitchR r, IVecI v, IDoubleI tolerance){ return pos.eqX(r,v,tolerance); }
synchronized public IBool eqY(ISwitchR r, IVecI v, IDoubleI tolerance){ return pos.eqY(r,v,tolerance); }
synchronized public IBool eqZ(ISwitchR r, IVecI v, IDoubleI tolerance){ return pos.eqZ(r,v,tolerance); }
// --- Angle measurement --------------------------------------------------
// Angle between this position vector and `v`; overloads with `axis`
// delegate the signed-angle variant to `pos`.
synchronized public double angle(IVecI v){ return pos.angle(v); }
synchronized public double angle(double vx, double vy, double vz){ return pos.angle(vx,vy,vz); }
synchronized public double angle(ISwitchE e, IVecI v){ return pos.angle(e,v); }
synchronized public IDouble angle(ISwitchR r, IVecI v){ return pos.angle(r,v); }
synchronized public double angle(IVecI v, IVecI axis){ return pos.angle(v,axis); }
synchronized public double angle(double vx, double vy, double vz, double axisX, double axisY, double axisZ){
return pos.angle(vx,vy,vz,axisX,axisY,axisZ);
}
synchronized public double angle(ISwitchE e, IVecI v, IVecI axis){ return pos.angle(e,v,axis); }
synchronized public IDouble angle(ISwitchR r, IVecI v, IVecI axis){ return pos.angle(r,v,axis); }
// --- Rotation -----------------------------------------------------------
// rot() rotates in 3D about an optional center/axis; rot2() is the 2D
// (XY-plane) variant. All mutate `pos` and return `this`.
synchronized public IPoint rot(IDoubleI angle){ pos.rot(angle); return this; }
synchronized public IPoint rot(double angle){ pos.rot(angle); return this; }
synchronized public IPoint rot(IVecI axis, IDoubleI angle){ pos.rot(axis,angle); return this; }
synchronized public IPoint rot(IVecI axis, double angle){ pos.rot(axis,angle); return this; }
synchronized public IPoint rot(double axisX, double axisY, double axisZ, double angle){
pos.rot(axisX,axisY,axisZ,angle); return this;
}
synchronized public IPoint rot(IVecI center, IVecI axis, double angle){
pos.rot(center, axis,angle); return this;
}
synchronized public IPoint rot(double centerX, double centerY, double centerZ,
double axisX, double axisY, double axisZ, double angle){
pos.rot(centerX, centerY, centerZ, axisX, axisY, axisZ, angle); return this;
}
synchronized public IPoint rot(IVecI center, IVecI axis, IDoubleI angle){
pos.rot(center, axis,angle); return this;
}
/** Rotate to destination direction vector. */
synchronized public IPoint rot(IVecI axis, IVecI destDir){ pos.rot(axis,destDir); return this; }
/** Rotate to destination point location. */
synchronized public IPoint rot(IVecI center, IVecI axis, IVecI destPt){
pos.rot(center,axis,destPt); return this;
}
synchronized public IPoint rot2(IDoubleI angle){ pos.rot2(angle); return this; }
synchronized public IPoint rot2(double angle){ pos.rot2(angle); return this; }
synchronized public IPoint rot2(IVecI center, double angle){ pos.rot2(center, angle); return this; }
synchronized public IPoint rot2(double centerX, double centerY, double angle){
pos.rot2(centerX, centerY, angle); return this;
}
synchronized public IPoint rot2(IVecI center, IDoubleI angle){ pos.rot2(center, angle); return this; }
/** Rotate to destination direction vector. */
synchronized public IPoint rot2(IVecI destDir){ pos.rot2(destDir); return this; }
/** Rotate to destination point location. */
synchronized public IPoint rot2(IVecI center, IVecI destPt){ pos.rot2(center,destPt); return this; }
// --- Scaling ------------------------------------------------------------
// scale() is uniform scaling (alias of mul); scale1d() scales along a
// single axis direction. All mutate `pos` and return `this`.
/** alias of mul */
synchronized public IPoint scale(IDoubleI f){ pos.scale(f); return this; }
/** alias of mul */
synchronized public IPoint scale(double f){ pos.scale(f); return this; }
synchronized public IPoint scale(IVecI center, IDoubleI f){ pos.scale(center,f); return this; }
synchronized public IPoint scale(IVecI center, double f){ pos.scale(center,f); return this; }
synchronized public IPoint scale(double centerX, double centerY, double centerZ, double f){
pos.scale(centerX, centerY, centerZ, f); return this;
}
/** scale only in 1 direction */
synchronized public IPoint scale1d(IVecI axis, double f){ pos.scale1d(axis,f); return this; }
synchronized public IPoint scale1d(double axisX, double axisY, double axisZ, double f){
pos.scale1d(axisX,axisY,axisZ,f); return this;
}
synchronized public IPoint scale1d(IVecI axis, IDoubleI f){ pos.scale1d(axis,f); return this; }
synchronized public IPoint scale1d(IVecI center, IVecI axis, double f){
pos.scale1d(center,axis,f); return this;
}
synchronized public IPoint scale1d(double centerX, double centerY, double centerZ,
double axisX, double axisY, double axisZ, double f){
pos.scale1d(centerX,centerY,centerZ,axisX,axisY,axisZ,f); return this;
}
synchronized public IPoint scale1d(IVecI center, IVecI axis, IDoubleI f){
pos.scale1d(center,axis,f); return this;
}
// --- Reflection ---------------------------------------------------------
// ref() reflects across a plane given by its normal (and optional
// center); mirror() is an exact alias. All mutate `pos`.
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint ref(IVecI planeDir){ pos.ref(planeDir); return this; }
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint ref(double planeX, double planeY, double planeZ){
pos.ref(planeX,planeY,planeZ); return this;
}
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint ref(IVecI center, IVecI planeDir){
pos.ref(center,planeDir); return this;
}
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint ref(double centerX, double centerY, double centerZ,
double planeX, double planeY, double planeZ){
pos.ref(centerX,centerY,centerZ,planeX,planeY,planeZ); return this;
}
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint mirror(IVecI planeDir){ pos.ref(planeDir); return this; }
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint mirror(double planeX, double planeY, double planeZ){
pos.ref(planeX,planeY,planeZ); return this;
}
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint mirror(IVecI center, IVecI planeDir){
pos.ref(center,planeDir); return this;
}
/** reflect (mirror) 3 dimensionally to the other side of the plane */
synchronized public IPoint mirror(double centerX, double centerY, double centerZ,
double planeX, double planeY, double planeZ){
pos.ref(centerX,centerY,centerZ,planeX,planeY,planeZ); return this;
}
// --- Shear --------------------------------------------------------------
// Generic shear takes the six off-diagonal coefficients; shearXY/YZ/ZX
// shear within a single coordinate plane. All mutate `pos`.
/** shear operation */
synchronized public IPoint shear(double sxy, double syx, double syz,
double szy, double szx, double sxz){
pos.shear(sxy,syx,syz,szy,szx,sxz); return this;
}
synchronized public IPoint shear(IDoubleI sxy, IDoubleI syx, IDoubleI syz,
IDoubleI szy, IDoubleI szx, IDoubleI sxz){
pos.shear(sxy,syx,syz,szy,szx,sxz); return this;
}
synchronized public IPoint shear(IVecI center, double sxy, double syx, double syz,
double szy, double szx, double sxz){
pos.shear(center,sxy,syx,syz,szy,szx,sxz); return this;
}
synchronized public IPoint shear(IVecI center, IDoubleI sxy, IDoubleI syx, IDoubleI syz,
IDoubleI szy, IDoubleI szx, IDoubleI sxz){
pos.shear(center,sxy,syx,syz,szy,szx,sxz); return this;
}
synchronized public IPoint shearXY(double sxy, double syx){ pos.shearXY(sxy,syx); return this; }
synchronized public IPoint shearXY(IDoubleI sxy, IDoubleI syx){ pos.shearXY(sxy,syx); return this; }
synchronized public IPoint shearXY(IVecI center, double sxy, double syx){
pos.shearXY(center,sxy,syx); return this;
}
synchronized public IPoint shearXY(IVecI center, IDoubleI sxy, IDoubleI syx){
pos.shearXY(center,sxy,syx); return this;
}
synchronized public IPoint shearYZ(double syz, double szy){ pos.shearYZ(syz,szy); return this; }
synchronized public IPoint shearYZ(IDoubleI syz, IDoubleI szy){ pos.shearYZ(syz,szy); return this; }
synchronized public IPoint shearYZ(IVecI center, double syz, double szy){
pos.shearYZ(center,syz,szy); return this;
}
synchronized public IPoint shearYZ(IVecI center, IDoubleI syz, IDoubleI szy){
pos.shearYZ(center,syz,szy); return this;
}
synchronized public IPoint shearZX(double szx, double sxz){ pos.shearZX(szx,sxz); return this; }
synchronized public IPoint shearZX(IDoubleI szx, IDoubleI sxz){ pos.shearZX(szx,sxz); return this; }
synchronized public IPoint shearZX(IVecI center, double szx, double sxz){
pos.shearZX(center,szx,sxz); return this;
}
synchronized public IPoint shearZX(IVecI center, IDoubleI szx, IDoubleI sxz){
pos.shearZX(center,szx,sxz); return this;
}
// --- Translation and matrix transforms ---------------------------------
/** translate is alias of add() */
synchronized public IPoint translate(double x, double y, double z){ pos.translate(x,y,z); return this; }
synchronized public IPoint translate(IDoubleI x, IDoubleI y, IDoubleI z){ pos.translate(x,y,z); return this; }
synchronized public IPoint translate(IVecI v){ pos.translate(v); return this; }
synchronized public IPoint transform(IMatrix3I mat){ pos.transform(mat); return this; }
synchronized public IPoint transform(IMatrix4I mat){ pos.transform(mat); return this; }
synchronized public IPoint transform(IVecI xvec, IVecI yvec, IVecI zvec){
pos.transform(xvec,yvec,zvec); return this;
}
synchronized public IPoint transform(IVecI xvec, IVecI yvec, IVecI zvec, IVecI translate){
pos.transform(xvec,yvec,zvec,translate); return this;
}
/** mv() is alias of add() */
synchronized public IPoint mv(double x, double y, double z){ return add(x,y,z); }
synchronized public IPoint mv(IDoubleI x, IDoubleI y, IDoubleI z){ return add(x,y,z); }
synchronized public IPoint mv(IVecI v){ return add(v); }
// method name cp() is used as getting control point method in curve and surface but here used also as copy because of the priority of variable fitting of diversed users' mind set over the clarity of the code organization
/** cp() is alias of dup() */
synchronized public IPoint cp(){ return dup(); }
/** cp() is alias of dup().add() — copy then offset the copy. */
synchronized public IPoint cp(double x, double y, double z){ return dup().add(x,y,z); }
synchronized public IPoint cp(IDoubleI x, IDoubleI y, IDoubleI z){ return dup().add(x,y,z); }
synchronized public IPoint cp(IVecI v){ return dup().add(v); }
// --- Operations returning a NEW IVec (this point is unchanged) ----------
// methods creating new instance // returns IPoint?, not IVec?
// returns IVec, not IPoint (2011/10/12)
//synchronized public IPoint diff(IVecI v){ return dup().sub(v); }
synchronized public IVec dif(IVecI v){ return pos.dif(v); }
synchronized public IVec dif(double vx, double vy, double vz){ return pos.dif(vx,vy,vz); }
synchronized public IVec diff(IVecI v){ return dif(v); }
synchronized public IVec diff(double vx, double vy, double vz){ return dif(vx,vy,vz); }
//synchronized public IPoint mid(IVecI v){ return dup().add(v).div(2); }
synchronized public IVec mid(IVecI v){ return pos.mid(v); }
synchronized public IVec mid(double vx, double vy, double vz){ return pos.mid(vx,vy,vz); }
//synchronized public IPoint sum(IVecI v){ return dup().add(v); }
synchronized public IVec sum(IVecI v){ return pos.sum(v); }
synchronized public IVec sum(double vx, double vy, double vz){ return pos.sum(vx,vy,vz); }
//synchronized public IPoint sum(IVecI... v){ IPoint ret = this.dup(); for(IVecI vi: v) ret.add(vi); return ret; }
synchronized public IVec sum(IVecI... v){ return pos.sum(v); }
//synchronized public IPoint bisect(IVecI v){ return dup().unit().add(v.dup().unit()); }
synchronized public IVec bisect(IVecI v){ return pos.bisect(v); }
synchronized public IVec bisect(double vx, double vy, double vz){ return pos.bisect(vx,vy,vz); }
/**
weighted sum.
@return IVec
*/
//synchronized public IPoint sum(IVecI v2, double w1, double w2){ return dup().mul(w1).add(v2,w2); }
synchronized public IVec sum(IVecI v2, double w1, double w2){ return pos.sum(v2,w1,w2); }
//synchronized public IPoint sum(IVecI v2, double w2){ return dup().mul(1.0-w2).add(v2,w2); }
synchronized public IVec sum(IVecI v2, double w2){ return pos.sum(v2,w2); }
//synchronized public IPoint sum(IVecI v2, IDoubleI w1, IDoubleI w2){ return dup().mul(w1).add(v2,w2); }
/**
 * Weighted sum this*w1 + v2*w2 as a new IVec.
 * BUGFIX: previously called itself (`return sum(v2,w1,w2);`), which
 * resolved to this same overload and recursed until StackOverflowError.
 * Now delegates to pos.sum like the double-typed overloads above.
 */
synchronized public IVec sum(IVecI v2, IDoubleI w1, IDoubleI w2){ return pos.sum(v2,w1,w2); }
//synchronized public IPoint sum(IVecI v2, IDoubleI w2){ return dup().mul(new IDouble(1.0).sub(w2)).add(v2,w2); }
/**
 * Weighted sum this*(1-w2) + v2*w2 as a new IVec.
 * BUGFIX: previously self-recursive (infinite recursion); now delegates
 * to pos.sum like the double-typed overload above.
 */
synchronized public IVec sum(IVecI v2, IDoubleI w2){ return pos.sum(v2,w2); }
// --- Normal vectors -----------------------------------------------------
/** alias of cross. (not unitized ... ?) */
//synchronized public IPoint nml(IVecI v){ return cross(v); }
synchronized public IVec nml(IVecI v){ return pos.nml(v); }
synchronized public IVec nml(double vx, double vy, double vz){ return pos.nml(vx,vy,vz); }
/** create normal vector from 3 points of self, pt1 and pt2 */
//synchronized public IPoint nml(IVecI pt1, IVecI pt2){ return this.diff(pt1).cross(this.diff(pt2)).unit(); }
synchronized public IVec nml(IVecI pt1, IVecI pt2){ return pos.nml(pt1,pt2); }
synchronized public IVec nml(double vx1, double vy1, double vz1, double vx2, double vy2, double vz2){
return pos.nml(vx1,vy1,vz1,vx2,vy2,vz2);
}
/** Check that x, y and z are valid numbers (neither Infinite nor NaN); false when pos is null. */
synchronized public boolean isValid(){ if(pos==null){ return false; } return pos.isValid(); }
<|fim▁hole|> }
/** default setting in each object class; to be overridden in a child class */
public IAttribute defaultAttribute(){
IAttribute a = new IAttribute();
// Points use the configured point size as their default draw weight.
a.weight = IConfig.pointSize;
return a;
}
/** set size of dot in graphic ; it's just alias of weight() */
synchronized public IPoint setSize(double sz){ return weight(sz); }
synchronized public IPoint size(double sz){ return weight(sz); }
/*
synchronized public IPoint setSize(double sz){ return size(sz); }
synchronized public IPoint size(double sz){
for(int i=0; graphics!=null && i<graphics.size(); i++)
if(graphics.get(i) instanceof IPointGraphic)
((IPointGraphic)graphics.get(i)).size(sz);
return this;
}
*/
synchronized public double getSize(){ return size(); }
public double size(){
if(graphics==null){
IOut.err("no graphics is set"); //
return -1;
}
for(int i=0; graphics!=null && i<graphics.size(); i++)
if(graphics.get(i) instanceof IPointGraphic)
return ((IPointGraphic)graphics.get(i)).size();
return -1;
}
synchronized public IPoint name(String nm){ super.name(nm); return this; }
synchronized public IPoint layer(ILayer l){ super.layer(l); return this; }
synchronized public IPoint layer(String l){ super.layer(l); return this; }
synchronized public IPoint attr(IAttribute at){ super.attr(at); return this; }
synchronized public IPoint hide(){ super.hide(); return this; }
synchronized public IPoint show(){ super.show(); return this; }
synchronized public IPoint clr(IColor c){ super.clr(c); return this; }
synchronized public IPoint clr(IColor c, int alpha){ super.clr(c,alpha); return this; }
synchronized public IPoint clr(IColor c, float alpha){ super.clr(c,alpha); return this; }
synchronized public IPoint clr(IColor c, double alpha){ super.clr(c,alpha); return this; }
synchronized public IPoint clr(IObject o){ super.clr(o); return this; }
synchronized public IPoint clr(Color c){ super.clr(c); return this; }
synchronized public IPoint clr(Color c, int alpha){ super.clr(c,alpha); return this; }
synchronized public IPoint clr(Color c, float alpha){ super.clr(c,alpha); return this; }
synchronized public IPoint clr(Color c, double alpha){ super.clr(c,alpha); return this; }
synchronized public IPoint clr(int gray){ super.clr(gray); return this; }
synchronized public IPoint clr(float fgray){ super.clr(fgray); return this; }
synchronized public IPoint clr(double dgray){ super.clr(dgray); return this; }
synchronized public IPoint clr(int gray, int alpha){ super.clr(gray,alpha); return this; }
synchronized public IPoint clr(float fgray, float falpha){ super.clr(fgray,falpha); return this; }
synchronized public IPoint clr(double dgray, double dalpha){ super.clr(dgray,dalpha); return this; }
synchronized public IPoint clr(int r, int g, int b){ super.clr(r,g,b); return this; }
synchronized public IPoint clr(float fr, float fg, float fb){ super.clr(fr,fg,fb); return this; }
synchronized public IPoint clr(double dr, double dg, double db){ super.clr(dr,dg,db); return this; }
synchronized public IPoint clr(int r, int g, int b, int a){ super.clr(r,g,b,a); return this; }
synchronized public IPoint clr(float fr, float fg, float fb, float fa){ super.clr(fr,fg,fb,fa); return this; }
synchronized public IPoint clr(double dr, double dg, double db, double da){ super.clr(dr,dg,db,da); return this; }
synchronized public IPoint hsb(float h, float s, float b, float a){ super.hsb(h,s,b,a); return this; }
synchronized public IPoint hsb(double h, double s, double b, double a){ super.hsb(h,s,b,a); return this; }
synchronized public IPoint hsb(float h, float s, float b){ super.hsb(h,s,b); return this; }
synchronized public IPoint hsb(double h, double s, double b){ super.hsb(h,s,b); return this; }
synchronized public IPoint setColor(IColor c){ super.setColor(c); return this; }
synchronized public IPoint setColor(IColor c, int alpha){ super.setColor(c,alpha); return this; }
synchronized public IPoint setColor(IColor c, float alpha){ super.setColor(c,alpha); return this; }
synchronized public IPoint setColor(IColor c, double alpha){ super.setColor(c,alpha); return this; }
synchronized public IPoint setColor(Color c){ super.setColor(c); return this; }
synchronized public IPoint setColor(Color c, int alpha){ super.setColor(c,alpha); return this; }
synchronized public IPoint setColor(Color c, float alpha){ super.setColor(c,alpha); return this; }
synchronized public IPoint setColor(Color c, double alpha){ super.setColor(c,alpha); return this; }
synchronized public IPoint setColor(int gray){ super.setColor(gray); return this; }
synchronized public IPoint setColor(float fgray){ super.setColor(fgray); return this; }
synchronized public IPoint setColor(double dgray){ super.setColor(dgray); return this; }
synchronized public IPoint setColor(int gray, int alpha){ super.setColor(gray,alpha); return this; }
synchronized public IPoint setColor(float fgray, float falpha){ super.setColor(fgray,falpha); return this; }
synchronized public IPoint setColor(double dgray, double dalpha){ super.setColor(dgray,dalpha); return this; }
synchronized public IPoint setColor(int r, int g, int b){ super.setColor(r,g,b); return this; }
synchronized public IPoint setColor(float fr, float fg, float fb){ super.setColor(fr,fg,fb); return this; }
synchronized public IPoint setColor(double dr, double dg, double db){ super.setColor(dr,dg,db); return this; }
synchronized public IPoint setColor(int r, int g, int b, int a){ super.setColor(r,g,b,a); return this; }
synchronized public IPoint setColor(float fr, float fg, float fb, float fa){ super.setColor(fr,fg,fb,fa); return this; }
synchronized public IPoint setColor(double dr, double dg, double db, double da){ super.setColor(dr,dg,db,da); return this; }
synchronized public IPoint setHSBColor(float h, float s, float b, float a){ super.setHSBColor(h,s,b,a); return this; }
synchronized public IPoint setHSBColor(double h, double s, double b, double a){ super.setHSBColor(h,s,b,a); return this; }
synchronized public IPoint setHSBColor(float h, float s, float b){ super.setHSBColor(h,s,b); return this; }
synchronized public IPoint setHSBColor(double h, double s, double b){ super.setHSBColor(h,s,b); return this; }
synchronized public IPoint weight(double w){ super.weight(w); return this; }
synchronized public IPoint weight(float w){ super.weight(w); return this; }
}<|fim▁end|> |
synchronized public String toString(){
if(pos==null) return super.toString();
return pos.toString(); |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>// Get dependencies
const express = require('express');
const path = require('path');
const http = require('http');
const bodyParser = require('body-parser');
// Get our API routes
const api = require('./server/routes/api');
<|fim▁hole|>app.use(bodyParser.urlencoded({ extended: false }));
// Point static path to dist
app.use(express.static(path.join(__dirname, 'dist')));
// Set our api routes
app.use('/api', api);
// Catch all other routes and return the index file
app.get('*', (req, res) => {
res.sendFile(path.join(__dirname, 'dist/index.html'));
});
/**
* Get port from environment and store in Express.
*/
const port = process.env.PORT || '3000';
app.set('port', port);
/**
* Create HTTP server.
*/
const server = http.createServer(app);
/**
* Listen on provided port, on all network interfaces.
*/
server.listen(port, () => console.log(`API running on localhost:${port}`));<|fim▁end|> | const app = express();
// Parsers for POST data
app.use(bodyParser.json()); |
<|file_name|>xxx.js<|end_file_name|><|fim▁begin|>onst bcrypt = require('bcrypt-nodejs');
const crypto = require('crypto');
console.log('start');
<|fim▁hole|> console.log(hash);
});
});<|fim▁end|> | bcrypt.genSalt(10, (err, salt) => {
bcrypt.hash('passwd', salt, null, (err, hash) => { |
<|file_name|>validation.ts<|end_file_name|><|fim▁begin|>/*
* Copyright © 2018 Lisk Foundation
*
* See the LICENSE file at the top-level directory of this distribution
* for licensing information.
*
* Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
* no part of this software, including this file, may be copied, modified,
* propagated, or distributed except according to the terms contained in the
* LICENSE file.
*
* Removal or modification of this copyright notice is prohibited.
*
*/
import * as BigNum from '@liskhq/bignum';
import { hexToBuffer } from '@liskhq/lisk-cryptography';
import {
gte as isVersionGte,
gtr as isGreaterThanVersionInRange,
ltr as isLessThanVersionInRange,<|fim▁hole|> valid as isValidVersion,
validRange as isValidRangeVersion,
} from 'semver';
import * as validator from 'validator';
import {
MAX_EIGHT_BYTE_NUMBER,
MAX_INT64,
MAX_PUBLIC_KEY_LENGTH,
} from './constants';
export const isNullCharacterIncluded = (input: string): boolean =>
new RegExp('\\0|\\U00000000').test(input);
export const isUsername = (username: string): boolean => {
if (isNullCharacterIncluded(username)) {
return false;
}
if (username !== username.trim().toLowerCase()) {
return false;
}
if (/^[0-9]{1,21}[L|l]$/g.test(username)) {
return false;
}
if (!/^[a-z0-9!@$&_.]+$/g.test(username)) {
return false;
}
return true;
};
export const isSignature = (signature: string): boolean =>
/^[a-f0-9]{128}$/i.test(signature);
export const isGreaterThanZero = (amount: BigNum): boolean => amount.cmp(0) > 0;
export const isGreaterThanMaxTransactionAmount = (amount: BigNum): boolean =>
amount.cmp(MAX_INT64) > 0;
export const isGreaterThanMaxTransactionId = (id: BigNum): boolean =>
id.cmp(MAX_EIGHT_BYTE_NUMBER) > 0;
export const isNumberString = (num: string): boolean => {
if (typeof num !== 'string') {
return false;
}
return validator.isInt(num);
};
export const isValidInteger = (num: unknown): boolean =>
typeof num === 'number' ? Math.floor(num) === num : false;
export const hasNoDuplicate = (values: ReadonlyArray<string>): boolean => {
const unique = [...new Set(values)];
return unique.length === values.length;
};
export const isStringBufferLessThan = (data: unknown, max: number): boolean => {
if (typeof data !== 'string') {
return false;
}
return Buffer.from(data).length <= max;
};
export const isHexString = (data: unknown): boolean => {
if (typeof data !== 'string') {
return false;
}
return data === '' || /^[a-f0-9]+$/i.test(data);
};
export const isEncryptedPassphrase = (data: string): boolean => {
// Explanation of regex structure:
// - 1 or more 'key=value' pairs delimited with '&'
// Examples:
// - cipherText=abcd1234
// - cipherText=abcd1234&iterations=10000&iv=ef012345
// NOTE: Maximum lengths chosen here are arbitrary
const keyRegExp = /[a-zA-Z0-9]{2,15}/;
const valueRegExp = /[a-f0-9]{1,256}/;
const keyValueRegExp = new RegExp(
`${keyRegExp.source}=${valueRegExp.source}`,
);
const encryptedPassphraseRegExp = new RegExp(
`^(${keyValueRegExp.source})(?:&(${keyValueRegExp.source})){0,10}$`,
);
return encryptedPassphraseRegExp.test(data);
};
export const isSemVer = (version: string): boolean => !!isValidVersion(version);
export const isRangedSemVer = (version: string): boolean =>
!!isValidRangeVersion(version);
export const isLessThanRangedVersion = isLessThanVersionInRange;
export const isGreaterThanRangedVersion = isGreaterThanVersionInRange;
export const isProtocolString = (data: string) =>
/^(\d|[1-9]\d{1,2})\.(\d|[1-9]\d{1,2})$/.test(data);
const IPV4_NUMBER = 4;
const IPV6_NUMBER = 6;
export const isIPV4 = (data: string): boolean =>
validator.isIP(data, IPV4_NUMBER);
export const isIPV6 = (data: string): boolean =>
validator.isIP(data, IPV6_NUMBER);
export const isIP = (data: string): boolean => isIPV4(data) || isIPV6(data);
export const isPort = (port: string) => validator.isPort(port);
export const validatePublicKeysForDuplicates = (
publicKeys: ReadonlyArray<string>,
): boolean =>
publicKeys.every((element, index) => {
if (publicKeys.slice(index + 1).includes(element)) {
throw new Error(`Duplicated public key: ${publicKeys[index]}.`);
}
return true;
});
export const isStringEndsWith = (
target: string,
suffixes: ReadonlyArray<string>,
): boolean => suffixes.some(suffix => target.endsWith(suffix));
export const isVersionMatch = isVersionGte;
export const validatePublicKey = (publicKey: string): boolean => {
const publicKeyBuffer = hexToBuffer(publicKey);
if (publicKeyBuffer.length !== MAX_PUBLIC_KEY_LENGTH) {
throw new Error(
`Public key ${publicKey} length differs from the expected 32 bytes for a public key.`,
);
}
return true;
};
export const validatePublicKeys = (
publicKeys: ReadonlyArray<string>,
): boolean =>
publicKeys.every(validatePublicKey) &&
validatePublicKeysForDuplicates(publicKeys);
export const validateKeysgroup = (
keysgroup: ReadonlyArray<string>,
min: number,
max: number,
): boolean => {
if (keysgroup.length < min || keysgroup.length > max) {
throw new Error(
`Expected between ${min} and ${max} public keys in the keysgroup.`,
);
}
return validatePublicKeys(keysgroup);
};
const MIN_ADDRESS_LENGTH = 2;
const MAX_ADDRESS_LENGTH = 22;
const BASE_TEN = 10;
export const validateAddress = (address: string): boolean => {
if (
address.length < MIN_ADDRESS_LENGTH ||
address.length > MAX_ADDRESS_LENGTH
) {
throw new Error(
'Address length does not match requirements. Expected between 2 and 22 characters.',
);
}
if (address[address.length - 1] !== 'L') {
throw new Error(
'Address format does not match requirements. Expected "L" at the end.',
);
}
if (address.includes('.')) {
throw new Error(
'Address format does not match requirements. Address includes invalid character: `.`.',
);
}
const addressString = address.slice(0, -1);
const addressNumber = new BigNum(addressString);
if (addressNumber.cmp(new BigNum(MAX_EIGHT_BYTE_NUMBER)) > 0) {
throw new Error(
'Address format does not match requirements. Address out of maximum range.',
);
}
if (addressString !== addressNumber.toString(BASE_TEN)) {
throw new Error(
"Address string format does not match it's number representation.",
);
}
return true;
};
export const validateNonTransferAmount = (data: string): boolean =>
isNumberString(data) && data === '0';
export const validateTransferAmount = (data: string): boolean =>
isNumberString(data) &&
isGreaterThanZero(new BigNum(data)) &&
!isGreaterThanMaxTransactionAmount(new BigNum(data));
export const validateFee = (data: string): boolean =>
isNumberString(data) &&
isGreaterThanZero(new BigNum(data)) &&
!isGreaterThanMaxTransactionAmount(new BigNum(data));<|fim▁end|> | |
<|file_name|>a_quick_test5.py<|end_file_name|><|fim▁begin|>"""
This example uses OpenGL via Pyglet and draws
a bunch of rectangles on the screen.
"""
import random
import time
import pyglet.gl as GL
import pyglet
import ctypes
# Set up the constants
SCREEN_WIDTH = 700
SCREEN_HEIGHT = 500
RECT_WIDTH = 50
RECT_HEIGHT = 50
class Shape():
def __init__(self):
self.x = 0
self.y = 0
class VertexBuffer():
""" Class to hold vertex buffer info. """
def __init__(self, vbo_id, size):
self.vbo_id = vbo_id
self.size = size
def add_rect(rect_list, x, y, width, height, color):
""" Create a vertex buffer for a rectangle. """
rect_list.extend([-width / 2, -height / 2,
width / 2, -height / 2,
width / 2, height / 2,
-width / 2, height / 2])
def create_vbo_for_rects(v2f):
vbo_id = GL.GLuint()
GL.glGenBuffers(1, ctypes.pointer(vbo_id))
data2 = (GL.GLfloat*len(v2f))(*v2f)
GL.glBindBuffer(GL.GL_ARRAY_BUFFER, vbo_id)
GL.glBufferData(GL.GL_ARRAY_BUFFER, ctypes.sizeof(data2), data2,
GL.GL_STATIC_DRAW)
shape = VertexBuffer(vbo_id, len(v2f)//2)
return shape
def render_rect_filled(shape, x, y):
""" Render the shape at the right spot. """
# Set color
GL.glDisable(GL.GL_BLEND)
GL.glColor4ub(shape.color[0], shape.color[1], shape.color[2], 255)
GL.glBindBuffer(GL.GL_ARRAY_BUFFER, shape.vbo_id)
GL.glVertexPointer(2, GL.GL_FLOAT, 0, 0)
GL.glLoadIdentity()
GL.glTranslatef(x + shape.width / 2, y + shape.height / 2, 0)
<|fim▁hole|>
class MyApplication():
""" Main application class. """
def setup(self):
""" Set up the game and initialize the variables. """
# Set background to white
GL.glClearColor(1, 1, 1, 1)
self.rect_list = []
self.shape_list = []
for i in range(2000):
x = random.randrange(0, SCREEN_WIDTH)
y = random.randrange(0, SCREEN_HEIGHT)
width = random.randrange(20, 71)
height = random.randrange(20, 71)
d_x = random.randrange(-3, 4)
d_y = random.randrange(-3, 4)
red = random.randrange(256)
blue = random.randrange(256)
green = random.randrange(256)
alpha = random.randrange(256)
color = (red, blue, green, alpha)
shape = Shape()
shape.x = x
shape.y = y
self.shape_list.append(shape)
add_rect(self.rect_list, 0, 0, width, height, color)
print("Creating vbo for {} vertices.".format(len(self.rect_list) // 2))
self.rect_vbo = create_vbo_for_rects(self.rect_list)
print("VBO {}".format(self.rect_vbo.vbo_id))
def animate(self, dt):
""" Move everything """
pass
def on_draw(self):
"""
Render the screen.
"""
start = time.time()
float_size = ctypes.sizeof(ctypes.c_float)
record_len = 10 * float_size
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glEnableClientState(GL.GL_VERTEX_ARRAY)
GL.glColor4ub(255, 0, 0, 255)
GL.glBindBuffer(GL.GL_ARRAY_BUFFER, self.rect_vbo.vbo_id)
GL.glVertexPointer(2, GL.GL_FLOAT, record_len, 0)
for i in range(len(self.shape_list)):
shape = self.shape_list[i]
GL.glLoadIdentity()
GL.glTranslatef(shape.x, shape.y, 0)
GL.glDrawArrays(GL.GL_QUADS, i * 8, 8)
# GL.glDrawArrays(GL.GL_QUADS,
# 0,
# self.rect_vbo.size)
elapsed = time.time() - start
print(elapsed)
def main():
window = pyglet.window.Window(SCREEN_WIDTH, SCREEN_HEIGHT)
app = MyApplication()
app.setup()
pyglet.clock.schedule_interval(app.animate, 1/60)
@window.event
def on_draw():
window.clear()
app.on_draw()
pyglet.app.run()
main()<|fim▁end|> | GL.glDrawArrays(GL.GL_QUADS, 0, shape.size)
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|> ChartReaderingView, export_view, ListReaderingView, DeleteBeehiveView, \
ModifyBeehiveView, summary_view
urlpatterns = [
url(r'^ajouter$', AddBeehiveView.as_view(), name='add-beehive'),
url(r'^(?P<pk>\d+)/$', summary_view, name='summary'),
url(r'^(?P<pk>\d+)/voir/tableau/$', ListReaderingView.as_view(),
name='table'),
url(r'^(?P<pk>\d+)/voir/graphiques/$', ChartReaderingView.as_view(),
name='charts'),
url(r'^(?P<pk>\d+)/exporter/$', export_view, name='export'),
url(r'^(?P<pk>\d+)/modifier/$', ModifyBeehiveView.as_view(),
name='modify-beehive'),
url(r'^(?P<pk>\d+)/supprimer/$', DeleteBeehiveView.as_view(),
name='delete-beehive'),
url(r'^supprimer-releve/(?P<pk>\d+)/$', delete_readering_view,
name='delete-readering'),
]<|fim▁end|> | from django.conf.urls import url
from rpi.beehive.views import AddBeehiveView, delete_readering_view, \ |
<|file_name|>test_response.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from emojibot.utils.response import Response
<|fim▁hole|> response = Response()
assert isinstance(response, Response)<|fim▁end|> | def test_constructor(): |
<|file_name|>test_shared_network_extension.py<|end_file_name|><|fim▁begin|># Copyright 2015 Hewlett-Packard Development Company, L.P.dsvsv
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.tests.api import base
from neutron.tests.tempest import config
from neutron.tests.tempest import test
from tempest_lib.common.utils import data_utils
CONF = config.CONF
class SharedNetworksTest(base.BaseAdminNetworkTest):
@classmethod
def resource_setup(cls):
super(SharedNetworksTest, cls).resource_setup()
cls.shared_network = cls.create_shared_network()
@test.idempotent_id('6661d219-b96d-4597-ad10-55766ce4abf7')
def test_create_update_shared_network(self):
shared_network = self.create_shared_network()
net_id = shared_network['id']
self.assertEqual('ACTIVE', shared_network['status'])
self.assertIsNotNone(shared_network['id'])
self.assertTrue(self.shared_network['shared'])
new_name = "New_shared_network"
body = self.admin_client.update_network(net_id, name=new_name,
admin_state_up=False,
shared=False)
updated_net = body['network']
self.assertEqual(new_name, updated_net['name'])
self.assertFalse(updated_net['shared'])
self.assertFalse(updated_net['admin_state_up'])
@test.idempotent_id('9c31fabb-0181-464f-9ace-95144fe9ca77')
def test_create_port_shared_network_as_non_admin_tenant(self):
# create a port as non admin
body = self.client.create_port(network_id=self.shared_network['id'])
port = body['port']
self.addCleanup(self.admin_client.delete_port, port['id'])
# verify the tenant id of admin network and non admin port
self.assertNotEqual(self.shared_network['tenant_id'],
port['tenant_id'])
@test.idempotent_id('3e39c4a6-9caf-4710-88f1-d20073c6dd76')
def test_create_bulk_shared_network(self):
# Creates 2 networks in one request
net_nm = [data_utils.rand_name('network'),
data_utils.rand_name('network')]
body = self.admin_client.create_bulk_network(net_nm, shared=True)
created_networks = body['networks']
for net in created_networks:
self.addCleanup(self.admin_client.delete_network, net['id'])
self.assertIsNotNone(net['id'])
self.assertTrue(net['shared'])
def _list_shared_networks(self, user):
body = user.list_networks(shared=True)
networks_list = [net['id'] for net in body['networks']]
self.assertIn(self.shared_network['id'], networks_list)
self.assertTrue(self.shared_network['shared'])
@test.idempotent_id('a064a9fd-e02f-474a-8159-f828cd636a28')
def test_list_shared_networks(self):
# List the shared networks and confirm that
# shared network extension attribute is returned for those networks
# that are created as shared
self._list_shared_networks(self.admin_client)
self._list_shared_networks(self.client)
def _show_shared_network(self, user):
body = user.show_network(self.shared_network['id'])
show_shared_net = body['network']
self.assertEqual(self.shared_network['name'], show_shared_net['name'])<|fim▁hole|> @test.idempotent_id('e03c92a2-638d-4bfa-b50a-b1f66f087e58')
def test_show_shared_networks_attribute(self):
# Show a shared network and confirm that
# shared network extension attribute is returned.
self._show_shared_network(self.admin_client)
self._show_shared_network(self.client)<|fim▁end|> | self.assertEqual(self.shared_network['id'], show_shared_net['id'])
self.assertTrue(show_shared_net['shared'])
|
<|file_name|>generate.rs<|end_file_name|><|fim▁begin|>//! Generate valid parse trees.
use grammar::repr::*;
use rand::{self, Rng};
use std::iter::Iterator;
#[derive(PartialEq, Eq)]
pub enum ParseTree {
Nonterminal(NonterminalString, Vec<ParseTree>),
Terminal(TerminalString),
}
pub fn random_parse_tree(grammar: &Grammar, symbol: NonterminalString) -> ParseTree {
let mut gen = Generator {
grammar: grammar,
rng: rand::thread_rng(),
depth: 0,
};
loop {
// sometimes, the random walk overflows the stack, so we have a max, and if
// it is exceeded, we just try again
if let Some(result) = gen.nonterminal(symbol.clone()) {
return result;
}
gen.depth = 0;
}
}
struct Generator<'grammar> {
grammar: &'grammar Grammar,
rng: rand::rngs::ThreadRng,
depth: u32,
}
const MAX_DEPTH: u32 = 10000;
impl<'grammar> Generator<'grammar> {
fn nonterminal(&mut self, nt: NonterminalString) -> Option<ParseTree> {
if self.depth > MAX_DEPTH {
return None;
}
self.depth += 1;
let productions = self.grammar.productions_for(&nt);
let index: usize = self.rng.gen_range(0, productions.len());
let production = &productions[index];
let trees: Option<Vec<_>> = production
.symbols
.iter()
.map(|sym| self.symbol(sym.clone()))
.collect();
trees.map(|trees| ParseTree::Nonterminal(nt, trees))
}
fn symbol(&mut self, symbol: Symbol) -> Option<ParseTree> {
match symbol {
Symbol::Nonterminal(nt) => self.nonterminal(nt),
Symbol::Terminal(t) => Some(ParseTree::Terminal(t)),
}
}
}
impl ParseTree {
pub fn terminals(&self) -> Vec<TerminalString> {
let mut vec = vec![];
self.push_terminals(&mut vec);
vec
}
fn push_terminals(&self, vec: &mut Vec<TerminalString>) {
match *self {
ParseTree::Terminal(ref s) => vec.push(s.clone()),
ParseTree::Nonterminal(_, ref trees) => {
for tree in trees {
tree.push_terminals(vec);
}
}<|fim▁hole|><|fim▁end|> | }
}
} |
<|file_name|>SeekBarPreference.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013 The OmniROM Project
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.android.systemui.tuner;
import android.content.Context;
import android.content.res.TypedArray;
import android.preference.Preference;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.SeekBar;
import android.widget.SeekBar.OnSeekBarChangeListener;
import android.widget.TextView;
import com.android.systemui.R;
public class SeekBarPreference extends Preference implements
OnSeekBarChangeListener {
public static int maximum = 100;
public static int interval = 5;
private TextView monitorBox;
private SeekBar bar;
int currentValue = 100;
private OnPreferenceChangeListener changer;
public SeekBarPreference(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
protected View onCreateView(ViewGroup parent) {
View layout = View.inflate(getContext(), R.layout.qs_slider_preference,
null);
monitorBox = (TextView) layout.findViewById(R.id.monitor_box);
bar = (SeekBar) layout.findViewById(R.id.seek_bar);
bar.setProgress(currentValue);
monitorBox.setText(String.valueOf(currentValue) + "%");
bar.setOnSeekBarChangeListener(this);
return layout;
}
public void setInitValue(int progress) {
currentValue = progress;
}
@Override
protected Object onGetDefaultValue(TypedArray a, int index) {
// TODO Auto-generated method stub
return super.onGetDefaultValue(a, index);
}
@Override
public void setOnPreferenceChangeListener(
OnPreferenceChangeListener onPreferenceChangeListener) {
changer = onPreferenceChangeListener;
super.setOnPreferenceChangeListener(onPreferenceChangeListener);
}
@Override
public void onProgressChanged(SeekBar seekBar, int progress,
boolean fromUser) {
progress = Math.round(((float) progress) / interval) * interval;
currentValue = progress;
monitorBox.setText(String.valueOf(progress) + "%");
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {
}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {<|fim▁hole|><|fim▁end|> | changer.onPreferenceChange(this, Integer.toString(currentValue));
}
} |
<|file_name|>MimeMapperTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mss.internal.mime;
import org.junit.Assert;
import org.junit.Test;
/**
* Test the functionality of MimeMapper
*/
public class MimeMapperTest {
@Test
public void testMimeMappingForKnownExtension() throws MimeMappingException {
String mimeType = MimeMapper.getMimeType("png");
Assert.assertEquals("image/png", mimeType);
}
@Test(expected = MimeMappingException.class)
public void testMimeMappingForUnknownExtension() throws MimeMappingException {
MimeMapper.getMimeType("unknownext");<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>Dot.js<|end_file_name|><|fim▁begin|>var React = require('react-native');
var {
StyleSheet,
View,
Animated,
} = React;
var Dot = React.createClass({
propTypes: {
isPlacedCorrectly: React.PropTypes.bool.isRequired,
},
getInitialState: function() {
return {
scale: new Animated.Value(this.props.isPlacedCorrectly ? 1 : 0.1),
visible: this.props.isPlacedCorrectly,
};
},
componentWillReceiveProps: function(nextProps) {
if (!this.props.isPlacedCorrectly && nextProps.isPlacedCorrectly) {
this.animateShow();
} else if (this.props.isPlacedCorrectly && !nextProps.isPlacedCorrectly) {
this.animateHide();
}
},
animateShow: function() {
this.setState({visible: true}, () => {
Animated.timing(this.state.scale, {
toValue: 1,
duration: 100,
}).start();
});
},
animateHide: function() {
Animated.timing(this.state.scale, {
toValue: 0.1,
duration: 100,
}).start(() => this.setState({visible: false}));
},
render: function() {<|fim▁hole|> if (!this.state.visible) {
return null;
}
return (
<Animated.View style={[styles.dot, {transform: [{scale: this.state.scale}]}]}/>
);
},
});
var styles = StyleSheet.create({
dot: {
backgroundColor: '#FF3366',
width: 6,
height: 6,
borderRadius: 3,
margin: 3,
},
});
module.exports = Dot;<|fim▁end|> | |
<|file_name|>news_u.cpp<|end_file_name|><|fim▁begin|>// Copyright 2014 Citra Emulator Project<|fim▁hole|>
#include "common/log.h"
#include "core/hle/hle.h"
#include "core/hle/service/news_u.h"
////////////////////////////////////////////////////////////////////////////////////////////////////
// Namespace NEWS_U
namespace NEWS_U {
const Interface::FunctionInfo FunctionTable[] = {
{0x000100C8, nullptr, "AddNotification"},
};
////////////////////////////////////////////////////////////////////////////////////////////////////
// Interface class
Interface::Interface() {
Register(FunctionTable, ARRAY_SIZE(FunctionTable));
}
} // namespace<|fim▁end|> | // Licensed under GPLv2 or any later version
// Refer to the license.txt file included. |
<|file_name|>_bgcolor.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class BgcolorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="bgcolor", parent_name="sankey.node.hoverlabel", **kwargs<|fim▁hole|> super(BgcolorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
**kwargs
)<|fim▁end|> | ): |
<|file_name|>pool.go<|end_file_name|><|fim▁begin|>package pool
import (
"context"
"errors"
"net"
"sync"
"sync/atomic"
"time"
"github.com/go-redis/redis/v8/internal"
)
var (
// ErrClosed performs any operation on the closed client will return this error.
ErrClosed = errors.New("redis: client is closed")
// ErrPoolTimeout timed out waiting to get a connection from the connection pool.
ErrPoolTimeout = errors.New("redis: connection pool timeout")
)
var timers = sync.Pool{
New: func() interface{} {
t := time.NewTimer(time.Hour)
t.Stop()
return t
},
}
// Stats contains pool state information and accumulated stats.
type Stats struct {
Hits uint32 // number of times free connection was found in the pool
Misses uint32 // number of times free connection was NOT found in the pool
Timeouts uint32 // number of times a wait timeout occurred
TotalConns uint32 // number of total connections in the pool
IdleConns uint32 // number of idle connections in the pool
StaleConns uint32 // number of stale connections removed from the pool
}
// Pooler is the interface implemented by connection pools (see ConnPool).
type Pooler interface {
	NewConn(context.Context) (*Conn, error) // dial a tracked, non-reusable connection
	CloseConn(*Conn) error                  // detach a connection from bookkeeping and close it

	Get(context.Context) (*Conn, error)   // acquire a connection (idle or freshly dialed)
	Put(context.Context, *Conn)           // return a connection for reuse
	Remove(context.Context, *Conn, error) // discard a connection

	Len() int      // total tracked connections
	IdleLen() int  // idle connections
	Stats() *Stats // snapshot of accumulated statistics

	Close() error
}
// Options configures a ConnPool.
type Options struct {
	Dialer  func(context.Context) (net.Conn, error) // opens new network connections
	OnClose func(*Conn) error                       // invoked before a connection is closed

	PoolFIFO           bool          // pop idle connections oldest-first instead of newest-first
	PoolSize           int           // maximum simultaneous connections (also caps "turns")
	MinIdleConns       int           // keep at least this many idle connections open
	MaxConnAge         time.Duration // close connections older than this (0 = no limit)
	PoolTimeout        time.Duration // how long Get waits for a free slot
	IdleTimeout        time.Duration // close connections idle longer than this (0 = no limit)
	IdleCheckFrequency time.Duration // how often the reaper scans for stale connections
}
// lastDialErrorWrap boxes the last dial error so it can be stored in an
// atomic.Value, which requires every Store to use the same concrete type.
type lastDialErrorWrap struct {
	err error
}
// ConnPool is a connection pool whose concurrency is bounded by the buffered
// queue channel (capacity Options.PoolSize).
type ConnPool struct {
	opt *Options

	dialErrorsNum uint32 // atomic; consecutive dial failures

	lastDialError atomic.Value // holds *lastDialErrorWrap

	queue chan struct{} // one element per in-use "turn"

	connsMu      sync.Mutex // guards conns, idleConns, poolSize, idleConnsLen
	conns        []*Conn
	idleConns    []*Conn
	poolSize     int
	idleConnsLen int

	stats Stats

	_closed  uint32 // atomic
	closedCh chan struct{} // closed by Close to stop the reaper
}
// Compile-time check that *ConnPool satisfies Pooler.
var _ Pooler = (*ConnPool)(nil)
// NewConnPool creates a pool configured by opt, pre-dials the minimum idle
// connections, and starts the stale-connection reaper when both an idle
// timeout and a check frequency are configured.
func NewConnPool(opt *Options) *ConnPool {
	pool := &ConnPool{
		opt: opt,

		queue:     make(chan struct{}, opt.PoolSize),
		conns:     make([]*Conn, 0, opt.PoolSize),
		idleConns: make([]*Conn, 0, opt.PoolSize),
		closedCh:  make(chan struct{}),
	}

	pool.connsMu.Lock()
	pool.checkMinIdleConns()
	pool.connsMu.Unlock()

	if opt.IdleTimeout > 0 && opt.IdleCheckFrequency > 0 {
		go pool.reaper(opt.IdleCheckFrequency)
	}

	return pool
}
// checkMinIdleConns tops the idle list back up to Options.MinIdleConns,
// dialing in background goroutines. The counters are incremented
// optimistically before the dial and rolled back if it fails.
// Callers must hold p.connsMu.
func (p *ConnPool) checkMinIdleConns() {
	if p.opt.MinIdleConns == 0 {
		return
	}
	for p.poolSize < p.opt.PoolSize && p.idleConnsLen < p.opt.MinIdleConns {
		p.poolSize++
		p.idleConnsLen++
		go func() {
			err := p.addIdleConn()
			if err != nil {
				// Dial failed: undo the optimistic accounting.
				p.connsMu.Lock()
				p.poolSize--
				p.idleConnsLen--
				p.connsMu.Unlock()
			}
		}()
	}
}
// addIdleConn dials a fresh pooled connection and places it directly on the
// idle list.
func (p *ConnPool) addIdleConn() error {
	cn, err := p.dialConn(context.TODO(), true)
	if err != nil {
		return err
	}

	p.connsMu.Lock()
	defer p.connsMu.Unlock()

	p.conns = append(p.conns, cn)
	p.idleConns = append(p.idleConns, cn)
	return nil
}
// NewConn dials a connection that the pool tracks but never reuses
// (pooled=false, so Put will remove rather than recycle it).
func (p *ConnPool) NewConn(ctx context.Context) (*Conn, error) {
	return p.newConn(ctx, false)
}
// newConn dials and registers a connection. When pooled is true it counts
// against poolSize; if the pool is already full the connection is marked
// non-pooled so it gets discarded on the next Put.
func (p *ConnPool) newConn(ctx context.Context, pooled bool) (*Conn, error) {
	cn, err := p.dialConn(ctx, pooled)
	if err != nil {
		return nil, err
	}

	p.connsMu.Lock()
	p.conns = append(p.conns, cn)
	if pooled {
		// If pool is full remove the cn on next Put.
		if p.poolSize >= p.opt.PoolSize {
			cn.pooled = false
		} else {
			p.poolSize++
		}
	}
	p.connsMu.Unlock()

	return cn, nil
}
// dialConn opens a single connection. After PoolSize consecutive dial
// failures it fails fast with the last recorded error while a background
// tryDial goroutine probes until the server is reachable again.
func (p *ConnPool) dialConn(ctx context.Context, pooled bool) (*Conn, error) {
	if p.closed() {
		return nil, ErrClosed
	}

	if atomic.LoadUint32(&p.dialErrorsNum) >= uint32(p.opt.PoolSize) {
		return nil, p.getLastDialError()
	}

	netConn, err := p.opt.Dialer(ctx)
	if err != nil {
		p.setLastDialError(err)
		// Exactly one caller crosses the threshold and starts the prober.
		if atomic.AddUint32(&p.dialErrorsNum, 1) == uint32(p.opt.PoolSize) {
			go p.tryDial()
		}
		return nil, err
	}

	cn := NewConn(netConn)
	cn.pooled = pooled
	return cn, nil
}
// tryDial probes the server once per second until a dial succeeds, then
// clears the consecutive-failure counter so dialConn stops failing fast.
func (p *ConnPool) tryDial() {
	for !p.closed() {
		conn, err := p.opt.Dialer(context.Background())
		if err == nil {
			atomic.StoreUint32(&p.dialErrorsNum, 0)
			_ = conn.Close()
			return
		}

		p.setLastDialError(err)
		time.Sleep(time.Second)
	}
}
// setLastDialError records err for getLastDialError, boxed so the
// atomic.Value always stores the same concrete type.
func (p *ConnPool) setLastDialError(err error) {
	p.lastDialError.Store(&lastDialErrorWrap{err: err})
}
// getLastDialError returns the most recent dial error, or nil if none was
// ever recorded.
func (p *ConnPool) getLastDialError() error {
	if wrapped, ok := p.lastDialError.Load().(*lastDialErrorWrap); ok {
		return wrapped.err
	}
	return nil
}
// Get returns existed connection from the pool or creates a new one.
// It first acquires a "turn" (slot in p.queue); the turn is later released
// by Put/Remove, or here immediately when dialing a new connection fails.
func (p *ConnPool) Get(ctx context.Context) (*Conn, error) {
	if p.closed() {
		return nil, ErrClosed
	}

	if err := p.waitTurn(ctx); err != nil {
		return nil, err
	}

	for {
		p.connsMu.Lock()
		cn := p.popIdle()
		p.connsMu.Unlock()

		if cn == nil {
			break
		}

		// Drop stale idle connections until a usable one is found.
		if p.isStaleConn(cn) {
			_ = p.CloseConn(cn)
			continue
		}

		atomic.AddUint32(&p.stats.Hits, 1)
		return cn, nil
	}

	atomic.AddUint32(&p.stats.Misses, 1)

	newcn, err := p.newConn(ctx, true)
	if err != nil {
		p.freeTurn()
		return nil, err
	}

	return newcn, nil
}
// getTurn blocks until a pool slot is available (no timeout; see waitTurn).
func (p *ConnPool) getTurn() {
	p.queue <- struct{}{}
}
// waitTurn acquires a pool slot, giving up when ctx is cancelled or after
// Options.PoolTimeout. Timers are recycled through the package-level pool.
func (p *ConnPool) waitTurn(ctx context.Context) error {
	select {
	case <-ctx.Done():
		return ctx.Err()
	default:
	}

	// Fast path: a slot is free right now.
	select {
	case p.queue <- struct{}{}:
		return nil
	default:
	}

	timer := timers.Get().(*time.Timer)
	timer.Reset(p.opt.PoolTimeout)

	select {
	case <-ctx.Done():
		// Drain the channel if the timer already fired, so the next
		// Reset on the recycled timer behaves correctly.
		if !timer.Stop() {
			<-timer.C
		}
		timers.Put(timer)
		return ctx.Err()
	case p.queue <- struct{}{}:
		if !timer.Stop() {
			<-timer.C
		}
		timers.Put(timer)
		return nil
	case <-timer.C:
		timers.Put(timer)
		atomic.AddUint32(&p.stats.Timeouts, 1)
		return ErrPoolTimeout
	}
}
// freeTurn releases a slot acquired by waitTurn/getTurn.
func (p *ConnPool) freeTurn() {
	<-p.queue
}
// popIdle takes one idle connection — oldest first when PoolFIFO is set
// (O(n) shift of the slice), newest first otherwise — and refills the idle
// minimum. Callers must hold p.connsMu.
func (p *ConnPool) popIdle() *Conn {
	n := len(p.idleConns)
	if n == 0 {
		return nil
	}

	var cn *Conn
	if p.opt.PoolFIFO {
		cn = p.idleConns[0]
		copy(p.idleConns, p.idleConns[1:])
		p.idleConns = p.idleConns[:n-1]
	} else {
		idx := n - 1
		cn = p.idleConns[idx]
		p.idleConns = p.idleConns[:idx]
	}
	p.idleConnsLen--
	p.checkMinIdleConns()
	return cn
}
// Put returns cn to the idle list. Connections with unread buffered data are
// treated as broken and removed; non-pooled connections are removed as well.
// Remove releases the turn itself, so freeTurn runs only on the idle path.
func (p *ConnPool) Put(ctx context.Context, cn *Conn) {
	if cn.rd.Buffered() > 0 {
		internal.Logger.Printf(ctx, "Conn has unread data")
		p.Remove(ctx, cn, BadConnError{})
		return
	}

	if !cn.pooled {
		p.Remove(ctx, cn, nil)
		return
	}

	p.connsMu.Lock()
	p.idleConns = append(p.idleConns, cn)
	p.idleConnsLen++
	p.connsMu.Unlock()
	p.freeTurn()
}
// Remove discards cn: detaches it from pool bookkeeping, releases its turn,
// and closes the underlying connection. The reason argument is unused here
// (kept to satisfy the Pooler interface).
func (p *ConnPool) Remove(ctx context.Context, cn *Conn, reason error) {
	p.removeConnWithLock(cn)
	p.freeTurn()
	_ = p.closeConn(cn)
}
// CloseConn detaches cn from the pool's bookkeeping and closes it,
// returning the close error. Unlike Remove, it does not release a turn.
func (p *ConnPool) CloseConn(cn *Conn) error {
	p.removeConnWithLock(cn)
	closeErr := p.closeConn(cn)
	return closeErr
}
// removeConnWithLock removes cn from the tracked-connection list while
// holding connsMu.
func (p *ConnPool) removeConnWithLock(cn *Conn) {
	p.connsMu.Lock()
	defer p.connsMu.Unlock()
	p.removeConn(cn)
}
func (p *ConnPool) removeConn(cn *Conn) {
for i, c := range p.conns {
if c == cn {<|fim▁hole|> p.poolSize--
p.checkMinIdleConns()
}
return
}
}
}
// closeConn runs the OnClose hook (when configured) and closes the
// underlying connection, returning the close error.
func (p *ConnPool) closeConn(cn *Conn) error {
	if hook := p.opt.OnClose; hook != nil {
		_ = hook(cn)
	}
	return cn.Close()
}
// Len returns total number of connections.
func (p *ConnPool) Len() int {
	p.connsMu.Lock()
	defer p.connsMu.Unlock()
	return len(p.conns)
}
// IdleLen returns number of idle connections.
func (p *ConnPool) IdleLen() int {
	p.connsMu.Lock()
	defer p.connsMu.Unlock()
	return p.idleConnsLen
}
// Stats returns a point-in-time snapshot of pool statistics; the counters
// are read atomically, the sizes via the mutex-guarded Len/IdleLen.
func (p *ConnPool) Stats() *Stats {
	idleLen := p.IdleLen()
	return &Stats{
		Hits:     atomic.LoadUint32(&p.stats.Hits),
		Misses:   atomic.LoadUint32(&p.stats.Misses),
		Timeouts: atomic.LoadUint32(&p.stats.Timeouts),

		TotalConns: uint32(p.Len()),
		IdleConns:  uint32(idleLen),
		StaleConns: atomic.LoadUint32(&p.stats.StaleConns),
	}
}
// closed reports whether Close has been called on the pool.
func (p *ConnPool) closed() bool {
	return atomic.LoadUint32(&p._closed) == 1
}
// Filter closes every tracked connection for which fn returns true and
// returns the first close error encountered, if any.
func (p *ConnPool) Filter(fn func(*Conn) bool) error {
	p.connsMu.Lock()
	defer p.connsMu.Unlock()

	var firstErr error
	for _, cn := range p.conns {
		if !fn(cn) {
			continue
		}
		if err := p.closeConn(cn); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	return firstErr
}
// Close marks the pool closed (the CAS makes repeated calls return
// ErrClosed), stops the reaper via closedCh, and closes every tracked
// connection, returning the first close error encountered.
func (p *ConnPool) Close() error {
	if !atomic.CompareAndSwapUint32(&p._closed, 0, 1) {
		return ErrClosed
	}
	close(p.closedCh)

	var firstErr error
	p.connsMu.Lock()
	for _, cn := range p.conns {
		if err := p.closeConn(cn); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	p.conns = nil
	p.poolSize = 0
	p.idleConns = nil
	p.idleConnsLen = 0
	p.connsMu.Unlock()

	return firstErr
}
// reaper periodically removes stale idle connections until the pool closes.
// It runs as a background goroutine started by NewConnPool.
func (p *ConnPool) reaper(frequency time.Duration) {
	ticker := time.NewTicker(frequency)
	defer ticker.Stop()

	for {
		select {
		case <-ticker.C:
			// It is possible that ticker and closedCh arrive together,
			// and select pseudo-randomly pick ticker case, we double
			// check here to prevent being executed after closed.
			if p.closed() {
				return
			}
			_, err := p.ReapStaleConns()
			if err != nil {
				internal.Logger.Printf(context.Background(), "ReapStaleConns failed: %s", err)
				continue
			}
		case <-p.closedCh:
			return
		}
	}
}
// ReapStaleConns closes stale idle connections one at a time, taking a turn
// for each so reaping never exceeds the pool's concurrency budget. It
// returns the number of connections removed (the error is always nil).
func (p *ConnPool) ReapStaleConns() (int, error) {
	var n int
	for {
		p.getTurn()

		p.connsMu.Lock()
		cn := p.reapStaleConn()
		p.connsMu.Unlock()

		p.freeTurn()

		if cn != nil {
			_ = p.closeConn(cn)
			n++
		} else {
			break
		}
	}
	atomic.AddUint32(&p.stats.StaleConns, uint32(n))
	return n, nil
}
// reapStaleConn pops the oldest idle connection if it is stale, detaching it
// from both idleConns and conns; the caller closes it. Returns nil when the
// oldest idle connection is still fresh. Callers must hold p.connsMu.
func (p *ConnPool) reapStaleConn() *Conn {
	if len(p.idleConns) == 0 {
		return nil
	}

	cn := p.idleConns[0]
	if !p.isStaleConn(cn) {
		return nil
	}

	p.idleConns = append(p.idleConns[:0], p.idleConns[1:]...)
	p.idleConnsLen--
	p.removeConn(cn)

	return cn
}
func (p *ConnPool) isStaleConn(cn *Conn) bool {
if p.opt.IdleTimeout == 0 && p.opt.MaxConnAge == 0 {
return false
}
now := time.Now()
if p.opt.IdleTimeout > 0 && now.Sub(cn.UsedAt()) >= p.opt.IdleTimeout {
return true
}
if p.opt.MaxConnAge > 0 && now.Sub(cn.createdAt) >= p.opt.MaxConnAge {
return true
}
return false
}<|fim▁end|> | p.conns = append(p.conns[:i], p.conns[i+1:]...)
if cn.pooled { |
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: cockroach/proto/api.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "cockroach/proto/api.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/generated_message_reflection.h>
#include <google/protobuf/reflection_ops.h>
#include <google/protobuf/wire_format.h>
// @@protoc_insertion_point(includes)
namespace cockroach {
namespace proto {
namespace {

// NOTE(review): this block is emitted by protoc. The descriptor/reflection
// pointers below are lazily filled in by protobuf_AssignDesc_*; any edits
// here will be lost on the next regeneration.
const ::google::protobuf::Descriptor* ClientCmdID_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ClientCmdID_reflection_ = NULL;
const ::google::protobuf::Descriptor* RequestHeader_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  RequestHeader_reflection_ = NULL;
const ::google::protobuf::Descriptor* ResponseHeader_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ResponseHeader_reflection_ = NULL;
const ::google::protobuf::Descriptor* GetRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  GetRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* GetResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  GetResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* PutRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  PutRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* PutResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  PutResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* ConditionalPutRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ConditionalPutRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* ConditionalPutResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ConditionalPutResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* IncrementRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  IncrementRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* IncrementResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  IncrementResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* DeleteRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  DeleteRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* DeleteResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  DeleteResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* DeleteRangeRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  DeleteRangeRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* DeleteRangeResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  DeleteRangeResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* ScanRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ScanRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* ScanResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ScanResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* EndTransactionRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  EndTransactionRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* EndTransactionResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  EndTransactionResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* RequestUnion_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  RequestUnion_reflection_ = NULL;
// Backing storage for the default instances of RequestUnion's oneof fields.
struct RequestUnionOneofInstance {
  const ::cockroach::proto::GetRequest* get_;
  const ::cockroach::proto::PutRequest* put_;
  const ::cockroach::proto::ConditionalPutRequest* conditional_put_;
  const ::cockroach::proto::IncrementRequest* increment_;
  const ::cockroach::proto::DeleteRequest* delete__;
  const ::cockroach::proto::DeleteRangeRequest* delete_range_;
  const ::cockroach::proto::ScanRequest* scan_;
  const ::cockroach::proto::EndTransactionRequest* end_transaction_;
}* RequestUnion_default_oneof_instance_ = NULL;
const ::google::protobuf::Descriptor* ResponseUnion_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  ResponseUnion_reflection_ = NULL;
// Backing storage for the default instances of ResponseUnion's oneof fields.
struct ResponseUnionOneofInstance {
  const ::cockroach::proto::GetResponse* get_;
  const ::cockroach::proto::PutResponse* put_;
  const ::cockroach::proto::ConditionalPutResponse* conditional_put_;
  const ::cockroach::proto::IncrementResponse* increment_;
  const ::cockroach::proto::DeleteResponse* delete__;
  const ::cockroach::proto::DeleteRangeResponse* delete_range_;
  const ::cockroach::proto::ScanResponse* scan_;
  const ::cockroach::proto::EndTransactionResponse* end_transaction_;
}* ResponseUnion_default_oneof_instance_ = NULL;
const ::google::protobuf::Descriptor* BatchRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  BatchRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* BatchResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  BatchResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* AdminSplitRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  AdminSplitRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* AdminSplitResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  AdminSplitResponse_reflection_ = NULL;
const ::google::protobuf::Descriptor* AdminMergeRequest_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  AdminMergeRequest_reflection_ = NULL;
const ::google::protobuf::Descriptor* AdminMergeResponse_descriptor_ = NULL;
const ::google::protobuf::internal::GeneratedMessageReflection*
  AdminMergeResponse_reflection_ = NULL;
const ::google::protobuf::EnumDescriptor* ReadConsistencyType_descriptor_ = NULL;

} // namespace
void protobuf_AssignDesc_cockroach_2fproto_2fapi_2eproto() {
protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
const ::google::protobuf::FileDescriptor* file =
::google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
"cockroach/proto/api.proto");
GOOGLE_CHECK(file != NULL);
ClientCmdID_descriptor_ = file->message_type(0);
static const int ClientCmdID_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ClientCmdID, wall_time_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ClientCmdID, random_),
};
ClientCmdID_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ClientCmdID_descriptor_,
ClientCmdID::default_instance_,
ClientCmdID_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ClientCmdID, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ClientCmdID, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ClientCmdID));
RequestHeader_descriptor_ = file->message_type(1);
static const int RequestHeader_offsets_[10] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, timestamp_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, cmd_id_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, key_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, end_key_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, user_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, replica_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, raft_id_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, user_priority_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, txn_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, read_consistency_),
};
RequestHeader_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
RequestHeader_descriptor_,
RequestHeader::default_instance_,
RequestHeader_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestHeader, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(RequestHeader));
ResponseHeader_descriptor_ = file->message_type(2);
static const int ResponseHeader_offsets_[3] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseHeader, error_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseHeader, timestamp_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseHeader, txn_),
};
ResponseHeader_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ResponseHeader_descriptor_,
ResponseHeader::default_instance_,
ResponseHeader_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseHeader, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseHeader, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ResponseHeader));
GetRequest_descriptor_ = file->message_type(3);
static const int GetRequest_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetRequest, header_),
};
GetRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
GetRequest_descriptor_,
GetRequest::default_instance_,
GetRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(GetRequest));
GetResponse_descriptor_ = file->message_type(4);
static const int GetResponse_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetResponse, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetResponse, value_),
};
GetResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
GetResponse_descriptor_,
GetResponse::default_instance_,
GetResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(GetResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(GetResponse));
PutRequest_descriptor_ = file->message_type(5);
static const int PutRequest_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutRequest, value_),
};
PutRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
PutRequest_descriptor_,
PutRequest::default_instance_,
PutRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(PutRequest));
PutResponse_descriptor_ = file->message_type(6);
static const int PutResponse_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutResponse, header_),
};
PutResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
PutResponse_descriptor_,
PutResponse::default_instance_,
PutResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(PutResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(PutResponse));
ConditionalPutRequest_descriptor_ = file->message_type(7);
static const int ConditionalPutRequest_offsets_[3] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutRequest, value_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutRequest, exp_value_),
};
ConditionalPutRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ConditionalPutRequest_descriptor_,
ConditionalPutRequest::default_instance_,
ConditionalPutRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ConditionalPutRequest));
ConditionalPutResponse_descriptor_ = file->message_type(8);
static const int ConditionalPutResponse_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutResponse, header_),
};
ConditionalPutResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ConditionalPutResponse_descriptor_,
ConditionalPutResponse::default_instance_,
ConditionalPutResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ConditionalPutResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ConditionalPutResponse));
IncrementRequest_descriptor_ = file->message_type(9);
static const int IncrementRequest_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementRequest, increment_),
};
IncrementRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
IncrementRequest_descriptor_,
IncrementRequest::default_instance_,
IncrementRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(IncrementRequest));
IncrementResponse_descriptor_ = file->message_type(10);
static const int IncrementResponse_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementResponse, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementResponse, new_value_),
};
IncrementResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
IncrementResponse_descriptor_,
IncrementResponse::default_instance_,
IncrementResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(IncrementResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(IncrementResponse));
DeleteRequest_descriptor_ = file->message_type(11);
static const int DeleteRequest_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRequest, header_),
};
DeleteRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
DeleteRequest_descriptor_,
DeleteRequest::default_instance_,
DeleteRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(DeleteRequest));
DeleteResponse_descriptor_ = file->message_type(12);
static const int DeleteResponse_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteResponse, header_),
};
DeleteResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
DeleteResponse_descriptor_,
DeleteResponse::default_instance_,
DeleteResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(DeleteResponse));
DeleteRangeRequest_descriptor_ = file->message_type(13);
static const int DeleteRangeRequest_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeRequest, max_entries_to_delete_),
};
DeleteRangeRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
DeleteRangeRequest_descriptor_,
DeleteRangeRequest::default_instance_,
DeleteRangeRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(DeleteRangeRequest));
DeleteRangeResponse_descriptor_ = file->message_type(14);
static const int DeleteRangeResponse_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeResponse, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeResponse, num_deleted_),
};
DeleteRangeResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
DeleteRangeResponse_descriptor_,
DeleteRangeResponse::default_instance_,
DeleteRangeResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(DeleteRangeResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(DeleteRangeResponse));
ScanRequest_descriptor_ = file->message_type(15);
static const int ScanRequest_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanRequest, max_results_),
};
ScanRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ScanRequest_descriptor_,
ScanRequest::default_instance_,
ScanRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ScanRequest));
ScanResponse_descriptor_ = file->message_type(16);
static const int ScanResponse_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanResponse, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanResponse, rows_),
};
ScanResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ScanResponse_descriptor_,
ScanResponse::default_instance_,
ScanResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ScanResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ScanResponse));
EndTransactionRequest_descriptor_ = file->message_type(17);
static const int EndTransactionRequest_offsets_[3] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionRequest, commit_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionRequest, internal_commit_trigger_),
};
EndTransactionRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
EndTransactionRequest_descriptor_,
EndTransactionRequest::default_instance_,
EndTransactionRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(EndTransactionRequest));
EndTransactionResponse_descriptor_ = file->message_type(18);
static const int EndTransactionResponse_offsets_[3] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionResponse, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionResponse, commit_wait_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionResponse, resolved_),
};
EndTransactionResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
EndTransactionResponse_descriptor_,
EndTransactionResponse::default_instance_,
EndTransactionResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(EndTransactionResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(EndTransactionResponse));
RequestUnion_descriptor_ = file->message_type(19);
static const int RequestUnion_offsets_[9] = {
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, get_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, put_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, conditional_put_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, increment_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, delete__),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, delete_range_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, scan_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(RequestUnion_default_oneof_instance_, end_transaction_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestUnion, value_),
};
RequestUnion_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
RequestUnion_descriptor_,
RequestUnion::default_instance_,
RequestUnion_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestUnion, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestUnion, _unknown_fields_),
-1,
RequestUnion_default_oneof_instance_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(RequestUnion, _oneof_case_[0]),
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(RequestUnion));
ResponseUnion_descriptor_ = file->message_type(20);
static const int ResponseUnion_offsets_[9] = {
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, get_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, put_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, conditional_put_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, increment_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, delete__),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, delete_range_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, scan_),
PROTO2_GENERATED_DEFAULT_ONEOF_FIELD_OFFSET(ResponseUnion_default_oneof_instance_, end_transaction_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseUnion, value_),
};
ResponseUnion_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
ResponseUnion_descriptor_,
ResponseUnion::default_instance_,
ResponseUnion_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseUnion, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseUnion, _unknown_fields_),
-1,
ResponseUnion_default_oneof_instance_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(ResponseUnion, _oneof_case_[0]),
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(ResponseUnion));
BatchRequest_descriptor_ = file->message_type(21);
static const int BatchRequest_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchRequest, requests_),
};
BatchRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
BatchRequest_descriptor_,
BatchRequest::default_instance_,
BatchRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(BatchRequest));
BatchResponse_descriptor_ = file->message_type(22);
static const int BatchResponse_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchResponse, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchResponse, responses_),
};
BatchResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
BatchResponse_descriptor_,
BatchResponse::default_instance_,
BatchResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(BatchResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(BatchResponse));
AdminSplitRequest_descriptor_ = file->message_type(23);
static const int AdminSplitRequest_offsets_[2] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitRequest, header_),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitRequest, split_key_),
};
AdminSplitRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
AdminSplitRequest_descriptor_,
AdminSplitRequest::default_instance_,
AdminSplitRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(AdminSplitRequest));
AdminSplitResponse_descriptor_ = file->message_type(24);
static const int AdminSplitResponse_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitResponse, header_),
};
AdminSplitResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
AdminSplitResponse_descriptor_,
AdminSplitResponse::default_instance_,
AdminSplitResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminSplitResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(AdminSplitResponse));
AdminMergeRequest_descriptor_ = file->message_type(25);
static const int AdminMergeRequest_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminMergeRequest, header_),
};
AdminMergeRequest_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
AdminMergeRequest_descriptor_,
AdminMergeRequest::default_instance_,
AdminMergeRequest_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminMergeRequest, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminMergeRequest, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(AdminMergeRequest));
AdminMergeResponse_descriptor_ = file->message_type(26);
static const int AdminMergeResponse_offsets_[1] = {
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminMergeResponse, header_),
};
AdminMergeResponse_reflection_ =
new ::google::protobuf::internal::GeneratedMessageReflection(
AdminMergeResponse_descriptor_,
AdminMergeResponse::default_instance_,
AdminMergeResponse_offsets_,
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminMergeResponse, _has_bits_[0]),
GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(AdminMergeResponse, _unknown_fields_),
-1,
::google::protobuf::DescriptorPool::generated_pool(),
::google::protobuf::MessageFactory::generated_factory(),
sizeof(AdminMergeResponse));
ReadConsistencyType_descriptor_ = file->enum_type(0);
}
namespace {
// Once-guard ensuring descriptor/reflection globals for this file are
// assigned exactly once (thread-safe via GoogleOnceInit).
GOOGLE_PROTOBUF_DECLARE_ONCE(protobuf_AssignDescriptors_once_);
inline void protobuf_AssignDescriptorsOnce() {
  ::google::protobuf::GoogleOnceInit(&protobuf_AssignDescriptors_once_,
                 &protobuf_AssignDesc_cockroach_2fproto_2fapi_2eproto);
}
// Callback invoked lazily by the generated-message factory; registers every
// message type defined in cockroach/proto/api.proto, keyed by descriptor.
void protobuf_RegisterTypes(const ::std::string&) {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ClientCmdID_descriptor_, &ClientCmdID::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    RequestHeader_descriptor_, &RequestHeader::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ResponseHeader_descriptor_, &ResponseHeader::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    GetRequest_descriptor_, &GetRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    GetResponse_descriptor_, &GetResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    PutRequest_descriptor_, &PutRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    PutResponse_descriptor_, &PutResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ConditionalPutRequest_descriptor_, &ConditionalPutRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ConditionalPutResponse_descriptor_, &ConditionalPutResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    IncrementRequest_descriptor_, &IncrementRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    IncrementResponse_descriptor_, &IncrementResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    DeleteRequest_descriptor_, &DeleteRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    DeleteResponse_descriptor_, &DeleteResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    DeleteRangeRequest_descriptor_, &DeleteRangeRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    DeleteRangeResponse_descriptor_, &DeleteRangeResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ScanRequest_descriptor_, &ScanRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ScanResponse_descriptor_, &ScanResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    EndTransactionRequest_descriptor_, &EndTransactionRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    EndTransactionResponse_descriptor_, &EndTransactionResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    RequestUnion_descriptor_, &RequestUnion::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    ResponseUnion_descriptor_, &ResponseUnion::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    BatchRequest_descriptor_, &BatchRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    BatchResponse_descriptor_, &BatchResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    AdminSplitRequest_descriptor_, &AdminSplitRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    AdminSplitResponse_descriptor_, &AdminSplitResponse::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    AdminMergeRequest_descriptor_, &AdminMergeRequest::default_instance());
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedMessage(
    AdminMergeResponse_descriptor_, &AdminMergeResponse::default_instance());
}
}  // namespace
// Shutdown hook registered via OnShutdown() in protobuf_AddDesc_...: frees the
// heap-allocated default instances and reflection objects for every message in
// this file. The oneof default instances (RequestUnion/ResponseUnion) are
// separate allocations and are deleted alongside their messages.
void protobuf_ShutdownFile_cockroach_2fproto_2fapi_2eproto() {
  delete ClientCmdID::default_instance_;
  delete ClientCmdID_reflection_;
  delete RequestHeader::default_instance_;
  delete RequestHeader_reflection_;
  delete ResponseHeader::default_instance_;
  delete ResponseHeader_reflection_;
  delete GetRequest::default_instance_;
  delete GetRequest_reflection_;
  delete GetResponse::default_instance_;
  delete GetResponse_reflection_;
  delete PutRequest::default_instance_;
  delete PutRequest_reflection_;
  delete PutResponse::default_instance_;
  delete PutResponse_reflection_;
  delete ConditionalPutRequest::default_instance_;
  delete ConditionalPutRequest_reflection_;
  delete ConditionalPutResponse::default_instance_;
  delete ConditionalPutResponse_reflection_;
  delete IncrementRequest::default_instance_;
  delete IncrementRequest_reflection_;
  delete IncrementResponse::default_instance_;
  delete IncrementResponse_reflection_;
  delete DeleteRequest::default_instance_;
  delete DeleteRequest_reflection_;
  delete DeleteResponse::default_instance_;
  delete DeleteResponse_reflection_;
  delete DeleteRangeRequest::default_instance_;
  delete DeleteRangeRequest_reflection_;
  delete DeleteRangeResponse::default_instance_;
  delete DeleteRangeResponse_reflection_;
  delete ScanRequest::default_instance_;
  delete ScanRequest_reflection_;
  delete ScanResponse::default_instance_;
  delete ScanResponse_reflection_;
  delete EndTransactionRequest::default_instance_;
  delete EndTransactionRequest_reflection_;
  delete EndTransactionResponse::default_instance_;
  delete EndTransactionResponse_reflection_;
  delete RequestUnion::default_instance_;
  delete RequestUnion_default_oneof_instance_;
  delete RequestUnion_reflection_;
  delete ResponseUnion::default_instance_;
  delete ResponseUnion_default_oneof_instance_;
  delete ResponseUnion_reflection_;
  delete BatchRequest::default_instance_;
  delete BatchRequest_reflection_;
  delete BatchResponse::default_instance_;
  delete BatchResponse_reflection_;
  delete AdminSplitRequest::default_instance_;
  delete AdminSplitRequest_reflection_;
  delete AdminSplitResponse::default_instance_;
  delete AdminSplitResponse_reflection_;
  delete AdminMergeRequest::default_instance_;
  delete AdminMergeRequest_reflection_;
  delete AdminMergeResponse::default_instance_;
  delete AdminMergeResponse_reflection_;
}
// Registers this file's serialized FileDescriptorProto with the generated
// descriptor pool, pulls in the descriptors of imported .proto files first,
// allocates the default instance of every message, and installs the shutdown
// hook that frees them. Idempotent via the `already_here` latch (NOTE: the
// latch is not thread-safe by itself; presumably first call happens during
// static initialization, before threads start — standard protoc pattern).
void protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto() {
  static bool already_here = false;
  if (already_here) return;
  already_here = true;
  GOOGLE_PROTOBUF_VERIFY_VERSION;
  // Dependencies must be registered before this file's descriptor is added.
  ::cockroach::proto::protobuf_AddDesc_cockroach_2fproto_2fconfig_2eproto();
  ::cockroach::proto::protobuf_AddDesc_cockroach_2fproto_2fdata_2eproto();
  ::cockroach::proto::protobuf_AddDesc_cockroach_2fproto_2ferrors_2eproto();
  ::gogoproto::protobuf_AddDesc_gogoproto_2fgogo_2eproto();
  // Serialized FileDescriptorProto for cockroach/proto/api.proto (4216 bytes).
  // Generated by protoc — do not edit by hand.
  ::google::protobuf::DescriptorPool::InternalAddGeneratedFile(
    "\n\031cockroach/proto/api.proto\022\017cockroach.p"
    "roto\032\034cockroach/proto/config.proto\032\032cock"
    "roach/proto/data.proto\032\034cockroach/proto/"
    "errors.proto\032\024gogoproto/gogo.proto\"<\n\013Cl"
    "ientCmdID\022\027\n\twall_time\030\001 \001(\003B\004\310\336\037\000\022\024\n\006ra"
    "ndom\030\002 \001(\003B\004\310\336\037\000\"\252\003\n\rRequestHeader\0223\n\tti"
    "mestamp\030\001 \001(\0132\032.cockroach.proto.Timestam"
    "pB\004\310\336\037\000\022;\n\006cmd_id\030\002 \001(\0132\034.cockroach.prot"
    "o.ClientCmdIDB\r\310\336\037\000\342\336\037\005CmdID\022\030\n\003key\030\003 \001("
    "\014B\013\310\336\037\000\332\336\037\003Key\022\034\n\007end_key\030\004 \001(\014B\013\310\336\037\000\332\336\037"
    "\003Key\022\022\n\004user\030\005 \001(\tB\004\310\336\037\000\022/\n\007replica\030\006 \001("
    "\0132\030.cockroach.proto.ReplicaB\004\310\336\037\000\022\037\n\007raf"
    "t_id\030\007 \001(\003B\016\310\336\037\000\342\336\037\006RaftID\022\030\n\ruser_prior"
    "ity\030\010 \001(\005:\0011\022)\n\003txn\030\t \001(\0132\034.cockroach.pr"
    "oto.Transaction\022D\n\020read_consistency\030\n \001("
    "\0162$.cockroach.proto.ReadConsistencyTypeB"
    "\004\310\336\037\000\"\227\001\n\016ResponseHeader\022%\n\005error\030\001 \001(\0132"
    "\026.cockroach.proto.Error\0223\n\ttimestamp\030\002 \001"
    "(\0132\032.cockroach.proto.TimestampB\004\310\336\037\000\022)\n\003"
    "txn\030\003 \001(\0132\034.cockroach.proto.Transaction\""
    "F\n\nGetRequest\0228\n\006header\030\001 \001(\0132\036.cockroac"
    "h.proto.RequestHeaderB\010\310\336\037\000\320\336\037\001\"o\n\013GetRe"
    "sponse\0229\n\006header\030\001 \001(\0132\037.cockroach.proto"
    ".ResponseHeaderB\010\310\336\037\000\320\336\037\001\022%\n\005value\030\002 \001(\013"
    "2\026.cockroach.proto.Value\"s\n\nPutRequest\0228"
    "\n\006header\030\001 \001(\0132\036.cockroach.proto.Request"
    "HeaderB\010\310\336\037\000\320\336\037\001\022+\n\005value\030\002 \001(\0132\026.cockro"
    "ach.proto.ValueB\004\310\336\037\000\"H\n\013PutResponse\0229\n\006"
    "header\030\001 \001(\0132\037.cockroach.proto.ResponseH"
    "eaderB\010\310\336\037\000\320\336\037\001\"\251\001\n\025ConditionalPutReques"
    "t\0228\n\006header\030\001 \001(\0132\036.cockroach.proto.Requ"
    "estHeaderB\010\310\336\037\000\320\336\037\001\022+\n\005value\030\002 \001(\0132\026.coc"
    "kroach.proto.ValueB\004\310\336\037\000\022)\n\texp_value\030\003 "
    "\001(\0132\026.cockroach.proto.Value\"S\n\026Condition"
    "alPutResponse\0229\n\006header\030\001 \001(\0132\037.cockroac"
    "h.proto.ResponseHeaderB\010\310\336\037\000\320\336\037\001\"e\n\020Incr"
    "ementRequest\0228\n\006header\030\001 \001(\0132\036.cockroach"
    ".proto.RequestHeaderB\010\310\336\037\000\320\336\037\001\022\027\n\tincrem"
    "ent\030\002 \001(\003B\004\310\336\037\000\"g\n\021IncrementResponse\0229\n\006"
    "header\030\001 \001(\0132\037.cockroach.proto.ResponseH"
    "eaderB\010\310\336\037\000\320\336\037\001\022\027\n\tnew_value\030\002 \001(\003B\004\310\336\037\000"
    "\"I\n\rDeleteRequest\0228\n\006header\030\001 \001(\0132\036.cock"
    "roach.proto.RequestHeaderB\010\310\336\037\000\320\336\037\001\"K\n\016D"
    "eleteResponse\0229\n\006header\030\001 \001(\0132\037.cockroac"
    "h.proto.ResponseHeaderB\010\310\336\037\000\320\336\037\001\"s\n\022Dele"
    "teRangeRequest\0228\n\006header\030\001 \001(\0132\036.cockroa"
    "ch.proto.RequestHeaderB\010\310\336\037\000\320\336\037\001\022#\n\025max_"
    "entries_to_delete\030\002 \001(\003B\004\310\336\037\000\"k\n\023DeleteR"
    "angeResponse\0229\n\006header\030\001 \001(\0132\037.cockroach"
    ".proto.ResponseHeaderB\010\310\336\037\000\320\336\037\001\022\031\n\013num_d"
    "eleted\030\002 \001(\003B\004\310\336\037\000\"b\n\013ScanRequest\0228\n\006hea"
    "der\030\001 \001(\0132\036.cockroach.proto.RequestHeade"
    "rB\010\310\336\037\000\320\336\037\001\022\031\n\013max_results\030\002 \001(\003B\004\310\336\037\000\"x"
    "\n\014ScanResponse\0229\n\006header\030\001 \001(\0132\037.cockroa"
    "ch.proto.ResponseHeaderB\010\310\336\037\000\320\336\037\001\022-\n\004row"
    "s\030\002 \003(\0132\031.cockroach.proto.KeyValueB\004\310\336\037\000"
    "\"\260\001\n\025EndTransactionRequest\0228\n\006header\030\001 \001"
    "(\0132\036.cockroach.proto.RequestHeaderB\010\310\336\037\000"
    "\320\336\037\001\022\024\n\006commit\030\002 \001(\010B\004\310\336\037\000\022G\n\027internal_c"
    "ommit_trigger\030\003 \001(\0132&.cockroach.proto.In"
    "ternalCommitTrigger\"\211\001\n\026EndTransactionRe"
    "sponse\0229\n\006header\030\001 \001(\0132\037.cockroach.proto"
    ".ResponseHeaderB\010\310\336\037\000\320\336\037\001\022\031\n\013commit_wait"
    "\030\002 \001(\003B\004\310\336\037\000\022\031\n\010resolved\030\003 \003(\014B\007\332\336\037\003Key\""
    "\320\003\n\014RequestUnion\022*\n\003get\030\002 \001(\0132\033.cockroac"
    "h.proto.GetRequestH\000\022*\n\003put\030\003 \001(\0132\033.cock"
    "roach.proto.PutRequestH\000\022A\n\017conditional_"
    "put\030\004 \001(\0132&.cockroach.proto.ConditionalP"
    "utRequestH\000\0226\n\tincrement\030\005 \001(\0132!.cockroa"
    "ch.proto.IncrementRequestH\000\0220\n\006delete\030\006 "
    "\001(\0132\036.cockroach.proto.DeleteRequestH\000\022;\n"
    "\014delete_range\030\007 \001(\0132#.cockroach.proto.De"
    "leteRangeRequestH\000\022,\n\004scan\030\010 \001(\0132\034.cockr"
    "oach.proto.ScanRequestH\000\022A\n\017end_transact"
    "ion\030\t \001(\0132&.cockroach.proto.EndTransacti"
    "onRequestH\000:\004\310\240\037\001B\007\n\005value\"\331\003\n\rResponseU"
    "nion\022+\n\003get\030\002 \001(\0132\034.cockroach.proto.GetR"
    "esponseH\000\022+\n\003put\030\003 \001(\0132\034.cockroach.proto"
    ".PutResponseH\000\022B\n\017conditional_put\030\004 \001(\0132"
    "\'.cockroach.proto.ConditionalPutResponse"
    "H\000\0227\n\tincrement\030\005 \001(\0132\".cockroach.proto."
    "IncrementResponseH\000\0221\n\006delete\030\006 \001(\0132\037.co"
    "ckroach.proto.DeleteResponseH\000\022<\n\014delete"
    "_range\030\007 \001(\0132$.cockroach.proto.DeleteRan"
    "geResponseH\000\022-\n\004scan\030\010 \001(\0132\035.cockroach.p"
    "roto.ScanResponseH\000\022B\n\017end_transaction\030\t"
    " \001(\0132\'.cockroach.proto.EndTransactionRes"
    "ponseH\000:\004\310\240\037\001B\007\n\005value\"\177\n\014BatchRequest\0228"
    "\n\006header\030\001 \001(\0132\036.cockroach.proto.Request"
    "HeaderB\010\310\336\037\000\320\336\037\001\0225\n\010requests\030\002 \003(\0132\035.coc"
    "kroach.proto.RequestUnionB\004\310\336\037\000\"\203\001\n\rBatc"
    "hResponse\0229\n\006header\030\001 \001(\0132\037.cockroach.pr"
    "oto.ResponseHeaderB\010\310\336\037\000\320\336\037\001\0227\n\tresponse"
    "s\030\002 \003(\0132\036.cockroach.proto.ResponseUnionB"
    "\004\310\336\037\000\"m\n\021AdminSplitRequest\0228\n\006header\030\001 \001"
    "(\0132\036.cockroach.proto.RequestHeaderB\010\310\336\037\000"
    "\320\336\037\001\022\036\n\tsplit_key\030\002 \001(\014B\013\310\336\037\000\332\336\037\003Key\"O\n\022"
    "AdminSplitResponse\0229\n\006header\030\001 \001(\0132\037.coc"
    "kroach.proto.ResponseHeaderB\010\310\336\037\000\320\336\037\001\"M\n"
    "\021AdminMergeRequest\0228\n\006header\030\001 \001(\0132\036.coc"
    "kroach.proto.RequestHeaderB\010\310\336\037\000\320\336\037\001\"O\n\022"
    "AdminMergeResponse\0229\n\006header\030\001 \001(\0132\037.coc"
    "kroach.proto.ResponseHeaderB\010\310\336\037\000\320\336\037\001*L\n"
    "\023ReadConsistencyType\022\016\n\nCONSISTENT\020\000\022\r\n\t"
    "CONSENSUS\020\001\022\020\n\014INCONSISTENT\020\002\032\004\210\243\036\000B\023Z\005p"
    "roto\340\342\036\001\310\342\036\001\320\342\036\001", 4216);
  // Deferred registration: types are registered with the factory on first use.
  ::google::protobuf::MessageFactory::InternalRegisterGeneratedFile(
    "cockroach/proto/api.proto", &protobuf_RegisterTypes);
  // Allocate the shared immutable default instance of every message type.
  ClientCmdID::default_instance_ = new ClientCmdID();
  RequestHeader::default_instance_ = new RequestHeader();
  ResponseHeader::default_instance_ = new ResponseHeader();
  GetRequest::default_instance_ = new GetRequest();
  GetResponse::default_instance_ = new GetResponse();
  PutRequest::default_instance_ = new PutRequest();
  PutResponse::default_instance_ = new PutResponse();
  ConditionalPutRequest::default_instance_ = new ConditionalPutRequest();
  ConditionalPutResponse::default_instance_ = new ConditionalPutResponse();
  IncrementRequest::default_instance_ = new IncrementRequest();
  IncrementResponse::default_instance_ = new IncrementResponse();
  DeleteRequest::default_instance_ = new DeleteRequest();
  DeleteResponse::default_instance_ = new DeleteResponse();
  DeleteRangeRequest::default_instance_ = new DeleteRangeRequest();
  DeleteRangeResponse::default_instance_ = new DeleteRangeResponse();
  ScanRequest::default_instance_ = new ScanRequest();
  ScanResponse::default_instance_ = new ScanResponse();
  EndTransactionRequest::default_instance_ = new EndTransactionRequest();
  EndTransactionResponse::default_instance_ = new EndTransactionResponse();
  RequestUnion::default_instance_ = new RequestUnion();
  RequestUnion_default_oneof_instance_ = new RequestUnionOneofInstance;
  ResponseUnion::default_instance_ = new ResponseUnion();
  ResponseUnion_default_oneof_instance_ = new ResponseUnionOneofInstance;
  BatchRequest::default_instance_ = new BatchRequest();
  BatchResponse::default_instance_ = new BatchResponse();
  AdminSplitRequest::default_instance_ = new AdminSplitRequest();
  AdminSplitResponse::default_instance_ = new AdminSplitResponse();
  AdminMergeRequest::default_instance_ = new AdminMergeRequest();
  AdminMergeResponse::default_instance_ = new AdminMergeResponse();
  // Wire up message-typed fields of each default instance to point at the
  // default instances of their sub-messages.
  ClientCmdID::default_instance_->InitAsDefaultInstance();
  RequestHeader::default_instance_->InitAsDefaultInstance();
  ResponseHeader::default_instance_->InitAsDefaultInstance();
  GetRequest::default_instance_->InitAsDefaultInstance();
  GetResponse::default_instance_->InitAsDefaultInstance();
  PutRequest::default_instance_->InitAsDefaultInstance();
  PutResponse::default_instance_->InitAsDefaultInstance();
  ConditionalPutRequest::default_instance_->InitAsDefaultInstance();
  ConditionalPutResponse::default_instance_->InitAsDefaultInstance();
  IncrementRequest::default_instance_->InitAsDefaultInstance();
  IncrementResponse::default_instance_->InitAsDefaultInstance();
  DeleteRequest::default_instance_->InitAsDefaultInstance();
  DeleteResponse::default_instance_->InitAsDefaultInstance();
  DeleteRangeRequest::default_instance_->InitAsDefaultInstance();
  DeleteRangeResponse::default_instance_->InitAsDefaultInstance();
  ScanRequest::default_instance_->InitAsDefaultInstance();
  ScanResponse::default_instance_->InitAsDefaultInstance();
  EndTransactionRequest::default_instance_->InitAsDefaultInstance();
  EndTransactionResponse::default_instance_->InitAsDefaultInstance();
  RequestUnion::default_instance_->InitAsDefaultInstance();
  ResponseUnion::default_instance_->InitAsDefaultInstance();
  BatchRequest::default_instance_->InitAsDefaultInstance();
  BatchResponse::default_instance_->InitAsDefaultInstance();
  AdminSplitRequest::default_instance_->InitAsDefaultInstance();
  AdminSplitResponse::default_instance_->InitAsDefaultInstance();
  AdminMergeRequest::default_instance_->InitAsDefaultInstance();
  AdminMergeResponse::default_instance_->InitAsDefaultInstance();
  // Free everything above at library shutdown.
  ::google::protobuf::internal::OnShutdown(&protobuf_ShutdownFile_cockroach_2fproto_2fapi_2eproto);
}
// Force AddDescriptors() to be called at static initialization time.
// A file-scope instance of this struct runs the AddDesc function from its
// constructor before main(), so descriptors are always registered.
struct StaticDescriptorInitializer_cockroach_2fproto_2fapi_2eproto {
  StaticDescriptorInitializer_cockroach_2fproto_2fapi_2eproto() {
    protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  }
} static_descriptor_initializer_cockroach_2fproto_2fapi_2eproto_;
// Returns the EnumDescriptor for ReadConsistencyType, assigning the cached
// descriptor globals first if this is the first reflection access.
const ::google::protobuf::EnumDescriptor* ReadConsistencyType_descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ReadConsistencyType_descriptor_;
}
// Returns true iff `value` names a declared ReadConsistencyType enumerator:
// CONSISTENT (0), CONSENSUS (1) or INCONSISTENT (2).
bool ReadConsistencyType_IsValid(int value) {
  return value >= 0 && value <= 2;
}
// ===================================================================
// ClientCmdID
// Out-of-line definitions for the in-class-initialized field-number
// constants (required by the standard when the constants are odr-used;
// MSVC rejects the redefinition, hence the guard).
#ifndef _MSC_VER
const int ClientCmdID::kWallTimeFieldNumber;
const int ClientCmdID::kRandomFieldNumber;
#endif  // !_MSC_VER
// Default constructor: delegates all field initialization to SharedCtor().
ClientCmdID::ClientCmdID()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.ClientCmdID)
}
// Nothing to wire up: ClientCmdID has no message-typed fields, so its default
// instance needs no sub-message default pointers.
void ClientCmdID::InitAsDefaultInstance() {
}
// Copy constructor: default-initialize, then merge all set fields from `from`.
ClientCmdID::ClientCmdID(const ClientCmdID& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.ClientCmdID)
}
// Shared initialization for all constructors: zero the two int64 fields and
// clear the has-bits.
void ClientCmdID::SharedCtor() {
  _cached_size_ = 0;
  wall_time_ = GOOGLE_LONGLONG(0);
  random_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
// Destructor: delegates to SharedDtor() (no owned resources for this type).
ClientCmdID::~ClientCmdID() {
  // @@protoc_insertion_point(destructor:cockroach.proto.ClientCmdID)
  SharedDtor();
}
// Shared destruction logic. ClientCmdID owns nothing heap-allocated, so the
// non-default-instance branch is intentionally empty (generated symmetry with
// messages that do own string/message fields).
void ClientCmdID::SharedDtor() {
  if (this != default_instance_) {
  }
}
// Stores the serialized size computed by ByteSize() for later reuse; wrapped
// in the concurrent-write guards because _cached_size_ is mutable and may be
// written from const context.
void ClientCmdID::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns the message Descriptor, lazily assigning descriptor globals first.
const ::google::protobuf::Descriptor* ClientCmdID::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ClientCmdID_descriptor_;
}
// Returns the shared immutable default instance, creating it (via the file's
// AddDesc routine) on first access.
const ClientCmdID& ClientCmdID::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
// Allocated in protobuf_AddDesc_...; freed in protobuf_ShutdownFile_....
ClientCmdID* ClientCmdID::default_instance_ = NULL;
// Factory method used by the reflection API; returns a fresh default-valued
// instance that the caller owns.
ClientCmdID* ClientCmdID::New() const {
  return new ClientCmdID;
}
// Resets the message to its default state: zeroes wall_time_..random_ as one
// contiguous memset (ZR_ computes the byte span between the two fields),
// clears has-bits, and drops unknown fields.
void ClientCmdID::Clear() {
#define OFFSET_OF_FIELD_(f) (reinterpret_cast<char*>(      \
  &reinterpret_cast<ClientCmdID*>(16)->f) - \
   reinterpret_cast<char*>(16))

#define ZR_(first, last) do {                              \
    size_t f = OFFSET_OF_FIELD_(first);                    \
    size_t n = OFFSET_OF_FIELD_(last) - f + sizeof(last);  \
    ::memset(&first, 0, n);                                \
  } while (0)

  ZR_(wall_time_, random_);

#undef OFFSET_OF_FIELD_
#undef ZR_

  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser: reads tag/value pairs from `input`, merging recognized
// fields (wall_time = 1, random = 2, both varint int64) into this message and
// preserving everything else in unknown fields. Returns false on a malformed
// stream. The fall-through ExpectTag/ExpectAtEnd fast paths assume fields
// arrive in ascending field-number order (the common case).
bool ClientCmdID::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.ClientCmdID)
  for (;;) {
    // Tags <= 127 (single-byte) are read inline; larger or EOF falls to the
    // handle_unusual path via p.second == false.
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional int64 wall_time = 1;
      case 1: {
        if (tag == 8) {
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &wall_time_)));
          set_has_wall_time();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(16)) goto parse_random;
        break;
      }
      // optional int64 random = 2;
      case 2: {
        if (tag == 16) {
         parse_random:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &random_)));
          set_has_random();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 or an end-group tag terminates the message cleanly; anything
        // else is an unknown field and is skipped into unknown_fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.ClientCmdID)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.ClientCmdID)
  return false;
#undef DO_
}
// Writes set fields (and any preserved unknown fields) to `output` in wire
// format. Relies on sizes cached by a prior ByteSize() call.
void ClientCmdID::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.ClientCmdID)
  // optional int64 wall_time = 1;
  if (has_wall_time()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(1, this->wall_time(), output);
  }
  // optional int64 random = 2;
  if (has_random()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->random(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.ClientCmdID)
}
// Flat-buffer variant of SerializeWithCachedSizes: writes directly into
// `target` (caller guarantees capacity >= GetCachedSize()) and returns a
// pointer just past the last written byte.
::google::protobuf::uint8* ClientCmdID::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ClientCmdID)
  // optional int64 wall_time = 1;
  if (has_wall_time()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(1, this->wall_time(), target);
  }
  // optional int64 random = 2;
  if (has_random()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->random(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ClientCmdID)
  return target;
}
// Computes the exact serialized size in bytes: 1 tag byte plus varint size for
// each set field, plus unknown fields. Caches the result in _cached_size_ for
// the subsequent SerializeWithCachedSizes* calls.
int ClientCmdID::ByteSize() const {
  int total_size = 0;
  // Skip the per-field checks entirely when no field in the first has-bit
  // word is set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional int64 wall_time = 1;
    if (has_wall_time()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->wall_time());
    }
    // optional int64 random = 2;
    if (has_random()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->random());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Reflection-based MergeFrom overload: downcasts when `from` is actually a
// ClientCmdID (fast path), otherwise falls back to the generic
// reflection-driven merge. Aborts if `from` aliases `this`.
void ClientCmdID::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const ClientCmdID* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const ClientCmdID*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed MergeFrom: copies each field of `from` that is set, leaving unset
// fields of `from` untouched here; unknown fields are appended.
void ClientCmdID::MergeFrom(const ClientCmdID& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_wall_time()) {
      set_wall_time(from.wall_time());
    }
    if (from.has_random()) {
      set_random(from.random());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// Reflection-based CopyFrom: clear-then-merge; self-assignment is a no-op.
void ClientCmdID::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// Typed CopyFrom: clear-then-merge; self-assignment is a no-op.
void ClientCmdID::CopyFrom(const ClientCmdID& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// Always initialized: the message declares no required fields.
bool ClientCmdID::IsInitialized() const {
  return true;
}
// O(1) member-wise swap of all state (fields, has-bits, unknown fields,
// cached size) with `other`; no-op on self-swap.
void ClientCmdID::Swap(ClientCmdID* other) {
  if (other != this) {
    std::swap(wall_time_, other->wall_time_);
    std::swap(random_, other->random_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
// Returns the descriptor/reflection pair for this type, lazily assigning the
// file's descriptor globals first.
::google::protobuf::Metadata ClientCmdID::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = ClientCmdID_descriptor_;
  metadata.reflection = ClientCmdID_reflection_;
  return metadata;
}
// ===================================================================
// RequestHeader
// Out-of-line definitions for the in-class-initialized field-number
// constants (required by the standard when the constants are odr-used;
// MSVC rejects the redefinition, hence the guard).
#ifndef _MSC_VER
const int RequestHeader::kTimestampFieldNumber;
const int RequestHeader::kCmdIdFieldNumber;
const int RequestHeader::kKeyFieldNumber;
const int RequestHeader::kEndKeyFieldNumber;
const int RequestHeader::kUserFieldNumber;
const int RequestHeader::kReplicaFieldNumber;
const int RequestHeader::kRaftIdFieldNumber;
const int RequestHeader::kUserPriorityFieldNumber;
const int RequestHeader::kTxnFieldNumber;
const int RequestHeader::kReadConsistencyFieldNumber;
#endif  // !_MSC_VER
RequestHeader::RequestHeader()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.RequestHeader)
}
void RequestHeader::InitAsDefaultInstance() {
timestamp_ = const_cast< ::cockroach::proto::Timestamp*>(&::cockroach::proto::Timestamp::default_instance());
cmd_id_ = const_cast< ::cockroach::proto::ClientCmdID*>(&::cockroach::proto::ClientCmdID::default_instance());
replica_ = const_cast< ::cockroach::proto::Replica*>(&::cockroach::proto::Replica::default_instance());
txn_ = const_cast< ::cockroach::proto::Transaction*>(&::cockroach::proto::Transaction::default_instance());
}
RequestHeader::RequestHeader(const RequestHeader& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.RequestHeader)
}
void RequestHeader::SharedCtor() {
::google::protobuf::internal::GetEmptyString();
_cached_size_ = 0;
timestamp_ = NULL;
cmd_id_ = NULL;
key_ = const_cast< ::std::string*>(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
end_key_ = const_cast< ::std::string*>(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
user_ = const_cast< ::std::string*>(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
replica_ = NULL;
raft_id_ = GOOGLE_LONGLONG(0);
user_priority_ = 1;
txn_ = NULL;
read_consistency_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
RequestHeader::~RequestHeader() {
// @@protoc_insertion_point(destructor:cockroach.proto.RequestHeader)
SharedDtor();
}
void RequestHeader::SharedDtor() {
if (key_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
delete key_;
}
if (end_key_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
delete end_key_;
}
if (user_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
delete user_;
}
if (this != default_instance_) {
delete timestamp_;
delete cmd_id_;
delete replica_;
delete txn_;
}
}
void RequestHeader::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* RequestHeader::descriptor() {
protobuf_AssignDescriptorsOnce();
return RequestHeader_descriptor_;
}
const RequestHeader& RequestHeader::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
RequestHeader* RequestHeader::default_instance_ = NULL;
RequestHeader* RequestHeader::New() const {
return new RequestHeader;
}
void RequestHeader::Clear() {
if (_has_bits_[0 / 32] & 255) {
if (has_timestamp()) {
if (timestamp_ != NULL) timestamp_->::cockroach::proto::Timestamp::Clear();
}
if (has_cmd_id()) {
if (cmd_id_ != NULL) cmd_id_->::cockroach::proto::ClientCmdID::Clear();
}
if (has_key()) {
if (key_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
key_->clear();
}
}
if (has_end_key()) {
if (end_key_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
end_key_->clear();
}
}
if (has_user()) {
if (user_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
user_->clear();
}
}
if (has_replica()) {
if (replica_ != NULL) replica_->::cockroach::proto::Replica::Clear();
}
raft_id_ = GOOGLE_LONGLONG(0);
user_priority_ = 1;
}
if (_has_bits_[8 / 32] & 768) {
if (has_txn()) {
if (txn_ != NULL) txn_->::cockroach::proto::Transaction::Clear();
}
read_consistency_ = 0;
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
// Wire-format parser. Reads tag/value pairs until EOF or an end-group tag.
// Each case predicts the next expected tag (fields usually arrive in field-
// number order) and jumps straight to its parse label, skipping the switch.
// Unknown fields are preserved in the UnknownFieldSet rather than dropped.
bool RequestHeader::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
// Bail to `failure` on any read error.
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.RequestHeader)
  for (;;) {
    // Cutoff 127: tags above one varint byte take the `handle_unusual` path.
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.Timestamp timestamp = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_timestamp()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(18)) goto parse_cmd_id;
        break;
      }
      // optional .cockroach.proto.ClientCmdID cmd_id = 2;
      case 2: {
        if (tag == 18) {
         parse_cmd_id:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_cmd_id()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(26)) goto parse_key;
        break;
      }
      // optional bytes key = 3;
      case 3: {
        if (tag == 26) {
         parse_key:
          DO_(::google::protobuf::internal::WireFormatLite::ReadBytes(
                input, this->mutable_key()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(34)) goto parse_end_key;
        break;
      }
      // optional bytes end_key = 4;
      case 4: {
        if (tag == 34) {
         parse_end_key:
          DO_(::google::protobuf::internal::WireFormatLite::ReadBytes(
                input, this->mutable_end_key()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(42)) goto parse_user;
        break;
      }
      // optional string user = 5;
      case 5: {
        if (tag == 42) {
         parse_user:
          DO_(::google::protobuf::internal::WireFormatLite::ReadString(
                input, this->mutable_user()));
          // `user` is a proto `string`, so its bytes must be valid UTF-8.
          ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField(
            this->user().data(), this->user().length(),
            ::google::protobuf::internal::WireFormat::PARSE,
            "user");
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(50)) goto parse_replica;
        break;
      }
      // optional .cockroach.proto.Replica replica = 6;
      case 6: {
        if (tag == 50) {
         parse_replica:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_replica()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(56)) goto parse_raft_id;
        break;
      }
      // optional int64 raft_id = 7;
      case 7: {
        if (tag == 56) {
         parse_raft_id:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &raft_id_)));
          set_has_raft_id();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(64)) goto parse_user_priority;
        break;
      }
      // optional int32 user_priority = 8 [default = 1];
      case 8: {
        if (tag == 64) {
         parse_user_priority:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   ::google::protobuf::int32, ::google::protobuf::internal::WireFormatLite::TYPE_INT32>(
                 input, &user_priority_)));
          set_has_user_priority();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(74)) goto parse_txn;
        break;
      }
      // optional .cockroach.proto.Transaction txn = 9;
      case 9: {
        if (tag == 74) {
         parse_txn:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_txn()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(80)) goto parse_read_consistency;
        break;
      }
      // optional .cockroach.proto.ReadConsistencyType read_consistency = 10;
      case 10: {
        if (tag == 80) {
         parse_read_consistency:
          int value;
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   int, ::google::protobuf::internal::WireFormatLite::TYPE_ENUM>(
                 input, &value)));
          // Unrecognized enum values go to unknown fields, not silently lost.
          if (::cockroach::proto::ReadConsistencyType_IsValid(value)) {
            set_read_consistency(static_cast< ::cockroach::proto::ReadConsistencyType >(value));
          } else {
            mutable_unknown_fields()->AddVarint(10, value);
          }
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 (clean EOF) or an end-group tag terminates the message;
        // anything else is skipped into unknown fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.RequestHeader)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.RequestHeader)
  return false;
#undef DO_
}
// Stream serializer: writes each set field in field-number order using the
// sizes cached by the preceding ByteSize() call, then any unknown fields.
void RequestHeader::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.RequestHeader)
  // optional .cockroach.proto.Timestamp timestamp = 1;
  if (has_timestamp()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->timestamp(), output);
  }
  // optional .cockroach.proto.ClientCmdID cmd_id = 2;
  if (has_cmd_id()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      2, this->cmd_id(), output);
  }
  // optional bytes key = 3;
  if (has_key()) {
    ::google::protobuf::internal::WireFormatLite::WriteBytesMaybeAliased(
      3, this->key(), output);
  }
  // optional bytes end_key = 4;
  if (has_end_key()) {
    ::google::protobuf::internal::WireFormatLite::WriteBytesMaybeAliased(
      4, this->end_key(), output);
  }
  // optional string user = 5;
  if (has_user()) {
    // Enforce UTF-8 on the way out as well as on parse.
    ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField(
      this->user().data(), this->user().length(),
      ::google::protobuf::internal::WireFormat::SERIALIZE,
      "user");
    ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
      5, this->user(), output);
  }
  // optional .cockroach.proto.Replica replica = 6;
  if (has_replica()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      6, this->replica(), output);
  }
  // optional int64 raft_id = 7;
  if (has_raft_id()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(7, this->raft_id(), output);
  }
  // optional int32 user_priority = 8 [default = 1];
  if (has_user_priority()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt32(8, this->user_priority(), output);
  }
  // optional .cockroach.proto.Transaction txn = 9;
  if (has_txn()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      9, this->txn(), output);
  }
  // optional .cockroach.proto.ReadConsistencyType read_consistency = 10;
  if (has_read_consistency()) {
    ::google::protobuf::internal::WireFormatLite::WriteEnum(
      10, this->read_consistency(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.RequestHeader)
}
// Flat-buffer serializer: identical field order to the stream variant, but
// writes directly into `target` and returns the advanced pointer. The
// caller must have reserved ByteSize() bytes.
::google::protobuf::uint8* RequestHeader::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.RequestHeader)
  // optional .cockroach.proto.Timestamp timestamp = 1;
  if (has_timestamp()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->timestamp(), target);
  }
  // optional .cockroach.proto.ClientCmdID cmd_id = 2;
  if (has_cmd_id()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        2, this->cmd_id(), target);
  }
  // optional bytes key = 3;
  if (has_key()) {
    target =
      ::google::protobuf::internal::WireFormatLite::WriteBytesToArray(
        3, this->key(), target);
  }
  // optional bytes end_key = 4;
  if (has_end_key()) {
    target =
      ::google::protobuf::internal::WireFormatLite::WriteBytesToArray(
        4, this->end_key(), target);
  }
  // optional string user = 5;
  if (has_user()) {
    ::google::protobuf::internal::WireFormat::VerifyUTF8StringNamedField(
      this->user().data(), this->user().length(),
      ::google::protobuf::internal::WireFormat::SERIALIZE,
      "user");
    target =
      ::google::protobuf::internal::WireFormatLite::WriteStringToArray(
        5, this->user(), target);
  }
  // optional .cockroach.proto.Replica replica = 6;
  if (has_replica()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        6, this->replica(), target);
  }
  // optional int64 raft_id = 7;
  if (has_raft_id()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(7, this->raft_id(), target);
  }
  // optional int32 user_priority = 8 [default = 1];
  if (has_user_priority()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt32ToArray(8, this->user_priority(), target);
  }
  // optional .cockroach.proto.Transaction txn = 9;
  if (has_txn()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        9, this->txn(), target);
  }
  // optional .cockroach.proto.ReadConsistencyType read_consistency = 10;
  if (has_read_consistency()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteEnumToArray(
      10, this->read_consistency(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.RequestHeader)
  return target;
}
// Computes the serialized size. The leading `1 +` on each field is its
// one-byte tag (all field numbers here fit in a single varint byte). The
// result is cached for the subsequent serialize pass.
int RequestHeader::ByteSize() const {
  int total_size = 0;
  // Fields with has-bits 0-7.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.Timestamp timestamp = 1;
    if (has_timestamp()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->timestamp());
    }
    // optional .cockroach.proto.ClientCmdID cmd_id = 2;
    if (has_cmd_id()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->cmd_id());
    }
    // optional bytes key = 3;
    if (has_key()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::BytesSize(
          this->key());
    }
    // optional bytes end_key = 4;
    if (has_end_key()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::BytesSize(
          this->end_key());
    }
    // optional string user = 5;
    if (has_user()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::StringSize(
          this->user());
    }
    // optional .cockroach.proto.Replica replica = 6;
    if (has_replica()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->replica());
    }
    // optional int64 raft_id = 7;
    if (has_raft_id()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->raft_id());
    }
    // optional int32 user_priority = 8 [default = 1];
    if (has_user_priority()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int32Size(
          this->user_priority());
    }
  }
  // Fields with has-bits 8-15 (txn, read_consistency).
  if (_has_bits_[8 / 32] & (0xffu << (8 % 32))) {
    // optional .cockroach.proto.Transaction txn = 9;
    if (has_txn()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->txn());
    }
    // optional .cockroach.proto.ReadConsistencyType read_consistency = 10;
    if (has_read_consistency()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::EnumSize(this->read_consistency());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Reflection-based merge entry point: downcast to the concrete type when
// possible, otherwise fall back to the slow reflective merge.
void RequestHeader::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const RequestHeader* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const RequestHeader*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed merge: scalar/string fields are overwritten; sub-message fields are
// merged recursively. Each has-bit word is checked once as a fast path.
void RequestHeader::MergeFrom(const RequestHeader& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_timestamp()) {
      mutable_timestamp()->::cockroach::proto::Timestamp::MergeFrom(from.timestamp());
    }
    if (from.has_cmd_id()) {
      mutable_cmd_id()->::cockroach::proto::ClientCmdID::MergeFrom(from.cmd_id());
    }
    if (from.has_key()) {
      set_key(from.key());
    }
    if (from.has_end_key()) {
      set_end_key(from.end_key());
    }
    if (from.has_user()) {
      set_user(from.user());
    }
    if (from.has_replica()) {
      mutable_replica()->::cockroach::proto::Replica::MergeFrom(from.replica());
    }
    if (from.has_raft_id()) {
      set_raft_id(from.raft_id());
    }
    if (from.has_user_priority()) {
      set_user_priority(from.user_priority());
    }
  }
  if (from._has_bits_[8 / 32] & (0xffu << (8 % 32))) {
    if (from.has_txn()) {
      mutable_txn()->::cockroach::proto::Transaction::MergeFrom(from.txn());
    }
    if (from.has_read_consistency()) {
      set_read_consistency(from.read_consistency());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom == Clear + MergeFrom (reflective variant).
void RequestHeader::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// CopyFrom == Clear + MergeFrom (typed variant).
void RequestHeader::CopyFrom(const RequestHeader& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// Every field is optional, so a RequestHeader is always initialized.
bool RequestHeader::IsInitialized() const {
  return true;
}
// O(1) member-wise swap of all field storage and bookkeeping state.
void RequestHeader::Swap(RequestHeader* other) {
  if (other != this) {
    std::swap(timestamp_, other->timestamp_);
    std::swap(cmd_id_, other->cmd_id_);
    std::swap(key_, other->key_);
    std::swap(end_key_, other->end_key_);
    std::swap(user_, other->user_);
    std::swap(replica_, other->replica_);
    std::swap(raft_id_, other->raft_id_);
    std::swap(user_priority_, other->user_priority_);
    std::swap(txn_, other->txn_);
    std::swap(read_consistency_, other->read_consistency_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
// Returns the descriptor/reflection pair for this generated type.
::google::protobuf::Metadata RequestHeader::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = RequestHeader_descriptor_;
  metadata.reflection = RequestHeader_reflection_;
  return metadata;
}
// ===================================================================
// ResponseHeader: lifecycle, default-instance plumbing, and Clear().
// Out-of-line field-number constant definitions (MSVC rejects them).
#ifndef _MSC_VER
const int ResponseHeader::kErrorFieldNumber;
const int ResponseHeader::kTimestampFieldNumber;
const int ResponseHeader::kTxnFieldNumber;
#endif  // !_MSC_VER
ResponseHeader::ResponseHeader()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.ResponseHeader)
}
// Default instance borrows its sub-messages' default instances.
void ResponseHeader::InitAsDefaultInstance() {
  error_ = const_cast< ::cockroach::proto::Error*>(&::cockroach::proto::Error::default_instance());
  timestamp_ = const_cast< ::cockroach::proto::Timestamp*>(&::cockroach::proto::Timestamp::default_instance());
  txn_ = const_cast< ::cockroach::proto::Transaction*>(&::cockroach::proto::Transaction::default_instance());
}
ResponseHeader::ResponseHeader(const ResponseHeader& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.ResponseHeader)
}
// Shared by both constructors: all sub-message fields start NULL (lazy).
void ResponseHeader::SharedCtor() {
  _cached_size_ = 0;
  error_ = NULL;
  timestamp_ = NULL;
  txn_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
ResponseHeader::~ResponseHeader() {
  // @@protoc_insertion_point(destructor:cockroach.proto.ResponseHeader)
  SharedDtor();
}
// The default instance's sub-messages are borrowed, so only delete when
// this is a regular instance.
void ResponseHeader::SharedDtor() {
  if (this != default_instance_) {
    delete error_;
    delete timestamp_;
    delete txn_;
  }
}
// Stores a byte size computed elsewhere; guarded for benign data races.
void ResponseHeader::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* ResponseHeader::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ResponseHeader_descriptor_;
}
// Lazily registers the file's descriptors before first use.
const ResponseHeader& ResponseHeader::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
// Singleton default instance, created during descriptor registration.
ResponseHeader* ResponseHeader::default_instance_ = NULL;
ResponseHeader* ResponseHeader::New() const {
  return new ResponseHeader;
}
// Reset all three sub-message fields (mask 7 = has-bits 0-2).
void ResponseHeader::Clear() {
  if (_has_bits_[0 / 32] & 7) {
    if (has_error()) {
      if (error_ != NULL) error_->::cockroach::proto::Error::Clear();
    }
    if (has_timestamp()) {
      if (timestamp_ != NULL) timestamp_->::cockroach::proto::Timestamp::Clear();
    }
    if (has_txn()) {
      if (txn_ != NULL) txn_->::cockroach::proto::Transaction::Clear();
    }
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser; same predicted-tag fast-path structure as the other
// generated parsers: each case jumps straight to the next expected field.
bool ResponseHeader::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
// Bail to `failure` on any read error.
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.ResponseHeader)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.Error error = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_error()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(18)) goto parse_timestamp;
        break;
      }
      // optional .cockroach.proto.Timestamp timestamp = 2;
      case 2: {
        if (tag == 18) {
         parse_timestamp:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_timestamp()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(26)) goto parse_txn;
        break;
      }
      // optional .cockroach.proto.Transaction txn = 3;
      case 3: {
        if (tag == 26) {
         parse_txn:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_txn()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 (EOF) or an end-group tag ends the message; otherwise the
        // field is preserved in unknown fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.ResponseHeader)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.ResponseHeader)
  return false;
#undef DO_
}
// Stream serializer: set fields in field-number order, then unknown fields.
void ResponseHeader::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.ResponseHeader)
  // optional .cockroach.proto.Error error = 1;
  if (has_error()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->error(), output);
  }
  // optional .cockroach.proto.Timestamp timestamp = 2;
  if (has_timestamp()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      2, this->timestamp(), output);
  }
  // optional .cockroach.proto.Transaction txn = 3;
  if (has_txn()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      3, this->txn(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.ResponseHeader)
}
// Flat-buffer serializer; caller must have reserved ByteSize() bytes.
::google::protobuf::uint8* ResponseHeader::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ResponseHeader)
  // optional .cockroach.proto.Error error = 1;
  if (has_error()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->error(), target);
  }
  // optional .cockroach.proto.Timestamp timestamp = 2;
  if (has_timestamp()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        2, this->timestamp(), target);
  }
  // optional .cockroach.proto.Transaction txn = 3;
  if (has_txn()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        3, this->txn(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ResponseHeader)
  return target;
}
// Serialized-size computation; `1 +` is each field's one-byte tag. The
// result is cached for the subsequent serialize pass.
int ResponseHeader::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.Error error = 1;
    if (has_error()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->error());
    }
    // optional .cockroach.proto.Timestamp timestamp = 2;
    if (has_timestamp()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->timestamp());
    }
    // optional .cockroach.proto.Transaction txn = 3;
    if (has_txn()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->txn());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Reflection-based merge entry point: downcast to the concrete type when
// possible, otherwise fall back to the slow reflective merge.
void ResponseHeader::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const ResponseHeader* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const ResponseHeader*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed merge: sub-message fields are merged recursively.
void ResponseHeader::MergeFrom(const ResponseHeader& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_error()) {
      mutable_error()->::cockroach::proto::Error::MergeFrom(from.error());
    }
    if (from.has_timestamp()) {
      mutable_timestamp()->::cockroach::proto::Timestamp::MergeFrom(from.timestamp());
    }
    if (from.has_txn()) {
      mutable_txn()->::cockroach::proto::Transaction::MergeFrom(from.txn());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom == Clear + MergeFrom (reflective variant).
void ResponseHeader::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// CopyFrom == Clear + MergeFrom (typed variant).
void ResponseHeader::CopyFrom(const ResponseHeader& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// Every field is optional, so a ResponseHeader is always initialized.
bool ResponseHeader::IsInitialized() const {
  return true;
}
// O(1) member-wise swap of all field storage and bookkeeping state.
void ResponseHeader::Swap(ResponseHeader* other) {
  if (other != this) {
    std::swap(error_, other->error_);
    std::swap(timestamp_, other->timestamp_);
    std::swap(txn_, other->txn_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
// Returns the descriptor/reflection pair for this generated type.
::google::protobuf::Metadata ResponseHeader::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = ResponseHeader_descriptor_;
  metadata.reflection = ResponseHeader_reflection_;
  return metadata;
}
// ===================================================================
// GetRequest: single-field message (header); lifecycle and Clear().
#ifndef _MSC_VER
const int GetRequest::kHeaderFieldNumber;
#endif  // !_MSC_VER
GetRequest::GetRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.GetRequest)
}
// Default instance borrows the RequestHeader default instance.
void GetRequest::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
GetRequest::GetRequest(const GetRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.GetRequest)
}
// Shared by both constructors; header is allocated lazily.
void GetRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
GetRequest::~GetRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.GetRequest)
  SharedDtor();
}
// Only delete the owned header; the default instance borrows its header.
void GetRequest::SharedDtor() {
  if (this != default_instance_) {
    delete header_;
  }
}
// Stores a byte size computed elsewhere; guarded for benign data races.
void GetRequest::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* GetRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return GetRequest_descriptor_;
}
// Lazily registers the file's descriptors before first use.
const GetRequest& GetRequest::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
// Singleton default instance, created during descriptor registration.
GetRequest* GetRequest::default_instance_ = NULL;
GetRequest* GetRequest::New() const {
  return new GetRequest;
}
// Reset the single header field and all bookkeeping state.
void GetRequest::Clear() {
  if (has_header()) {
    if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser for the single `header` field; unknown fields are
// preserved, tag 0 / end-group terminates.
bool GetRequest::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
// Bail to `failure` on any read error.
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.GetRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.GetRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.GetRequest)
  return false;
#undef DO_
}
// Stream serializer: the header field (if set), then unknown fields.
void GetRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.GetRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.GetRequest)
}
// Flat-buffer serializer; caller must have reserved ByteSize() bytes.
::google::protobuf::uint8* GetRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.GetRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.GetRequest)
  return target;
}
// Serialized-size computation; `1 +` is the header field's one-byte tag.
// The result is cached for the subsequent serialize pass.
int GetRequest::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Reflection-based merge entry point: downcast to the concrete type when
// possible, otherwise fall back to the slow reflective merge.
void GetRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const GetRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const GetRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed merge: the header sub-message is merged recursively.
void GetRequest::MergeFrom(const GetRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom == Clear + MergeFrom (reflective variant).
void GetRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// CopyFrom == Clear + MergeFrom (typed variant).
void GetRequest::CopyFrom(const GetRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// Every field is optional, so a GetRequest is always initialized.
bool GetRequest::IsInitialized() const {
  return true;
}
// O(1) member-wise swap of all field storage and bookkeeping state.
void GetRequest::Swap(GetRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
// Returns the descriptor/reflection pair for this generated type.
::google::protobuf::Metadata GetRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = GetRequest_descriptor_;
  metadata.reflection = GetRequest_reflection_;
  return metadata;
}
// ===================================================================
// GetResponse: header + value; lifecycle and Clear(). (Its parser begins
// below and continues past this section.)
#ifndef _MSC_VER
const int GetResponse::kHeaderFieldNumber;
const int GetResponse::kValueFieldNumber;
#endif  // !_MSC_VER
GetResponse::GetResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.GetResponse)
}
// Default instance borrows its sub-messages' default instances.
void GetResponse::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
  value_ = const_cast< ::cockroach::proto::Value*>(&::cockroach::proto::Value::default_instance());
}
GetResponse::GetResponse(const GetResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.GetResponse)
}
// Shared by both constructors; sub-messages are allocated lazily.
void GetResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  value_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
GetResponse::~GetResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.GetResponse)
  SharedDtor();
}
// The default instance borrows its sub-messages; only regular instances
// own and delete them.
void GetResponse::SharedDtor() {
  if (this != default_instance_) {
    delete header_;
    delete value_;
  }
}
// Stores a byte size computed elsewhere; guarded for benign data races.
void GetResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* GetResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return GetResponse_descriptor_;
}
// Lazily registers the file's descriptors before first use.
const GetResponse& GetResponse::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
// Singleton default instance, created during descriptor registration.
GetResponse* GetResponse::default_instance_ = NULL;
GetResponse* GetResponse::New() const {
  return new GetResponse;
}
// Reset both sub-message fields (mask 3 = has-bits 0-1).
void GetResponse::Clear() {
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
    }
    if (has_value()) {
      if (value_ != NULL) value_->::cockroach::proto::Value::Clear();
    }
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool GetResponse::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.GetResponse)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.ResponseHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(18)) goto parse_value;
break;
}
// optional .cockroach.proto.Value value = 2;
case 2: {
if (tag == 18) {
parse_value:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_value()));
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.GetResponse)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.GetResponse)
return false;
#undef DO_
}
void GetResponse::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.GetResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
// optional .cockroach.proto.Value value = 2;
if (has_value()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, this->value(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.GetResponse)
}
::google::protobuf::uint8* GetResponse::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.GetResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
// optional .cockroach.proto.Value value = 2;
if (has_value()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
2, this->value(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.GetResponse)
return target;
}
int GetResponse::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
// optional .cockroach.proto.Value value = 2;
if (has_value()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->value());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void GetResponse::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const GetResponse* source =
::google::protobuf::internal::dynamic_cast_if_available<const GetResponse*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void GetResponse::MergeFrom(const GetResponse& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
}
if (from.has_value()) {
mutable_value()->::cockroach::proto::Value::MergeFrom(from.value());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void GetResponse::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void GetResponse::CopyFrom(const GetResponse& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool GetResponse::IsInitialized() const {
return true;
}
void GetResponse::Swap(GetResponse* other) {
if (other != this) {
std::swap(header_, other->header_);
std::swap(value_, other->value_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata GetResponse::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = GetResponse_descriptor_;
metadata.reflection = GetResponse_reflection_;
return metadata;
}
// ===================================================================
// PutRequest: protoc-generated message implementation (construction and
// destruction half). NOTE(review): generated code — regenerate from the
// .proto rather than editing by hand.
#ifndef _MSC_VER
const int PutRequest::kHeaderFieldNumber;
const int PutRequest::kValueFieldNumber;
#endif  // !_MSC_VER
PutRequest::PutRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.PutRequest)
}
void PutRequest::InitAsDefaultInstance() {
  // The default instance aliases the sub-messages' default instances
  // instead of owning them; SharedDtor skips deletion for it.
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
  value_ = const_cast< ::cockroach::proto::Value*>(&::cockroach::proto::Value::default_instance());
}
PutRequest::PutRequest(const PutRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.PutRequest)
}
void PutRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  value_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
PutRequest::~PutRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.PutRequest)
  SharedDtor();
}
void PutRequest::SharedDtor() {
  // The default instance does not own its sub-message pointers (see
  // InitAsDefaultInstance), so only delete them for regular instances.
  if (this != default_instance_) {
    delete header_;
    delete value_;
  }
}
void PutRequest::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;<|fim▁hole|>}
// PutRequest: protoc-generated parse/serialize/merge half.
// NOTE(review): generated code — regenerate from the .proto rather than
// editing by hand.
const ::google::protobuf::Descriptor* PutRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return PutRequest_descriptor_;
}
const PutRequest& PutRequest::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
PutRequest* PutRequest::default_instance_ = NULL;
PutRequest* PutRequest::New() const {
  return new PutRequest;
}
void PutRequest::Clear() {
  // Mask 3 covers the has-bits of the two optional fields.
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
    }
    if (has_value()) {
      if (value_ != NULL) value_->::cockroach::proto::Value::Clear();
    }
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool PutRequest::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.PutRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        // Fields usually arrive in field-number order; jump ahead if the
        // expected next tag is seen.
        if (input->ExpectTag(18)) goto parse_value;
        break;
      }
      // optional .cockroach.proto.Value value = 2;
      case 2: {
        if (tag == 18) {
         parse_value:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_value()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 or an end-group tag terminates the message; anything else
        // unknown is preserved in unknown_fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.PutRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.PutRequest)
  return false;
#undef DO_
}
void PutRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.PutRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional .cockroach.proto.Value value = 2;
  if (has_value()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      2, this->value(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.PutRequest)
}
::google::protobuf::uint8* PutRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.PutRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional .cockroach.proto.Value value = 2;
  if (has_value()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        2, this->value(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.PutRequest)
  return target;
}
int PutRequest::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional .cockroach.proto.Value value = 2;
    if (has_value()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->value());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void PutRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Fast path when `from` is actually a PutRequest; otherwise fall back
  // to reflection-based merging.
  const PutRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const PutRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void PutRequest::MergeFrom(const PutRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
    if (from.has_value()) {
      mutable_value()->::cockroach::proto::Value::MergeFrom(from.value());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void PutRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void PutRequest::CopyFrom(const PutRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool PutRequest::IsInitialized() const {
  // No required fields, so any instance is initialized.
  return true;
}
void PutRequest::Swap(PutRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(value_, other->value_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata PutRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = PutRequest_descriptor_;
  metadata.reflection = PutRequest_reflection_;
  return metadata;
}
// ===================================================================
// PutResponse: protoc-generated message implementation (single optional
// header field). NOTE(review): generated code — regenerate from the
// .proto rather than editing by hand.
#ifndef _MSC_VER
const int PutResponse::kHeaderFieldNumber;
#endif  // !_MSC_VER
PutResponse::PutResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.PutResponse)
}
void PutResponse::InitAsDefaultInstance() {
  // The default instance aliases the sub-message's default instance
  // instead of owning it; SharedDtor skips deletion for it.
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
PutResponse::PutResponse(const PutResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.PutResponse)
}
void PutResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
PutResponse::~PutResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.PutResponse)
  SharedDtor();
}
void PutResponse::SharedDtor() {
  // The default instance does not own header_ (see InitAsDefaultInstance).
  if (this != default_instance_) {
    delete header_;
  }
}
void PutResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* PutResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return PutResponse_descriptor_;
}
const PutResponse& PutResponse::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
PutResponse* PutResponse::default_instance_ = NULL;
PutResponse* PutResponse::New() const {
  return new PutResponse;
}
void PutResponse::Clear() {
  if (has_header()) {
    if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool PutResponse::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.PutResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 or an end-group tag terminates the message; anything else
        // unknown is preserved in unknown_fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.PutResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.PutResponse)
  return false;
#undef DO_
}
void PutResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.PutResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.PutResponse)
}
::google::protobuf::uint8* PutResponse::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.PutResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.PutResponse)
  return target;
}
int PutResponse::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.ResponseHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void PutResponse::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Fast path when `from` is actually a PutResponse; otherwise fall back
  // to reflection-based merging.
  const PutResponse* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const PutResponse*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void PutResponse::MergeFrom(const PutResponse& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void PutResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void PutResponse::CopyFrom(const PutResponse& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool PutResponse::IsInitialized() const {
  // No required fields, so any instance is initialized.
  return true;
}
void PutResponse::Swap(PutResponse* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata PutResponse::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = PutResponse_descriptor_;
  metadata.reflection = PutResponse_reflection_;
  return metadata;
}
// ===================================================================
// ConditionalPutRequest: protoc-generated message implementation (three
// optional fields: header, value, exp_value). NOTE(review): generated
// code — regenerate from the .proto rather than editing by hand.
#ifndef _MSC_VER
const int ConditionalPutRequest::kHeaderFieldNumber;
const int ConditionalPutRequest::kValueFieldNumber;
const int ConditionalPutRequest::kExpValueFieldNumber;
#endif  // !_MSC_VER
ConditionalPutRequest::ConditionalPutRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.ConditionalPutRequest)
}
void ConditionalPutRequest::InitAsDefaultInstance() {
  // The default instance aliases the sub-messages' default instances
  // instead of owning them; SharedDtor skips deletion for it.
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
  value_ = const_cast< ::cockroach::proto::Value*>(&::cockroach::proto::Value::default_instance());
  exp_value_ = const_cast< ::cockroach::proto::Value*>(&::cockroach::proto::Value::default_instance());
}
ConditionalPutRequest::ConditionalPutRequest(const ConditionalPutRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.ConditionalPutRequest)
}
void ConditionalPutRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  value_ = NULL;
  exp_value_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
ConditionalPutRequest::~ConditionalPutRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.ConditionalPutRequest)
  SharedDtor();
}
void ConditionalPutRequest::SharedDtor() {
  // The default instance does not own its sub-message pointers (see
  // InitAsDefaultInstance), so only delete them for regular instances.
  if (this != default_instance_) {
    delete header_;
    delete value_;
    delete exp_value_;
  }
}
void ConditionalPutRequest::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* ConditionalPutRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ConditionalPutRequest_descriptor_;
}
const ConditionalPutRequest& ConditionalPutRequest::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
ConditionalPutRequest* ConditionalPutRequest::default_instance_ = NULL;
ConditionalPutRequest* ConditionalPutRequest::New() const {
  return new ConditionalPutRequest;
}
void ConditionalPutRequest::Clear() {
  // Mask 7 covers the has-bits of the three optional fields.
  if (_has_bits_[0 / 32] & 7) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
    }
    if (has_value()) {
      if (value_ != NULL) value_->::cockroach::proto::Value::Clear();
    }
    if (has_exp_value()) {
      if (exp_value_ != NULL) exp_value_->::cockroach::proto::Value::Clear();
    }
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool ConditionalPutRequest::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.ConditionalPutRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        // Fields usually arrive in field-number order; jump ahead if the
        // expected next tag is seen.
        if (input->ExpectTag(18)) goto parse_value;
        break;
      }
      // optional .cockroach.proto.Value value = 2;
      case 2: {
        if (tag == 18) {
         parse_value:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_value()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(26)) goto parse_exp_value;
        break;
      }
      // optional .cockroach.proto.Value exp_value = 3;
      case 3: {
        if (tag == 26) {
         parse_exp_value:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_exp_value()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 or an end-group tag terminates the message; anything else
        // unknown is preserved in unknown_fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.ConditionalPutRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.ConditionalPutRequest)
  return false;
#undef DO_
}
void ConditionalPutRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.ConditionalPutRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional .cockroach.proto.Value value = 2;
  if (has_value()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      2, this->value(), output);
  }
  // optional .cockroach.proto.Value exp_value = 3;
  if (has_exp_value()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      3, this->exp_value(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.ConditionalPutRequest)
}
::google::protobuf::uint8* ConditionalPutRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ConditionalPutRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional .cockroach.proto.Value value = 2;
  if (has_value()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        2, this->value(), target);
  }
  // optional .cockroach.proto.Value exp_value = 3;
  if (has_exp_value()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        3, this->exp_value(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ConditionalPutRequest)
  return target;
}
int ConditionalPutRequest::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional .cockroach.proto.Value value = 2;
    if (has_value()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->value());
    }
    // optional .cockroach.proto.Value exp_value = 3;
    if (has_exp_value()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->exp_value());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void ConditionalPutRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Fast path when `from` is actually a ConditionalPutRequest; otherwise
  // fall back to reflection-based merging.
  const ConditionalPutRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const ConditionalPutRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void ConditionalPutRequest::MergeFrom(const ConditionalPutRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
    if (from.has_value()) {
      mutable_value()->::cockroach::proto::Value::MergeFrom(from.value());
    }
    if (from.has_exp_value()) {
      mutable_exp_value()->::cockroach::proto::Value::MergeFrom(from.exp_value());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void ConditionalPutRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void ConditionalPutRequest::CopyFrom(const ConditionalPutRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool ConditionalPutRequest::IsInitialized() const {
  // No required fields, so any instance is initialized.
  return true;
}
void ConditionalPutRequest::Swap(ConditionalPutRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(value_, other->value_);
    std::swap(exp_value_, other->exp_value_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata ConditionalPutRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = ConditionalPutRequest_descriptor_;
  metadata.reflection = ConditionalPutRequest_reflection_;
  return metadata;
}
// ===================================================================
// ConditionalPutResponse: protoc-generated message implementation
// (single optional header field). NOTE(review): generated code —
// regenerate from the .proto rather than editing by hand.
#ifndef _MSC_VER
const int ConditionalPutResponse::kHeaderFieldNumber;
#endif  // !_MSC_VER
ConditionalPutResponse::ConditionalPutResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.ConditionalPutResponse)
}
void ConditionalPutResponse::InitAsDefaultInstance() {
  // The default instance aliases the sub-message's default instance
  // instead of owning it; SharedDtor skips deletion for it.
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
ConditionalPutResponse::ConditionalPutResponse(const ConditionalPutResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.ConditionalPutResponse)
}
void ConditionalPutResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
ConditionalPutResponse::~ConditionalPutResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.ConditionalPutResponse)
  SharedDtor();
}
void ConditionalPutResponse::SharedDtor() {
  // The default instance does not own header_ (see InitAsDefaultInstance).
  if (this != default_instance_) {
    delete header_;
  }
}
void ConditionalPutResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* ConditionalPutResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ConditionalPutResponse_descriptor_;
}
const ConditionalPutResponse& ConditionalPutResponse::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
ConditionalPutResponse* ConditionalPutResponse::default_instance_ = NULL;
ConditionalPutResponse* ConditionalPutResponse::New() const {
  return new ConditionalPutResponse;
}
void ConditionalPutResponse::Clear() {
  if (has_header()) {
    if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool ConditionalPutResponse::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.ConditionalPutResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // Tag 0 or an end-group tag terminates the message; anything else
        // unknown is preserved in unknown_fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.ConditionalPutResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.ConditionalPutResponse)
  return false;
#undef DO_
}
void ConditionalPutResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.ConditionalPutResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.ConditionalPutResponse)
}
::google::protobuf::uint8* ConditionalPutResponse::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ConditionalPutResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ConditionalPutResponse)
return target;
}
int ConditionalPutResponse::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void ConditionalPutResponse::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const ConditionalPutResponse* source =
::google::protobuf::internal::dynamic_cast_if_available<const ConditionalPutResponse*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void ConditionalPutResponse::MergeFrom(const ConditionalPutResponse& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void ConditionalPutResponse::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void ConditionalPutResponse::CopyFrom(const ConditionalPutResponse& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool ConditionalPutResponse::IsInitialized() const {
return true;
}
void ConditionalPutResponse::Swap(ConditionalPutResponse* other) {
if (other != this) {
std::swap(header_, other->header_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata ConditionalPutResponse::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = ConditionalPutResponse_descriptor_;
metadata.reflection = ConditionalPutResponse_reflection_;
return metadata;
}
// ===================================================================
// ---------------------------------------------------------------------------
// IncrementRequest — implementation generated by protoc from
// cockroach/proto/api.proto.
// NOTE(review): machine-generated code; do not hand-edit — regenerate from
// the .proto definition instead.
// Fields: optional .cockroach.proto.RequestHeader header = 1;
//         optional int64 increment = 2.
// ---------------------------------------------------------------------------
#ifndef _MSC_VER
const int IncrementRequest::kHeaderFieldNumber;
const int IncrementRequest::kIncrementFieldNumber;
#endif  // !_MSC_VER
IncrementRequest::IncrementRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.IncrementRequest)
}
void IncrementRequest::InitAsDefaultInstance() {
  // The default instance aliases the submessage's shared default instance;
  // SharedDtor accounts for this.
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
IncrementRequest::IncrementRequest(const IncrementRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.IncrementRequest)
}
void IncrementRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  increment_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
IncrementRequest::~IncrementRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.IncrementRequest)
  SharedDtor();
}
void IncrementRequest::SharedDtor() {
  // The default instance's header_ points at a shared default; never delete it.
  if (this != default_instance_) {
    delete header_;
  }
}
void IncrementRequest::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* IncrementRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return IncrementRequest_descriptor_;
}
const IncrementRequest& IncrementRequest::default_instance() {
  // Lazily registers the whole .proto file on first use.
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
IncrementRequest* IncrementRequest::default_instance_ = NULL;
IncrementRequest* IncrementRequest::New() const {
  return new IncrementRequest;
}
void IncrementRequest::Clear() {
  // Mask 3 covers has-bits for fields 1 and 2.
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
    }
    increment_ = GOOGLE_LONGLONG(0);
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool IncrementRequest::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.IncrementRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        // tag 10 == (field 1 << 3) | length-delimited wire type.
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        // Fast path: fields usually arrive in order, so peek for field 2.
        if (input->ExpectTag(16)) goto parse_increment;
        break;
      }
      // optional int64 increment = 2;
      case 2: {
        // tag 16 == (field 2 << 3) | varint wire type.
        if (tag == 16) {
         parse_increment:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &increment_)));
          set_has_increment();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // End of stream / end-group terminates parsing; anything else is
        // preserved as an unknown field.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.IncrementRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.IncrementRequest)
  return false;
#undef DO_
}
void IncrementRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.IncrementRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional int64 increment = 2;
  if (has_increment()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->increment(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.IncrementRequest)
}
::google::protobuf::uint8* IncrementRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.IncrementRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional int64 increment = 2;
  if (has_increment()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->increment(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.IncrementRequest)
  return target;
}
int IncrementRequest::ByteSize() const {
  int total_size = 0;
  // Fast path: the mask skips per-field checks when none of the first 8
  // has-bits are set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional int64 increment = 2;
    if (has_increment()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->increment());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void IncrementRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Use the fast typed merge when 'from' is actually this message type;
  // otherwise fall back to reflection-based merging.
  const IncrementRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const IncrementRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void IncrementRequest::MergeFrom(const IncrementRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
    if (from.has_increment()) {
      set_increment(from.increment());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void IncrementRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void IncrementRequest::CopyFrom(const IncrementRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool IncrementRequest::IsInitialized() const {
  // No required fields in this message.
  return true;
}
void IncrementRequest::Swap(IncrementRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(increment_, other->increment_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata IncrementRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = IncrementRequest_descriptor_;
  metadata.reflection = IncrementRequest_reflection_;
  return metadata;
}
// ===================================================================
// ---------------------------------------------------------------------------
// IncrementResponse — implementation generated by protoc from
// cockroach/proto/api.proto.
// NOTE(review): machine-generated code; do not hand-edit — regenerate from
// the .proto definition instead.
// Fields: optional .cockroach.proto.ResponseHeader header = 1;
//         optional int64 new_value = 2.
// ---------------------------------------------------------------------------
#ifndef _MSC_VER
const int IncrementResponse::kHeaderFieldNumber;
const int IncrementResponse::kNewValueFieldNumber;
#endif  // !_MSC_VER
IncrementResponse::IncrementResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.IncrementResponse)
}
void IncrementResponse::InitAsDefaultInstance() {
  // The default instance aliases the submessage's shared default instance;
  // SharedDtor accounts for this.
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
IncrementResponse::IncrementResponse(const IncrementResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.IncrementResponse)
}
void IncrementResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  new_value_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
IncrementResponse::~IncrementResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.IncrementResponse)
  SharedDtor();
}
void IncrementResponse::SharedDtor() {
  // The default instance's header_ points at a shared default; never delete it.
  if (this != default_instance_) {
    delete header_;
  }
}
void IncrementResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* IncrementResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return IncrementResponse_descriptor_;
}
const IncrementResponse& IncrementResponse::default_instance() {
  // Lazily registers the whole .proto file on first use.
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
IncrementResponse* IncrementResponse::default_instance_ = NULL;
IncrementResponse* IncrementResponse::New() const {
  return new IncrementResponse;
}
void IncrementResponse::Clear() {
  // Mask 3 covers has-bits for fields 1 and 2.
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
    }
    new_value_ = GOOGLE_LONGLONG(0);
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool IncrementResponse::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.IncrementResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        // tag 10 == (field 1 << 3) | length-delimited wire type.
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        // Fast path: fields usually arrive in order, so peek for field 2.
        if (input->ExpectTag(16)) goto parse_new_value;
        break;
      }
      // optional int64 new_value = 2;
      case 2: {
        // tag 16 == (field 2 << 3) | varint wire type.
        if (tag == 16) {
         parse_new_value:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &new_value_)));
          set_has_new_value();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // End of stream / end-group terminates parsing; anything else is
        // preserved as an unknown field.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.IncrementResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.IncrementResponse)
  return false;
#undef DO_
}
void IncrementResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.IncrementResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional int64 new_value = 2;
  if (has_new_value()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->new_value(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.IncrementResponse)
}
::google::protobuf::uint8* IncrementResponse::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.IncrementResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional int64 new_value = 2;
  if (has_new_value()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->new_value(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.IncrementResponse)
  return target;
}
int IncrementResponse::ByteSize() const {
  int total_size = 0;
  // Fast path: the mask skips per-field checks when none of the first 8
  // has-bits are set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.ResponseHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional int64 new_value = 2;
    if (has_new_value()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->new_value());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void IncrementResponse::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Use the fast typed merge when 'from' is actually this message type;
  // otherwise fall back to reflection-based merging.
  const IncrementResponse* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const IncrementResponse*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void IncrementResponse::MergeFrom(const IncrementResponse& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
    }
    if (from.has_new_value()) {
      set_new_value(from.new_value());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void IncrementResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void IncrementResponse::CopyFrom(const IncrementResponse& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool IncrementResponse::IsInitialized() const {
  // No required fields in this message.
  return true;
}
void IncrementResponse::Swap(IncrementResponse* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(new_value_, other->new_value_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata IncrementResponse::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = IncrementResponse_descriptor_;
  metadata.reflection = IncrementResponse_reflection_;
  return metadata;
}
// ===================================================================
// ---------------------------------------------------------------------------
// DeleteRequest — implementation generated by protoc from
// cockroach/proto/api.proto.
// NOTE(review): machine-generated code; do not hand-edit — regenerate from
// the .proto definition instead.
// Fields: optional .cockroach.proto.RequestHeader header = 1.
// ---------------------------------------------------------------------------
#ifndef _MSC_VER
const int DeleteRequest::kHeaderFieldNumber;
#endif  // !_MSC_VER
DeleteRequest::DeleteRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.DeleteRequest)
}
void DeleteRequest::InitAsDefaultInstance() {
  // The default instance aliases the submessage's shared default instance;
  // SharedDtor accounts for this.
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
DeleteRequest::DeleteRequest(const DeleteRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.DeleteRequest)
}
void DeleteRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
DeleteRequest::~DeleteRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.DeleteRequest)
  SharedDtor();
}
void DeleteRequest::SharedDtor() {
  // The default instance's header_ points at a shared default; never delete it.
  if (this != default_instance_) {
    delete header_;
  }
}
void DeleteRequest::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* DeleteRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return DeleteRequest_descriptor_;
}
const DeleteRequest& DeleteRequest::default_instance() {
  // Lazily registers the whole .proto file on first use.
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
DeleteRequest* DeleteRequest::default_instance_ = NULL;
DeleteRequest* DeleteRequest::New() const {
  return new DeleteRequest;
}
void DeleteRequest::Clear() {
  // Clears the submessage in place (keeps the allocation) and resets has-bits.
  if (has_header()) {
    if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool DeleteRequest::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.DeleteRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        // tag 10 == (field 1 << 3) | length-delimited wire type.
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // End of stream / end-group terminates parsing; anything else is
        // preserved as an unknown field.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.DeleteRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.DeleteRequest)
  return false;
#undef DO_
}
void DeleteRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.DeleteRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.DeleteRequest)
}
::google::protobuf::uint8* DeleteRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.DeleteRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.DeleteRequest)
  return target;
}
int DeleteRequest::ByteSize() const {
  int total_size = 0;
  // Fast path: the mask skips per-field checks when none of the first 8
  // has-bits are set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void DeleteRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Use the fast typed merge when 'from' is actually this message type;
  // otherwise fall back to reflection-based merging.
  const DeleteRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const DeleteRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void DeleteRequest::MergeFrom(const DeleteRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void DeleteRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void DeleteRequest::CopyFrom(const DeleteRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool DeleteRequest::IsInitialized() const {
  // No required fields in this message.
  return true;
}
void DeleteRequest::Swap(DeleteRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata DeleteRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = DeleteRequest_descriptor_;
  metadata.reflection = DeleteRequest_reflection_;
  return metadata;
}
// ===================================================================
// ---------------------------------------------------------------------------
// DeleteResponse — implementation generated by protoc from
// cockroach/proto/api.proto.
// NOTE(review): machine-generated code; do not hand-edit — regenerate from
// the .proto definition instead.
// Fields: optional .cockroach.proto.ResponseHeader header = 1.
// ---------------------------------------------------------------------------
#ifndef _MSC_VER
const int DeleteResponse::kHeaderFieldNumber;
#endif  // !_MSC_VER
DeleteResponse::DeleteResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.DeleteResponse)
}
void DeleteResponse::InitAsDefaultInstance() {
  // The default instance aliases the submessage's shared default instance;
  // SharedDtor accounts for this.
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
DeleteResponse::DeleteResponse(const DeleteResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.DeleteResponse)
}
void DeleteResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
DeleteResponse::~DeleteResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.DeleteResponse)
  SharedDtor();
}
void DeleteResponse::SharedDtor() {
  // The default instance's header_ points at a shared default; never delete it.
  if (this != default_instance_) {
    delete header_;
  }
}
void DeleteResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* DeleteResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return DeleteResponse_descriptor_;
}
const DeleteResponse& DeleteResponse::default_instance() {
  // Lazily registers the whole .proto file on first use.
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
DeleteResponse* DeleteResponse::default_instance_ = NULL;
DeleteResponse* DeleteResponse::New() const {
  return new DeleteResponse;
}
void DeleteResponse::Clear() {
  // Clears the submessage in place (keeps the allocation) and resets has-bits.
  if (has_header()) {
    if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
bool DeleteResponse::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.DeleteResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        // tag 10 == (field 1 << 3) | length-delimited wire type.
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // End of stream / end-group terminates parsing; anything else is
        // preserved as an unknown field.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.DeleteResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.DeleteResponse)
  return false;
#undef DO_
}
void DeleteResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.DeleteResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.DeleteResponse)
}
::google::protobuf::uint8* DeleteResponse::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.DeleteResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.DeleteResponse)
  return target;
}
int DeleteResponse::ByteSize() const {
  int total_size = 0;
  // Fast path: the mask skips per-field checks when none of the first 8
  // has-bits are set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.ResponseHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
void DeleteResponse::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  // Use the fast typed merge when 'from' is actually this message type;
  // otherwise fall back to reflection-based merging.
  const DeleteResponse* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const DeleteResponse*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
void DeleteResponse::MergeFrom(const DeleteResponse& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void DeleteResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void DeleteResponse::CopyFrom(const DeleteResponse& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
bool DeleteResponse::IsInitialized() const {
  // No required fields in this message.
  return true;
}
void DeleteResponse::Swap(DeleteResponse* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata DeleteResponse::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = DeleteResponse_descriptor_;
  metadata.reflection = DeleteResponse_reflection_;
  return metadata;
}
// ===================================================================
// --- DeleteRangeRequest (generated by protoc; regenerate from the
// --- .proto rather than hand-editing this file). ---
#ifndef _MSC_VER
// Out-of-class definitions for the static const field-number constants
// (older MSVC rejects these, hence the guard).
const int DeleteRangeRequest::kHeaderFieldNumber;
const int DeleteRangeRequest::kMaxEntriesToDeleteFieldNumber;
#endif  // !_MSC_VER
DeleteRangeRequest::DeleteRangeRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.DeleteRangeRequest)
}
// Points the default instance's submessage at the submessage's own
// default instance instead of allocating one.
void DeleteRangeRequest::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
DeleteRangeRequest::DeleteRangeRequest(const DeleteRangeRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.DeleteRangeRequest)
}
// Field initialization shared by all constructors.
void DeleteRangeRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  max_entries_to_delete_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
DeleteRangeRequest::~DeleteRangeRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.DeleteRangeRequest)
  SharedDtor();
}
void DeleteRangeRequest::SharedDtor() {
  // The default instance aliases the submessage default instance (see
  // InitAsDefaultInstance), so only owned copies are deleted.
  if (this != default_instance_) {
    delete header_;
  }
}
void DeleteRangeRequest::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* DeleteRangeRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return DeleteRangeRequest_descriptor_;
}
// Lazily registers the file's descriptors on first use.
const DeleteRangeRequest& DeleteRangeRequest::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
DeleteRangeRequest* DeleteRangeRequest::default_instance_ = NULL;
DeleteRangeRequest* DeleteRangeRequest::New() const {
  return new DeleteRangeRequest;
}
void DeleteRangeRequest::Clear() {
  // Mask 3 covers the message's two declared fields (has-bits 0 and 1).
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
    }
    max_entries_to_delete_ = GOOGLE_LONGLONG(0);
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser generated by protoc: a goto-based state machine in
// which expected next tags are fast-pathed via ExpectTag, and anything
// unexpected (unknown field, end group, EOF) lands in handle_unusual.
bool DeleteRangeRequest::MergePartialFromCodedStream(
  ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.DeleteRangeRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(16)) goto parse_max_entries_to_delete;
        break;
      }
      // optional int64 max_entries_to_delete = 2;
      case 2: {
        if (tag == 16) {
         parse_max_entries_to_delete:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &max_entries_to_delete_)));
          set_has_max_entries_to_delete();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.DeleteRangeRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.DeleteRangeRequest)
  return false;
#undef DO_
}
// Writes all set fields (plus unknown fields) to the output stream using
// the size computed by the last ByteSize() call.
void DeleteRangeRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.DeleteRangeRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional int64 max_entries_to_delete = 2;
  if (has_max_entries_to_delete()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->max_entries_to_delete(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.DeleteRangeRequest)
}
// Flat-buffer variant of serialization; returns the advanced write cursor.
::google::protobuf::uint8* DeleteRangeRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.DeleteRangeRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional int64 max_entries_to_delete = 2;
  if (has_max_entries_to_delete()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->max_entries_to_delete(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.DeleteRangeRequest)
  return target;
}
// Computes the serialized size and caches it in _cached_size_.
int DeleteRangeRequest::ByteSize() const {
  int total_size = 0;
  // Bulk has-bit test: skips field checks when none of the first eight
  // fields is set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional int64 max_entries_to_delete = 2;
    if (has_max_entries_to_delete()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->max_entries_to_delete());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Type-erased merge: typed fast path when possible, reflection otherwise.
void DeleteRangeRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const DeleteRangeRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const DeleteRangeRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Field-wise merge of set fields plus unknown fields.
void DeleteRangeRequest::MergeFrom(const DeleteRangeRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
    if (from.has_max_entries_to_delete()) {
      set_max_entries_to_delete(from.max_entries_to_delete());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom = Clear + MergeFrom; self-copy is a no-op.
void DeleteRangeRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void DeleteRangeRequest::CopyFrom(const DeleteRangeRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields, so always initialized.
bool DeleteRangeRequest::IsInitialized() const {
  return true;
}
// O(1) member-wise swap.
void DeleteRangeRequest::Swap(DeleteRangeRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(max_entries_to_delete_, other->max_entries_to_delete_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata DeleteRangeRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = DeleteRangeRequest_descriptor_;
  metadata.reflection = DeleteRangeRequest_reflection_;
  return metadata;
}
// ===================================================================
// --- DeleteRangeResponse (generated by protoc; regenerate from the
// --- .proto rather than hand-editing this file). ---
#ifndef _MSC_VER
// Out-of-class definitions for the static const field-number constants
// (older MSVC rejects these, hence the guard).
const int DeleteRangeResponse::kHeaderFieldNumber;
const int DeleteRangeResponse::kNumDeletedFieldNumber;
#endif  // !_MSC_VER
DeleteRangeResponse::DeleteRangeResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.DeleteRangeResponse)
}
// Points the default instance's submessage at the submessage's own
// default instance instead of allocating one.
void DeleteRangeResponse::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
DeleteRangeResponse::DeleteRangeResponse(const DeleteRangeResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.DeleteRangeResponse)
}
// Field initialization shared by all constructors.
void DeleteRangeResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  num_deleted_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
DeleteRangeResponse::~DeleteRangeResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.DeleteRangeResponse)
  SharedDtor();
}
void DeleteRangeResponse::SharedDtor() {
  // The default instance aliases the submessage default instance; only
  // owned copies are deleted.
  if (this != default_instance_) {
    delete header_;
  }
}
void DeleteRangeResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* DeleteRangeResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return DeleteRangeResponse_descriptor_;
}
// Lazily registers the file's descriptors on first use.
const DeleteRangeResponse& DeleteRangeResponse::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
DeleteRangeResponse* DeleteRangeResponse::default_instance_ = NULL;
DeleteRangeResponse* DeleteRangeResponse::New() const {
  return new DeleteRangeResponse;
}
void DeleteRangeResponse::Clear() {
  // Mask 3 covers the message's two declared fields (has-bits 0 and 1).
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
    }
    num_deleted_ = GOOGLE_LONGLONG(0);
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser generated by protoc: a goto-based state machine in
// which expected next tags are fast-pathed via ExpectTag, and anything
// unexpected (unknown field, end group, EOF) lands in handle_unusual.
bool DeleteRangeResponse::MergePartialFromCodedStream(
  ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.DeleteRangeResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(16)) goto parse_num_deleted;
        break;
      }
      // optional int64 num_deleted = 2;
      case 2: {
        if (tag == 16) {
         parse_num_deleted:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &num_deleted_)));
          set_has_num_deleted();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.DeleteRangeResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.DeleteRangeResponse)
  return false;
#undef DO_
}
// Writes all set fields (plus unknown fields) to the output stream.
void DeleteRangeResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.DeleteRangeResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional int64 num_deleted = 2;
  if (has_num_deleted()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->num_deleted(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.DeleteRangeResponse)
}
// Flat-buffer variant of serialization; returns the advanced write cursor.
::google::protobuf::uint8* DeleteRangeResponse::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.DeleteRangeResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional int64 num_deleted = 2;
  if (has_num_deleted()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->num_deleted(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.DeleteRangeResponse)
  return target;
}
// Computes the serialized size and caches it in _cached_size_.
int DeleteRangeResponse::ByteSize() const {
  int total_size = 0;
  // Bulk has-bit test: skips field checks when none of the first eight
  // fields is set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.ResponseHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional int64 num_deleted = 2;
    if (has_num_deleted()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->num_deleted());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Type-erased merge: typed fast path when possible, reflection otherwise.
void DeleteRangeResponse::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const DeleteRangeResponse* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const DeleteRangeResponse*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Field-wise merge of set fields plus unknown fields.
void DeleteRangeResponse::MergeFrom(const DeleteRangeResponse& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
    }
    if (from.has_num_deleted()) {
      set_num_deleted(from.num_deleted());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom = Clear + MergeFrom; self-copy is a no-op.
void DeleteRangeResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void DeleteRangeResponse::CopyFrom(const DeleteRangeResponse& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields, so always initialized.
bool DeleteRangeResponse::IsInitialized() const {
  return true;
}
// O(1) member-wise swap.
void DeleteRangeResponse::Swap(DeleteRangeResponse* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(num_deleted_, other->num_deleted_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata DeleteRangeResponse::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = DeleteRangeResponse_descriptor_;
  metadata.reflection = DeleteRangeResponse_reflection_;
  return metadata;
}
// ===================================================================
// --- ScanRequest (generated by protoc; regenerate from the .proto
// --- rather than hand-editing this file). ---
#ifndef _MSC_VER
// Out-of-class definitions for the static const field-number constants
// (older MSVC rejects these, hence the guard).
const int ScanRequest::kHeaderFieldNumber;
const int ScanRequest::kMaxResultsFieldNumber;
#endif  // !_MSC_VER
ScanRequest::ScanRequest()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.ScanRequest)
}
// Points the default instance's submessage at the submessage's own
// default instance instead of allocating one.
void ScanRequest::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
ScanRequest::ScanRequest(const ScanRequest& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.ScanRequest)
}
// Field initialization shared by all constructors.
void ScanRequest::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  max_results_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
ScanRequest::~ScanRequest() {
  // @@protoc_insertion_point(destructor:cockroach.proto.ScanRequest)
  SharedDtor();
}
void ScanRequest::SharedDtor() {
  // The default instance aliases the submessage default instance; only
  // owned copies are deleted.
  if (this != default_instance_) {
    delete header_;
  }
}
void ScanRequest::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* ScanRequest::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ScanRequest_descriptor_;
}
// Lazily registers the file's descriptors on first use.
const ScanRequest& ScanRequest::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
ScanRequest* ScanRequest::default_instance_ = NULL;
ScanRequest* ScanRequest::New() const {
  return new ScanRequest;
}
void ScanRequest::Clear() {
  // Mask 3 covers the message's two declared fields (has-bits 0 and 1).
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
    }
    max_results_ = GOOGLE_LONGLONG(0);
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser generated by protoc: a goto-based state machine in
// which expected next tags are fast-pathed via ExpectTag, and anything
// unexpected (unknown field, end group, EOF) lands in handle_unusual.
bool ScanRequest::MergePartialFromCodedStream(
  ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.ScanRequest)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(16)) goto parse_max_results;
        break;
      }
      // optional int64 max_results = 2;
      case 2: {
        if (tag == 16) {
         parse_max_results:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                 ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &max_results_)));
          set_has_max_results();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.ScanRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.ScanRequest)
  return false;
#undef DO_
}
// Writes all set fields (plus unknown fields) to the output stream.
void ScanRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.ScanRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional int64 max_results = 2;
  if (has_max_results()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->max_results(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.ScanRequest)
}
// Flat-buffer variant of serialization; returns the advanced write cursor.
::google::protobuf::uint8* ScanRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ScanRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional int64 max_results = 2;
  if (has_max_results()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->max_results(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ScanRequest)
  return target;
}
// Computes the serialized size and caches it in _cached_size_.
int ScanRequest::ByteSize() const {
  int total_size = 0;
  // Bulk has-bit test: skips field checks when none of the first eight
  // fields is set.
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional int64 max_results = 2;
    if (has_max_results()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->max_results());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Type-erased merge: typed fast path when possible, reflection otherwise.
void ScanRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const ScanRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const ScanRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Field-wise merge of set fields plus unknown fields.
void ScanRequest::MergeFrom(const ScanRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
    if (from.has_max_results()) {
      set_max_results(from.max_results());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom = Clear + MergeFrom; self-copy is a no-op.
void ScanRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void ScanRequest::CopyFrom(const ScanRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields, so always initialized.
bool ScanRequest::IsInitialized() const {
  return true;
}
// O(1) member-wise swap.
void ScanRequest::Swap(ScanRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(max_results_, other->max_results_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata ScanRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = ScanRequest_descriptor_;
  metadata.reflection = ScanRequest_reflection_;
  return metadata;
}
// ===================================================================
// --- ScanResponse (generated by protoc; regenerate from the .proto
// --- rather than hand-editing this file). Holds an optional header and
// --- a repeated KeyValue field `rows`. ---
#ifndef _MSC_VER
// Out-of-class definitions for the static const field-number constants
// (older MSVC rejects these, hence the guard).
const int ScanResponse::kHeaderFieldNumber;
const int ScanResponse::kRowsFieldNumber;
#endif  // !_MSC_VER
ScanResponse::ScanResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.ScanResponse)
}
// Points the default instance's submessage at the submessage's own
// default instance instead of allocating one.
void ScanResponse::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
ScanResponse::ScanResponse(const ScanResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.ScanResponse)
}
// Field initialization shared by all constructors (rows_ default-constructs).
void ScanResponse::SharedCtor() {
  _cached_size_ = 0;
  header_ = NULL;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
ScanResponse::~ScanResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.ScanResponse)
  SharedDtor();
}
void ScanResponse::SharedDtor() {
  // The default instance aliases the submessage default instance; only
  // owned copies are deleted.
  if (this != default_instance_) {
    delete header_;
  }
}
void ScanResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* ScanResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return ScanResponse_descriptor_;
}
// Lazily registers the file's descriptors on first use.
const ScanResponse& ScanResponse::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
ScanResponse* ScanResponse::default_instance_ = NULL;
ScanResponse* ScanResponse::New() const {
  return new ScanResponse;
}
void ScanResponse::Clear() {
  if (has_header()) {
    if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
  }
  // Repeated fields are cleared unconditionally (no has-bit tracks them).
  rows_.Clear();
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser generated by protoc: a goto-based state machine in
// which expected next tags are fast-pathed via ExpectTag, and anything
// unexpected (unknown field, end group, EOF) lands in handle_unusual.
// The repeated `rows` case loops on itself via the ExpectTag(18) check.
bool ScanResponse::MergePartialFromCodedStream(
  ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.ScanResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(18)) goto parse_rows;
        break;
      }
      // repeated .cockroach.proto.KeyValue rows = 2;
      case 2: {
        if (tag == 18) {
         parse_rows:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, add_rows()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(18)) goto parse_rows;
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.ScanResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.ScanResponse)
  return false;
#undef DO_
}
// Writes all set fields plus every rows element to the output stream.
void ScanResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.ScanResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // repeated .cockroach.proto.KeyValue rows = 2;
  for (int i = 0; i < this->rows_size(); i++) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      2, this->rows(i), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.ScanResponse)
}
// Flat-buffer variant of serialization; returns the advanced write cursor.
::google::protobuf::uint8* ScanResponse::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ScanResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // repeated .cockroach.proto.KeyValue rows = 2;
  for (int i = 0; i < this->rows_size(); i++) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        2, this->rows(i), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ScanResponse)
  return target;
}
// Computes the serialized size (1 tag byte per rows element plus each
// element's length-delimited size) and caches it in _cached_size_.
int ScanResponse::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.ResponseHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
  }
  // repeated .cockroach.proto.KeyValue rows = 2;
  total_size += 1 * this->rows_size();
  for (int i = 0; i < this->rows_size(); i++) {
    total_size +=
      ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
        this->rows(i));
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Type-erased merge: typed fast path when possible, reflection otherwise.
void ScanResponse::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const ScanResponse* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const ScanResponse*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Field-wise merge: repeated rows are appended, header is merged.
void ScanResponse::MergeFrom(const ScanResponse& from) {
  GOOGLE_CHECK_NE(&from, this);
  rows_.MergeFrom(from.rows_);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom = Clear + MergeFrom; self-copy is a no-op.
void ScanResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void ScanResponse::CopyFrom(const ScanResponse& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields, so always initialized.
bool ScanResponse::IsInitialized() const {
  return true;
}
// O(1) member-wise swap.
void ScanResponse::Swap(ScanResponse* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    rows_.Swap(&other->rows_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata ScanResponse::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = ScanResponse_descriptor_;
  metadata.reflection = ScanResponse_reflection_;
  return metadata;
}
// ===================================================================
#ifndef _MSC_VER
const int EndTransactionRequest::kHeaderFieldNumber;
const int EndTransactionRequest::kCommitFieldNumber;
const int EndTransactionRequest::kInternalCommitTriggerFieldNumber;
#endif // !_MSC_VER
EndTransactionRequest::EndTransactionRequest()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.EndTransactionRequest)
}
void EndTransactionRequest::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
internal_commit_trigger_ = const_cast< ::cockroach::proto::InternalCommitTrigger*>(&::cockroach::proto::InternalCommitTrigger::default_instance());
}
EndTransactionRequest::EndTransactionRequest(const EndTransactionRequest& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.EndTransactionRequest)
}
void EndTransactionRequest::SharedCtor() {
_cached_size_ = 0;
header_ = NULL;
commit_ = false;
internal_commit_trigger_ = NULL;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
EndTransactionRequest::~EndTransactionRequest() {
// @@protoc_insertion_point(destructor:cockroach.proto.EndTransactionRequest)
SharedDtor();
}
void EndTransactionRequest::SharedDtor() {
if (this != default_instance_) {
delete header_;
delete internal_commit_trigger_;
}
}
void EndTransactionRequest::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* EndTransactionRequest::descriptor() {
protobuf_AssignDescriptorsOnce();
return EndTransactionRequest_descriptor_;
}
const EndTransactionRequest& EndTransactionRequest::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
EndTransactionRequest* EndTransactionRequest::default_instance_ = NULL;
EndTransactionRequest* EndTransactionRequest::New() const {
return new EndTransactionRequest;
}
// Reset every field to its default.  The mask 7 (0b111) covers the three
// declared fields' has-bits (header, commit, internal_commit_trigger);
// submessages are cleared in place, not freed, so their storage is reused.
void EndTransactionRequest::Clear() {
  if (_has_bits_[0 / 32] & 7) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
    }
    commit_ = false;
    if (has_internal_commit_trigger()) {
      if (internal_commit_trigger_ != NULL) internal_commit_trigger_->::cockroach::proto::InternalCommitTrigger::Clear();
    }
  }
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser.  Fields may arrive in any order; when they arrive in
// declaration order the ExpectTag(...) checks jump directly to the next
// field's parse_* label, skipping a trip through the switch.  Unrecognized
// tags are preserved in the unknown-field set via handle_unusual.
bool EndTransactionRequest::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.EndTransactionRequest)
  for (;;) {
    // Cutoff 127: one-byte tags (field numbers <= 15) are the common case.
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.RequestHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(16)) goto parse_commit;
        break;
      }
      // optional bool commit = 2;
      case 2: {
        if (tag == 16) {
         parse_commit:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   bool, ::google::protobuf::internal::WireFormatLite::TYPE_BOOL>(
                 input, &commit_)));
          set_has_commit();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(26)) goto parse_internal_commit_trigger;
        break;
      }
      // optional .cockroach.proto.InternalCommitTrigger internal_commit_trigger = 3;
      case 3: {
        if (tag == 26) {
         parse_internal_commit_trigger:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_internal_commit_trigger()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        // tag == 0 or an end-group tag terminates this message's fields.
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.EndTransactionRequest)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.EndTransactionRequest)
  return false;
#undef DO_
}
// Serialize set fields in field-number order to a coded output stream,
// followed by any preserved unknown fields.
void EndTransactionRequest::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.EndTransactionRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional bool commit = 2;
  if (has_commit()) {
    ::google::protobuf::internal::WireFormatLite::WriteBool(2, this->commit(), output);
  }
  // optional .cockroach.proto.InternalCommitTrigger internal_commit_trigger = 3;
  if (has_internal_commit_trigger()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      3, this->internal_commit_trigger(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.EndTransactionRequest)
}
// Same as above but writes directly into a flat byte array; relies on sizes
// cached by a prior ByteSize() call.  Returns the advanced write pointer.
::google::protobuf::uint8* EndTransactionRequest::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.EndTransactionRequest)
  // optional .cockroach.proto.RequestHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional bool commit = 2;
  if (has_commit()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteBoolToArray(2, this->commit(), target);
  }
  // optional .cockroach.proto.InternalCommitTrigger internal_commit_trigger = 3;
  if (has_internal_commit_trigger()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        3, this->internal_commit_trigger(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.EndTransactionRequest)
  return target;
}
// Compute the serialized size in bytes and cache it for the serializers.
// Each "1 +" is the single-byte tag for field numbers <= 15; for commit the
// second "+ 1" is the one-byte bool payload.
int EndTransactionRequest::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.RequestHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional bool commit = 2;
    if (has_commit()) {
      total_size += 1 + 1;
    }
    // optional .cockroach.proto.InternalCommitTrigger internal_commit_trigger = 3;
    if (has_internal_commit_trigger()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->internal_commit_trigger());
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Generic MergeFrom: downcast when the runtime type matches, otherwise fall
// back to reflection-based merging.
void EndTransactionRequest::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const EndTransactionRequest* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const EndTransactionRequest*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed MergeFrom: copy only the fields that are set in `from`; submessages
// merge recursively, scalars overwrite.
void EndTransactionRequest::MergeFrom(const EndTransactionRequest& from) {
  GOOGLE_CHECK_NE(&from, this);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
    }
    if (from.has_commit()) {
      set_commit(from.commit());
    }
    if (from.has_internal_commit_trigger()) {
      mutable_internal_commit_trigger()->::cockroach::proto::InternalCommitTrigger::MergeFrom(from.internal_commit_trigger());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom == Clear + MergeFrom (both overloads).
void EndTransactionRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void EndTransactionRequest::CopyFrom(const EndTransactionRequest& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields in this message, so it is always initialized.
bool EndTransactionRequest::IsInitialized() const {
  return true;
}
// O(1) swap of internal state with another instance.
void EndTransactionRequest::Swap(EndTransactionRequest* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(commit_, other->commit_);
    std::swap(internal_commit_trigger_, other->internal_commit_trigger_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata EndTransactionRequest::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = EndTransactionRequest_descriptor_;
  metadata.reflection = EndTransactionRequest_reflection_;
  return metadata;
}
// ===================================================================
// ---------------------------------------------------------------------------
// EndTransactionResponse -- construction, destruction and default instance.
// ---------------------------------------------------------------------------
#ifndef _MSC_VER
// Out-of-class definitions for the field-number constants (MSVC predates
// proper support for these, hence the guard).
const int EndTransactionResponse::kHeaderFieldNumber;
const int EndTransactionResponse::kCommitWaitFieldNumber;
const int EndTransactionResponse::kResolvedFieldNumber;
#endif  // !_MSC_VER
EndTransactionResponse::EndTransactionResponse()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.EndTransactionResponse)
}
// Default instance aliases the ResponseHeader default instance (never owned).
void EndTransactionResponse::InitAsDefaultInstance() {
  header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
EndTransactionResponse::EndTransactionResponse(const EndTransactionResponse& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.EndTransactionResponse)
}
// Common field initialization; GetEmptyString() forces the shared empty
// string to be initialized before any bytes field may reference it.
void EndTransactionResponse::SharedCtor() {
  ::google::protobuf::internal::GetEmptyString();
  _cached_size_ = 0;
  header_ = NULL;
  commit_wait_ = GOOGLE_LONGLONG(0);
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
}
EndTransactionResponse::~EndTransactionResponse() {
  // @@protoc_insertion_point(destructor:cockroach.proto.EndTransactionResponse)
  SharedDtor();
}
// The default instance's header_ aliases another default instance; only
// delete it for ordinary instances.
void EndTransactionResponse::SharedDtor() {
  if (this != default_instance_) {
    delete header_;
  }
}
void EndTransactionResponse::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* EndTransactionResponse::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return EndTransactionResponse_descriptor_;
}
// Lazily registers the file's descriptors on first use.
const EndTransactionResponse& EndTransactionResponse::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
EndTransactionResponse* EndTransactionResponse::default_instance_ = NULL;
EndTransactionResponse* EndTransactionResponse::New() const {
  return new EndTransactionResponse;
}
// Reset all fields.  Mask 3 (0b11) covers the two singular fields' has-bits
// (header, commit_wait); the repeated `resolved` field is cleared
// unconditionally since repeated fields have no has-bit.
void EndTransactionResponse::Clear() {
  if (_has_bits_[0 / 32] & 3) {
    if (has_header()) {
      if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
    }
    commit_wait_ = GOOGLE_LONGLONG(0);
  }
  resolved_.Clear();
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser; see EndTransactionRequest's parser for the structure.
// The repeated `resolved` field loops back to its own label so consecutive
// elements parse without re-entering the switch.
bool EndTransactionResponse::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.EndTransactionResponse)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.ResponseHeader header = 1;
      case 1: {
        if (tag == 10) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_header()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(16)) goto parse_commit_wait;
        break;
      }
      // optional int64 commit_wait = 2;
      case 2: {
        if (tag == 16) {
         parse_commit_wait:
          DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
                   ::google::protobuf::int64, ::google::protobuf::internal::WireFormatLite::TYPE_INT64>(
                 input, &commit_wait_)));
          set_has_commit_wait();
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(26)) goto parse_resolved;
        break;
      }
      // repeated bytes resolved = 3;
      case 3: {
        if (tag == 26) {
         parse_resolved:
          DO_(::google::protobuf::internal::WireFormatLite::ReadBytes(
                input, this->add_resolved()));
        } else {
          goto handle_unusual;
        }
        // Repeated field: another element with the same tag loops back.
        if (input->ExpectTag(26)) goto parse_resolved;
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.EndTransactionResponse)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.EndTransactionResponse)
  return false;
#undef DO_
}
// Serialize set fields in field-number order, then any unknown fields.
void EndTransactionResponse::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.EndTransactionResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      1, this->header(), output);
  }
  // optional int64 commit_wait = 2;
  if (has_commit_wait()) {
    ::google::protobuf::internal::WireFormatLite::WriteInt64(2, this->commit_wait(), output);
  }
  // repeated bytes resolved = 3;
  for (int i = 0; i < this->resolved_size(); i++) {
    ::google::protobuf::internal::WireFormatLite::WriteBytes(
      3, this->resolved(i), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.EndTransactionResponse)
}
// Flat-array variant; relies on sizes cached by a prior ByteSize() call.
::google::protobuf::uint8* EndTransactionResponse::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.EndTransactionResponse)
  // optional .cockroach.proto.ResponseHeader header = 1;
  if (has_header()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        1, this->header(), target);
  }
  // optional int64 commit_wait = 2;
  if (has_commit_wait()) {
    target = ::google::protobuf::internal::WireFormatLite::WriteInt64ToArray(2, this->commit_wait(), target);
  }
  // repeated bytes resolved = 3;
  for (int i = 0; i < this->resolved_size(); i++) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteBytesToArray(3, this->resolved(i), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.EndTransactionResponse)
  return target;
}
// Compute and cache the serialized size.  Repeated `resolved` contributes
// one tag byte per element plus each element's length-delimited size.
int EndTransactionResponse::ByteSize() const {
  int total_size = 0;
  if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    // optional .cockroach.proto.ResponseHeader header = 1;
    if (has_header()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->header());
    }
    // optional int64 commit_wait = 2;
    if (has_commit_wait()) {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::Int64Size(
          this->commit_wait());
    }
  }
  // repeated bytes resolved = 3;
  total_size += 1 * this->resolved_size();
  for (int i = 0; i < this->resolved_size(); i++) {
    total_size += ::google::protobuf::internal::WireFormatLite::BytesSize(
      this->resolved(i));
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Generic MergeFrom: downcast when possible, otherwise reflective merge.
void EndTransactionResponse::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const EndTransactionResponse* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const EndTransactionResponse*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed MergeFrom: repeated `resolved` elements are appended; singular
// fields copy only when set in `from`.
void EndTransactionResponse::MergeFrom(const EndTransactionResponse& from) {
  GOOGLE_CHECK_NE(&from, this);
  resolved_.MergeFrom(from.resolved_);
  if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
    if (from.has_header()) {
      mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
    }
    if (from.has_commit_wait()) {
      set_commit_wait(from.commit_wait());
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom == Clear + MergeFrom (both overloads).
void EndTransactionResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void EndTransactionResponse::CopyFrom(const EndTransactionResponse& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields, so always initialized.
bool EndTransactionResponse::IsInitialized() const {
  return true;
}
// O(1) swap of internal state with another instance.
void EndTransactionResponse::Swap(EndTransactionResponse* other) {
  if (other != this) {
    std::swap(header_, other->header_);
    std::swap(commit_wait_, other->commit_wait_);
    resolved_.Swap(&other->resolved_);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata EndTransactionResponse::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = EndTransactionResponse_descriptor_;
  metadata.reflection = EndTransactionResponse_reflection_;
  return metadata;
}
// ===================================================================
// ---------------------------------------------------------------------------
// RequestUnion -- a oneof of the individual request message types.
// Exactly one of get/put/conditional_put/increment/delete/delete_range/
// scan/end_transaction is set at a time (tracked in _oneof_case_[0]).
// ---------------------------------------------------------------------------
#ifndef _MSC_VER
const int RequestUnion::kGetFieldNumber;
const int RequestUnion::kPutFieldNumber;
const int RequestUnion::kConditionalPutFieldNumber;
const int RequestUnion::kIncrementFieldNumber;
const int RequestUnion::kDeleteFieldNumber;
const int RequestUnion::kDeleteRangeFieldNumber;
const int RequestUnion::kScanFieldNumber;
const int RequestUnion::kEndTransactionFieldNumber;
#endif  // !_MSC_VER
RequestUnion::RequestUnion()
  : ::google::protobuf::Message() {
  SharedCtor();
  // @@protoc_insertion_point(constructor:cockroach.proto.RequestUnion)
}
// The oneof's default storage points each member at the corresponding
// message type's default instance (aliases, never owned).
// Note: `delete` is a C++ keyword, hence the trailing-underscore member
// `delete__` and accessors named delete_().
void RequestUnion::InitAsDefaultInstance() {
  RequestUnion_default_oneof_instance_->get_ = const_cast< ::cockroach::proto::GetRequest*>(&::cockroach::proto::GetRequest::default_instance());
  RequestUnion_default_oneof_instance_->put_ = const_cast< ::cockroach::proto::PutRequest*>(&::cockroach::proto::PutRequest::default_instance());
  RequestUnion_default_oneof_instance_->conditional_put_ = const_cast< ::cockroach::proto::ConditionalPutRequest*>(&::cockroach::proto::ConditionalPutRequest::default_instance());
  RequestUnion_default_oneof_instance_->increment_ = const_cast< ::cockroach::proto::IncrementRequest*>(&::cockroach::proto::IncrementRequest::default_instance());
  RequestUnion_default_oneof_instance_->delete__ = const_cast< ::cockroach::proto::DeleteRequest*>(&::cockroach::proto::DeleteRequest::default_instance());
  RequestUnion_default_oneof_instance_->delete_range_ = const_cast< ::cockroach::proto::DeleteRangeRequest*>(&::cockroach::proto::DeleteRangeRequest::default_instance());
  RequestUnion_default_oneof_instance_->scan_ = const_cast< ::cockroach::proto::ScanRequest*>(&::cockroach::proto::ScanRequest::default_instance());
  RequestUnion_default_oneof_instance_->end_transaction_ = const_cast< ::cockroach::proto::EndTransactionRequest*>(&::cockroach::proto::EndTransactionRequest::default_instance());
}
RequestUnion::RequestUnion(const RequestUnion& from)
  : ::google::protobuf::Message() {
  SharedCtor();
  MergeFrom(from);
  // @@protoc_insertion_point(copy_constructor:cockroach.proto.RequestUnion)
}
// Common initialization: start with no oneof member set.
void RequestUnion::SharedCtor() {
  _cached_size_ = 0;
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  clear_has_value();
}
RequestUnion::~RequestUnion() {
  // @@protoc_insertion_point(destructor:cockroach.proto.RequestUnion)
  SharedDtor();
}
// clear_value() deletes whichever oneof member is currently owned; the
// default instance skips that since its members alias default instances.
void RequestUnion::SharedDtor() {
  if (has_value()) {
    clear_value();
  }
  if (this != default_instance_) {
  }
}
void RequestUnion::SetCachedSize(int size) const {
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* RequestUnion::descriptor() {
  protobuf_AssignDescriptorsOnce();
  return RequestUnion_descriptor_;
}
// Lazily registers the file's descriptors on first use.
const RequestUnion& RequestUnion::default_instance() {
  if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
  return *default_instance_;
}
RequestUnion* RequestUnion::default_instance_ = NULL;
RequestUnion* RequestUnion::New() const {
  return new RequestUnion;
}
// Delete whichever oneof member is currently set (each is heap-allocated
// once set) and mark the oneof as unset.
void RequestUnion::clear_value() {
  switch(value_case()) {
    case kGet: {
      delete value_.get_;
      break;
    }
    case kPut: {
      delete value_.put_;
      break;
    }
    case kConditionalPut: {
      delete value_.conditional_put_;
      break;
    }
    case kIncrement: {
      delete value_.increment_;
      break;
    }
    case kDelete: {
      delete value_.delete__;
      break;
    }
    case kDeleteRange: {
      delete value_.delete_range_;
      break;
    }
    case kScan: {
      delete value_.scan_;
      break;
    }
    case kEndTransaction: {
      delete value_.end_transaction_;
      break;
    }
    case VALUE_NOT_SET: {
      break;
    }
  }
  _oneof_case_[0] = VALUE_NOT_SET;
}
void RequestUnion::Clear() {
  clear_value();
  ::memset(_has_bits_, 0, sizeof(_has_bits_));
  mutable_unknown_fields()->Clear();
}
// Wire-format parser.  Field numbers start at 2; setting any member via
// mutable_*() implicitly clears the previously set one, so the last oneof
// field on the wire wins.
bool RequestUnion::MergePartialFromCodedStream(
    ::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
  ::google::protobuf::uint32 tag;
  // @@protoc_insertion_point(parse_start:cockroach.proto.RequestUnion)
  for (;;) {
    ::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
    tag = p.first;
    if (!p.second) goto handle_unusual;
    switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
      // optional .cockroach.proto.GetRequest get = 2;
      case 2: {
        if (tag == 18) {
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_get()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(26)) goto parse_put;
        break;
      }
      // optional .cockroach.proto.PutRequest put = 3;
      case 3: {
        if (tag == 26) {
         parse_put:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_put()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(34)) goto parse_conditional_put;
        break;
      }
      // optional .cockroach.proto.ConditionalPutRequest conditional_put = 4;
      case 4: {
        if (tag == 34) {
         parse_conditional_put:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_conditional_put()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(42)) goto parse_increment;
        break;
      }
      // optional .cockroach.proto.IncrementRequest increment = 5;
      case 5: {
        if (tag == 42) {
         parse_increment:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_increment()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(50)) goto parse_delete;
        break;
      }
      // optional .cockroach.proto.DeleteRequest delete = 6;
      case 6: {
        if (tag == 50) {
         parse_delete:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_delete_()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(58)) goto parse_delete_range;
        break;
      }
      // optional .cockroach.proto.DeleteRangeRequest delete_range = 7;
      case 7: {
        if (tag == 58) {
         parse_delete_range:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_delete_range()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(66)) goto parse_scan;
        break;
      }
      // optional .cockroach.proto.ScanRequest scan = 8;
      case 8: {
        if (tag == 66) {
         parse_scan:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_scan()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectTag(74)) goto parse_end_transaction;
        break;
      }
      // optional .cockroach.proto.EndTransactionRequest end_transaction = 9;
      case 9: {
        if (tag == 74) {
         parse_end_transaction:
          DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
               input, mutable_end_transaction()));
        } else {
          goto handle_unusual;
        }
        if (input->ExpectAtEnd()) goto success;
        break;
      }
      default: {
      handle_unusual:
        if (tag == 0 ||
            ::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
            ::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
          goto success;
        }
        DO_(::google::protobuf::internal::WireFormat::SkipField(
              input, tag, mutable_unknown_fields()));
        break;
      }
    }
  }
success:
  // @@protoc_insertion_point(parse_success:cockroach.proto.RequestUnion)
  return true;
failure:
  // @@protoc_insertion_point(parse_failure:cockroach.proto.RequestUnion)
  return false;
#undef DO_
}
// Serialize the set oneof member (at most one of the has_*() tests is true
// since they all query the same oneof case), then any unknown fields.
void RequestUnion::SerializeWithCachedSizes(
    ::google::protobuf::io::CodedOutputStream* output) const {
  // @@protoc_insertion_point(serialize_start:cockroach.proto.RequestUnion)
  // optional .cockroach.proto.GetRequest get = 2;
  if (has_get()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      2, this->get(), output);
  }
  // optional .cockroach.proto.PutRequest put = 3;
  if (has_put()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      3, this->put(), output);
  }
  // optional .cockroach.proto.ConditionalPutRequest conditional_put = 4;
  if (has_conditional_put()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      4, this->conditional_put(), output);
  }
  // optional .cockroach.proto.IncrementRequest increment = 5;
  if (has_increment()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      5, this->increment(), output);
  }
  // optional .cockroach.proto.DeleteRequest delete = 6;
  if (has_delete_()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      6, this->delete_(), output);
  }
  // optional .cockroach.proto.DeleteRangeRequest delete_range = 7;
  if (has_delete_range()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      7, this->delete_range(), output);
  }
  // optional .cockroach.proto.ScanRequest scan = 8;
  if (has_scan()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      8, this->scan(), output);
  }
  // optional .cockroach.proto.EndTransactionRequest end_transaction = 9;
  if (has_end_transaction()) {
    ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
      9, this->end_transaction(), output);
  }
  if (!unknown_fields().empty()) {
    ::google::protobuf::internal::WireFormat::SerializeUnknownFields(
        unknown_fields(), output);
  }
  // @@protoc_insertion_point(serialize_end:cockroach.proto.RequestUnion)
}
// Flat-array variant; relies on sizes cached by a prior ByteSize() call.
::google::protobuf::uint8* RequestUnion::SerializeWithCachedSizesToArray(
    ::google::protobuf::uint8* target) const {
  // @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.RequestUnion)
  // optional .cockroach.proto.GetRequest get = 2;
  if (has_get()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        2, this->get(), target);
  }
  // optional .cockroach.proto.PutRequest put = 3;
  if (has_put()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        3, this->put(), target);
  }
  // optional .cockroach.proto.ConditionalPutRequest conditional_put = 4;
  if (has_conditional_put()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        4, this->conditional_put(), target);
  }
  // optional .cockroach.proto.IncrementRequest increment = 5;
  if (has_increment()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        5, this->increment(), target);
  }
  // optional .cockroach.proto.DeleteRequest delete = 6;
  if (has_delete_()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        6, this->delete_(), target);
  }
  // optional .cockroach.proto.DeleteRangeRequest delete_range = 7;
  if (has_delete_range()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        7, this->delete_range(), target);
  }
  // optional .cockroach.proto.ScanRequest scan = 8;
  if (has_scan()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        8, this->scan(), target);
  }
  // optional .cockroach.proto.EndTransactionRequest end_transaction = 9;
  if (has_end_transaction()) {
    target = ::google::protobuf::internal::WireFormatLite::
      WriteMessageNoVirtualToArray(
        9, this->end_transaction(), target);
  }
  if (!unknown_fields().empty()) {
    target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
        unknown_fields(), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.RequestUnion)
  return target;
}
// Compute and cache the serialized size: tag byte plus the set oneof
// member's length-delimited size, plus any unknown fields.
int RequestUnion::ByteSize() const {
  int total_size = 0;
  switch (value_case()) {
    // optional .cockroach.proto.GetRequest get = 2;
    case kGet: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->get());
      break;
    }
    // optional .cockroach.proto.PutRequest put = 3;
    case kPut: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->put());
      break;
    }
    // optional .cockroach.proto.ConditionalPutRequest conditional_put = 4;
    case kConditionalPut: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->conditional_put());
      break;
    }
    // optional .cockroach.proto.IncrementRequest increment = 5;
    case kIncrement: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->increment());
      break;
    }
    // optional .cockroach.proto.DeleteRequest delete = 6;
    case kDelete: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->delete_());
      break;
    }
    // optional .cockroach.proto.DeleteRangeRequest delete_range = 7;
    case kDeleteRange: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->delete_range());
      break;
    }
    // optional .cockroach.proto.ScanRequest scan = 8;
    case kScan: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->scan());
      break;
    }
    // optional .cockroach.proto.EndTransactionRequest end_transaction = 9;
    case kEndTransaction: {
      total_size += 1 +
        ::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
          this->end_transaction());
      break;
    }
    case VALUE_NOT_SET: {
      break;
    }
  }
  if (!unknown_fields().empty()) {
    total_size +=
      ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
        unknown_fields());
  }
  GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
  _cached_size_ = total_size;
  GOOGLE_SAFE_CONCURRENT_WRITES_END();
  return total_size;
}
// Generic MergeFrom: downcast when possible, otherwise reflective merge.
void RequestUnion::MergeFrom(const ::google::protobuf::Message& from) {
  GOOGLE_CHECK_NE(&from, this);
  const RequestUnion* source =
    ::google::protobuf::internal::dynamic_cast_if_available<const RequestUnion*>(
      &from);
  if (source == NULL) {
    ::google::protobuf::internal::ReflectionOps::Merge(from, this);
  } else {
    MergeFrom(*source);
  }
}
// Typed MergeFrom: mutable_*() switches this instance's oneof to `from`'s
// case (freeing any previously set member), then merges the submessage.
void RequestUnion::MergeFrom(const RequestUnion& from) {
  GOOGLE_CHECK_NE(&from, this);
  switch (from.value_case()) {
    case kGet: {
      mutable_get()->::cockroach::proto::GetRequest::MergeFrom(from.get());
      break;
    }
    case kPut: {
      mutable_put()->::cockroach::proto::PutRequest::MergeFrom(from.put());
      break;
    }
    case kConditionalPut: {
      mutable_conditional_put()->::cockroach::proto::ConditionalPutRequest::MergeFrom(from.conditional_put());
      break;
    }
    case kIncrement: {
      mutable_increment()->::cockroach::proto::IncrementRequest::MergeFrom(from.increment());
      break;
    }
    case kDelete: {
      mutable_delete_()->::cockroach::proto::DeleteRequest::MergeFrom(from.delete_());
      break;
    }
    case kDeleteRange: {
      mutable_delete_range()->::cockroach::proto::DeleteRangeRequest::MergeFrom(from.delete_range());
      break;
    }
    case kScan: {
      mutable_scan()->::cockroach::proto::ScanRequest::MergeFrom(from.scan());
      break;
    }
    case kEndTransaction: {
      mutable_end_transaction()->::cockroach::proto::EndTransactionRequest::MergeFrom(from.end_transaction());
      break;
    }
    case VALUE_NOT_SET: {
      break;
    }
  }
  mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// CopyFrom == Clear + MergeFrom (both overloads).
void RequestUnion::CopyFrom(const ::google::protobuf::Message& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
void RequestUnion::CopyFrom(const RequestUnion& from) {
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}
// No required fields, so always initialized.
bool RequestUnion::IsInitialized() const {
  return true;
}
// O(1) swap: swapping value_ and the oneof case tag together transfers
// ownership of the active member.
void RequestUnion::Swap(RequestUnion* other) {
  if (other != this) {
    std::swap(value_, other->value_);
    std::swap(_oneof_case_[0], other->_oneof_case_[0]);
    std::swap(_has_bits_[0], other->_has_bits_[0]);
    _unknown_fields_.Swap(&other->_unknown_fields_);
    std::swap(_cached_size_, other->_cached_size_);
  }
}
::google::protobuf::Metadata RequestUnion::GetMetadata() const {
  protobuf_AssignDescriptorsOnce();
  ::google::protobuf::Metadata metadata;
  metadata.descriptor = RequestUnion_descriptor_;
  metadata.reflection = RequestUnion_reflection_;
  return metadata;
}
// ===================================================================
#ifndef _MSC_VER
const int ResponseUnion::kGetFieldNumber;
const int ResponseUnion::kPutFieldNumber;
const int ResponseUnion::kConditionalPutFieldNumber;
const int ResponseUnion::kIncrementFieldNumber;
const int ResponseUnion::kDeleteFieldNumber;
const int ResponseUnion::kDeleteRangeFieldNumber;
const int ResponseUnion::kScanFieldNumber;
const int ResponseUnion::kEndTransactionFieldNumber;
#endif // !_MSC_VER
ResponseUnion::ResponseUnion()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.ResponseUnion)
}
// Wires every oneof member of the singleton default instance to the default
// instance of its message type, so accessors on the default instance return
// valid (empty) sub-messages without allocation.
void ResponseUnion::InitAsDefaultInstance() {
ResponseUnion_default_oneof_instance_->get_ = const_cast< ::cockroach::proto::GetResponse*>(&::cockroach::proto::GetResponse::default_instance());
ResponseUnion_default_oneof_instance_->put_ = const_cast< ::cockroach::proto::PutResponse*>(&::cockroach::proto::PutResponse::default_instance());
ResponseUnion_default_oneof_instance_->conditional_put_ = const_cast< ::cockroach::proto::ConditionalPutResponse*>(&::cockroach::proto::ConditionalPutResponse::default_instance());
ResponseUnion_default_oneof_instance_->increment_ = const_cast< ::cockroach::proto::IncrementResponse*>(&::cockroach::proto::IncrementResponse::default_instance());
ResponseUnion_default_oneof_instance_->delete__ = const_cast< ::cockroach::proto::DeleteResponse*>(&::cockroach::proto::DeleteResponse::default_instance());
ResponseUnion_default_oneof_instance_->delete_range_ = const_cast< ::cockroach::proto::DeleteRangeResponse*>(&::cockroach::proto::DeleteRangeResponse::default_instance());
ResponseUnion_default_oneof_instance_->scan_ = const_cast< ::cockroach::proto::ScanResponse*>(&::cockroach::proto::ScanResponse::default_instance());
ResponseUnion_default_oneof_instance_->end_transaction_ = const_cast< ::cockroach::proto::EndTransactionResponse*>(&::cockroach::proto::EndTransactionResponse::default_instance());
}
// Copy constructor: default-initializes, then deep-copies via MergeFrom().
ResponseUnion::ResponseUnion(const ResponseUnion& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.ResponseUnion)
}
// Common initialization used by both constructors: zero the cached size and
// has-bits, and mark the oneof as unset.
void ResponseUnion::SharedCtor() {
_cached_size_ = 0;
::memset(_has_bits_, 0, sizeof(_has_bits_));
clear_has_value();
}
// Destructor: all teardown lives in SharedDtor().
ResponseUnion::~ResponseUnion() {
// @@protoc_insertion_point(destructor:cockroach.proto.ResponseUnion)
SharedDtor();
}
// Frees whichever oneof member is currently active. The empty
// default-instance branch is a generator artifact: other message types free
// owned sub-message pointers there, but this type owns none outside the oneof.
void ResponseUnion::SharedDtor() {
if (has_value()) {
clear_value();
}
if (this != default_instance_) {
}
}
// Stores a precomputed byte size; the macro pair guards the write on
// platforms where concurrent int writes are not atomic.
void ResponseUnion::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns this type's Descriptor, lazily assigning descriptors on first use.
const ::google::protobuf::Descriptor* ResponseUnion::descriptor() {
protobuf_AssignDescriptorsOnce();
return ResponseUnion_descriptor_;
}
// Returns the immutable singleton default instance, constructing it (along
// with all descriptors for this .proto file) on first use.
const ResponseUnion& ResponseUnion::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
// Singleton default instance; created lazily by protobuf_AddDesc_...().
ResponseUnion* ResponseUnion::default_instance_ = NULL;
// Factory hook used by the protobuf runtime: heap-allocates a fresh,
// default-initialized ResponseUnion.
ResponseUnion* ResponseUnion::New() const {
  ResponseUnion* created = new ResponseUnion;
  return created;
}
// Frees the heap-allocated sub-message currently held by the oneof (if any)
// and resets the oneof case to "not set". Exactly one union member can be
// live, selected by value_case().
void ResponseUnion::clear_value() {
switch(value_case()) {
case kGet: {
delete value_.get_;
break;
}
case kPut: {
delete value_.put_;
break;
}
case kConditionalPut: {
delete value_.conditional_put_;
break;
}
case kIncrement: {
delete value_.increment_;
break;
}
case kDelete: {
// trailing underscore avoids the C++ `delete` keyword in the member name
delete value_.delete__;
break;
}
case kDeleteRange: {
delete value_.delete_range_;
break;
}
case kScan: {
delete value_.scan_;
break;
}
case kEndTransaction: {
delete value_.end_transaction_;
break;
}
case VALUE_NOT_SET: {
break;
}
}
_oneof_case_[0] = VALUE_NOT_SET;
}
// Resets the message to its default state: releases the active oneof member,
// zeroes has-bits, and drops any preserved unknown fields.
void ResponseUnion::Clear() {
clear_value();
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
// Wire-format parser, merging decoded fields into this message. Generated
// fast path: each case parses one field and, when the next tag matches the
// field that usually follows in serialization order, jumps straight to its
// parse label (the parse_* gotos) without re-entering the switch.
// Unknown fields and out-of-order tags fall through to handle_unusual, where
// tag 0 / END_GROUP terminates the parse and anything else is preserved in
// the unknown-field set. Returns false only on a malformed stream.
bool ResponseUnion::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.ResponseUnion)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.GetResponse get = 2;
case 2: {
if (tag == 18) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_get()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(26)) goto parse_put;
break;
}
// optional .cockroach.proto.PutResponse put = 3;
case 3: {
if (tag == 26) {
parse_put:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_put()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(34)) goto parse_conditional_put;
break;
}
// optional .cockroach.proto.ConditionalPutResponse conditional_put = 4;
case 4: {
if (tag == 34) {
parse_conditional_put:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_conditional_put()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(42)) goto parse_increment;
break;
}
// optional .cockroach.proto.IncrementResponse increment = 5;
case 5: {
if (tag == 42) {
parse_increment:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_increment()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(50)) goto parse_delete;
break;
}
// optional .cockroach.proto.DeleteResponse delete = 6;
case 6: {
if (tag == 50) {
parse_delete:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_delete_()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(58)) goto parse_delete_range;
break;
}
// optional .cockroach.proto.DeleteRangeResponse delete_range = 7;
case 7: {
if (tag == 58) {
parse_delete_range:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_delete_range()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(66)) goto parse_scan;
break;
}
// optional .cockroach.proto.ScanResponse scan = 8;
case 8: {
if (tag == 66) {
parse_scan:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_scan()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(74)) goto parse_end_transaction;
break;
}
// optional .cockroach.proto.EndTransactionResponse end_transaction = 9;
case 9: {
if (tag == 74) {
parse_end_transaction:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_end_transaction()));
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.ResponseUnion)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.ResponseUnion)
return false;
#undef DO_
}
// Writes the active oneof member (at most one has_*() is true) and any
// preserved unknown fields to |output|, in field-number order, using byte
// sizes cached by a prior ByteSize() call.
void ResponseUnion::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.ResponseUnion)
// optional .cockroach.proto.GetResponse get = 2;
if (has_get()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, this->get(), output);
}
// optional .cockroach.proto.PutResponse put = 3;
if (has_put()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
3, this->put(), output);
}
// optional .cockroach.proto.ConditionalPutResponse conditional_put = 4;
if (has_conditional_put()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
4, this->conditional_put(), output);
}
// optional .cockroach.proto.IncrementResponse increment = 5;
if (has_increment()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
5, this->increment(), output);
}
// optional .cockroach.proto.DeleteResponse delete = 6;
if (has_delete_()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
6, this->delete_(), output);
}
// optional .cockroach.proto.DeleteRangeResponse delete_range = 7;
if (has_delete_range()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
7, this->delete_range(), output);
}
// optional .cockroach.proto.ScanResponse scan = 8;
if (has_scan()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
8, this->scan(), output);
}
// optional .cockroach.proto.EndTransactionResponse end_transaction = 9;
if (has_end_transaction()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
9, this->end_transaction(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.ResponseUnion)
}
// Flat-buffer variant of serialization: writes directly into |target| (the
// caller guarantees capacity, typically sized by ByteSize()) and returns the
// end pointer, one past the last byte written.
::google::protobuf::uint8* ResponseUnion::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.ResponseUnion)
// optional .cockroach.proto.GetResponse get = 2;
if (has_get()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
2, this->get(), target);
}
// optional .cockroach.proto.PutResponse put = 3;
if (has_put()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
3, this->put(), target);
}
// optional .cockroach.proto.ConditionalPutResponse conditional_put = 4;
if (has_conditional_put()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
4, this->conditional_put(), target);
}
// optional .cockroach.proto.IncrementResponse increment = 5;
if (has_increment()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
5, this->increment(), target);
}
// optional .cockroach.proto.DeleteResponse delete = 6;
if (has_delete_()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
6, this->delete_(), target);
}
// optional .cockroach.proto.DeleteRangeResponse delete_range = 7;
if (has_delete_range()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
7, this->delete_range(), target);
}
// optional .cockroach.proto.ScanResponse scan = 8;
if (has_scan()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
8, this->scan(), target);
}
// optional .cockroach.proto.EndTransactionResponse end_transaction = 9;
if (has_end_transaction()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
9, this->end_transaction(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.ResponseUnion)
return target;
}
// Computes the serialized size in bytes: the active oneof member (the
// "1 +" is its one-byte tag; all field numbers here fit in a single byte)
// plus any unknown fields. Caches the result for SerializeWithCachedSizes().
int ResponseUnion::ByteSize() const {
int total_size = 0;
switch (value_case()) {
// optional .cockroach.proto.GetResponse get = 2;
case kGet: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->get());
break;
}
// optional .cockroach.proto.PutResponse put = 3;
case kPut: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->put());
break;
}
// optional .cockroach.proto.ConditionalPutResponse conditional_put = 4;
case kConditionalPut: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->conditional_put());
break;
}
// optional .cockroach.proto.IncrementResponse increment = 5;
case kIncrement: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->increment());
break;
}
// optional .cockroach.proto.DeleteResponse delete = 6;
case kDelete: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->delete_());
break;
}
// optional .cockroach.proto.DeleteRangeResponse delete_range = 7;
case kDeleteRange: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->delete_range());
break;
}
// optional .cockroach.proto.ScanResponse scan = 8;
case kScan: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->scan());
break;
}
// optional .cockroach.proto.EndTransactionResponse end_transaction = 9;
case kEndTransaction: {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->end_transaction());
break;
}
case VALUE_NOT_SET: {
break;
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
// Reflection-based merge entry point: downcasts to ResponseUnion when
// possible (fast typed merge); otherwise falls back to field-by-field
// reflection. Merging from self is a checked error.
void ResponseUnion::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const ResponseUnion* source =
::google::protobuf::internal::dynamic_cast_if_available<const ResponseUnion*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
// Typed merge: recursively merges |from|'s active oneof member into the
// corresponding member here (mutable_*() switches the oneof and allocates if
// needed), then appends |from|'s unknown fields.
void ResponseUnion::MergeFrom(const ResponseUnion& from) {
GOOGLE_CHECK_NE(&from, this);
switch (from.value_case()) {
case kGet: {
mutable_get()->::cockroach::proto::GetResponse::MergeFrom(from.get());
break;
}
case kPut: {
mutable_put()->::cockroach::proto::PutResponse::MergeFrom(from.put());
break;
}
case kConditionalPut: {
mutable_conditional_put()->::cockroach::proto::ConditionalPutResponse::MergeFrom(from.conditional_put());
break;
}
case kIncrement: {
mutable_increment()->::cockroach::proto::IncrementResponse::MergeFrom(from.increment());
break;
}
case kDelete: {
mutable_delete_()->::cockroach::proto::DeleteResponse::MergeFrom(from.delete_());
break;
}
case kDeleteRange: {
mutable_delete_range()->::cockroach::proto::DeleteRangeResponse::MergeFrom(from.delete_range());
break;
}
case kScan: {
mutable_scan()->::cockroach::proto::ScanResponse::MergeFrom(from.scan());
break;
}
case kEndTransaction: {
mutable_end_transaction()->::cockroach::proto::EndTransactionResponse::MergeFrom(from.end_transaction());
break;
}
case VALUE_NOT_SET: {
break;
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// Replaces this message's contents with a copy of |from| (reflection-based
// overload). Guarded so a self-copy does not Clear() the source first.
void ResponseUnion::CopyFrom(const ::google::protobuf::Message& from) {
  if (this != &from) {
    Clear();
    MergeFrom(from);
  }
}
// Replaces this message's contents with a copy of |from| (typed overload).
// Guarded so a self-copy does not Clear() the source first.
void ResponseUnion::CopyFrom(const ResponseUnion& from) {
  if (this != &from) {
    Clear();
    MergeFrom(from);
  }
}
// Always true: ResponseUnion declares no required fields.
bool ResponseUnion::IsInitialized() const {
return true;
}
// O(1) content exchange with |other|; the oneof union holds only pointers,
// so swapping it bitwise is safe.
void ResponseUnion::Swap(ResponseUnion* other) {
if (other != this) {
std::swap(value_, other->value_);
std::swap(_oneof_case_[0], other->_oneof_case_[0]);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
// Returns descriptor/reflection metadata, after one-time lazy assignment.
::google::protobuf::Metadata ResponseUnion::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = ResponseUnion_descriptor_;
metadata.reflection = ResponseUnion_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
// Out-of-class definitions for field-number constants declared in the header
// (ODR requirement); skipped for MSVC, which rejects them.
const int BatchRequest::kHeaderFieldNumber;
const int BatchRequest::kRequestsFieldNumber;
#endif  // !_MSC_VER
// Default constructor: delegates member setup to SharedCtor().
BatchRequest::BatchRequest()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.BatchRequest)
}
// Points the singleton default instance's header_ at RequestHeader's default
// instance so header() on the default instance needs no allocation.
void BatchRequest::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
// Copy constructor: default-initializes, then deep-copies via MergeFrom().
BatchRequest::BatchRequest(const BatchRequest& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.BatchRequest)
}
// Common constructor body: zero cached size and has-bits; header_ is lazily
// allocated on first mutable access.
void BatchRequest::SharedCtor() {
_cached_size_ = 0;
header_ = NULL;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
// Destructor: all teardown lives in SharedDtor().
BatchRequest::~BatchRequest() {
// @@protoc_insertion_point(destructor:cockroach.proto.BatchRequest)
SharedDtor();
}
// Frees the owned header sub-message — except on the default instance, whose
// header_ aliases RequestHeader's default instance and must not be deleted.
void BatchRequest::SharedDtor() {
if (this != default_instance_) {
delete header_;
}
}
// Stores a precomputed byte size under the concurrent-write guard macros.
void BatchRequest::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns this type's Descriptor, lazily assigning descriptors on first use.
const ::google::protobuf::Descriptor* BatchRequest::descriptor() {
protobuf_AssignDescriptorsOnce();
return BatchRequest_descriptor_;
}
// Returns the immutable singleton default instance, constructing it (and the
// file's descriptors) on first use.
const BatchRequest& BatchRequest::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
// Singleton default instance; created lazily by protobuf_AddDesc_...().
BatchRequest* BatchRequest::default_instance_ = NULL;
// Factory hook used by the protobuf runtime: heap-allocates a fresh,
// default-initialized BatchRequest.
BatchRequest* BatchRequest::New() const {
  BatchRequest* created = new BatchRequest;
  return created;
}
// Resets to default state: clears the header sub-message in place (keeps its
// allocation for reuse), empties the repeated requests, zeroes has-bits, and
// drops unknown fields.
void BatchRequest::Clear() {
if (has_header()) {
if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
}
requests_.Clear();
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
// Wire-format parser, merging decoded fields into this message. Field 1 is
// the optional header; field 2 is the repeated requests list (the
// ExpectTag(18) self-loop parses consecutive elements without re-entering
// the switch). Unrecognized tags are preserved in the unknown-field set;
// tag 0 / END_GROUP ends the parse. Returns false on a malformed stream.
bool BatchRequest::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.BatchRequest)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.RequestHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(18)) goto parse_requests;
break;
}
// repeated .cockroach.proto.RequestUnion requests = 2;
case 2: {
if (tag == 18) {
parse_requests:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, add_requests()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(18)) goto parse_requests;
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.BatchRequest)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.BatchRequest)
return false;
#undef DO_
}
// Writes the header (if set), each request element, and any unknown fields
// to |output|, using byte sizes cached by a prior ByteSize() call.
void BatchRequest::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.BatchRequest)
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
// repeated .cockroach.proto.RequestUnion requests = 2;
for (int i = 0; i < this->requests_size(); i++) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, this->requests(i), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.BatchRequest)
}
// Flat-buffer serialization: writes into |target| (caller guarantees
// capacity per ByteSize()) and returns the end pointer.
::google::protobuf::uint8* BatchRequest::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.BatchRequest)
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
// repeated .cockroach.proto.RequestUnion requests = 2;
for (int i = 0; i < this->requests_size(); i++) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
2, this->requests(i), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.BatchRequest)
return target;
}
// Computes total serialized size: optional header, each repeated request
// (one tag byte per element, hence "1 * requests_size()"), and unknown
// fields. The has-bits mask check skips the optional-field block cheaply
// when no optional field in the first word is set. Caches the result.
int BatchRequest::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
}
// repeated .cockroach.proto.RequestUnion requests = 2;
total_size += 1 * this->requests_size();
for (int i = 0; i < this->requests_size(); i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->requests(i));
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
// Reflection-based merge entry point: uses the fast typed merge when |from|
// is actually a BatchRequest, otherwise merges via reflection.
void BatchRequest::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const BatchRequest* source =
::google::protobuf::internal::dynamic_cast_if_available<const BatchRequest*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
// Typed merge: appends |from|'s requests, recursively merges the header when
// set in |from|, and appends |from|'s unknown fields.
void BatchRequest::MergeFrom(const BatchRequest& from) {
GOOGLE_CHECK_NE(&from, this);
requests_.MergeFrom(from.requests_);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// Replaces this message's contents with a copy of |from| (reflection-based
// overload). Guarded so a self-copy does not Clear() the source first.
void BatchRequest::CopyFrom(const ::google::protobuf::Message& from) {
  if (this != &from) {
    Clear();
    MergeFrom(from);
  }
}
// Replaces this message's contents with a copy of |from| (typed overload).
// Guarded so a self-copy does not Clear() the source first.
void BatchRequest::CopyFrom(const BatchRequest& from) {
  if (this != &from) {
    Clear();
    MergeFrom(from);
  }
}
// Always true: BatchRequest declares no required fields.
bool BatchRequest::IsInitialized() const {
return true;
}
// O(1) content exchange with |other|: swaps the header pointer, the repeated
// field's internal storage, has-bits, unknown fields, and cached size.
void BatchRequest::Swap(BatchRequest* other) {
if (other != this) {
std::swap(header_, other->header_);
requests_.Swap(&other->requests_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
// Returns descriptor/reflection metadata, after one-time lazy assignment.
::google::protobuf::Metadata BatchRequest::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = BatchRequest_descriptor_;
metadata.reflection = BatchRequest_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
// Out-of-class definitions for field-number constants declared in the header
// (ODR requirement); skipped for MSVC, which rejects them.
const int BatchResponse::kHeaderFieldNumber;
const int BatchResponse::kResponsesFieldNumber;
#endif  // !_MSC_VER
// Default constructor: delegates member setup to SharedCtor().
BatchResponse::BatchResponse()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.BatchResponse)
}
// Points the singleton default instance's header_ at ResponseHeader's default
// instance so header() on the default instance needs no allocation.
void BatchResponse::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
// Copy constructor: default-initializes, then deep-copies via MergeFrom().
BatchResponse::BatchResponse(const BatchResponse& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.BatchResponse)
}
// Common constructor body: zero cached size and has-bits; header_ is lazily
// allocated on first mutable access.
void BatchResponse::SharedCtor() {
_cached_size_ = 0;
header_ = NULL;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
// Destructor: all teardown lives in SharedDtor().
BatchResponse::~BatchResponse() {
// @@protoc_insertion_point(destructor:cockroach.proto.BatchResponse)
SharedDtor();
}
// Frees the owned header sub-message — except on the default instance, whose
// header_ aliases ResponseHeader's default instance and must not be deleted.
void BatchResponse::SharedDtor() {
if (this != default_instance_) {
delete header_;
}
}
// Stores a precomputed byte size under the concurrent-write guard macros.
void BatchResponse::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns this type's Descriptor, lazily assigning descriptors on first use.
const ::google::protobuf::Descriptor* BatchResponse::descriptor() {
protobuf_AssignDescriptorsOnce();
return BatchResponse_descriptor_;
}
// Returns the immutable singleton default instance, constructing it (and the
// file's descriptors) on first use.
const BatchResponse& BatchResponse::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
// Singleton default instance; created lazily by protobuf_AddDesc_...().
BatchResponse* BatchResponse::default_instance_ = NULL;
// Factory hook used by the protobuf runtime: heap-allocates a fresh,
// default-initialized BatchResponse.
BatchResponse* BatchResponse::New() const {
  BatchResponse* created = new BatchResponse;
  return created;
}
// Resets to default state: clears the header sub-message in place (keeps its
// allocation), empties the repeated responses, zeroes has-bits, and drops
// unknown fields.
void BatchResponse::Clear() {
if (has_header()) {
if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
}
responses_.Clear();
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
// Wire-format parser, merging decoded fields into this message. Field 1 is
// the optional header; field 2 is the repeated responses list (the
// ExpectTag(18) self-loop parses consecutive elements without re-entering
// the switch). Unrecognized tags are preserved in the unknown-field set;
// tag 0 / END_GROUP ends the parse. Returns false on a malformed stream.
bool BatchResponse::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.BatchResponse)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.ResponseHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(18)) goto parse_responses;
break;
}
// repeated .cockroach.proto.ResponseUnion responses = 2;
case 2: {
if (tag == 18) {
parse_responses:
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, add_responses()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(18)) goto parse_responses;
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.BatchResponse)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.BatchResponse)
return false;
#undef DO_
}
// Writes the header (if set), each response element, and any unknown fields
// to |output|, using byte sizes cached by a prior ByteSize() call.
void BatchResponse::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.BatchResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
// repeated .cockroach.proto.ResponseUnion responses = 2;
for (int i = 0; i < this->responses_size(); i++) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
2, this->responses(i), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.BatchResponse)
}
// Flat-buffer serialization: writes into |target| (caller guarantees
// capacity per ByteSize()) and returns the end pointer.
::google::protobuf::uint8* BatchResponse::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.BatchResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
// repeated .cockroach.proto.ResponseUnion responses = 2;
for (int i = 0; i < this->responses_size(); i++) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
2, this->responses(i), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.BatchResponse)
return target;
}
// Computes total serialized size: optional header, each repeated response
// (one tag byte per element, hence "1 * responses_size()"), and unknown
// fields. The has-bits mask check skips the optional-field block cheaply
// when no optional field in the first word is set. Caches the result.
int BatchResponse::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
}
// repeated .cockroach.proto.ResponseUnion responses = 2;
total_size += 1 * this->responses_size();
for (int i = 0; i < this->responses_size(); i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->responses(i));
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
// Reflection-based merge entry point: uses the fast typed merge when |from|
// is actually a BatchResponse, otherwise merges via reflection.
void BatchResponse::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const BatchResponse* source =
::google::protobuf::internal::dynamic_cast_if_available<const BatchResponse*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
// Typed merge: appends |from|'s responses, recursively merges the header
// when set in |from|, and appends |from|'s unknown fields.
void BatchResponse::MergeFrom(const BatchResponse& from) {
GOOGLE_CHECK_NE(&from, this);
responses_.MergeFrom(from.responses_);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
// Replaces this message's contents with a copy of |from| (reflection-based
// overload). Guarded so a self-copy does not Clear() the source first.
void BatchResponse::CopyFrom(const ::google::protobuf::Message& from) {
  if (this != &from) {
    Clear();
    MergeFrom(from);
  }
}
// Replaces this message's contents with a copy of |from| (typed overload).
// Guarded so a self-copy does not Clear() the source first.
void BatchResponse::CopyFrom(const BatchResponse& from) {
  if (this != &from) {
    Clear();
    MergeFrom(from);
  }
}
// Always true: BatchResponse declares no required fields.
bool BatchResponse::IsInitialized() const {
return true;
}
// O(1) content exchange with |other|: swaps the header pointer, the repeated
// field's internal storage, has-bits, unknown fields, and cached size.
void BatchResponse::Swap(BatchResponse* other) {
if (other != this) {
std::swap(header_, other->header_);
responses_.Swap(&other->responses_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
// Returns descriptor/reflection metadata, after one-time lazy assignment.
::google::protobuf::Metadata BatchResponse::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = BatchResponse_descriptor_;
metadata.reflection = BatchResponse_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
// Out-of-class definitions for field-number constants declared in the header
// (ODR requirement); skipped for MSVC, which rejects them.
const int AdminSplitRequest::kHeaderFieldNumber;
const int AdminSplitRequest::kSplitKeyFieldNumber;
#endif  // !_MSC_VER
// Default constructor: delegates member setup to SharedCtor().
AdminSplitRequest::AdminSplitRequest()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.AdminSplitRequest)
}
// Points the singleton default instance's header_ at RequestHeader's default
// instance so header() on the default instance needs no allocation.
void AdminSplitRequest::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
// Copy constructor: default-initializes, then deep-copies via MergeFrom().
AdminSplitRequest::AdminSplitRequest(const AdminSplitRequest& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.AdminSplitRequest)
}
// Common constructor body. GetEmptyString() forces initialization of the
// shared empty-string singleton; split_key_ then aliases it until the field
// is first mutated (copy-on-write for the default value).
void AdminSplitRequest::SharedCtor() {
::google::protobuf::internal::GetEmptyString();
_cached_size_ = 0;
header_ = NULL;
split_key_ = const_cast< ::std::string*>(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
// Destructor: all teardown lives in SharedDtor().
AdminSplitRequest::~AdminSplitRequest() {
// @@protoc_insertion_point(destructor:cockroach.proto.AdminSplitRequest)
SharedDtor();
}
// Frees split_key_ only if it was detached from the shared empty-string
// singleton, and the owned header — except on the default instance, whose
// header_ aliases RequestHeader's default instance.
void AdminSplitRequest::SharedDtor() {
if (split_key_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
delete split_key_;
}
if (this != default_instance_) {
delete header_;
}
}
// Stores a precomputed byte size under the concurrent-write guard macros.
void AdminSplitRequest::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
// Returns this type's Descriptor, lazily assigning descriptors on first use.
const ::google::protobuf::Descriptor* AdminSplitRequest::descriptor() {
protobuf_AssignDescriptorsOnce();
return AdminSplitRequest_descriptor_;
}
// Returns the immutable singleton default instance, constructing it (and the
// file's descriptors) on first use.
const AdminSplitRequest& AdminSplitRequest::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
// Singleton default instance; created lazily by protobuf_AddDesc_...().
AdminSplitRequest* AdminSplitRequest::default_instance_ = NULL;
// Factory hook used by the protobuf runtime: heap-allocates a fresh,
// default-initialized AdminSplitRequest.
AdminSplitRequest* AdminSplitRequest::New() const {
  AdminSplitRequest* created = new AdminSplitRequest;
  return created;
}
// Resets to default state. The "& 3" mask tests the has-bits for the two
// fields (header, split_key) in one check; sub-objects are cleared in place
// (allocations kept for reuse), then has-bits and unknown fields are reset.
void AdminSplitRequest::Clear() {
if (_has_bits_[0 / 32] & 3) {
if (has_header()) {
if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
}
if (has_split_key()) {
if (split_key_ != &::google::protobuf::internal::GetEmptyStringAlreadyInited()) {
split_key_->clear();
}
}
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
// Wire-format parser, merging decoded fields into this message. Field 1 is
// the optional header message; field 2 is the optional split_key bytes
// field (parsed via the parse_split_key goto fast path when fields arrive
// in order). Unrecognized tags are preserved in the unknown-field set;
// tag 0 / END_GROUP ends the parse. Returns false on a malformed stream.
bool AdminSplitRequest::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.AdminSplitRequest)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.RequestHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectTag(18)) goto parse_split_key;
break;
}
// optional bytes split_key = 2;
case 2: {
if (tag == 18) {
parse_split_key:
DO_(::google::protobuf::internal::WireFormatLite::ReadBytes(
input, this->mutable_split_key()));
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.AdminSplitRequest)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.AdminSplitRequest)
return false;
#undef DO_
}
void AdminSplitRequest::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.AdminSplitRequest)
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
// optional bytes split_key = 2;
if (has_split_key()) {
::google::protobuf::internal::WireFormatLite::WriteBytesMaybeAliased(
2, this->split_key(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.AdminSplitRequest)
}
::google::protobuf::uint8* AdminSplitRequest::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.AdminSplitRequest)
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
// optional bytes split_key = 2;
if (has_split_key()) {
target =
::google::protobuf::internal::WireFormatLite::WriteBytesToArray(
2, this->split_key(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.AdminSplitRequest)
return target;
}
int AdminSplitRequest::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
// optional bytes split_key = 2;
if (has_split_key()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::BytesSize(
this->split_key());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void AdminSplitRequest::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const AdminSplitRequest* source =
::google::protobuf::internal::dynamic_cast_if_available<const AdminSplitRequest*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void AdminSplitRequest::MergeFrom(const AdminSplitRequest& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
}
if (from.has_split_key()) {
set_split_key(from.split_key());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void AdminSplitRequest::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void AdminSplitRequest::CopyFrom(const AdminSplitRequest& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool AdminSplitRequest::IsInitialized() const {
return true;
}
void AdminSplitRequest::Swap(AdminSplitRequest* other) {
if (other != this) {
std::swap(header_, other->header_);
std::swap(split_key_, other->split_key_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata AdminSplitRequest::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = AdminSplitRequest_descriptor_;
metadata.reflection = AdminSplitRequest_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
const int AdminSplitResponse::kHeaderFieldNumber;
#endif // !_MSC_VER
AdminSplitResponse::AdminSplitResponse()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.AdminSplitResponse)
}
void AdminSplitResponse::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
AdminSplitResponse::AdminSplitResponse(const AdminSplitResponse& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.AdminSplitResponse)
}
void AdminSplitResponse::SharedCtor() {
_cached_size_ = 0;
header_ = NULL;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
AdminSplitResponse::~AdminSplitResponse() {
// @@protoc_insertion_point(destructor:cockroach.proto.AdminSplitResponse)
SharedDtor();
}
void AdminSplitResponse::SharedDtor() {
if (this != default_instance_) {
delete header_;
}
}
void AdminSplitResponse::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* AdminSplitResponse::descriptor() {
protobuf_AssignDescriptorsOnce();
return AdminSplitResponse_descriptor_;
}
const AdminSplitResponse& AdminSplitResponse::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
AdminSplitResponse* AdminSplitResponse::default_instance_ = NULL;
AdminSplitResponse* AdminSplitResponse::New() const {
return new AdminSplitResponse;
}
void AdminSplitResponse::Clear() {
if (has_header()) {
if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
bool AdminSplitResponse::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.AdminSplitResponse)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.ResponseHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.AdminSplitResponse)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.AdminSplitResponse)
return false;
#undef DO_
}
void AdminSplitResponse::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.AdminSplitResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.AdminSplitResponse)
}
::google::protobuf::uint8* AdminSplitResponse::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.AdminSplitResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.AdminSplitResponse)
return target;
}
int AdminSplitResponse::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void AdminSplitResponse::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const AdminSplitResponse* source =
::google::protobuf::internal::dynamic_cast_if_available<const AdminSplitResponse*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void AdminSplitResponse::MergeFrom(const AdminSplitResponse& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void AdminSplitResponse::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void AdminSplitResponse::CopyFrom(const AdminSplitResponse& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool AdminSplitResponse::IsInitialized() const {
return true;
}
void AdminSplitResponse::Swap(AdminSplitResponse* other) {
if (other != this) {
std::swap(header_, other->header_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata AdminSplitResponse::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = AdminSplitResponse_descriptor_;
metadata.reflection = AdminSplitResponse_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
const int AdminMergeRequest::kHeaderFieldNumber;
#endif // !_MSC_VER
AdminMergeRequest::AdminMergeRequest()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.AdminMergeRequest)
}
void AdminMergeRequest::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::RequestHeader*>(&::cockroach::proto::RequestHeader::default_instance());
}
AdminMergeRequest::AdminMergeRequest(const AdminMergeRequest& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.AdminMergeRequest)
}
void AdminMergeRequest::SharedCtor() {
_cached_size_ = 0;
header_ = NULL;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
AdminMergeRequest::~AdminMergeRequest() {
// @@protoc_insertion_point(destructor:cockroach.proto.AdminMergeRequest)
SharedDtor();
}
void AdminMergeRequest::SharedDtor() {
if (this != default_instance_) {
delete header_;
}
}
void AdminMergeRequest::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* AdminMergeRequest::descriptor() {
protobuf_AssignDescriptorsOnce();
return AdminMergeRequest_descriptor_;
}
const AdminMergeRequest& AdminMergeRequest::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
AdminMergeRequest* AdminMergeRequest::default_instance_ = NULL;
AdminMergeRequest* AdminMergeRequest::New() const {
return new AdminMergeRequest;
}
void AdminMergeRequest::Clear() {
if (has_header()) {
if (header_ != NULL) header_->::cockroach::proto::RequestHeader::Clear();
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
bool AdminMergeRequest::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.AdminMergeRequest)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.RequestHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.AdminMergeRequest)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.AdminMergeRequest)
return false;
#undef DO_
}
void AdminMergeRequest::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.AdminMergeRequest)
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.AdminMergeRequest)
}
::google::protobuf::uint8* AdminMergeRequest::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.AdminMergeRequest)
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.AdminMergeRequest)
return target;
}
int AdminMergeRequest::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.RequestHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void AdminMergeRequest::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const AdminMergeRequest* source =
::google::protobuf::internal::dynamic_cast_if_available<const AdminMergeRequest*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void AdminMergeRequest::MergeFrom(const AdminMergeRequest& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::RequestHeader::MergeFrom(from.header());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void AdminMergeRequest::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void AdminMergeRequest::CopyFrom(const AdminMergeRequest& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool AdminMergeRequest::IsInitialized() const {
return true;
}
void AdminMergeRequest::Swap(AdminMergeRequest* other) {
if (other != this) {
std::swap(header_, other->header_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata AdminMergeRequest::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = AdminMergeRequest_descriptor_;
metadata.reflection = AdminMergeRequest_reflection_;
return metadata;
}
// ===================================================================
#ifndef _MSC_VER
const int AdminMergeResponse::kHeaderFieldNumber;
#endif // !_MSC_VER
AdminMergeResponse::AdminMergeResponse()
: ::google::protobuf::Message() {
SharedCtor();
// @@protoc_insertion_point(constructor:cockroach.proto.AdminMergeResponse)
}
void AdminMergeResponse::InitAsDefaultInstance() {
header_ = const_cast< ::cockroach::proto::ResponseHeader*>(&::cockroach::proto::ResponseHeader::default_instance());
}
AdminMergeResponse::AdminMergeResponse(const AdminMergeResponse& from)
: ::google::protobuf::Message() {
SharedCtor();
MergeFrom(from);
// @@protoc_insertion_point(copy_constructor:cockroach.proto.AdminMergeResponse)
}
void AdminMergeResponse::SharedCtor() {
_cached_size_ = 0;
header_ = NULL;
::memset(_has_bits_, 0, sizeof(_has_bits_));
}
AdminMergeResponse::~AdminMergeResponse() {
// @@protoc_insertion_point(destructor:cockroach.proto.AdminMergeResponse)
SharedDtor();
}
void AdminMergeResponse::SharedDtor() {
if (this != default_instance_) {
delete header_;
}
}
void AdminMergeResponse::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ::google::protobuf::Descriptor* AdminMergeResponse::descriptor() {
protobuf_AssignDescriptorsOnce();
return AdminMergeResponse_descriptor_;
}
const AdminMergeResponse& AdminMergeResponse::default_instance() {
if (default_instance_ == NULL) protobuf_AddDesc_cockroach_2fproto_2fapi_2eproto();
return *default_instance_;
}
AdminMergeResponse* AdminMergeResponse::default_instance_ = NULL;
AdminMergeResponse* AdminMergeResponse::New() const {
return new AdminMergeResponse;
}
void AdminMergeResponse::Clear() {
if (has_header()) {
if (header_ != NULL) header_->::cockroach::proto::ResponseHeader::Clear();
}
::memset(_has_bits_, 0, sizeof(_has_bits_));
mutable_unknown_fields()->Clear();
}
bool AdminMergeResponse::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:cockroach.proto.AdminMergeResponse)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoff(127);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// optional .cockroach.proto.ResponseHeader header = 1;
case 1: {
if (tag == 10) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_header()));
} else {
goto handle_unusual;
}
if (input->ExpectAtEnd()) goto success;
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormat::SkipField(
input, tag, mutable_unknown_fields()));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:cockroach.proto.AdminMergeResponse)
return true;
failure:
// @@protoc_insertion_point(parse_failure:cockroach.proto.AdminMergeResponse)
return false;
#undef DO_
}
void AdminMergeResponse::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:cockroach.proto.AdminMergeResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray(
1, this->header(), output);
}
if (!unknown_fields().empty()) {
::google::protobuf::internal::WireFormat::SerializeUnknownFields(
unknown_fields(), output);
}
// @@protoc_insertion_point(serialize_end:cockroach.proto.AdminMergeResponse)
}
::google::protobuf::uint8* AdminMergeResponse::SerializeWithCachedSizesToArray(
::google::protobuf::uint8* target) const {
// @@protoc_insertion_point(serialize_to_array_start:cockroach.proto.AdminMergeResponse)
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
target = ::google::protobuf::internal::WireFormatLite::
WriteMessageNoVirtualToArray(
1, this->header(), target);
}
if (!unknown_fields().empty()) {
target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray(
unknown_fields(), target);
}
// @@protoc_insertion_point(serialize_to_array_end:cockroach.proto.AdminMergeResponse)
return target;
}
int AdminMergeResponse::ByteSize() const {
int total_size = 0;
if (_has_bits_[0 / 32] & (0xffu << (0 % 32))) {
// optional .cockroach.proto.ResponseHeader header = 1;
if (has_header()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->header());
}
}
if (!unknown_fields().empty()) {
total_size +=
::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize(
unknown_fields());
}
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = total_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void AdminMergeResponse::MergeFrom(const ::google::protobuf::Message& from) {
GOOGLE_CHECK_NE(&from, this);
const AdminMergeResponse* source =
::google::protobuf::internal::dynamic_cast_if_available<const AdminMergeResponse*>(
&from);
if (source == NULL) {
::google::protobuf::internal::ReflectionOps::Merge(from, this);
} else {
MergeFrom(*source);
}
}
void AdminMergeResponse::MergeFrom(const AdminMergeResponse& from) {
GOOGLE_CHECK_NE(&from, this);
if (from._has_bits_[0 / 32] & (0xffu << (0 % 32))) {
if (from.has_header()) {
mutable_header()->::cockroach::proto::ResponseHeader::MergeFrom(from.header());
}
}
mutable_unknown_fields()->MergeFrom(from.unknown_fields());
}
void AdminMergeResponse::CopyFrom(const ::google::protobuf::Message& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
void AdminMergeResponse::CopyFrom(const AdminMergeResponse& from) {
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool AdminMergeResponse::IsInitialized() const {
return true;
}
void AdminMergeResponse::Swap(AdminMergeResponse* other) {
if (other != this) {
std::swap(header_, other->header_);
std::swap(_has_bits_[0], other->_has_bits_[0]);
_unknown_fields_.Swap(&other->_unknown_fields_);
std::swap(_cached_size_, other->_cached_size_);
}
}
::google::protobuf::Metadata AdminMergeResponse::GetMetadata() const {
protobuf_AssignDescriptorsOnce();
::google::protobuf::Metadata metadata;
metadata.descriptor = AdminMergeResponse_descriptor_;
metadata.reflection = AdminMergeResponse_reflection_;
return metadata;
}
// @@protoc_insertion_point(namespace_scope)
} // namespace proto
} // namespace cockroach
// @@protoc_insertion_point(global_scope)<|fim▁end|> | GOOGLE_SAFE_CONCURRENT_WRITES_END(); |
<|file_name|>hide_top_overlay.d.ts<|end_file_name|><|fim▁begin|>/**
* DevExtreme (mobile/hide_top_overlay.d.ts)
* Version: 16.2.5
* Build date: Mon Feb 27 2017
*
* Copyright (c) 2012 - 2017 Developer Express Inc. ALL RIGHTS RESERVED
* EULA: https://www.devexpress.com/Support/EULAs/DevExtreme.xml
*/
import DevExpress from '../bundles/dx.all';
<|fim▁hole|><|fim▁end|> | export default DevExpress.hideTopOverlay; |
<|file_name|>possible_browser.py<|end_file_name|><|fim▁begin|># Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.internal.app import possible_app
class PossibleBrowser(possible_app.PossibleApp):
"""A browser that can be controlled.
Call Create() to launch the browser and begin manipulating it..
"""
def __init__(self, browser_type, target_os, supports_tab_control):
super(PossibleBrowser, self).__init__(app_type=browser_type,
target_os=target_os)
self._supports_tab_control = supports_tab_control
self._credentials_path = None
def __repr__(self):
return 'PossibleBrowser(app_type=%s)' % self.app_type
@property
def browser_type(self):
return self.app_type
@property
def supports_tab_control(self):
return self._supports_tab_control
def _InitPlatformIfNeeded(self):
raise NotImplementedError()
def Create(self, finder_options):
raise NotImplementedError()
def SupportsOptions(self, browser_options):
"""Tests for extension support."""
raise NotImplementedError()
def IsRemote(self):
return False
def RunRemote(self):<|fim▁hole|> pass
def last_modification_time(self):
return -1
def SetCredentialsPath(self, credentials_path):
self._credentials_path = credentials_path<|fim▁end|> | pass
def UpdateExecutableIfNeeded(self): |
<|file_name|>serious-widget.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.4.0
var isDefined,
__bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
__slice = [].slice,
__indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
window.serious = {};
window.serious.Utils = {};
isDefined = function(obj) {
return typeof obj !== 'undefined' && obj !== null;
};
jQuery.fn.opacity = function(int) {
return $(this).css({
opacity: int
});
};
window.serious.Utils.clone = function(obj) {
var flags, key, newInstance;
if (!(obj != null) || typeof obj !== 'object') {
return obj;
}
if (obj instanceof Date) {
return new Date(obj.getTime());
}
if (obj instanceof RegExp) {
flags = '';
if (obj.global != null) {
flags += 'g';
}
if (obj.ignoreCase != null) {
flags += 'i';
}
if (obj.multiline != null) {
flags += 'm';
}
if (obj.sticky != null) {
flags += 'y';
}
return new RegExp(obj.source, flags);
}
newInstance = new obj.constructor();
for (key in obj) {
newInstance[key] = window.serious.Utils.clone(obj[key]);
}
return newInstance;
};
jQuery.fn.cloneTemplate = function(dict, removeUnusedField) {
var klass, nui, value;
if (removeUnusedField == null) {
removeUnusedField = false;
}
nui = $(this[0]).clone();
nui = nui.removeClass("template hidden").addClass("actual");
if (typeof dict === "object") {
for (klass in dict) {
value = dict[klass];
if (value !== null) {
nui.find(".out." + klass).html(value);
}
}
if (removeUnusedField) {
nui.find(".out").each(function() {
if ($(this).html() === "") {
return $(this).remove();
}
});
}
}
return nui;
};
Object.size = function(obj) {
var key, size;
size = 0;
for (key in obj) {
if (obj.hasOwnProperty(key)) {
size++;
}
}
return size;
};
window.serious.States = (function() {
function States() {
this.states = {};
}
States.prototype.set = function(state, value, scope) {
if (value == null) {
value = true;
}
if (scope == null) {
scope = document;
}
this.states[state] = value;
return this._showState(state, value);
};
States.prototype._showState = function(state, value, scope) {
if (value == null) {
value = true;
}
if (scope == null) {
scope = document;
}
$(".when-" + state, scope).each(function(idx, element) {
var expected_value;
element = $(element);
expected_value = element.data('state') || true;
return $(element).toggleClass('hidden', expected_value.toString() !== value.toString());
});
return $(".when-not-" + state, scope).each(function(idx, element) {
var expected_value;
element = $(element);
expected_value = element.data('state') || true;
return $(element).toggleClass('hidden', expected_value.toString() === value.toString());
});
};
return States;
})();
window.serious.Widget = (function() {
function Widget() {
this.cloneTemplate = __bind(this.cloneTemplate, this);
this.show = __bind(this.show, this);
this.hide = __bind(this.hide, this);
this.get = __bind(this.get, this);
this.set = __bind(this.set, this);
}
Widget.bindAll = function() {
var first, firsts, _i, _len;
firsts = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
if (firsts) {
for (_i = 0, _len = firsts.length; _i < _len; _i++) {
first = firsts[_i];
Widget.ensureWidget($(first));
}
}
return $(".widget").each(function() {
var self;
self = $(this);
if (!self.hasClass('template') && !self.parents().hasClass('template')) {
return Widget.ensureWidget(self);
}
});
};
Widget.ensureWidget = function(ui) {
var widget, widget_class;
ui = $(ui);
if (!ui.length) {
return null;
} else if (ui[0]._widget != null) {
return ui[0]._widget;
} else {
widget_class = Widget.getWidgetClass(ui);
if (widget_class != null) {
widget = new widget_class();
widget.bindUI(ui);
return widget;
} else {
console.warn("widget not found for", ui);
return null;
}
}
};
Widget.getWidgetClass = function(ui) {
return eval("(" + $(ui).attr("data-widget") + ")");
};
Widget.prototype.bindUI = function(ui) {
var action, key, nui, value, _i, _len, _ref, _ref1, _results;
this.ui = $(ui);
if (this.ui[0]._widget) {
delete this.ui[0]._widget;
}
this.ui[0]._widget = this;
this.uis = {};
if (typeof this.UIS !== "undefined") {
_ref = this.UIS;
for (key in _ref) {
value = _ref[key];
nui = this.ui.find(value);
if (nui.length < 1) {
console.warn("uis", key, "not found in", ui);
}
this.uis[key] = nui;
}
}
if (this.ACTIONS != null) {
_ref1 = this.ACTIONS;
_results = [];
for (_i = 0, _len = _ref1.length; _i < _len; _i++) {
action = _ref1[_i];
_results.push(this._bindClick(this.ui.find(".do[data-action=" + action + "]"), action));
}
return _results;
}
};
Widget.prototype.set = function(field, value, context) {
/* Set a value to all tag with the given data-field attribute.
Field can be a dict or a field name.
If it is a dict, the second parameter should be a context.
The default context is the widget itself.
*/
var name, _value;
if (typeof field === "object") {
context = value || this.ui;
for (name in field) {
_value = field[name];
context.find(".out[data-field=" + name + "]").html(_value);
}
} else {
context = context || this.ui;
context.find(".out[data-field=" + field + "]").html(value);
}
return context;
};
Widget.prototype.get = function(form) {
var data;
form = $(form);
data = {};
form.find('input.in').each(function() {
var input;
input = $(this);
if (!input.hasClass('template') && !input.parents().hasClass('template')) {
return data[input.attr('name')] = input.val();
}
});
return data;
};
Widget.prototype.hide = function() {
return this.ui.addClass("hidden");
};
Widget.prototype.show = function() {
return this.ui.removeClass("hidden");
};
Widget.prototype.cloneTemplate = function(template_nui, dict, removeUnusedField) {
var action, klass, nui, value, _i, _len, _ref;
if (removeUnusedField == null) {
removeUnusedField = false;
}
nui = template_nui.clone();
nui = nui.removeClass("template hidden").addClass("actual");
if (typeof dict === "object") {
for (klass in dict) {
value = dict[klass];
if (value !== null) {
nui.find(".out." + klass).html(value);<|fim▁hole|> if (removeUnusedField) {
nui.find(".out").each(function() {
if ($(this).html() === "") {
return $(this).remove();
}
});
}
}
if (this.ACTIONS != null) {
_ref = this.ACTIONS;
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
action = _ref[_i];
this._bindClick(nui.find(".do[data-action=" + action + "]"), action);
}
}
return nui;
};
Widget.prototype._bindClick = function(nui, action) {
var _this = this;
if ((action != null) && __indexOf.call(this.ACTIONS, action) >= 0) {
return nui.click(function(e) {
_this[action](e);
return e.preventDefault();
});
}
};
return Widget;
})();
window.serious.URL = (function() {
function URL() {
this.toString = __bind(this.toString, this);
this.fromString = __bind(this.fromString, this);
this.enableDynamicLinks = __bind(this.enableDynamicLinks, this);
this.updateUrl = __bind(this.updateUrl, this);
this.hasBeenAdded = __bind(this.hasBeenAdded, this);
this.hasChanged = __bind(this.hasChanged, this);
this.remove = __bind(this.remove, this);
this.update = __bind(this.update, this);
this.set = __bind(this.set, this);
this.onStateChanged = __bind(this.onStateChanged, this);
this.get = __bind(this.get, this);
var _this = this;
this.previousHash = [];
this.handlers = [];
this.hash = this.fromString(location.hash);
$(window).hashchange(function() {
var handler, _i, _len, _ref, _results;
_this.previousHash = window.serious.Utils.clone(_this.hash);
_this.hash = _this.fromString(location.hash);
_ref = _this.handlers;
_results = [];
for (_i = 0, _len = _ref.length; _i < _len; _i++) {
handler = _ref[_i];
_results.push(handler());
}
return _results;
});
}
URL.prototype.get = function(field) {
if (field == null) {
field = null;
}
if (field) {
return this.hash[field];
} else {
return this.hash;
}
};
URL.prototype.onStateChanged = function(handler) {
return this.handlers.push(handler);
};
URL.prototype.set = function(fields, silent) {
var hash, key, value;
if (silent == null) {
silent = false;
}
hash = silent ? this.hash : window.serious.Utils.clone(this.hash);
hash = [];
for (key in fields) {
value = fields[key];
if (isDefined(value)) {
hash[key] = value;
}
}
return this.updateUrl(hash);
};
URL.prototype.update = function(fields, silent) {
var hash, key, value;
if (silent == null) {
silent = false;
}
hash = silent ? this.hash : window.serious.Utils.clone(this.hash);
for (key in fields) {
value = fields[key];
if (isDefined(value)) {
hash[key] = value;
} else {
delete hash[key];
}
}
return this.updateUrl(hash);
};
URL.prototype.remove = function(key, silent) {
var hash;
if (silent == null) {
silent = false;
}
hash = silent ? this.hash : window.serious.Utils.clone(this.hash);
if (hash[key]) {
delete hash[key];
}
return this.updateUrl(hash);
};
URL.prototype.hasChanged = function(key) {
if (this.hash[key] != null) {
if (this.previousHash[key] != null) {
return this.hash[key].toString() !== this.previousHash[key].toString();
} else {
return true;
}
} else {
if (this.previousHash[key] != null) {
return true;
}
}
return false;
};
URL.prototype.hasBeenAdded = function(key) {
return console.error("not implemented");
};
URL.prototype.updateUrl = function(hash) {
if (hash == null) {
hash = null;
}
if (!hash || Object.size(hash) === 0) {
return location.hash = '_';
} else {
return location.hash = this.toString(hash);
}
};
URL.prototype.enableDynamicLinks = function(context) {
var _this = this;
if (context == null) {
context = null;
}
return $("a.internal[href]", context).click(function(e) {
var href, link;
link = $(e.currentTarget);
href = link.attr("data-href") || link.attr("href");
if (href[0] === "#") {
if (href.length > 1 && href[1] === "+") {
_this.update(_this.fromString(href.slice(2)));
} else if (href.length > 1 && href[1] === "-") {
_this.remove(_this.fromString(href.slice(2)));
} else {
_this.set(_this.fromString(href.slice(1)));
}
}
return false;
});
};
URL.prototype.fromString = function(value) {
var hash, hash_list, item, key, key_value, val, _i, _len;
value = value || location.hash;
hash = {};
value = value.replace('!', '');
hash_list = value.split("&");
for (_i = 0, _len = hash_list.length; _i < _len; _i++) {
item = hash_list[_i];
if (item != null) {
key_value = item.split("=");
if (key_value.length === 2) {
key = key_value[0].replace("#", "");
val = key_value[1].replace("#", "");
hash[key] = val;
}
}
}
return hash;
};
URL.prototype.toString = function(hash_list) {
var i, key, new_hash, value;
if (hash_list == null) {
hash_list = null;
}
hash_list = hash_list || this.hash;
new_hash = "!";
i = 0;
for (key in hash_list) {
value = hash_list[key];
if (i > 0) {
new_hash += "&";
}
new_hash += key + "=" + value;
i++;
}
return new_hash;
};
return URL;
})();<|fim▁end|> | }
} |
<|file_name|>basic-ontology.ts<|end_file_name|><|fim▁begin|>import { JsonObject, JsonProperty } from 'json2typescript';
/**
 * json2typescript mapping for one ontology property definition as returned
 * by the API. Fields default to `undefined` so the JSON converter can
 * detect values that were never set.
 */
@JsonObject('PropertyItem')
export class PropertyItem {
    // Machine-readable property name.
    @JsonProperty('name', String)
    public name: string = undefined;
    // Ordering hint for the GUI form.
    @JsonProperty('guiorder', Number)
    public guiorder: number = undefined;
    @JsonProperty('description', String)
    public description: string = undefined;
    // Id of the value type this property stores — presumably an IRI; confirm.
    @JsonProperty('valuetype_id', String)
    public valuetype_id: string = undefined;
    // Human-readable label.
    @JsonProperty('label', String)
    public label: string = undefined;
    // Vocabulary (ontology) the property belongs to.
    @JsonProperty('vocabulary', String)
    public vocabulary: string = undefined;
    @JsonProperty('attributes', String)
    public attributes: string = undefined;
    // Optional in the JSON payload (third decorator argument = isOptional).
    @JsonProperty('occurrence', String, true)
    public occurrence: string = undefined;
    @JsonProperty('id', String)
    public id: string = undefined;
    // Optional name of the GUI element used for this property.
    @JsonProperty('gui_name', String, true)
    public gui_name: string = undefined;
}
/** has five permission categories and four groups
 * @Category
 * none: no permission (the resource or the property will be hidden for the specified group)
 * read: permission to see the property/value
 * comment: permission to comment/annotate a value
 * edit: permission to create and edit a value
 * delete: permission to delete a value
 *
 * @Group (stored in the fields everyone/guest/member/admin below)
 * everyone: every visitor
 * guest: logged in knora user
 * member: logged in project user
 * admin: logged in project (or system) admin user
 */
@JsonObject('Permissions')
export class Permissions {
    // Permission category for every visitor.
    @JsonProperty('everyone', String)
    public everyone: string = undefined;
    // Permission category for logged in guests.
    @JsonProperty('guest', String)
    public guest: string = undefined;
    // Permission category for project members.
    @JsonProperty('member', String)
    public member: string = undefined;
    // Permission category for project (or system) admins.
    @JsonProperty('admin', String)
    public admin: string = undefined;
}
/** GUI rendering hints for a property (element kind, type, optional list). */
@JsonObject('Gui')
export class Gui {
    @JsonProperty('element', String)
    public element: string = undefined;
    @JsonProperty('type', String)
    public type: string = undefined;
    // Optional — presumably only set when the element is backed by a list.
    @JsonProperty('list_id', String, true)
    public list_id: string = undefined;
}
@JsonObject('Property')
export class Property {<|fim▁hole|>
@JsonProperty('cardinality', String)
public cardinality: string = undefined;
@JsonProperty('gui', Gui)
public gui: Gui = undefined;
/**
* Permission for the each property
* @type {Permissions}
*/
@JsonProperty('permissions', Permissions, true)
public permissions: Permissions = undefined;
}
/**
* the class includes the default properties as an array. The property id is the key in the array
*/
@JsonObject('ResourceClass')
export class ResourceClass {
    // Optional identifier of the resource class.
    @JsonProperty('id', String, true)
    public id: string = undefined;
    @JsonProperty('label', String)
    public label: string = undefined;
    @JsonProperty('description', String)
    public description: string = undefined;
    @JsonProperty('icon', String)
    public icon: string = undefined;
    @JsonProperty('file', String, true)
    public file: string = undefined;
    /**
     * Permission for the resource
     * @type {Permissions}
     */
    @JsonProperty('permissions', Permissions, true)
    public permissions: Permissions = undefined;
    // Properties attached to this resource class; optional in the payload.
    @JsonProperty('properties', [PropertyItem], true)
    public properties: PropertyItem[] = undefined;
}
/**
* is an array of resource classes. The id of the resource class is the key in the array
*/
/** Top-level ontology payload: default properties, default permissions and
 *  the resource classes. All members are optional in the JSON. */
@JsonObject('BasicOntology')
export class BasicOntology {
    // defaultProperties
    @JsonProperty('defaultProperties', [PropertyItem], true)
    public defaultProperties: PropertyItem[] = undefined;
    // defaultPermissions
    @JsonProperty('defaultPermissions', Permissions, true)
    public defaultPermissions: Permissions = undefined;
    // defaultResourceClasses
    @JsonProperty('resourceClasses', [ResourceClass], true)
    public resourceClasses: ResourceClass[] = undefined;
}
/** Key/value pair wrapping a `Property` under its key. */
@JsonObject('PropertyObject')
export class PropertyObject {
    @JsonProperty('key', String)
    public key: string = undefined;
    @JsonProperty('value', Property)
    public value: Property = undefined;
}
@JsonProperty('label', String)
public label: string = undefined; |
<|file_name|>bbcodetag.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Copyright (c) 2009 Sergio Gabriel Teves
All rights reserved.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django import template
from django.conf import settings
from django.utils.encoding import smart_str, force_unicode
from django.utils.safestring import mark_safe
import string
import re
register = template.Library()
<|fim▁hole|> list = []
for item in settings.SMILEYS_PACK:
list.append({'code': item[0],'img': settings.SMILEYS_ROOT + item[1]})
return {'list': list}
@register.filter
def smiley(value):
    """Replace every smiley code from ``settings.SMILEYS_PACK`` found in
    ``value`` (case-insensitively) with its corresponding ``<img>`` tag.

    Returns the result marked safe for template output.
    """
    img = '<img src="%s"/>'
    for code, icon in settings.SMILEYS_PACK:
        # BUG FIX: the original escaped only parentheses by hand, so smiley
        # codes containing other regex metacharacters (".", "*", "^", ...)
        # were mis-matched. re.escape handles every metacharacter.
        pattern = r"(?i)%s" % re.escape(code)
        value = re.sub(pattern, img % (settings.SMILEYS_ROOT + icon), value)
    return mark_safe(value)
@register.filter
def bbcode(value):
    """
    Generates (X)HTML from string with BBCode "markup".
    By using the postmark lib from:
    @see: http://code.google.com/p/postmarkup/
    """
    try:
        from postmarkup import render_bbcode
    except ImportError:
        if settings.DEBUG:
            # BUG FIX: use the call form of raise, which is valid on both
            # Python 2 and Python 3 (the old "raise Class, msg" statement
            # syntax is a SyntaxError on Python 3).
            raise template.TemplateSyntaxError("Error in {% bbcode %} filter: The Python postmarkup library isn't installed.")
        return force_unicode(value)
    else:
        return mark_safe(render_bbcode(value))
# BUG FIX: Django's filter safety flag is spelled "is_safe"; the original
# "is_save" attribute was silently ignored by the template engine.
bbcode.is_safe = True
@register.filter
def strip_bbcode(value):
    """
    Strips BBCode tags from a string
    By using the postmark lib from:
    @see: http://code.google.com/p/postmarkup/
    """
    try:
        from postmarkup import strip_bbcode
    except ImportError:
        if settings.DEBUG:
            # BUG FIX: call form of raise — valid on Python 2 and 3 alike.
            raise template.TemplateSyntaxError("Error in {% bbcode %} filter: The Python postmarkup library isn't installed.")
        return force_unicode(value)
    else:
        return mark_safe(strip_bbcode(value))
# BUG FIX: the original set bbcode.is_save again (copy-paste) instead of
# flagging *this* filter; Django's attribute is spelled "is_safe".
strip_bbcode.is_safe = True
@register.inclusion_tag('tags/smileysrender.html')
def smileyrender():
|
<|file_name|>document.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cookie_rs;
use core::nonzero::NonZero;
use devtools_traits::ScriptToDevtoolsControlMsg;
use document_loader::{DocumentLoader, LoadType};
use dom::activation::{ActivationSource, synthetic_click_activation};
use dom::attr::Attr;
use dom::beforeunloadevent::BeforeUnloadEvent;
use dom::bindings::callback::ExceptionHandling;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DocumentBinding;
use dom::bindings::codegen::Bindings::DocumentBinding::{DocumentMethods, DocumentReadyState, ElementCreationOptions};
use dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementBinding::HTMLIFrameElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::Bindings::NodeFilterBinding::NodeFilter;
use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceMethods;
use dom::bindings::codegen::Bindings::TouchBinding::TouchMethods;
use dom::bindings::codegen::Bindings::WindowBinding::{FrameRequestCallback, ScrollBehavior, WindowMethods};
use dom::bindings::codegen::UnionTypes::NodeOrString;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{JS, LayoutJS, MutNullableJS, Root};
use dom::bindings::js::RootedReference;
use dom::bindings::num::Finite;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::{DOMString, USVString};
use dom::bindings::xmlname::{namespace_from_domstring, validate_and_extract, xml_name_type};
use dom::bindings::xmlname::XMLName::InvalidXMLName;
use dom::closeevent::CloseEvent;
use dom::comment::Comment;
use dom::cssstylesheet::CSSStyleSheet;
use dom::customelementregistry::CustomElementDefinition;
use dom::customevent::CustomEvent;
use dom::documentfragment::DocumentFragment;
use dom::documenttype::DocumentType;
use dom::domimplementation::DOMImplementation;
use dom::element::{Element, ElementCreator, ElementPerformFullscreenEnter, ElementPerformFullscreenExit};
use dom::element::CustomElementCreationMode;
use dom::errorevent::ErrorEvent;
use dom::event::{Event, EventBubbles, EventCancelable, EventDefault, EventStatus};
use dom::eventtarget::EventTarget;
use dom::focusevent::FocusEvent;
use dom::forcetouchevent::ForceTouchEvent;
use dom::globalscope::GlobalScope;
use dom::hashchangeevent::HashChangeEvent;
use dom::htmlanchorelement::HTMLAnchorElement;
use dom::htmlappletelement::HTMLAppletElement;
use dom::htmlareaelement::HTMLAreaElement;
use dom::htmlbaseelement::HTMLBaseElement;
use dom::htmlbodyelement::HTMLBodyElement;
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmlembedelement::HTMLEmbedElement;
use dom::htmlformelement::{FormControl, FormControlElementHelpers, HTMLFormElement};
use dom::htmlheadelement::HTMLHeadElement;
use dom::htmlhtmlelement::HTMLHtmlElement;
use dom::htmliframeelement::HTMLIFrameElement;
use dom::htmlimageelement::HTMLImageElement;
use dom::htmlmetaelement::HTMLMetaElement;
use dom::htmlscriptelement::{HTMLScriptElement, ScriptResult};
use dom::htmltitleelement::HTMLTitleElement;
use dom::keyboardevent::KeyboardEvent;
use dom::location::Location;
use dom::messageevent::MessageEvent;
use dom::mouseevent::MouseEvent;
use dom::node::{self, CloneChildrenFlag, Node, NodeDamage, window_from_node, IS_IN_DOC, LayoutNodeHelpers};
use dom::node::VecPreOrderInsertionHelper;
use dom::nodeiterator::NodeIterator;
use dom::nodelist::NodeList;
use dom::pagetransitionevent::PageTransitionEvent;
use dom::popstateevent::PopStateEvent;
use dom::processinginstruction::ProcessingInstruction;
use dom::progressevent::ProgressEvent;
use dom::promise::Promise;
use dom::range::Range;
use dom::servoparser::ServoParser;
use dom::storageevent::StorageEvent;
use dom::stylesheetlist::StyleSheetList;
use dom::text::Text;
use dom::touch::Touch;
use dom::touchevent::TouchEvent;
use dom::touchlist::TouchList;
use dom::treewalker::TreeWalker;
use dom::uievent::UIEvent;
use dom::virtualmethods::vtable_for;
use dom::webglcontextevent::WebGLContextEvent;
use dom::window::{ReflowReason, Window};
use dom::windowproxy::WindowProxy;
use dom_struct::dom_struct;
use encoding::EncodingRef;
use encoding::all::UTF_8;
use euclid::{Point2D, Vector2D};
use html5ever::{LocalName, Namespace, QualName};
use hyper::header::{Header, SetCookie};
use hyper_serde::Serde;
use ipc_channel::ipc::{self, IpcSender};
use js::jsapi::{JSContext, JSObject, JSRuntime};
use js::jsapi::JS_GetRuntime;
use msg::constellation_msg::{ALT, CONTROL, SHIFT, SUPER};
use msg::constellation_msg::{BrowsingContextId, Key, KeyModifiers, KeyState, TopLevelBrowsingContextId};
use net_traits::{FetchResponseMsg, IpcSend, ReferrerPolicy};
use net_traits::CookieSource::NonHTTP;
use net_traits::CoreResourceMsg::{GetCookiesForUrl, SetCookiesForUrl};
use net_traits::pub_domains::is_pub_domain;
use net_traits::request::RequestInit;
use net_traits::response::HttpsState;
use num_traits::ToPrimitive;
use script_layout_interface::message::{Msg, ReflowQueryType};
use script_runtime::{CommonScriptMsg, ScriptThreadEventCategory};
use script_thread::{MainThreadScriptMsg, ScriptThread};
use script_traits::{AnimationState, CompositorEvent, DocumentActivity};
use script_traits::{MouseButton, MouseEventType, MozBrowserEvent};
use script_traits::{MsDuration, ScriptMsg, TouchpadPressurePhase};
use script_traits::{TouchEventType, TouchId};
use script_traits::UntrustedNodeAddress;
use servo_arc::Arc;
use servo_atoms::Atom;
use servo_config::prefs::PREFS;
use servo_url::{ImmutableOrigin, MutableOrigin, ServoUrl};
use std::ascii::AsciiExt;
use std::borrow::ToOwned;
use std::cell::{Cell, Ref, RefMut};
use std::collections::{HashMap, HashSet, VecDeque};
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::default::Default;
use std::iter::once;
use std::mem;
use std::rc::Rc;
use std::time::{Duration, Instant};
use style::attr::AttrValue;
use style::context::{QuirksMode, ReflowGoal};
use style::invalidation::element::restyle_hints::{RestyleHint, RESTYLE_SELF, RESTYLE_STYLE_ATTRIBUTE};
use style::media_queries::{Device, MediaList, MediaType};
use style::selector_parser::{RestyleDamage, Snapshot};
use style::shared_lock::{SharedRwLock as StyleSharedRwLock, SharedRwLockReadGuard};
use style::str::{HTML_SPACE_CHARACTERS, split_html_space_chars, str_join};
use style::stylesheet_set::StylesheetSet;
use style::stylesheets::{Stylesheet, StylesheetContents, Origin, OriginSet};
use task_source::TaskSource;
use time;
use timers::OneshotTimerCallback;
use url::Host;
use url::percent_encoding::percent_decode;
use webrender_api::ClipId;
/// The number of times we are allowed to see spurious `requestAnimationFrame()` calls before
/// falling back to fake ones.
///
/// A spurious `requestAnimationFrame()` call is defined as one that does not change the DOM.
const SPURIOUS_ANIMATION_FRAME_THRESHOLD: u8 = 5;
/// The amount of time between fake `requestAnimationFrame()`s.
const FAKE_REQUEST_ANIMATION_FRAME_DELAY: u64 = 16;
/// Outcome of handling a touch event: either it was processed here
/// (`Processed`, carrying a boolean result) or it was forwarded on.
pub enum TouchEventResult {
    Processed(bool),
    Forwarded,
}
/// Whether this document is an HTML document (as opposed to XML/SVG etc.).
#[derive(Clone, Copy, Debug, HeapSizeOf, JSTraceable, PartialEq)]
pub enum IsHTMLDocument {
    HTMLDocument,
    NonHTMLDocument,
}
/// Restyle bookkeeping accumulated for a single element between layouts.
#[derive(Debug, HeapSizeOf)]
pub struct PendingRestyle {
    /// If this element had a state or attribute change since the last restyle, track
    /// the original condition of the element.
    pub snapshot: Option<Snapshot>,
    /// Any explicit restyles hints that have been accumulated for this element.
    pub hint: RestyleHint,
    /// Any explicit restyles damage that have been accumulated for this element.
    pub damage: RestyleDamage,
}
impl PendingRestyle {
    /// Creates an empty pending restyle: no snapshot, no hints, no damage.
    pub fn new() -> Self {
        PendingRestyle {
            snapshot: None,
            hint: RestyleHint::empty(),
            damage: RestyleDamage::empty(),
        }
    }
}
/// A stylesheet attached to this document together with the element that
/// owns it.
#[derive(Clone, HeapSizeOf, JSTraceable)]
#[must_root]
struct StyleSheetInDocument {
    #[ignore_heap_size_of = "Arc"]
    sheet: Arc<Stylesheet>,
    owner: JS<Element>,
}
impl PartialEq for StyleSheetInDocument {
    // Equality is pointer identity of the underlying stylesheet allocation,
    // not a structural comparison.
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.sheet, &other.sheet)
    }
}
impl ::style::stylesheets::StylesheetInDocument for StyleSheetInDocument {
    // Delegate all trait methods to the wrapped stylesheet.
    fn contents(&self, guard: &SharedRwLockReadGuard) -> &StylesheetContents {
        self.sheet.contents(guard)
    }
    fn media<'a>(&'a self, guard: &'a SharedRwLockReadGuard) -> Option<&'a MediaList> {
        self.sheet.media(guard)
    }
    fn enabled(&self) -> bool {
        self.sheet.enabled()
    }
}
/// https://dom.spec.whatwg.org/#document
#[dom_struct]
pub struct Document {
    node: Node,
    /// The window this document belongs to.
    window: JS<Window>,
    /// Lazily-created `document.implementation` object.
    implementation: MutNullableJS<DOMImplementation>,
    /// MIME type of the document, e.g. "text/html".
    content_type: DOMString,
    last_modified: Option<String>,
    /// Character encoding used for this document.
    encoding: Cell<EncodingRef>,
    has_browsing_context: bool,
    is_html_document: bool,
    /// Document activity level (fully active / active / inactive).
    activity: Cell<DocumentActivity>,
    /// https://dom.spec.whatwg.org/#concept-document-url
    url: DOMRefCell<ServoUrl>,
    #[ignore_heap_size_of = "defined in selectors"]
    quirks_mode: Cell<QuirksMode>,
    /// Caches for the getElement methods
    id_map: DOMRefCell<HashMap<Atom, Vec<JS<Element>>>>,
    tag_map: DOMRefCell<HashMap<LocalName, JS<HTMLCollection>>>,
    tagns_map: DOMRefCell<HashMap<QualName, JS<HTMLCollection>>>,
    classes_map: DOMRefCell<HashMap<Vec<Atom>, JS<HTMLCollection>>>,
    /// Cached live collections backing document.images, .embeds, .links,
    /// .forms, .scripts, .anchors and .applets.
    images: MutNullableJS<HTMLCollection>,
    embeds: MutNullableJS<HTMLCollection>,
    links: MutNullableJS<HTMLCollection>,
    forms: MutNullableJS<HTMLCollection>,
    scripts: MutNullableJS<HTMLCollection>,
    anchors: MutNullableJS<HTMLCollection>,
    applets: MutNullableJS<HTMLCollection>,
    // Origin of this document — presumably parser vs. script-created; confirm
    // against DocumentSource's definition.
    source: DocumentSource,
    /// Lock use for style attributes and author-origin stylesheet objects in this document.
    /// Can be acquired once for accessing many objects.
    style_shared_lock: StyleSharedRwLock,
    /// List of stylesheets associated with nodes in this document. |None| if the list needs to be refreshed.
    stylesheets: DOMRefCell<StylesheetSet<StyleSheetInDocument>>,
    stylesheet_list: MutNullableJS<StyleSheetList>,
    ready_state: Cell<DocumentReadyState>,
    /// Whether the DOMContentLoaded event has already been dispatched.
    domcontentloaded_dispatched: Cell<bool>,
    /// The element that has most recently requested focus for itself.
    possibly_focused: MutNullableJS<Element>,
    /// The element that currently has the document focus context.
    focused: MutNullableJS<Element>,
    /// The script element that is currently executing.
    current_script: MutNullableJS<HTMLScriptElement>,
    /// https://html.spec.whatwg.org/multipage/#pending-parsing-blocking-script
    pending_parsing_blocking_script: DOMRefCell<Option<PendingScript>>,
    /// Number of stylesheets that block executing the next parser-inserted script
    script_blocking_stylesheets_count: Cell<u32>,
    /// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-when-the-document-has-finished-parsing
    deferred_scripts: PendingInOrderScriptVec,
    /// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-in-order-as-soon-as-possible
    asap_in_order_scripts_list: PendingInOrderScriptVec,
    /// https://html.spec.whatwg.org/multipage/#set-of-scripts-that-will-execute-as-soon-as-possible
    asap_scripts_set: DOMRefCell<Vec<JS<HTMLScriptElement>>>,
    /// https://html.spec.whatwg.org/multipage/#concept-n-noscript
    /// True if scripting is enabled for all scripts in this document
    scripting_enabled: bool,
    /// https://html.spec.whatwg.org/multipage/#animation-frame-callback-identifier
    /// Current identifier of animation frame callback
    animation_frame_ident: Cell<u32>,
    /// https://html.spec.whatwg.org/multipage/#list-of-animation-frame-callbacks
    /// List of animation frame callbacks
    animation_frame_list: DOMRefCell<Vec<(u32, Option<AnimationFrameCallback>)>>,
    /// Whether we're in the process of running animation callbacks.
    ///
    /// Tracking this is not necessary for correctness. Instead, it is an optimization to avoid
    /// sending needless `ChangeRunningAnimationsState` messages to the compositor.
    running_animation_callbacks: Cell<bool>,
    /// Tracks all outstanding loads related to this document.
    loader: DOMRefCell<DocumentLoader>,
    /// The current active HTML parser, to allow resuming after interruptions.
    current_parser: MutNullableJS<ServoParser>,
    /// When we should kick off a reflow. This happens during parsing.
    reflow_timeout: Cell<Option<u64>>,
    /// The cached first `base` element with an `href` attribute.
    base_element: MutNullableJS<HTMLBaseElement>,
    /// This field is set to the document itself for inert documents.
    /// https://html.spec.whatwg.org/multipage/#appropriate-template-contents-owner-document
    appropriate_template_contents_owner_document: MutNullableJS<Document>,
    /// Information on elements needing restyle to ship over to the layout thread when the
    /// time comes.
    pending_restyles: DOMRefCell<HashMap<JS<Element>, PendingRestyle>>,
    /// This flag will be true if layout suppressed a reflow attempt that was
    /// needed in order for the page to be painted.
    needs_paint: Cell<bool>,
    /// http://w3c.github.io/touch-events/#dfn-active-touch-point
    active_touch_points: DOMRefCell<Vec<JS<Touch>>>,
    /// Navigation Timing properties:
    /// https://w3c.github.io/navigation-timing/#sec-PerformanceNavigationTiming
    dom_loading: Cell<u64>,
    dom_interactive: Cell<u64>,
    dom_content_loaded_event_start: Cell<u64>,
    dom_content_loaded_event_end: Cell<u64>,
    dom_complete: Cell<u64>,
    load_event_start: Cell<u64>,
    load_event_end: Cell<u64>,
    /// https://html.spec.whatwg.org/multipage/#concept-document-https-state
    https_state: Cell<HttpsState>,
    touchpad_pressure_phase: Cell<TouchpadPressurePhase>,
    /// The document's origin.
    origin: MutableOrigin,
    /// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-states
    referrer_policy: Cell<Option<ReferrerPolicy>>,
    /// https://html.spec.whatwg.org/multipage/#dom-document-referrer
    referrer: Option<String>,
    /// https://html.spec.whatwg.org/multipage/#target-element
    target_element: MutNullableJS<Element>,
    /// https://w3c.github.io/uievents/#event-type-dblclick
    #[ignore_heap_size_of = "Defined in std"]
    last_click_info: DOMRefCell<Option<(Instant, Point2D<f32>)>>,
    /// https://html.spec.whatwg.org/multipage/#ignore-destructive-writes-counter
    ignore_destructive_writes_counter: Cell<u32>,
    /// The number of spurious `requestAnimationFrame()` requests we've received.
    ///
    /// A rAF request is considered spurious if nothing was actually reflowed.
    spurious_animation_frames: Cell<u8>,
    /// Track the total number of elements in this DOM's tree.
    /// This is sent to the layout thread every time a reflow is done;
    /// layout uses this to determine if the gains from parallel layout will be worth the overhead.
    ///
    /// See also: https://github.com/servo/servo/issues/10110
    dom_count: Cell<u32>,
    /// Entry node for fullscreen.
    fullscreen_element: MutNullableJS<Element>,
    /// Map from ID to set of form control elements that have that ID as
    /// their 'form' content attribute. Used to reset form controls
    /// whenever any element with the same ID as the form attribute
    /// is inserted or removed from the document.
    /// See https://html.spec.whatwg.org/multipage/#form-owner
    form_id_listener_map: DOMRefCell<HashMap<Atom, HashSet<JS<Element>>>>,
}
// The `CollectionFilter`s below back the live `HTMLCollection`s exposed as
// document.images, .embeds, .links, .forms, .scripts, .anchors and .applets.
#[derive(HeapSizeOf, JSTraceable)]
struct ImagesFilter;
impl CollectionFilter for ImagesFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLImageElement>()
    }
}
#[derive(HeapSizeOf, JSTraceable)]
struct EmbedsFilter;
impl CollectionFilter for EmbedsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLEmbedElement>()
    }
}
#[derive(HeapSizeOf, JSTraceable)]
struct LinksFilter;
impl CollectionFilter for LinksFilter {
    // "links" are <a> or <area> elements that carry an href attribute.
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        (elem.is::<HTMLAnchorElement>() || elem.is::<HTMLAreaElement>()) &&
        elem.has_attribute(&local_name!("href"))
    }
}
#[derive(HeapSizeOf, JSTraceable)]
struct FormsFilter;
impl CollectionFilter for FormsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLFormElement>()
    }
}
#[derive(HeapSizeOf, JSTraceable)]
struct ScriptsFilter;
impl CollectionFilter for ScriptsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLScriptElement>()
    }
}
#[derive(HeapSizeOf, JSTraceable)]
struct AnchorsFilter;
impl CollectionFilter for AnchorsFilter {
    // Unlike LinksFilter, only <a> elements qualify here.
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLAnchorElement>() && elem.has_attribute(&local_name!("href"))
    }
}
#[derive(HeapSizeOf, JSTraceable)]
struct AppletsFilter;
impl CollectionFilter for AppletsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLAppletElement>()
    }
}
impl Document {
    #[inline]
    pub fn loader(&self) -> Ref<DocumentLoader> {
        self.loader.borrow()
    }
    #[inline]
    pub fn mut_loader(&self) -> RefMut<DocumentLoader> {
        self.loader.borrow_mut()
    }
    #[inline]
    pub fn has_browsing_context(&self) -> bool { self.has_browsing_context }
    /// https://html.spec.whatwg.org/multipage/#concept-document-bc
    #[inline]
    pub fn browsing_context(&self) -> Option<Root<WindowProxy>> {
        if self.has_browsing_context {
            self.window.undiscarded_window_proxy()
        } else {
            None
        }
    }
    #[inline]
    pub fn window(&self) -> &Window {
        &*self.window
    }
    #[inline]
    pub fn is_html_document(&self) -> bool {
        self.is_html_document
    }
    /// Records the new HTTPS state and notifies embedders via a mozbrowser
    /// security-change event.
    pub fn set_https_state(&self, https_state: HttpsState) {
        self.https_state.set(https_state);
        self.trigger_mozbrowser_event(MozBrowserEvent::SecurityChange(https_state));
    }
    pub fn is_fully_active(&self) -> bool {
        self.activity.get() == DocumentActivity::FullyActive
    }
    pub fn is_active(&self) -> bool {
        self.activity.get() != DocumentActivity::Inactive
    }
    pub fn set_activity(&self, activity: DocumentActivity) {
        // This function should only be called on documents with a browsing context
        assert!(self.has_browsing_context);
        // Set the document's activity level, reflow if necessary, and suspend or resume timers.
        if activity != self.activity.get() {
            self.activity.set(activity);
            if activity == DocumentActivity::FullyActive {
                // Becoming fully active: refresh title, force a full reflow
                // of the (possibly cached) page, and resume timers.
                self.title_changed();
                self.dirty_all_nodes();
                self.window().reflow(
                    ReflowGoal::ForDisplay,
                    ReflowQueryType::NoQuery,
                    ReflowReason::CachedPageNeededReflow
                );
                self.window().resume();
            } else {
                self.window().suspend();
            }
        }
    }
    pub fn origin(&self) -> &MutableOrigin {
        &self.origin
    }
    // https://dom.spec.whatwg.org/#concept-document-url
    pub fn url(&self) -> ServoUrl {
        self.url.borrow().clone()
    }
    pub fn set_url(&self, url: ServoUrl) {
        *self.url.borrow_mut() = url;
    }
    // https://html.spec.whatwg.org/multipage/#fallback-base-url
    pub fn fallback_base_url(&self) -> ServoUrl {
        // Step 1: iframe srcdoc (#4767).
        // Step 2: about:blank with a creator browsing context.
        // Step 3.
        self.url()
    }
    // https://html.spec.whatwg.org/multipage/#document-base-url
    pub fn base_url(&self) -> ServoUrl {
        match self.base_element() {
            // Step 1.
            None => self.fallback_base_url(),
            // Step 2.
            Some(base) => base.frozen_base_url(),
        }
    }
    /// Whether layout suppressed a reflow that is still needed for painting.
    pub fn needs_paint(&self) -> bool {
        self.needs_paint.get()
    }
    /// True when any stylesheet, dirty node, pending restyle or suppressed
    /// paint requires a reflow.
    pub fn needs_reflow(&self) -> bool {
        // FIXME: This should check the dirty bit on the document,
        // not the document element. Needs some layout changes to make
        // that workable.
        self.stylesheets.borrow().has_changed() ||
        self.GetDocumentElement().map_or(false, |root| {
            root.upcast::<Node>().has_dirty_descendants() ||
            !self.pending_restyles.borrow().is_empty() ||
            self.needs_paint()
        })
    }
/// Returns the first `base` element in the DOM that has an `href` attribute.
pub fn base_element(&self) -> Option<Root<HTMLBaseElement>> {
self.base_element.get()
}
/// Refresh the cached first base element in the DOM.
/// https://github.com/w3c/web-platform-tests/issues/2122
pub fn refresh_base_element(&self) {
let base = self.upcast::<Node>()
.traverse_preorder()
.filter_map(Root::downcast::<HTMLBaseElement>)
.find(|element| element.upcast::<Element>().has_attribute(&local_name!("href")));
self.base_element.set(base.r());
}
pub fn dom_count(&self) -> u32 {
self.dom_count.get()
}
/// This is called by `bind_to_tree` when a node is added to the DOM.
/// The internal count is used by layout to determine whether to be sequential or parallel.
/// (it's sequential for small DOMs)
pub fn increment_dom_count(&self) {
    self.dom_count.set(self.dom_count.get() + 1);
}
/// This is called by `unbind_from_tree` when a node is removed from the DOM.
/// Counterpart of `increment_dom_count`.
pub fn decrement_dom_count(&self) {
    self.dom_count.set(self.dom_count.get() - 1);
}
/// Returns the document's current quirks mode.
pub fn quirks_mode(&self) -> QuirksMode {
    self.quirks_mode.get()
}
/// Sets the document's quirks mode, notifying layout only when full
/// quirks mode is entered (other modes are not forwarded here).
pub fn set_quirks_mode(&self, mode: QuirksMode) {
    self.quirks_mode.set(mode);
    if mode == QuirksMode::Quirks {
        self.window.layout_chan().send(Msg::SetQuirksMode(mode)).unwrap();
    }
}
/// Returns the document's character encoding.
pub fn encoding(&self) -> EncodingRef {
    self.encoding.get()
}
/// Sets the document's character encoding.
pub fn set_encoding(&self, encoding: EncodingRef) {
    self.encoding.set(encoding);
}
/// Marks `node` dirty with the given damage so layout picks up the change.
pub fn content_and_heritage_changed(&self, node: &Node, damage: NodeDamage) {
    node.dirty(damage);
}
/// Reflows and disarms the timer if the reflow timer has expired.
/// Does nothing if no timer is set or the deadline has not yet passed.
pub fn reflow_if_reflow_timer_expired(&self) {
    if let Some(reflow_timeout) = self.reflow_timeout.get() {
        if time::precise_time_ns() < reflow_timeout {
            return;
        }
        // Disarm before reflowing so the tick fires at most once.
        self.reflow_timeout.set(None);
        self.window.reflow(ReflowGoal::ForDisplay,
                           ReflowQueryType::NoQuery,
                           ReflowReason::RefreshTick);
    }
}
/// Schedules a reflow to be kicked off at the given `timeout` (in `time::precise_time_ns()`
/// units). This reflow happens even if the event loop is busy. This is used to display initial
/// page content during parsing. An already-scheduled earlier deadline is kept.
pub fn set_reflow_timeout(&self, timeout: u64) {
    if let Some(existing_timeout) = self.reflow_timeout.get() {
        if existing_timeout < timeout {
            return;
        }
    }
    self.reflow_timeout.set(Some(timeout))
}
/// Remove any existing association between the provided id and any elements in this document.
///
/// Panics (via `expect`) if `to_unregister` is not currently registered
/// under `id`.
pub fn unregister_named_element(&self, to_unregister: &Element, id: Atom) {
    debug!("Removing named element from document {:p}: {:p} id={}",
           self,
           to_unregister,
           id);
    // Limit the scope of the borrow because id_map might be borrowed again by
    // GetElementById through the following sequence of calls
    // reset_form_owner_for_listeners -> reset_form_owner -> GetElementById
    {
        let mut id_map = self.id_map.borrow_mut();
        let is_empty = match id_map.get_mut(&id) {
            None => false,
            Some(elements) => {
                let position = elements.iter()
                    .position(|element| &**element == to_unregister)
                    .expect("This element should be in registered.");
                elements.remove(position);
                elements.is_empty()
            }
        };
        // Drop the id's entry entirely once its last element is gone.
        if is_empty {
            id_map.remove(&id);
        }
    }
    self.reset_form_owner_for_listeners(&id);
}
/// Associate an element present in this document with the provided id.
///
/// Panics if the element is not in the document or the id is empty.
/// Elements sharing an id are kept in tree (preorder) order.
pub fn register_named_element(&self, element: &Element, id: Atom) {
    debug!("Adding named element to document {:p}: {:p} id={}",
           self,
           element,
           id);
    assert!(element.upcast::<Node>().is_in_doc());
    assert!(!id.is_empty());
    let root = self.GetDocumentElement()
        .expect("The element is in the document, so there must be a document \
                 element.");
    // Limit the scope of the borrow because id_map might be borrowed again by
    // GetElementById through the following sequence of calls
    // reset_form_owner_for_listeners -> reset_form_owner -> GetElementById
    {
        let mut id_map = self.id_map.borrow_mut();
        let elements = id_map.entry(id.clone()).or_insert(Vec::new());
        elements.insert_pre_order(element, root.r().upcast::<Node>());
    }
    self.reset_form_owner_for_listeners(&id);
}
/// Registers a form control as a listener for changes to the element
/// registered under `id`, so its form owner can be recomputed.
pub fn register_form_id_listener<T: ?Sized + FormControl>(&self, id: DOMString, listener: &T) {
    let mut map = self.form_id_listener_map.borrow_mut();
    let listener = listener.to_element();
    let set = map.entry(Atom::from(id)).or_insert(HashSet::new());
    set.insert(JS::from_ref(listener));
}
/// Removes a previously registered form id listener; the map entry is
/// dropped entirely when its last listener is removed.
pub fn unregister_form_id_listener<T: ?Sized + FormControl>(&self, id: DOMString, listener: &T) {
    let mut map = self.form_id_listener_map.borrow_mut();
    if let Occupied(mut entry) = map.entry(Atom::from(id)) {
        entry.get_mut().remove(&JS::from_ref(listener.to_element()));
        if entry.get().is_empty() {
            entry.remove();
        }
    }
}
/// Attempt to find a named element in this page's document: first by id
/// (after percent-decoding the fragment), then by anchor `name`.
/// https://html.spec.whatwg.org/multipage/#the-indicated-part-of-the-document
pub fn find_fragment_node(&self, fragid: &str) -> Option<Root<Element>> {
    // Step 1 is not handled here; the fragid is already obtained by the calling function
    // Step 2: Simply use None to indicate the top of the document.
    // Step 3 & 4
    percent_decode(fragid.as_bytes()).decode_utf8().ok()
    // Step 5
        .and_then(|decoded_fragid| self.get_element_by_id(&Atom::from(decoded_fragid)))
    // Step 6
        .or_else(|| self.get_anchor_by_name(fragid))
    // Step 7 & 8
}
/// Scroll to the target element, and when we do not find a target
/// and the fragment is empty or "top", scroll to the top.
/// https://html.spec.whatwg.org/multipage/#scroll-to-the-fragment-identifier
pub fn check_and_scroll_fragment(&self, fragment: &str) {
    let target = self.find_fragment_node(fragment);
    // Step 1
    self.set_target_element(target.r());
    let point = target.r().map(|element| {
        // FIXME(#8275, pcwalton): This is pretty bogus when multiple layers are involved.
        // Really what needs to happen is that this needs to go through layout to ask which
        // layer the element belongs to, and have it send the scroll message to the
        // compositor.
        let rect = element.upcast::<Node>().bounding_content_box_or_zero();
        // In order to align with element edges, we snap to unscaled pixel boundaries, since
        // the paint thread currently does the same for drawing elements. This is important
        // for pages that require pixel perfect scroll positioning for proper display
        // (like Acid2). Since we don't have the device pixel ratio here, this might not be
        // accurate, but should work as long as the ratio is a whole number. Once #8275 is
        // fixed this should actually take into account the real device pixel ratio.
        (rect.origin.x.to_nearest_px() as f32, rect.origin.y.to_nearest_px() as f32)
    }).or_else(|| if fragment.is_empty() || fragment.eq_ignore_ascii_case("top") {
        // FIXME(stshine): this should be the origin of the stacking context space,
        // which may differ under the influence of writing mode.
        Some((0.0, 0.0))
    } else {
        None
    });
    if let Some((x, y)) = point {
        // Step 3
        let global_scope = self.window.upcast::<GlobalScope>();
        let webrender_pipeline_id = global_scope.pipeline_id().to_webrender();
        self.window.update_viewport_for_scroll(x, y);
        self.window.perform_a_scroll(x,
                                     y,
                                     ClipId::root_scroll_node(webrender_pipeline_id),
                                     ScrollBehavior::Instant,
                                     target.r());
    }
}
/// Returns the first `<a>` element in preorder whose `name` attribute
/// equals `name` exactly, if any.
fn get_anchor_by_name(&self, name: &str) -> Option<Root<Element>> {
    let check_anchor = |node: &HTMLAnchorElement| {
        let elem = node.upcast::<Element>();
        elem.get_attribute(&ns!(), &local_name!("name"))
            .map_or(false, |attr| &**attr.value() == name)
    };
    let doc_node = self.upcast::<Node>();
    doc_node.traverse_preorder()
        .filter_map(Root::downcast)
        .find(|node| check_anchor(&node))
        .map(Root::upcast)
}
/// Updates the document readiness, records the corresponding navigation
/// timing timestamp, triggers the matching mozbrowser event, and fires
/// `readystatechange` on the document.
// https://html.spec.whatwg.org/multipage/#current-document-readiness
pub fn set_ready_state(&self, state: DocumentReadyState) {
    match state {
        DocumentReadyState::Loading => {
            // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserconnected
            self.trigger_mozbrowser_event(MozBrowserEvent::Connected);
            update_with_current_time_ms(&self.dom_loading);
        },
        DocumentReadyState::Complete => {
            // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserloadend
            self.trigger_mozbrowser_event(MozBrowserEvent::LoadEnd);
            update_with_current_time_ms(&self.dom_complete);
        },
        DocumentReadyState::Interactive => update_with_current_time_ms(&self.dom_interactive),
    };
    self.ready_state.set(state);
    self.upcast::<EventTarget>().fire_event(atom!("readystatechange"));
}
/// Return whether scripting is enabled or not for this document.
pub fn is_scripting_enabled(&self) -> bool {
    self.scripting_enabled
}
/// Return the element that currently has focus.
// https://w3c.github.io/uievents/#events-focusevent-doc-focus
pub fn get_focused_element(&self) -> Option<Root<Element>> {
    self.focused.get()
}
/// Initiate a new round of checking for elements requesting focus. The last element to call
/// `request_focus` before `commit_focus_transaction` is called will receive focus.
pub fn begin_focus_transaction(&self) {
    self.possibly_focused.set(None);
}
/// Request that the given element receive focus once the current transaction is complete.
/// Elements that are not focusable areas are ignored.
pub fn request_focus(&self, elem: &Element) {
    if elem.is_focusable_area() {
        self.possibly_focused.set(Some(elem))
    }
}
/// Reassign the focus context to the element that last requested focus during this
/// transaction, or none if no elements requested it. Fires `blur` on the
/// old focused element and `focus` on the new one; no-op when unchanged.
pub fn commit_focus_transaction(&self, focus_type: FocusType) {
    if self.focused == self.possibly_focused.get().r() {
        return
    }
    if let Some(ref elem) = self.focused.get() {
        let node = elem.upcast::<Node>();
        elem.set_focus_state(false);
        // FIXME: pass appropriate relatedTarget
        self.fire_focus_event(FocusEventType::Blur, node, None);
    }
    self.focused.set(self.possibly_focused.get().r());
    if let Some(ref elem) = self.focused.get() {
        elem.set_focus_state(true);
        let node = elem.upcast::<Node>();
        // FIXME: pass appropriate relatedTarget
        self.fire_focus_event(FocusEventType::Focus, node, None);
        // Update the focus state for all elements in the focus chain.
        // https://html.spec.whatwg.org/multipage/#focus-chain
        if focus_type == FocusType::Element {
            self.send_to_constellation(ScriptMsg::Focus);
        }
    }
}
/// Handles any updates when the document's title has changed: fires the
/// mozbrowser title-change event and notifies the constellation. Does
/// nothing for documents without a browsing context.
pub fn title_changed(&self) {
    if self.browsing_context().is_some() {
        // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowsertitlechange
        self.trigger_mozbrowser_event(MozBrowserEvent::TitleChange(String::from(self.Title())));
        self.send_title_to_constellation();
    }
}
/// Sends this document's title to the constellation.
pub fn send_title_to_constellation(&self) {
    let title = Some(String::from(self.Title()));
    self.send_to_constellation(ScriptMsg::SetTitle(title));
}
/// Marks every node in the document dirty, forcing a full restyle.
pub fn dirty_all_nodes(&self) {
    let root = self.upcast::<Node>();
    for node in root.traverse_preorder() {
        node.dirty(NodeDamage::OtherNodeDamage)
    }
}
/// Dispatches a mouse button event (click/mouseup/mousedown) at
/// `client_point`: hit-tests for a target element, forwards to a child
/// document if the target is an iframe, otherwise fires a trusted
/// `MouseEvent` and runs activation behavior, then reflows.
#[allow(unsafe_code)]
pub fn handle_mouse_event(&self,
                          js_runtime: *mut JSRuntime,
                          button: MouseButton,
                          client_point: Point2D<f32>,
                          mouse_event_type: MouseEventType) {
    let mouse_event_type_string = match mouse_event_type {
        MouseEventType::Click => "click".to_owned(),
        MouseEventType::MouseUp => "mouseup".to_owned(),
        MouseEventType::MouseDown => "mousedown".to_owned(),
    };
    debug!("{}: at {:?}", mouse_event_type_string, client_point);
    let node = match self.window.hit_test_query(client_point, false) {
        Some(node_address) => {
            debug!("node address is {:?}", node_address);
            unsafe {
                node::from_untrusted_node_address(js_runtime, node_address)
            }
        },
        None => return,
    };
    // Non-element hits fall back to the nearest element ancestor.
    let el = match node.downcast::<Element>() {
        Some(el) => Root::from_ref(el),
        None => {
            let parent = node.GetParentNode();
            match parent.and_then(Root::downcast::<Element>) {
                Some(parent) => parent,
                None => return,
            }
        },
    };
    // If the target is an iframe, forward the event to the child document.
    if let Some(iframe) = el.downcast::<HTMLIFrameElement>() {
        if let Some(pipeline_id) = iframe.pipeline_id() {
            let rect = iframe.upcast::<Element>().GetBoundingClientRect();
            let child_origin = Vector2D::new(rect.X() as f32, rect.Y() as f32);
            let child_point = client_point - child_origin;
            let event = CompositorEvent::MouseButtonEvent(mouse_event_type, button, child_point);
            let event = ScriptMsg::ForwardEvent(pipeline_id, event);
            self.send_to_constellation(event);
        }
        return;
    }
    let node = el.upcast::<Node>();
    debug!("{} on {:?}", mouse_event_type_string, node.debug_str());
    // Prevent click event if form control element is disabled.
    if let MouseEventType::Click = mouse_event_type {
        if el.click_event_filter_by_disabled_state() {
            return;
        }
        self.begin_focus_transaction();
    }
    // https://w3c.github.io/uievents/#event-type-click
    let client_x = client_point.x as i32;
    let client_y = client_point.y as i32;
    let click_count = 1;
    let event = MouseEvent::new(&self.window,
                                DOMString::from(mouse_event_type_string),
                                EventBubbles::Bubbles,
                                EventCancelable::Cancelable,
                                Some(&self.window),
                                click_count,
                                client_x,
                                client_y,
                                client_x,
                                client_y, // TODO: Get real screen coordinates?
                                false,
                                false,
                                false,
                                false,
                                0i16,
                                None);
    let event = event.upcast::<Event>();
    // https://w3c.github.io/uievents/#trusted-events
    event.set_trusted(true);
    // https://html.spec.whatwg.org/multipage/#run-authentic-click-activation-steps
    let activatable = el.as_maybe_activatable();
    match mouse_event_type {
        MouseEventType::Click => el.authentic_click_activation(event),
        MouseEventType::MouseDown => {
            if let Some(a) = activatable {
                a.enter_formal_activation_state();
            }
            let target = node.upcast();
            event.fire(target);
        },
        MouseEventType::MouseUp => {
            if let Some(a) = activatable {
                a.exit_formal_activation_state();
            }
            let target = node.upcast();
            event.fire(target);
        },
    }
    if let MouseEventType::Click = mouse_event_type {
        self.commit_focus_transaction(FocusType::Element);
        self.maybe_fire_dblclick(client_point, node);
    }
    self.window.reflow(ReflowGoal::ForDisplay,
                       ReflowQueryType::NoQuery,
                       ReflowReason::MouseEvent);
}
/// Fires a `dblclick` event on `target` if this click occurred within the
/// configured time and distance thresholds of the previous click;
/// otherwise records this click for the next invocation.
fn maybe_fire_dblclick(&self, click_pos: Point2D<f32>, target: &Node) {
    // https://w3c.github.io/uievents/#event-type-dblclick
    let now = Instant::now();
    // `take()` clears last_click_info; it is only re-populated below when
    // no double click fires.
    let opt = self.last_click_info.borrow_mut().take();
    if let Some((last_time, last_pos)) = opt {
        // Thresholds come from prefs, with fallbacks of 300 ms and 1 px.
        let DBL_CLICK_TIMEOUT = Duration::from_millis(PREFS.get("dom.document.dblclick_timeout").as_u64()
                                                           .unwrap_or(300));
        let DBL_CLICK_DIST_THRESHOLD = PREFS.get("dom.document.dblclick_dist").as_u64().unwrap_or(1);
        // Calculate distance between this click and the previous click.
        let line = click_pos - last_pos;
        let dist = (line.dot(line) as f64).sqrt();
        if now.duration_since(last_time) < DBL_CLICK_TIMEOUT &&
           dist < DBL_CLICK_DIST_THRESHOLD as f64 {
            // A double click has occurred if this click is within a certain time and dist. of previous click.
            let click_count = 2;
            let client_x = click_pos.x as i32;
            let client_y = click_pos.y as i32;
            let event = MouseEvent::new(&self.window,
                                        DOMString::from("dblclick"),
                                        EventBubbles::Bubbles,
                                        EventCancelable::Cancelable,
                                        Some(&self.window),
                                        click_count,
                                        client_x,
                                        client_y,
                                        client_x,
                                        client_y,
                                        false,
                                        false,
                                        false,
                                        false,
                                        0i16,
                                        None);
            event.upcast::<Event>().fire(target.upcast());
            // When a double click occurs, self.last_click_info is left as None so that a
            // third sequential click will not cause another double click.
            return;
        }
    }
    // Update last_click_info with the time and position of the click.
    *self.last_click_info.borrow_mut() = Some((now, click_pos));
}
/// Handles a touchpad pressure update: hit-tests for a target, forwards to
/// a child document for iframes, then tracks the pressure phase transition
/// and fires the appropriate servo force-touch events.
#[allow(unsafe_code)]
pub fn handle_touchpad_pressure_event(&self,
                                      js_runtime: *mut JSRuntime,
                                      client_point: Point2D<f32>,
                                      pressure: f32,
                                      phase_now: TouchpadPressurePhase) {
    let node = match self.window.hit_test_query(client_point, false) {
        Some(node_address) => unsafe {
            node::from_untrusted_node_address(js_runtime, node_address)
        },
        None => return
    };
    // Non-element hits fall back to the nearest element ancestor.
    let el = match node.downcast::<Element>() {
        Some(el) => Root::from_ref(el),
        None => {
            let parent = node.GetParentNode();
            match parent.and_then(Root::downcast::<Element>) {
                Some(parent) => parent,
                None => return
            }
        },
    };
    // If the target is an iframe, forward the event to the child document.
    if let Some(iframe) = el.downcast::<HTMLIFrameElement>() {
        if let Some(pipeline_id) = iframe.pipeline_id() {
            let rect = iframe.upcast::<Element>().GetBoundingClientRect();
            let child_origin = Vector2D::new(rect.X() as f32, rect.Y() as f32);
            let child_point = client_point - child_origin;
            let event = CompositorEvent::TouchpadPressureEvent(child_point,
                                                               pressure,
                                                               phase_now);
            let event = ScriptMsg::ForwardEvent(pipeline_id, event);
            self.send_to_constellation(event);
        }
        return;
    }
    let phase_before = self.touchpad_pressure_phase.get();
    self.touchpad_pressure_phase.set(phase_now);
    // No event when pressure stays in the pre-click range.
    if phase_before == TouchpadPressurePhase::BeforeClick &&
       phase_now == TouchpadPressurePhase::BeforeClick {
        return;
    }
    let node = el.upcast::<Node>();
    let target = node.upcast();
    // Bias the reported force by the number of click stages passed.
    let force = match phase_now {
        TouchpadPressurePhase::BeforeClick => pressure,
        TouchpadPressurePhase::AfterFirstClick => 1. + pressure,
        TouchpadPressurePhase::AfterSecondClick => 2. + pressure,
    };
    if phase_now != TouchpadPressurePhase::BeforeClick {
        self.fire_forcetouch_event("servomouseforcechanged".to_owned(), target, force);
    }
    // Fire forcedown/forceup on transitions into/out of the second click.
    if phase_before != TouchpadPressurePhase::AfterSecondClick &&
       phase_now == TouchpadPressurePhase::AfterSecondClick {
        self.fire_forcetouch_event("servomouseforcedown".to_owned(), target, force);
    }
    if phase_before == TouchpadPressurePhase::AfterSecondClick &&
       phase_now != TouchpadPressurePhase::AfterSecondClick {
        self.fire_forcetouch_event("servomouseforceup".to_owned(), target, force);
    }
}
/// Builds a `ForceTouchEvent` with the given name and force value and
/// fires it at `target`.
fn fire_forcetouch_event(&self, event_name: String, target: &EventTarget, force: f32) {
    let forcetouch = ForceTouchEvent::new(&self.window,
                                          DOMString::from(event_name),
                                          force);
    forcetouch.upcast::<Event>().fire(target);
}
/// Fires a bubbling, cancelable mouse event named `event_name` at
/// `target`, using `client_point` for the client coordinates.
pub fn fire_mouse_event(&self, client_point: Point2D<f32>, target: &EventTarget, event_name: String) {
    // Coordinates are truncated to i32; out-of-range values become 0.
    let client_x = client_point.x.to_i32().unwrap_or(0);
    let client_y = client_point.y.to_i32().unwrap_or(0);
    let mouse_event = MouseEvent::new(&self.window,
                                      DOMString::from(event_name),
                                      EventBubbles::Bubbles,
                                      EventCancelable::Cancelable,
                                      Some(&self.window),
                                      0i32,
                                      client_x,
                                      client_y,
                                      client_x,
                                      client_y,
                                      false,
                                      false,
                                      false,
                                      false,
                                      0i16,
                                      None);
    let event = mouse_event.upcast::<Event>();
    event.fire(target);
}
/// Handles a mouse move: sends `mousemove` to the topmost element under
/// the pointer (forwarding into iframes), and when the hovered element
/// changes, updates hover/active state and fires `mouseout`/`mouseover`,
/// then reflows.
#[allow(unsafe_code)]
pub fn handle_mouse_move_event(&self,
                               js_runtime: *mut JSRuntime,
                               client_point: Option<Point2D<f32>>,
                               prev_mouse_over_target: &MutNullableJS<Element>) {
    let client_point = match client_point {
        None => {
            // If there's no point, there's no target under the mouse
            // FIXME: dispatch mouseout here. We have no point.
            prev_mouse_over_target.set(None);
            return;
        }
        Some(client_point) => client_point,
    };
    let maybe_new_target = self.window.hit_test_query(client_point, true).and_then(|address| {
        let node = unsafe { node::from_untrusted_node_address(js_runtime, address) };
        node.inclusive_ancestors()
            .filter_map(Root::downcast::<Element>)
            .next()
    });
    // Send mousemove event to topmost target, and forward it if it's an iframe
    if let Some(ref new_target) = maybe_new_target {
        // If the target is an iframe, forward the event to the child document.
        if let Some(iframe) = new_target.downcast::<HTMLIFrameElement>() {
            if let Some(pipeline_id) = iframe.pipeline_id() {
                let rect = iframe.upcast::<Element>().GetBoundingClientRect();
                let child_origin = Vector2D::new(rect.X() as f32, rect.Y() as f32);
                let child_point = client_point - child_origin;
                let event = CompositorEvent::MouseMoveEvent(Some(child_point));
                let event = ScriptMsg::ForwardEvent(pipeline_id, event);
                self.send_to_constellation(event);
            }
            return;
        }
        self.fire_mouse_event(client_point, new_target.upcast(), "mousemove".to_owned());
    }
    // Nothing more to do here, mousemove is sent,
    // and the element under the mouse hasn't changed.
    if maybe_new_target == prev_mouse_over_target.get() {
        return;
    }
    let old_target_is_ancestor_of_new_target = match (prev_mouse_over_target.get(), maybe_new_target.as_ref()) {
        (Some(old_target), Some(new_target))
            => old_target.upcast::<Node>().is_ancestor_of(new_target.upcast::<Node>()),
        _ => false,
    };
    // Here we know the target has changed, so we must update the state,
    // dispatch mouseout to the previous one, mouseover to the new one,
    if let Some(old_target) = prev_mouse_over_target.get() {
        // If the old target is an ancestor of the new target, this can be skipped
        // completely, since the node's hover state will be reseted below.
        if !old_target_is_ancestor_of_new_target {
            for element in old_target.upcast::<Node>()
                                     .inclusive_ancestors()
                                     .filter_map(Root::downcast::<Element>) {
                element.set_hover_state(false);
                element.set_active_state(false);
            }
        }
        // Remove hover state to old target and its parents
        self.fire_mouse_event(client_point, old_target.upcast(), "mouseout".to_owned());
        // TODO: Fire mouseleave here only if the old target is
        // not an ancestor of the new target.
    }
    if let Some(ref new_target) = maybe_new_target {
        for element in new_target.upcast::<Node>()
                                 .inclusive_ancestors()
                                 .filter_map(Root::downcast::<Element>) {
            // Stop at the first ancestor that is already hovered; the rest
            // of the chain above it is hovered too.
            if element.hover_state() {
                break;
            }
            element.set_hover_state(true);
        }
        self.fire_mouse_event(client_point, &new_target.upcast(), "mouseover".to_owned());
        // TODO: Fire mouseenter here.
    }
    // Store the current mouse over target for next frame.
    prev_mouse_over_target.set(maybe_new_target.r());
    self.window.reflow(ReflowGoal::ForDisplay,
                       ReflowQueryType::NoQuery,
                       ReflowReason::MouseEvent);
}
/// Handles a touch event: hit-tests for the target (forwarding into
/// iframes), maintains the active touch point list for the event type,
/// fires the corresponding TouchEvent, reflows, and reports whether the
/// page canceled the event.
#[allow(unsafe_code)]
pub fn handle_touch_event(&self,
                          js_runtime: *mut JSRuntime,
                          event_type: TouchEventType,
                          touch_id: TouchId,
                          point: Point2D<f32>)
                          -> TouchEventResult {
    let TouchId(identifier) = touch_id;
    let event_name = match event_type {
        TouchEventType::Down => "touchstart",
        TouchEventType::Move => "touchmove",
        TouchEventType::Up => "touchend",
        TouchEventType::Cancel => "touchcancel",
    };
    let node = match self.window.hit_test_query(point, false) {
        Some(node_address) => unsafe {
            node::from_untrusted_node_address(js_runtime, node_address)
        },
        None => return TouchEventResult::Processed(false),
    };
    // Non-element hits fall back to the nearest element ancestor.
    let el = match node.downcast::<Element>() {
        Some(el) => Root::from_ref(el),
        None => {
            let parent = node.GetParentNode();
            match parent.and_then(Root::downcast::<Element>) {
                Some(parent) => parent,
                None => return TouchEventResult::Processed(false),
            }
        },
    };
    // If the target is an iframe, forward the event to the child document.
    if let Some(iframe) = el.downcast::<HTMLIFrameElement>() {
        if let Some(pipeline_id) = iframe.pipeline_id() {
            let rect = iframe.upcast::<Element>().GetBoundingClientRect();
            let child_origin = Vector2D::new(rect.X() as f32, rect.Y() as f32);
            let child_point = point - child_origin;
            let event = CompositorEvent::TouchEvent(event_type, touch_id, child_point);
            let event = ScriptMsg::ForwardEvent(pipeline_id, event);
            self.send_to_constellation(event);
        }
        return TouchEventResult::Forwarded;
    }
    let target = Root::upcast::<EventTarget>(el);
    let window = &*self.window;
    let client_x = Finite::wrap(point.x as f64);
    let client_y = Finite::wrap(point.y as f64);
    let page_x = Finite::wrap(point.x as f64 + window.PageXOffset() as f64);
    let page_y = Finite::wrap(point.y as f64 + window.PageYOffset() as f64);
    let touch = Touch::new(window,
                           identifier,
                           &target,
                           client_x,
                           client_y, // TODO: Get real screen coordinates?
                           client_x,
                           client_y,
                           page_x,
                           page_y);
    match event_type {
        TouchEventType::Down => {
            // Add a new touch point
            self.active_touch_points.borrow_mut().push(JS::from_ref(&*touch));
        }
        TouchEventType::Move => {
            // Replace an existing touch point
            let mut active_touch_points = self.active_touch_points.borrow_mut();
            match active_touch_points.iter_mut().find(|t| t.Identifier() == identifier) {
                Some(t) => *t = JS::from_ref(&*touch),
                None => warn!("Got a touchmove event for a non-active touch point"),
            }
        }
        TouchEventType::Up |
        TouchEventType::Cancel => {
            // Remove an existing touch point
            let mut active_touch_points = self.active_touch_points.borrow_mut();
            match active_touch_points.iter().position(|t| t.Identifier() == identifier) {
                Some(i) => {
                    active_touch_points.swap_remove(i);
                }
                None => warn!("Got a touchend event for a non-active touch point"),
            }
        }
    }
    // Build the three TouchList views: all active touches, the touch that
    // changed, and the active touches on this event's target.
    rooted_vec!(let mut touches);
    touches.extend(self.active_touch_points.borrow().iter().cloned());
    rooted_vec!(let mut target_touches);
    target_touches.extend(self.active_touch_points
                              .borrow()
                              .iter()
                              .filter(|t| t.Target() == target)
                              .cloned());
    rooted_vec!(let changed_touches <- once(touch));
    let event = TouchEvent::new(window,
                                DOMString::from(event_name),
                                EventBubbles::Bubbles,
                                EventCancelable::Cancelable,
                                Some(window),
                                0i32,
                                &TouchList::new(window, touches.r()),
                                &TouchList::new(window, changed_touches.r()),
                                &TouchList::new(window, target_touches.r()),
                                // FIXME: modifier keys
                                false,
                                false,
                                false,
                                false);
    let event = event.upcast::<Event>();
    let result = event.fire(&target);
    window.reflow(ReflowGoal::ForDisplay,
                  ReflowQueryType::NoQuery,
                  ReflowReason::MouseEvent);
    match result {
        EventStatus::Canceled => TouchEventResult::Processed(false),
        EventStatus::NotCanceled => TouchEventResult::Processed(true),
    }
}
/// The entry point for all key processing for web content: fires
/// keydown/keyup (and keypress for printable keys), forwards uncancelled
/// events to the constellation, dispatches synthetic activation for
/// Space/Enter, then reflows.
pub fn dispatch_key_event(&self,
                          ch: Option<char>,
                          key: Key,
                          state: KeyState,
                          modifiers: KeyModifiers) {
    let focused = self.get_focused_element();
    let body = self.GetBody();
    // Target preference: focused element, then body, then the window.
    let target = match (&focused, &body) {
        (&Some(ref focused), _) => focused.upcast(),
        (&None, &Some(ref body)) => body.upcast(),
        (&None, &None) => self.window.upcast(),
    };
    let ctrl = modifiers.contains(CONTROL);
    let alt = modifiers.contains(ALT);
    let shift = modifiers.contains(SHIFT);
    let meta = modifiers.contains(SUPER);
    let is_composing = false;
    let is_repeating = state == KeyState::Repeated;
    let ev_type = DOMString::from(match state {
                                      KeyState::Pressed | KeyState::Repeated => "keydown",
                                      KeyState::Released => "keyup",
                                  }
                                  .to_owned());
    let props = KeyboardEvent::key_properties(ch, key, modifiers);
    let keyevent = KeyboardEvent::new(&self.window,
                                      ev_type,
                                      true,
                                      true,
                                      Some(&self.window),
                                      0,
                                      ch,
                                      Some(key),
                                      DOMString::from(props.key_string.clone()),
                                      DOMString::from(props.code),
                                      props.location,
                                      is_repeating,
                                      is_composing,
                                      ctrl,
                                      alt,
                                      shift,
                                      meta,
                                      None,
                                      props.key_code);
    let event = keyevent.upcast::<Event>();
    event.fire(target);
    let mut cancel_state = event.get_cancel_state();
    // https://w3c.github.io/uievents/#keys-cancelable-keys
    if state != KeyState::Released && props.is_printable() && cancel_state != EventDefault::Prevented {
        // https://w3c.github.io/uievents/#keypress-event-order
        let event = KeyboardEvent::new(&self.window,
                                       DOMString::from("keypress"),
                                       true,
                                       true,
                                       Some(&self.window),
                                       0,
                                       ch,
                                       Some(key),
                                       DOMString::from(props.key_string),
                                       DOMString::from(props.code),
                                       props.location,
                                       is_repeating,
                                       is_composing,
                                       ctrl,
                                       alt,
                                       shift,
                                       meta,
                                       props.char_code,
                                       0);
        let ev = event.upcast::<Event>();
        ev.fire(target);
        cancel_state = ev.get_cancel_state();
    }
    if cancel_state == EventDefault::Allowed {
        let msg = ScriptMsg::SendKeyEvent(ch, key, state, modifiers);
        self.send_to_constellation(msg);
        // This behavior is unspecced
        // We are supposed to dispatch synthetic click activation for Space and/or Return,
        // however *when* we do it is up to us.
        // Here, we're dispatching it after the key event so the script has a chance to cancel it
        // https://www.w3.org/Bugs/Public/show_bug.cgi?id=27337
        match key {
            Key::Space if state == KeyState::Released => {
                let maybe_elem = target.downcast::<Element>();
                if let Some(el) = maybe_elem {
                    synthetic_click_activation(el,
                                               false,
                                               false,
                                               false,
                                               false,
                                               ActivationSource::NotFromClick)
                }
            }
            Key::Enter if state == KeyState::Released => {
                let maybe_elem = target.downcast::<Element>();
                if let Some(el) = maybe_elem {
                    if let Some(a) = el.as_maybe_activatable() {
                        a.implicit_submission(ctrl, alt, shift, meta);
                    }
                }
            }
            _ => (),
        }
    }
    self.window.reflow(ReflowGoal::ForDisplay,
                       ReflowQueryType::NoQuery,
                       ReflowReason::KeyEvent);
}
/// Converts a list of nodes/strings into a single node: a lone item is
/// returned directly (strings become text nodes); multiple items are
/// appended to a new DocumentFragment.
// https://dom.spec.whatwg.org/#converting-nodes-into-a-node
pub fn node_from_nodes_and_strings(&self,
                                   mut nodes: Vec<NodeOrString>)
                                   -> Fallible<Root<Node>> {
    if nodes.len() == 1 {
        Ok(match nodes.pop().unwrap() {
            NodeOrString::Node(node) => node,
            NodeOrString::String(string) => Root::upcast(self.CreateTextNode(string)),
        })
    } else {
        let fragment = Root::upcast::<Node>(self.CreateDocumentFragment());
        for node in nodes {
            match node {
                NodeOrString::Node(node) => {
                    fragment.AppendChild(&node)?;
                },
                NodeOrString::String(string) => {
                    let node = Root::upcast::<Node>(self.CreateTextNode(string));
                    // No try!() here because appending a text node
                    // should not fail.
                    fragment.AppendChild(&node).unwrap();
                }
            }
        }
        Ok(fragment)
    }
}
pub fn get_body_attribute(&self, local_name: &LocalName) -> DOMString {
match self.GetBody().and_then(Root::downcast::<HTMLBodyElement>) {
Some(ref body) => {
body.upcast::<Element>().get_string_attribute(local_name)
},
None => DOMString::new(),
}
}
/// Parses and sets `local_name` to `value` on the document's `<body>`
/// element; a no-op when there is no `<body>`.
pub fn set_body_attribute(&self, local_name: &LocalName, value: DOMString) {
    if let Some(ref body) = self.GetBody().and_then(Root::downcast::<HTMLBodyElement>) {
        let body = body.upcast::<Element>();
        let value = body.parse_attribute(&ns!(), &local_name, value);
        body.set_attribute(local_name, value);
    }
}
/// Records the script element currently being executed (or clears it).
pub fn set_current_script(&self, script: Option<&HTMLScriptElement>) {
    self.current_script.set(script);
}
/// Returns how many stylesheets are currently blocking script execution.
pub fn get_script_blocking_stylesheets_count(&self) -> u32 {
    self.script_blocking_stylesheets_count.get()
}
/// Increments the count of stylesheets blocking script execution.
pub fn increment_script_blocking_stylesheet_count(&self) {
    let count_cell = &self.script_blocking_stylesheets_count;
    count_cell.set(count_cell.get() + 1);
}
/// Decrements the count of stylesheets blocking script execution.
/// Panics if the count is already zero (an unmatched decrement).
pub fn decrement_script_blocking_stylesheet_count(&self) {
    let count_cell = &self.script_blocking_stylesheets_count;
    assert!(count_cell.get() > 0);
    count_cell.set(count_cell.get() - 1);
}
/// Marks all stylesheet origins dirty and dirties the document element so
/// a restyle/reflow will be performed.
pub fn invalidate_stylesheets(&self) {
    self.stylesheets.borrow_mut().force_dirty(OriginSet::all());
    // Mark the document element dirty so a reflow will be performed.
    //
    // FIXME(emilio): Use the StylesheetSet invalidation stuff.
    if let Some(element) = self.GetDocumentElement() {
        element.upcast::<Node>().dirty(NodeDamage::NodeStyleDamaged);
    }
}
/// Forwards a mozbrowser event to the parent pipeline via the
/// constellation, but only when the mozbrowser pref is enabled and this
/// document's window has a parent.
pub fn trigger_mozbrowser_event(&self, event: MozBrowserEvent) {
    if PREFS.is_mozbrowser_enabled() {
        if let Some((parent_pipeline_id, _)) = self.window.parent_info() {
            let event = ScriptMsg::MozBrowserEvent(parent_pipeline_id, event);
            self.send_to_constellation(event);
        }
    }
}
/// Registers an animation frame callback and returns its handle (for
/// `cancel_animation_frame`). Schedules either a fake one-shot timer or a
/// constellation state change, as appropriate.
/// https://html.spec.whatwg.org/multipage/#dom-window-requestanimationframe
pub fn request_animation_frame(&self, callback: AnimationFrameCallback) -> u32 {
    let ident = self.animation_frame_ident.get() + 1;
    self.animation_frame_ident.set(ident);
    self.animation_frame_list.borrow_mut().push((ident, Some(callback)));
    // TODO: Should tick animation only when document is visible
    // If we are running 'fake' animation frames, we unconditionally
    // set up a one-shot timer for script to execute the rAF callbacks.
    if self.is_faking_animation_frames() {
        let callback = FakeRequestAnimationFrameCallback {
            document: Trusted::new(self),
        };
        self.global()
            .schedule_callback(OneshotTimerCallback::FakeRequestAnimationFrame(callback),
                               MsDuration::new(FAKE_REQUEST_ANIMATION_FRAME_DELAY));
    } else if !self.running_animation_callbacks.get() {
        // No need to send a `ChangeRunningAnimationsState` if we're running animation callbacks:
        // we're guaranteed to already be in the "animation callbacks present" state.
        //
        // This reduces CPU usage by avoiding needless thread wakeups in the common case of
        // repeated rAF.
        let event = ScriptMsg::ChangeRunningAnimationsState(AnimationState::AnimationCallbacksPresent);
        self.send_to_constellation(event);
    }
    ident
}
/// Cancels a pending animation frame callback by handle. The list entry is
/// kept but its callback is cleared, so unknown handles are a no-op.
/// https://html.spec.whatwg.org/multipage/#dom-window-cancelanimationframe
pub fn cancel_animation_frame(&self, ident: u32) {
    let mut list = self.animation_frame_list.borrow_mut();
    if let Some(pair) = list.iter_mut().find(|pair| pair.0 == ident) {
        pair.1 = None;
    }
}
/// Runs all pending animation frame callbacks, reflows, and manages
/// constellation animation state plus the spurious-frame counter used to
/// detect rAF loops that never mutate the DOM.
/// https://html.spec.whatwg.org/multipage/#run-the-animation-frame-callbacks
pub fn run_the_animation_frame_callbacks(&self) {
    // Swap the list out so callbacks that call requestAnimationFrame
    // register for the *next* frame rather than this one.
    rooted_vec!(let mut animation_frame_list);
    mem::swap(
        &mut *animation_frame_list,
        &mut *self.animation_frame_list.borrow_mut());
    self.running_animation_callbacks.set(true);
    let was_faking_animation_frames = self.is_faking_animation_frames();
    let timing = self.global().performance().Now();
    for (_, callback) in animation_frame_list.drain(..) {
        if let Some(callback) = callback {
            callback.call(self, *timing);
        }
    }
    self.running_animation_callbacks.set(false);
    let spurious = !self.window.reflow(ReflowGoal::ForDisplay,
                                       ReflowQueryType::NoQuery,
                                       ReflowReason::RequestAnimationFrame);
    if spurious && !was_faking_animation_frames {
        // If the rAF callbacks did not mutate the DOM, then the
        // reflow call above means that layout will not be invoked,
        // and therefore no new frame will be sent to the compositor.
        // If this happens, the compositor will not tick the animation
        // and the next rAF will never be called! When this happens
        // for several frames, then the spurious rAF detection below
        // will kick in and use a timer to tick the callbacks. However,
        // for the interim frames where we are deciding whether this rAF
        // is considered spurious, we need to ensure that the layout
        // and compositor *do* tick the animation.
        self.window.force_reflow(ReflowGoal::ForDisplay,
                                 ReflowQueryType::NoQuery,
                                 ReflowReason::RequestAnimationFrame);
    }
    // Only send the animation change state message after running any callbacks.
    // This means that if the animation callback adds a new callback for
    // the next frame (which is the common case), we won't send a NoAnimationCallbacksPresent
    // message quickly followed by an AnimationCallbacksPresent message.
    //
    // If this frame was spurious and we've seen too many spurious frames in a row, tell the
    // constellation to stop giving us video refresh callbacks, to save energy. (A spurious
    // animation frame is one in which the callback did not mutate the DOM—that is, an
    // animation frame that wasn't actually used for animation.)
    if self.animation_frame_list.borrow().is_empty() ||
       (!was_faking_animation_frames && self.is_faking_animation_frames()) {
        mem::swap(&mut *self.animation_frame_list.borrow_mut(),
                  &mut *animation_frame_list);
        let event = ScriptMsg::ChangeRunningAnimationsState(AnimationState::NoAnimationCallbacksPresent);
        self.send_to_constellation(event);
    }
    // Update the counter of spurious animation frames.
    if spurious {
        if self.spurious_animation_frames.get() < SPURIOUS_ANIMATION_FRAME_THRESHOLD {
            self.spurious_animation_frames.set(self.spurious_animation_frames.get() + 1)
        }
    } else {
        self.spurious_animation_frames.set(0)
    }
}
/// Start an asynchronous fetch for `load`; responses are delivered to
/// `fetch_target` via the document's loader.
pub fn fetch_async(&self, load: LoadType,
                   request: RequestInit,
                   fetch_target: IpcSender<FetchResponseMsg>) {
    self.loader.borrow_mut().fetch_async(load, request, fetch_target);
}
// https://html.spec.whatwg.org/multipage/#the-end
// https://html.spec.whatwg.org/multipage/#delay-the-load-event
pub fn finish_load(&self, load: LoadType) {
    // This does not delay the load event anymore.
    debug!("Document got finish_load: {:?}", load);
    self.loader.borrow_mut().finish_load(&load);
    match load {
        LoadType::Stylesheet(_) => {
            // A stylesheet finishing to load may unblock any pending
            // parsing-blocking script or deferred script.
            self.process_pending_parsing_blocking_script();
            // Step 3.
            self.process_deferred_scripts();
        },
        LoadType::PageSource(_) => {
            if self.has_browsing_context {
                // Disarm the reflow timer and trigger the initial reflow.
                self.reflow_timeout.set(None);
                self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
                self.window.reflow(ReflowGoal::ForDisplay,
                                   ReflowQueryType::NoQuery,
                                   ReflowReason::FirstLoad);
            }
            // Deferred scripts have to wait for page to finish loading,
            // this is the first opportunity to process them.
            // Step 3.
            self.process_deferred_scripts();
        },
        _ => {},
    }
    // Step 4 is in another castle, namely at the end of
    // process_deferred_scripts.
    // Step 5 can be found in asap_script_loaded and
    // asap_in_order_script_loaded.
    let loader = self.loader.borrow();
    // If there are remaining blocking loads, or the load event has already
    // been inhibited, document completion must not be queued yet.
    if loader.is_blocked() || loader.events_inhibited() {
        // Step 6.
        return;
    }
    ScriptThread::mark_document_with_no_blocked_loads(self);
}
// https://html.spec.whatwg.org/multipage/#the-end
pub fn maybe_queue_document_completion(&self) {
    if self.loader.borrow().is_blocked() {
        // Step 6.
        return;
    }
    assert!(!self.loader.borrow().events_inhibited());
    // Inhibiting events here guarantees the body below runs at most once
    // per document (see the assert above).
    self.loader.borrow_mut().inhibit_events();
    // The rest will ever run only once per document.
    // Step 7.
    debug!("Document loads are complete.");
    let document = Trusted::new(self);
    self.window.dom_manipulation_task_source().queue(
        task!(fire_load_event: move || {
            let document = document.root();
            let window = document.window();
            if !window.is_alive() {
                return;
            }
            // Step 7.1.
            document.set_ready_state(DocumentReadyState::Complete);
            // Step 7.2.
            if document.browsing_context().is_none() {
                return;
            }
            let event = Event::new(
                window.upcast(),
                atom!("load"),
                EventBubbles::DoesNotBubble,
                EventCancelable::NotCancelable,
            );
            event.set_trusted(true);
            // http://w3c.github.io/navigation-timing/#widl-PerformanceNavigationTiming-loadEventStart
            update_with_current_time_ms(&document.load_event_start);
            // The initial about:blank document does not fire `load`.
            if document.source != DocumentSource::InitialAboutBlank {
                debug!("About to dispatch load for {:?}", document.url());
                // FIXME(nox): Why are errors silenced here?
                let _ = window.upcast::<EventTarget>().dispatch_event_with_target(
                    document.upcast(),
                    &event,
                );
            }
            // http://w3c.github.io/navigation-timing/#widl-PerformanceNavigationTiming-loadEventEnd
            update_with_current_time_ms(&document.load_event_end);
            window.reflow(
                ReflowGoal::ForDisplay,
                ReflowQueryType::NoQuery,
                ReflowReason::DocumentLoaded,
            );
            if document.source != DocumentSource::InitialAboutBlank {
                document.notify_constellation_load();
            }
            // Scroll to the fragment identifier, if any, once loaded.
            if let Some(fragment) = document.url().fragment() {
                document.check_and_scroll_fragment(fragment);
            }
        }),
        self.window.upcast(),
    ).unwrap();
    // Step 8.
    // TODO: pageshow event.
    // Step 9.
    // TODO: pending application cache download process tasks.
    // Step 10.
    // TODO: printing steps.
    // Step 11.
    // TODO: ready for post-load tasks.
    // Step 12.
    // TODO: completely loaded.
}
// https://html.spec.whatwg.org/multipage/#pending-parsing-blocking-script
// There can be at most one parsing-blocking script at a time, hence the
// assert; `load` may already carry the fetch result if it arrived early.
pub fn set_pending_parsing_blocking_script(&self,
                                           script: &HTMLScriptElement,
                                           load: Option<ScriptResult>) {
    assert!(!self.has_pending_parsing_blocking_script());
    *self.pending_parsing_blocking_script.borrow_mut() = Some(PendingScript::new_with_load(script, load));
}
// https://html.spec.whatwg.org/multipage/#pending-parsing-blocking-script
// Whether a parsing-blocking script is currently registered.
pub fn has_pending_parsing_blocking_script(&self) -> bool {
    self.pending_parsing_blocking_script.borrow().is_some()
}
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.d.
pub fn pending_parsing_blocking_script_loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
    // Inner scope: the mutable borrow must be released before
    // process_pending_parsing_blocking_script re-borrows the cell.
    {
        let mut blocking_script = self.pending_parsing_blocking_script.borrow_mut();
        let entry = blocking_script.as_mut().unwrap();
        assert!(&*entry.element == element);
        entry.loaded(result);
    }
    self.process_pending_parsing_blocking_script();
}
// Execute the pending parsing-blocking script if it has finished loading
// and no script-blocking stylesheets remain, then resume the parser.
fn process_pending_parsing_blocking_script(&self) {
    // Script-blocking stylesheets must finish first.
    if self.script_blocking_stylesheets_count.get() > 0 {
        return;
    }
    // take_result yields Some only once the script's load has completed.
    let pair = self.pending_parsing_blocking_script
        .borrow_mut()
        .as_mut()
        .and_then(PendingScript::take_result);
    if let Some((element, result)) = pair {
        *self.pending_parsing_blocking_script.borrow_mut() = None;
        self.get_current_parser().unwrap().resume_with_pending_parsing_blocking_script(&element, result);
    }
}
// https://html.spec.whatwg.org/multipage/#set-of-scripts-that-will-execute-as-soon-as-possible
pub fn add_asap_script(&self, script: &HTMLScriptElement) {
    self.asap_scripts_set.borrow_mut().push(JS::from_ref(script));
}
/// https://html.spec.whatwg.org/multipage/#the-end step 5.
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.d.
pub fn asap_script_loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
    // Inner scope: drop the borrow of the set before executing the script,
    // since script may re-enter the document and touch the set again.
    {
        let mut scripts = self.asap_scripts_set.borrow_mut();
        let idx = scripts.iter().position(|entry| &**entry == element).unwrap();
        scripts.swap_remove(idx);
    }
    element.execute(result);
}
// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-in-order-as-soon-as-possible
pub fn push_asap_in_order_script(&self, script: &HTMLScriptElement) {
    self.asap_in_order_scripts_list.push(script);
}
/// https://html.spec.whatwg.org/multipage/#the-end step 5.
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.c.
pub fn asap_in_order_script_loaded(&self,
                                   element: &HTMLScriptElement,
                                   result: ScriptResult) {
    self.asap_in_order_scripts_list.loaded(element, result);
    // Run every script that is now ready, preserving list order: a later
    // script only runs once all earlier ones have finished loading.
    while let Some((element, result)) = self.asap_in_order_scripts_list.take_next_ready_to_be_executed() {
        element.execute(result);
    }
}
// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-when-the-document-has-finished-parsing
pub fn add_deferred_script(&self, script: &HTMLScriptElement) {
    self.deferred_scripts.push(script);
}
/// https://html.spec.whatwg.org/multipage/#the-end step 3.
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.d.
pub fn deferred_script_loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
    self.deferred_scripts.loaded(element, result);
    self.process_deferred_scripts();
}
/// https://html.spec.whatwg.org/multipage/#the-end step 3.
fn process_deferred_scripts(&self) {
    // Deferred scripts only run once parsing has finished (readyState
    // "interactive") and before the document becomes "complete".
    if self.ready_state.get() != DocumentReadyState::Interactive {
        return;
    }
    // Part of substep 1.
    loop {
        // Re-checked every iteration: an executed script may have inserted
        // a stylesheet that blocks further script execution.
        if self.script_blocking_stylesheets_count.get() > 0 {
            return;
        }
        if let Some((element, result)) = self.deferred_scripts.take_next_ready_to_be_executed() {
            element.execute(result);
        } else {
            break;
        }
    }
    if self.deferred_scripts.is_empty() {
        // https://html.spec.whatwg.org/multipage/#the-end step 4.
        self.maybe_dispatch_dom_content_loaded();
    }
}
// https://html.spec.whatwg.org/multipage/#the-end step 4.
// Fires DOMContentLoaded at most once per document and records the
// Navigation Timing start/end marks around the dispatch.
pub fn maybe_dispatch_dom_content_loaded(&self) {
    if self.domcontentloaded_dispatched.get() {
        return;
    }
    self.domcontentloaded_dispatched.set(true);
    assert!(self.ReadyState() != DocumentReadyState::Complete,
            "Complete before DOMContentLoaded?");
    update_with_current_time_ms(&self.dom_content_loaded_event_start);
    // Step 4.1.
    let window = self.window();
    window.dom_manipulation_task_source().queue_event(self.upcast(), atom!("DOMContentLoaded"),
        EventBubbles::Bubbles, EventCancelable::NotCancelable, window);
    window.reflow(ReflowGoal::ForDisplay,
                  ReflowQueryType::NoQuery,
                  ReflowReason::DOMContentLoaded);
    update_with_current_time_ms(&self.dom_content_loaded_event_end);
    // Step 4.2.
    // TODO: client message queue.
}
// https://html.spec.whatwg.org/multipage/#abort-a-document
// Stops all loading activity for this document and, recursively, for every
// child browsing context's document.
fn abort(&self) {
    // We need to inhibit the loader before anything else.
    self.loader.borrow_mut().inhibit_events();
    // Step 1.
    for iframe in self.iter_iframes() {
        if let Some(document) = iframe.GetContentDocument() {
            // TODO: abort the active documents of every child browsing context.
            document.abort();
            // TODO: salvageable flag.
        }
    }
    // Step 2: drop every pending script so none of them can run later.
    self.script_blocking_stylesheets_count.set(0);
    *self.pending_parsing_blocking_script.borrow_mut() = None;
    *self.asap_scripts_set.borrow_mut() = vec![];
    self.asap_in_order_scripts_list.clear();
    self.deferred_scripts.clear();
    // TODO: https://github.com/servo/servo/issues/15236
    self.window.cancel_all_tasks();
    // Step 3.
    if let Some(parser) = self.get_current_parser() {
        parser.abort();
        // TODO: salvageable flag.
    }
}
/// Tell the constellation that this document has finished loading.
pub fn notify_constellation_load(&self) {
    self.send_to_constellation(ScriptMsg::LoadComplete);
}
/// Set (or clear, with `None`) the parser currently associated with this document.
pub fn set_current_parser(&self, script: Option<&ServoParser>) {
    self.current_parser.set(script);
}
/// The parser currently associated with this document, if any.
pub fn get_current_parser(&self) -> Option<Root<ServoParser>> {
    self.current_parser.get()
}
/// Iterate over all iframes in the document, in tree (pre-)order.
pub fn iter_iframes(&self) -> impl Iterator<Item=Root<HTMLIFrameElement>> {
    self.upcast::<Node>()
        .traverse_preorder()
        .filter_map(Root::downcast::<HTMLIFrameElement>)
}
/// Find the iframe element in the document hosting the given browsing
/// context, if any.
pub fn find_iframe(&self, browsing_context_id: BrowsingContextId) -> Option<Root<HTMLIFrameElement>> {
    self.iter_iframes()
        .find(|node| node.browsing_context_id() == Some(browsing_context_id))
}
/// Find a mozbrowser iframe element in the document.
pub fn find_mozbrowser_iframe(&self,
                              top_level_browsing_context_id: TopLevelBrowsingContextId)
                              -> Option<Root<HTMLIFrameElement>>
{
    self.find_iframe(BrowsingContextId::from(top_level_browsing_context_id))
        .map(|iframe| {
            // A frame hosting a top-level browsing context must be a
            // mozbrowser frame.
            assert!(iframe.Mozbrowser());
            iframe
        })
}
/// Navigation Timing: the `domLoading` timestamp in ms (0 until set — presumably; TODO confirm).
pub fn get_dom_loading(&self) -> u64 {
    self.dom_loading.get()
}
/// Navigation Timing: the `domInteractive` timestamp in ms.
pub fn get_dom_interactive(&self) -> u64 {
    self.dom_interactive.get()
}
/// Navigation Timing: ms timestamp taken just before DOMContentLoaded is queued
/// (see maybe_dispatch_dom_content_loaded).
pub fn get_dom_content_loaded_event_start(&self) -> u64 {
    self.dom_content_loaded_event_start.get()
}
/// Navigation Timing: ms timestamp taken just after DOMContentLoaded handling.
pub fn get_dom_content_loaded_event_end(&self) -> u64 {
    self.dom_content_loaded_event_end.get()
}
/// Navigation Timing: the `domComplete` timestamp in ms.
pub fn get_dom_complete(&self) -> u64 {
    self.dom_complete.get()
}
/// Navigation Timing: ms timestamp taken just before the `load` event is
/// dispatched (see maybe_queue_document_completion).
pub fn get_load_event_start(&self) -> u64 {
    self.load_event_start.get()
}
/// Navigation Timing: ms timestamp taken just after the `load` event is
/// dispatched (see maybe_queue_document_completion).
pub fn get_load_event_end(&self) -> u64 {
    self.load_event_end.get()
}
// https://html.spec.whatwg.org/multipage/#fire-a-focus-event
// Synchronously fires a trusted, non-bubbling "focus"/"blur" FocusEvent at
// `node`, with `related_target` as the other endpoint of the focus change.
fn fire_focus_event(&self, focus_event_type: FocusEventType, node: &Node, related_target: Option<&EventTarget>) {
    let (event_name, does_bubble) = match focus_event_type {
        FocusEventType::Focus => (DOMString::from("focus"), EventBubbles::DoesNotBubble),
        FocusEventType::Blur => (DOMString::from("blur"), EventBubbles::DoesNotBubble),
    };
    let event = FocusEvent::new(&self.window,
                                event_name,
                                does_bubble,
                                EventCancelable::NotCancelable,
                                Some(&self.window),
                                0i32,
                                related_target);
    let event = event.upcast::<Event>();
    event.set_trusted(true);
    let target = node.upcast();
    event.fire(target);
}
/// https://html.spec.whatwg.org/multipage/#cookie-averse-document-object
pub fn is_cookie_averse(&self) -> bool {
    // A document is cookie-averse unless it both has a browsing context and
    // has a URL with a network scheme.
    !(self.has_browsing_context && url_has_network_scheme(&self.url()))
}
/// Returns the addresses of the nodes under `client_point`, obtained by
/// issuing a NodesFromPoint layout query; empty if the query reflow fails.
pub fn nodes_from_point(&self, client_point: &Point2D<f32>) -> Vec<UntrustedNodeAddress> {
    if !self.window.reflow(ReflowGoal::ForScriptQuery,
                           ReflowQueryType::NodesFromPoint(*client_point),
                           ReflowReason::Query) {
        return vec!();
    };
    self.window.layout().nodes_from_point_response()
}
/// https://html.spec.whatwg.org/multipage/#look-up-a-custom-element-definition
pub fn lookup_custom_element_definition(&self,
                                        namespace: &Namespace,
                                        local_name: &LocalName,
                                        is: Option<&LocalName>)
                                        -> Option<Rc<CustomElementDefinition>> {
    // Custom elements are behind a pref; a disabled pref means no
    // definition is ever found.
    if !PREFS.get("dom.customelements.enabled").as_boolean().unwrap_or(false) {
        return None;
    }
    // Step 1
    if *namespace != ns!(html) {
        return None;
    }
    // Step 2
    if !self.has_browsing_context {
        return None;
    }
    // Step 3
    let registry = self.window.CustomElements();
    registry.lookup_definition(local_name, is)
}
// Send `msg` to the constellation over this document's script channel.
// Panics if the channel is closed, which should not happen while the
// document is alive.
fn send_to_constellation(&self, msg: ScriptMsg) {
    let global_scope = self.window.upcast::<GlobalScope>();
    global_scope.script_to_constellation_chan().send(msg).unwrap();
}
}
/// How this document came into existence; used e.g. to suppress `load`
/// notifications for the initial about:blank document.
#[derive(HeapSizeOf, JSTraceable, PartialEq)]
pub enum DocumentSource {
    FromParser,
    InitialAboutBlank,
    NotFromParser,
}
/// Accessors used by the layout thread on an unrooted `LayoutJS<Document>`.
/// All methods are unsafe because they dereference the raw DOM pointer
/// without the usual rooting guarantees.
#[allow(unsafe_code)]
pub trait LayoutDocumentHelpers {
    unsafe fn is_html_document_for_layout(&self) -> bool;
    unsafe fn drain_pending_restyles(&self) -> Vec<(LayoutJS<Element>, PendingRestyle)>;
    unsafe fn needs_paint_from_layout(&self);
    unsafe fn will_paint(&self);
    unsafe fn quirks_mode(&self) -> QuirksMode;
    unsafe fn style_shared_lock(&self) -> &StyleSharedRwLock;
}
#[allow(unsafe_code)]
impl LayoutDocumentHelpers for LayoutJS<Document> {
    #[inline]
    unsafe fn is_html_document_for_layout(&self) -> bool {
        (*self.unsafe_get()).is_html_document
    }
    #[inline]
    #[allow(unrooted_must_root)]
    unsafe fn drain_pending_restyles(&self) -> Vec<(LayoutJS<Element>, PendingRestyle)> {
        let mut elements = (*self.unsafe_get()).pending_restyles.borrow_mut_for_layout();
        // Elements were in a document when they were adding to this list, but that
        // may no longer be true when the next layout occurs.
        let result = elements.drain()
            .map(|(k, v)| (k.to_layout(), v))
            .filter(|&(ref k, _)| k.upcast::<Node>().get_flag(IS_IN_DOC))
            .collect();
        result
    }
    #[inline]
    unsafe fn needs_paint_from_layout(&self) {
        (*self.unsafe_get()).needs_paint.set(true)
    }
    #[inline]
    unsafe fn will_paint(&self) {
        (*self.unsafe_get()).needs_paint.set(false)
    }
    #[inline]
    unsafe fn quirks_mode(&self) -> QuirksMode {
        (*self.unsafe_get()).quirks_mode()
    }
    #[inline]
    unsafe fn style_shared_lock(&self) -> &StyleSharedRwLock {
        (*self.unsafe_get()).style_shared_lock()
    }
}
// https://html.spec.whatwg.org/multipage/#is-a-registrable-domain-suffix-of-or-is-equal-to
// The spec says to return a bool, we actually return an Option<Host> containing
// the parsed host in the successful case, to avoid having to re-parse the host.
fn get_registrable_domain_suffix_of_or_is_equal_to(host_suffix_string: &str, original_host: Host) -> Option<Host> {
    // Step 1
    if host_suffix_string.is_empty() {
        return None;
    }
    // Step 2-3.
    let host = match Host::parse(host_suffix_string) {
        Ok(host) => host,
        Err(_) => return None,
    };
    // Step 4.
    if host != original_host {
        // Step 4.1: both hosts must be domains (not IP addresses) for a
        // suffix relationship to make sense.
        let host = match host {
            Host::Domain(ref host) => host,
            _ => return None,
        };
        let original_host = match original_host {
            Host::Domain(ref original_host) => original_host,
            _ => return None,
        };
        // Step 4.2: `host` must be a strict, dot-separated suffix of
        // `original_host` (checked_sub fails when host is longer).
        let (prefix, suffix) = match original_host.len().checked_sub(host.len()) {
            Some(index) => original_host.split_at(index),
            None => return None,
        };
        if !prefix.ends_with(".") {
            return None;
        }
        if suffix != host {
            return None;
        }
        // Step 4.3: reject public suffixes such as "com" or "co.uk".
        if is_pub_domain(host) {
            return None;
        }
    }
    // Step 5
    Some(host)
}
/// https://url.spec.whatwg.org/#network-scheme
fn url_has_network_scheme(url: &ServoUrl) -> bool {
    // The network schemes are exactly "ftp", "http" and "https".
    ["ftp", "http", "https"].contains(&url.scheme())
}
/// Whether a document is created with an associated browsing context;
/// a typed alternative to passing a bare bool to the constructors.
#[derive(Clone, Copy, Eq, HeapSizeOf, JSTraceable, PartialEq)]
pub enum HasBrowsingContext {
    No,
    Yes,
}
impl Document {
pub fn new_inherited(window: &Window,
has_browsing_context: HasBrowsingContext,
url: Option<ServoUrl>,
origin: MutableOrigin,
is_html_document: IsHTMLDocument,
content_type: Option<DOMString>,
last_modified: Option<String>,
activity: DocumentActivity,
source: DocumentSource,
doc_loader: DocumentLoader,
referrer: Option<String>,
referrer_policy: Option<ReferrerPolicy>)
-> Document {
let url = url.unwrap_or_else(|| ServoUrl::parse("about:blank").unwrap());
let (ready_state, domcontentloaded_dispatched) = if source == DocumentSource::FromParser {
(DocumentReadyState::Loading, false)
} else {
(DocumentReadyState::Complete, true)
};
Document {
node: Node::new_document_node(),
window: JS::from_ref(window),
has_browsing_context: has_browsing_context == HasBrowsingContext::Yes,
implementation: Default::default(),
content_type: match content_type {
Some(string) => string,
None => DOMString::from(match is_html_document {
// https://dom.spec.whatwg.org/#dom-domimplementation-createhtmldocument
IsHTMLDocument::HTMLDocument => "text/html",
// https://dom.spec.whatwg.org/#concept-document-content-type
IsHTMLDocument::NonHTMLDocument => "application/xml",
}),
},
last_modified: last_modified,
url: DOMRefCell::new(url),
// https://dom.spec.whatwg.org/#concept-document-quirks
quirks_mode: Cell::new(QuirksMode::NoQuirks),
// https://dom.spec.whatwg.org/#concept-document-encoding
encoding: Cell::new(UTF_8),
is_html_document: is_html_document == IsHTMLDocument::HTMLDocument,
activity: Cell::new(activity),
id_map: DOMRefCell::new(HashMap::new()),
tag_map: DOMRefCell::new(HashMap::new()),
tagns_map: DOMRefCell::new(HashMap::new()),
classes_map: DOMRefCell::new(HashMap::new()),
images: Default::default(),
embeds: Default::default(),
links: Default::default(),
forms: Default::default(),
scripts: Default::default(),
anchors: Default::default(),
applets: Default::default(),
style_shared_lock: {
lazy_static! {
/// Per-process shared lock for author-origin stylesheets
///
/// FIXME: make it per-document or per-pipeline instead:
/// https://github.com/servo/servo/issues/16027
/// (Need to figure out what to do with the style attribute
/// of elements adopted into another document.)
static ref PER_PROCESS_AUTHOR_SHARED_LOCK: StyleSharedRwLock = {
StyleSharedRwLock::new()
};
}
PER_PROCESS_AUTHOR_SHARED_LOCK.clone()
//StyleSharedRwLock::new()
},
stylesheets: DOMRefCell::new(StylesheetSet::new()),
stylesheet_list: MutNullableJS::new(None),
ready_state: Cell::new(ready_state),
domcontentloaded_dispatched: Cell::new(domcontentloaded_dispatched),
possibly_focused: Default::default(),
focused: Default::default(),
current_script: Default::default(),
pending_parsing_blocking_script: Default::default(),
script_blocking_stylesheets_count: Cell::new(0u32),
deferred_scripts: Default::default(),
asap_in_order_scripts_list: Default::default(),
asap_scripts_set: Default::default(),
scripting_enabled: has_browsing_context == HasBrowsingContext::Yes,
animation_frame_ident: Cell::new(0),
animation_frame_list: DOMRefCell::new(vec![]),
running_animation_callbacks: Cell::new(false),
loader: DOMRefCell::new(doc_loader),
current_parser: Default::default(),
reflow_timeout: Cell::new(None),
base_element: Default::default(),
appropriate_template_contents_owner_document: Default::default(),
pending_restyles: DOMRefCell::new(HashMap::new()),
needs_paint: Cell::new(false),
active_touch_points: DOMRefCell::new(Vec::new()),
dom_loading: Cell::new(Default::default()),
dom_interactive: Cell::new(Default::default()),
dom_content_loaded_event_start: Cell::new(Default::default()),
dom_content_loaded_event_end: Cell::new(Default::default()),
dom_complete: Cell::new(Default::default()),
load_event_start: Cell::new(Default::default()),
load_event_end: Cell::new(Default::default()),
https_state: Cell::new(HttpsState::None),
touchpad_pressure_phase: Cell::new(TouchpadPressurePhase::BeforeClick),
origin: origin,
referrer: referrer,
referrer_policy: Cell::new(referrer_policy),
target_element: MutNullableJS::new(None),
last_click_info: DOMRefCell::new(None),
ignore_destructive_writes_counter: Default::default(),
spurious_animation_frames: Cell::new(0),
dom_count: Cell::new(1),
fullscreen_element: MutNullableJS::new(None),
form_id_listener_map: Default::default(),
source: source,
}
}
// https://dom.spec.whatwg.org/#dom-document-document
pub fn Constructor(window: &Window) -> Fallible<Root<Document>> {
let doc = window.Document();
let docloader = DocumentLoader::new(&*doc.loader());
Ok(Document::new(window,
HasBrowsingContext::No,
None,
doc.origin().clone(),
IsHTMLDocument::NonHTMLDocument,
None,
None,
DocumentActivity::Inactive,
DocumentSource::NotFromParser,
docloader,
None,
None))
}
pub fn new(window: &Window,
has_browsing_context: HasBrowsingContext,
url: Option<ServoUrl>,
origin: MutableOrigin,
doctype: IsHTMLDocument,
content_type: Option<DOMString>,
last_modified: Option<String>,
activity: DocumentActivity,
source: DocumentSource,
doc_loader: DocumentLoader,
referrer: Option<String>,
referrer_policy: Option<ReferrerPolicy>)
-> Root<Document> {
let document = reflect_dom_object(box Document::new_inherited(window,
has_browsing_context,
url,
origin,
doctype,
content_type,
last_modified,
activity,
source,
doc_loader,
referrer,
referrer_policy),
window,
DocumentBinding::Wrap);
{
let node = document.upcast::<Node>();
node.set_owner_doc(&document);
}
document
}
fn create_node_list<F: Fn(&Node) -> bool>(&self, callback: F) -> Root<NodeList> {
let doc = self.GetDocumentElement();
let maybe_node = doc.r().map(Castable::upcast::<Node>);
let iter = maybe_node.iter()
.flat_map(|node| node.traverse_preorder())
.filter(|node| callback(&node));
NodeList::new_simple_list(&self.window, iter)
}
fn get_html_element(&self) -> Option<Root<HTMLHtmlElement>> {
self.GetDocumentElement().and_then(Root::downcast)
}
/// Return a reference to the per-document shared lock used in stylesheets.
pub fn style_shared_lock(&self) -> &StyleSharedRwLock {
&self.style_shared_lock
}
/// Flushes the stylesheet list, and returns whether any stylesheet changed.
pub fn flush_stylesheets_for_reflow(&self) -> bool {
// NOTE(emilio): The invalidation machinery is used on the replicated
// list on the layout thread.
//
// FIXME(emilio): This really should differentiate between CSSOM changes
// and normal stylesheets additions / removals, because in the last case
// the layout thread already has that information and we could avoid
// dirtying the whole thing.
let mut stylesheets = self.stylesheets.borrow_mut();
let have_changed = stylesheets.has_changed();
stylesheets.flush_without_invalidation();
have_changed
}
/// Returns a `Device` suitable for media query evaluation.
///
/// FIXME(emilio): This really needs to be somehow more in sync with layout.
/// Feels like a hack.
///
/// Also, shouldn't return an option, I'm quite sure.
pub fn device(&self) -> Option<Device> {
let window_size = match self.window().window_size() {
Some(ws) => ws,
None => return None,
};
let viewport_size = window_size.initial_viewport;
let device_pixel_ratio = window_size.device_pixel_ratio;
Some(Device::new(MediaType::screen(), viewport_size, device_pixel_ratio))
}
/// Remove a stylesheet owned by `owner` from the list of document sheets.
#[allow(unrooted_must_root)] // Owner needs to be rooted already necessarily.
pub fn remove_stylesheet(&self, owner: &Element, s: &Arc<Stylesheet>) {
self.window()
.layout_chan()
.send(Msg::RemoveStylesheet(s.clone()))
.unwrap();
let guard = s.shared_lock.read();
// FIXME(emilio): Would be nice to remove the clone, etc.
self.stylesheets.borrow_mut().remove_stylesheet(
None,
StyleSheetInDocument {
sheet: s.clone(),
owner: JS::from_ref(owner),
},
&guard,
);
}
/// Add a stylesheet owned by `owner` to the list of document sheets, in the
/// correct tree position.
#[allow(unrooted_must_root)] // Owner needs to be rooted already necessarily.
pub fn add_stylesheet(&self, owner: &Element, sheet: Arc<Stylesheet>) {
// FIXME(emilio): It'd be nice to unify more code between the elements
// that own stylesheets, but StylesheetOwner is more about loading
// them...
debug_assert!(owner.as_stylesheet_owner().is_some() ||
owner.is::<HTMLMetaElement>(), "Wat");
let mut stylesheets = self.stylesheets.borrow_mut();
let insertion_point =
stylesheets
.iter()
.map(|(sheet, _origin)| sheet)
.find(|sheet_in_doc| {
owner.upcast::<Node>().is_before(sheet_in_doc.owner.upcast())
}).cloned();
self.window()
.layout_chan()
.send(Msg::AddStylesheet(
sheet.clone(),
insertion_point.as_ref().map(|s| s.sheet.clone())
))
.unwrap();
let sheet = StyleSheetInDocument {
sheet,
owner: JS::from_ref(owner),
};
let lock = self.style_shared_lock();
let guard = lock.read();
match insertion_point {
Some(ip) => {
stylesheets.insert_stylesheet_before(None, sheet, ip, &guard);
}
None => {
stylesheets.append_stylesheet(None, sheet, &guard);
}
}
}
/// Returns the number of document stylesheets.
pub fn stylesheet_count(&self) -> usize {
self.stylesheets.borrow().len()
}
pub fn stylesheet_at(&self, index: usize) -> Option<Root<CSSStyleSheet>> {
let stylesheets = self.stylesheets.borrow();
stylesheets.get(Origin::Author, index).and_then(|s| {
s.owner.upcast::<Node>().get_cssom_stylesheet()
})
}
/// https://html.spec.whatwg.org/multipage/#appropriate-template-contents-owner-document
pub fn appropriate_template_contents_owner_document(&self) -> Root<Document> {
self.appropriate_template_contents_owner_document.or_init(|| {
let doctype = if self.is_html_document {
IsHTMLDocument::HTMLDocument
} else {
IsHTMLDocument::NonHTMLDocument
};
let new_doc = Document::new(self.window(),
HasBrowsingContext::No,
None,
// https://github.com/whatwg/html/issues/2109
MutableOrigin::new(ImmutableOrigin::new_opaque()),
doctype,
None,
None,
DocumentActivity::Inactive,
DocumentSource::NotFromParser,
DocumentLoader::new(&self.loader()),
None,
None);
new_doc.appropriate_template_contents_owner_document.set(Some(&new_doc));
new_doc
})
}
pub fn get_element_by_id(&self, id: &Atom) -> Option<Root<Element>> {
self.id_map.borrow().get(&id).map(|ref elements| Root::from_ref(&*(*elements)[0]))
}
pub fn ensure_pending_restyle(&self, el: &Element) -> RefMut<PendingRestyle> {
let map = self.pending_restyles.borrow_mut();
RefMut::map(map, |m| m.entry(JS::from_ref(el)).or_insert_with(PendingRestyle::new))
}
pub fn element_state_will_change(&self, el: &Element) {
let mut entry = self.ensure_pending_restyle(el);
if entry.snapshot.is_none() {
entry.snapshot = Some(Snapshot::new(el.html_element_in_html_document()));
}
let snapshot = entry.snapshot.as_mut().unwrap();
if snapshot.state.is_none() {
snapshot.state = Some(el.state());
}
}
pub fn element_attr_will_change(&self, el: &Element, attr: &Attr) {
// FIXME(emilio): Kind of a shame we have to duplicate this.
//
// I'm getting rid of the whole hashtable soon anyway, since all it does
// right now is populate the element restyle data in layout, and we
// could in theory do it in the DOM I think.
let mut entry = self.ensure_pending_restyle(el);
if entry.snapshot.is_none() {
entry.snapshot = Some(Snapshot::new(el.html_element_in_html_document()));
}
if attr.local_name() == &local_name!("style") {
entry.hint.insert(RESTYLE_STYLE_ATTRIBUTE);
}
if vtable_for(el.upcast()).attribute_affects_presentational_hints(attr) {
entry.hint.insert(RESTYLE_SELF);
}
let snapshot = entry.snapshot.as_mut().unwrap();
if attr.local_name() == &local_name!("id") {
snapshot.id_changed = true;
} else if attr.local_name() == &local_name!("class") {
snapshot.class_changed = true;
} else {
snapshot.other_attributes_changed = true;
}
if snapshot.attrs.is_none() {
let attrs = el.attrs()
.iter()
.map(|attr| (attr.identifier().clone(), attr.value().clone()))
.collect();
snapshot.attrs = Some(attrs);
}
}
pub fn set_referrer_policy(&self, policy: Option<ReferrerPolicy>) {
self.referrer_policy.set(policy);
}
//TODO - default still at no-referrer
pub fn get_referrer_policy(&self) -> Option<ReferrerPolicy> {
return self.referrer_policy.get();
}
pub fn set_target_element(&self, node: Option<&Element>) {
if let Some(ref element) = self.target_element.get() {
element.set_target_state(false);
}
self.target_element.set(node);
if let Some(ref element) = self.target_element.get() {
element.set_target_state(true);
}
self.window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::ElementStateChanged);
}
pub fn incr_ignore_destructive_writes_counter(&self) {
self.ignore_destructive_writes_counter.set(
self.ignore_destructive_writes_counter.get() + 1);
}
pub fn decr_ignore_destructive_writes_counter(&self) {
self.ignore_destructive_writes_counter.set(
self.ignore_destructive_writes_counter.get() - 1);
}
/// Whether we've seen so many spurious animation frames (i.e. animation frames that didn't
/// mutate the DOM) that we've decided to fall back to fake ones.
fn is_faking_animation_frames(&self) -> bool {
self.spurious_animation_frames.get() >= SPURIOUS_ANIMATION_FRAME_THRESHOLD
}
// https://fullscreen.spec.whatwg.org/#dom-element-requestfullscreen
#[allow(unrooted_must_root)]
pub fn enter_fullscreen(&self, pending: &Element) -> Rc<Promise> {
// Step 1
let promise = Promise::new(self.global().r());
let mut error = false;
// Step 4
// check namespace
match *pending.namespace() {
ns!(mathml) => {
if pending.local_name().as_ref() != "math" {
error = true;
}
}
ns!(svg) => {
if pending.local_name().as_ref() != "svg" {
error = true;
}
}
ns!(html) => (),
_ => error = true,
}
// fullscreen element ready check
if !pending.fullscreen_element_ready_check() {
error = true;
}
// TODO fullscreen is supported
// TODO This algorithm is allowed to request fullscreen.
// Step 5 Parallel start
let window = self.window();
// Step 6
if !error {
let event = ScriptMsg::SetFullscreenState(true);
self.send_to_constellation(event);
}
// Step 7
let trusted_pending = Trusted::new(pending);
let trusted_promise = TrustedPromise::new(promise.clone());
let handler = ElementPerformFullscreenEnter::new(trusted_pending, trusted_promise, error);
let script_msg = CommonScriptMsg::Task(ScriptThreadEventCategory::EnterFullscreen, handler);
let msg = MainThreadScriptMsg::Common(script_msg);
window.main_thread_script_chan().send(msg).unwrap();
promise
}
// https://fullscreen.spec.whatwg.org/#exit-fullscreen
#[allow(unrooted_must_root)]
pub fn exit_fullscreen(&self) -> Rc<Promise> {
let global = self.global();
// Step 1
let promise = Promise::new(global.r());
// Step 2
if self.fullscreen_element.get().is_none() {
promise.reject_error(Error::Type(String::from("fullscreen is null")));
return promise
}
// TODO Step 3-6
let element = self.fullscreen_element.get().unwrap();
// Step 7 Parallel start
let window = self.window();
// Step 8
let event = ScriptMsg::SetFullscreenState(false);
self.send_to_constellation(event);
// Step 9
let trusted_element = Trusted::new(element.r());
let trusted_promise = TrustedPromise::new(promise.clone());
let handler = ElementPerformFullscreenExit::new(trusted_element, trusted_promise);
let script_msg = CommonScriptMsg::Task(ScriptThreadEventCategory::ExitFullscreen, handler);
let msg = MainThreadScriptMsg::Common(script_msg);
window.main_thread_script_chan().send(msg).unwrap();
promise
}
pub fn set_fullscreen_element(&self, element: Option<&Element>) {
self.fullscreen_element.set(element);
}
<|fim▁hole|> pub fn get_allow_fullscreen(&self) -> bool {
// https://html.spec.whatwg.org/multipage/#allowed-to-use
match self.browsing_context() {
// Step 1
None => false,
Some(_) => {
// Step 2
let window = self.window();
if window.is_top_level() {
true
} else {
// Step 3
window.GetFrameElement().map_or(false, |el| el.has_attribute(&local_name!("allowfullscreen")))
}
}
}
}
/// Re-runs the form-owner association for every form control registered
/// as a listener on the given `id` (called when the element carrying that
/// id changes, which can change which <form> a control belongs to).
fn reset_form_owner_for_listeners(&self, id: &Atom) {
    let map = self.form_id_listener_map.borrow();
    if let Some(listeners) = map.get(id) {
        for listener in listeners {
            // Only form controls register themselves in this map, so the
            // downcast is expected to succeed.
            listener.r().as_maybe_form_control()
                .expect("Element must be a form control")
                .reset_form_owner();
        }
    }
}
}
impl Element {
    /// Returns true when click events should be suppressed because this
    /// element is a form control type (button, input, option, select or
    /// textarea) that is currently disabled.
    fn click_event_filter_by_disabled_state(&self) -> bool {
        match self.upcast::<Node>().type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLButtonElement)) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) |
            // NodeTypeId::Element(ElementTypeId::HTMLKeygenElement) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLOptionElement)) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement)) =>
                // Equivalent to the guard form `... if self.disabled_state()
                // => true` with a `_ => false` fallback.
                self.disabled_state(),
            _ => false,
        }
    }
}
impl DocumentMethods for Document {
// https://drafts.csswg.org/cssom/#dom-document-stylesheets
// https://drafts.csswg.org/cssom/#dom-document-stylesheets
// Lazily creates and caches the document's StyleSheetList wrapper.
fn StyleSheets(&self) -> Root<StyleSheetList> {
    self.stylesheet_list.or_init(|| StyleSheetList::new(&self.window, JS::from_ref(&self)))
}
// https://dom.spec.whatwg.org/#dom-document-implementation
// Lazily creates and caches the DOMImplementation object.
fn Implementation(&self) -> Root<DOMImplementation> {
    self.implementation.or_init(|| DOMImplementation::new(self))
}
// https://dom.spec.whatwg.org/#dom-document-url
fn URL(&self) -> USVString {
    USVString(String::from(self.url().as_str()))
}
// https://html.spec.whatwg.org/multipage/#dom-document-activeelement
fn GetActiveElement(&self) -> Option<Root<Element>> {
    // TODO: Step 2.
    match self.get_focused_element() {
        Some(element) => Some(element), // Step 3. and 4.
        None => match self.GetBody() { // Step 5: fall back to body,
            Some(body) => Some(Root::upcast(body)),
            // then to the document element.
            None => self.GetDocumentElement(),
        },
    }
}
// https://html.spec.whatwg.org/multipage/#dom-document-hasfocus
fn HasFocus(&self) -> bool {
    // Step 1-2: a top-level (no parent), fully active document has focus.
    if self.window().parent_info().is_none() && self.is_fully_active() {
        return true;
    }
    // TODO Step 3.
    false
}
// https://html.spec.whatwg.org/multipage/#dom-document-domain
fn Domain(&self) -> DOMString {
// Step 1.
if !self.has_browsing_context {
return DOMString::new();
}
// Step 2.
match self.origin.effective_domain() {
// Step 3.
None => DOMString::new(),
// Step 4.
Some(Host::Domain(domain)) => DOMString::from(domain),
Some(host) => DOMString::from(host.to_string()),
}
}
// https://html.spec.whatwg.org/multipage/#dom-document-domain
fn SetDomain(&self, value: DOMString) -> ErrorResult {
// Step 1.
if !self.has_browsing_context {
return Err(Error::Security);
}
// TODO: Step 2. "If this Document object's active sandboxing
// flag set has its sandboxed document.domain browsing context
// flag set, then throw a "SecurityError" DOMException."
// Steps 3-4.
let effective_domain = match self.origin.effective_domain() {
Some(effective_domain) => effective_domain,
None => return Err(Error::Security),
};
// Step 5
let host = match get_registrable_domain_suffix_of_or_is_equal_to(&*value, effective_domain) {
None => return Err(Error::Security),
Some(host) => host,
};
// Step 6
self.origin.set_domain(host);
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-referrer
fn Referrer(&self) -> DOMString {
match self.referrer {
Some(ref referrer) => DOMString::from(referrer.to_string()),
None => DOMString::new()
}
}
// https://dom.spec.whatwg.org/#dom-document-documenturi
fn DocumentURI(&self) -> USVString {
self.URL()
}
// https://dom.spec.whatwg.org/#dom-document-compatmode
fn CompatMode(&self) -> DOMString {
DOMString::from(match self.quirks_mode.get() {
QuirksMode::LimitedQuirks | QuirksMode::NoQuirks => "CSS1Compat",
QuirksMode::Quirks => "BackCompat",
})
}
// https://dom.spec.whatwg.org/#dom-document-characterset
fn CharacterSet(&self) -> DOMString {
DOMString::from(match self.encoding.get().name() {
"utf-8" => "UTF-8",
"ibm866" => "IBM866",
"iso-8859-2" => "ISO-8859-2",
"iso-8859-3" => "ISO-8859-3",
"iso-8859-4" => "ISO-8859-4",
"iso-8859-5" => "ISO-8859-5",
"iso-8859-6" => "ISO-8859-6",
"iso-8859-7" => "ISO-8859-7",
"iso-8859-8" => "ISO-8859-8",
"iso-8859-8-i" => "ISO-8859-8-I",
"iso-8859-10" => "ISO-8859-10",
"iso-8859-13" => "ISO-8859-13",
"iso-8859-14" => "ISO-8859-14",
"iso-8859-15" => "ISO-8859-15",
"iso-8859-16" => "ISO-8859-16",
"koi8-r" => "KOI8-R",
"koi8-u" => "KOI8-U",
"gbk" => "GBK",
"big5" => "Big5",
"euc-jp" => "EUC-JP",
"iso-2022-jp" => "ISO-2022-JP",
"shift_jis" => "Shift_JIS",
"euc-kr" => "EUC-KR",
"utf-16be" => "UTF-16BE",
"utf-16le" => "UTF-16LE",
name => name
})
}
// https://dom.spec.whatwg.org/#dom-document-charset
fn Charset(&self) -> DOMString {
self.CharacterSet()
}
// https://dom.spec.whatwg.org/#dom-document-inputencoding
fn InputEncoding(&self) -> DOMString {
self.CharacterSet()
}
// https://dom.spec.whatwg.org/#dom-document-content_type
fn ContentType(&self) -> DOMString {
self.content_type.clone()
}
// https://dom.spec.whatwg.org/#dom-document-doctype
fn GetDoctype(&self) -> Option<Root<DocumentType>> {
self.upcast::<Node>().children().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-document-documentelement
fn GetDocumentElement(&self) -> Option<Root<Element>> {
self.upcast::<Node>().child_elements().next()
}
// https://dom.spec.whatwg.org/#dom-document-getelementsbytagname
fn GetElementsByTagName(&self, qualified_name: DOMString) -> Root<HTMLCollection> {
let qualified_name = LocalName::from(&*qualified_name);
match self.tag_map.borrow_mut().entry(qualified_name.clone()) {
Occupied(entry) => Root::from_ref(entry.get()),
Vacant(entry) => {
let result = HTMLCollection::by_qualified_name(
&self.window, self.upcast(), qualified_name);
entry.insert(JS::from_ref(&*result));
result
}
}
}
// https://dom.spec.whatwg.org/#dom-document-getelementsbytagnamens
fn GetElementsByTagNameNS(&self,
maybe_ns: Option<DOMString>,
tag_name: DOMString)
-> Root<HTMLCollection> {
let ns = namespace_from_domstring(maybe_ns);
let local = LocalName::from(tag_name);
let qname = QualName::new(None, ns, local);
match self.tagns_map.borrow_mut().entry(qname.clone()) {
Occupied(entry) => Root::from_ref(entry.get()),
Vacant(entry) => {
let result = HTMLCollection::by_qual_tag_name(&self.window, self.upcast(), qname);
entry.insert(JS::from_ref(&*result));
result
}
}
}
// https://dom.spec.whatwg.org/#dom-document-getelementsbyclassname
fn GetElementsByClassName(&self, classes: DOMString) -> Root<HTMLCollection> {
let class_atoms: Vec<Atom> = split_html_space_chars(&classes)
.map(Atom::from)
.collect();
match self.classes_map.borrow_mut().entry(class_atoms.clone()) {
Occupied(entry) => Root::from_ref(entry.get()),
Vacant(entry) => {
let result = HTMLCollection::by_atomic_class_name(&self.window,
self.upcast(),
class_atoms);
entry.insert(JS::from_ref(&*result));
result
}
}
}
// https://dom.spec.whatwg.org/#dom-nonelementparentnode-getelementbyid
fn GetElementById(&self, id: DOMString) -> Option<Root<Element>> {
self.get_element_by_id(&Atom::from(id))
}
// https://dom.spec.whatwg.org/#dom-document-createelement
fn CreateElement(&self,
mut local_name: DOMString,
options: &ElementCreationOptions)
-> Fallible<Root<Element>> {
if xml_name_type(&local_name) == InvalidXMLName {
debug!("Not a valid element name");
return Err(Error::InvalidCharacter);
}
if self.is_html_document {
local_name.make_ascii_lowercase();
}
let ns = if self.is_html_document || self.content_type == "application/xhtml+xml" {
ns!(html)
} else {
ns!()
};
let name = QualName::new(None, ns, LocalName::from(local_name));
let is = options.is.as_ref().map(|is| LocalName::from(&**is));
Ok(Element::create(name, is, self, ElementCreator::ScriptCreated, CustomElementCreationMode::Synchronous))
}
// https://dom.spec.whatwg.org/#dom-document-createelementns
fn CreateElementNS(&self,
namespace: Option<DOMString>,
qualified_name: DOMString,
options: &ElementCreationOptions)
-> Fallible<Root<Element>> {
let (namespace, prefix, local_name) = validate_and_extract(namespace,
&qualified_name)?;
let name = QualName::new(prefix, namespace, local_name);
let is = options.is.as_ref().map(|is| LocalName::from(&**is));
Ok(Element::create(name, is, self, ElementCreator::ScriptCreated, CustomElementCreationMode::Synchronous))
}
// https://dom.spec.whatwg.org/#dom-document-createattribute
fn CreateAttribute(&self, mut local_name: DOMString) -> Fallible<Root<Attr>> {
if xml_name_type(&local_name) == InvalidXMLName {
debug!("Not a valid element name");
return Err(Error::InvalidCharacter);
}
if self.is_html_document {
local_name.make_ascii_lowercase();
}
let name = LocalName::from(local_name);
let value = AttrValue::String("".to_owned());
Ok(Attr::new(&self.window, name.clone(), value, name, ns!(), None, None))
}
// https://dom.spec.whatwg.org/#dom-document-createattributens
fn CreateAttributeNS(&self,
namespace: Option<DOMString>,
qualified_name: DOMString)
-> Fallible<Root<Attr>> {
let (namespace, prefix, local_name) = validate_and_extract(namespace,
&qualified_name)?;
let value = AttrValue::String("".to_owned());
let qualified_name = LocalName::from(qualified_name);
Ok(Attr::new(&self.window,
local_name,
value,
qualified_name,
namespace,
prefix,
None))
}
// https://dom.spec.whatwg.org/#dom-document-createdocumentfragment
fn CreateDocumentFragment(&self) -> Root<DocumentFragment> {
DocumentFragment::new(self)
}
// https://dom.spec.whatwg.org/#dom-document-createtextnode
fn CreateTextNode(&self, data: DOMString) -> Root<Text> {
Text::new(data, self)
}
// https://dom.spec.whatwg.org/#dom-document-createcomment
fn CreateComment(&self, data: DOMString) -> Root<Comment> {
Comment::new(data, self)
}
// https://dom.spec.whatwg.org/#dom-document-createprocessinginstruction
fn CreateProcessingInstruction(&self,
target: DOMString,
data: DOMString)
-> Fallible<Root<ProcessingInstruction>> {
// Step 1.
if xml_name_type(&target) == InvalidXMLName {
return Err(Error::InvalidCharacter);
}
// Step 2.
if data.contains("?>") {
return Err(Error::InvalidCharacter);
}
// Step 3.
Ok(ProcessingInstruction::new(target, data, self))
}
// https://dom.spec.whatwg.org/#dom-document-importnode
fn ImportNode(&self, node: &Node, deep: bool) -> Fallible<Root<Node>> {
// Step 1.
if node.is::<Document>() {
return Err(Error::NotSupported);
}
// Step 2.
let clone_children = if deep {
CloneChildrenFlag::CloneChildren
} else {
CloneChildrenFlag::DoNotCloneChildren
};
Ok(Node::clone(node, Some(self), clone_children))
}
// https://dom.spec.whatwg.org/#dom-document-adoptnode
fn AdoptNode(&self, node: &Node) -> Fallible<Root<Node>> {
// Step 1.
if node.is::<Document>() {
return Err(Error::NotSupported);
}
// Step 2.
Node::adopt(node, self);
// Step 3.
Ok(Root::from_ref(node))
}
// https://dom.spec.whatwg.org/#dom-document-createevent
fn CreateEvent(&self, mut interface: DOMString) -> Fallible<Root<Event>> {
interface.make_ascii_lowercase();
match &*interface {
"beforeunloadevent" =>
Ok(Root::upcast(BeforeUnloadEvent::new_uninitialized(&self.window))),
"closeevent" =>
Ok(Root::upcast(CloseEvent::new_uninitialized(self.window.upcast()))),
"customevent" =>
Ok(Root::upcast(CustomEvent::new_uninitialized(self.window.upcast()))),
"errorevent" =>
Ok(Root::upcast(ErrorEvent::new_uninitialized(self.window.upcast()))),
"events" | "event" | "htmlevents" | "svgevents" =>
Ok(Event::new_uninitialized(&self.window.upcast())),
"focusevent" =>
Ok(Root::upcast(FocusEvent::new_uninitialized(&self.window))),
"hashchangeevent" =>
Ok(Root::upcast(HashChangeEvent::new_uninitialized(&self.window))),
"keyboardevent" =>
Ok(Root::upcast(KeyboardEvent::new_uninitialized(&self.window))),
"messageevent" =>
Ok(Root::upcast(MessageEvent::new_uninitialized(self.window.upcast()))),
"mouseevent" | "mouseevents" =>
Ok(Root::upcast(MouseEvent::new_uninitialized(&self.window))),
"pagetransitionevent" =>
Ok(Root::upcast(PageTransitionEvent::new_uninitialized(&self.window))),
"popstateevent" =>
Ok(Root::upcast(PopStateEvent::new_uninitialized(&self.window))),
"progressevent" =>
Ok(Root::upcast(ProgressEvent::new_uninitialized(self.window.upcast()))),
"storageevent" => {
Ok(Root::upcast(StorageEvent::new_uninitialized(&self.window, "".into())))
},
"touchevent" =>
Ok(Root::upcast(
TouchEvent::new_uninitialized(&self.window,
&TouchList::new(&self.window, &[]),
&TouchList::new(&self.window, &[]),
&TouchList::new(&self.window, &[]),
)
)),
"uievent" | "uievents" =>
Ok(Root::upcast(UIEvent::new_uninitialized(&self.window))),
"webglcontextevent" =>
Ok(Root::upcast(WebGLContextEvent::new_uninitialized(&self.window))),
_ =>
Err(Error::NotSupported),
}
}
// https://html.spec.whatwg.org/multipage/#dom-document-lastmodified
fn LastModified(&self) -> DOMString {
match self.last_modified {
Some(ref t) => DOMString::from(t.clone()),
None => DOMString::from(time::now().strftime("%m/%d/%Y %H:%M:%S").unwrap().to_string()),
}
}
// https://dom.spec.whatwg.org/#dom-document-createrange
fn CreateRange(&self) -> Root<Range> {
Range::new_with_doc(self)
}
// https://dom.spec.whatwg.org/#dom-document-createnodeiteratorroot-whattoshow-filter
fn CreateNodeIterator(&self,
root: &Node,
what_to_show: u32,
filter: Option<Rc<NodeFilter>>)
-> Root<NodeIterator> {
NodeIterator::new(self, root, what_to_show, filter)
}
// https://w3c.github.io/touch-events/#idl-def-Document
fn CreateTouch(&self,
window: &Window,
target: &EventTarget,
identifier: i32,
page_x: Finite<f64>,
page_y: Finite<f64>,
screen_x: Finite<f64>,
screen_y: Finite<f64>)
-> Root<Touch> {
let client_x = Finite::wrap(*page_x - window.PageXOffset() as f64);
let client_y = Finite::wrap(*page_y - window.PageYOffset() as f64);
Touch::new(window,
identifier,
target,
screen_x,
screen_y,
client_x,
client_y,
page_x,
page_y)
}
// https://w3c.github.io/touch-events/#idl-def-document-createtouchlist(touch...)
fn CreateTouchList(&self, touches: &[&Touch]) -> Root<TouchList> {
TouchList::new(&self.window, &touches)
}
// https://dom.spec.whatwg.org/#dom-document-createtreewalker
fn CreateTreeWalker(&self,
root: &Node,
what_to_show: u32,
filter: Option<Rc<NodeFilter>>)
-> Root<TreeWalker> {
TreeWalker::new(self, root, what_to_show, filter)
}
// https://html.spec.whatwg.org/multipage/#document.title
fn Title(&self) -> DOMString {
let title = self.GetDocumentElement().and_then(|root| {
if root.namespace() == &ns!(svg) && root.local_name() == &local_name!("svg") {
// Step 1.
root.upcast::<Node>()
.child_elements()
.find(|node| {
node.namespace() == &ns!(svg) && node.local_name() == &local_name!("title")
})
.map(Root::upcast::<Node>)
} else {
// Step 2.
root.upcast::<Node>()
.traverse_preorder()
.find(|node| node.is::<HTMLTitleElement>())
}
});
match title {
None => DOMString::new(),
Some(ref title) => {
// Steps 3-4.
let value = title.child_text_content();
DOMString::from(str_join(split_html_space_chars(&value), " "))
},
}
}
// https://html.spec.whatwg.org/multipage/#document.title
fn SetTitle(&self, title: DOMString) {
let root = match self.GetDocumentElement() {
Some(root) => root,
None => return,
};
let elem = if root.namespace() == &ns!(svg) && root.local_name() == &local_name!("svg") {
let elem = root.upcast::<Node>().child_elements().find(|node| {
node.namespace() == &ns!(svg) && node.local_name() == &local_name!("title")
});
match elem {
Some(elem) => Root::upcast::<Node>(elem),
None => {
let name = QualName::new(None, ns!(svg), local_name!("title"));
let elem = Element::create(name,
None,
self,
ElementCreator::ScriptCreated,
CustomElementCreationMode::Synchronous);
let parent = root.upcast::<Node>();
let child = elem.upcast::<Node>();
parent.InsertBefore(child, parent.GetFirstChild().r())
.unwrap()
}
}
} else if root.namespace() == &ns!(html) {
let elem = root.upcast::<Node>()
.traverse_preorder()
.find(|node| node.is::<HTMLTitleElement>());
match elem {
Some(elem) => elem,
None => {
match self.GetHead() {
Some(head) => {
let name = QualName::new(None, ns!(html), local_name!("title"));
let elem = Element::create(name,
None,
self,
ElementCreator::ScriptCreated,
CustomElementCreationMode::Synchronous);
head.upcast::<Node>()
.AppendChild(elem.upcast())
.unwrap()
},
None => return,
}
}
}
} else {
return;
};
elem.SetTextContent(Some(title));
}
// https://html.spec.whatwg.org/multipage/#dom-document-head
fn GetHead(&self) -> Option<Root<HTMLHeadElement>> {
self.get_html_element()
.and_then(|root| root.upcast::<Node>().children().filter_map(Root::downcast).next())
}
// https://html.spec.whatwg.org/multipage/#dom-document-currentscript
fn GetCurrentScript(&self) -> Option<Root<HTMLScriptElement>> {
self.current_script.get()
}
// https://html.spec.whatwg.org/multipage/#dom-document-body
fn GetBody(&self) -> Option<Root<HTMLElement>> {
self.get_html_element().and_then(|root| {
let node = root.upcast::<Node>();
node.children().find(|child| {
match child.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLFrameSetElement)) => true,
_ => false
}
}).map(|node| Root::downcast(node).unwrap())
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-body
fn SetBody(&self, new_body: Option<&HTMLElement>) -> ErrorResult {
// Step 1.
let new_body = match new_body {
Some(new_body) => new_body,
None => return Err(Error::HierarchyRequest),
};
let node = new_body.upcast::<Node>();
match node.type_id() {
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)) |
NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLFrameSetElement)) => {}
_ => return Err(Error::HierarchyRequest),
}
// Step 2.
let old_body = self.GetBody();
if old_body.r() == Some(new_body) {
return Ok(());
}
match (self.get_html_element(), &old_body) {
// Step 3.
(Some(ref root), &Some(ref child)) => {
let root = root.upcast::<Node>();
root.ReplaceChild(new_body.upcast(), child.upcast()).unwrap();
},
// Step 4.
(None, _) => return Err(Error::HierarchyRequest),
// Step 5.
(Some(ref root), &None) => {
let root = root.upcast::<Node>();
root.AppendChild(new_body.upcast()).unwrap();
}
}
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-getelementsbyname
fn GetElementsByName(&self, name: DOMString) -> Root<NodeList> {
self.create_node_list(|node| {
let element = match node.downcast::<Element>() {
Some(element) => element,
None => return false,
};
if element.namespace() != &ns!(html) {
return false;
}
element.get_attribute(&ns!(), &local_name!("name"))
.map_or(false, |attr| &**attr.value() == &*name)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-images
fn Images(&self) -> Root<HTMLCollection> {
self.images.or_init(|| {
let filter = box ImagesFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-embeds
fn Embeds(&self) -> Root<HTMLCollection> {
self.embeds.or_init(|| {
let filter = box EmbedsFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-plugins
fn Plugins(&self) -> Root<HTMLCollection> {
self.Embeds()
}
// https://html.spec.whatwg.org/multipage/#dom-document-links
fn Links(&self) -> Root<HTMLCollection> {
self.links.or_init(|| {
let filter = box LinksFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-forms
fn Forms(&self) -> Root<HTMLCollection> {
self.forms.or_init(|| {
let filter = box FormsFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-scripts
fn Scripts(&self) -> Root<HTMLCollection> {
self.scripts.or_init(|| {
let filter = box ScriptsFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-anchors
fn Anchors(&self) -> Root<HTMLCollection> {
self.anchors.or_init(|| {
let filter = box AnchorsFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-applets
fn Applets(&self) -> Root<HTMLCollection> {
// FIXME: This should be return OBJECT elements containing applets.
self.applets.or_init(|| {
let filter = box AppletsFilter;
HTMLCollection::create(&self.window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-document-location
fn GetLocation(&self) -> Option<Root<Location>> {
if self.is_fully_active() {
Some(self.window.Location())
} else {
None
}
}
// https://dom.spec.whatwg.org/#dom-parentnode-children
fn Children(&self) -> Root<HTMLCollection> {
HTMLCollection::children(&self.window, self.upcast())
}
// https://dom.spec.whatwg.org/#dom-parentnode-firstelementchild
fn GetFirstElementChild(&self) -> Option<Root<Element>> {
self.upcast::<Node>().child_elements().next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-lastelementchild
fn GetLastElementChild(&self) -> Option<Root<Element>> {
self.upcast::<Node>().rev_children().filter_map(Root::downcast).next()
}
// https://dom.spec.whatwg.org/#dom-parentnode-childelementcount
fn ChildElementCount(&self) -> u32 {
self.upcast::<Node>().child_elements().count() as u32
}
// https://dom.spec.whatwg.org/#dom-parentnode-prepend
fn Prepend(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().prepend(nodes)
}
// https://dom.spec.whatwg.org/#dom-parentnode-append
fn Append(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
self.upcast::<Node>().append(nodes)
}
// https://dom.spec.whatwg.org/#dom-parentnode-queryselector
fn QuerySelector(&self, selectors: DOMString) -> Fallible<Option<Root<Element>>> {
let root = self.upcast::<Node>();
root.query_selector(selectors)
}
// https://dom.spec.whatwg.org/#dom-parentnode-queryselectorall
fn QuerySelectorAll(&self, selectors: DOMString) -> Fallible<Root<NodeList>> {
let root = self.upcast::<Node>();
root.query_selector_all(selectors)
}
// https://html.spec.whatwg.org/multipage/#dom-document-readystate
fn ReadyState(&self) -> DocumentReadyState {
self.ready_state.get()
}
// https://html.spec.whatwg.org/multipage/#dom-document-defaultview
fn GetDefaultView(&self) -> Option<Root<Window>> {
if self.has_browsing_context {
Some(Root::from_ref(&*self.window))
} else {
None
}
}
// https://html.spec.whatwg.org/multipage/#dom-document-cookie
fn GetCookie(&self) -> Fallible<DOMString> {
if self.is_cookie_averse() {
return Ok(DOMString::new());
}
if !self.origin.is_tuple() {
return Err(Error::Security);
}
let url = self.url();
let (tx, rx) = ipc::channel().unwrap();
let _ = self.window
.upcast::<GlobalScope>()
.resource_threads()
.send(GetCookiesForUrl(url, tx, NonHTTP));
let cookies = rx.recv().unwrap();
Ok(cookies.map_or(DOMString::new(), DOMString::from))
}
// https://html.spec.whatwg.org/multipage/#dom-document-cookie
fn SetCookie(&self, cookie: DOMString) -> ErrorResult {
if self.is_cookie_averse() {
return Ok(());
}
if !self.origin.is_tuple() {
return Err(Error::Security);
}
if let Ok(cookie_header) = SetCookie::parse_header(&vec![cookie.to_string().into_bytes()]) {
let cookies = cookie_header.0.into_iter().filter_map(|cookie| {
cookie_rs::Cookie::parse(cookie).ok().map(Serde)
}).collect();
let _ = self.window
.upcast::<GlobalScope>()
.resource_threads()
.send(SetCookiesForUrl(self.url(), cookies, NonHTTP));
}
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-bgcolor
fn BgColor(&self) -> DOMString {
self.get_body_attribute(&local_name!("bgcolor"))
}
// https://html.spec.whatwg.org/multipage/#dom-document-bgcolor
fn SetBgColor(&self, value: DOMString) {
self.set_body_attribute(&local_name!("bgcolor"), value)
}
// https://html.spec.whatwg.org/multipage/#dom-document-fgcolor
fn FgColor(&self) -> DOMString {
self.get_body_attribute(&local_name!("text"))
}
// https://html.spec.whatwg.org/multipage/#dom-document-fgcolor
fn SetFgColor(&self, value: DOMString) {
self.set_body_attribute(&local_name!("text"), value)
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-tree-accessors:dom-document-nameditem-filter
// Named getter for `document.<name>`. Returns a raw JS object pointer:
// the element itself when exactly one match exists, otherwise a live
// HTMLCollection of all matches; None when there are no matches.
unsafe fn NamedGetter(&self, _cx: *mut JSContext, name: DOMString) -> Option<NonZero<*mut JSObject>> {
    #[derive(HeapSizeOf, JSTraceable)]
    struct NamedElementFilter {
        name: Atom,
    }
    impl CollectionFilter for NamedElementFilter {
        fn filter(&self, elem: &Element, _root: &Node) -> bool {
            filter_by_name(&self.name, elem.upcast())
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-document-nameditem-filter
    // An element matches when it is an <applet>/<form>/<img> whose `name`
    // (or, for applet/img, `id`) attribute equals the requested name.
    fn filter_by_name(name: &Atom, node: &Node) -> bool {
        let html_elem_type = match node.type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(type_)) => type_,
            _ => return false,
        };
        let elem = match node.downcast::<Element>() {
            Some(elem) => elem,
            None => return false,
        };
        match html_elem_type {
            HTMLElementTypeId::HTMLAppletElement => {
                // Matches by `name`, falling back to `id`.
                match elem.get_attribute(&ns!(), &local_name!("name")) {
                    Some(ref attr) if attr.value().as_atom() == name => true,
                    _ => {
                        match elem.get_attribute(&ns!(), &local_name!("id")) {
                            Some(ref attr) => attr.value().as_atom() == name,
                            None => false,
                        }
                    },
                }
            },
            HTMLElementTypeId::HTMLFormElement => {
                // Forms match only by `name`.
                match elem.get_attribute(&ns!(), &local_name!("name")) {
                    Some(ref attr) => attr.value().as_atom() == name,
                    None => false,
                }
            },
            HTMLElementTypeId::HTMLImageElement => {
                // Images match by `name`; an image with a `name` may also
                // match by `id`.
                match elem.get_attribute(&ns!(), &local_name!("name")) {
                    Some(ref attr) => {
                        if attr.value().as_atom() == name {
                            true
                        } else {
                            match elem.get_attribute(&ns!(), &local_name!("id")) {
                                Some(ref attr) => attr.value().as_atom() == name,
                                None => false,
                            }
                        }
                    },
                    None => false,
                }
            },
            // TODO: Handle <embed>, <iframe> and <object>.
            _ => false,
        }
    }
    let name = Atom::from(name);
    let root = self.upcast::<Node>();
    {
        // Step 1: scan once; a unique match is returned directly.
        let mut elements = root.traverse_preorder()
                               .filter(|node| filter_by_name(&name, &node))
                               .peekable();
        if let Some(first) = elements.next() {
            if elements.peek().is_none() {
                // TODO: Step 2.
                // Step 3.
                return Some(NonZero::new_unchecked(first.reflector().get_jsobject().get()));
            }
        } else {
            return None;
        }
    }
    // Step 4: multiple matches — return a live collection instead.
    let filter = NamedElementFilter {
        name: name,
    };
    let collection = HTMLCollection::create(self.window(), root, box filter);
    Some(NonZero::new_unchecked(collection.reflector().get_jsobject().get()))
}
// https://html.spec.whatwg.org/multipage/#dom-tree-accessors:supported-property-names
fn SupportedPropertyNames(&self) -> Vec<DOMString> {
    // FIXME: unimplemented (https://github.com/servo/servo/issues/7273)
    vec![]
}
// https://html.spec.whatwg.org/multipage/#dom-document-clear
// The following three legacy methods are specified to be no-ops.
fn Clear(&self) {
    // This method intentionally does nothing
}
// https://html.spec.whatwg.org/multipage/#dom-document-captureevents
fn CaptureEvents(&self) {
    // This method intentionally does nothing
}
// https://html.spec.whatwg.org/multipage/#dom-document-releaseevents
fn ReleaseEvents(&self) {
    // This method intentionally does nothing
}
// https://html.spec.whatwg.org/multipage/#globaleventhandlers
// Expands to the full set of on* event handler IDL attributes.
global_event_handlers!();
// https://html.spec.whatwg.org/multipage/#handler-onreadystatechange
event_handler!(readystatechange, GetOnreadystatechange, SetOnreadystatechange);
#[allow(unsafe_code)]
// https://drafts.csswg.org/cssom-view/#dom-document-elementfrompoint
fn ElementFromPoint(&self, x: Finite<f64>, y: Finite<f64>) -> Option<Root<Element>> {
    let x = *x as f32;
    let y = *y as f32;
    let point = &Point2D::new(x, y);
    let window = window_from_node(self);
    let viewport = window.window_size().unwrap().initial_viewport;
    // No browsing context: nothing to hit-test against.
    if self.browsing_context().is_none() {
        return None;
    }
    // Points outside the viewport return null.
    if x < 0.0 || y < 0.0 || x > viewport.width || y > viewport.height {
        return None;
    }
    match self.window.hit_test_query(*point, false) {
        Some(untrusted_node_address) => {
            // Resolve the untrusted address from layout back to a rooted
            // DOM node on this script thread.
            let js_runtime = unsafe { JS_GetRuntime(window.get_cx()) };
            let node = unsafe {
                node::from_untrusted_node_address(js_runtime, untrusted_node_address)
            };
            // If the hit node is not an element (e.g. a text node), fall
            // back to its parent element.
            // NOTE(review): GetParentNode().unwrap() assumes a hit node
            // always has a parent — confirm this holds for all hit-test
            // results.
            let parent_node = node.GetParentNode().unwrap();
            let element_ref = node.downcast::<Element>().unwrap_or_else(|| {
                parent_node.downcast::<Element>().unwrap()
            });
            Some(Root::from_ref(element_ref))
        },
        // No hit: return the document element per spec.
        None => self.GetDocumentElement()
    }
}
#[allow(unsafe_code)]
// https://drafts.csswg.org/cssom-view/#dom-document-elementsfrompoint
fn ElementsFromPoint(&self, x: Finite<f64>, y: Finite<f64>) -> Vec<Root<Element>> {
    let x = *x as f32;
    let y = *y as f32;
    let point = &Point2D::new(x, y);
    let window = window_from_node(self);
    let viewport = window.window_size().unwrap().initial_viewport;
    // No browsing context: empty result.
    if self.browsing_context().is_none() {
        return vec!();
    }
    // Step 2: points outside the viewport yield an empty sequence.
    if x < 0.0 || y < 0.0 || x > viewport.width || y > viewport.height {
        return vec!();
    }
    let js_runtime = unsafe { JS_GetRuntime(window.get_cx()) };
    // Step 1 and Step 3: resolve each untrusted hit-test address to a DOM
    // node, keeping only those that are elements.
    let mut elements: Vec<Root<Element>> = self.nodes_from_point(point).iter()
        .flat_map(|&untrusted_node_address| {
            let node = unsafe {
                node::from_untrusted_node_address(js_runtime, untrusted_node_address)
            };
            Root::downcast::<Element>(node)
        }).collect();
    // Step 4: ensure the root element terminates the list.
    if let Some(root_element) = self.GetDocumentElement() {
        if elements.last() != Some(&root_element) {
            elements.push(root_element);
        }
    }
    // Step 5
    elements
}
// https://html.spec.whatwg.org/multipage/#dom-document-open
// Resets this document and starts a fresh script-created parser; `type_`
// selects the MIME type ("replace" forces text/html) and `replace`
// ("replace", case-insensitive) suppresses the new history entry.
fn Open(&self, type_: DOMString, replace: DOMString) -> Fallible<Root<Document>> {
    if !self.is_html_document() {
        // Step 1: open() is only defined for HTML documents.
        return Err(Error::InvalidState);
    }
    // Step 2.
    // TODO: handle throw-on-dynamic-markup-insertion counter.
    if !self.is_active() {
        // Step 3: inactive documents ignore the call.
        return Ok(Root::from_ref(self));
    }
    let entry_responsible_document = GlobalScope::entry().as_window().Document();
    // This check is same-origin not same-origin-domain.
    // https://github.com/whatwg/html/issues/2282
    // https://github.com/whatwg/html/pull/2288
    if !self.origin.same_origin(&entry_responsible_document.origin) {
        // Step 4.
        return Err(Error::Security);
    }
    if self.get_current_parser().map_or(false, |parser| parser.script_nesting_level() > 0) {
        // Step 5: no-op while a parser-inserted script is running.
        return Ok(Root::from_ref(self));
    }
    // Step 6.
    // TODO: ignore-opens-during-unload counter check.
    // Step 7: first argument already bound to `type_`.
    // Step 8.
    // TODO: check session history's state.
    let replace = replace.eq_ignore_ascii_case("replace");
    // Step 9.
    // TODO: salvageable flag.
    // Step 10.
    // TODO: prompt to unload.
    window_from_node(self).set_navigation_start();
    // Step 11.
    // TODO: unload.
    // Step 12: abort any in-flight loads and the current parser.
    self.abort();
    // Step 13: strip every event listener in the tree.
    for node in self.upcast::<Node>().traverse_preorder() {
        node.upcast::<EventTarget>().remove_all_listeners();
    }
    // Step 14.
    // TODO: remove any tasks associated with the Document in any task source.
    // Step 15: remove all children of the document node.
    Node::replace_all(None, self.upcast::<Node>());
    // Steps 16-18.
    // Let's not?
    // TODO: https://github.com/whatwg/html/issues/1698
    // Step 19: drop every cached lazily-created object and per-document
    // state so the document behaves as freshly created.
    self.implementation.set(None);
    self.images.set(None);
    self.embeds.set(None);
    self.links.set(None);
    self.forms.set(None);
    self.scripts.set(None);
    self.anchors.set(None);
    self.applets.set(None);
    *self.stylesheets.borrow_mut() = StylesheetSet::new();
    self.animation_frame_ident.set(0);
    self.animation_frame_list.borrow_mut().clear();
    self.pending_restyles.borrow_mut().clear();
    self.target_element.set(None);
    *self.last_click_info.borrow_mut() = None;
    // Step 20.
    self.set_encoding(UTF_8);
    // Step 21.
    // TODO: reload override buffer.
    // Step 22.
    // TODO: salvageable flag.
    let url = entry_responsible_document.url();
    // Step 23: the document inherits the entry document's URL.
    self.set_url(url.clone());
    // Step 24.
    // TODO: mute iframe load.
    // Step 27: normalize the type argument — "replace" means text/html,
    // and any MIME parameters after ';' are stripped.
    let type_ = if type_.eq_ignore_ascii_case("replace") {
        "text/html"
    } else if let Some(position) = type_.find(';') {
        &type_[0..position]
    } else {
        &*type_
    };
    let type_ = type_.trim_matches(HTML_SPACE_CHARACTERS);
    // Step 25: fresh loader, then start a script-created parser.
    let resource_threads =
        self.window.upcast::<GlobalScope>().resource_threads().clone();
    *self.loader.borrow_mut() =
        DocumentLoader::new_with_threads(resource_threads, Some(url.clone()));
    ServoParser::parse_html_script_input(self, url, type_);
    // Step 26.
    self.ready_state.set(DocumentReadyState::Interactive);
    // Step 28 is handled when creating the parser in step 25.
    // Step 29.
    // TODO: truncate session history.
    // Step 30.
    // TODO: remove history traversal tasks.
    // Step 31.
    // TODO: remove earlier entries.
    if !replace {
        // Step 32.
        // TODO: add history entry.
    }
    // Step 33.
    // TODO: clear fired unload flag.
    // Step 34 is handled when creating the parser in step 25.
    // Step 35.
    Ok(Root::from_ref(self))
}
// https://html.spec.whatwg.org/multipage/#dom-document-write
fn Write(&self, text: Vec<DOMString>) -> ErrorResult {
if !self.is_html_document() {
// Step 1.
return Err(Error::InvalidState);
}
// Step 2.
// TODO: handle throw-on-dynamic-markup-insertion counter.
if !self.is_active() {
// Step 3.
return Ok(());
}
let parser = match self.get_current_parser() {
Some(ref parser) if parser.can_write() => Root::from_ref(&**parser),
_ => {
// Either there is no parser, which means the parsing ended;
// or script nesting level is 0, which means the method was
// called from outside a parser-executed script.
if self.ignore_destructive_writes_counter.get() > 0 {
// Step 4.
// TODO: handle ignore-opens-during-unload counter.
return Ok(());
}
// Step 5.
self.Open("text/html".into(), "".into())?;
self.get_current_parser().unwrap()
}
};
// Step 7.
// TODO: handle reload override buffer.
// Steps 6-8.
parser.write(text);
// Step 9.
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-writeln
fn Writeln(&self, mut text: Vec<DOMString>) -> ErrorResult {
text.push("\n".into());
self.Write(text)
}
// https://html.spec.whatwg.org/multipage/#dom-document-close
fn Close(&self) -> ErrorResult {
if !self.is_html_document() {
// Step 1.
return Err(Error::InvalidState);
}
// Step 2.
// TODO: handle throw-on-dynamic-markup-insertion counter.
let parser = match self.get_current_parser() {
Some(ref parser) if parser.is_script_created() => Root::from_ref(&**parser),
_ => {
// Step 3.
return Ok(());
}
};
// Step 4-6.
parser.close();
Ok(())
}
// https://html.spec.whatwg.org/multipage/#documentandelementeventhandlers
document_and_element_event_handlers!();
// https://fullscreen.spec.whatwg.org/#handler-document-onfullscreenerror
event_handler!(fullscreenerror, GetOnfullscreenerror, SetOnfullscreenerror);
// https://fullscreen.spec.whatwg.org/#handler-document-onfullscreenchange
event_handler!(fullscreenchange, GetOnfullscreenchange, SetOnfullscreenchange);
// https://fullscreen.spec.whatwg.org/#dom-document-fullscreenenabled
fn FullscreenEnabled(&self) -> bool {
self.get_allow_fullscreen()
}
// https://fullscreen.spec.whatwg.org/#dom-document-fullscreen
fn Fullscreen(&self) -> bool {
self.fullscreen_element.get().is_some()
}
// https://fullscreen.spec.whatwg.org/#dom-document-fullscreenelement
fn GetFullscreenElement(&self) -> Option<Root<Element>> {
// TODO ShadowRoot
self.fullscreen_element.get()
}
#[allow(unrooted_must_root)]
// https://fullscreen.spec.whatwg.org/#dom-document-exitfullscreen
fn ExitFullscreen(&self) -> Rc<Promise> {
self.exit_fullscreen()
}
}
fn update_with_current_time_ms(marker: &Cell<u64>) {
if marker.get() == Default::default() {
let time = time::get_time();
let current_time_ms = time.sec * 1000 + time.nsec as i64 / 1000000;
marker.set(current_time_ms as u64);
}
}
/// https://w3c.github.io/webappsec-referrer-policy/#determine-policy-for-token
pub fn determine_policy_for_token(token: &str) -> Option<ReferrerPolicy> {
match_ignore_ascii_case! { token,
"never" | "no-referrer" => Some(ReferrerPolicy::NoReferrer),
"default" | "no-referrer-when-downgrade" => Some(ReferrerPolicy::NoReferrerWhenDowngrade),
"origin" => Some(ReferrerPolicy::Origin),
"same-origin" => Some(ReferrerPolicy::SameOrigin),
"strict-origin" => Some(ReferrerPolicy::StrictOrigin),
"strict-origin-when-cross-origin" => Some(ReferrerPolicy::StrictOriginWhenCrossOrigin),
"origin-when-cross-origin" => Some(ReferrerPolicy::OriginWhenCrossOrigin),
"always" | "unsafe-url" => Some(ReferrerPolicy::UnsafeUrl),
"" => Some(ReferrerPolicy::NoReferrer),
_ => None,
}
}
/// Specifies the type of focus event that is sent to a pipeline
#[derive(Clone, Copy, PartialEq)]
pub enum FocusType {
Element, // The first focus message - focus the element itself
Parent, // Focusing a parent element (an iframe)
}
/// Focus events
pub enum FocusEventType {
Focus, // Element gained focus. Doesn't bubble.
Blur, // Element lost focus. Doesn't bubble.
}
/// A fake `requestAnimationFrame()` callback—"fake" because it is not triggered by the video
/// refresh but rather a simple timer.
///
/// If the page is observed to be using `requestAnimationFrame()` for non-animation purposes (i.e.
/// without mutating the DOM), then we fall back to simple timeouts to save energy over video
/// refresh.
#[derive(HeapSizeOf, JSTraceable)]
pub struct FakeRequestAnimationFrameCallback {
/// The document.
#[ignore_heap_size_of = "non-owning"]
document: Trusted<Document>,
}
impl FakeRequestAnimationFrameCallback {
pub fn invoke(self) {
let document = self.document.root();
document.run_the_animation_frame_callbacks();
}
}
#[derive(HeapSizeOf, JSTraceable)]
pub enum AnimationFrameCallback {
DevtoolsFramerateTick { actor_name: String },
FrameRequestCallback {
#[ignore_heap_size_of = "Rc is hard"]
callback: Rc<FrameRequestCallback>
},
}
impl AnimationFrameCallback {
fn call(&self, document: &Document, now: f64) {
match *self {
AnimationFrameCallback::DevtoolsFramerateTick { ref actor_name } => {
let msg = ScriptToDevtoolsControlMsg::FramerateTick(actor_name.clone(), now);
let devtools_sender = document.window().upcast::<GlobalScope>().devtools_chan().unwrap();
devtools_sender.send(msg).unwrap();
}
AnimationFrameCallback::FrameRequestCallback { ref callback } => {
// TODO(jdm): The spec says that any exceptions should be suppressed:
// https://github.com/servo/servo/issues/6928
let _ = callback.Call__(Finite::wrap(now), ExceptionHandling::Report);
}
}
}
}
#[derive(Default, HeapSizeOf, JSTraceable)]
#[must_root]
struct PendingInOrderScriptVec {
scripts: DOMRefCell<VecDeque<PendingScript>>,
}
impl PendingInOrderScriptVec {
fn is_empty(&self) -> bool {
self.scripts.borrow().is_empty()
}
fn push(&self, element: &HTMLScriptElement) {
self.scripts.borrow_mut().push_back(PendingScript::new(element));
}
fn loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
let mut scripts = self.scripts.borrow_mut();
let entry = scripts.iter_mut().find(|entry| &*entry.element == element).unwrap();
entry.loaded(result);
}
fn take_next_ready_to_be_executed(&self) -> Option<(Root<HTMLScriptElement>, ScriptResult)> {
let mut scripts = self.scripts.borrow_mut();
let pair = scripts.front_mut().and_then(PendingScript::take_result);
if pair.is_none() {
return None;
}
scripts.pop_front();
pair
}
fn clear(&self) {
*self.scripts.borrow_mut() = Default::default();
}
}
#[derive(HeapSizeOf, JSTraceable)]
#[must_root]
struct PendingScript {
element: JS<HTMLScriptElement>,
load: Option<ScriptResult>,
}
impl PendingScript {
fn new(element: &HTMLScriptElement) -> Self {
Self { element: JS::from_ref(element), load: None }
}
fn new_with_load(element: &HTMLScriptElement, load: Option<ScriptResult>) -> Self {
Self { element: JS::from_ref(element), load }
}
fn loaded(&mut self, result: ScriptResult) {
assert!(self.load.is_none());
self.load = Some(result);
}
fn take_result(&mut self) -> Option<(Root<HTMLScriptElement>, ScriptResult)> {
self.load.take().map(|result| (Root::from_ref(&*self.element), result))
}
}<|fim▁end|> | |
<|file_name|>measurematrix.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
.. module:: measurematrix.py
.. moduleauthor:: Jozsef Attila Janko, Bence Takacs, Zoltan Siki (code optimalization)
Sample application of Ulyxes PyAPI to measure within a rectangular area
:param argv[1] (int): number of horizontal intervals (between measurements), default 1 (perimeter only)
:param argv[2] (int): number of vertical intervals(between measurements), default 1 (perimeter only)
:param argv[3] (sensor): 1100/1800/1200/5500, default 1100
:param argv[4] (port): serial port, default COM5
:param argv[5]: output file, default stdout
usage: python measurematrix.py 9 3 1100 COM5
"""
import re
import sys
sys.path.append('../pyapi/')
from angle import Angle
from serialiface import SerialIface
from totalstation import TotalStation
from echowriter import EchoWriter
from filewriter import FileWriter
from leicatps1200 import LeicaTPS1200
from leicatcra1100 import LeicaTCRA1100
from trimble5500 import Trimble5500
if __name__ == "__main__":
if sys.version_info[0] > 2: # Python 3 compatibility
raw_input = input
if len(sys.argv) == 1:
print("Usage: {0:s} horizontal_step vertical_step instrument port output_file".format(sys.argv[0]))
exit(1)
# set horizontal stepping interval dh_nr
dh_nr = 1
if len(sys.argv) > 1:
try:
dh_nr = int(sys.argv[1])
except ValueError:
print("invalid numeric value " + sys.argv[1])
sys.exit(1)
# set vertical stepping interval dv_nr
dv_nr = 1
if len(sys.argv) > 2:
try:
dv_nr = int(sys.argv[2])
except ValueError:
print("invalid numeric value " + sys.argv[2])
#sys.exit(1)
# set instrument
stationtype = '1100'
if len(sys.argv) > 3:
stationtype = sys.argv[3]
if re.search('120[0-9]$', stationtype):
mu = LeicaTPS1200()
elif re.search('110[0-9]$', stationtype):
mu = LeicaTCRA1100()
elif re.search('550[0-9]$', stationtype):
mu = Trimble5500()
else:
print("unsupported instrument type")
sys.exit(1)
# set port
port = '/dev/ttyUSB0'
if len(sys.argv) > 4:
port = sys.argv[4]
iface = SerialIface("test", port)
# set output file name
fn = None
if len(sys.argv) > 5:
fn = sys.argv[5]
# write out measurements
if fn:
wrt = FileWriter(angle='DEG', dist='.3f', fname=fn)<|fim▁hole|> if wrt.GetState() != wrt.WR_OK:
sys.exit(-1) # open error
ts = TotalStation(stationtype, mu, iface, wrt)
if isinstance(mu, Trimble5500):
print("Please change to reflectorless EDM mode (MNU 722 from keyboard)")
print("and turn on red laser (MNU 741 from keyboard) and press enter!")
raw_input()
else:
ts.SetATR(0) # turn ATR off
ts.SetEDMMode('RLSTANDARD') # reflectorless distance measurement
ts.SetRedLaser(1) # turn red laser on
w = raw_input("Target on lower left corner and press Enter")
w1 = ts.GetAngles()
w = raw_input("Target on upper right corner and press Enter")
w2 = ts.GetAngles()
dh = (w2['hz'].GetAngle() - w1['hz'].GetAngle()) / dh_nr
dv = (w2['v'].GetAngle() - w1['v'].GetAngle()) / dv_nr
# measurement loops
for i in range(dh_nr+1): # horizontal loop
measdir = i % 2 # check modulo
hz = Angle(w1['hz'].GetAngle() + i * dh, 'RAD')
for j in range(dv_nr+1): # vertical loop
if measdir == 0:
# move downward at odd steps to right
ts.Move(hz, Angle(w1['v'].GetAngle() + j * dv, 'RAD'))
else:
# move upward at event steps to right
ts.Move(hz, Angle(w2['v'].GetAngle() - j * dv, 'RAD'))
ts.Measure()
meas = ts.GetMeasure()
if ts.measureIface.state != ts.measureIface.IF_OK or 'errorCode' in meas:
print('FATAL Cannot measure point')<|fim▁end|> | else:
wrt = EchoWriter(angle='DEG', dist='.3f') |
<|file_name|>db.cc<|end_file_name|><|fim▁begin|>#include "db/db.h"
Database::Database() { this->records_tree_ = nullptr; }
void Database::Read(DatabaseReader &reader) {
this->records_tree_ = reader.ReadIndex();
}
Record *Database::GetRecordsTree() const { return this->records_tree_; }
<|fim▁hole|>}<|fim▁end|> | void Database::SetRecordsTree(Record *records_tree) {
this->records_tree_ = records_tree; |
<|file_name|>tfidf_smbkmeans.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import sys
import inspect
cmd_folder = os.path.realpath(
os.path.abspath(
os.path.split(
inspect.getfile(
inspect.currentframe()
)
)[0]
)
)
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
from smbkmeans import *
import pandas as pd
import numpy as np
import scipy.sparse as sp
import random
from bson.son import SON
from pymongo import MongoClient
from monary import Monary
import bz2
try:
import cPickle as pickle
except:
import pickle
settings = {
'mongo_host': 'server.local',
'mongo_db_name': 'mydb',
'mongo_port': 27017,
'tfidf_collection': 'tfidf',
'models_per_k': 25,
'ld_k_min': 0.5,
'ld_k_max': 2.5,
'k_steps': 50,
'batch_size': 1024
}
blacklist = {
'consumers': [],
'brands': [0],
'companies': [10000],
'categories': [0]
}
if __name__ == "__main__":
# establish PyMongo connection:
mongo_client = MongoClient(settings['mongo_host'],
settings['mongo_port'])
mongo_db = mongo_client[settings['mongo_db_name']]
# get collection:
tfidf_collection = mongo_db[settings['tfidf_collection']]
# find out who the consumers are
cursor = tfidf_collection.find(
{"consumer": {
"$nin": blacklist['consumers']
}}
).distinct('consumer')
consumers = np.array(cursor, dtype=np.int64)
n_consumers = len(consumers)
# find out how many items there are
cursor = tfidf_collection.find().distinct('item')
items = np.array(cursor, dtype=np.int64)
n_items = len(items)
# close PyMongo connection
mongo_client.close()
# set up Monary
monary_client = Monary(settings['mongo_host'],
settings['mongo_port'])
def get_consumer_mtx(consumer_batch):
'''Returns a sparse matrix with feature vectors for a consumer batch.'''
pipeline = [
{"$match": {
"consumer": {"$in": consumer_batch},
"brand": {"$nin": blacklist['brands']},
"company": {"$nin": blacklist['companies']},
"category": {"$nin": blacklist['categories']}
}},
{"$project": {
"_id": False,
"consumer": True,
"item": True,
"tfidf": "$purchasetfidf2"
}},
{"$sort": SON([("consumer", 1)])}
]
try:
# careful! Monary returns masked numpy arrays!
result = monary_client.aggregate(
settings['mongo_db_name'],
settings['tfidf_collection'],
pipeline,
["consumer", "item", "tfidf"],
["int64", "int64", "float64"])
except:
return sp.csr_matrix(shape=(len(consumer_batch), n_items),
dtype=np.float64)
# convert into CSR matrix
_, consumer_idcs = np.unique(result[0].data,
return_inverse=True)
mtx = sp.csr_matrix(
(result[2].data, (consumer_idcs,
result[1].data)),
shape=(len(consumer_batch), n_items),<|fim▁hole|> dtype=np.float64)
# normalize each row (this step can't be moved into the database
# because of the item blacklist)
for row_idx in xrange(len(consumer_batch)):
row = mtx.data[mtx.indptr[row_idx]:mtx.indptr[row_idx + 1]]
row /= np.linalg.norm(row)
return mtx
def get_batch(batch_size=100, offset=0, random_pick=True):
if random_pick:
# pick batch_size examples randomly from the consumers in the
# collection
consumer_batch = random.sample(consumers, batch_size)
else:
# advance index by offset
consumer_batch = list(consumers)[offset:]
# get the next batch_size consumers from the collection
consumer_batch = consumer_batch[:batch_size]
# obtain sparse matrix filled with feature vectors from database
mtx = get_consumer_mtx(consumer_batch)
return mtx
# train the models
ns_clusters = np.unique(np.int64(np.floor(
10. ** np.linspace(settings['ld_k_min'],
settings['ld_k_max'],
settings['k_steps'],
endpoint=True))))
np.random.shuffle(ns_clusters)
ns_clusters = ns_clusters.tolist()
models = [SphericalMiniBatchKMeans(n_clusters=n_clusters,
n_init=10,
max_iter=1000,
batch_size=settings['batch_size'],
reassignment_ratio=.01,
max_no_improvement=10,
project_l=5.) for _ in xrange(settings['models_per_k']) for n_clusters in ns_clusters]
filename = cmd_folder + '/tfidf_smbkmeans__tfidf2.pkl.bz2'
for model in models:
_ = model.fit(n_samples=n_consumers,
get_batch=get_batch)
fp = bz2.BZ2File(filename, 'w')
pickle.dump(models, fp, pickle.HIGHEST_PROTOCOL)
fp.close()<|fim▁end|> | |
<|file_name|>logging.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::Error;
use consensus_types::common::{Author, Round};
use diem_logger::Schema;
use diem_types::waypoint::Waypoint;
use serde::Serialize;
#[derive(Schema)]
pub struct SafetyLogSchema<'a> {
name: LogEntry,
event: LogEvent,
round: Option<Round>,
preferred_round: Option<u64>,
last_voted_round: Option<u64>,
epoch: Option<u64>,
#[schema(display)]
error: Option<&'a Error>,
waypoint: Option<Waypoint>,
author: Option<Author>,
}
impl<'a> SafetyLogSchema<'a> {
pub fn new(name: LogEntry, event: LogEvent) -> Self {
Self {
name,
event,
round: None,
preferred_round: None,
last_voted_round: None,
epoch: None,
error: None,
waypoint: None,
author: None,
}
}
}
#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum LogEntry {
ConsensusState,
ConstructAndSignVote,
Epoch,
Initialize,
KeyReconciliation,
LastVotedRound,
PreferredRound,
SignProposal,
SignTimeout,
State,
Waypoint,<|fim▁hole|>impl LogEntry {
pub fn as_str(&self) -> &'static str {
match self {
LogEntry::ConsensusState => "consensus_state",
LogEntry::ConstructAndSignVote => "construct_and_sign_vote",
LogEntry::Epoch => "epoch",
LogEntry::Initialize => "initialize",
LogEntry::LastVotedRound => "last_voted_round",
LogEntry::KeyReconciliation => "key_reconciliation",
LogEntry::PreferredRound => "preferred_round",
LogEntry::SignProposal => "sign_proposal",
LogEntry::SignTimeout => "sign_timeout",
LogEntry::State => "state",
LogEntry::Waypoint => "waypoint",
}
}
}
#[derive(Clone, Copy, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum LogEvent {
Error,
Request,
Success,
Update,
}<|fim▁end|> | }
|
<|file_name|>main.go<|end_file_name|><|fim▁begin|>// span-reshape is a dumbed down span-import.
package main
import (
"bufio"
"encoding"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"runtime"
"runtime/pprof"
"sort"
"github.com/lytics/logrus"
"github.com/miku/span"
"github.com/miku/span/formats/ceeol"
"github.com/miku/span/formats/crossref"
"github.com/miku/span/formats/dblp"
"github.com/miku/span/formats/degruyter"
"github.com/miku/span/formats/doaj"
"github.com/miku/span/formats/dummy"
"github.com/miku/span/formats/elsevier"
"github.com/miku/span/formats/finc"
"github.com/miku/span/formats/genderopen"
"github.com/miku/span/formats/genios"
"github.com/miku/span/formats/hhbd"
"github.com/miku/span/formats/highwire"
"github.com/miku/span/formats/ieee"
"github.com/miku/span/formats/imslp"
"github.com/miku/span/formats/jstor"
"github.com/miku/span/formats/mediarep"
"github.com/miku/span/formats/olms"
"github.com/miku/span/formats/ssoar"
"github.com/miku/span/formats/thieme"
"github.com/miku/span/formats/zvdd"
"github.com/miku/span/parallel"
"github.com/miku/xmlstream"
"github.com/segmentio/encoding/json"
"golang.org/x/net/html/charset"
)
var (
name = flag.String("i", "", "input format name")
list = flag.Bool("list", false, "list input formats")
numWorkers = flag.Int("w", runtime.NumCPU(), "number of workers")
showVersion = flag.Bool("v", false, "prints current program version")
cpuProfile = flag.String("cpuprofile", "", "write cpu profile to file")
memProfile = flag.String("memprofile", "", "write heap profile to file (go tool pprof -png --alloc_objects program mem.pprof > mem.png)")
logfile = flag.String("logfile", "", "path to logfile to append to, otherwise stderr")
)
// Factory creates things.
type Factory func() interface{}
// FormatMap maps format name to pointer to format struct. TODO(miku): That
// looks just wrong.
var FormatMap = map[string]Factory{
"ceeol": func() interface{} { return new(ceeol.Article) },
"ceeol-marcxml": func() interface{} { return new(ceeol.Record) },
"crossref": func() interface{} { return new(crossref.Document) },
"dblp": func() interface{} { return new(dblp.Article) },
"degruyter": func() interface{} { return new(degruyter.Article) },
"doaj": func() interface{} { return new(doaj.ArticleV1) },
"doaj-legacy": func() interface{} { return new(doaj.Response) },
"doaj-oai": func() interface{} { return new(doaj.Record) },
"dummy": func() interface{} { return new(dummy.Example) },
"genderopen": func() interface{} { return new(genderopen.Record) },
"genios": func() interface{} { return new(genios.Document) },
"hhbd": func() interface{} { return new(hhbd.Record) },
"highwire": func() interface{} { return new(highwire.Record) },
"ieee": func() interface{} { return new(ieee.Publication) },
"imslp": func() interface{} { return new(imslp.Data) },
"jstor": func() interface{} { return new(jstor.Article) },
"mediarep-dim": func() interface{} { return new(mediarep.Dim) },
"olms": func() interface{} { return new(olms.Record) },
"olms-mets": func() interface{} { return new(olms.MetsRecord) },
"ssoar": func() interface{} { return new(ssoar.Record) },
"thieme-nlm": func() interface{} { return new(thieme.Record) },
"zvdd": func() interface{} { return new(zvdd.DublicCoreRecord) },
"zvdd-mets": func() interface{} { return new(zvdd.MetsRecord) },
}
// IntermediateSchemaer wrap a basic conversion method.
type IntermediateSchemaer interface {
ToIntermediateSchema() (*finc.IntermediateSchema, error)
}
// processXML converts XML based formats, given a format name. It reads XML as
// stream and converts record them to an intermediate schema (at the moment).
func processXML(r io.Reader, w io.Writer, name string) error {
if _, ok := FormatMap[name]; !ok {
return fmt.Errorf("unknown format name: %s", name)
}
obj := FormatMap[name]()
scanner := xmlstream.NewScanner(bufio.NewReader(r), obj)
// errors like invalid character entities happen, also ISO-8859, ...
scanner.Decoder.Strict = false
scanner.Decoder.CharsetReader = charset.NewReaderLabel
for scanner.Scan() {
tag := scanner.Element()
converter, ok := tag.(IntermediateSchemaer)
if !ok {
return fmt.Errorf("cannot convert to intermediate schema: %T", tag)
}
output, err := converter.ToIntermediateSchema()
if err != nil {
if _, ok := err.(span.Skip); ok {
continue
}
return err
}
if err := json.NewEncoder(w).Encode(output); err != nil {
return err
}
}
return scanner.Err()
}
// processJSON convert JSON based formats. Input is interpreted as newline delimited JSON.
func processJSON(r io.Reader, w io.Writer, name string) error {
if _, ok := FormatMap[name]; !ok {
return fmt.Errorf("unknown format name: %s", name)
}
p := parallel.NewProcessor(r, w, func(_ int64, b []byte) ([]byte, error) {
v := FormatMap[name]()
if err := json.Unmarshal(b, v); err != nil {
return nil, err
}
converter, ok := v.(IntermediateSchemaer)
if !ok {
return nil, fmt.Errorf("cannot convert to intermediate schema: %T", v)
}
output, err := converter.ToIntermediateSchema()
if _, ok := err.(span.Skip); ok {
return nil, nil
}
if err != nil {
return nil, err
}
bb, err := json.Marshal(output)
if err != nil {
return nil, err
}
bb = append(bb, '\n')
return bb, nil
})
return p.RunWorkers(*numWorkers)
}
// processText processes a single record from raw bytes.
func processText(r io.Reader, w io.Writer, name string) error {
if _, ok := FormatMap[name]; !ok {
return fmt.Errorf("unknown format name: %s", name)
}
// Get the format.
data := FormatMap[name]()
// We need an unmarshaller first.
unmarshaler, ok := data.(encoding.TextUnmarshaler)
if !ok {
return fmt.Errorf("cannot unmarshal text: %T", data)
}
b, err := ioutil.ReadAll(r)
if err != nil {
return err
}
if err := unmarshaler.UnmarshalText(b); err != nil {
return err
}
// Now that data is populated we can convert.
converter, ok := data.(IntermediateSchemaer)
if !ok {
return fmt.Errorf("cannot convert to intermediate schema: %T", data)
}
output, err := converter.ToIntermediateSchema()
if _, ok := err.(span.Skip); ok {
return nil
}
if err != nil {
return err
}
return json.NewEncoder(w).Encode(output)
}
func main() {
flag.Parse()
if *showVersion {
fmt.Println(span.AppVersion)
os.Exit(0)
}
if *cpuProfile != "" {
f, err := os.Create(*cpuProfile)
if err != nil {
log.Fatal(err)
}
if err := pprof.StartCPUProfile(f); err != nil {
log.Fatal(err)
}
defer pprof.StopCPUProfile()
}
if *list {
var keys []string
for k := range FormatMap {
keys = append(keys, k)
}
sort.Strings(keys)
for _, k := range keys {
fmt.Println(k)
}<|fim▁hole|> f, err := os.OpenFile(*logfile, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0600)
if err != nil {
log.Fatal(err)
}
defer f.Close()
logger := logrus.New()
logger.Formatter = &logrus.JSONFormatter{}
logger.Out = f
log.SetOutput(logger.Writer())
}
w := bufio.NewWriter(os.Stdout)
defer w.Flush()
var reader io.Reader = os.Stdin
if flag.NArg() > 0 {
var files []io.Reader
for _, filename := range flag.Args() {
f, err := os.Open(filename)
if err != nil {
log.Fatal(err)
}
defer f.Close()
files = append(files, f)
}
reader = io.MultiReader(files...)
}
switch *name {
// XXX: Configure this in one place.
case "highwire", "ceeol", "ieee", "genios", "jstor", "thieme-tm",
"zvdd", "degruyter", "zvdd-mets", "hhbd", "thieme-nlm", "olms",
"olms-mets", "ssoar", "genderopen", "mediarep-dim",
"ceeol-marcxml", "doaj-oai", "dblp":
if err := processXML(reader, w, *name); err != nil {
log.Fatal(err)
}
case "doaj", "doaj-api", "crossref", "dummy":
if err := processJSON(reader, w, *name); err != nil {
log.Fatal(err)
}
case "imslp":
if err := processText(reader, w, *name); err != nil {
log.Fatal(err)
}
case "elsevier-tar":
shipment, err := elsevier.NewShipment(reader)
if err != nil {
log.Fatal(err)
}
docs, err := shipment.BatchConvert()
if err != nil {
log.Fatal(err)
}
encoder := json.NewEncoder(w)
for _, doc := range docs {
if encoder.Encode(doc); err != nil {
log.Fatal(err)
}
}
default:
if *name == "" {
log.Fatalf("input format required")
}
log.Fatalf("unknown format: %s", *name)
}
if *memProfile != "" {
f, err := os.Create(*memProfile)
if err != nil {
log.Fatal("could not create memory profile: ", err)
}
defer f.Close()
runtime.GC()
if err := pprof.WriteHeapProfile(f); err != nil {
log.Fatal(err)
}
}
}<|fim▁end|> | os.Exit(0)
}
if *logfile != "" { |
<|file_name|>AuditLogEntryFactoryTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2016 Intuit
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.intuit.wasabi.auditlogobjects;
import com.intuit.wasabi.eventlog.events.BucketCreateEvent;
import com.intuit.wasabi.eventlog.events.BucketEvent;
import com.intuit.wasabi.eventlog.events.ChangeEvent;
import com.intuit.wasabi.eventlog.events.EventLogEvent;
import com.intuit.wasabi.eventlog.events.ExperimentChangeEvent;
import com.intuit.wasabi.eventlog.events.ExperimentCreateEvent;
import com.intuit.wasabi.eventlog.events.ExperimentEvent;
import com.intuit.wasabi.eventlog.events.SimpleEvent;
import com.intuit.wasabi.experimentobjects.Bucket;
import com.intuit.wasabi.experimentobjects.Experiment;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import java.lang.reflect.Field;
/**
* Tests for {@link AuditLogEntryFactory}.
*/
public class AuditLogEntryFactoryTest {
@Test
public void testCreateFromEvent() throws Exception {
new AuditLogEntryFactory();
EventLogEvent[] events = new EventLogEvent[]{
new SimpleEvent("SimpleEvent"),
new ExperimentChangeEvent(Mockito.mock(Experiment.class), "Property", "before", "after"),
new ExperimentCreateEvent(Mockito.mock(Experiment.class)),
new BucketCreateEvent(Mockito.mock(Experiment.class), Mockito.mock(Bucket.class))
};
Field[] fields = AuditLogEntry.class.getFields();
for (Field field : fields) {
field.setAccessible(true);
}
for (EventLogEvent event : events) {
AuditLogEntry aleFactory = AuditLogEntryFactory.createFromEvent(event);
AuditLogEntry aleManual = new AuditLogEntry(
event.getTime(), event.getUser(), AuditLogAction.getActionForEvent(event),
event instanceof ExperimentEvent ? ((ExperimentEvent) event).getExperiment() : null,
event instanceof BucketEvent ? ((BucketEvent) event).getBucket().getLabel() : null,
event instanceof ChangeEvent ? ((ChangeEvent) event).getPropertyName() : null,
event instanceof ChangeEvent ? ((ChangeEvent) event).getBefore() : null,<|fim▁hole|> event instanceof ChangeEvent ? ((ChangeEvent) event).getAfter() : null
);
for (Field field : fields) {
Assert.assertEquals(field.get(aleManual), field.get(aleFactory));
}
}
}
}<|fim▁end|> | |
<|file_name|>cache.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'hal9000'
__all__ = ['Cache', 'CacheServer']
import socket
import time
import json
import hashlib
from sqlite3 import dbapi2 as sqlite
import xbmc
import log
import gui
import system
SOCKET = '127.0.0.1', 59999
CLEAR = 60*60*24 # 1 day
class SQL:
def __init__(self, name, version):
self.fs = system.FS('cache')
if self.fs.exists('sandbox://' + name + '.sqlite'):
self.con = sqlite.connect(self.fs('sandbox://' + name + '.sqlite'))
else:
self.con = sqlite.connect(self.fs('sandbox://' + name + '.sqlite'))
self.sql_set('pragma auto_vacuum=1')
self.sql_set('create table meta(data text)')
self.sql_set('insert into meta(data) values(?)', (json.dumps({'version': version, 'timeout': int(time.time()) + CLEAR}),))
self.sql_set('create table cache(token varchar(32) unique, expire integer, data text)')
self.sql_set('create index dataindex on cache(expire)')
self.meta_load()
def health(self, version):
if self.meta['version'] != version:
self.meta_save('version', version)
self.clear()
elif self.meta['timeout'] < int(time.time()):
self.sql_set('delete from cache where expire<?', (int(time.time()), ))
self.meta_save('timeout', int(time.time()) + CLEAR)
def get(self, token):
return self.sql_get('select data from cache where token=? and expire>? limit 1', (hashlib.md5(str(token)).hexdigest(), int(time.time())))
def set(self, token, expire, data):
try:
jsdata = json.dumps(data)
except:
pass
else:
self.sql_set('replace into cache(token,expire,data) values(?,?,?)', (hashlib.md5(str(token)).hexdigest(), int(time.time()) + expire, jsdata))
def clear(self):
self.sql_set('delete from cache')
self.meta_save('timeout', int(time.time()) + CLEAR)
# Private
def sql_get(self, sql, *args):
cur = self.con.cursor()
cur.execute(sql, *args)
rows = cur.fetchall()
cur.close()
try:
return json.loads(rows[0][0])
except:
return None
def sql_set(self, sql, *args):
cur = self.con.cursor()
cur.execute(sql, *args)
self.con.commit()
cur.close()
def meta_load(self):
self.meta = self.sql_get('select data from meta')
if not self.meta:
self.meta = {'version': '', 'timeout': 0}
def meta_save(self, key, value):
self.meta[key] = value
self.sql_set('update meta set data=?', (json.dumps(self.meta),))
class Base:
def recv(self, sock):
data = ''
length = ''
idle = time.time()
while True:
try:
if isinstance(length, basestring):
c = sock.recv(1)
if c == '.':
length = int(length)
else:
length += c
else:<|fim▁hole|> data = sock.recv(length - len(data))
except socket.error, e:
if not e.errno in (10035, 35):
self.log('Recive', repr(e))
if e.errno in (22,):
self.log('Socket error 22')
return None
if idle + 10 < time.time():
self.log('Timeout')
return None
else:
if not isinstance(length, basestring) and len(data) == length:
try:
return json.loads(data)
except Exception, e:
self.log('JSON', repr(e))
return None
def send(self, sock, data):
try:
jsdata = json.dumps(data)
except:
jsdata = 'null'
sock.send(str(len(jsdata)) + '.' + jsdata)
def log(self, *args):
log.error(str(self.__class__.__name__), *args)
class Cache(Base):
def __init__(self, name, version=None):
self.name = str(name).strip()
self.version = str(version).strip()
def call(self, token, fun, *args, **kwargs):
cache = self._call([1, token])
if cache is not None:
return cache
res = fun(*args, **kwargs)
if res is None:
return None
else:
if isinstance(res, tuple) and len(res) == 2 and isinstance(res[0], int):
self._call([2, token, res[0], res[1]])
return res[1]
else:
return res
def clear(self):
self._call('clear')
def _call(self, data):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect(SOCKET)
except socket.error, e:
if e.errno in (111,):
self.log("CacheServer isn't running")
else:
self.log('Connect', repr(e))
return None
except:
return None
else:
self.send(sock, [self.name, self.version] + data)
r = self.recv(sock)
sock.close()
return r
class CacheServer(Base):
def __init__(self):
self.sql = {}
def run(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.bind(SOCKET)
except Exception, e:
self.log('Bind', repr(e))
gui.message('Failed to start CacheServer. Check log.')
else:
sock.listen(1)
sock.setblocking(0)
idle = time.time()
while not xbmc.abortRequested:
try:
(client, address) = sock.accept()
except socket.error, e:
if e.errno == 11 or e.errno == 10035 or e.errno == 35:
if idle + 3 < time.time():
time.sleep(0.5)
continue
self.log('Accept', repr(e))
continue
except:
continue
else:
self.send(client, self.command(self.recv(client)))
idle = time.time()
sock.close()
def command(self, data):
if not data or not isinstance(data, list) or len(data) < 3 or data[2] not in (1, 2, 3):
return None
sql = self.open(data[0], data[1])
if not sql:
return None
if data[2] == 1 and len(data) == 4 and isinstance(data[3], basestring):
return sql.get(data[3])
elif data[2] == 2 and len(data) == 6 and isinstance(data[3], basestring) and isinstance(data[4], int):
sql.set(data[3], data[4], data[5])
return 1
elif data[2] == 3:
sql.clear()
return 1
return None
def open(self, db, version):
name = str(db).strip()
if not name:
return None
ver = str(version).strip()
if db not in self.sql:
self.sql[db] = SQL(db, ver)
self.sql[db].health(ver)
return self.sql[db]<|fim▁end|> | |
<|file_name|>static-mut-not-pat.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Constants (static variables) can be used to match in patterns, but mutable
// statics cannot. This ensures that there's some form of error if this is
// attempted.
static mut a: int = 3;
fn main() {
// If they can't be matched against, then it's possible to capture the same
// name as a variable, hence this should be an unreachable pattern situation
// instead of spitting out a custom error about some identifier collisions
// (we should allow shadowing)
match 4i {
a => {} //~ ERROR static variables cannot be referenced in a pattern
_ => {}
}
}
struct NewBool(bool);
enum Direction {
North,<|fim▁hole|>}
const NEW_FALSE: NewBool = NewBool(false);
struct Foo {
bar: Option<Direction>,
baz: NewBool
}
static mut STATIC_MUT_FOO: Foo = Foo { bar: Some(West), baz: NEW_FALSE };
fn mutable_statics() {
match (Foo { bar: Some(North), baz: NewBool(true) }) {
Foo { bar: None, baz: NewBool(true) } => (),
STATIC_MUT_FOO => (),
//~^ ERROR static variables cannot be referenced in a pattern
Foo { bar: Some(South), .. } => (),
Foo { bar: Some(EAST), .. } => (),
Foo { bar: Some(North), baz: NewBool(true) } => (),
Foo { bar: Some(EAST), baz: NewBool(false) } => ()
}
}<|fim▁end|> | East,
South,
West |
<|file_name|>sha1Hash_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
<|fim▁hole|> def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def testFIPS180_1_Appendix_B(self):
""" APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
if __name__ == '__main__':
# Run the tests from the command line
unittest.main()<|fim▁end|> | """ SHA-1 tests from FIPS180-1 Appendix A, B and C """
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from flask import Flask
__version__ = '0.1.1'
<|fim▁hole|>app.config.from_object('frijoles.default_settings')
app.config.from_envvar('FRIJOLES_SETTINGS', silent=True)
import frijoles.views<|fim▁end|> | app = Flask(__name__) |
<|file_name|>applicationsettings.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use clap::ArgMatches;
use crate::io::constants::{APP_INFO, SCALE};
use std::path::PathBuf;
use log::LevelFilter;
use log4rs;
use log4rs::append::console::ConsoleAppender;
use log4rs::config::{Appender, Config, Root};
use log4rs::encode::pattern::PatternEncoder;
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ApplicationSettings {
pub rom_file_name: String,
pub debug_mode: bool,
pub trace_mode: bool,
pub memvis_mode: bool,
pub debugger_on: bool,
pub vulkan_mode: bool,
config_path: Option<PathBuf>,
pub data_path: Option<PathBuf>,
pub ui_scale: f32,
}
impl ApplicationSettings {
pub fn new(arguments: &ArgMatches) -> Result<ApplicationSettings, String> {
// Attempt to read ROM first
let rom_file_name = arguments
.value_of("game")
.expect("Could not open specified rom")
.to_string();
let debug_mode = arguments.is_present("debug");
let trace_mode = arguments.is_present("trace");
let memvis_mode = arguments.is_present("visualize");
let vulkan_mode = arguments.is_present("vulkan");
// Set up logging
let stdout = ConsoleAppender::builder()
.encoder(Box::new(PatternEncoder::new("{h({l})} {m} {n}")))
.build();
let config = Config::builder()
.appender(Appender::builder().build("stdout", Box::new(stdout)))
.build(
Root::builder()
.appender("stdout")
.build(match (trace_mode, debug_mode) {
(true, _) => LevelFilter::Trace,
(false, true) => LevelFilter::Debug,
_ => LevelFilter::Info,
}),
)
.or_else(|_| Err("Could not build Config".to_string()))?;
// Set up debugging or command-line logging
let (should_debugger, _handle) = if debug_mode && cfg!(feature = "debugger") {
info!("Running in debug mode");
(true, None)
} else {
let handle = log4rs::init_config(config).or_else(|_| Err("Could not init Config"))?;
(false, Some(handle))
};
let data_path = match app_root(AppDataType::UserData, &APP_INFO) {
Ok(v) => {
debug!("Using user data path: {:?}", v);
Some(v)
}
Err(e) => {
error!("Could not open a user data path: {}", e);
None
}
};
let config_path = match app_root(AppDataType::UserConfig, &APP_INFO) {
Ok(v) => {
debug!("Using user config path: {:?}", v);
Some(v)
}
Err(e) => {
error!("Could not open a user config path: {}", e);
None
}
};
Ok(ApplicationSettings {
rom_file_name,
debug_mode,
trace_mode,
memvis_mode,
vulkan_mode,
config_path,
data_path,
debugger_on: should_debugger,
// logger_handle: handle,
ui_scale: SCALE,
})
}
}<|fim▁end|> | //! Stores all settings related to the application from a user perspective
use app_dirs::*; |
<|file_name|>get_permission_response.rs<|end_file_name|><|fim▁begin|>use crate::from_headers::*;
use crate::permission::CosmosPermission;
use crate::Permission;
use azure_sdk_core::errors::AzureError;
use azure_sdk_core::{etag_from_headers, session_token_from_headers};
use http::HeaderMap;
use std::borrow::Cow;
#[derive(Debug, Clone, PartialEq)]
pub struct GetPermissionResponse<'a> {
pub permission: Permission<'a, Cow<'a, str>>,
pub charge: f64,
pub etag: String,
pub activity_id: uuid::Uuid,
pub session_token: String,<|fim▁hole|>
impl<'a> std::convert::TryFrom<(&HeaderMap, &[u8])> for GetPermissionResponse<'a> {
type Error = AzureError;
fn try_from(value: (&HeaderMap, &[u8])) -> Result<Self, Self::Error> {
let headers = value.0;
let body = value.1;
debug!("headers == {:#?}", headers);
debug!("body == {:#?}", std::str::from_utf8(body)?);
// first get the Cosmos REST API permission
let cosmos_permission: CosmosPermission<'_> = serde_json::from_slice(body)?;
debug!("cosmos_permission== {:#?}", cosmos_permission);
// now convert into the SDK struct
let permission = Permission::try_from(cosmos_permission)?;
Ok(Self {
permission,
charge: request_charge_from_headers(headers)?,
etag: etag_from_headers(headers)?,
activity_id: activity_id_from_headers(headers)?,
session_token: session_token_from_headers(headers)?,
content_path: content_path_from_headers(headers)?.to_owned(),
alt_content_path: alt_content_path_from_headers(headers)?.to_owned(),
})
}
}<|fim▁end|> | pub content_path: String,
pub alt_content_path: String,
} |
<|file_name|>LetterHandler.cpp<|end_file_name|><|fim▁begin|>#include "stdafx.h"
#include "DBAgent.h"
using std::string;
void CUser::LetterSystem(Packet & pkt)
{
uint8 opcode = pkt.read<uint8>();
switch (opcode)
{
case LETTER_UNREAD:
case LETTER_LIST:
case LETTER_HISTORY:
case LETTER_READ:
case LETTER_GET_ITEM:
case LETTER_SEND:
break;
case LETTER_DELETE:
{
uint8 bCount = pkt.read<uint8>();
if (bCount > 5)
{
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
result << uint8(LETTER_DELETE) << int8(-3);
Send(&result);
return;
}
} break;
default:
TRACE("Unknown letter packet: %X\n", opcode);
return;
}
g_pMain->AddDatabaseRequest(pkt, this);
}
void CUser::ReqLetterSystem(Packet & pkt)
{
uint8 opcode = pkt.read<uint8>();
switch (opcode)
{
// Are there any letters to be read?
// This is for the notification at the top of the screen.
case LETTER_UNREAD:
ReqLetterUnread();
break;
// Lists all the new mail.
case LETTER_LIST:
ReqLetterList();
break;
// Lists all the old mail.
case LETTER_HISTORY:
ReqLetterList(false);
break;
// Opens up the letter & marks it as read.
case LETTER_READ:
ReqLetterRead(pkt);
break;
// Used to send a letter & any coins/items (coins are disabled though)
case LETTER_SEND:
ReqLetterSend(pkt);
break;
// Used to take an item from a letter.
case LETTER_GET_ITEM:
ReqLetterGetItem(pkt);
break;
// Deletes up to 5 old letters at a time.
case LETTER_DELETE:
ReqLetterDelete(pkt);
break;
}
}
void CUser::ReqLetterUnread()
{
// TODO: Force this to use cached list data (or update if stale). Calling the DB for just this is pointless.
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
result << uint8(LETTER_UNREAD)
<< g_DBAgent.GetUnreadLetterCount(m_strUserID);
Send(&result);
}
void CUser::ReqLetterList(bool bNewLettersOnly /*= true*/)
{
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
result << uint8(bNewLettersOnly ? LETTER_LIST : LETTER_HISTORY);
if (!g_DBAgent.GetLetterList(m_strUserID, result, bNewLettersOnly))
result << int8(-1);
Send(&result);
}
void CUser::ReqLetterRead(Packet & pkt)
{
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
uint32 nLetterID = pkt.read<uint32>();
string strMessage;
result << uint8(LETTER_READ);
if (!g_DBAgent.ReadLetter(m_strUserID, nLetterID, strMessage))
{
// TODO: research error codes
result << uint8(0);
}
else
{
result.SByte();
result << uint8(1) << nLetterID << strMessage;
}
Send(&result);
}
void CUser::ReqLetterSend(Packet & pkt)
{
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
CUser * pUser;
string strRecipient, strSubject, strMessage;
_ITEM_DATA *pItem = nullptr;
uint32 nItemID = 0, nCoins = 0, nCoinRequirement = 1000;
uint8 bType, bSrcPos;
int8 bResult = 1;
int64 Serial = 0;
if (isMerchanting() || isTrading())
{
bResult = -1;
goto send_packet;
}
pkt.SByte();
pkt >> strRecipient >> strSubject >> bType;
// Invalid recipient name length
if (strRecipient.empty() || strRecipient.length() > MAX_ID_SIZE
// Invalid subject length
|| strSubject.empty() || strSubject.length() > 31
// Invalid type (as far as we're concerned)
|| bType == 0 || bType > 2)
bResult = -1;
else if (STRCASECMP(m_strUserID.c_str(), strRecipient.c_str()) == 0)
bResult = -6;
if (bResult != 1)
goto send_packet;
if (bType == 2)
{
pkt >> nItemID >> bSrcPos >> nCoins; // coins will always be 0 (it's disabled)
if (nItemID != 0)
nCoinRequirement = 10000; // if coins were enabled, we'd obviously tack nCoins onto this.
else
nCoinRequirement = 5000; // if coins were enabled, we'd obviously tack nCoins onto this.
_ITEM_TABLE *pTable = g_pMain->GetItemPtr(nItemID);
// Invalid item (ID doesn't exist)
if (pTable == nullptr
// Invalid slot ID
|| bSrcPos > HAVE_MAX
// Item doesn't match what the server sees.
|| (pItem = GetItem(SLOT_MAX + bSrcPos))->nNum != nItemID)
bResult = -1;
// Untradeable item
else if (pTable->m_bRace == RACE_UNTRADEABLE || nItemID >= ITEM_GOLD
|| pItem->isSealed() || pItem->isRented() || pItem->isBound() || pItem->isDuplicate() || pItem->nExpirationTime !=0)
bResult = -32;
}
pkt >> strMessage;
if (pItem != nullptr)
Serial = pItem->nSerialNum;
// Invalid message length
if (strMessage.empty() || strMessage.size() > 128)
bResult = -1;
if (bResult != 1 && nCoins == 0)
goto send_packet;
// Ensure they have all the coins they need
if (m_iGold < nCoinRequirement)
{
bResult = -1;
goto send_packet;
}
// Leave the rest up to the database (does the character exist, etc?)
if (pItem != nullptr)
{
if (pItem->nNum == nItemID && pItem->nSerialNum == Serial)
bResult = g_DBAgent.SendLetter(m_strUserID, strRecipient, strSubject, strMessage, bType, pItem, nCoins);
else
bResult = 1;
}
else
bResult = g_DBAgent.SendLetter(m_strUserID, strRecipient, strSubject, strMessage, bType, pItem, nCoins);
if (bResult != 1)
goto send_packet;
// Remove the player's coins
if (nCoins != 0)
GoldLose(nCoinRequirement+nCoins);
else
GoldLose(nCoinRequirement);
// Remove the player's item
if (pItem != nullptr)
{
memset(pItem, 0, sizeof(_ITEM_DATA));
SendStackChange(nItemID, pItem->sCount, pItem->sDuration, bSrcPos);
}
// If the other player's online, notify them.
pUser = g_pMain->GetUserPtr(strRecipient, TYPE_CHARACTER);
if (pUser != nullptr)
{
Packet notification(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
notification << uint8(LETTER_UNREAD) << true;
pUser->Send(¬ification);
}
send_packet:
result << uint8(LETTER_SEND) << uint8(bResult);
Send(&result);
}
void CUser::ReqLetterGetItem(Packet & pkt)
{
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
uint64 nSerialNum = 0;
uint32 nLetterID = pkt.read<uint32>(), nItemID = 0, nCoins = 0;
uint16 sCount = 0, sDurability = 0;
int8 bResult = g_DBAgent.GetItemFromLetter(m_strUserID, nLetterID, nItemID, sCount, sDurability, nCoins, nSerialNum);
int pos = -1;
if (isMerchanting() || isTrading())
bResult = -1;
// If the request was successful, check requirements...
if (bResult == 1)
{
// If we're being given an item, do we have enough room for this item?
if (nItemID
&& ((pos = FindSlotForItem(nItemID, sCount)) < 0
|| !CheckWeight(nItemID, sCount)))
bResult = -1;
// If we're being given coins, do they exceed our max?
if (nCoins
&& m_iGold + nCoins > COIN_MAX)
bResult = -1;
}
// If all of the requirements passed, we can give the items/coins.
// But ONLY if ALL requirements are met.
if (bResult == 1)
{
if (nItemID)
{
_ITEM_DATA *pItem = GetItem(pos);
pItem->nNum = nItemID;
pItem->sCount += sCount;
pItem->sDuration += sDurability;
pItem->nSerialNum = nSerialNum;
if (pItem->nNum == nItemID && pItem->nSerialNum == nSerialNum)
SendStackChange(nItemID, pItem->sCount, pItem->sDuration, pos - SLOT_MAX, pItem->sCount == sCount);
}
if (nCoins)
GoldGain(nCoins);
}
result << uint8(LETTER_GET_ITEM) << bResult;
Send(&result);
}
void CUser::ReqLetterDelete(Packet & pkt)<|fim▁hole|>{
Packet result(WIZ_SHOPPING_MALL, uint8(STORE_LETTER));
uint8 bCount = pkt.read<uint8>();
result << uint8(LETTER_DELETE) << bCount;
for (uint8 i = 0; i < bCount; i++)
{
uint32 nLetterID = pkt.read<uint32>();
g_DBAgent.DeleteLetter(m_strUserID, nLetterID);
result << nLetterID;
}
Send(&result);
}<|fim▁end|> | |
<|file_name|>72_Edit_Distance.py<|end_file_name|><|fim▁begin|><|fim▁hole|> def minDistance(self, word1, word2):
"""
:type word1: str
:type word2: str
:rtype: int
"""
row = len(word1) + 1
col = len(word2) + 1
dp = [[0] * col for _ in range(row)]
for i in range(col):
dp[0][i] = i
for i in range(row):
dp[i][0] = i
for i in range(1, row):
for j in range(1, col):
if word1[i - 1] == word2[j - 1]:
dp[i][j] = dp[i - 1][j - 1]
else:
dp[i][j] = dp[i - 1][j - 1] + 1
dp[i][j] = min(dp[i][j], dp[i - 1][j] + 1, dp[i][j - 1] + 1)
return dp[row - 1][col - 1]<|fim▁end|> | class Solution(object): |
<|file_name|>ITrigger.ts<|end_file_name|><|fim▁begin|>import { IArtifact, IExecution, ITemplateInheritable } from 'core/domain';
export interface ITrigger extends ITemplateInheritable {
artifacts?: IArtifact[];
description?: string;
enabled: boolean;
rebake?: boolean;
user?: string;
type: string;
expectedArtifactIds?: string[]; // uuid references to ExpectedArtifacts defined in the Pipeline.
runAsUser?: string;
excludedArtifactTypePatterns?: RegExp[];
}
export interface IArtifactoryTrigger extends ITrigger {
artifactorySearchName: string;
artifactoryRepository: string;
type: 'artifactory';
}
export interface INexusTrigger extends ITrigger {
nexusSearchName: string;
nexusRepository: string;
type: 'nexus';
}
export interface IDockerTrigger extends ITrigger {
account?: string;
tag?: string;
registry?: string;
repository: string;
organization?: string;
}
export interface IGitTrigger extends ITrigger {
source: 'stash' | 'github' | 'bitbucket' | 'gitlab';
secret?: string;
project: string;
slug: string;
branch: string;<|fim▁hole|> type: 'git';
}
export interface IBuildTrigger extends ITrigger {
buildInfo?: any;
buildNumber?: number;
job: string;
project: string;
propertyFile?: string;
master: string;
type: 'jenkins' | 'travis' | 'wercker' | 'concourse';
}
export interface IWerckerTrigger extends IBuildTrigger {
app: string;
pipeline: string;
type: 'wercker';
}
export interface IConcourseTrigger extends IBuildTrigger {
// Concourse pipeline is represented by project
team: string;
jobName: string; // job will be the concatenation of team/pipeline/jobName
type: 'concourse';
}
export interface IPipelineTrigger extends ITrigger {
application: string;
parentExecution?: IExecution;
parentPipelineId?: string;
pipeline: string;
status: string[];
}
export interface ICronTrigger extends ITrigger {
cronExpression: string;
}
export interface IPubsubTrigger extends ITrigger {
pubsubSystem: string;
subscriptionName: string;
payloadConstraints: { [key: string]: string };
attributeConstraints: { [key: string]: string };
}
export interface IWebhookTrigger extends ITrigger {
source: string;
payloadConstraints: { [key: string]: string };
}
export interface IWerckerTrigger extends IBuildTrigger {
app: string;
pipeline: string;
type: 'wercker';
}<|fim▁end|> | hash?: string; |
<|file_name|>day24.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate clap;
use clap::App;
extern crate regex;
use std::error::Error;
use std::fs::File;
use std::io::prelude::*;
use std::collections::HashSet;
fn main() {
let yaml = load_yaml!("cli.yml");
let matches = App::from_yaml(yaml).get_matches();
let filename = matches.value_of("FILE").unwrap();
let mut file = match File::open(filename) {
Err(why) => panic!("Couldn't open {}: {}", filename, Error::description(&why)),
Ok(file) => file,
};
let mut s = String::new();
match file.read_to_string(&mut s) {
Err(why) => panic!("Couldn't read {}: {}", filename, Error::description(&why)),
Ok(_) => println!("Read file {}", filename),
}
let packages = parse_input(s.trim().split('\n').collect());
let sum = packages.iter().fold(0, |sum, p| sum + p);
split_into_3(&packages, sum / 3);
split_into_4(&packages, sum / 4);
}
fn split_into_4(packages : &Vec<i32>, target : i32) {
let mut all : HashSet<Vec<i32>> = HashSet::new();
let mut min_length = std::usize::MAX;
let groupings = generate_sum(&packages, target, Vec::new());
for g in groupings {
if g.len() <= min_length {
let available_packages = packages.clone().into_iter().filter(|x| !g.contains(x)).collect();
let second_grouping = generate_sum(&available_packages, target, Vec::new());
for g2 in second_grouping {
let available_packages_3rd : Vec<i32> = packages.clone().into_iter().filter(|x| !g.contains(x) && !g2.contains(x)).collect();
// Shouldn't generate all 2nd groups...just make sure 1 exists
let third_group_exists = sum_exists(&available_packages_3rd, target);
if third_group_exists {
all.insert(g.clone());
min_length = std::cmp::min(min_length, g.len());
}
}
}
}
let mut min_qe = std::usize::MAX;
for a in all {
if a.len() == min_length {
let qe : usize = a.iter().fold(1, |qe, x| qe * *x as usize);
min_qe = std::cmp::min(qe, min_qe);
}
}
println!("Part 2: Min QE = {}", min_qe);
}
fn split_into_3(packages : &Vec<i32>, target : i32) {
let mut all : HashSet<Vec<i32>> = HashSet::new();
let mut min_length = std::usize::MAX;
let groupings = generate_sum(&packages, target, Vec::new());
for g in groupings {
if g.len() <= min_length {
let available_packages = packages.clone().into_iter().filter(|x| !g.contains(x)).collect();
if sum_exists(&available_packages, target) {
all.insert(g.clone());
min_length = std::cmp::min(min_length, g.len());
}
}
}
let mut min_qe = std::usize::MAX;
for a in all {
if a.len() == min_length {
let qe : usize = a.iter().fold(1, |qe, x| qe * *x as usize);
min_qe = std::cmp::min(qe, min_qe);
}
}
println!("Part 1: Min QE = {}", min_qe);
}
fn sum_exists(packages : &Vec<i32>, target : i32) -> bool {
let mut exists = false;
for (i,p) in packages.iter().enumerate() {
if target - p == 0 {
exists = true;
} else if target - p > 0{
let new_vec = packages[i+1..packages.len()].to_vec();
exists = sum_exists(&new_vec, target - p);
}
if exists {
break;
}
}
exists
}
fn generate_sum(packages : &Vec<i32>, target : i32, potential : Vec<i32>) -> Vec<Vec<i32>> {
let mut groupings = Vec::new();
for (i,p) in packages.iter().enumerate() {
if target - p == 0 {
let mut group = potential.clone();
group.push(*p);
groupings.push(group.clone());
//println!("Found! {:?}", group);<|fim▁hole|> group.push(*p);
groupings.append(&mut generate_sum(&new_vec, target - p, group));
}
}
groupings
}
fn parse_input(input : Vec<&str>) -> Vec<i32> {
let mut v = Vec::new();
for s in input {
if s.trim().len() > 0 {
v.push(s.parse::<i32>().unwrap());
}
}
v.sort_by(|a,b| b.cmp(a));
v
}<|fim▁end|> | } else if target - p > 0{
let new_vec = packages[i+1..packages.len()].to_vec();
let mut group = potential.clone(); |
<|file_name|>json_filter.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 Openstack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import operator
import nova.scheduler
from nova.scheduler.filters import abstract_filter
class JsonFilter(abstract_filter.AbstractHostFilter):
"""Host Filter to allow simple JSON-based grammar for
selecting hosts.
"""
def _op_compare(self, args, op):
"""Returns True if the specified operator can successfully
compare the first item in the args with all the rest. Will
return False if only one item is in the list.
"""
if len(args) < 2:
return False
if op is operator.contains:
bad = not args[0] in args[1:]
else:
bad = [arg for arg in args[1:]
if not op(args[0], arg)]
return not bool(bad)
def _equals(self, args):
"""First term is == all the other terms."""
return self._op_compare(args, operator.eq)
def _less_than(self, args):
"""First term is < all the other terms."""
return self._op_compare(args, operator.lt)
def _greater_than(self, args):
"""First term is > all the other terms."""
return self._op_compare(args, operator.gt)
def _in(self, args):
"""First term is in set of remaining terms"""
return self._op_compare(args, operator.contains)
def _less_than_equal(self, args):
"""First term is <= all the other terms."""
return self._op_compare(args, operator.le)
def _greater_than_equal(self, args):
"""First term is >= all the other terms."""
return self._op_compare(args, operator.ge)
def _not(self, args):
"""Flip each of the arguments."""
return [not arg for arg in args]
def _or(self, args):
"""True if any arg is True."""
return any(args)
def _and(self, args):
"""True if all args are True."""
return all(args)
commands = {
'=': _equals,
'<': _less_than,
'>': _greater_than,
'in': _in,
'<=': _less_than_equal,
'>=': _greater_than_equal,
'not': _not,
'or': _or,
'and': _and,
}
def instance_type_to_filter(self, instance_type):
"""Convert instance_type into JSON filter object."""
required_ram = instance_type['memory_mb']
required_disk = instance_type['local_gb']
query = ['and',
['>=', '$compute.host_memory_free', required_ram],
['>=', '$compute.disk_available', required_disk]]
return json.dumps(query)
def _parse_string(self, string, host, hostinfo):
"""Strings prefixed with $ are capability lookups in the
form '$service.capability[.subcap*]'.
"""
if not string:
return None
if not string.startswith("$"):
return string
path = string[1:].split(".")
services = dict(compute=hostinfo.compute, network=hostinfo.network,
volume=hostinfo.volume)
service = services.get(path[0], None)
if not service:
return None
for item in path[1:]:
service = service.get(item, None)
if not service:
return None
return service
def _process_filter(self, query, host, hostinfo):
"""Recursively parse the query structure."""
if not query:
return True
cmd = query[0]
method = self.commands[cmd]
cooked_args = []
for arg in query[1:]:
if isinstance(arg, list):
arg = self._process_filter(arg, host, hostinfo)
elif isinstance(arg, basestring):
arg = self._parse_string(arg, host, hostinfo)
if arg is not None:
cooked_args.append(arg)
result = method(self, cooked_args)
return result
def filter_hosts(self, host_list, query, options):
"""Return a list of hosts that can fulfill the requirements
specified in the query.
"""
expanded = json.loads(query)
filtered_hosts = []
for host, hostinfo in host_list:
if not hostinfo:
continue
if hostinfo.compute and not hostinfo.compute.get("enabled", True):<|fim▁hole|> # Host is disabled
continue
result = self._process_filter(expanded, host, hostinfo)
if isinstance(result, list):
# If any succeeded, include the host
result = any(result)
if result:
filtered_hosts.append((host, hostinfo))
return filtered_hosts<|fim▁end|> | |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import json
def load(ctx):
with open(ctx.obj["data_location"], "r") as f:<|fim▁hole|> with open(ctx.obj["data_location"], "w") as f:
json.dump(map_obj, f, indent=4)<|fim▁end|> | return json.load(f)
def save(ctx, map_obj): |
<|file_name|>riscv32i_unknown_none_elf.rs<|end_file_name|><|fim▁begin|>use crate::spec::{LinkerFlavor, LldFlavor, PanicStrategy, RelocModel};
use crate::spec::{Target, TargetOptions};
pub fn target() -> Target {
Target {
data_layout: "e-m:e-p:32:32-i64:64-n32-S128".to_string(),
llvm_target: "riscv32".to_string(),
pointer_width: 32,
arch: "riscv32".to_string(),
options: TargetOptions {
linker_flavor: LinkerFlavor::Lld(LldFlavor::Ld),
linker: Some("rust-lld".to_string()),
cpu: "generic-rv32".to_string(),
max_atomic_width: Some(0),
atomic_cas: false,
executables: true,
panic_strategy: PanicStrategy::Abort,
relocation_model: RelocModel::Static,<|fim▁hole|> eh_frame_header: false,
..Default::default()
},
}
}<|fim▁end|> | emit_debug_gdb_scripts: false, |
<|file_name|>non_max_suppression_async_test.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '../../index';
import {ALL_ENVS, describeWithFlags} from '../../jasmine_util';
import {expectArraysClose, expectArraysEqual} from '../../test_util';
describeWithFlags('nonMaxSuppressionAsync', ALL_ENVS, () => {
describe('NonMaxSuppressionAsync basic', () => {
it('select from three clusters', async () => {
const boxes = tf.tensor2d(
[
0, 0, 1, 1, 0, 0.1, 1, 1.1, 0, -0.1, 1, 0.9,
0, 10, 1, 11, 0, 10.1, 1, 11.1, 0, 100, 1, 101
],
[6, 4]);
const scores = tf.tensor1d([0.9, 0.75, 0.6, 0.95, 0.5, 0.3]);
const maxOutputSize = 3;
const iouThreshold = 0.5;
const scoreThreshold = 0;
const indices = await tf.image.nonMaxSuppressionAsync(
boxes, scores, maxOutputSize, iouThreshold, scoreThreshold);
expect(indices.shape).toEqual([3]);
expectArraysEqual(await indices.data(), [3, 0, 5]);
});
it('accepts a tensor-like object', async () => {
const boxes = [[0, 0, 1, 1], [0, 1, 1, 2]];
const scores = [1, 2];
const indices = await tf.image.nonMaxSuppressionAsync(boxes, scores, 10);
expect(indices.shape).toEqual([2]);
expect(indices.dtype).toEqual('int32');
expectArraysEqual(await indices.data(), [1, 0]);
});
});
describe('NonMaxSuppressionWithScoreAsync', () => {
it('select from three clusters with SoftNMS', async () => {
const boxes = tf.tensor2d(
[
0, 0, 1, 1, 0, 0.1, 1, 1.1, 0, -0.1, 1, 0.9,<|fim▁hole|> 0, 10, 1, 11, 0, 10.1, 1, 11.1, 0, 100, 1, 101
],
[6, 4]);
const scores = tf.tensor1d([0.9, 0.75, 0.6, 0.95, 0.5, 0.3]);
const maxOutputSize = 6;
const iouThreshold = 1.0;
const scoreThreshold = 0;
const softNmsSigma = 0.5;
const numTensorsBefore = tf.memory().numTensors;
const {selectedIndices, selectedScores} =
await tf.image.nonMaxSuppressionWithScoreAsync(
boxes, scores, maxOutputSize, iouThreshold, scoreThreshold,
softNmsSigma);
const numTensorsAfter = tf.memory().numTensors;
expectArraysEqual(await selectedIndices.data(), [3, 0, 1, 5, 4, 2]);
expectArraysClose(
await selectedScores.data(), [0.95, 0.9, 0.384, 0.3, 0.256, 0.197]);
// The number of tensors should increase by the number of tensors
// returned (i.e. selectedIndices and selectedScores).
expect(numTensorsAfter).toEqual(numTensorsBefore + 2);
});
});
describe('NonMaxSuppressionPaddedAsync', () => {
  it('select from three clusters with pad five.', async () => {
    // With padToMaxOutputSize=true the indices tensor is zero-padded to
    // maxOutputSize and validOutputs reports how many entries are real.
    const boxes = tf.tensor2d(
        [
          0, 0, 1, 1, 0, 0.1, 1, 1.1, 0, -0.1, 1, 0.9,
          0, 10, 1, 11, 0, 10.1, 1, 11.1, 0, 100, 1, 101
        ],
        [6, 4]);
    const scores = tf.tensor1d([0.9, 0.75, 0.6, 0.95, 0.5, 0.3]);
    const maxOutputSize = 5;
    const iouThreshold = 0.5;
    const scoreThreshold = 0.0;
    const before = tf.memory().numTensors;
    const {selectedIndices, validOutputs} =
        await tf.image.nonMaxSuppressionPaddedAsync(
            boxes, scores, maxOutputSize, iouThreshold, scoreThreshold, true);
    const after = tf.memory().numTensors;
    expectArraysEqual(await selectedIndices.data(), [3, 0, 5, 0, 0]);
    expectArraysEqual(await validOutputs.data(), 3);
    // The number of tensors should increase by the number of tensors
    // returned (i.e. selectedIndices and validOutputs).
    expect(after).toEqual(before + 2);
  });
});
});<|fim▁end|> | |
<|file_name|>mongodbUpgrade.js<|end_file_name|><|fim▁begin|>// This code is largely borrowed from: github.com/louischatriot/nedb-to-mongodb
// This code moves your data from NeDB to MongoDB
// You will first need to create the MongoDB connection in your /routes/config.json file
// You then need to ensure your MongoDB Database has been created.
// ** IMPORTANT **
// There are no duplication checks in place. Please only run this script once.
// ** IMPORTANT **
const Nedb = require('nedb');
const mongodb = require('mongodb');
const async = require('async');
const path = require('path');
const common = require('../routes/common');
const config = common.read_config();
// Shared NeDB handle, re-assigned by insertKB()/insertUsers() in turn.
let ndb;
// check for DB config
if(!config.settings.database.connection_string){
    console.log('No MongoDB configured. Please see README.md for help');
    process.exit(1);
}
// Connect to the MongoDB database, then migrate articles first and
// users second; exit non-zero only via the connection/load failures above.
mongodb.connect(config.settings.database.connection_string, {}, (err, mdb) => {
    if(err){
        console.log('Couldn\'t connect to the Mongo database');
        console.log(err);
        process.exit(1);
    }
    console.log('Connected to: ' + config.settings.database.connection_string);
    console.log('');
    insertKB(mdb, (KBerr, report) => {
        insertUsers(mdb, (Usererr, report) => {
            if(KBerr || Usererr){
                console.log('There was an error upgrading to MongoDB. Check the console output');
            }else{
                console.log('MongoDB upgrade completed successfully');
                process.exit();
            }
        });
    });
});
// Migrate all knowledge-base articles from the NeDB datafile into the
// MongoDB `kb` collection. Calls `callback(err, report)` node-style.
function insertKB(db, callback){
    const collection = db.collection('kb');
    ndb = new Nedb(path.join(__dirname, 'kb.db'));
    ndb.loadDatabase((err) => {
        if(err){
            console.error('Error while loading the data from the NeDB database');
            console.error(err);
            process.exit(1);
        }
        ndb.find({}, (err, docs) => {
            if(docs.length === 0){
                console.error('The NeDB database contains no data, no work required');
                console.error('You should probably check the NeDB datafile path though!');
            }else{
                console.log('Loaded ' + docs.length + ' article(s) data from the NeDB database');
                console.log('');
            }
            console.log('Inserting articles into MongoDB...');
            async.each(docs, (doc, cb) => {
                console.log('Article inserted: ' + doc.kb_title);
                // check for permalink. If it is not set we set the old NeDB _id to the permalink to stop links from breaking.
                if(!doc.kb_permalink || doc.kb_permalink === ''){
                    doc.kb_permalink = doc._id;
                }
                // delete the old ID and let MongoDB generate new ones
                delete doc._id;
                collection.insert(doc, (err) => { return cb(err); });
            }, (err) => {
                if(err){
                    // console.error here for consistency with insertUsers()
                    console.error('An error happened while inserting data');
                    callback(err, null);
                }else{
                    console.log('All articles successfully inserted');
                    console.log('');
                    callback(null, 'All articles successfully inserted');
                }
            });
        });
    });
}
// Migrate all user records from the NeDB datafile into the MongoDB
// `users` collection. Calls `callback(err, report)` node-style.
function insertUsers(db, callback){
    const usersCollection = db.collection('users');
    ndb = new Nedb(path.join(__dirname, 'users.db'));
    ndb.loadDatabase((loadErr) => {
        if(loadErr){
            console.error('Error while loading the data from the NeDB database');
            console.error(loadErr);
            process.exit(1);
        }
        ndb.find({}, (findErr, userDocs) => {
            if(userDocs.length === 0){
                console.error('The NeDB database contains no data, no work required');
                console.error('You should probably check the NeDB datafile path though!');
            }else{
                console.log('Loaded ' + userDocs.length + ' user(s) data from the NeDB database');
                console.log('');
            }
            console.log('Inserting users into MongoDB...');
            async.each(userDocs, (user, done) => {
                console.log('User inserted: ' + user.user_email);
                // delete the old ID and let MongoDB generate new ones
                delete user._id;
                usersCollection.insert(user, (insertErr) => done(insertErr));
            }, (eachErr) => {
                if(eachErr){
                    console.error('An error happened while inserting user data');
                    callback(eachErr, null);
                }else{
                    console.log('All users successfully inserted');
                    console.log('');
                    callback(null, 'All users successfully inserted');
                }
            });
        });
    });
}
<|file_name|>loop.py<|end_file_name|><|fim▁begin|># RUN: %python -m artiq.compiler.testbench.signature %s >%t
# RUN: OutputCheck %s --file-to-check=%t
# CHECK-L: f: ()->NoneType delay(30 mu)
def f():
    # 10 iterations x 3 machine units each = 30 mu, matching the CHECK above.
    for _ in range(10):
        delay_mu(3)
# CHECK-L: g: ()->NoneType delay(60 mu)
def g():
    # Nested loops: 10 * 2 iterations x 3 mu each = 60 mu total.
    for _ in range(10):
        for _ in range(2):
            delay_mu(3)
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' The validation module provides the capability to perform integrity
checks on an entire collection of Bokeh models.
To create a Bokeh visualization, the central task is to assemble a collection
of model objects from |bokeh.models| into a graph that represents the scene that
should be created in the client. It is possible to do this "by hand", using the
model objects directly. However, to make this process easier, Bokeh provides
higher level interfaces such as |bokeh.plotting| for users.
These interfaces automate common "assembly" steps, to ensure a Bokeh object<|fim▁hole|>To assist with diagnosing potential problems, Bokeh performs a validation step
when outputting a visualization for display. This module contains error and
warning codes as well as helper functions for defining validation checks.
One use case for warnings is to loudly point users in the right direction
when they accidentally do something that they probably didn't mean to do - this
is the case for EMPTY_LAYOUT for instance. Since warnings don't necessarily
indicate misuse, they are configurable. To silence a warning, use the silence
function provided.
.. code-block:: python
>>> from bokeh.core.validation import silence
>>> from bokeh.core.validation.warnings import EMPTY_LAYOUT
>>> silence(EMPTY_LAYOUT, True)
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from .check import check_integrity, silence, silenced
from .decorators import error, warning
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------<|fim▁end|> | graph is created in a consistent, predictable way. However, regardless of what
interface is used, it is possible to put Bokeh models together in ways that are
incomplete, or that do not make sense in some way.
|
<|file_name|>discoveryclient.api.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014 Juniper Networks, Inc. All rights reserved.
*/
var global = require('./global'),
config = process.mainModule.exports.config,
commonUtils = require('../utils/common.utils'),
logutils = require('../utils/log.utils');
var serviceRespData = {};
var webuiIP = null;
function checkIfServiceRespDataExists (service, data)
{
try {
var serviceType = service['serviceType'];
var svcData = serviceRespData[serviceType]['data'][serviceType];
if (null == svcData) {
return false;
}
var svcCnt = svcData.length;
var dataCnt = data[serviceType].length;
if (svcCnt != dataCnt) {
return false;
}
for (var i = 0; i < svcCnt; i++) {
if (svcData[i]['ip-address'] !=
data[serviceType][i]['ip-address']) {
return false;
}
if (svcData[i]['port'] != data[serviceType][i]['port']) {
return false;
}
}
} catch(e) {
return false;
}
return true;
}
// Caches the latest discovery response for a service type in the
// module-level `serviceRespData` map. Responses without a TTL are ignored.
function storeServiceRespData (service, data)
{
    if ((null == service) || (null == data) || (null == data['ttl'])) {
        return;
    }
    var serviceType = service['serviceType'];
    if (null == serviceRespData[serviceType]) {
        serviceRespData[serviceType] = {};
    }
    if (false == checkIfServiceRespDataExists(service, data)) {
        /* Log Only if change happens */
        logutils.logger.debug("DiscService Response Updated by process:" +
                              process.pid + " " + JSON.stringify(data));
    }
    // Always overwrite the cache, even when unchanged (refreshes TTL data).
    serviceRespData[serviceType]['service'] = service;
    serviceRespData[serviceType]['data'] = data;
}
// Returns the whole cached discovery-response map (all service types).
function getServiceRespDataList ()
{
    return serviceRespData;
}
/* Function: getDiscServiceByServiceType
   This function uses load balancing internally
   Always it returns the first server IP/Port combination in the service list and
   pushes that at the end, such that next time when new request comes then this
   server should be requested at last, as we have already sent the request to
   this server.
 */
function getDiscServiceByServiceType (serviceType)
{
    if (null != serviceRespData[serviceType]) {
        try {
            // Round-robin: rotate the list by one (shift + push).
            var service =
                serviceRespData[serviceType]['data'][serviceType].shift();
            serviceRespData[serviceType]['data'][serviceType].push(service);
            return service;
        } catch(e) {
        }
    }
    //logutils.logger.error("Unknown Service Type Request Rxed in " +
    //                      "getDiscServiceByServiceType(): " + serviceType);
    return null;
}
// Maps a UI-level API-server label to its discovery service type and
// returns the next (round-robin) server entry for it, or null for an
// unknown label. (Removed unused locals `service`/`respObj`.)
function getDiscServiceByApiServerType (apiServerType)
{
    var serviceType = null;
    switch (apiServerType) {
    case global.label.OPS_API_SERVER:
    case global.label.OPSERVER:
        serviceType = global.DISC_SERVICE_TYPE_OP_SERVER;
        break;
    case global.label.VNCONFIG_API_SERVER:
    case global.label.API_SERVER:
        serviceType = global.DISC_SERVICE_TYPE_API_SERVER;
        break;
    case global.label.DNS_SERVER:
        serviceType = global.DISC_SERVICE_TYPE_DNS_SERVER;
        break;
    default:
        // Unknown API server label: nothing to look up.
        return null;
    }
    return getDiscServiceByServiceType(serviceType);
}
function processDiscoveryServiceResponseMsg (msg)
{
if (null == msg) {
return;
}
msg = JSON.parse(msg.toString());
if (msg && msg['serviceResponse']) {
storeServiceRespData(msg['serviceResponse']['service'],
msg['serviceResponse']['data']);
}
}
// Maps a server label to its discovery service type, or null if unknown.
// BUGFIX: removed a duplicate `case global.label.OPS_API_SERVER:` — the
// second occurrence was dead code (the first case always matched first).
function getServerTypeByServerName (serverName)
{
    switch (serverName) {
    case global.label.OPS_API_SERVER:
        return global.DISC_SERVICE_TYPE_OP_SERVER;
    case global.label.VNCONFIG_API_SERVER:
        return global.DISC_SERVICE_TYPE_API_SERVER;
    case global.label.DNS_SERVER:
        return global.DISC_SERVICE_TYPE_DNS_SERVER;
    default:
        return null;
    }
}
// Notifies the job server (via the parent process IPC channel) that the
// main web server has finished starting up.
function sendWebServerReadyMessage ()
{
    var payload = JSON.stringify({
        jobType: global.STR_MAIN_WEB_SERVER_READY
    });
    process.send({
        cmd: global.STR_SEND_TO_JOB_SERVER,
        reqData: payload
    });
}
// Asks the job server (via parent-process IPC) to subscribe to discovery
// updates for the given service type.
function sendDiscSubscribeMsgToJobServer (serverType)
{
    var payload = JSON.stringify({
        jobType: global.STR_DISC_SUBSCRIBE_MSG,
        serverType: serverType
    });
    process.send({
        cmd: global.STR_SEND_TO_JOB_SERVER,
        reqData: payload
    });
}
// Subscribes on demand for discovery updates for the server behind
// `apiName` — directly when running inside the job server, or by IPC
// when running in the main web server.
function sendDiscSubMessageOnDemand (apiName)
{
    var myId = process.mainModule.exports['myIdentity'];
    var servType = getServerTypeByServerName(apiName);
    if (null == servType) {
        logutils.logger.error("Unknown Discovery serviceType in " +
                              "sendDiscSubMessageOnDemand() : " + servType);
        return;
    }
    if (global.service.MAINSEREVR == myId) {
        sendDiscSubscribeMsgToJobServer(servType);
    } else {
        try {
            // BUGFIX: declare `discServ` locally; it was previously assigned
            // without var/let and leaked as an implicit global.
            var discServ = require('../jobs/core/discoveryservice.api');
            discServ.subscribeDiscoveryServiceOnDemand(servType);
        } catch(e) {
            logutils.logger.error('Module discoveryservice.api can not be ' +
                                  'found');
        }
    }
}
// Removes the (presumably failing) server identified by params.url/.port
// from the cached list for `apiName`'s service type and rewrites `params`
// to point at the next available server. Returns the updated params, or
// null when there is no alternative server (or on any lookup failure).
function resetServicesByParams (params, apiName)
{
    var serviceType = getServerTypeByServerName(apiName);
    if (null == serviceType) {
        return null;
    }
    try {
        var servData = serviceRespData[serviceType]['data'][serviceType];
        var servCnt = servData.length;
        if (servCnt <= 1) {
            /* Only one/no server, so no need to do any params update, as no other
             * server available
             */
            return null;
        }
        // Drop the matching entry, then retarget params at the new head.
        for (var i = 0; i < servCnt; i++) {
            if ((servData[i]['ip-address'] == params['url']) &&
                (servData[i]['port'] == params['port'])) {
                serviceRespData[serviceType]['data'][serviceType].splice(i, 1);
                break;
            }
        }
        params['url'] =
            serviceRespData[serviceType]['data'][serviceType][0]['ip-address'];
        params['port'] =
            serviceRespData[serviceType]['data'][serviceType][0]['port'];
    } catch(e) {
        logutils.logger.error("In resetServicesByParams(): exception occurred" +
                              " " + e);
        return null;
    }
    return params;
}
// HTTP handler: returns the cached discovery-response map as JSON.
function getDiscServiceRespDataList (req, res, appData)
{
    commonUtils.handleJSONResponse(null, res, getServiceRespDataList());
}
function setWebUINodeIP (ip)
{
if (null != ip) {
webuiIP = ip;
}
}
// Returns the recorded webui node IP (null until setWebUINodeIP is called).
// Removed the unused `ip` parameter — callers pass no arguments.
function getWebUINodeIP ()
{
    return webuiIP;
}
// Public module interface.
exports.resetServicesByParams = resetServicesByParams;
exports.storeServiceRespData = storeServiceRespData;
exports.getServiceRespDataList = getServiceRespDataList;
exports.getDiscServiceByApiServerType = getDiscServiceByApiServerType;
exports.getDiscServiceByServiceType = getDiscServiceByServiceType;
exports.processDiscoveryServiceResponseMsg = processDiscoveryServiceResponseMsg;
exports.sendWebServerReadyMessage = sendWebServerReadyMessage;
exports.sendDiscSubMessageOnDemand = sendDiscSubMessageOnDemand;
exports.getDiscServiceRespDataList = getDiscServiceRespDataList;
exports.setWebUINodeIP = setWebUINodeIP;
exports.getWebUINodeIP = getWebUINodeIP;
serviceType = global.DISC_SERVICE_TYPE_OP_SERVER; |
<|file_name|>conversation.py<|end_file_name|><|fim▁begin|>"""
Support for functionality to have conversations with Home Assistant.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/conversation/
"""
import logging
import re
import warnings
import voluptuous as vol
from homeassistant import core<|fim▁hole|>
REQUIREMENTS = ['fuzzywuzzy==0.12.0']

# Service-call payload key holding the text to parse.
ATTR_TEXT = 'text'

DOMAIN = 'conversation'

# Matches e.g. "turn kitchen light on" -> name="kitchen light", command="on".
REGEX_TURN_COMMAND = re.compile(r'turn (?P<name>(?: |\w)+) (?P<command>\w+)')

SERVICE_PROCESS = 'process'

# Text is required and lower-cased before matching.
SERVICE_PROCESS_SCHEMA = vol.Schema({
    vol.Required(ATTR_TEXT): vol.All(cv.string, vol.Lower),
})

CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({}),
}, extra=vol.ALLOW_EXTRA)
def setup(hass, config):
    """Register the process service."""
    warnings.filterwarnings('ignore', module='fuzzywuzzy')
    from fuzzywuzzy import process as fuzzyExtract
    logger = logging.getLogger(__name__)

    def process(service):
        """Parse text into commands."""
        text = service.data[ATTR_TEXT]
        match = REGEX_TURN_COMMAND.match(text)

        if not match:
            logger.error("Unable to process: %s", text)
            return

        name, command = match.groups()
        entities = {state.entity_id: state.name for state in hass.states.all()}
        # BUGFIX: extractOne returns None when no candidate scores above
        # score_cutoff; indexing it directly raised a TypeError.
        result = fuzzyExtract.extractOne(name, entities, score_cutoff=65)
        if result is None:
            logger.error(
                "Could not find entity id %s from text %s", name, text)
            return
        entity_ids = result[2]

        if not entity_ids:
            logger.error(
                "Could not find entity id %s from text %s", name, text)
            return

        if command == 'on':
            hass.services.call(core.DOMAIN, SERVICE_TURN_ON, {
                ATTR_ENTITY_ID: entity_ids,
            }, blocking=True)
        elif command == 'off':
            hass.services.call(core.DOMAIN, SERVICE_TURN_OFF, {
                ATTR_ENTITY_ID: entity_ids,
            }, blocking=True)
        else:
            logger.error('Got unsupported command %s from text %s',
                         command, text)

    hass.services.register(
        DOMAIN, SERVICE_PROCESS, process, schema=SERVICE_PROCESS_SCHEMA)
    return True
ATTR_ENTITY_ID, SERVICE_TURN_OFF, SERVICE_TURN_ON)
import homeassistant.helpers.config_validation as cv |
<|file_name|>app_config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Project-wide application configuration.
DO NOT STORE SECRETS, PASSWORDS, ETC. IN THIS FILE.
They will be exposed to users. Use environment variables instead.
See get_secrets() below for a fast way to access them.
"""
import os
"""
NAMES
"""
# Project name used for display
PROJECT_NAME = 'quotable'<|fim▁hole|>
# Project name in urls
# Use dashes, not underscores!
PROJECT_SLUG = 'quotable'
# The name of the repository containing the source
REPOSITORY_NAME = 'quotable'
REPOSITORY_URL = '[email protected]:nprapps/%s.git' % REPOSITORY_NAME
REPOSITORY_ALT_URL = None # '[email protected]:nprapps/%s.git' % REPOSITORY_NAME'
# The name to be used in paths on the server
PROJECT_FILENAME = 'quotable'
"""
DEPLOYMENT
"""
FILE_SERVER = 'tools.apps.npr.org'
S3_BUCKET = 'tools.apps.npr.org'
ASSETS_S3_BUCKET = 'assets.apps.npr.org'
# These variables will be set at runtime. See configure_targets() below
DEBUG = True
"""
COPY EDITING
"""
COPY_GOOGLE_DOC_KEY = '0AlXMOHKxzQVRdHZuX1UycXplRlBfLVB0UVNldHJYZmc'
"""
SHARING
"""
PROJECT_DESCRIPTION = 'An opinionated project template for (mostly) server-less apps.'
SHARE_URL = 'http://%s/%s/' % (S3_BUCKET, PROJECT_SLUG)
TWITTER = {
'TEXT': PROJECT_NAME,
'URL': SHARE_URL,
# Will be resized to 120x120, can't be larger than 1MB
'IMAGE_URL': ''
}
FACEBOOK = {
'TITLE': PROJECT_NAME,
'URL': SHARE_URL,
'DESCRIPTION': PROJECT_DESCRIPTION,
# Should be square. No documented restrictions on size
'IMAGE_URL': TWITTER['IMAGE_URL'],
'APP_ID': '138837436154588'
}
GOOGLE = {
# Thumbnail image for Google News / Search.
# No documented restrictions on resolution or size
'IMAGE_URL': TWITTER['IMAGE_URL']
}
NPR_DFP = {
'STORY_ID': '203618536',
'TARGET': 'News_NPR_News_Investigations',
'ENVIRONMENT': 'NPRTEST',
'TESTSERVER': 'true'
}
"""
SERVICES
"""
GOOGLE_ANALYTICS_ID = 'UA-5828686-4'<|fim▁end|> | |
<|file_name|>muting.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse, HttpRequest
from typing import Optional
import ujson
from django.utils.translation import ugettext as _
from zerver.lib.actions import do_mute_topic, do_unmute_topic
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.response import json_success, json_error
from zerver.lib.topic_mutes import topic_is_muted
from zerver.lib.streams import (
access_stream_by_id,
access_stream_by_name,
access_stream_for_unmute_topic_by_id,
access_stream_for_unmute_topic_by_name,
check_for_exactly_one_stream_arg,
)
from zerver.lib.validator import check_int
from zerver.models import get_stream, Stream, UserProfile
def mute_topic(user_profile: UserProfile,
               stream_id: Optional[int],
               stream_name: Optional[str],
               topic_name: str) -> HttpResponse:
    """Mute `topic_name` for the user, resolving the stream by name when
    a name was supplied, otherwise by id. Errors if already muted."""
    if stream_name is None:
        assert stream_id is not None
        (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
    else:
        (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)

    if topic_is_muted(user_profile, stream.id, topic_name):
        return json_error(_("Topic already muted"))

    do_mute_topic(user_profile, stream, recipient, topic_name)
    return json_success()
def unmute_topic(user_profile: UserProfile,
                 stream_id: Optional[int],
                 stream_name: Optional[str],
                 topic_name: str) -> HttpResponse:
    """Unmute `topic_name` for the user; stream is resolved by name when
    given, otherwise by id. Errors when the topic is not muted."""
    error = _("Topic is not muted")
    if stream_name is not None:
        stream = access_stream_for_unmute_topic_by_name(user_profile, stream_name, error)
    else:
        assert stream_id is not None
        stream = access_stream_for_unmute_topic_by_id(user_profile, stream_id, error)
    if not topic_is_muted(user_profile, stream.id, topic_name):
        return json_error(error)
    do_unmute_topic(user_profile, stream, topic_name)
    return json_success()
@has_request_variables
def update_muted_topic(request: HttpRequest,
                       user_profile: UserProfile,
                       stream_id: Optional[int]=REQ(validator=check_int, default=None),
                       stream: Optional[str]=REQ(default=None),
                       topic: str=REQ(),
                       op: str=REQ()) -> HttpResponse:
    """Dispatch a mute/unmute request based on `op` ('add' or 'remove')."""
    check_for_exactly_one_stream_arg(stream_id=stream_id, stream=stream)
    if op == 'add':
        return mute_topic(
            user_profile=user_profile,
            stream_id=stream_id,
            stream_name=stream,
            topic_name=topic,
        )
    elif op == 'remove':
        return unmute_topic(
            user_profile=user_profile,
            stream_id=stream_id,
            stream_name=stream,
            topic_name=topic,
        )
    # BUGFIX: an unknown op previously fell through and returned None,
    # which Django turns into a 500; report a client error instead.
    return json_error(_("Invalid op"))
<|file_name|>move_generating.rs<|end_file_name|><|fim▁begin|>extern crate pleco;
use pleco::SQ;
use pleco::core::*;
use pleco::core::piece_move::*;
use pleco::board::{Board,RandBoard};
/// Generated `Captures` moves must all be real captures (promotions
/// excepted, since a promotion may or may not capture).
#[test]
fn test_movegen_captures() {
    // Nine random, not-in-check positions.
    let vec = RandBoard::default().no_check().many(9);
    vec.iter().for_each(|b| {
        let moves = b.generate_moves_of_type(GenTypes::Captures);
        for m in moves {
            if !m.is_promo() {
                assert!(m.is_capture());
                assert!(b.captured_piece(m).is_real());
            }
        }
    })
}
/// Generated `Quiets` moves must not capture anything (promotions and
/// castles excepted from the check).
#[test]
fn test_movegen_quiets() {
    // Six random, not-in-check positions.
    let vec = RandBoard::default().no_check().many(6);
    vec.iter().for_each(|b| {
        let moves = b.generate_moves_of_type(GenTypes::Quiets);
        for m in moves {
            if !m.is_promo() && !m.is_castle() {
                assert!(!m.is_capture());
                assert!(!b.captured_piece(m).is_real());
            }
        }
    })
}
/// Smoke test: `QuietChecks` generation must not panic on random boards
/// (no assertions on the produced moves).
#[test]
fn test_movegen_quiet_checks() {
    let vec = RandBoard::default().no_check().many(5);
    vec.iter().for_each(|b| {
        b.generate_moves_of_type(GenTypes::QuietChecks);
    })
}
// Testing with no flags and bit input
// Asserted layout: low 6 bits = source square, next 6 bits = destination
// square; the remaining high bits are flag bits (all zero here, which
// the predicates below report as a plain quiet move).
#[test]
fn bit_move_position() {
    let bits: u16 = 0b0000111011010000;
    let bit_move = BitMove::new(bits);
    assert_eq!(bit_move.get_src().0, 0b010000);
    assert_eq!(bit_move.get_dest().0, 0b111011);
    assert!(bit_move.is_quiet_move());
    assert!(!bit_move.is_promo());
    assert!(!bit_move.is_capture());
    assert!(!bit_move.is_castle());
    assert!(!bit_move.is_king_castle());
    assert!(!bit_move.is_queen_castle());
    assert!(!bit_move.is_double_push().0);
    assert!(!bit_move.is_en_passant());
}
/// The starting position has exactly 20 legal moves:
/// 8 single + 8 double pawn pushes, and 2 moves for each knight.
#[test]
fn test_opening_position() {
    let b = Board::start_pos();
    let moves = b.generate_moves();
    assert_eq!(moves.len(), (8 * 2) + (2 * 2));
}
/// For every flag variant, each `BitMove` predicate must answer true
/// exactly for the variants it corresponds to.
#[test]
fn test_move_permutations() {
    let moves = all_move_flags();
    for move_flag in moves {
        let pre_move_info = PreMoveInfo {
            src: SQ(9),
            dst: SQ(42),
            flags: move_flag,
        };
        let move_info = BitMove::init(pre_move_info);
        assert_eq!(move_flag == MoveFlag::QuietMove, move_info.is_quiet_move());
        assert_eq!(
            move_flag == MoveFlag::Castle { king_side: true } ||
                move_flag == MoveFlag::Castle { king_side: false },
            move_info.is_castle()
        );
        assert_eq!(
            move_flag == MoveFlag::Castle { king_side: true },
            move_info.is_king_castle()
        );
        assert_eq!(
            move_flag == MoveFlag::Castle { king_side: false },
            move_info.is_queen_castle()
        );
        assert_eq!(
            move_flag == MoveFlag::DoublePawnPush,
            move_info.is_double_push().0
        );
        assert_eq!(
            move_flag == MoveFlag::Capture { ep_capture: true },
            move_info.is_en_passant()
        );
    }
}
// Test all Promotion Moves for correct Piece Placement
#[test]
fn bit_move_promoions() {
let move_flag = MoveFlag::Promotion {
capture: true,
prom: PieceType::P,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::Q);
let move_flag = MoveFlag::Promotion {
capture: true,
prom: PieceType::N,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::N);
let move_flag = MoveFlag::Promotion {
capture: true,
prom: PieceType::B,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::B);
let move_flag = MoveFlag::Promotion {
capture: true,
prom: PieceType::R,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::R);
let move_flag = MoveFlag::Promotion {
capture: true,
prom: PieceType::K,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::Q);
let move_flag = MoveFlag::Promotion {
capture: true,
prom: PieceType::Q,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::Q);
let move_flag = MoveFlag::Promotion {
capture: false,
prom: PieceType::P,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(!move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::Q);
let move_flag = MoveFlag::Promotion {
capture: false,
prom: PieceType::N,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(!move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::N);
let move_flag = MoveFlag::Promotion {
capture: false,
prom: PieceType::B,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(!move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::B);
let move_flag = MoveFlag::Promotion {
capture: false,
prom: PieceType::R,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(!move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::R);
let move_flag = MoveFlag::Promotion {
capture: false,
prom: PieceType::K,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(!move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::Q);
let move_flag = MoveFlag::Promotion {
capture: false,
prom: PieceType::Q,
};
let pre_move_info = PreMoveInfo {
src: SQ(9),
dst: SQ(42),
flags: move_flag,
};
let move_info = BitMove::init(pre_move_info);
assert!(!move_info.is_capture());
assert!(move_info.is_promo());
assert_eq!(move_info.promo_piece(), PieceType::Q);
}
fn all_move_flags() -> Vec<MoveFlag> {
let mut move_flags = Vec::new();
move_flags.push(MoveFlag::Promotion {
capture: true,
prom: PieceType::P,
});
move_flags.push(MoveFlag::Promotion {
capture: true,
prom: PieceType::N,
});
move_flags.push(MoveFlag::Promotion {
capture: true,
prom: PieceType::B,
});
move_flags.push(MoveFlag::Promotion {
capture: true,
prom: PieceType::R,
});
move_flags.push(MoveFlag::Promotion {
capture: true,
prom: PieceType::K,
});
move_flags.push(MoveFlag::Promotion {
capture: true,
prom: PieceType::Q,
});
move_flags.push(MoveFlag::Promotion {
capture: false,
prom: PieceType::P,
});
move_flags.push(MoveFlag::Promotion {
capture: false,
prom: PieceType::N,
});
move_flags.push(MoveFlag::Promotion {
capture: false,
prom: PieceType::B,
});
move_flags.push(MoveFlag::Promotion {
capture: false,
prom: PieceType::R,
});
move_flags.push(MoveFlag::Promotion {
capture: false,
prom: PieceType::K,
});
move_flags.push(MoveFlag::Promotion {
capture: false,
prom: PieceType::Q,
});
move_flags.push(MoveFlag::Castle { king_side: true });
move_flags.push(MoveFlag::Castle { king_side: false });
move_flags.push(MoveFlag::Capture { ep_capture: true });
move_flags.push(MoveFlag::Capture { ep_capture: false });
move_flags.push(MoveFlag::DoublePawnPush);
move_flags.push(MoveFlag::QuietMove);
move_flags
}<|fim▁end|> | |
<|file_name|>websocket.py<|end_file_name|><|fim▁begin|>"""Implementation of the WebSocket protocol.
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server.
WebSockets are supported in the current versions of all major browsers,
although older versions that do not support WebSockets are still in use
(refer to http://caniuse.com/websockets for details).
This module implements the final version of the WebSocket protocol as
defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_. Certain
browser versions (notably Safari 5.x) implemented an earlier draft of
the protocol (known as "draft 76") and are not compatible with this module.
.. versionchanged:: 4.0
Removed support for the draft 76 protocol version.
"""
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import base64
import collections
import hashlib
import os
import struct
import tornado.escape
import tornado.web
import zlib
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str, to_unicode
from tornado import httpclient, httputil
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado import simple_httpclient
from tornado.tcpclient import TCPClient
from tornado.util import _websocket_mask, PY3
if PY3:
    from urllib.parse import urlparse  # py3
    xrange = range
else:
    from urlparse import urlparse  # py2
class WebSocketError(Exception):
    """Base class for all websocket-related errors raised by this module."""
    pass
class WebSocketClosedError(WebSocketError):
    """Raised by operations on a closed connection.

    .. versionadded:: 3.2
    """
    pass
class WebSocketHandler(tornado.web.RequestHandler):
"""Subclass this class to create a basic WebSocket handler.
Override `on_message` to handle incoming messages, and use
`write_message` to send messages to the client. You can also
override `open` and `on_close` to handle opened and closed
connections.
See http://dev.w3.org/html5/websockets/ for details on the
JavaScript interface. The protocol is specified at
http://tools.ietf.org/html/rfc6455.
Here is an example WebSocket handler that echos back all received messages
back to the client:
.. testcode::
class EchoWebSocket(tornado.websocket.WebSocketHandler):
def open(self):
print("WebSocket opened")
def on_message(self, message):
self.write_message(u"You said: " + message)
def on_close(self):
print("WebSocket closed")
.. testoutput::
:hide:
WebSockets are not standard HTTP connections. The "handshake" is
HTTP, but after the handshake, the protocol is
message-based. Consequently, most of the Tornado HTTP facilities
are not available in handlers of this type. The only communication
methods available to you are `write_message()`, `ping()`, and
`close()`. Likewise, your request handler class should implement
`open()` method rather than ``get()`` or ``post()``.
If you map the handler above to ``/websocket`` in your application, you can
invoke it in JavaScript with::
var ws = new WebSocket("ws://localhost:8888/websocket");
ws.onopen = function() {
ws.send("Hello, world");
};
ws.onmessage = function (evt) {
alert(evt.data);
};
This script pops up an alert box that says "You said: Hello, world".
Web browsers allow any site to open a websocket connection to any other,
instead of using the same-origin policy that governs other network
access from javascript. This can be surprising and is a potential
security hole, so since Tornado 4.0 `WebSocketHandler` requires
applications that wish to receive cross-origin websockets to opt in
by overriding the `~WebSocketHandler.check_origin` method (see that
method's docs for details). Failure to do so is the most likely
cause of 403 errors when making a websocket connection.
When using a secure websocket connection (``wss://``) with a self-signed
certificate, the connection from a browser may fail because it wants
to show the "accept this certificate" dialog but has nowhere to show it.
You must first visit a regular HTML page using the same certificate
to accept it before the websocket connection will succeed.
"""
def __init__(self, application, request, **kwargs):
    """Initialize per-connection state prior to the websocket handshake."""
    super(WebSocketHandler, self).__init__(application, request, **kwargs)
    # No protocol object or detached stream exists until get() completes
    # the upgrade handshake.
    self.ws_connection = None
    self.stream = None
    # Close status reported by the peer, if any (see on_close docs).
    self.close_code = None
    self.close_reason = None
    self._on_close_called = False
@tornado.web.asynchronous
def get(self, *args, **kwargs):
    """Perform the HTTP -> WebSocket upgrade handshake.

    Validates the Upgrade/Connection/Origin headers, detaches the
    underlying stream from the HTTP machinery, and hands it to a
    `WebSocketProtocol` implementation.  Responds 400/403/426 on
    failure instead of opening a websocket.
    """
    # Saved so the protocol can forward them to self.open() once the
    # handshake completes (see WebSocketProtocol13._accept_connection).
    self.open_args = args
    self.open_kwargs = kwargs

    # Upgrade header should be present and should be equal to WebSocket
    if self.request.headers.get("Upgrade", "").lower() != 'websocket':
        self.clear()
        self.set_status(400)
        log_msg = "Can \"Upgrade\" only to \"WebSocket\"."
        self.finish(log_msg)
        gen_log.debug(log_msg)
        return

    # Connection header should be upgrade.
    # Some proxy servers/load balancers
    # might mess with it.
    headers = self.request.headers
    # Header may be a comma-separated list; accept 'upgrade' anywhere in it.
    connection = map(lambda s: s.strip().lower(),
                     headers.get("Connection", "").split(","))
    if 'upgrade' not in connection:
        self.clear()
        self.set_status(400)
        log_msg = "\"Connection\" must be \"Upgrade\"."
        self.finish(log_msg)
        gen_log.debug(log_msg)
        return

    # Handle WebSocket Origin naming convention differences
    # The difference between version 8 and 13 is that in 8 the
    # client sends a "Sec-Websocket-Origin" header and in 13 it's
    # simply "Origin".
    if "Origin" in self.request.headers:
        origin = self.request.headers.get("Origin")
    else:
        origin = self.request.headers.get("Sec-Websocket-Origin", None)

    # If there was an origin header, check to make sure it matches
    # according to check_origin. When the origin is None, we assume it
    # did not come from a browser and that it can be passed on.
    if origin is not None and not self.check_origin(origin):
        self.clear()
        self.set_status(403)
        log_msg = "Cross origin websockets not allowed"
        self.finish(log_msg)
        gen_log.debug(log_msg)
        return

    # Take the raw stream away from the HTTP layer; from here on the
    # connection speaks the websocket framing protocol.
    self.stream = self.request.connection.detach()
    self.stream.set_close_callback(self.on_connection_close)

    self.ws_connection = self.get_websocket_protocol()
    if self.ws_connection:
        # A Content-Type header is meaningless on a 101 response.
        self.clear_header('Content-Type')
        self.ws_connection.accept_connection()
    else:
        # Unsupported Sec-WebSocket-Version: advertise the versions we
        # accept, per RFC 6455 section 4.4.
        if not self.stream.closed():
            self.stream.write(tornado.escape.utf8(
                "HTTP/1.1 426 Upgrade Required\r\n"
                "Sec-WebSocket-Version: 7, 8, 13\r\n\r\n"))
            self.stream.close()
def write_message(self, message, binary=False):
    """Sends the given message to the client of this Web Socket.

    The message may be either a string or a dict (which will be
    encoded as json). If the ``binary`` argument is false, the
    message will be sent as utf8; in binary mode any byte string
    is allowed.

    If the connection is already closed, raises `WebSocketClosedError`.

    .. versionchanged:: 3.2
       `WebSocketClosedError` was added (previously a closed connection
       would raise an `AttributeError`)

    .. versionchanged:: 4.3
       Returns a `.Future` which can be used for flow control.
    """
    conn = self.ws_connection
    if conn is None:
        raise WebSocketClosedError()
    if isinstance(message, dict):
        # Dicts are serialized to JSON before transmission.
        message = tornado.escape.json_encode(message)
    return conn.write_message(message, binary=binary)
def select_subprotocol(self, subprotocols):
    """Invoked when a new WebSocket requests specific subprotocols.

    ``subprotocols`` is a list of strings identifying the subprotocols
    proposed by the client.  Override this method and return one of
    those strings to select it; returning ``None`` selects no
    subprotocol.  Failing to select one does not abort the connection
    by itself, although clients may close the connection if none of
    their proposed subprotocols was selected.
    """
    # Base implementation never negotiates a subprotocol.
    return None
def get_compression_options(self):
    """Override to return compression options for the connection.

    Returning ``None`` (the default) disables compression; returning a
    dict (even an empty one) enables it.  The dict's contents may be
    used to control the memory and CPU usage of the compression, but no
    such options are currently implemented.

    .. versionadded:: 4.1
    """
    # Compression is opt-in; subclasses return a dict to enable it.
    return None
def open(self, *args, **kwargs):
    """Invoked when a new WebSocket is opened.

    The arguments to `open` are extracted from the `tornado.web.URLSpec`
    regular expression, exactly like the arguments to
    `tornado.web.RequestHandler.get`.  The default implementation does
    nothing; subclasses override it to set up per-connection state.
    """
    pass
def on_message(self, message):
    """Handle incoming messages on the WebSocket.

    Abstract: every concrete handler must override this method.
    """
    raise NotImplementedError
def ping(self, data):
    """Send a ping frame to the remote end.

    Raises `WebSocketClosedError` if the connection is already closed.
    """
    conn = self.ws_connection
    if conn is None:
        raise WebSocketClosedError()
    conn.write_ping(data)
def on_pong(self, data):
    """Invoked when the response to a ping frame is received.

    No-op by default; subclasses may override to observe liveness.
    """
    pass
def on_close(self):
    """Invoked when the WebSocket is closed.

    If the connection was closed cleanly and a status code or reason
    phrase was supplied, these values are available as the attributes
    ``self.close_code`` and ``self.close_reason``.

    .. versionchanged:: 4.0
       Added ``close_code`` and ``close_reason`` attributes.
    """
    pass
def close(self, code=None, reason=None):
    """Closes this Web Socket.

    Once the close handshake is successful the socket will be closed.

    ``code`` may be a numeric status code, taken from the values
    defined in `RFC 6455 section 7.4.1
    <https://tools.ietf.org/html/rfc6455#section-7.4.1>`_.
    ``reason`` may be a textual message about why the connection is
    closing.  These values are made available to the client, but are
    not otherwise interpreted by the websocket protocol.

    .. versionchanged:: 4.0
       Added the ``code`` and ``reason`` arguments.
    """
    conn = self.ws_connection
    if conn:
        conn.close(code, reason)
        # Drop the reference so later calls become no-ops.
        self.ws_connection = None
def check_origin(self, origin):
    """Override to enable support for allowing alternate origins.

    The ``origin`` argument is the value of the ``Origin`` HTTP header,
    the url responsible for initiating this request.  This method is not
    called for clients that do not send this header; such requests are
    always allowed (because all browsers that implement WebSockets
    support this header, and non-browser clients do not have the same
    cross-site security concerns).

    Should return True to accept the request or False to reject it.
    By default, rejects all requests with an origin on a host other
    than this one.

    This is a security protection against cross site scripting attacks
    on browsers, since WebSockets are allowed to bypass the usual
    same-origin policies and don't use CORS headers.

    To accept all cross-origin traffic (which was the default prior to
    Tornado 4.0), simply override this method to always return true::

        def check_origin(self, origin):
            return True

    To allow connections from any subdomain of your site, you might
    do something like::

        def check_origin(self, origin):
            parsed_origin = urllib.parse.urlparse(origin)
            return parsed_origin.netloc.endswith(".mydomain.com")

    .. versionadded:: 4.0
    """
    # Compare the origin's host:port (case-insensitively) against our
    # own Host header; ports must match too.
    origin_host = urlparse(origin).netloc.lower()
    return origin_host == self.request.headers.get("Host")
def set_nodelay(self, value):
    """Set the no-delay flag for this stream.

    By default, small messages may be delayed and/or combined to
    minimize the number of packets sent.  This can sometimes cause
    200-500ms delays due to the interaction between Nagle's algorithm
    and TCP delayed ACKs.  To reduce this delay (at the expense of
    possibly increasing bandwidth usage), call
    ``self.set_nodelay(True)`` once the websocket connection is
    established.

    See `.BaseIOStream.set_nodelay` for additional details.

    .. versionadded:: 3.1
    """
    # Delegates to the detached IOStream set up during the handshake.
    self.stream.set_nodelay(value)
def on_connection_close(self):
    """React to the underlying stream closing.

    Forwards the event to the protocol object (once) and makes sure the
    user-facing ``on_close`` callback fires exactly once.
    """
    conn = self.ws_connection
    if conn:
        conn.on_connection_close()
        self.ws_connection = None
    if not self._on_close_called:
        self._on_close_called = True
        self.on_close()
def send_error(self, *args, **kwargs):
    """Route errors appropriately depending on handshake progress."""
    if self.stream is not None:
        # If we get an uncaught exception during the handshake, the
        # stream has been detached from the HTTP machinery, so we have
        # no choice but to abruptly close the connection.
        # TODO: for uncaught exceptions after the handshake,
        # we can close the connection more gracefully.
        self.stream.close()
    else:
        # Pre-handshake: the normal HTTP error response still works.
        super(WebSocketHandler, self).send_error(*args, **kwargs)
def get_websocket_protocol(self):
    """Return a protocol object for the client's advertised version.

    Returns ``None`` for unsupported versions; get() then answers
    426 Upgrade Required.
    """
    websocket_version = self.request.headers.get("Sec-WebSocket-Version")
    if websocket_version not in ("7", "8", "13"):
        return None
    return WebSocketProtocol13(
        self, compression_options=self.get_compression_options(),
        response_headers=self._headers)
<|fim▁hole|>
def _wrap_method(method):
    # Wrap a RequestHandler output method so it only works before the
    # handshake detaches the connection (i.e. while self.stream is None).
    def _disallow_for_websocket(self, *args, **kwargs):
        if self.stream is None:
            method(self, *args, **kwargs)
        else:
            raise RuntimeError("Method not supported for Web Sockets")
    return _disallow_for_websocket


# Monkey-patch the plain-HTTP output methods on WebSocketHandler with the
# guarded versions: usable during the handshake, errors afterwards.
for method in ["write", "redirect", "set_header", "set_cookie",
               "set_status", "flush", "finish"]:
    setattr(WebSocketHandler, method,
            _wrap_method(getattr(WebSocketHandler, method)))
class WebSocketProtocol(object):
    """Base class for WebSocket protocol versions.

    Holds the handler/stream wiring and termination flags shared by all
    concrete protocol implementations.
    """
    def __init__(self, handler):
        self.handler = handler
        self.request = handler.request
        self.stream = handler.stream
        # Each side tracks its own termination separately; the socket is
        # torn down for good only when both are set (or on abort).
        self.server_terminated = False
        self.client_terminated = False

    def _run_callback(self, callback, *args, **kwargs):
        """Invoke ``callback``, aborting the connection on any exception."""
        try:
            callback(*args, **kwargs)
        except Exception:
            app_log.error("Uncaught exception in %s",
                          self.request.path, exc_info=True)
            self._abort()

    def on_connection_close(self):
        # Stream died underneath us: treat it like an abort.
        self._abort()

    def _abort(self):
        """Instantly abort the WebSocket connection by closing the socket."""
        self.client_terminated = True
        self.server_terminated = True
        self.stream.close()  # forcibly tear down the connection
        self.close()  # let the subclass cleanup
class _PerMessageDeflateCompressor(object):
def __init__(self, persistent, max_wbits):
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
# There is no symbolic constant for the minimum wbits value.
if not (8 <= max_wbits <= zlib.MAX_WBITS):
raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
max_wbits, zlib.MAX_WBITS)
self._max_wbits = max_wbits
if persistent:
self._compressor = self._create_compressor()
else:
self._compressor = None
def _create_compressor(self):
return zlib.compressobj(tornado.web.GZipContentEncoding.GZIP_LEVEL,
zlib.DEFLATED, -self._max_wbits)
def compress(self, data):
compressor = self._compressor or self._create_compressor()
data = (compressor.compress(data) +
compressor.flush(zlib.Z_SYNC_FLUSH))
assert data.endswith(b'\x00\x00\xff\xff')
return data[:-4]
class _PerMessageDeflateDecompressor(object):
def __init__(self, persistent, max_wbits):
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
if not (8 <= max_wbits <= zlib.MAX_WBITS):
raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
max_wbits, zlib.MAX_WBITS)
self._max_wbits = max_wbits
if persistent:
self._decompressor = self._create_decompressor()
else:
self._decompressor = None
def _create_decompressor(self):
return zlib.decompressobj(-self._max_wbits)
def decompress(self, data):
decompressor = self._decompressor or self._create_decompressor()
return decompressor.decompress(data + b'\x00\x00\xff\xff')
class WebSocketProtocol13(WebSocketProtocol):
    """Implementation of the WebSocket protocol from RFC 6455.

    This class supports versions 7 and 8 of the protocol in addition to the
    final version 13.
    """
    # Bit masks for the first byte of a frame.
    FIN = 0x80
    RSV1 = 0x40
    RSV2 = 0x20
    RSV3 = 0x10
    RSV_MASK = RSV1 | RSV2 | RSV3
    OPCODE_MASK = 0x0f

    def __init__(self, handler, mask_outgoing=False,
                 compression_options=None, response_headers=None):
        WebSocketProtocol.__init__(self, handler)
        self._response_headers = response_headers
        # True on the client side: outgoing frames get a 4-byte mask.
        self.mask_outgoing = mask_outgoing
        # Parser state for the frame currently being received.
        self._final_frame = False
        self._frame_opcode = None
        self._masked_frame = None
        self._frame_mask = None
        self._frame_length = None
        # Reassembly buffer and opcode for a fragmented data message.
        self._fragmented_message_buffer = None
        self._fragmented_message_opcode = None
        # Pending close-timeout handle (set in close()).
        self._waiting = None
        self._compression_options = compression_options
        self._decompressor = None
        self._compressor = None
        self._frame_compressed = None
        # The total uncompressed size of all messages received or sent.
        # Unicode messages are encoded to utf8.
        # Only for testing; subject to change.
        self._message_bytes_in = 0
        self._message_bytes_out = 0
        # The total size of all packets received or sent.  Includes
        # the effect of compression, frame overhead, and control frames.
        self._wire_bytes_in = 0
        self._wire_bytes_out = 0

    def accept_connection(self):
        """Validate the client handshake and complete it.

        Header validation failures surface as ValueError and abort the
        connection.
        """
        try:
            self._handle_websocket_headers()
            self._accept_connection()
        except ValueError:
            gen_log.debug("Malformed WebSocket request received",
                          exc_info=True)
            self._abort()
            return

    def _handle_websocket_headers(self):
        """Verifies all invariant- and required headers

        If a header is missing or have an incorrect value ValueError will be
        raised
        """
        fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
        if not all(map(lambda f: self.request.headers.get(f), fields)):
            raise ValueError("Missing/Invalid WebSocket headers")

    @staticmethod
    def compute_accept_value(key):
        """Computes the value for the Sec-WebSocket-Accept header,
        given the value for Sec-WebSocket-Key.
        """
        sha1 = hashlib.sha1()
        sha1.update(utf8(key))
        sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")  # Magic value
        return native_str(base64.b64encode(sha1.digest()))

    def _challenge_response(self):
        # Accept value echoing this request's Sec-WebSocket-Key.
        return WebSocketProtocol13.compute_accept_value(
            self.request.headers.get("Sec-Websocket-Key"))

    def _accept_connection(self):
        """Send the 101 response and start reading frames.

        Negotiates an optional subprotocol and the permessage-deflate
        extension before writing the switching-protocols response, then
        fires the handler's open() callback.
        """
        subprotocol_header = ''
        subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
        subprotocols = [s.strip() for s in subprotocols.split(',')]
        if subprotocols:
            selected = self.handler.select_subprotocol(subprotocols)
            if selected:
                # The handler must pick one of the client's proposals.
                assert selected in subprotocols
                subprotocol_header = ("Sec-WebSocket-Protocol: %s\r\n"
                                      % selected)

        extension_header = ''
        extensions = self._parse_extensions_header(self.request.headers)
        for ext in extensions:
            if (ext[0] == 'permessage-deflate' and
                    self._compression_options is not None):
                # TODO: negotiate parameters if compression_options
                # specifies limits.
                self._create_compressors('server', ext[1])
                if ('client_max_window_bits' in ext[1] and
                        ext[1]['client_max_window_bits'] is None):
                    # Don't echo an offered client_max_window_bits
                    # parameter with no value.
                    del ext[1]['client_max_window_bits']
                extension_header = ('Sec-WebSocket-Extensions: %s\r\n' %
                                    httputil._encode_header(
                                        'permessage-deflate', ext[1]))
                break

        response_headers = ''
        if self._response_headers is not None:
            # Forward any headers the handler set before the upgrade.
            for header_name, header_value in self._response_headers.get_all():
                response_headers += '%s: %s\r\n' % (header_name, header_value)

        if self.stream.closed():
            self._abort()
            return
        self.stream.write(tornado.escape.utf8(
            "HTTP/1.1 101 Switching Protocols\r\n"
            "Upgrade: websocket\r\n"
            "Connection: Upgrade\r\n"
            "Sec-WebSocket-Accept: %s\r\n"
            "%s%s%s"
            "\r\n" % (self._challenge_response(), subprotocol_header,
                      extension_header, response_headers)))

        self._run_callback(self.handler.open, *self.handler.open_args,
                           **self.handler.open_kwargs)
        self._receive_frame()

    def _parse_extensions_header(self, headers):
        """Return [(name, params_dict), ...] from Sec-WebSocket-Extensions."""
        extensions = headers.get("Sec-WebSocket-Extensions", '')
        if extensions:
            return [httputil._parse_header(e.strip())
                    for e in extensions.split(',')]
        return []

    def _process_server_headers(self, key, headers):
        """Process the headers sent by the server to this client connection.

        'key' is the websocket handshake challenge/response key.
        """
        assert headers['Upgrade'].lower() == 'websocket'
        assert headers['Connection'].lower() == 'upgrade'
        accept = self.compute_accept_value(key)
        assert headers['Sec-Websocket-Accept'] == accept

        extensions = self._parse_extensions_header(headers)
        for ext in extensions:
            if (ext[0] == 'permessage-deflate' and
                    self._compression_options is not None):
                self._create_compressors('client', ext[1])
            else:
                raise ValueError("unsupported extension %r", ext)

    def _get_compressor_options(self, side, agreed_parameters):
        """Converts a websocket agreed_parameters set to keyword arguments
        for our compressor objects.
        """
        options = dict(
            persistent=(side + '_no_context_takeover') not in agreed_parameters)
        wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
        if wbits_header is None:
            options['max_wbits'] = zlib.MAX_WBITS
        else:
            options['max_wbits'] = int(wbits_header)
        return options

    def _create_compressors(self, side, agreed_parameters):
        """Instantiate the deflate (de)compressor pair for this connection."""
        # TODO: handle invalid parameters gracefully
        allowed_keys = set(['server_no_context_takeover',
                            'client_no_context_takeover',
                            'server_max_window_bits',
                            'client_max_window_bits'])
        for key in agreed_parameters:
            if key not in allowed_keys:
                raise ValueError("unsupported compression parameter %r" % key)
        # Our compressor uses this side's parameters; the decompressor
        # uses the peer's.
        other_side = 'client' if (side == 'server') else 'server'
        self._compressor = _PerMessageDeflateCompressor(
            **self._get_compressor_options(side, agreed_parameters))
        self._decompressor = _PerMessageDeflateDecompressor(
            **self._get_compressor_options(other_side, agreed_parameters))

    def _write_frame(self, fin, opcode, data, flags=0):
        """Serialize one frame (header, optional mask, payload) and send it."""
        if fin:
            finbit = self.FIN
        else:
            finbit = 0
        frame = struct.pack("B", finbit | opcode | flags)
        l = len(data)
        if self.mask_outgoing:
            mask_bit = 0x80
        else:
            mask_bit = 0
        # Payload-length encoding per RFC 6455: 7 bits, 16 bits, or 64 bits.
        if l < 126:
            frame += struct.pack("B", l | mask_bit)
        elif l <= 0xFFFF:
            frame += struct.pack("!BH", 126 | mask_bit, l)
        else:
            frame += struct.pack("!BQ", 127 | mask_bit, l)
        if self.mask_outgoing:
            mask = os.urandom(4)
            data = mask + _websocket_mask(mask, data)
        frame += data
        self._wire_bytes_out += len(frame)
        try:
            return self.stream.write(frame)
        except StreamClosedError:
            self._abort()

    def write_message(self, message, binary=False):
        """Sends the given message to the client of this Web Socket."""
        if binary:
            opcode = 0x2
        else:
            opcode = 0x1
        message = tornado.escape.utf8(message)
        assert isinstance(message, bytes)
        self._message_bytes_out += len(message)
        flags = 0
        if self._compressor:
            message = self._compressor.compress(message)
            # RSV1 marks the message as compressed (permessage-deflate).
            flags |= self.RSV1
        return self._write_frame(True, opcode, message, flags=flags)

    def write_ping(self, data):
        """Send ping frame."""
        assert isinstance(data, bytes)
        self._write_frame(True, 0x9, data)

    def _receive_frame(self):
        # Kick off the read of the next frame's two-byte header.
        try:
            self.stream.read_bytes(2, self._on_frame_start)
        except StreamClosedError:
            self._abort()

    def _on_frame_start(self, data):
        """Parse the two-byte frame header and schedule the next read."""
        self._wire_bytes_in += len(data)
        header, payloadlen = struct.unpack("BB", data)
        self._final_frame = header & self.FIN
        reserved_bits = header & self.RSV_MASK
        self._frame_opcode = header & self.OPCODE_MASK
        # Opcodes 0x8-0xF are control frames.
        self._frame_opcode_is_control = self._frame_opcode & 0x8
        if self._decompressor is not None and self._frame_opcode != 0:
            # RSV1 flags a compressed message; continuation frames (opcode
            # 0) inherit the flag from the first frame.
            self._frame_compressed = bool(reserved_bits & self.RSV1)
            reserved_bits &= ~self.RSV1
        if reserved_bits:
            # client is using as-yet-undefined extensions; abort
            self._abort()
            return
        self._masked_frame = bool(payloadlen & 0x80)
        payloadlen = payloadlen & 0x7f
        if self._frame_opcode_is_control and payloadlen >= 126:
            # control frames must have payload < 126
            self._abort()
            return
        try:
            if payloadlen < 126:
                self._frame_length = payloadlen
                if self._masked_frame:
                    self.stream.read_bytes(4, self._on_masking_key)
                else:
                    self.stream.read_bytes(self._frame_length,
                                           self._on_frame_data)
            elif payloadlen == 126:
                # Extended 16-bit length follows.
                self.stream.read_bytes(2, self._on_frame_length_16)
            elif payloadlen == 127:
                # Extended 64-bit length follows.
                self.stream.read_bytes(8, self._on_frame_length_64)
        except StreamClosedError:
            self._abort()

    def _on_frame_length_16(self, data):
        """Handle the 16-bit extended payload length."""
        self._wire_bytes_in += len(data)
        self._frame_length = struct.unpack("!H", data)[0]
        try:
            if self._masked_frame:
                self.stream.read_bytes(4, self._on_masking_key)
            else:
                self.stream.read_bytes(self._frame_length, self._on_frame_data)
        except StreamClosedError:
            self._abort()

    def _on_frame_length_64(self, data):
        """Handle the 64-bit extended payload length."""
        self._wire_bytes_in += len(data)
        self._frame_length = struct.unpack("!Q", data)[0]
        try:
            if self._masked_frame:
                self.stream.read_bytes(4, self._on_masking_key)
            else:
                self.stream.read_bytes(self._frame_length, self._on_frame_data)
        except StreamClosedError:
            self._abort()

    def _on_masking_key(self, data):
        """Store the 4-byte client mask, then read the masked payload."""
        self._wire_bytes_in += len(data)
        self._frame_mask = data
        try:
            self.stream.read_bytes(self._frame_length,
                                   self._on_masked_frame_data)
        except StreamClosedError:
            self._abort()

    def _on_masked_frame_data(self, data):
        # Don't touch _wire_bytes_in; we'll do it in _on_frame_data.
        self._on_frame_data(_websocket_mask(self._frame_mask, data))

    def _on_frame_data(self, data):
        """Dispatch a complete (unmasked) frame payload.

        Handles fragmentation reassembly, then schedules the next frame
        read unless the client has terminated.
        """
        self._wire_bytes_in += len(data)
        if self._frame_opcode_is_control:
            # control frames may be interleaved with a series of fragmented
            # data frames, so control frames must not interact with
            # self._fragmented_*
            if not self._final_frame:
                # control frames must not be fragmented
                self._abort()
                return
            opcode = self._frame_opcode
        elif self._frame_opcode == 0:  # continuation frame
            if self._fragmented_message_buffer is None:
                # nothing to continue
                self._abort()
                return
            self._fragmented_message_buffer += data
            if self._final_frame:
                opcode = self._fragmented_message_opcode
                data = self._fragmented_message_buffer
                self._fragmented_message_buffer = None
        else:  # start of new data message
            if self._fragmented_message_buffer is not None:
                # can't start new message until the old one is finished
                self._abort()
                return
            if self._final_frame:
                opcode = self._frame_opcode
            else:
                self._fragmented_message_opcode = self._frame_opcode
                self._fragmented_message_buffer = data

        if self._final_frame:
            self._handle_message(opcode, data)

        if not self.client_terminated:
            self._receive_frame()

    def _handle_message(self, opcode, data):
        """Decode a complete message and invoke the appropriate callback."""
        if self.client_terminated:
            return

        if self._frame_compressed:
            data = self._decompressor.decompress(data)

        if opcode == 0x1:
            # UTF-8 data
            self._message_bytes_in += len(data)
            try:
                decoded = data.decode("utf-8")
            except UnicodeDecodeError:
                # Invalid UTF-8 in a text frame is a protocol error.
                self._abort()
                return
            self._run_callback(self.handler.on_message, decoded)
        elif opcode == 0x2:
            # Binary data
            self._message_bytes_in += len(data)
            self._run_callback(self.handler.on_message, data)
        elif opcode == 0x8:
            # Close
            self.client_terminated = True
            if len(data) >= 2:
                self.handler.close_code = struct.unpack('>H', data[:2])[0]
            if len(data) > 2:
                self.handler.close_reason = to_unicode(data[2:])
            # Echo the received close code, if any (RFC 6455 section 5.5.1).
            self.close(self.handler.close_code)
        elif opcode == 0x9:
            # Ping: answer with a pong carrying the same payload.
            self._write_frame(True, 0xA, data)
        elif opcode == 0xA:
            # Pong
            self._run_callback(self.handler.on_pong, data)
        else:
            # Unknown opcode; abort per RFC 6455.
            self._abort()

    def close(self, code=None, reason=None):
        """Closes the WebSocket connection."""
        if not self.server_terminated:
            if not self.stream.closed():
                if code is None and reason is not None:
                    code = 1000  # "normal closure" status code
                if code is None:
                    close_data = b''
                else:
                    close_data = struct.pack('>H', code)
                if reason is not None:
                    close_data += utf8(reason)
                self._write_frame(True, 0x8, close_data)
            self.server_terminated = True
        if self.client_terminated:
            # Both sides are done; tear down the stream immediately.
            if self._waiting is not None:
                self.stream.io_loop.remove_timeout(self._waiting)
                self._waiting = None
            self.stream.close()
        elif self._waiting is None:
            # Give the client a few seconds to complete a clean shutdown,
            # otherwise just close the connection.
            self._waiting = self.stream.io_loop.add_timeout(
                self.stream.io_loop.time() + 5, self._abort)
class WebSocketClientConnection(simple_httpclient._HTTPConnection):
    """WebSocket client connection.

    This class should not be instantiated directly; use the
    `websocket_connect` function instead.
    """
    def __init__(self, io_loop, request, on_message_callback=None,
                 compression_options=None):
        self.compression_options = compression_options
        self.connect_future = TracebackFuture()
        self.protocol = None
        self.read_future = None
        self.read_queue = collections.deque()
        # Random nonce sent as Sec-WebSocket-Key; the server's
        # Sec-WebSocket-Accept must be derived from it.
        self.key = base64.b64encode(os.urandom(16))
        self._on_message_callback = on_message_callback
        self.close_code = self.close_reason = None

        # Rewrite ws(s):// to http(s):// so the HTTP client machinery can
        # open the connection; the upgrade itself happens via the headers
        # set below.
        scheme, sep, rest = request.url.partition(':')
        scheme = {'ws': 'http', 'wss': 'https'}[scheme]
        request.url = scheme + sep + rest
        request.headers.update({
            'Upgrade': 'websocket',
            'Connection': 'Upgrade',
            'Sec-WebSocket-Key': self.key,
            'Sec-WebSocket-Version': '13',
        })
        if self.compression_options is not None:
            # Always offer to let the server set our max_wbits (and even though
            # we don't offer it, we will accept a client_no_context_takeover
            # from the server).
            # TODO: set server parameters for deflate extension
            # if requested in self.compression_options.
            request.headers['Sec-WebSocket-Extensions'] = (
                'permessage-deflate; client_max_window_bits')

        self.tcp_client = TCPClient(io_loop=io_loop)
        super(WebSocketClientConnection, self).__init__(
            io_loop, None, request, lambda: None, self._on_http_response,
            104857600, self.tcp_client, 65536, 104857600)

    def close(self, code=None, reason=None):
        """Closes the websocket connection.

        ``code`` and ``reason`` are documented under
        `WebSocketHandler.close`.

        .. versionadded:: 3.2

        .. versionchanged:: 4.0
           Added the ``code`` and ``reason`` arguments.
        """
        if self.protocol is not None:
            self.protocol.close(code, reason)
            self.protocol = None

    def on_connection_close(self):
        # Fail the pending connect (if any), deliver the end-of-stream
        # sentinel to readers, and release the TCP client.
        if not self.connect_future.done():
            self.connect_future.set_exception(StreamClosedError())
        self.on_message(None)
        self.tcp_client.close()
        super(WebSocketClientConnection, self).on_connection_close()

    def _on_http_response(self, response):
        # Reached only when the handshake did not produce a 101 response.
        if not self.connect_future.done():
            if response.error:
                self.connect_future.set_exception(response.error)
            else:
                self.connect_future.set_exception(WebSocketError(
                    "Non-websocket response"))

    def headers_received(self, start_line, headers):
        """Complete the handshake when the server answers 101."""
        if start_line.code != 101:
            # Not an upgrade; fall back to normal HTTP response handling.
            return super(WebSocketClientConnection, self).headers_received(
                start_line, headers)

        self.headers = headers
        self.protocol = self.get_websocket_protocol()
        self.protocol._process_server_headers(self.key, self.headers)
        self.protocol._receive_frame()

        if self._timeout is not None:
            self.io_loop.remove_timeout(self._timeout)
            self._timeout = None

        self.stream = self.connection.detach()
        self.stream.set_close_callback(self.on_connection_close)
        # Once we've taken over the connection, clear the final callback
        # we set on the http request.  This deactivates the error handling
        # in simple_httpclient that would otherwise interfere with our
        # ability to see exceptions.
        self.final_callback = None

        self.connect_future.set_result(self)

    def write_message(self, message, binary=False):
        """Sends a message to the WebSocket server."""
        return self.protocol.write_message(message, binary)

    def read_message(self, callback=None):
        """Reads a message from the WebSocket server.

        If on_message_callback was specified at WebSocket
        initialization, this function will never return messages

        Returns a future whose result is the message, or None
        if the connection is closed.  If a callback argument
        is given it will be called with the future when it is
        ready.
        """
        # Only one outstanding read is allowed at a time.
        assert self.read_future is None
        future = TracebackFuture()
        if self.read_queue:
            # A message arrived before anyone asked; hand it over now.
            future.set_result(self.read_queue.popleft())
        else:
            self.read_future = future
        if callback is not None:
            self.io_loop.add_future(future, callback)
        return future

    def on_message(self, message):
        # Deliver to the callback, a waiting read_message future, or
        # queue the message for a later read (in that priority order).
        if self._on_message_callback:
            self._on_message_callback(message)
        elif self.read_future is not None:
            self.read_future.set_result(message)
            self.read_future = None
        else:
            self.read_queue.append(message)

    def on_pong(self, data):
        # Pongs are ignored on the client side.
        pass

    def get_websocket_protocol(self):
        # Clients must mask outgoing frames (RFC 6455), hence
        # mask_outgoing=True.
        return WebSocketProtocol13(self, mask_outgoing=True,
                                   compression_options=self.compression_options)
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
                      on_message_callback=None, compression_options=None):
    """Client-side websocket support.

    Takes a url and returns a Future whose result is a
    `WebSocketClientConnection`.

    ``compression_options`` is interpreted in the same way as the
    return value of `.WebSocketHandler.get_compression_options`.

    The connection supports two styles of operation. In the coroutine
    style, the application typically calls
    `~.WebSocketClientConnection.read_message` in a loop::

        conn = yield websocket_connect(url)
        while True:
            msg = yield conn.read_message()
            if msg is None: break
            # Do something with msg

    In the callback style, pass an ``on_message_callback`` to
    ``websocket_connect``. In both styles, a message of ``None``
    indicates that the connection has been closed.

    .. versionchanged:: 3.2
       Also accepts ``HTTPRequest`` objects in place of urls.

    .. versionchanged:: 4.1
       Added ``compression_options`` and ``on_message_callback``.
       The ``io_loop`` argument is deprecated.
    """
    if io_loop is None:
        io_loop = IOLoop.current()
    if isinstance(url, httpclient.HTTPRequest):
        # A pre-built request cannot also carry a separate timeout.
        assert connect_timeout is None
        request = url
        # Copy and convert the headers dict/object (see comments in
        # AsyncHTTPClient.fetch)
        request.headers = httputil.HTTPHeaders(request.headers)
    else:
        request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
    # Fill in unspecified options from the client defaults.
    request = httpclient._RequestProxy(
        request, httpclient.HTTPRequest._DEFAULTS)
    conn = WebSocketClientConnection(io_loop, request,
                                     on_message_callback=on_message_callback,
                                     compression_options=compression_options)
    if callback is not None:
        io_loop.add_future(conn.connect_future, callback)
    return conn.connect_future
<|file_name|>app.py<|end_file_name|><|fim▁begin|>#Camera App
#copyright (c) 2015 Tyler Spadgenske
# MIT License
import sys
import pygame
import picamera
import io
import yuv2rgb
import os
import time
from subprocess import Popen
class Stream():
def __init__(self):
self.mode = 'capture'
self.deleted = False
self.uploading = False
self.no_files = False
#Get current photo name index
try:
index_file = open('/home/pi/index.dat', 'r')
except:
#Create new file if needed
index_file = open('/home/pi/index.dat', 'w+')
index_file.write('0')
index_file.close()
index_file = open('/home/pi/index.dat')
print 'NO INDEX FILE. CREATED /home/pi/index.dat'
self.index = int(index_file.readline())
index_file.close()
#Set screen to SPI
os.environ["SDL_FBDEV"] = "/dev/fb1"
os.environ["SDL_MOUSEDEV"] = "/dev/input/touchscreen" #Use touchscreen instead of event0
os.environ["SDL_MOUSEDRV"] = "TSLIB"
#COnfigure camera
self.camera = picamera.PiCamera()
self.camera.resolution = (320, 480)
self.camera.rotation = 90<|fim▁hole|> self.yuv = bytearray(320 * 480 * 3 / 2)
#Setup window
self.screen = pygame.display.set_mode((320, 480), pygame.FULLSCREEN)
pygame.mouse.set_visible(False)
#Setup buttons
self.capture = pygame.image.load('/home/pi/tyos/apps/camera/camera.png')
self.gallery = pygame.image.load('/home/pi/tyos/apps/camera/images/gallery.png')
self.door = pygame.image.load('/home/pi/tyos/apps/camera/images/door.png')
self.right = pygame.image.load('/home/pi/tyos/apps/camera/images/right.png')
self.left = pygame.image.load('/home/pi/tyos/apps/camera/images/left.png')
self.home = pygame.image.load('/home/pi/tyos/apps/camera/images/home.png')
self.upload = pygame.image.load('/home/pi/tyos/apps/camera/images/upload.png')
self.delete = pygame.image.load('/home/pi/tyos/apps/camera/images/trash.png')
self.deleted_image = pygame.image.load('/home/pi/tyos/apps/camera/images/deleted.png')
self.uploading_image = pygame.image.load('/home/pi/tyos/apps/camera/images/uploading.png')
self.no_files_image = pygame.image.load('/home/pi/tyos/apps/camera/images/nofiles.png')
    def display(self):
        """Main UI loop: draw the screen for the current mode ('capture' or
        'gallery'), then service touch events until the user exits via the
        right-hand button in capture mode.  Never returns normally.

        NOTE: this file is Python 2 (print statement below)."""
        while True:
            if self.mode == 'gallery':
                # Gallery mode: show the selected photo plus navigation and
                # delete/upload buttons.
                self.screen.blit(self.image_in_view, (0,0))
                self.screen.blit(self.left, (20, 410))
                self.screen.blit(self.right, (240, 410))
                self.screen.blit(self.home, (125, 400))
                self.screen.blit(self.delete, (5, 5))
                self.screen.blit(self.upload, (40, 5))
                if self.deleted:
                    # Flash the "deleted" badge for ~3 seconds after a delete.
                    self.screen.blit(self.deleted_image, (79, 200))
                    if time.time() - self.delete_time > 3:
                        self.deleted = False
                if self.uploading:
                    # Flash the "uploading" badge for ~6 seconds after an upload.
                    self.screen.blit(self.uploading_image, (79, 200))
                    if time.time() - self.uploading_time > 6:
                        self.uploading = False
            if self.mode == 'capture':
                #Get camera stream
                self.stream = io.BytesIO() # Capture into in-memory stream
                self.camera.capture(self.stream, use_video_port=True, format='raw')
                self.stream.seek(0)
                self.stream.readinto(self.yuv)  # stream -> YUV buffer
                self.stream.close()
                # Convert the raw YUV frame to RGB for pygame display.
                yuv2rgb.convert(self.yuv, self.rgb, 320, 480)
                #Create pygame image from screen and blit it
                img = pygame.image.frombuffer(self.rgb[0:(320 * 480 * 3)], (320, 480), 'RGB')
                self.screen.blit(img, (0,0))
                #Blit buttons
                self.screen.blit(self.capture, (125, 400))
                self.screen.blit(self.gallery, (20, 415))
                self.screen.blit(self.door, (240, 410))
                if self.no_files:
                    # "No files" badge shown for ~3 s after opening an empty gallery.
                    self.screen.blit(self.no_files_image, (79, 200))
                    if time.time() - self.files_time > 3:
                        self.no_files = False
            pygame.display.update()
            #Handle events
            for event in pygame.event.get():
                if event.type == pygame.MOUSEBUTTONUP:
                    if self.mode == 'gallery':
                        # Top-left corner: delete the photo currently on screen.
                        if event.pos[1] < 40 and event.pos[0] < 35:
                            self.deleted = True
                            self.delete_time = time.time()
                            os.remove('/home/pi/Photos/' + self.images[self.current_image])
                            self.current_image = 0
                            self.images = os.listdir('/home/pi/Photos/')
                            if len(self.images) == 0:
                                # Nothing left to show: fall back to capture mode.
                                self.mode = 'capture'
                                self.no_files = True
                                self.files_time = time.time()
                        # Next to it (x 35..75): upload current photo to Dropbox.
                        if event.pos[1] < 40 and event.pos[0] > 35 and event.pos[0] < 75:
                            self.uploading = True
                            self.uploading_time = time.time()
                            cam = Popen(['/home/pi/Dropbox-Uploader/./dropbox_uploader.sh', 'upload', '/home/pi/Photos/' +
                                        self.images[self.current_image], self.images[self.current_image]])
                    # Bottom row of three buttons (y in 400..470).
                    if event.pos[1] > 400 and event.pos[1] < 470:
                        # Middle button: shutter (capture mode) / home (gallery mode).
                        if event.pos[0] > 125 and event.pos[0] < 195:
                            if self.mode == 'capture':
                                self.camera.capture('/home/pi/Photos/' + str(self.index) + '.jpg')
                                self.index += 1
                            if self.mode == 'gallery':
                                self.mode = 'capture'
                        # Left button: open gallery / previous photo.
                        if event.pos[0] < 70:
                            if self.mode == 'capture':
                                self.mode = 'gallery'
                                self.current_image = 0
                                self.images = os.listdir('/home/pi/Photos/')
                                if len(self.images) == 0:
                                    self.mode = 'capture'
                                    self.no_files = True
                                    self.files_time = time.time()
                                else:
                                    self.image_in_view = pygame.image.load('/home/pi/Photos/' + self.images[self.current_image])
                            if self.mode == 'gallery':
                                # NOTE(review): this branch also runs immediately after
                                # entering the gallery above, so the first photo shown is
                                # the last one in the directory -- confirm this is intended.
                                self.current_image -= 1
                                if self.current_image == -1:
                                    self.current_image = len(self.images) - 1
                                self.image_in_view = pygame.image.load('/home/pi/Photos/' + self.images[self.current_image])
                        # Right button: exit app (capture mode) / next photo (gallery).
                        if event.pos[0] > 255:
                            if self.mode == 'capture':
                                print 'exiting...'
                                # Persist the next photo index for the following run.
                                os.remove('/home/pi/index.dat')
                                new = open('/home/pi/index.dat', 'w+')
                                new.write(str(self.index))
                                new.close()
                                # Hand control back to the main TYOS launcher.
                                cam = Popen(['sudo', 'python', '/home/pi/tyos/src/main.py'])
                                pygame.quit()
                                sys.exit()
                            if self.mode == 'gallery':
                                if self.current_image == len(self.images) - 1:
                                    self.current_image = 0
                                else:
                                    self.current_image += 1
                                self.image_in_view = pygame.image.load('/home/pi/Photos/' + self.images[self.current_image])
# Entry point: build the camera app object and hand control to its UI loop.
if __name__ == '__main__':
    q = Stream()
    q.display()
# Buffers for viewfinder data
self.rgb = bytearray(320 * 480 * 3) |
<|file_name|>e2e.cli.snippet.js<|end_file_name|><|fim▁begin|>"use strict";
var path = require("path");
var assert = require("chai").assert;
var request = require("supertest");
var fork = require("child_process").fork;
var index = path.resolve(__dirname + "/../../../index.js");
describe.skip("E2E CLI Snippet test", function () {
// use `mocha --timeout` option instead
//this.timeout(5000);
var bs, options;
before(function (done) {
bs = fork(index, ["start", "--logLevel=silent"]);
bs.on("message", function (data) {
options = data.options;
done();
});
bs.send({send: "options"});
});
after(function (done) {
bs.kill("SIGINT");
setTimeout(done, 200); // Allow server to close successfully
});
it("can serve the client JS", function (done) {
request(options.urls.local)
.get(options.scriptPaths.versioned)
.expect(200)
.end(function (err, res) {
assert.include(res.text, "Connected to BrowserSync");
done();
});<|fim▁hole|> });
});<|fim▁end|> | |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import print_function
# pylint: disable=C0103
"""
BibEdit CLI tool.
Usage: bibedit [options]
General options::
-h, --help print this help
-V, --version print version number
Options to inspect record history::
--list-revisions [recid] list all revisions of a record
--list-revisions-details [recid] list detailed revisions of a record
--get-revision [recid.revdate] print MARCXML of given record revision
--diff-revisions [recidA.revdateB] [recidC.revdateD] print MARCXML difference between<|fim▁hole|> record A dated B and record C dated D
--revert-to-revision [recid.revdate] submit given record revision to
become current revision
--check-revisions [recid] check if revisions are not corrupted
(* stands for all records)
--fix-revisions [recid] fix revisions that are corrupted
(* stands for all records)
--clean-revisions [recid] clean duplicate revisions
(* stands for all records)
"""
__revision__ = "$Id$"
import sys
import zlib
from invenio.legacy.dbquery import run_sql
from intbitset import intbitset
from invenio.legacy.bibedit.utils import get_marcxml_of_revision_id, \
get_record_revision_ids, get_xml_comparison, record_locked_by_other_user, \
record_locked_by_queue, revision_format_valid_p, save_xml_record, \
split_revid, get_info_of_revision_id, get_record_revisions
from invenio.legacy.bibrecord import create_record, records_identical
def print_usage():
    """Show the command-line usage text (the module docstring)."""
    usage_text = __doc__
    print(usage_text)
def print_version():
    """Show the CVS-style revision string of this module."""
    version_string = __revision__
    print(version_string)
def cli_clean_revisions(recid, dry_run=True, verbose=True):
    """Clean revisions of the given recid, by removing duplicate revisions
    that do not change the content of the record.

    :param recid: record ID as a string, or '*' to process every record
        that has rows in the revision-history table
    :param dry_run: when True (the default), only count what would be
        deleted without touching the database
    :param verbose: when True, print a summary line per cleaned record
    """
    if recid == '*':
        recids = intbitset(run_sql("SELECT DISTINCT id_bibrec FROM hstRECORD"))
    else:
        try:
            recids = [int(recid)]
        except ValueError:
            print('ERROR: record ID must be integer, not %s.' % recid)
            sys.exit(1)
    for recid in recids:
        # Revisions are walked oldest-first so each revision is compared
        # against its immediate predecessor.
        all_revisions = run_sql("SELECT marcxml, job_id, job_name, job_person, job_date FROM hstRECORD WHERE id_bibrec=%s ORDER BY job_date ASC", (recid,))
        previous_rec = {}
        deleted_revisions = 0
        for marcxml, job_id, job_name, job_person, job_date in all_revisions:
            try:
                # Stored MARCXML is zlib-compressed; failure to decompress
                # or parse means the revision row is corrupted.
                current_rec = create_record(zlib.decompress(marcxml))[0]
            except Exception:
                print("ERROR: corrupted revisions found. Please run %s --fix-revisions '*'" % sys.argv[0], file=sys.stderr)
                sys.exit(1)
            if records_identical(current_rec, previous_rec):
                # Identical content to the previous revision: a duplicate.
                deleted_revisions += 1
                if not dry_run:
                    run_sql("DELETE FROM hstRECORD WHERE id_bibrec=%s AND job_id=%s AND job_name=%s AND job_person=%s AND job_date=%s", (recid, job_id, job_name, job_person, job_date))
            previous_rec = current_rec
        if verbose and deleted_revisions:
            print("record %s: deleted %s duplicate revisions out of %s" % (recid, deleted_revisions, len(all_revisions)))
    if verbose:
        print("DONE")
def cli_list_revisions(recid, details=False):
    """Print every known revision (=RECID.REVDATE) of record RECID,
    optionally with per-revision task details.
    """
    try:
        record_id = int(recid)
    except ValueError:
        print('ERROR: record ID must be integer, not %s.' % recid)
        sys.exit(1)
    revision_ids = get_record_revision_ids(record_id)
    if details:
        # Detailed listing: fixed-width header followed by one info line
        # per revision.
        header = "%s %s %s %s\n" % ("# Revision".ljust(22), "# Task ID".ljust(15),
                                    "# Author".ljust(15), "# Job Details")
        listing = header + '\n'.join(
            get_info_of_revision_id(revision_id)
            for revision_id in revision_ids)
    else:
        listing = '\n'.join(revision_ids)
    if listing:
        print(listing)
    else:
        print('ERROR: Record %s not found.' % record_id)
def cli_get_revision(revid):
    """Print the MARCXML of record revision REVID (=RECID.REVDATE)."""
    if not revision_format_valid_p(revid):
        print('ERROR: revision %s is invalid; '
              'must be NNN.YYYYMMDDhhmmss.' % revid)
        sys.exit(1)
    marcxml = get_marcxml_of_revision_id(revid)
    if marcxml:
        print(marcxml)
    else:
        print('ERROR: Revision %s not found.' % revid)
def cli_diff_revisions(revid1, revid2):
    """Print the MARCXML difference between record revisions REVID1 and
    REVID2 (each =RECID.REVDATE).
    """
    # Validate both identifiers up front, before fetching anything.
    for candidate in (revid1, revid2):
        if not revision_format_valid_p(candidate):
            print('ERROR: revision %s is invalid; '
                  'must be NNN.YYYYMMDDhhmmss.' % candidate)
            sys.exit(1)
    xml1 = get_marcxml_of_revision_id(revid1)
    if not xml1:
        print('ERROR: Revision %s not found. ' % revid1)
        sys.exit(1)
    xml2 = get_marcxml_of_revision_id(revid2)
    if not xml2:
        print('ERROR: Revision %s not found. ' % revid2)
        sys.exit(1)
    print(get_xml_comparison(revid1, revid2, xml1, xml2))
def cli_revert_to_revision(revid):
    """Schedule record revision REVID (=RECID.REVDATE) for upload so that
    it becomes the current version of the record.
    """
    def abort(message):
        # Print *message* and terminate with a non-zero exit status.
        print(message)
        sys.exit(1)

    if not revision_format_valid_p(revid):
        abort('ERROR: revision %s is invalid; '
              'must be NNN.YYYYMMDDhhmmss.' % revid)
    xml_record = get_marcxml_of_revision_id(revid)
    if xml_record == '':
        abort('ERROR: Revision %s does not exist. ' % revid)
    recid = split_revid(revid)[0]
    # Refuse to touch a record someone else is editing or that has
    # pending upload tasks.
    if record_locked_by_other_user(recid, -1):
        abort('The record is currently being edited. '
              'Please try again in a few minutes.')
    if record_locked_by_queue(recid):
        abort('The record is locked because of unfinished upload tasks. '
              'Please try again in a few minutes.')
    save_xml_record(recid, 0, xml_record)
    print('Your modifications have now been submitted. They will be '
          'processed as soon as the task queue is empty.')
def check_rev(recid, verbose=True, fix=False):
    """Check each stored revision of RECID for zlib corruption.

    Prints '<revid>: ok' per healthy revision when *verbose*; prints
    '<revid>: invalid' for corrupted ones and, when *fix* is set, deletes
    them via fix_rev().
    """
    for rev_recid, job_date in get_record_revisions(recid):
        revid = '%s.%s' % (rev_recid, job_date)
        try:
            get_marcxml_of_revision_id(revid)
        except zlib.error:
            # Compressed MARCXML could not be inflated: corrupted row.
            print('%s: invalid' % revid)
            if fix:
                fix_rev(rev_recid, job_date, verbose)
        else:
            if verbose:
                print('%s: ok' % revid)
def fix_rev(recid, job_date, verbose=True):
    """Remove the revision of record *recid* stamped *job_date* from the
    revision-history table.

    Called for revisions whose compressed MARCXML can no longer be
    decompressed; deleting the row is the only available repair.

    :param recid: record identifier
    :param job_date: exact ``job_date`` timestamp of the corrupted revision
    :param verbose: accepted for signature compatibility with check_rev();
        currently unused
    """
    # The placeholder must not be quoted in the SQL template: run_sql()
    # performs its own parameter binding/escaping, so a quoted "%s" would
    # compare job_date against a doubly-quoted literal and match nothing.
    sql = 'DELETE FROM hstRECORD WHERE id_bibrec = %s AND job_date = %s'
    run_sql(sql, (recid, job_date))
def cli_check_revisions(recid):
    """Check revisions of RECID for corruption ('*' means every record)."""
    if recid != '*':
        check_rev(recid)
        return
    print('Checking all records')
    recids = intbitset(run_sql("SELECT id FROM bibrec ORDER BY id"))
    for position, record_id in enumerate(recids):
        # Progress marker every 1000 records (skipping the very first).
        if position and position % 1000 == 0:
            print(position, 'records processed')
        check_rev(record_id, verbose=False)
def cli_fix_revisions(recid):
    """Delete corrupted revisions of RECID ('*' means every record)."""
    if recid != '*':
        check_rev(recid, fix=True)
        return
    print('Fixing all records')
    recids = intbitset(run_sql("SELECT id FROM bibrec ORDER BY id"))
    for position, record_id in enumerate(recids):
        # Progress marker every 1000 records (skipping the very first).
        if position and position % 1000 == 0:
            print(position, 'records processed')
        check_rev(record_id, verbose=False, fix=True)
def main():
    """Main entry point.

    Dispatches on sys.argv[1] to the cli_* helpers above.

    NOTE(review): because of the ``if not opts: raise IndexError`` guard,
    every command requires at least one option, so the ``recid = '*'``
    fallbacks for --check/--fix/--clean-revisions below appear to be
    unreachable -- callers must pass '*' explicitly.  Confirm before
    changing this behaviour.
    """
    if '--help' in sys.argv or \
       '-h' in sys.argv:
        print_usage()
    elif '--version' in sys.argv or \
         '-V' in sys.argv:
        print_version()
    else:
        try:
            cmd = sys.argv[1]
            opts = sys.argv[2:]
            if not opts:
                raise IndexError
        except IndexError:
            # Missing command or missing options: show usage and fail.
            print_usage()
            sys.exit(1)
        if cmd == '--list-revisions':
            try:
                recid = opts[0]
            except IndexError:
                print_usage()
                sys.exit(1)
            cli_list_revisions(recid, details=False)
        elif cmd == '--list-revisions-details':
            try:
                recid = opts[0]
            except IndexError:
                print_usage()
                sys.exit(1)
            cli_list_revisions(recid, details=True)
        elif cmd == '--get-revision':
            try:
                revid = opts[0]
            except IndexError:
                print_usage()
                sys.exit(1)
            cli_get_revision(revid)
        elif cmd == '--diff-revisions':
            try:
                revid1 = opts[0]
                revid2 = opts[1]
            except IndexError:
                print_usage()
                sys.exit(1)
            cli_diff_revisions(revid1, revid2)
        elif cmd == '--revert-to-revision':
            try:
                revid = opts[0]
            except IndexError:
                print_usage()
                sys.exit(1)
            cli_revert_to_revision(revid)
        elif cmd == '--check-revisions':
            try:
                recid = opts[0]
            except IndexError:
                recid = '*'
            cli_check_revisions(recid)
        elif cmd == '--fix-revisions':
            try:
                recid = opts[0]
            except IndexError:
                recid = '*'
            cli_fix_revisions(recid)
        elif cmd == '--clean-revisions':
            try:
                recid = opts[0]
            except IndexError:
                recid = '*'
            # Unlike the library default, the CLI really deletes duplicates.
            cli_clean_revisions(recid, dry_run=False)
        else:
            print("ERROR: Please specify a command. Please see '--help'.")
            sys.exit(1)
# Invoke the CLI when run as a script.
if __name__ == '__main__':
    main()
<|file_name|>store.js<|end_file_name|><|fim▁begin|>import { applyMiddleware, createStore, compose } from "redux";
//Connect react router with redux
import { syncHistoryWithStore } from "react-router-redux";
import { browserHistory } from "react-router";
import logger from "redux-logger";
import thunk from "redux-thunk";
import promise from "redux-promise-middleware";
import rootReducer from './reducers/index.js';
// axsios<|fim▁hole|>//This is like setting the initial state for thee reducers here instead of doing it directly in the
//reducer function itself.
// const initialState = {
// allData: 123,
// sesssion
// }
const middleware = applyMiddleware(promise(), thunk, logger());
const store = createStore(rootReducer, middleware);
export const history = syncHistoryWithStore(browserHistory, store);
export default store;<|fim▁end|> |
//If making an initial state and passing as a preloadedState to createStore, need to make a reducer
//for each property in the intial state. The reason being the reducer has access to the state. So |
<|file_name|>associate_params.go<|end_file_name|><|fim▁begin|>package model
// ArgBind bind args.
//
// NOTE(review): field semantics inferred from names only -- OpenID and
// OutOpenID look like external open-platform account ids scoped to AppID;
// confirm against callers.
type ArgBind struct {
	OpenID string
	OutOpenID string
	AppID int64
}
// ArgBindInfo bind info args.
type ArgBindInfo struct {
Mid int64
AppID int64
}<|fim▁hole|>type ArgThirdPrizeGrant struct {
Mid int64 `form:"mid" validate:"required"`
PrizeKey int64 `form:"prize_key"`
UniqueNo string `form:"unique_no" validate:"required"`
PrizeType int8 `form:"prize_type" validate:"required"`
Appkey string `form:"appkey" validate:"required"`
Remark string `form:"remark" validate:"required"`
AppID int64
}
// ArgBilibiliPrizeGrant args.
//
// Identifies a prize (PrizeKey), an idempotency token (UniqueNo --
// presumably; verify with callers), and the receiving open account.
type ArgBilibiliPrizeGrant struct {
	PrizeKey string
	UniqueNo string
	OpenID string
	AppID int64
}
// BilibiliPrizeGrantResp resp.
//
// Amount/FullAmount units are not established by this file -- confirm
// (likely currency amounts of the granted prize).
type BilibiliPrizeGrantResp struct {
	Amount float64
	FullAmount float64
	Description string
}
// ArgThirdPrizeGrant prize grant args. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
('My Name', '[email protected]'),
)
MANAGERS = ADMINS
import tempfile, os
from django import contrib
tempdata = tempfile.mkdtemp()
approot = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
adminroot = os.path.join(contrib.__path__[0], 'admin')
DATABASES = {
'default': {
'NAME': os.path.join(tempdata, 'signalqueue-test.db'),
'TEST_NAME': os.path.join(tempdata, 'signalqueue-test.db'),
'ENGINE': 'django.db.backends.sqlite3',
'USER': '',
'PASSWORD': '',
}
}
TIME_ZONE = 'America/New_York'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
MEDIA_ROOT = os.path.join(approot, 'static')
MEDIA_URL = '/face/'
STATIC_ROOT = os.path.join(adminroot, 'static', 'admin')[0]
STATIC_URL = '/staticfiles/'
ADMIN_MEDIA_PREFIX = '/admin-media/'
ROOT_URLCONF = 'signalqueue.settings.urlconf'
TEMPLATE_DIRS = (
os.path.join(approot, 'templates'),
os.path.join(adminroot, 'templates'),
os.path.join(adminroot, 'templates', 'admin'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.gzip.GZipMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.debug",
#"django.core.context_processors.i18n", this is AMERICA
"django.core.context_processors.media",
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.staticfiles',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'django_nose',
'djcelery',
'delegate',
'signalqueue',
)
LOGGING = dict(<|fim▁hole|> formatters={ 'standard': { 'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s' }, },
handlers={
'default': { 'level':'DEBUG', 'class':'logging.StreamHandler', 'formatter':'standard', },
'nil': { 'level':'DEBUG', 'class':'django.utils.log.NullHandler', },
},
loggers={
'signalqueue': { 'handlers': ['default'], 'level': 'INFO', 'propagate': False },
},
root={ 'handlers': ['default'], 'level': 'INFO', 'propagate': False },
)
SQ_QUEUES = {
'default': { # you need at least one dict named 'default' in SQ_QUEUES
'ENGINE': 'signalqueue.worker.backends.RedisSetQueue', # required - full path to a QueueBase subclass
'INTERVAL': 30, # 1/3 sec
'OPTIONS': dict(port=8356),
},
'listqueue': {
'ENGINE': 'signalqueue.worker.backends.RedisQueue',
'INTERVAL': 30, # 1/3 sec
'OPTIONS': dict(port=8356),
},
'db': {
'ENGINE': 'signalqueue.worker.backends.DatabaseQueueProxy',
'INTERVAL': 30, # 1/3 sec
'OPTIONS': dict(app_label='signalqueue',
modl_name='EnqueuedSignal'),
},
'celery': {
'ENGINE': 'signalqueue.worker.celeryqueue.CeleryQueue',
'INTERVAL': 30, # 1/3 sec
'OPTIONS': dict(celery_queue_name='inactive',
transport='redis', port=8356),
},
}
SQ_ADDITIONAL_SIGNALS=['signalqueue.tests']
SQ_WORKER_PORT = 11201
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
try:
from kombu import Queue
except ImportError:
pass
else:
CELERY_DEFAULT_QUEUE = 'default'
CELERY_DEFAULT_ROUTING_KEY = 'default'
CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
CELERY_QUEUES = (
Queue('default', routing_key='default.#'),
Queue('yodogg', routing_key='yodogg.#'),
)
CELERY_ALWAYS_EAGER = True
BROKER_URL = 'redis://localhost:8356/0'
BROKER_HOST = "localhost"
BROKER_BACKEND = "redis"
REDIS_PORT = 8356
REDIS_HOST = "localhost"
BROKER_USER = ""
BROKER_PASSWORD = ""
BROKER_VHOST = "0"
REDIS_DB = 0
REDIS_CONNECT_RETRY = True
CELERY_SEND_EVENTS = True
CELERY_RESULT_BACKEND = "redis://localhost:8356/0"
CELERY_TASK_RESULT_EXPIRES = 10
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
try:
import djcelery
except ImportError:
pass
else:
djcelery.setup_loader()
# package path-extension snippet.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)<|fim▁end|> | version=1,
disable_existing_loggers=False, |
<|file_name|>FontWarnings.js<|end_file_name|><|fim▁begin|>/*
* ../../../..//localization/cy/FontWarnings.js
*
* Copyright (c) 2009-2018 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*************************************************************<|fim▁hole|> *
* Copyright (c) 2009-2018 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
MathJax.Localization.addTranslation("cy", "FontWarnings", {
version: "2.7.5",
isLoaded: true,
strings: {}
});
MathJax.Ajax.loadComplete("[MathJax]/localization/cy/FontWarnings.js");<|fim▁end|> | *
* MathJax/localization/cy/FontWarnings.js |
<|file_name|>pyng.py<|end_file_name|><|fim▁begin|>__author__ = "davide"
import struct
import socket
import argparse
import sys
from datetime import datetime
import time
from collections import defaultdict
from signal import signal, SIGINT, SIG_IGN
ICMP_ECHO_REQUEST = 8, 0
ICMP_ECHO_RESPONSE = 0, 0
__all__ = ["ICMPPacket", "Pinger",
"ICMP_ECHO_REQUEST", "ICMP_ECHO_RESPONSE"]
# Python module for pinging hosts
class ICMPPacket:
"""Class that represents an ICMP struct_packet"""
__slots__ = "_data", "_checksum", "_type"
def __init__(self, packetType=ICMP_ECHO_RESPONSE, data=""):
"""Initialize the struct_packet
@param packetType: tuple
"""
self.packetType = packetType
self.data = data
self._checksum = -1<|fim▁hole|>
@property
def packetType(self):
"""16 bits that represent the struct_packet type, code"""
return self._type
@packetType.setter
def packetType(self, packet_type):
if len(packet_type) != 2:
raise ValueError("type must be a 2-element tuple")
if any(not 0 <= val < (1 << 8) for val in packet_type):
raise ValueError("Packet type not valid")
self._type = packet_type
@property
def data(self):
"""Packet content"""
return self._data
@data.setter
def data(self, data=b""):
self._data = data or b""
def compute_checksum(self):
# checksum set to zero
header = bytes([self._type[0], self._type[1], 0, 0])
struct_packet = header + self._data
length = len(struct_packet)
if length % 2:
odd = struct_packet[-1] << 8
struct_packet = struct_packet[:-1]
else:
odd = 0
format_len = len(struct_packet) // 2
blocks = struct.unpack("!{}H".format(format_len), struct_packet)
checksum = sum(blocks)
checksum += odd
checksum = (checksum >> 16) + (checksum & 0xFFFF)
checksum += checksum >> 16
self._checksum = ~checksum & 0xFFFF
@property
def checksum(self):
"""Packet checksum"""
return self._checksum
@property
def computedChecksum(self):
"""Computed checksum"""
return self._checksum >= 0
def __str__(self):
return ("ICMPPacket[type={}, data={}, checksum={}]"
.format(self._type, self._data[4:], self._checksum))
def encodePacket(self):
"""Returns the struct_packet encoded in a string"""
if not self.computedChecksum:
self.compute_checksum()
return struct.pack("!BBH{}s".format(len(self._data)),
self._type[0], self._type[1],
self._checksum, self._data)
@staticmethod
def buildPacket(raw):
"""Builds an ICMPPacket from the string raw
(received from a pong), returns (IP Header (raw), ICMP Packet)"""
ihl = (raw[0] & 0x0F) << 2
ip_header, raw_packet = raw[:ihl], raw[ihl:]
format_len = len(raw_packet) - 4
unpacked = struct.unpack("!BBH{}s".format(format_len), raw_packet)
packet = ICMPPacket(unpacked[:2], unpacked[3])
packet._checksum = unpacked[2]
return ip_header, packet
class Pinger:
    """Sends ICMP echo requests ("pings") to remote hosts and reads the
    replies.

    Uses a raw socket, so actually sending packets typically requires
    administrator/root privileges.
    """

    # Default receive timeout, in seconds.
    DEFAULT_TIMEOUT = 5

    def __init__(self, timeout=DEFAULT_TIMEOUT):
        """Initialize the Pinger with the given receive timeout (seconds)."""
        self.socket = None
        self.timeout = timeout
        # Per-destination counter embedded in each request's payload so
        # successive pings to the same host are distinguishable.
        self.id_dict = defaultdict(int)

    def ping(self, dest_address, data=None):
        """Send an ICMP echo request to *dest_address*, with optional
        *data* (bytes) appended after the per-host sequence counter.
        """
        # Close any socket left over from a previous ping.  The original
        # test was inverted ("if not self.socket"), which called close()
        # when there was nothing to close and leaked the old socket
        # whenever one existed.
        if self.socket:
            self.close()
        dest_address = str(dest_address)
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_RAW,
                                    socket.getprotobyname("icmp"))
        self.socket.connect((dest_address, 0))
        self.socket.settimeout(self.timeout)
        packet = ICMPPacket(packetType=ICMP_ECHO_REQUEST)
        idpacket = struct.pack("!I", self.id_dict[dest_address])
        packet.data = idpacket + (data or b"")
        self.id_dict[dest_address] += 1
        self.socket.send(packet.encodePacket())

    def pong(self):
        """Return (raw IP header, ICMPPacket) parsed from the next reply.

        :raises socket.error: if no ping is in flight (socket closed)
        :raises socket.timeout: if no reply arrives within the timeout
        """
        if not self.socket:
            raise socket.error("Socket closed")
        return ICMPPacket.buildPacket(self.socket.recv((1 << 16) - 1))

    def close(self):
        """Close the underlying socket, if open."""
        if self.socket:
            self.socket.close()
            self.socket = None

    def __del__(self):
        """Best-effort cleanup of the raw socket."""
        self.close()
def main():
    """Command-line driver: resolve the target host, ping it repeatedly,
    and print per-ping round-trip times plus summary statistics.

    NOTE: the help strings "Dati"/"Numero di ping" are Italian; they are
    user-visible runtime strings and are left untouched here.
    """
    def parseArgs():
        # -r host, -d payload string, -t number of pings (default: "forever").
        handler = argparse.ArgumentParser(description="Pinger")
        handler.add_argument('-r', '--remote_host', help="Destination",
                             default="localhost", dest="dest")
        handler.add_argument('-d', '--data', help="Dati", default="",
                             dest="data")
        handler.add_argument('-t', '--tries', help="Numero di ping",
                             default=sys.maxsize, dest="tries", type=int)
        return handler.parse_args()

    args = parseArgs()
    try:
        ip = socket.gethostbyname(args.dest)
    except socket.gaierror:
        sys.exit("{} not found".format(args.dest))

    print("Pinging", args.dest, "(" + ip + ")")
    pinger = Pinger()
    # Running stats: max/min/mean RTT (ms), packets sent, packets received.
    tmax, tmin, tmean, total, received = -1, sys.maxsize, 0, 0, 0
    for i in range(args.tries):
        total += 1
        try:
            pinger.ping(args.dest, args.data.encode())
            t = datetime.now()
            pinger.pong()
            # Round-trip time in milliseconds.
            t = (datetime.now() - t).microseconds / 1000.
            print("Got ping from {} in {:1.2f} ms".format(args.dest, t))
            # Ignore Ctrl-C while the statistics are updated so they are
            # never left half-updated; restored immediately after.
            handler = signal(SIGINT, SIG_IGN)
            tmax, tmin = max(tmax, t), min(tmin, t)
            received += 1
            tmean = ((received - 1) * tmean + t) / received
            signal(SIGINT, handler)
            if i != args.tries - 1:
                time.sleep(1)
        except socket.timeout:
            print("Host is not reachable")
        except KeyboardInterrupt:
            # Ctrl-C between pings: stop and print the summary.
            break
    print("***** RESULTS *****")
    if received != 0:
        stats = "Max time: {:1.2f} ms, Min time: {:1.2f} ms, Avg time: {:1.2f} ms"
        print(stats.format(tmax, tmin, tmean))
    stats = "Sent packets: {}\tReceived: {}\tLost: {}"
    print(stats.format(total, received, total - received))
    print("Packet Lost: {:1.0f}%".format((total - received) / total * 100))
# Run the ping CLI when executed as a script.
if __name__ == '__main__':
    main()
<|file_name|>Content.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import { KeyboardAwareScrollView } from 'react-native-keyboard-aware-scroll-view';
import { connectStyle } from 'native-base-shoutem-theme';
import mapPropsToStyleNames from '../Utils/mapPropsToStyleNames';
class Content extends Component {<|fim▁hole|> resetScrollToCoords={(this.props.disableKBDismissScroll) ? null : { x: 0, y: 0 }}
ref={(c) => { this._scrollview = c; this._root = c; }}
{...this.props}
>
{this.props.children}
</KeyboardAwareScrollView>
);
}
}
Content.propTypes = {
...KeyboardAwareScrollView.propTypes,
style: React.PropTypes.object,
padder: React.PropTypes.bool,
disableKBDismissScroll: React.PropTypes.bool,
enableResetScrollToCoords: React.PropTypes.bool
};
const StyledContent = connectStyle('NativeBase.Content', {}, mapPropsToStyleNames)(Content);
export {
StyledContent as Content,
};<|fim▁end|> | render() {
return (
<KeyboardAwareScrollView
automaticallyAdjustContentInsets={false} |
<|file_name|>twitter.py<|end_file_name|><|fim▁begin|>from twython import Twython
from django.conf import settings
from .base import BaseSource
<|fim▁hole|> raise ValueError
self.uid = uid
self.screen_name = screen_name
    def fetch(self):
        """Yield the configured user's recent tweets as plain dicts.

        Each yielded dict has keys 'id', 'content' (the tweet text),
        'created_at' and 'entities'.  Credentials come from Django
        settings.
        """
        APP_KEY = settings.SOCIAL_AUTH_TWITTER_KEY
        APP_SECRET = settings.SOCIAL_AUTH_TWITTER_SECRET
        twitter = Twython(
            APP_KEY,
            APP_SECRET,
            settings.TWITTER_ACCESS_TOKEN,
            settings.TWITTER_ACCESS_TOKEN_SECRET)
        # Prefer the stable numeric user id when available; otherwise
        # fall back to the screen name.
        if self.uid:
            tweets = twitter.get_user_timeline(user_id=self.uid)
        else:
            tweets = twitter.get_user_timeline(screen_name=self.screen_name)
        for tweet in tweets:
            yield {
                'id': tweet['id'],
                'content': tweet['text'],
                'created_at': tweet['created_at'],
                'entities': tweet['entities']
            }
def __init__(self, uid=None, screen_name=None):
if uid is None and screen_name is None: |
<|file_name|>annotate_old_bird_calls.py<|end_file_name|><|fim▁begin|>"""
Annotates Old Bird call detections in the BirdVox-70k archive.
The annotations classify clips detected by the Old Bird Tseep and Thrush
detectors according to the archive's ground truth call clips.
This script must be run from the archive directory.
"""
from django.db.models import F
from django.db.utils import IntegrityError
import pandas as pd
# Set up Django. This must happen before any use of Django, including
# ORM class imports.
import vesper.util.django_utils as django_utils
django_utils.set_up_django()
from vesper.django.app.models import (
AnnotationInfo, Clip, Processor, Recording, StringAnnotation, User)
import vesper.django.app.model_utils as model_utils
import scripts.old_bird_detector_eval.utils as utils
# Set this `True` to skip actually annotating the Old Bird detections.
# The script will still compute the classifications and print precision,
# recall, and F1 statistics. This is useful for testing purposes, since
# the script runs considerably faster when it doesn't annotate.
ANNOTATE = True
GROUND_TRUTH_DETECTOR_NAME = 'BirdVox-70k'
# The elements of the pairs of numbers are (0) the approximate start offset
# of a call within an Old Bird detector clip, and (1) the approximate
# maximum duration of a call. The units of both numbers are seconds.
DETECTOR_DATA = (
('Old Bird Tseep Detector Redux 1.1', 'Call.High'),
('Old Bird Thrush Detector Redux 1.1', 'Call.Low'),
)
CLASSIFICATION_ANNOTATION_NAME = 'Classification'
CENTER_INDEX_ANNOTATION_NAME = 'Call Center Index'
CENTER_FREQ_ANNOTATION_NAME = 'Call Center Freq'
SAMPLE_RATE = 24000
def main():
rows = annotate_old_bird_calls()
raw_df = create_raw_df(rows)
aggregate_df = create_aggregate_df(raw_df)<|fim▁hole|>
add_precision_recall_f1(raw_df)
add_precision_recall_f1(aggregate_df)
print(raw_df.to_csv())
print(aggregate_df.to_csv())
def annotate_old_bird_calls():
    """Match Old Bird detector clips to ground-truth call clips and (when
    the module-level ANNOTATE flag is set) annotate the matched clips.

    Returns a list of rows
    [short detector name, station number, ground-truth call count,
     matched Old Bird call count, total Old Bird clip count],
    one per (detector, station/mic) pair, for the summary DataFrames.
    """
    center_index_annotation_info = \
        AnnotationInfo.objects.get(name=CENTER_INDEX_ANNOTATION_NAME)
    center_freq_annotation_info = \
        AnnotationInfo.objects.get(name=CENTER_FREQ_ANNOTATION_NAME)
    classification_annotation_info = \
        AnnotationInfo.objects.get(name=CLASSIFICATION_ANNOTATION_NAME)
    user = User.objects.get(username='Vesper')
    sm_pairs = model_utils.get_station_mic_output_pairs_list()
    ground_truth_detector = Processor.objects.get(
        name=GROUND_TRUTH_DETECTOR_NAME)
    rows = []
    for detector_name, annotation_value in DETECTOR_DATA:
        # E.g. 'Old Bird Tseep Detector Redux 1.1' -> 'Tseep'.
        short_detector_name = detector_name.split()[2]
        old_bird_detector = Processor.objects.get(name=detector_name)
        window = utils.OLD_BIRD_CLIP_CALL_CENTER_WINDOWS[short_detector_name]
        for station, mic_output in sm_pairs:
            # E.g. 'Station 3' -> 3.
            station_num = int(station.name.split()[1])
            print('{} {}...'.format(short_detector_name, station_num))
            ground_truth_clips = list(model_utils.get_clips(
                station=station,
                mic_output=mic_output,
                detector=ground_truth_detector,
                annotation_name=CLASSIFICATION_ANNOTATION_NAME,
                annotation_value=annotation_value))
            ground_truth_call_center_indices = \
                [c.start_index + c.length // 2 for c in ground_truth_clips]
            ground_truth_call_count = len(ground_truth_clips)
            old_bird_clips = list(model_utils.get_clips(
                station=station,
                mic_output=mic_output,
                detector=old_bird_detector))
            old_bird_clip_count = len(old_bird_clips)
            clips = [(c.start_index, c.length) for c in old_bird_clips]
            # matches: pairs (i, j) of Old Bird clip index and ground-truth
            # call index whose call center falls inside the clip's window.
            matches = utils.match_clips_with_calls(
                clips, ground_truth_call_center_indices, window)
            old_bird_call_count = len(matches)
            rows.append([
                short_detector_name, station_num, ground_truth_call_count,
                old_bird_call_count, old_bird_clip_count])
            if ANNOTATE:
                # Clear any existing annotations.
                for clip in old_bird_clips:
                    model_utils.unannotate_clip(
                        clip, classification_annotation_info,
                        creating_user=user)
                # Create new annotations.
                for i, j in matches:
                    old_bird_clip = old_bird_clips[i]
                    call_center_index = ground_truth_call_center_indices[j]
                    ground_truth_clip = ground_truth_clips[j]
                    # Annotate Old Bird clip call center index.
                    model_utils.annotate_clip(
                        old_bird_clip, center_index_annotation_info,
                        str(call_center_index), creating_user=user)
                    # Get ground truth clip call center frequency.
                    annotations = \
                        model_utils.get_clip_annotations(ground_truth_clip)
                    call_center_freq = annotations[CENTER_FREQ_ANNOTATION_NAME]
                    # Annotate Old Bird clip call center frequency.
                    model_utils.annotate_clip(
                        old_bird_clip, center_freq_annotation_info,
                        call_center_freq, creating_user=user)
                    # Finally, classify the Old Bird clip as the call type.
                    model_utils.annotate_clip(
                        old_bird_clip, classification_annotation_info,
                        annotation_value, creating_user=user)
    return rows
def create_raw_df(rows):
    """Build the per-(detector, station) DataFrame of clip/call counts."""
    return pd.DataFrame(
        rows,
        columns=[
            'Detector', 'Station', 'Ground Truth Calls', 'Old Bird Calls',
            'Old Bird Clips'])
def create_aggregate_df(df):
    """Aggregate the raw per-station counts into one row per detector,
    plus an overall 'All' row."""
    data = [sum_counts(df, detector) for detector in ('Tseep', 'Thrush', 'All')]
    columns = [
        'Detector', 'Ground Truth Calls', 'Old Bird Calls', 'Old Bird Clips']
    return pd.DataFrame(data, columns=columns)
def sum_counts(df, detector):
    """Total the count columns of *df* for one detector ('All' = no filter).

    Returns [detector name, ground-truth calls, Old Bird calls,
    Old Bird clips].
    """
    subset = df if detector == 'All' else df.loc[df['Detector'] == detector]
    return [
        detector,
        subset['Ground Truth Calls'].sum(),
        subset['Old Bird Calls'].sum(),
        subset['Old Bird Clips'].sum()]
def add_precision_recall_f1(df):
    """Append Precision, Recall, and F1 columns (as percentages) to `df`.

    Precision and recall are computed from the Old Bird call/clip counts
    and the ground truth call counts already present in the table.
    """
    precision = df['Old Bird Calls'] / df['Old Bird Clips']
    recall = df['Old Bird Calls'] / df['Ground Truth Calls']
    f1 = 2 * precision * recall / (precision + recall)
    df['Precision'] = to_percent(precision)
    df['Recall'] = to_percent(recall)
    df['F1'] = to_percent(f1)
def to_percent(x):
    """Convert a fraction (scalar or Series) to a percentage with one decimal."""
    return round(x * 1000) / 10
if __name__ == '__main__':
    # Run the analysis only when executed as a script, not on import.
    main()
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Nefertari documentation build configuration file, created by
# sphinx-quickstart on Fri Mar 27 11:16:31 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
# 'sphinxcontrib.fulltoc',
'releases'
]
releases_github_path = 'brandicted/ramses'
releases_debug = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
<|fim▁hole|>project = u'Ramses'
copyright = u'2015, Brandicted'
author = u'Brandicted'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Ramsesdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Ramses.tex', u'Ramses Documentation',
u'Brandicted', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'ramses', u'Ramses Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Ramses', u'Ramses Documentation',
author, 'Ramses', 'API generator for Pyramid using RAML',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | # General information about the project. |
<|file_name|>_utils.py<|end_file_name|><|fim▁begin|>import sys
import types
import typing as t
import decorator as deco
from gssapi.raw.misc import GSSError
if t.TYPE_CHECKING:
from gssapi.sec_contexts import SecurityContext
def import_gssapi_extension(
    name: str,
) -> t.Optional[types.ModuleType]:
    """Import a GSSAPI extension module.

    This method imports a GSSAPI extension module based
    on the name of the extension (not including the
    'ext_' prefix).  If the extension is not available,
    the method returns None.

    Args:
        name (str): the name of the extension

    Returns:
        module: Either the extension module or None
    """
    module_path = f'gssapi.raw.ext_{name}'
    try:
        __import__(module_path)
    except ImportError:
        # The optional extension is not compiled in / not installed.
        return None
    return sys.modules[module_path]
def inquire_property(
    name: str,
    doc: t.Optional[str] = None
) -> property:
    """Create a read-only property backed by an ``_inquire`` call.

    The returned property calls the owning security context's
    :python:`_inquire` method and returns the value of the
    requested piece of information.

    Args:
        name (str): the name of the 'inquire' result information

    Returns:
        property: the created property
    """
    def getter(self: "SecurityContext") -> t.Any:
        # Inquiring an unestablished context is not meaningful.
        if not self._started:
            raise AttributeError(
                f"Cannot read {name} from a security context whose "
                "establishment has not yet been started."
            )
        return getattr(self._inquire(**{name: True}), name)

    return property(getter, doc=doc)
# use UTF-8 as the default encoding, like Python 3
_ENCODING = 'UTF-8'
def _get_encoding() -> str:
"""Gets the current encoding used for strings.
This value is used to encode and decode string
values like names.
Returns:
str: the current encoding
"""
return _ENCODING
def set_encoding(
enc: str,
) -> None:
"""Sets the current encoding used for strings
This value is used to encode and decode string
values like names.
Args:
enc: the encoding to use
"""
global _ENCODING
_ENCODING = enc
def _encode_dict(
d: t.Dict[t.Union[bytes, str], t.Union[bytes, str]],
) -> t.Dict[bytes, bytes]:
"""Encodes any relevant strings in a dict"""
def enc(x: t.Union[bytes, str]) -> bytes:
if isinstance(x, str):
return x.encode(_ENCODING)
else:
return x
return {enc(k): enc(v) for k, v in d.items()}
# in case of Python 3, just use exception chaining
@deco.decorator
def catch_and_return_token(
    func: t.Callable,
    self: "SecurityContext",
    *args: t.Any,
    **kwargs: t.Any,
) -> t.Optional[bytes]:
    """Optionally defer exceptions and return a token instead

    When `__DEFER_STEP_ERRORS__` is set on the implementing class
    or instance, methods wrapped with this wrapper will
    catch and save their :python:`GSSError` exceptions and
    instead return the result token attached to the exception.

    The exception can be later retrieved through :python:`_last_err`
    (and :python:`_last_tb` when Python 2 is in use).
    """
    try:
        return func(self, *args, **kwargs)
    except GSSError as e:
        # Deferral is opt-in per class/instance; default to re-raising.
        defer_step_errors = getattr(self, '__DEFER_STEP_ERRORS__', False)
        # Only defer when there is actually a token to hand back to the caller.
        if e.token is not None and defer_step_errors:
            # Stash the error so check_last_err can re-raise it later.
            self._last_err = e
            # skip the "return func" line above in the traceback
            tb = e.__traceback__.tb_next  # type: ignore[union-attr]
            self._last_err.__traceback__ = tb

            return e.token
        else:
            raise
@deco.decorator
def check_last_err(
func: t.Callable,
self: "SecurityContext",
*args: t.Any,
**kwargs: t.Any,
) -> t.Any:
"""Check and raise deferred errors before running the function
This method checks :python:`_last_err` before running the wrapped
function. If present and not None, the exception will be raised
with its original traceback.
"""
if self._last_err is not None:
try:
raise self._last_err
finally:
self._last_err = None<|fim▁hole|> return func(self, *args, **kwargs)
class CheckLastError(type):
    """Check for a deferred error on all methods

    This metaclass applies the :python:`check_last_err` decorator
    to all methods not prefixed by '_'.

    Additionally, it enables `__DEFER_STEP_ERRORS__` by default.
    """

    def __new__(
        cls,
        name: str,
        parents: t.Tuple[t.Type],
        attrs: t.Dict[str, t.Any],
    ) -> "CheckLastError":
        # Classes built with this metaclass defer step errors by default
        # (see catch_and_return_token).
        attrs['__DEFER_STEP_ERRORS__'] = True

        for attr_name in attrs:
            attr = attrs[attr_name]

            # wrap only methods
            if not isinstance(attr, types.FunctionType):
                continue

            # Public methods get the deferred-error check; private ones
            # (leading underscore) are left untouched.
            if attr_name[0] != '_':
                attrs[attr_name] = check_last_err(attr)

        return super(CheckLastError, cls).__new__(cls, name, parents, attrs)
<|file_name|>test_policies.py<|end_file_name|><|fim▁begin|># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
import st2tests
from st2common.bootstrap.policiesregistrar import register_policy_types, register_policies
from st2common.models.api.action import ActionAPI, RunnerTypeAPI
from st2common.models.api.policy import PolicyTypeAPI, PolicyAPI<|fim▁hole|>from st2common.persistence.policy import PolicyType, Policy
from st2common.persistence.runner import RunnerType
from st2common.policies import ResourcePolicyApplicator, get_driver
from st2tests import DbTestCase, fixturesloader
TEST_FIXTURES = {
'runners': [
'testrunner1.yaml'
],
'actions': [
'action1.yaml'
],
'policytypes': [
'fake_policy_type_1.yaml',
'fake_policy_type_2.yaml'
],
'policies': [
'policy_1.yaml',
'policy_2.yaml'
]
}
PACK = 'generic'
LOADER = fixturesloader.FixturesLoader()
FIXTURES = LOADER.load_fixtures(fixtures_pack=PACK, fixtures_dict=TEST_FIXTURES)
class PolicyTest(DbTestCase):
    """Tests for policy model lookup and policy driver resolution."""

    @classmethod
    def setUpClass(cls):
        """Load runner, action, policy type and policy fixtures into the DB."""
        super(PolicyTest, cls).setUpClass()

        for _, fixture in six.iteritems(FIXTURES['runners']):
            instance = RunnerTypeAPI(**fixture)
            RunnerType.add_or_update(RunnerTypeAPI.to_model(instance))

        for _, fixture in six.iteritems(FIXTURES['actions']):
            instance = ActionAPI(**fixture)
            Action.add_or_update(ActionAPI.to_model(instance))

        for _, fixture in six.iteritems(FIXTURES['policytypes']):
            instance = PolicyTypeAPI(**fixture)
            PolicyType.add_or_update(PolicyTypeAPI.to_model(instance))

        for _, fixture in six.iteritems(FIXTURES['policies']):
            instance = PolicyAPI(**fixture)
            Policy.add_or_update(PolicyAPI.to_model(instance))

    def test_get_by_ref(self):
        # Policies are referenced as "<pack>.<name>".
        policy_db = Policy.get_by_ref('wolfpack.action-1.concurrency')
        self.assertIsNotNone(policy_db)
        self.assertEqual(policy_db.pack, 'wolfpack')
        self.assertEqual(policy_db.name, 'action-1.concurrency')

        # The policy's type must resolve to a registered policy type.
        policy_type_db = PolicyType.get_by_ref(policy_db.policy_type)
        self.assertIsNotNone(policy_type_db)
        self.assertEqual(policy_type_db.resource_type, 'action')
        self.assertEqual(policy_type_db.name, 'concurrency')

    def test_get_driver(self):
        policy_db = Policy.get_by_ref('wolfpack.action-1.concurrency')
        policy = get_driver(policy_db.ref, policy_db.policy_type, **policy_db.parameters)
        self.assertIsInstance(policy, ResourcePolicyApplicator)
        self.assertEqual(policy._policy_ref, policy_db.ref)
        self.assertEqual(policy._policy_type, policy_db.policy_type)
        # Policy parameters are applied as attributes on the driver instance.
        self.assertTrue(hasattr(policy, 'threshold'))
        self.assertEqual(policy.threshold, 3)
class PolicyBootstrapTest(DbTestCase):
    """Tests for registering policy types and policies from pack content."""

    def test_register_policy_types(self):
        # st2tests ships exactly two policy type definitions.
        self.assertEqual(register_policy_types(st2tests), 2)

        type1 = PolicyType.get_by_ref('action.concurrency')
        self.assertEqual(type1.name, 'concurrency')
        self.assertEqual(type1.resource_type, 'action')

        type2 = PolicyType.get_by_ref('action.mock_policy_error')
        self.assertEqual(type2.name, 'mock_policy_error')
        self.assertEqual(type2.resource_type, 'action')

    def test_register_policies(self):
        # dummy_pack_1 contains two policy definitions.
        pack_dir = os.path.join(fixturesloader.get_fixtures_base_path(), 'dummy_pack_1')
        self.assertEqual(register_policies(pack_dir=pack_dir), 2)

        p1 = Policy.get_by_ref('dummy_pack_1.test_policy_1')
        self.assertEqual(p1.name, 'test_policy_1')
        self.assertEqual(p1.pack, 'dummy_pack_1')
        self.assertEqual(p1.resource_ref, 'dummy_pack_1.local')
        self.assertEqual(p1.policy_type, 'action.concurrency')

        p2 = Policy.get_by_ref('dummy_pack_1.test_policy_2')
        self.assertEqual(p2.name, 'test_policy_2')
        self.assertEqual(p2.pack, 'dummy_pack_1')
        self.assertEqual(p2.resource_ref, 'dummy_pack_1.local')
        self.assertEqual(p2.policy_type, 'action.mock_policy_error')
        # NOTE(review): this duplicates the resource_ref assertion above;
        # kept as-is to avoid altering test behavior.
        self.assertEqual(p2.resource_ref, 'dummy_pack_1.local')
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use anyhow::*;
use diesel::r2d2::{self, ConnectionManager, PooledConnection};
use diesel::sqlite::SqliteConnection;
use diesel::RunQueryDsl;
use std::path::Path;
mod schema;
pub use self::schema::*;
#[allow(dead_code)]
const DB_MIGRATIONS_PATH: &str = "migrations";
embed_migrations!("migrations");
#[derive(Clone)]
pub struct DB {
pool: r2d2::Pool<ConnectionManager<SqliteConnection>>,
}
#[derive(Debug)]
struct ConnectionCustomizer {}
impl diesel::r2d2::CustomizeConnection<SqliteConnection, diesel::r2d2::Error>
for ConnectionCustomizer
{
fn on_acquire(&self, connection: &mut SqliteConnection) -> Result<(), diesel::r2d2::Error> {
let query = diesel::sql_query(
r#"<|fim▁hole|> PRAGMA synchronous = NORMAL;
PRAGMA foreign_keys = ON;
"#,
);
query
.execute(connection)
.map_err(diesel::r2d2::Error::QueryError)?;
Ok(())
}
}
impl DB {
    /// Open (creating directories/file if necessary) the SQLite database at
    /// `path`, build the r2d2 connection pool, and run pending migrations.
    pub fn new(path: &Path) -> Result<DB> {
        std::fs::create_dir_all(&path.parent().unwrap())?;
        let manager = ConnectionManager::<SqliteConnection>::new(path.to_string_lossy());
        let pool = diesel::r2d2::Pool::builder()
            .connection_customizer(Box::new(ConnectionCustomizer {}))
            .build(manager)?;
        let db = DB { pool };
        db.migrate_up()?;
        Ok(db)
    }

    /// Check out a pooled connection, converting the pool error into `anyhow::Error`.
    pub fn connect(&self) -> Result<PooledConnection<ConnectionManager<SqliteConnection>>> {
        self.pool.get().map_err(Error::new)
    }

    /// Revert migrations one at a time until none remain applied.
    /// Only exercised by tests, hence the `dead_code` allowance.
    #[allow(dead_code)]
    fn migrate_down(&self) -> Result<()> {
        let connection = self.connect().unwrap();
        loop {
            match diesel_migrations::revert_latest_migration_in_directory(
                &connection,
                Path::new(DB_MIGRATIONS_PATH),
            ) {
                Ok(_) => (),
                // No migration left to revert: the rollback loop is done.
                Err(diesel_migrations::RunMigrationsError::MigrationError(
                    diesel_migrations::MigrationError::NoMigrationRun,
                )) => break,
                Err(e) => bail!(e),
            }
        }
        Ok(())
    }

    /// Apply all pending migrations embedded at compile time.
    fn migrate_up(&self) -> Result<()> {
        let connection = self.connect().unwrap();
        embedded_migrations::run(&connection)?;
        Ok(())
    }
}
#[test]
fn run_migrations() {
    use crate::test::*;
    use crate::test_name;

    let output_dir = prepare_test_directory(test_name!());
    let db_path = output_dir.join("db.sqlite");

    // Opening the database applies all migrations; then exercise a full
    // down/up cycle to verify every migration is reversible.
    let db = DB::new(&db_path).unwrap();
    db.migrate_down().unwrap();
    db.migrate_up().unwrap();
}
PRAGMA journal_mode = WAL; |
<|file_name|>test_recordElement.py<|end_file_name|><|fim▁begin|>from unittest import TestCase
from plivo import plivoxml
from tests import PlivoXmlTestCase
class RecordElementTest(TestCase, PlivoXmlTestCase):
def test_set_methods(self):
expected_response = '<Response><Record action="https://foo.example.com" callbackMethod="GET" ' \
'callbackUrl="https://foo.example.com" fileFormat="wav" finishOnKey="#" ' \
'maxLength="10" method="GET" playBeep="false" recordSession="false" ' \
'redirect="false" startOnDialAnswer="false" timeout="100" transcriptionMethod="GET" ' \
'transcriptionType="hybrid" transcriptionUrl="https://foo.example.com"/>' \
'</Response>'
action = 'https://foo.example.com'
method = 'GET'
fileFormat = 'wav'
redirect = False
timeout = 100
maxLength = 10
recordSession = False
startOnDialAnswer = False
playBeep = False
finishOnKey = '#'
transcriptionType = 'hybrid'
transcriptionUrl = 'https://foo.example.com'
transcriptionMethod = 'GET'
callbackUrl = 'https://foo.example.com'
callbackMethod = 'GET'
element = plivoxml.ResponseElement()
response = element.add(
plivoxml.RecordElement().set_action(action).set_method(method)
.set_file_format(fileFormat).set_redirect(redirect).set_timeout(
timeout).set_max_length(maxLength).set_play_beep(playBeep)
.set_finish_on_key(finishOnKey).set_record_session(recordSession).
set_start_on_dial_answer(startOnDialAnswer).set_transcription_type(<|fim▁hole|> callbackUrl).set_callback_method(callbackMethod)).to_string(False)
self.assertXmlEqual(response, expected_response)<|fim▁end|> | transcriptionType).set_transcription_url(transcriptionUrl)
.set_transcription_method(transcriptionMethod).set_callback_url( |
<|file_name|>lt.js<|end_file_name|><|fim▁begin|>'use strict';
var convert = require('./convert'),
func = convert('lt', require('../lt'));
func.placeholder = require('./placeholder');
module.exports = func;<|fim▁hole|><|fim▁end|> | //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi4uLy4uLy4uLy4uL2NsaWVudC9saWIvbG9kYXNoL2ZwL2x0LmpzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7O0FBQUEsSUFBSSxVQUFVLFFBQVEsV0FBUixDQUFWO0lBQ0EsT0FBTyxRQUFRLElBQVIsRUFBYyxRQUFRLE9BQVIsQ0FBZCxDQUFQOztBQUVKLEtBQUssV0FBTCxHQUFtQixRQUFRLGVBQVIsQ0FBbkI7QUFDQSxPQUFPLE9BQVAsR0FBaUIsSUFBakIiLCJmaWxlIjoibHQuanMiLCJzb3VyY2VzQ29udGVudCI6WyJ2YXIgY29udmVydCA9IHJlcXVpcmUoJy4vY29udmVydCcpLFxuICAgIGZ1bmMgPSBjb252ZXJ0KCdsdCcsIHJlcXVpcmUoJy4uL2x0JykpO1xuXG5mdW5jLnBsYWNlaG9sZGVyID0gcmVxdWlyZSgnLi9wbGFjZWhvbGRlcicpO1xubW9kdWxlLmV4cG9ydHMgPSBmdW5jO1xuIl19 |
<|file_name|>cxx.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Xavier Claessens
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, see <http://www.gnu.org/licenses/>.
*/
#include <glib.h>
typedef struct
{
int dummy;
} MyObject;
/* Exercises GLib's type-inferring macros from C++ to ensure they compile
 * without invalid gpointer-to-T* conversion errors. */
static void
test_typeof (void)
{
#if __cplusplus >= 201103L
  // Test that with C++11 we don't get those kind of errors:
  // error: invalid conversion from ‘gpointer’ {aka ‘void*’} to ‘MyObject*’ [-fpermissive]
  MyObject *obj = g_rc_box_new0 (MyObject);
  MyObject *obj2 = g_rc_box_acquire (obj);
  g_assert_true (obj2 == obj);

  MyObject *obj3 = g_atomic_pointer_get (&obj2);
  g_assert_true (obj3 == obj);

  MyObject *obj4 = nullptr;
  g_atomic_pointer_set (&obj4, obj3);
  g_assert_true (obj4 == obj);

  MyObject *obj5 = nullptr;
  g_atomic_pointer_compare_and_exchange (&obj5, nullptr, obj4);
  g_assert_true (obj5 == obj);

  MyObject *obj6 = g_steal_pointer (&obj5);
  g_assert_true (obj6 == obj);

  // Drop both references taken above (new0 + acquire).
  g_clear_pointer (&obj6, g_rc_box_release);
  g_rc_box_release (obj);
#else
  g_test_skip ("This test requires C++11 compiler");
#endif
}
<|fim▁hole|>
g_test_add_func ("/C++/typeof", test_typeof);
return g_test_run ();
}<|fim▁end|> | int
main (int argc, char *argv[])
{
g_test_init (&argc, &argv, NULL); |
<|file_name|>card.py<|end_file_name|><|fim▁begin|># img
# trigger = attributes[12]
# http://ws-tcg.com/en/cardlist
# edit
import os
import requests
import sqlite3
def get_card(browser):
attributes = browser.find_elements_by_xpath('//table[@class="status"]/tbody/tr/td')
image = attributes[0].find_element_by_xpath('./img').get_attribute('src')
if attributes[1].find_element_by_xpath('./span[@class="kana"]').text:
card_name = attributes[1].find_element_by_xpath('./span[@class="kana"]').text
else:
card_name = None
card_no = attributes[2].text if attributes[2].text else None
rarity = attributes[3].text if attributes[3].text else None<|fim▁hole|> side = "Weiß"
elif attributes[5].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/s.gif":
side = "Schwarz"
else:
side = None
card_type = attributes[6].text if attributes[6].text else None
if attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/yellow.gif":
color = "Yellow"
elif attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/green.gif":
color = "Green"
elif attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/red.gif":
color = "Red"
elif attributes[7].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/blue.gif":
color = "Blue"
else:
color = None
level = attributes[8].text if attributes[8].text else None
cost = attributes[9].text if attributes[9].text else None
power = attributes[10].text if attributes[10].text else None
soul = len(attributes[11].find_elements_by_xpath('./img[contains(@src, "http://ws-tcg.com/en/cardlist/partimages/soul.gif")]'))
special_attribute = attributes[13].text if attributes[13].text else None
text = attributes[14].text if attributes[14].text else None
flavor_text = attributes[15].text if attributes[15].text else None
if not os.path.exists("images"):
os.makedirs("images")
if not os.path.exists("images/" + card_no.split("/")[0]):
os.makedirs("images/" + card_no.split("/")[0])
r = requests.get(image, stream=True)
if r.status_code == 200:
with open("images/" + card_no + ".jpg", 'wb') as f:
for chunk in r:
f.write(chunk)
card = (card_name, card_no, rarity, expansion, side, card_type, color, level, cost, power, soul,
special_attribute, text, flavor_text)
connection = sqlite3.connect('cards.sqlite3')
cursor = connection.cursor()
cursor.execute('INSERT INTO cards (name, no, rarity, expansion, side, type, color, level, cost, power, soul,'
'special_attribute, text, flavor_text) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?)', card)
connection.commit()
connection.close()<|fim▁end|> | expansion = attributes[4].text if attributes[4].text else None
if attributes[5].find_element_by_xpath('./img').get_attribute("src") == "http://ws-tcg.com/en/cardlist/partimages/w.gif": |
<|file_name|>Fl_Fill_Dial.H<|end_file_name|><|fim▁begin|>//
// "$Id: Fl_Fill_Dial.H 9637 2012-07-24 04:37:22Z matt $"
<|fim▁hole|>//
// Filled dial header file for the Fast Light Tool Kit (FLTK).
//
// Copyright 1998-2010 by Bill Spitzak and others.
//
// This library is free software. Distribution and use rights are outlined in
// the file "COPYING" which should have been included with this file. If this
// file is missing or damaged, see the license at:
//
// http://www.fltk.org/COPYING.php
//
// Please report all bugs and problems on the following page:
//
// http://www.fltk.org/str.php
//
/* \file
Fl_Fill_Dial widget . */
#ifndef Fl_Fill_Dial_H
#define Fl_Fill_Dial_H
#include "Fl_Dial.H"
/** Draws a dial with a filled arc. */
class FL_EXPORT Fl_Fill_Dial : public Fl_Dial
{
public:
  /**
    Creates a filled dial, also setting its type to FL_FILL_DIAL.
    \param[in] X,Y,W,H position and size of the widget
    \param[in] L widget label, may be NULL
  */
  Fl_Fill_Dial(int X,int Y,int W,int H, const char *L) : Fl_Dial(X,Y,W,H,L)
  {
    type(FL_FILL_DIAL);  // select the filled-arc drawing style
  }
};
#endif
//
// End of "$Id: Fl_Fill_Dial.H 9637 2012-07-24 04:37:22Z matt $".
//<|fim▁end|> | |
<|file_name|>generate_data_driven_dependency_metadata_test.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*
* @generated SignedSource<<4a921be6b2efc18fbd6ef5e4b6623e04>>
*/
mod generate_data_driven_dependency_metadata;
use generate_data_driven_dependency_metadata::transform_fixture;
use fixture_tests::test_fixture;
#[test]
fn match_on_child_of_plural() {
let input = include_str!("generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.graphql");
let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.expected");
test_fixture(transform_fixture, "match-on-child-of-plural.graphql", "generate_data_driven_dependency_metadata/fixtures/match-on-child-of-plural.expected", input, expected);
}
#[test]
fn match_with_extra_args() {
let input = include_str!("generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.graphql");
let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.expected");
test_fixture(transform_fixture, "match-with-extra-args.graphql", "generate_data_driven_dependency_metadata/fixtures/match-with-extra-args.expected", input, expected);
}
#[test]
fn module_without_match() {
let input = include_str!("generate_data_driven_dependency_metadata/fixtures/module-without-match.graphql");
let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/module-without-match.expected");<|fim▁hole|>#[test]
fn relay_match_on_interface() {
let input = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.graphql");
let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.expected");
test_fixture(transform_fixture, "relay-match-on-interface.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-interface.expected", input, expected);
}
#[test]
fn relay_match_on_union() {
let input = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.graphql");
let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.expected");
test_fixture(transform_fixture, "relay-match-on-union.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-union.expected", input, expected);
}
#[test]
fn relay_match_on_union_plural() {
let input = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.graphql");
let expected = include_str!("generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.expected");
test_fixture(transform_fixture, "relay-match-on-union-plural.graphql", "generate_data_driven_dependency_metadata/fixtures/relay-match-on-union-plural.expected", input, expected);
}<|fim▁end|> | test_fixture(transform_fixture, "module-without-match.graphql", "generate_data_driven_dependency_metadata/fixtures/module-without-match.expected", input, expected);
}
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""Module with Caffe models."""
from django.db import models
from employees.models import Employee
class Caffe(models.Model):
"""Stores one cafe."""
name = models.CharField(max_length=100, unique=True)
city = models.CharField(max_length=100)
street = models.CharField(max_length=100)
# CharField for extra characters like '-'
postal_code = models.CharField(max_length=20)
# CharFields in case house numbers like '1A'
building_number = models.CharField(max_length=10)
house_number = models.CharField(max_length=10, blank=True)
created_on = models.TimeField(auto_now_add=True)
creator = models.ForeignKey(Employee,<|fim▁hole|>
def __str__(self):
return '{}, {}'.format(self.name, self. city)<|fim▁end|> | related_name='my_caffe',
default=None,
blank=False,
null=True) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use minigrep::Config;
use std::{env, process};
fn main() {
let args: Vec<String> = env::args().collect();
let config = Config::new(&args).unwrap_or_else(|err| {
eprintln!("problem parsing arguments: {}", err);
process::exit(1);
});
<|fim▁hole|> eprintln!("application error: {}", err);
process::exit(1)
}
}<|fim▁end|> | if let Err(err) = minigrep::run(config) { |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# PROJETO LAVAGEM A SECO
#
# MAIN
#
# Felipe Bandeira da Silva
# 26 jul 15
#
import logging
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.options
import tornado.websocket
import tornado.httpserver
import os.path
from tornado.concurrent import Future
from tornado import gen
from tornado.options import define, options, parse_command_line
import socket
import fcntl
import struct
import random
# Tornado command-line options.
define("port", default=8888, help="run on the given port", type=int)
define("debug", default=False, help="run in debug mode")

import multiprocessing
import controle
import time
import os
import signal
import subprocess
import sys
from platform import uname

# Kiosk browser command line ('epiphany' is a known alternative).
#NAVEGADOR = 'epiphany'
NAVEGADOR = 'midori -e Fullscreen -a'

# The HTML page shows live values to the user.  The smaller this refresh
# period, the higher the processing load on the client (or, depending on
# the case, on the server).
TEMPO_MS_ATUALIZACAO_HTML = 500

# More than one client may be connected over websocket; every one of them
# must receive server messages and be able to send its own.
# websocket clients
clients = []

# queues feeding the periodic HTML page update task (joystick axes)
queue_joyx = multiprocessing.Queue()
queue_joyy = multiprocessing.Queue()
queue_joyz = multiprocessing.Queue()
# anemometer readings
queue_velocidade = multiprocessing.Queue()
queue_direcao = multiprocessing.Queue()
queue_distancia = multiprocessing.Queue()
# used for controlling the page with the joystick buttons
queue_joy_botoes = multiprocessing.Queue()
#class NavegadorWEB(multiprocessing.Process):
# def __init__(self):
# multiprocessing.Process.__init__(self)
#
# self.navegador = subprocess.Popen(['epiphany-browser 192.168.42.1:8888'], stdout=subprocess.PIPE, \
# shell=True, preexec_fn=os.setsid)
#
# def run(self):
# while True:
# time.sleep(0.01)
def inicia_navegador():
    """Launch the kiosk browser pointed at the local web UI (fire and forget)."""
    comando = NAVEGADOR + ' 192.168.42.1:8888'
    subprocess.Popen([comando],
                     stdout=subprocess.PIPE,
                     shell=True, preexec_fn=os.setsid)
def fecha_navegador():
    """Terminate every running instance of the kiosk browser.

    Finds the browser PIDs with ``pgrep`` and sends each one SIGTERM.
    After a grace period the process is probed with signal 0; if it is
    still alive a warning is printed.
    """
    # pgrep matches against the process *name*, so only the executable
    # (first word of NAVEGADOR) may be passed -- the full command line
    # with its arguments would never match any process.
    processos = subprocess.Popen(['pgrep', NAVEGADOR.split()[0]],
                                 stdout=subprocess.PIPE)
    print('PID dos processos %s' % processos.stdout)
    for pid in processos.stdout:
        os.kill(int(pid), signal.SIGTERM)
        try:
            time.sleep(3)
            # Signal 0 only checks for existence; OSError means it is gone.
            os.kill(int(pid), 0)
            # pid is the raw text line from pgrep -- convert before %d
            # (the original formatted the string and would raise TypeError).
            print(u'erro: o processo %d ainda existe' % int(pid))
        except OSError:
            continue
def get_ip_address():
    """Return the IPv4 address of the first working network interface.

    Tries ``eth0`` first, then ``wlan0``; falls back to the loopback
    address when neither interface yields an address.
    Based on http://code.activestate.com/recipes/439094/
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        for ifname in ('eth0', 'wlan0'):
            try:
                # SIOCGIFADDR expects the interface name packed into a
                # fixed 256-byte buffer; encode for Python 3 (no-op on
                # ASCII names under Python 2).
                packed = struct.pack('256s', ifname[:15].encode('ascii'))
                return socket.inet_ntoa(fcntl.ioctl(
                    s.fileno(),
                    0x8915,  # SIOCGIFADDR
                    packed)[20:24])
            except Exception:
                # Interface missing or not configured -- try the next one.
                continue
        return "127.0.0.1"
    finally:
        s.close()
def get_ip_address_interface(ifname):
    """Return the IPv4 address bound to network interface *ifname*.

    Falls back to "0.0.0.0" when the interface cannot be queried.
    Based on http://code.activestate.com/recipes/439094/
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        packed_name = struct.pack('256s', ifname[:15])
        raw = fcntl.ioctl(sock.fileno(),
                          0x8915,  # SIOCGIFADDR
                          packed_name)
        return socket.inet_ntoa(raw[20:24])
    except:
        return "0.0.0.0"
class MainHandler(tornado.web.RequestHandler):
    """Serves the main page (answers the client's GET)."""

    def get(self):
        # The template is rendered with values of interest; each keyword
        # below must match a variable of the same name in index.html.
        self.render("index.html", title="LAVAGEM A SECO", \
                    ip_host=get_ip_address()+":"+str(options.port), \
                    msg_status="LIGADO")
class WebSocketHandler(tornado.websocket.WebSocketHandler):
# Todo cliente se encarrega de conectar-se ao servidor websocket.
# Quando existe uma nova conexão é salvo qual cliente foi.
def open(self):
print 'tornado: websocket: aviso: nova conexão de um cliente'
clients.append(self)
self.write_message("connected")
# Quando um cliente envia uma mensagem, esta é a função responsável
# por ler e aqui deve ficar a chamada dos get das filas(queue)
def on_message(self, message):
print 'tornado: websocket: aviso: nova mensagem: %s' % message
q = self.application.settings.get('queue')
q.put(message)<|fim▁hole|> # Para evitar envios de informações a clientes que não existem mais
# é necessário retirá-los da lista
def on_close(self):
print 'tornado: websocket: aviso: conexão finalizada/perdida'
clients.remove(self)
fecha_navegador()
inicia_navegador()
def envia_cmd_websocket(cmd, arg):
    """Broadcast *cmd* and its *arg*, joined by ';', to every connected client.

    Saves repeating the send loop at each call site.
    """
    mensagem = cmd + ";" + arg
    for cliente in clients:
        cliente.write_message(mensagem)
def tarefa_atualizacao_html():
    """Periodic callback that refreshes the live values shown on the HTML page.

    Sends the host address and a random heartbeat value, then drains each
    sensor/joystick queue (at most one item per call) and forwards it to
    every websocket client.
    """
    envia_cmd_websocket("lan", get_ip_address())
    envia_cmd_websocket("random", str(random.randint(0, 1000)))

    # (queue, websocket command, max payload length or None for unlimited) --
    # same order and same truncation as the original hand-written stanzas.
    feeds = (
        (queue_joyx, "joyx", 6),
        (queue_joyy, "joyy", 6),
        (queue_joyz, "joyz", 6),
        (queue_joy_botoes, "b", None),
        (queue_velocidade, "v", None),
        (queue_direcao, "d", None),
        (queue_distancia, "x", 6),
    )
    for fila, comando, largura in feeds:
        # Only send when the queue actually holds a fresh value.
        if not fila.empty():
            valor = str(fila.get())
            if largura is not None:
                valor = valor[:largura]
            envia_cmd_websocket(comando, valor)
def main():
    """Set up the control task, the Tornado app and run forever (blocking)."""
    print u"Iniciando o servidor Tornado"
    fecha_navegador()
    tarefa_controle = multiprocessing.Queue()
    # This loop reads the joystick data and forwards it to the machine;
    # without it no joystick input would be handled.
    controle_loop = controle.ControleLavagem(tarefa_controle, \
                                             queue_joyx, \
                                             queue_joyy, \
                                             queue_joyz, \
                                             queue_joy_botoes, \
                                             queue_velocidade, \
                                             queue_direcao, \
                                             queue_distancia)
    controle_loop.daemon = True
    controle_loop.start()
    # Wait a little so the task is really ready.
    # TODO(review): would explicit synchronisation be better than a sleep?
    time.sleep(1)
    tarefa_controle.put("Testando Tarefa :)")
    parse_command_line()
    app = tornado.web.Application(
        [
            (r"/", MainHandler),
            (r"/ws", WebSocketHandler)
        ],
        cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
        template_path=os.path.join(os.path.dirname(__file__), "templates"),
        static_path=os.path.join(os.path.dirname(__file__), "static"),
        xsrf_cookies=True,
        debug=options.debug,
        autoreload=True,
        queue=tarefa_controle,
    )
    # port the server listens on
    app.listen(options.port)
    # create the server loop but do not start it yet
    main_loop = tornado.ioloop.IOLoop.instance()
    # Main periodic task of the washing system: reading and actuation,
    # pushed to the page every TEMPO_MS_ATUALIZACAO_HTML milliseconds.
    tarefa_atualizacao_html_loop = tornado.ioloop.PeriodicCallback(tarefa_atualizacao_html,\
                                                                   TEMPO_MS_ATUALIZACAO_HTML, \
                                                                   io_loop = main_loop)
    print u"aviso: tornado: start"
    tarefa_atualizacao_html_loop.start()
    inicia_navegador()
    # the server loop must come last since it is not a daemon
    main_loop.start()
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>test_update_coordinator.py<|end_file_name|><|fim▁begin|>"""Tests for the update coordinator."""
import asyncio
from datetime import timedelta
import logging
import urllib.error
import aiohttp
import pytest
import requests
from homeassistant.helpers import update_coordinator
from homeassistant.util.dt import utcnow
from tests.async_mock import AsyncMock, Mock, patch
from tests.common import async_fire_time_changed
_LOGGER = logging.getLogger(__name__)
def get_crd(hass, update_interval):
    """Make coordinator mocks.

    The update method simply counts its invocations and returns the count,
    so tests can tell how many refreshes actually ran from ``crd.data``.
    """
    calls = 0

    async def refresh() -> int:
        nonlocal calls
        calls += 1
        return calls

    crd = update_coordinator.DataUpdateCoordinator[int](
        hass,
        _LOGGER,
        name="test",
        update_method=refresh,
        update_interval=update_interval,
    )
    return crd
DEFAULT_UPDATE_INTERVAL = timedelta(seconds=10)
@pytest.fixture
def crd(hass):
    """Coordinator mock that auto-refreshes at DEFAULT_UPDATE_INTERVAL."""
    return get_crd(hass, DEFAULT_UPDATE_INTERVAL)
@pytest.fixture
def crd_without_update_interval(hass):
    """Coordinator mock that never automatically updates (interval=None)."""
    return get_crd(hass, None)
async def test_async_refresh(crd):
    """Test async_refresh for update coordinator."""
    assert crd.data is None
    await crd.async_refresh()
    assert crd.data == 1
    assert crd.last_update_success is True
    # Make sure we didn't schedule a refresh because we have 0 listeners
    assert crd._unsub_refresh is None

    updates = []

    def update_callback():
        updates.append(crd.data)

    unsub = crd.async_add_listener(update_callback)
    await crd.async_refresh()
    assert updates == [2]
    # With a listener attached, the next refresh is scheduled again.
    assert crd._unsub_refresh is not None

    # Test unsubscribing through function returned by async_add_listener
    unsub()
    await crd.async_refresh()
    assert updates == [2]

    # Test unsubscribing through async_remove_listener method
    crd.async_add_listener(update_callback)
    crd.async_remove_listener(update_callback)
    await crd.async_refresh()
    assert updates == [2]
async def test_request_refresh(crd):
    """Test request refresh for update coordinator."""
    assert crd.data is None
    await crd.async_request_refresh()
    assert crd.data == 1
    assert crd.last_update_success is True

    # Second time we hit the debounce: no additional refresh runs.
    await crd.async_request_refresh()
    assert crd.data == 1
    assert crd.last_update_success is True
async def test_request_refresh_no_auto_update(crd_without_update_interval):
    """Test request refresh for update coordinator without automatic update."""
    crd = crd_without_update_interval
    assert crd.data is None
    await crd.async_request_refresh()
    assert crd.data == 1
    assert crd.last_update_success is True

    # Second time we hit the debounce: no additional refresh runs.
    await crd.async_request_refresh()
    assert crd.data == 1
    assert crd.last_update_success is True
# Each tuple is (exception raised by the update method, expected log fragment).
@pytest.mark.parametrize(
    "err_msg",
    [
        (asyncio.TimeoutError, "Timeout fetching test data"),
        (requests.exceptions.Timeout, "Timeout fetching test data"),
        (urllib.error.URLError("timed out"), "Timeout fetching test data"),
        (aiohttp.ClientError, "Error requesting test data"),
        (requests.exceptions.RequestException, "Error requesting test data"),
        (urllib.error.URLError("something"), "Error requesting test data"),
        (update_coordinator.UpdateFailed, "Error fetching test data"),
    ],
)
async def test_refresh_known_errors(err_msg, crd, caplog):
    """Test raising known errors."""
    crd.update_method = AsyncMock(side_effect=err_msg[0])

    await crd.async_refresh()

    # Known errors mark the update as failed and log a friendly message.
    assert crd.data is None
    assert crd.last_update_success is False
    assert err_msg[1] in caplog.text
async def test_refresh_fail_unknown(crd, caplog):
    """Test raising unknown error."""
    await crd.async_refresh()

    crd.update_method = AsyncMock(side_effect=ValueError)

    await crd.async_refresh()

    # The previous data is kept; only the success flag flips.
    assert crd.data == 1  # value from previous fetch
    assert crd.last_update_success is False
    assert "Unexpected error fetching test data" in caplog.text
async def test_refresh_no_update_method(crd):
    """Test that an error is raised when no update method is provided."""
    await crd.async_refresh()

    crd.update_method = None

    with pytest.raises(NotImplementedError):
        await crd.async_refresh()
async def test_update_interval(hass, crd):
    """Test update interval works."""
    # Test we don't update without a subscriber
    async_fire_time_changed(hass, utcnow() + crd.update_interval)
    await hass.async_block_till_done()
    assert crd.data is None

    # Add subscriber
    update_callback = Mock()
    crd.async_add_listener(update_callback)

    # Test twice that we update with a subscriber attached
    async_fire_time_changed(hass, utcnow() + crd.update_interval)
    await hass.async_block_till_done()
    assert crd.data == 1

    async_fire_time_changed(hass, utcnow() + crd.update_interval)
    await hass.async_block_till_done()
    assert crd.data == 2

    # Test removing the listener
    crd.async_remove_listener(update_callback)

    async_fire_time_changed(hass, utcnow() + crd.update_interval)
    await hass.async_block_till_done()

    # Test we stop updating after we lose the last subscriber
    assert crd.data == 2
async def test_update_interval_not_present(hass, crd_without_update_interval):
    """Test update never happens with no update interval."""
    crd = crd_without_update_interval
    # Test we don't update without a subscriber when there is no update interval
    async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
    await hass.async_block_till_done()
    assert crd.data is None

    # Add subscriber
    update_callback = Mock()
    crd.async_add_listener(update_callback)

    # Test twice that we still don't update even with a subscriber
    async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
    await hass.async_block_till_done()
    assert crd.data is None

    async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
    await hass.async_block_till_done()
    assert crd.data is None

    # Test removing the listener
    crd.async_remove_listener(update_callback)

    async_fire_time_changed(hass, utcnow() + DEFAULT_UPDATE_INTERVAL)
    await hass.async_block_till_done()

    # Still no update after losing the last subscriber
    assert crd.data is None
async def test_refresh_recover(crd, caplog):
"""Test recovery of freshing data."""
crd.last_update_success = False<|fim▁hole|> assert "Fetching test data recovered" in caplog.text
async def test_coordinator_entity(crd):
"""Test the CoordinatorEntity class."""
entity = update_coordinator.CoordinatorEntity(crd)
assert entity.should_poll is False
crd.last_update_success = False
assert entity.available is False
await entity.async_update()
assert entity.available is True
with patch(
"homeassistant.helpers.entity.Entity.async_on_remove"
) as mock_async_on_remove:
await entity.async_added_to_hass()
assert mock_async_on_remove.called
# Verify we do not update if the entity is disabled
crd.last_update_success = False
with patch("homeassistant.helpers.entity.Entity.enabled", False):
await entity.async_update()
assert entity.available is False<|fim▁end|> |
await crd.async_refresh()
assert crd.last_update_success is True |
<|file_name|>GoogleJsonResponseExceptionFactoryTesting.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.api.client.googleapis.testing.json;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.json.Json;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.testing.http.HttpTesting;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import com.google.api.client.util.Beta;
import java.io.IOException;
/**
* {@link Beta} <br>
* Factory class that builds {@link GoogleJsonResponseException} instances for testing.
*
* @since 1.18
*/
@Beta
public final class GoogleJsonResponseExceptionFactoryTesting {
/**
* Convenience factory method that builds a {@link GoogleJsonResponseException} from its
* arguments. The method builds a dummy {@link HttpRequest} and {@link HttpResponse}, sets the
* response's status to a user-specified HTTP error code, suppresses exceptions, and executes the
* request. This forces the underlying framework to create, but not throw, a {@link
* GoogleJsonResponseException}, which the method retrieves and returns to the invoker.<|fim▁hole|> *
* @param jsonFactory the JSON factory that will create all JSON required by the underlying
* framework
* @param httpCode the desired HTTP error code. Note: do nut specify any codes that indicate
* successful completion, e.g. 2XX.
* @param reasonPhrase the HTTP reason code that explains the error. For example, if {@code
* httpCode} is {@code 404}, the reason phrase should be {@code NOT FOUND}.
* @return the generated {@link GoogleJsonResponseException}, as specified.
* @throws IOException if request transport fails.
*/
public static GoogleJsonResponseException newMock(
JsonFactory jsonFactory, int httpCode, String reasonPhrase) throws IOException {
MockLowLevelHttpResponse otherServiceUnavaiableLowLevelResponse =
new MockLowLevelHttpResponse()
.setStatusCode(httpCode)
.setReasonPhrase(reasonPhrase)
.setContentType(Json.MEDIA_TYPE)
.setContent(
"{ \"error\": { \"errors\": [ { \"reason\": \""
+ reasonPhrase
+ "\" } ], "
+ "\"code\": "
+ httpCode
+ " } }");
MockHttpTransport otherTransport =
new MockHttpTransport.Builder()
.setLowLevelHttpResponse(otherServiceUnavaiableLowLevelResponse)
.build();
HttpRequest otherRequest =
otherTransport.createRequestFactory().buildGetRequest(HttpTesting.SIMPLE_GENERIC_URL);
otherRequest.setThrowExceptionOnExecuteError(false);
HttpResponse otherServiceUnavailableResponse = otherRequest.execute();
return GoogleJsonResponseException.from(jsonFactory, otherServiceUnavailableResponse);
}
}<|fim▁end|> | |
<|file_name|>buddhist.js<|end_file_name|><|fim▁begin|>define(
//begin v1.x content
{
"dateFormatItem-yyyyMMMEd": "E, d MMM y G",
"dateFormatItem-MMMEd": "E, d MMM",
"dateFormatItem-hms": "hh:mm:ss a",
"days-standAlone-wide": [
"niedziela",
"poniedziałek",
"wtorek",
"środa",
"czwartek",
"piątek",
"sobota"
],
"months-standAlone-narrow": [
"s",
"l",
"m",
"k",
"m",
"c",
"l",
"s",
"w",
"p",
"l",
"g"
],
"dateTimeFormat-short": "{1}, {0}",
"dateFormatItem-Gy": "y G",
"dateTimeFormat-medium": "{1}, {0}",
"quarters-standAlone-abbr": [
"1 kw.",
"2 kw.",
"3 kw.",
"4 kw."
],
"dateFormatItem-y": "y G",
"dateFormatItem-yyyy": "y G",
"months-standAlone-abbr": [
"sty",
"lut",
"mar",
"kwi",
"maj",
"cze",
"lip",
"sie",
"wrz",
"paź",
"lis",
"gru"
],
"dateFormatItem-Ed": "E, d",
"days-standAlone-narrow": [
"N",
"P",
"W",<|fim▁hole|> "S"
],
"eraAbbr": [
"BE"
],
"dateFormatItem-GyMMMd": "d MMM y G",
"dateFormat-long": "d MMMM y G",
"dateFormat-medium": "d MMM y G",
"quarters-standAlone-wide": [
"I kwartał",
"II kwartał",
"III kwartał",
"IV kwartał"
],
"dateFormatItem-yyyyQQQQ": "QQQQ y G",
"quarters-standAlone-narrow": [
"K1",
"K2",
"K3",
"K4"
],
"months-standAlone-wide": [
"styczeń",
"luty",
"marzec",
"kwiecień",
"maj",
"czerwiec",
"lipiec",
"sierpień",
"wrzesień",
"październik",
"listopad",
"grudzień"
],
"dateFormatItem-yyyyMd": "d.MM.y G",
"dateFormatItem-yyyyMMMd": "d MMM y G",
"dateFormatItem-yyyyMEd": "E, d.MM.y G",
"dateFormatItem-MMMd": "d MMM",
"months-format-abbr": [
"sty",
"lut",
"mar",
"kwi",
"maj",
"cze",
"lip",
"sie",
"wrz",
"paź",
"lis",
"gru"
],
"quarters-format-abbr": [
"K1",
"K2",
"K3",
"K4"
],
"days-format-abbr": [
"niedz.",
"pon.",
"wt.",
"śr.",
"czw.",
"pt.",
"sob."
],
"days-format-narrow": [
"N",
"P",
"W",
"Ś",
"C",
"P",
"S"
],
"dateFormatItem-GyMMMEd": "E, d MMM y G",
"dateFormatItem-GyMMM": "LLL y G",
"dateFormatItem-yyyyQQQ": "QQQ y G",
"dateFormatItem-MEd": "E, d.MM",
"months-format-narrow": [
"s",
"l",
"m",
"k",
"m",
"c",
"l",
"s",
"w",
"p",
"l",
"g"
],
"days-standAlone-short": [
"niedz.",
"pon.",
"wt.",
"śr.",
"czw.",
"pt.",
"sob."
],
"dateFormatItem-hm": "hh:mm a",
"days-standAlone-abbr": [
"niedz.",
"pon.",
"wt.",
"śr.",
"czw.",
"pt.",
"sob."
],
"dateFormat-short": "dd.MM.y G",
"dateFormatItem-yyyyM": "MM.y G",
"dateFormat-full": "EEEE, d MMMM y G",
"dateFormatItem-Md": "d.MM",
"months-format-wide": [
"stycznia",
"lutego",
"marca",
"kwietnia",
"maja",
"czerwca",
"lipca",
"sierpnia",
"września",
"października",
"listopada",
"grudnia"
],
"days-format-short": [
"niedz.",
"pon.",
"wt.",
"śr.",
"czw.",
"pt.",
"sob."
],
"dateFormatItem-yyyyMMM": "LLL y G",
"quarters-format-wide": [
"I kwartał",
"II kwartał",
"III kwartał",
"IV kwartał"
],
"days-format-wide": [
"niedziela",
"poniedziałek",
"wtorek",
"środa",
"czwartek",
"piątek",
"sobota"
],
"dateFormatItem-h": "hh a"
}
//end v1.x content
);<|fim▁end|> | "Ś",
"C",
"P", |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[no_mangle]
pub extern "C" fn add_u32s(lhs: u32, rhs: u32) -> u32 {<|fim▁hole|>#[no_mangle]
pub static TEST_VALUE: u32 = 100;<|fim▁end|> | lhs + rhs
}
|
<|file_name|>desktop_macosx64.py<|end_file_name|><|fim▁begin|>import os
import sys
config = {
#########################################################################
######## MACOSX GENERIC CONFIG KEYS/VAlUES
'default_actions': [
'clobber',
'clone-tools',
'checkout-sources',
'build',
'upload-files',
'sendchange',
'check-test',
],
"buildbot_json_path": "buildprops.json",
'exes': {
'python2.7': sys.executable,
"buildbot": "/tools/buildbot/bin/buildbot",
},
'app_ini_path': '%(obj_dir)s/dist/bin/application.ini',
'enable_ccache': True,
'vcs_share_base': '/builds/hg-shared',
'objdir': 'obj-firefox',
'tooltool_script': ["/builds/tooltool.py"],
'tooltool_bootstrap': "setup.sh",
'enable_talos_sendchange': False,
'enable_unittest_sendchange': False,
#########################################################################
#########################################################################
###### 64 bit specific ######
'base_name': 'B2G_%(branch)s_macosx64_gecko',
'platform': 'macosx64_gecko',
'stage_platform': 'macosx64_gecko',
'stage_product': 'b2g',
'env': {
'MOZBUILD_STATE_PATH': os.path.join(os.getcwd(), '.mozbuild'),
'MOZ_AUTOMATION': '1',
'HG_SHARE_BASE_DIR': '/builds/hg-shared',
'MOZ_OBJDIR': 'obj-firefox',
'CHOWN_ROOT': '~/bin/chown_root',
'CHOWN_REVERT': '~/bin/chown_revert',
'TINDERBOX_OUTPUT': '1',
'TOOLTOOL_CACHE': '/builds/tooltool_cache',
'TOOLTOOL_HOME': '/builds',
'MOZ_CRASHREPORTER_NO_REPORT': '1',
'CCACHE_DIR': '/builds/ccache',
'CCACHE_COMPRESS': '1',
'CCACHE_UMASK': '002',
'LC_ALL': 'C',
## 64 bit specific
'PATH': '/tools/python/bin:/tools/buildbot/bin:/opt/local/bin:/usr/bin:'
'/bin:/usr/sbin:/sbin:/usr/local/bin:/usr/X11/bin',
##
},
'upload_env': {
# stage_server is dictated from build_pool_specifics.py
'UPLOAD_HOST': '%(stage_server)s',
'UPLOAD_USER': '%(stage_username)s',
'UPLOAD_SSH_KEY': '/Users/cltbld/.ssh/%(stage_ssh_key)s',
'UPLOAD_TO_TEMP': '1',<|fim▁hole|> 'MINIDUMP_STACKWALK': '%(abs_tools_dir)s/breakpad/osx64/minidump_stackwalk',
'MINIDUMP_SAVE_PATH': '%(base_work_dir)s/minidumps',
},
'src_mozconfig': 'b2g/config/mozconfigs/macosx64_gecko/nightly',
'tooltool_manifest_src': 'b2g/config/tooltool-manifests/macosx64/releng.manifest',
#########################################################################
}<|fim▁end|> | },
"check_test_env": { |
<|file_name|>downloadGames.js<|end_file_name|><|fim▁begin|>var fs = require('fs')
var d3 = require('d3')
var request = require('request')
var cheerio = require('cheerio')
var queue = require('queue-async')
var _ = require('underscore')
var glob = require("glob")
var games = []
glob.sync(__dirname + "/raw-series/*").forEach(scrape)
function scrape(dir, i){
var series = dir.split('/raw-series/')[1]
process.stdout.write("parsing " + i + " " + series + "\r")
var html = fs.readFileSync(dir, 'utf-8')
var $ = cheerio.load(html)
$('a').each(function(i){
var str = $(this).text()<|fim▁hole|>
games.push({series: series, boxLink: $(this).attr('href').replace('/boxscores/', '')})
})
}
// Persist the scraped series -> box-score index as CSV.
fs.writeFileSync(__dirname + '/playoffGames.csv', d3.csv.format(games))

// Download box scores one at a time (concurrency 1) to pace requests.
var q = queue(1)
var downloaded = glob.sync(__dirname + '/raw-box/*.html').map(d => d.split('/raw-box/')[1])

// Queue only the box scores not already cached on disk.
games
  .map(d => d.boxLink)
  .filter(d => !_.contains(downloaded, d))
  .forEach(d => q.defer(downloadBox, d))
// Fetch one box-score page and cache it under raw-box/.
// NOTE(review): cb fires on a fixed 1s timer (rate limiting), not when the
// request completes, so the queue paces requests rather than awaiting them;
// the file is written asynchronously whenever the response arrives — confirm
// this overlap is intended.
// NOTE(review): `error` is ignored; a failed request still writes a file
// (with `html` undefined) — verify whether failures should be retried.
function downloadBox(d, cb){
  process.stdout.write("downloading " + d + "\r");
  var url = 'http://www.basketball-reference.com/boxscores/' + d
  // console.log(url)
  setTimeout(cb, 1000)
  request(url, function(error, response, html){
    var path = __dirname + '/raw-box/' + d
    fs.writeFileSync(path, html)
  })
}
if (str == 'box scores' || !~href.indexOf('/boxscores/') || i % 2) return |
<|file_name|>moc_mimepart.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
** Meta object code from reading C++ file 'mimepart.h'
**
** Created by: The Qt Meta Object Compiler version 67 (Qt 5.7.0)
**
** WARNING! All changes made in this file will be lost!
*****************************************************************************/
#include "../../GCApp/emailapi/mimepart.h"
#include <QtCore/qbytearray.h>
#include <QtCore/qmetatype.h>
#if !defined(Q_MOC_OUTPUT_REVISION)
#error "The header file 'mimepart.h' doesn't include <QObject>."
#elif Q_MOC_OUTPUT_REVISION != 67
#error "This file was generated using the moc from 5.7.0. It"
#error "cannot be used with the include files from this version of Qt."
#error "(The moc has changed too much.)"
#endif
QT_BEGIN_MOC_NAMESPACE
struct qt_meta_stringdata_MimePart_t {
QByteArrayData data[1];
char stringdata0[9];
};
#define QT_MOC_LITERAL(idx, ofs, len) \
Q_STATIC_BYTE_ARRAY_DATA_HEADER_INITIALIZER_WITH_OFFSET(len, \
qptrdiff(offsetof(qt_meta_stringdata_MimePart_t, stringdata0) + ofs \
- idx * sizeof(QByteArrayData)) \
)<|fim▁hole|> },
"MimePart"
};
#undef QT_MOC_LITERAL
static const uint qt_meta_data_MimePart[] = {
// content:
7, // revision
0, // classname
0, 0, // classinfo
0, 0, // methods
0, 0, // properties
0, 0, // enums/sets
0, 0, // constructors
0, // flags
0, // signalCount
0 // eod
};
void MimePart::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a)
{
Q_UNUSED(_o);
Q_UNUSED(_id);
Q_UNUSED(_c);
Q_UNUSED(_a);
}
const QMetaObject MimePart::staticMetaObject = {
{ &QObject::staticMetaObject, qt_meta_stringdata_MimePart.data,
qt_meta_data_MimePart, qt_static_metacall, Q_NULLPTR, Q_NULLPTR}
};
const QMetaObject *MimePart::metaObject() const
{
return QObject::d_ptr->metaObject ? QObject::d_ptr->dynamicMetaObject() : &staticMetaObject;
}
void *MimePart::qt_metacast(const char *_clname)
{
if (!_clname) return Q_NULLPTR;
if (!strcmp(_clname, qt_meta_stringdata_MimePart.stringdata0))
return static_cast<void*>(const_cast< MimePart*>(this));
return QObject::qt_metacast(_clname);
}
int MimePart::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
_id = QObject::qt_metacall(_c, _id, _a);
if (_id < 0)
return _id;
return _id;
}
QT_END_MOC_NAMESPACE<|fim▁end|> | static const qt_meta_stringdata_MimePart_t qt_meta_stringdata_MimePart = {
{
QT_MOC_LITERAL(0, 0, 8) // "MimePart"
|
<|file_name|>storage.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and<|fim▁hole|>
import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
genericapirequest "k8s.io/apiserver/pkg/request"
"k8s.io/kubernetes/pkg/apis/batch"
"k8s.io/kubernetes/pkg/genericapiserver/api/rest"
"k8s.io/kubernetes/pkg/registry/batch/job"
"k8s.io/kubernetes/pkg/registry/generic"
genericregistry "k8s.io/kubernetes/pkg/registry/generic/registry"
)
// JobStorage includes dummy storage for Job.
// Job serves the main jobs resource; Status serves the status subresource.
type JobStorage struct {
	Job    *REST
	Status *StatusREST
}
// NewStorage builds the REST storage for Jobs and bundles it together with
// the storage for the Job status subresource.
func NewStorage(optsGetter generic.RESTOptionsGetter) JobStorage {
	rest, statusREST := NewREST(optsGetter)
	return JobStorage{Job: rest, Status: statusREST}
}
// REST implements a RESTStorage for jobs against etcd.
// It embeds the generic registry store, inheriting the full storage API.
type REST struct {
	*genericregistry.Store
}
// NewREST returns a RESTStorage object that will work against Jobs.
// It also returns a StatusREST sharing the same store but using the
// status-specific update strategy for the status subresource.
func NewREST(optsGetter generic.RESTOptionsGetter) (*REST, *StatusREST) {
	store := &genericregistry.Store{
		NewFunc:     func() runtime.Object { return &batch.Job{} },
		NewListFunc: func() runtime.Object { return &batch.JobList{} },
		ObjectNameFunc: func(obj runtime.Object) (string, error) {
			return obj.(*batch.Job).Name, nil
		},
		PredicateFunc:     job.MatchJob,
		QualifiedResource: batch.Resource("jobs"),

		CreateStrategy: job.Strategy,
		UpdateStrategy: job.Strategy,
		DeleteStrategy: job.Strategy,
	}
	options := &generic.StoreOptions{RESTOptions: optsGetter, AttrFunc: job.GetAttrs}
	if err := store.CompleteWithOptions(options); err != nil {
		panic(err) // TODO: Propagate error up
	}

	// Copy of the store that differs only in its update strategy, which
	// restricts writes to the status subset of a Job.
	statusStore := *store
	statusStore.UpdateStrategy = job.StatusStrategy
	return &REST{store}, &StatusREST{store: &statusStore}
}
// StatusREST implements the REST endpoint for changing the status of a job.
type StatusREST struct {
	store *genericregistry.Store
}
// New returns an empty Job object.
func (r *StatusREST) New() runtime.Object {
	return &batch.Job{}
}
// Get retrieves the object from the storage. It is required to support Patch.
func (r *StatusREST) Get(ctx genericapirequest.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
	return r.store.Get(ctx, name, options)
}
// Update alters the status subset of an object.
func (r *StatusREST) Update(ctx genericapirequest.Context, name string, objInfo rest.UpdatedObjectInfo) (runtime.Object, bool, error) {
return r.store.Update(ctx, name, objInfo)
}<|fim▁end|> | limitations under the License.
*/
package storage |
<|file_name|>test_structure.py<|end_file_name|><|fim▁begin|>import copy, time, StringIO
import unittest
from datetime import datetime
from datetime import date
from nive.utils.dataPool2.structure import *
from nive.tests import __local
from nive.utils.dataPool2.tests import test_Base
# Field-type definitions used by the tests below:
# table name -> {field id: type string (data2) or field definition (pool_meta)}.
ftypes = {}
ftypes[u"data2"] = {u"fstr":"string",
                    u"ftext":"text",
                    u"ftime":"timestamp",
                    u"fmselection":"mselection",
                    u"fmcheckboxes":"mcheckboxes",
                    u"furllist":"urllist",
                    u"funitlist":"unitlist",
                    u"fbool":"bool",
                    u"fjson":"json"}
ftypes[u"pool_meta"] = {}
# pool_meta gets the full system field definitions, keyed by field id.
for i in test_Base.SystemFlds:
    ftypes[u"pool_meta"][i["id"]] = i
class StructureTest(unittest.TestCase):
    """Tests for ``PoolStructure`` initialisation and its accessor methods.

    Uses ``assertTrue``/``assertFalse``: the ``assert_`` alias is deprecated
    and was removed in Python 3.12.
    """

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_set1(self):
        # Construction with an explicit stdMeta field list.
        structure = PoolStructure(structure=test_Base.struct,
                                  fieldtypes=ftypes,
                                  stdMeta=[u"id", u"pool_type"])
        self.assertTrue(structure.get(u"pool_meta"))
        self.assertTrue(len(structure.get(u"pool_meta")) == len(test_Base.struct[u"pool_meta"]))
        self.assertTrue(len(structure.get(u"data1")) == len(test_Base.struct[u"data1"]))
        self.assertTrue(len(structure.get(u"data2")) == len(test_Base.struct[u"data2"]))
        self.assertTrue(len(structure.stdMeta) == 2)
        self.assertTrue(structure.fieldtypes[u"data2"][u"fstr"] == "string")
        self.assertTrue(structure.codepage == u"utf-8")

    def test_set2(self):
        # Init() after construction, with a non-default codepage.
        structure = PoolStructure()
        structure.Init(structure=test_Base.struct,
                       fieldtypes=ftypes,
                       codepage="latin-1")
        self.assertTrue(structure.get(u"pool_meta"))
        self.assertTrue(len(structure.get(u"pool_meta")) == len(test_Base.struct[u"pool_meta"]))
        self.assertTrue(len(structure.get(u"data1")) == len(test_Base.struct[u"data1"]))
        self.assertTrue(len(structure.get(u"data2")) == len(test_Base.struct[u"data2"]))
        self.assertTrue(len(structure.stdMeta) == 0)
        self.assertTrue(structure.fieldtypes[u"data2"][u"fstr"] == "string")
        self.assertTrue(structure.codepage == u"latin-1")

    def test_set3(self):
        # Empty table lists: pool_meta still receives its two default fields.
        structure = PoolStructure()
        structure.Init(structure={u"pool_meta": [], u"data1": [], u"data2": []},
                       fieldtypes=ftypes,
                       codepage="latin-1")
        self.assertTrue(structure.get(u"pool_meta"))
        self.assertTrue(len(structure.get(u"pool_meta")) == 2)
        self.assertTrue(len(structure.get(u"data1")) == 0)
        self.assertTrue(len(structure.get(u"data2")) == 0)

    def test_empty(self):
        structure = PoolStructure()
        self.assertTrue(structure.IsEmpty())

    def test_func(self):
        structure = PoolStructure(structure=test_Base.struct,
                                  fieldtypes=ftypes,
                                  stdMeta=[u"id", u"pool_type"])
        self.assertFalse(structure.IsEmpty())
        self.assertTrue(structure.get("pool_meta"))
        # get() falls back to the supplied default for unknown keys.
        self.assertTrue(structure.get("none", "aaa") == "aaa")
        self.assertTrue(structure["pool_meta"])
        self.assertTrue(structure["data1"])
        self.assertTrue(structure["data2"])
        self.assertTrue(structure.has_key("data2"))
        self.assertTrue(len(structure.keys()) == 3)
class ConversionTest(unittest.TestCase):
    """Tests PoolStructure.serialize()/deserialize() per field type.

    Fixes applied in review:
    - The class defined ``test_ds_mselection`` twice; the second definition
      silently shadowed the first so the dict-based variant never ran. The
      dict-based variant is renamed to ``test_ds_mselection_dict``.
    - ``test_deserialize_notype`` had its assertions displaced; restored.
    - ``test_se_mlist`` referenced field ``u"mcheckboxes"`` which does not
      exist in ftypes (the field is ``u"fmcheckboxes"``); corrected so the
      multi-checkbox code path is actually exercised.
    """

    def setUp(self):
        self.structure = PoolStructure(structure=test_Base.struct,
                                       fieldtypes=ftypes,
                                       stdMeta=[u"id",u"pool_type"])

    def tearDown(self):
        pass

    def test_serialize_notype(self):
        # Unknown fields: numbers pass through, strings become unicode,
        # datetimes are formatted, sequences are json-encoded with a marker.
        self.assert_(self.structure.serialize(u"pool_meta", u"somevalue", 123)==123)
        self.assert_(isinstance(self.structure.serialize(u"pool_meta", u"somevalue", "123"), unicode))
        value = datetime.now()
        self.assert_(self.structure.serialize(u"pool_meta", u"somevalue", value)==value.strftime("%Y-%m-%d %H:%M:%S"))
        value = ("aaa","bbb")
        self.assert_(self.structure.serialize(u"pool_meta", u"somevalue", value).startswith(u"_json_"))
        value = (u"aaa",u"bbb")
        self.assert_(self.structure.serialize(u"pool_meta", u"somevalue", value).startswith(u"_json_"))
        value = [1,2,3]
        self.assert_(self.structure.serialize(u"pool_meta", u"somevalue", value).startswith(u"_json_"))

    def test_se_mselection(self):
        # Serializing a whole dict converts each known field by its type.
        v = {u"id":u"123", u"pool_sort":u"123.12", u"pool_wfa":["value"], u"somevalue": "test"}
        values = self.structure.serialize(u"pool_meta", None, v)
        self.assert_(values[u"id"]==123)
        self.assert_(values[u"pool_sort"]==123.12)
        self.assert_(values[u"pool_wfa"]==u"value")

    def test_se_number(self):
        self.assert_(self.structure.serialize(u"pool_meta", u"id", 123)==123)
        self.assert_(self.structure.serialize(u"pool_meta", u"id", u"123")==123)
        self.assert_(self.structure.serialize(u"pool_meta", u"id", "123")==123)
        self.assert_(self.structure.serialize(u"pool_meta", u"id", 123.12)==123)

    def test_se_float(self):
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", 123)==123.0)
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", u"123.12")==123.12)
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", "123.0")==123.0)
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", 123.12)==123.12)

    def test_se_date(self):
        # datetime, date and unix timestamps serialize to their unicode form.
        value = datetime.now()
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_change", value)==unicode(value))
        value = date.today()
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_create", value)==unicode(value))
        value = time.time()
        self.assert_(self.structure.serialize(u"data2", u"ftime", value)==unicode(value))

    def test_se_list(self):
        # Single-select list fields flatten a one-element sequence to a scalar.
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_wfa", u"value")==u"value")
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_wfa", ["value"])=="value")
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_wfa", ())=="")

    def test_se_mlist(self):
        # Multi-valued fields accept scalars and sequences; empty is falsy.
        self.assert_(self.structure.serialize(u"data2", u"fmselection", u"value"))
        self.assert_(self.structure.serialize(u"data2", u"fmselection", [u"value"]))
        self.assert_(self.structure.serialize(u"data2", u"fmselection", ("value",)))
        self.assertFalse(self.structure.serialize(u"data2", u"fmselection", u""))
        self.assert_(self.structure.serialize(u"data2", u"fmcheckboxes", u"value"))
        self.assert_(self.structure.serialize(u"data2", u"furllist", u"value"))
        self.assert_(self.structure.serialize(u"data2", u"funitlist", u"value"))

    def test_se_bool(self):
        self.assert_(self.structure.serialize(u"data2", u"fbool", u"true")==1)
        self.assert_(self.structure.serialize(u"data2", u"fbool", u"false")==0)
        self.assert_(self.structure.serialize(u"data2", u"fbool", True)==1)
        self.assert_(self.structure.serialize(u"data2", u"fbool", False)==0)
        self.assert_(self.structure.serialize(u"data2", u"fbool", u"True")==1)
        self.assert_(self.structure.serialize(u"data2", u"fbool", u"False")==0)
        # Unrecognized values fall back to False.
        self.assert_(self.structure.serialize(u"data2", u"fbool", ("???",))==0)

    def test_se_json(self):
        self.assert_(self.structure.serialize(u"data2", u"fjson", {"a":123,"b":"aaa"}))
        self.assert_(json.loads(self.structure.serialize(u"data2", u"fjson", {"a":123,"b":"aaa"}))["a"]==123)
        self.assert_(json.loads(self.structure.serialize(u"data2", u"fjson", {"a":123,"b":"aaa"}))["b"]==u"aaa")

    def test_deserialize_notype(self):
        # Untyped fields: a "_json_" marker round-trips; plain strings pass through.
        value = u"_json_"+json.dumps(("aaa","bbb"))
        self.assert_(self.structure.deserialize(u"pool_meta", u"somevalue", value)[0]==u"aaa")
        self.assert_(self.structure.deserialize(u"pool_meta", u"somevalue", "somevalue")==u"somevalue")

    def test_ds_mselection_dict(self):
        # Renamed from test_ds_mselection: the later field-based definition
        # shadowed this one, so it never ran under unittest discovery.
        v = {u"fmselection": json.dumps(["aaa","bbb"]),u"furllist":json.dumps(["aaa","bbb"]), u"somevalue": "test"}
        values = self.structure.deserialize(u"data2", None, v)
        self.assert_(values[u"fmselection"][0]=="aaa")
        self.assert_(values[u"furllist"][0]=="aaa")

    def test_ds_date(self):
        value = datetime.now()
        x=self.structure.deserialize(u"pool_meta", u"pool_change", unicode(value))
        self.assert_(x.strftime("%Y-%m-%d %H:%M:%S")==value.strftime("%Y-%m-%d %H:%M:%S"))
        value = date.today()
        x=self.structure.deserialize(u"pool_meta", u"pool_create", unicode(value))
        self.assert_(x.strftime("%Y-%m-%d")==value.strftime("%Y-%m-%d"))
        value = time.time()
        self.assert_(self.structure.deserialize(u"data2", u"ftime", value))

    def test_ds_mselection(self):
        self.assert_(self.structure.deserialize(u"data2", u"fmselection", json.dumps(["aaa","bbb"]))[0]=="aaa")
        self.assert_(self.structure.deserialize(u"data2", u"fmcheckboxes", json.dumps(["aaa","bbb"]))[0]=="aaa")
        self.assert_(self.structure.deserialize(u"data2", u"furllist", json.dumps(["aaa","bbb"]))[0]=="aaa")
        self.assert_(self.structure.deserialize(u"data2", u"funitlist", json.dumps(["aaa","bbb"]))[0]=="aaa")

    def test_ds_json(self):
        self.assert_(self.structure.deserialize(u"data2", u"fjson", json.dumps(["aaa","bbb"]))[0]=="aaa")
def seCallback(value, field):
    """Serialization hook used by CallbackTest: inverts the case of *value*."""
    swapped = value.swapcase()
    return swapped
def deCallback(value, field):
    """Deserialization hook used by CallbackTest: capitalizes *value*."""
    result = value.capitalize()
    return result
class CallbackTest(unittest.TestCase):
    """Verifies that per-type serialize/deserialize callbacks are applied.

    NOTE(review): test_se_mselection/test_se_number/test_se_float repeat
    ConversionTest cases; here they double as a check that installing
    callbacks for "string" fields does not disturb other field types.
    """

    def setUp(self):
        self.structure = PoolStructure(structure=test_Base.struct,
                                       fieldtypes=ftypes,
                                       stdMeta=[u"id",u"pool_type"])
        # Register the module-level callbacks for all "string"-typed fields.
        self.structure.serializeCallbacks = {"string": seCallback}
        self.structure.deserializeCallbacks = {"string": deCallback}

    def tearDown(self):
        pass

    def test_serialize_callback(self):
        # seCallback swaps case on serialize; deCallback capitalizes on deserialize.
        self.assert_(self.structure.serialize(u"pool_meta", u"title", u"somevalue")==u"SOMEVALUE")
        self.assert_(self.structure.deserialize(u"pool_meta", u"title", u"somevalue")==u"Somevalue")

    def test_se_mselection(self):
        # Non-string fields must be unaffected by the string callbacks.
        v = {u"id":u"123", u"pool_sort":u"123.12", u"pool_wfa":["value"], u"somevalue": "test"}
        values = self.structure.serialize(u"pool_meta", None, v)
        self.assert_(values[u"id"]==123)
        self.assert_(values[u"pool_sort"]==123.12)
        self.assert_(values[u"pool_wfa"]==u"value")

    def test_se_number(self):
        self.assert_(self.structure.serialize(u"pool_meta", u"id", 123)==123)
        self.assert_(self.structure.serialize(u"pool_meta", u"id", u"123")==123)
        self.assert_(self.structure.serialize(u"pool_meta", u"id", "123")==123)
        self.assert_(self.structure.serialize(u"pool_meta", u"id", 123.12)==123)

    def test_se_float(self):
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", 123)==123.0)
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", u"123.12")==123.12)
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", "123.0")==123.0)
        self.assert_(self.structure.serialize(u"pool_meta", u"pool_sort", 123.12)==123.12)
self.assert_(self.structure.deserialize(u"pool_meta", u"somevalue", "somevalue")==u"somevalue") |
<|file_name|>destructuring.rs<|end_file_name|><|fim▁begin|>fn main() {
let pair = (4u32, 5u32);
let (a, b) = pair;
let (b, a) = (a, b); // Swap
let smaller = match pair {
(x, y) if x < y => x,<|fim▁hole|>
match pair {
(0, 0) => println!("Origin"),
(0, y) => println!("Y-axis, coordinate {}", y),
(x, 0) => println!("X-axis, coordinate {}", x),
(x, y) => {
let distance = ((x*x + y*y() as f32).sqrt();
println!("Point");
};
}<|fim▁end|> | (_, y) => y
};
} |
<|file_name|>configParse.py<|end_file_name|><|fim▁begin|>#
# configparse.py
#
# an example of using the parsing module to be able to process a .INI configuration file
#
# Copyright (c) 2003, Paul McGuire
#
from pyparsing import \
Literal, Word, ZeroOrMore, Group, Dict, Optional, \
printables, ParseException, restOfLine
import pprint
inibnf = None
def inifile_BNF():
    """Build (once) and return the pyparsing grammar for .INI files.

    The grammar is cached in the module-level ``inibnf`` so repeated calls
    are cheap.
    """
    global inibnf
    if not inibnf:
        # punctuation
        lbrack = Literal("[").suppress()
        rbrack = Literal("]").suppress()
        equals = Literal("=").suppress()
        semi   = Literal(";")

        # Comments run from ';' to end of line and are ignored everywhere.
        comment = semi + Optional( restOfLine )

        # NOTE(review): nonrbrack / nonequals (the character classes for
        # section names and keys) are defined elsewhere in this file —
        # confirm they are in scope before this point.
        sectionDef = lbrack + Word( nonrbrack ) + rbrack
        keyDef = ~lbrack + Word( nonequals ) + equals + restOfLine

        # using Dict will allow retrieval of named data fields as attributes of the parsed results
        inibnf = Dict( ZeroOrMore( Group( sectionDef + Dict( ZeroOrMore( Group( keyDef ) ) ) ) ) )

        inibnf.ignore( comment )

    return inibnf
pp = pprint.PrettyPrinter(2)
def test( strng ):
    """Parse the .INI file named *strng*, pretty-print the result, and return
    the parsed tokens. Python 2 code (print statements, old except syntax)."""
    print strng
    try:
        # NOTE(review): file() is the Python 2 built-in open(); also, if
        # parseString raises, `tokens` is never bound and the final return
        # raises NameError — presumably acceptable for this demo script.
        iniFile = file(strng)
        iniData = "".join( iniFile.readlines() )
        bnf = inifile_BNF()
        tokens = bnf.parseString( iniData )
        pp.pprint( tokens.asList() )

    except ParseException, err:
        # Show the offending line with a caret under the failure column.
        print err.line
        print " "*(err.column-1) + "^"
        print err

    iniFile.close()
    print
    return tokens
# Demo driver: parse setup.ini and show the three equivalent access styles
# provided by pyparsing's Dict results (indexing and attribute access).
ini = test("setup.ini")
print "ini['Startup']['modemid'] =", ini['Startup']['modemid']
print "ini.Startup =", ini.Startup
print "ini.Startup.modemid =", ini.Startup.modemid
nonrbrack = "".join( [ c for c in printables if c != "]" ] ) + " \t"
nonequals = "".join( [ c for c in printables if c != "=" ] ) + " \t"
|
<|file_name|>slider.component.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
import {SliderComponent} from "./slider.component";
describe('Component: Slider', () => {
it('should create an instance', () => {
let component = new SliderComponent(null, null);
expect(component).toBeTruthy();
});<|fim▁hole|>});<|fim▁end|> | |
<|file_name|>vimsupport.py<|end_file_name|><|fim▁begin|># Copyright (C) 2011-2012 Google Inc.
# 2016 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
# Not installing aliases from python-future; it's unreliable and slow.
from builtins import * # noqa
from future.utils import iterkeys
import vim
import os
import json
import re
from collections import defaultdict
from ycmd.utils import ( ByteOffsetToCodepointOffset, GetCurrentDirectory,
JoinLinesAsUnicode, ToBytes, ToUnicode )
from ycmd import user_options_store
# Maps the user's g:ycm_goto_buffer_command value to the Vim command used to
# open a jump target.
BUFFER_COMMAND_MAP = { 'same-buffer'      : 'edit',
                       'horizontal-split' : 'split',
                       'vertical-split'   : 'vsplit',
                       'new-tab'          : 'tabedit' }

# Confirmation prompt shown before a multi-file FixIt opens hidden buffers.
FIXIT_OPENING_BUFFERS_MESSAGE_FORMAT = (
    'The requested operation will apply changes to {0} files which are not '
    'currently open. This will therefore open {0} new files in the hidden '
    'buffers. The quickfix list can then be used to review the changes. No '
    'files will be written to disk. Do you wish to continue?' )

# Characters that, when they are the last non-whitespace before the caret,
# still allow completion/signature hints (see CanComplete).
potential_hint_triggers = list( map( ToBytes, [ '[', '(', ',', ':' ] ) )
def CanComplete():
  """Returns whether it's appropriate to provide any completion at the current
  line and column."""
  try:
    line, column = LineAndColumnAfterLastNonWhitespace()
  except TypeError:
    # LineAndColumnAfterLastNonWhitespace returned None: nothing but
    # whitespace between the caret and the start of the buffer.
    return False

  if ( line, column ) == CurrentLineAndColumn():
    return True

  # Otherwise complete only if the last non-whitespace char is a hint trigger.
  return ( ToBytes( vim.current.buffer[ line ][ column - 1 ] )
           in potential_hint_triggers )
def SnappedLineAndColumn():
  """Will return CurrentLineAndColumn(), except when there's solely whitespace
  between caret and a potential hint trigger, where it "snaps to trigger",
  returning hint trigger's line and column instead."""
  try:
    line, column = LineAndColumnAfterLastNonWhitespace()
  except TypeError:
    # None returned: buffer is blank back to the start; nothing to snap to.
    return CurrentLineAndColumn()

  if ( ToBytes( vim.current.buffer[ line ][ column - 1 ] )
       in potential_hint_triggers ):
    return ( line, column )

  return CurrentLineAndColumn()
def LineAndColumnAfterLastNonWhitespace():
  """Returns the 0-based (line, column) just past the last non-whitespace
  character at or before the caret, scanning upwards through earlier lines.
  Returns None when everything up to the start of the buffer is whitespace;
  callers detect that via the TypeError raised on tuple unpacking."""
  line, column = CurrentLineAndColumn()
  line_value = vim.current.line[ :column ].rstrip()
  while not line_value:
    line = line - 1
    if line == -1:
      return None
    line_value = vim.current.buffer[ line ].rstrip()
  return line, len( line_value )
# Raised (as RuntimeError text) by SelectFromList when no valid item is chosen.
NO_SELECTION_MADE_MSG = "No valid selection was made; aborting."
def CurrentLineAndColumn():
  """Returns the 0-based current line and 0-based current column."""
  # See the comment in CurrentColumn about the calculation for the line and
  # column number. Vim's cursor tuple is (1-based line, 0-based byte column).
  line, column = vim.current.window.cursor
  line -= 1
  return line, column
def CurrentColumn():
  """Returns the 0-based current column (a byte offset). Do NOT access the
  CurrentColumn in vim.current.line. It doesn't exist yet when the cursor is
  at the end of the line. Only the chars before the current column exist in
  vim.current.line."""
  # vim's columns are 1-based while vim.current.line columns are 0-based
  # ... but vim.current.window.cursor (which returns a (line, column) tuple)
  # columns are 0-based, while the line from that same tuple is 1-based.
  # vim.buffers buffer objects OTOH have 0-based lines and columns.
  # Pigs have wings and I'm a loopy purple duck. Everything makes sense now.
  return vim.current.window.cursor[ 1 ]
def CurrentLineContents():
  """Returns the current line as a unicode string."""
  return ToUnicode( vim.current.line )
def CurrentLineContentsAndCodepointColumn():
  """Returns the line contents as a unicode string and the 0-based current
  column as a codepoint offset. If the current column is outside the line,
  returns the column position at the end of the line."""
  line = CurrentLineContents()
  byte_column = CurrentColumn()
  # ByteOffsetToCodepointOffset expects 1-based offset.
  column = ByteOffsetToCodepointOffset( line, byte_column + 1 ) - 1
  return line, column
def TextAfterCursor():
  """Returns the text after CurrentColumn (from the caret to end of line)."""
  return ToUnicode( vim.current.line[ CurrentColumn(): ] )
def TextBeforeCursor():
  """Returns the text before CurrentColumn (start of line to the caret)."""
  return ToUnicode( vim.current.line[ :CurrentColumn() ] )
# Note the difference between buffer OPTIONS and VARIABLES; the two are not
# the same.
def GetBufferOption( buffer_object, option ):
  """Returns the value of Vim option |option| for |buffer_object| via
  getbufvar(), as a string."""
  # NOTE: We used to check for the 'options' property on the buffer_object which
  # is available in recent versions of Vim and would then use:
  #
  #   buffer_object.options[ option ]
  #
  # to read the value, BUT this caused annoying flickering when the
  # buffer_object was a hidden buffer (with option = 'ft'). This was all due to
  # a Vim bug. Until this is fixed, we won't use it.
  to_eval = 'getbufvar({0}, "&{1}")'.format( buffer_object.number, option )
  return GetVariableValue( to_eval )
def BufferModified( buffer_object ):
  """Returns True if the buffer has unsaved modifications ('mod' option)."""
  return bool( int( GetBufferOption( buffer_object, 'mod' ) ) )
def GetUnsavedAndSpecifiedBufferData( including_filepath ):
  """Build part of the request containing the contents and filetypes of all
  dirty buffers as well as the buffer with filepath |including_filepath|.
  Returns a dict: filepath -> { 'contents', 'filetypes' }."""
  buffers_data = {}
  for buffer_object in vim.buffers:
    buffer_filepath = GetBufferFilepath( buffer_object )
    # Skip buffers that are neither modified nor the requested one.
    if not ( BufferModified( buffer_object ) or
             buffer_filepath == including_filepath ):
      continue

    buffers_data[ buffer_filepath ] = {
      # Add a newline to match what gets saved to disk. See #1455 for details.
      'contents': JoinLinesAsUnicode( buffer_object ) + '\n',
      'filetypes': FiletypesForBuffer( buffer_object )
    }

  return buffers_data
def GetBufferNumberForFilename( filename, open_file_if_needed = True ):
  """Returns the buffer number for |filename| (resolved to a real path);
  when |open_file_if_needed| is set, bufnr() creates the buffer if missing."""
  return GetIntValue( u"bufnr('{0}', {1})".format(
      EscapeForVim( os.path.realpath( filename ) ),
      int( open_file_if_needed ) ) )
def GetCurrentBufferFilepath():
  """Returns the filepath of the buffer in the current window."""
  return GetBufferFilepath( vim.current.buffer )
def BufferIsVisible( buffer_number ):
  """Returns True when the buffer is shown in some window (bufwinnr != -1)."""
  if buffer_number < 0:
    return False
  window_number = GetIntValue( "bufwinnr({0})".format( buffer_number ) )
  return window_number != -1
def GetBufferFilepath( buffer_object ):
  """Returns the buffer's file path, or a CWD-based path built from the
  buffer number for unnamed buffers."""
  if buffer_object.name:
    return buffer_object.name
  # Buffers that have just been created by a command like :enew don't have any
  # buffer name so we use the buffer number for that.
  return os.path.join( GetCurrentDirectory(), str( buffer_object.number ) )
def GetCurrentBufferNumber():
  """Returns the number of the buffer in the current window."""
  return vim.current.buffer.number
def GetBufferChangedTick( bufnr ):
  """Returns b:changedtick for buffer |bufnr| (increments on every change)."""
  return GetIntValue( 'getbufvar({0}, "changedtick")'.format( bufnr ) )
def UnplaceSignInBuffer( buffer_number, sign_id ):
  """Removes sign |sign_id| from buffer |buffer_number|; a no-op for invalid
  buffers or already-removed signs (the E158 error is swallowed)."""
  if buffer_number < 0:
    return
  vim.command(
    'try | exec "sign unplace {0} buffer={1}" | catch /E158/ | endtry'.format(
        sign_id, buffer_number ) )
def PlaceSign( sign_id, line_num, buffer_num, is_error = True ):
  """Places a YcmError/YcmWarning sign at |line_num| (1-based) in the given
  buffer."""
  # libclang can give us diagnostics that point "outside" the file; Vim borks
  # on these.
  if line_num < 1:
    line_num = 1

  sign_name = 'YcmError' if is_error else 'YcmWarning'
  vim.command( 'sign place {0} name={1} line={2} buffer={3}'.format(
    sign_id, sign_name, line_num, buffer_num ) )
def ClearYcmSyntaxMatches():
  """Deletes every match previously added with a 'Ycm*' highlight group."""
  matches = VimExpressionToPythonType( 'getmatches()' )
  for match in matches:
    if match[ 'group' ].startswith( 'Ycm' ):
      vim.eval( 'matchdelete({0})'.format( match[ 'id' ] ) )
def AddDiagnosticSyntaxMatch( line_num,
                              column_num,
                              line_end_num = None,
                              column_end_num = None,
                              is_error = True ):
  """Highlight a range in the current window starting from
  (|line_num|, |column_num|) included to (|line_end_num|, |column_end_num|)
  excluded. If |line_end_num| or |column_end_num| are not given, highlight the
  character at (|line_num|, |column_num|). Both line and column numbers are
  1-based. Return the ID of the newly added match."""
  group = 'YcmErrorSection' if is_error else 'YcmWarningSection'

  line_num, column_num = LineAndColumnNumbersClamped( line_num, column_num )

  if not line_end_num or not column_end_num:
    # Raw string: the pattern contains Vim regex atoms (\%l, \%c) which are
    # not valid Python escapes. A raw literal yields the exact same bytes but
    # avoids the invalid-escape DeprecationWarning (a SyntaxError in future
    # Python versions).
    return GetIntValue(
      r"matchadd('{0}', '\%{1}l\%{2}c')".format( group, line_num, column_num ) )

  # -1 and then +1 to account for column end not included in the range.
  line_end_num, column_end_num = LineAndColumnNumbersClamped(
      line_end_num, column_end_num - 1 )
  column_end_num += 1

  # Raw string for the same reason as above; '{{-}}' still collapses to '{-}'
  # during .format(), producing Vim's non-greedy multi atom \{-}.
  return GetIntValue(
    r"matchadd('{0}', '\%{1}l\%{2}c\_.\{{-}}\%{3}l\%{4}c')".format(
      group, line_num, column_num, line_end_num, column_end_num ) )
# Clamps the line and column numbers so that they are not past the contents of
# the buffer. Numbers are 1-based byte offsets.
def LineAndColumnNumbersClamped( line_num, column_num ):
  """Returns (line_num, column_num) clamped to the current buffer's extents;
  falsy inputs (None/0) are passed through unchanged."""
  new_line_num = line_num
  new_column_num = column_num

  max_line = len( vim.current.buffer )
  if line_num and line_num > max_line:
    new_line_num = max_line

  max_column = len( vim.current.buffer[ new_line_num - 1 ] )
  if column_num and column_num > max_column:
    new_column_num = max_column

  return new_line_num, new_column_num
def SetLocationList( diagnostics ):
  """Populate the location list with diagnostics. Diagnostics should be in
  qflist format; see ":h setqflist" for details."""
  # Window 0 means the location list of the current window.
  vim.eval( 'setloclist( 0, {0} )'.format( json.dumps( diagnostics ) ) )
def OpenLocationList( focus = False, autoclose = False ):
  """Open the location list to full width at the bottom of the screen with its
  height automatically set to fit all entries. This behavior can be overridden
  by using the YcmLocationOpened autocommand. When focus is set to True, the
  location list window becomes the active window. When autoclose is set to True,
  the location list window is automatically closed after an entry is
  selected."""
  vim.command( 'botright lopen' )

  SetFittingHeightForCurrentWindow()

  if autoclose:
    # This autocommand is automatically removed when the location list window is
    # closed.
    vim.command( 'au WinLeave <buffer> q' )

  if VariableExists( '#User#YcmLocationOpened' ):
    vim.command( 'doautocmd User YcmLocationOpened' )

  if not focus:
    JumpToPreviousWindow()
def SetQuickFixList( quickfix_list ):
  """Populate the quickfix list and open it. List should be in qflist format:
  see ":h setqflist" for details."""
  vim.eval( 'setqflist( {0} )'.format( json.dumps( quickfix_list ) ) )
def OpenQuickFixList( focus = False, autoclose = False ):
  """Open the quickfix list to full width at the bottom of the screen with its
  height automatically set to fit all entries. This behavior can be overridden
  by using the YcmQuickFixOpened autocommand.
  See the OpenLocationList function for the focus and autoclose options."""
  vim.command( 'botright copen' )

  SetFittingHeightForCurrentWindow()

  if autoclose:
    # This autocommand is automatically removed when the quickfix window is
    # closed.
    vim.command( 'au WinLeave <buffer> q' )

  if VariableExists( '#User#YcmQuickFixOpened' ):
    vim.command( 'doautocmd User YcmQuickFixOpened' )

  if not focus:
    JumpToPreviousWindow()
def SetFittingHeightForCurrentWindow():
  """Resizes the current window so every buffer line fits, accounting for
  lines that wrap across the window width."""
  window_width = GetIntValue( 'winwidth( 0 )' )
  fitting_height = 0
  for line in vim.current.buffer:
    # NOTE(review): len( line ) counts bytes, which approximates display
    # width; multi-byte or double-width characters may be off — confirm.
    fitting_height += len( line ) // window_width + 1
  vim.command( '{0}wincmd _'.format( fitting_height ) )
def ConvertDiagnosticsToQfList( diagnostics ):
  """Converts ycmd diagnostic dicts into Vim quickfix entries (see
  :h getqflist for the dictionary fields)."""
  def ConvertDiagnosticToQfFormat( diagnostic ):
    # See :h getqflist for a description of the dictionary fields.
    # Note that, as usual, Vim is completely inconsistent about whether
    # line/column numbers are 1 or 0 based in its various APIs. Here, it wants
    # them to be 1-based. The documentation states quite clearly that it
    # expects a byte offset, by which it means "1-based column number" as
    # described in :h getqflist ("the first column is 1").
    location = diagnostic[ 'location' ]
    line_num = location[ 'line_num' ]

    # libclang can give us diagnostics that point "outside" the file; Vim borks
    # on these.
    if line_num < 1:
      line_num = 1

    text = diagnostic[ 'text' ]
    if diagnostic.get( 'fixit_available', False ):
      text += ' (FixIt available)'

    return {
      'bufnr' : GetBufferNumberForFilename( location[ 'filepath' ] ),
      'lnum'  : line_num,
      'col'   : location[ 'column_num' ],
      'text'  : text,
      # 'type' is the first letter of the diagnostic kind, e.g. 'E' / 'W'.
      'type'  : diagnostic[ 'kind' ][ 0 ],
      'valid' : 1
    }

  return [ ConvertDiagnosticToQfFormat( x ) for x in diagnostics ]
def GetVimGlobalsKeys():
  """Returns the names of all Vim global (g:) variables."""
  return vim.eval( 'keys( g: )' )
def VimExpressionToPythonType( vim_expression ):
  """Returns a Python type from the return value of the supplied Vim expression.
  If the expression returns a list, dict or other non-string type, then it is
  returned unmodified. If the string return can be converted to an
  integer, returns an integer, otherwise returns the result converted to a
  Unicode string."""
  result = vim.eval( vim_expression )
  # Lists/dicts come back as native Python containers; pass them through.
  if not ( isinstance( result, str ) or isinstance( result, bytes ) ):
    return result

  try:
    return int( result )
  except ValueError:
    return ToUnicode( result )
def HiddenEnabled( buffer_object ):
  """Returns True if the buffer has the 'hidden' ('hid') option set."""
  return bool( int( GetBufferOption( buffer_object, 'hid' ) ) )
def BufferIsUsable( buffer_object ):
  """A buffer can be abandoned if it is unmodified or may be hidden."""
  return not BufferModified( buffer_object ) or HiddenEnabled( buffer_object )
def EscapedFilepath( filepath ):
  """Escapes spaces so the path can be embedded in an ex command.
  NOTE(review): only spaces are escaped; other special Vim characters
  (e.g. '%', '#') are not — confirm that is acceptable for callers."""
  return filepath.replace( ' ' , r'\ ' )
# Both |line| and |column| need to be 1-based
def TryJumpLocationInOpenedTab( filename, line, column ):
  """If |filename| is open in a window of any tab page, focuses that window,
  moves the cursor there and returns True; returns False otherwise."""
  filepath = os.path.realpath( filename )

  for tab in vim.tabpages:
    for win in tab.windows:
      if win.buffer.name == filepath:
        vim.current.tabpage = tab
        vim.current.window = win
        vim.current.window.cursor = ( line, column - 1 )

        # Center the screen on the jumped-to location
        vim.command( 'normal! zz' )
        return True

  # 'filename' is not opened in any tab pages
  return False
# Maps User command to vim command
def GetVimCommand( user_command, default = 'edit' ):
  """Translates a goto_buffer_command setting into the Vim command used to
  open a file, falling back to a split when :edit would abandon changes."""
  vim_command = BUFFER_COMMAND_MAP.get( user_command, default )
  if vim_command == 'edit' and not BufferIsUsable( vim.current.buffer ):
    vim_command = 'split'
  return vim_command
# Both |line| and |column| need to be 1-based
def JumpToLocation( filename, line, column ):
  """Opens |filename| (honouring the goto_buffer_command option) and moves
  the cursor to 1-based (|line|, |column|), centering the screen there."""
  # Add an entry to the jumplist
  vim.command( "normal! m'" )

  if filename != GetCurrentBufferFilepath():
    # We prefix the command with 'keepjumps' so that opening the file is not
    # recorded in the jumplist. So CTRL-O jumps back to the original
    # location, not to the start of the newly opened file.
    # Sadly this fails on random occasions and the undesired jump remains in the
    # jumplist.
    user_command = user_options_store.Value( 'goto_buffer_command' )

    if user_command == 'new-or-existing-tab':
      if TryJumpLocationInOpenedTab( filename, line, column ):
        return
      # Not open anywhere: fall back to opening a fresh tab.
      user_command = 'new-tab'

    vim_command = GetVimCommand( user_command )
    try:
      vim.command( 'keepjumps {0} {1}'.format( vim_command,
                                               EscapedFilepath( filename ) ) )
    # When the file we are trying to jump to has a swap file
    # Vim opens swap-exists-choices dialog and throws vim.error with E325 error,
    # or KeyboardInterrupt after user selects one of the options.
    except vim.error as e:
      if 'E325' not in str( e ):
        raise
      # Do nothing if the target file is still not opened (user chose (Q)uit)
      if filename != GetCurrentBufferFilepath():
        return
    # Thrown when user chooses (A)bort in .swp message box
    except KeyboardInterrupt:
      return
  vim.current.window.cursor = ( line, column - 1 )

  # Center the screen on the jumped-to location
  vim.command( 'normal! zz' )
def NumLinesInBuffer( buffer_object ):
  """Returns the number of lines in the buffer."""
  # This is actually less than obvious, that's why it's wrapped in a function
  return len( buffer_object )
# Calling this function from the non-GUI thread will sometimes crash Vim. At
# the time of writing, YCM only uses the GUI thread inside Vim (this used to
# not be the case).
def PostVimMessage( message, warning = True, truncate = False ):
  """Display a message on the Vim status line. By default, the message is
  highlighted and logged to Vim command-line history (see :h history).
  Unset the |warning| parameter to disable this behavior. Set the |truncate|
  parameter to avoid hit-enter prompts (see :h hit-enter) when the message is
  longer than the window width."""
  # :echom records to message history; :echo does not.
  echo_command = 'echom' if warning else 'echo'

  # Displaying a new message while previous ones are still on the status line
  # might lead to a hit-enter prompt or the message appearing without a
  # newline so we do a redraw first.
  vim.command( 'redraw' )

  if warning:
    vim.command( 'echohl WarningMsg' )

  message = ToUnicode( message )

  if truncate:
    vim_width = GetIntValue( '&columns' )

    message = message.replace( '\n', ' ' )
    if len( message ) > vim_width:
      message = message[ : vim_width - 4 ] + '...'

    # Temporarily hide 'ruler'/'showcmd' so the full width is available.
    old_ruler = GetIntValue( '&ruler' )
    old_showcmd = GetIntValue( '&showcmd' )
    vim.command( 'set noruler noshowcmd' )

    vim.command( "{0} '{1}'".format( echo_command,
                                     EscapeForVim( message ) ) )

    SetVariableValue( '&ruler', old_ruler )
    SetVariableValue( '&showcmd', old_showcmd )
  else:
    for line in message.split( '\n' ):
      vim.command( "{0} '{1}'".format( echo_command,
                                       EscapeForVim( line ) ) )

  if warning:
    vim.command( 'echohl None' )
def PresentDialog( message, choices, default_choice_index = 0 ):
  """Presents the user with a dialog where a choice can be made.
  This will be a dialog for gvim users or a question in the message buffer
  for vim users or if `set guioptions+=c` was used.

  choices is list of alternatives.
  default_choice_index is the 0-based index of the default element
  that will get chosen if the user hits <CR>. Use -1 for no default.

  PresentDialog will return a 0-based index into the list
  or -1 if the dialog was dismissed by using <Esc>, Ctrl-C, etc.

  If you are presenting a list of options for the user to choose from, such as
  a list of imports, or lines to insert (etc.), SelectFromList is a better
  option.

  See also:
    :help confirm() in vim (Note that vim uses 1-based indexes)

  Example call:
    PresentDialog("Is this a nice example?", ["Yes", "No", "May&be"])
      Is this a nice example?
      [Y]es, (N)o, May(b)e:"""
  # confirm() takes the choices '\n'-separated and returns a 1-based index
  # (0 on dismissal), hence the +1/-1 conversions.
  to_eval = "confirm('{0}', '{1}', {2})".format(
    EscapeForVim( ToUnicode( message ) ),
    EscapeForVim( ToUnicode( "\n" .join( choices ) ) ),
    default_choice_index + 1 )
  try:
    return GetIntValue( to_eval ) - 1
  except KeyboardInterrupt:
    return -1
def Confirm( message ):
  """Display |message| with Ok/Cancel operations. Returns True if the user
  selects Ok"""
  choice = PresentDialog( message, [ "Ok", "Cancel" ] )
  return choice == 0
def SelectFromList( prompt, items ):
  """Ask the user to select an item from the list |items|.

  Presents the user with |prompt| followed by a numbered list of |items|,
  from which they select one. The user is asked to enter the number of an
  item or click it.

  |items| should not contain leading ordinals: they are added automatically.

  Returns the 0-based index in the list |items| that the user selected, or a
  negative number if no valid item was selected.

  See also :help inputlist()."""

  vim_items = [ prompt ]
  vim_items.extend( [ "{0}: {1}".format( i + 1, item )
                      for i, item in enumerate( items ) ] )

  # The vim documentation warns not to present lists larger than the number of
  # lines of display. This is sound advice, but there really isn't any sensible
  # thing we can do in that scenario. Testing shows that Vim just pages the
  # message; that behaviour is as good as any, so we don't manipulate the list,
  # or attempt to page it.

  # For an explanation of the purpose of inputsave() / inputrestore(),
  # see :help input(). Briefly, it makes inputlist() work as part of a mapping.
  vim.eval( 'inputsave()' )
  try:
    # Vim returns the number the user entered, or the line number the user
    # clicked. This may be wildly out of range for our list. It might even be
    # negative.
    #
    # The first item is index 0, and this maps to our "prompt", so we subtract 1
    # from the result and return that, assuming it is within the range of the
    # supplied list. If not, we return negative.
    #
    # See :help input() for explanation of the use of inputsave() and
    # inputrestore(). It is done in try/finally in case vim.eval ever throws an
    # exception (such as KeyboardInterrupt)
    selected = GetIntValue( "inputlist( " + json.dumps( vim_items ) + " )" ) - 1
  except KeyboardInterrupt:
    selected = -1
  finally:
    vim.eval( 'inputrestore()' )

  if selected < 0 or selected >= len( items ):
    # User selected something outside of the range
    raise RuntimeError( NO_SELECTION_MADE_MSG )

  return selected
def EscapeForVim( text ):
  """Escapes single quotes for embedding in a single-quoted Vim string."""
  return ToUnicode( text.replace( "'", "''" ) )
def CurrentFiletypes():
  """Returns the list of filetypes of the current buffer ('&ft' split on '.')."""
  return VimExpressionToPythonType( "&filetype" ).split( '.' )
def GetBufferFiletypes( bufnr ):
  """Returns the list of filetypes of buffer |bufnr| ('&ft' split on '.')."""
  command = 'getbufvar({0}, "&ft")'.format( bufnr )
  return VimExpressionToPythonType( command ).split( '.' )
def FiletypesForBuffer( buffer_object ):
  """Return the list of filetypes for |buffer_object|.
  NOTE: Getting &ft for other buffers only works when the buffer has been
  visited by the user at least once, which is true for modified buffers."""
  filetype_option = GetBufferOption( buffer_object, 'ft' )
  return filetype_option.split( '.' )
def VariableExists( variable ):
  """Return True if a Vim variable named |variable| is defined."""
  exists_expression = "exists( '{0}' )".format( EscapeForVim( variable ) )
  return GetBoolValue( exists_expression )
def SetVariableValue( variable, value ):
  """Assign |value| to the Vim variable named |variable|, serialising the
  value to a Vim-compatible literal via JSON."""
  serialised_value = json.dumps( value )
  vim.command( "let {0} = {1}".format( variable, serialised_value ) )
def GetVariableValue( variable ):
  """Return the raw value of the Vim variable |variable| as produced by
  vim.eval(); callers needing ints or bools use GetIntValue/GetBoolValue."""
  return vim.eval( variable )
def GetBoolValue( variable ):
  """Evaluate the Vim expression |variable| and coerce the numeric result to
  a Python bool."""
  numeric_result = int( vim.eval( variable ) )
  return numeric_result != 0
def GetIntValue( variable ):
  """Evaluate the Vim expression |variable| and return the result as an int."""
  raw_result = vim.eval( variable )
  return int( raw_result )
def _SortChunksByFile( chunks ):
"""Sort the members of the list |chunks| (which must be a list of dictionaries
conforming to ycmd.responses.FixItChunk) by their filepath. Returns a new
list in arbitrary order."""
chunks_by_file = defaultdict( list )
for chunk in chunks:
filepath = chunk[ 'range' ][ 'start' ][ 'filepath' ]
chunks_by_file[ filepath ].append( chunk )
return chunks_by_file
def _GetNumNonVisibleFiles( file_list ):
  """Return the number of files in the iterable |file_list| which are not
  currently open in visible windows."""
  # A generator expression avoids materialising a throwaway list just to
  # count it (the original built a full list and called len() on it).
  return sum(
      1 for f in file_list
      if not BufferIsVisible( GetBufferNumberForFilename( f, False ) ) )
def _OpenFileInSplitIfNeeded( filepath ):
  """Ensure that the supplied filepath is open in a visible window, opening a
  new split if required. Returns the buffer number of the file and an
  indication of whether or not a new split was opened.

  If the supplied filename is already open in a visible window, just return
  its buffer number. If the supplied file is not visible in a window in the
  current tab, opens it in a new split.

  Returns a tuple of ( buffer_num, split_was_opened ) indicating the buffer
  number and whether or not this method created a new split. If the user opts
  not to open a file, or if opening fails, this method raises RuntimeError,
  otherwise, guarantees to return a visible buffer number in buffer_num."""
  buffer_num = GetBufferNumberForFilename( filepath, False )

  # We only apply changes in the current tab page (i.e. "visible" windows).
  # Applying changes in tabs does not lead to a better user experience, as the
  # quickfix list no longer works as you might expect (doesn't jump into other
  # tabs), and the complexity of choosing where to apply edits is significant.
  if BufferIsVisible( buffer_num ):
    # file is already open and visible, just return that buffer number (and an
    # indicator that we *didn't* open a split)
    return ( buffer_num, False )

  # The file is not open in a visible window, so we open it in a split.
  # We open the file with a small, fixed height. This means that we don't
  # make the current buffer the smallest after a series of splits.
  OpenFilename( filepath, {
    'focus': True,
    'fix': True,
    'size': GetIntValue( '&previewheight' ),
  } )

  # OpenFilename returns us to the original cursor location. This is what we
  # want, because we don't want to disorientate the user, but we do need to
  # know the (now open) buffer number for the filename
  buffer_num = GetBufferNumberForFilename( filepath, False )
  if not BufferIsVisible( buffer_num ):
    # This happens, for example, if there is a swap file and the user
    # selects the "Quit" or "Abort" options. We just raise an exception to
    # make it clear to the user that the abort has left potentially
    # partially-applied changes.
    raise RuntimeError(
        'Unable to open file: {0}\nFixIt/Refactor operation '
        'aborted prior to completion. Your files have not been '
        'fully updated. Please use undo commands to revert the '
        'applied changes.'.format( filepath ) )

  # We opened this file in a split
  return ( buffer_num, True )
def ReplaceChunks( chunks ):
  """Apply the source file deltas supplied in |chunks| to arbitrary files.
  |chunks| is a list of changes defined by ycmd.responses.FixItChunk,
  which may apply arbitrary modifications to arbitrary files.

  If a file specified in a particular chunk is not currently open in a visible
  buffer (i.e., one in a window visible in the current tab), we:
    - issue a warning to the user that we're going to open new files (and
      offer her the option to abort cleanly)
    - open the file in a new split, make the changes, then hide the buffer.

  If for some reason a file could not be opened or changed, raises
  RuntimeError. Otherwise, returns no meaningful value."""
  # We apply the edits file-wise for efficiency, and because we must track the
  # file-wise offset deltas (caused by the modifications to the text).
  chunks_by_file = _SortChunksByFile( chunks )

  # We sort the file list simply to enable repeatable testing
  sorted_file_list = sorted( iterkeys( chunks_by_file ) )

  # Make sure the user is prepared to have her screen mutilated by the new
  # buffers
  num_files_to_open = _GetNumNonVisibleFiles( sorted_file_list )

  if num_files_to_open > 0:
    if not Confirm(
        FIXIT_OPENING_BUFFERS_MESSAGE_FORMAT.format( num_files_to_open ) ):
      # User declined; apply nothing.
      return

  # Store the list of locations where we applied changes. We use this to
  # display the quickfix window showing the user where we applied changes.
  locations = []

  for filepath in sorted_file_list:
    ( buffer_num, close_window ) = _OpenFileInSplitIfNeeded( filepath )

    ReplaceChunksInBuffer( chunks_by_file[ filepath ],
                           vim.buffers[ buffer_num ],
                           locations )

    # When opening tons of files, we don't want to have a split for each new
    # file, as this simply does not scale, so we open the window, make the
    # edits, then hide the window.
    if close_window:
      # Some plugins (I'm looking at you, syntastic) might open a location list
      # for the window we just opened. We don't want that location list hanging
      # around, so we close it. lclose is a no-op if there is no location list.
      vim.command( 'lclose' )

      # Note that this doesn't lose our changes. It simply "hides" the buffer,
      # which can later be re-accessed via the quickfix list or `:ls`
      vim.command( 'hide' )

  # Open the quickfix list, populated with entries for each location we
  # changed.
  if locations:
    SetQuickFixList( locations )
    OpenQuickFixList()

  PostVimMessage( 'Applied {0} changes'.format( len( chunks ) ),
                  warning = False )
def ReplaceChunksInBuffer( chunks, vim_buffer, locations ):
  """Apply changes in |chunks| to the buffer-like object |vim_buffer|. Append
  each chunk's start to the list |locations|."""
  # We need to track the difference in length, but ensuring we apply fixes
  # in ascending order of insertion point.
  chunks.sort( key = lambda chunk: (
    chunk[ 'range' ][ 'start' ][ 'line_num' ],
    chunk[ 'range' ][ 'start' ][ 'column_num' ]
  ) )

  # Remember the line number we're processing. Negative line number means we
  # haven't processed any lines yet (by nature of being not equal to any
  # real line number).
  last_line = -1

  line_delta = 0
  for chunk in chunks:
    if chunk[ 'range' ][ 'start' ][ 'line_num' ] != last_line:
      # If this chunk is on a different line than the previous chunk,
      # then ignore previous deltas (as offsets won't have changed).
      # NOTE(review): the comparison above uses the chunk's *start* line but
      # |last_line| is updated to the chunk's *end* line — presumably so a
      # following chunk starting on the previous chunk's end line keeps the
      # accumulated char_delta; confirm against upstream history before
      # changing.
      last_line = chunk[ 'range' ][ 'end' ][ 'line_num' ]
      char_delta = 0

    ( new_line_delta, new_char_delta ) = ReplaceChunk(
      chunk[ 'range' ][ 'start' ],
      chunk[ 'range' ][ 'end' ],
      chunk[ 'replacement_text' ],
      line_delta, char_delta,
      vim_buffer,
      locations )
    line_delta += new_line_delta
    char_delta += new_char_delta
# Replace the chunk of text specified by a contiguous range with the supplied
# text.
# * start and end are objects with line_num and column_num properties
# * the range is inclusive
# * indices are all 1-based
# * the returned character delta is the delta for the last line
#
# returns the delta (in lines and characters) that any position after the end
# needs to be adjusted by.
#
# NOTE: Works exclusively with bytes() instances and byte offsets as returned
# by ycmd and used within the Vim buffers
def ReplaceChunk( start, end, replacement_text, line_delta, char_delta,
                  vim_buffer, locations = None ):
  # ycmd's results are all 1-based, but vim's/python's are all 0-based
  # (so we do -1 on all of the values)
  start_line = start[ 'line_num' ] - 1 + line_delta
  end_line = end[ 'line_num' ] - 1 + line_delta

  source_lines_count = end_line - start_line + 1
  start_column = start[ 'column_num' ] - 1 + char_delta
  end_column = end[ 'column_num' ] - 1
  # The character delta only applies within a single line: only shift the end
  # column when the chunk does not span multiple lines.
  if source_lines_count == 1:
    end_column += char_delta

  # NOTE: replacement_text is unicode, but all our offsets are byte offsets,
  # so we convert to bytes
  replacement_lines = ToBytes( replacement_text ).splitlines( False )
  if not replacement_lines:
    # Deleting text: replace with a single empty line.
    replacement_lines = [ bytes( b'' ) ]
  replacement_lines_count = len( replacement_lines )

  # NOTE: Vim buffers are a list of byte objects on Python 2 but unicode
  # objects on Python 3.
  end_existing_text = ToBytes( vim_buffer[ end_line ] )[ end_column : ]
  start_existing_text = ToBytes( vim_buffer[ start_line ] )[ : start_column ]

  new_char_delta = ( len( replacement_lines[ -1 ] )
                     - ( end_column - start_column ) )
  if replacement_lines_count > 1:
    new_char_delta -= start_column

  # Preserve the untouched text before and after the replaced range.
  replacement_lines[ 0 ] = start_existing_text + replacement_lines[ 0 ]
  replacement_lines[ -1 ] = replacement_lines[ -1 ] + end_existing_text

  vim_buffer[ start_line : end_line + 1 ] = replacement_lines[:]

  if locations is not None:
    locations.append( {
      'bufnr': vim_buffer.number,
      'filename': vim_buffer.name,
      # line and column numbers are 1-based in qflist
      'lnum': start_line + 1,
      'col': start_column + 1,
      'text': replacement_text,
      'type': 'F',
    } )

  new_line_delta = replacement_lines_count - source_lines_count
  return ( new_line_delta, new_char_delta )
def InsertNamespace( namespace ):
  """Insert a C# "using <namespace>;" directive into the current buffer, after
  the last existing "using" line (matching its indentation), or at the top of
  the buffer if none is found.

  If the user has configured g:ycm_csharp_insert_namespace_expr, that Vim
  expression is evaluated to perform the insertion instead (the namespace is
  passed via g:ycm_namespace_to_insert)."""
  if VariableExists( 'g:ycm_csharp_insert_namespace_expr' ):
    expr = GetVariableValue( 'g:ycm_csharp_insert_namespace_expr' )
    if expr:
      SetVariableValue( "g:ycm_namespace_to_insert", namespace )
      vim.eval( expr )
      return

  # Raw string: the backslash escapes ( \s, \(, \+, \? ) are for Vim's regex
  # engine. The original non-raw literal relied on Python passing unknown
  # escapes through, which emits DeprecationWarning on Python 3.6+ and is a
  # future SyntaxError; the raw prefix preserves the exact same string value.
  pattern = r'^\s*using\(\s\+[a-zA-Z0-9]\+\s\+=\)\?\s\+[a-zA-Z0-9.]\+\s*;\s*'
  existing_indent = ''
  line = SearchInCurrentBuffer( pattern )
  if line:
    existing_line = LineTextInCurrentBuffer( line )
    existing_indent = re.sub( r"\S.*", "", existing_line )
  new_line = "{0}using {1};\n\n".format( existing_indent, namespace )
  replace_pos = { 'line_num': line + 1, 'column_num': 1 }
  ReplaceChunk( replace_pos, replace_pos, new_line, 0, 0, vim.current.buffer )
  PostVimMessage( 'Add namespace: {0}'.format( namespace ), warning = False )
def SearchInCurrentBuffer( pattern ):
  """Return the 1-indexed line on which |pattern| matches (going UP from the
  current position) or 0 if not found."""
  search_expression = "search('{0}', 'Wcnb')".format( EscapeForVim( pattern ) )
  return GetIntValue( search_expression )
def LineTextInCurrentBuffer( line_number ):
  """Return the text on the 1-indexed line |line_number| (NOT 0-indexed)."""
  buffer_index = line_number - 1
  return vim.current.buffer[ buffer_index ]
def ClosePreviewWindow():
  """ Close the preview window if it is present, otherwise do nothing """
  # 'silent!' suppresses the error Vim raises when no preview window exists.
  vim.command( 'silent! pclose!' )
def JumpToPreviewWindow():
  """Jump the vim cursor to the preview window, which must be active. Returns
  a boolean indicating if the cursor ended up in the preview window."""
  vim.command( 'silent! wincmd P' )
  # If the jump failed we are still in the original window, so this is False.
  landed_in_preview = vim.current.window.options[ 'previewwindow' ]
  return landed_in_preview
def JumpToPreviousWindow():
  """ Jump the vim cursor to its previous window position """
  # 'wincmd p' returns to the previously-accessed window; errors suppressed.
  vim.command( 'silent! wincmd p' )
def JumpToTab( tab_number ):
  """Jump to the Vim tab with the corresponding number |tab_number|."""
  tab_command = 'silent! tabn {0}'.format( tab_number )
  vim.command( tab_command )
def OpenFileInPreviewWindow( filename ):
  """ Open the supplied filename in the preview window """
  # :pedit! edits the file in the preview window; 'silent!' hides any error.
  vim.command( 'silent! pedit! ' + filename )
def WriteToPreviewWindow( message ):
  """ Display the supplied message in the preview window """

  # This isn't something that comes naturally to Vim. Vim only wants to show
  # tags and/or actual files in the preview window, so we have to hack it a
  # little bit. We generate a temporary file name and "open" that, then write
  # the data to it. We make sure the buffer can't be edited or saved. Other
  # approaches include simply opening a split, but we want to take advantage of
  # the existing Vim options for preview window height, etc.

  ClosePreviewWindow()

  OpenFileInPreviewWindow( vim.eval( 'tempname()' ) )

  if JumpToPreviewWindow():
    # We actually got to the preview window. By default the preview window
    # can't be changed, so we make it writable, write to it, then make it read
    # only again.
    # NOTE: The order of the option assignments below matters: the buffer
    # must be modifiable before its contents are replaced, and 'modified'
    # must be cleared last to avoid the unsaved-changes warning on close.
    vim.current.buffer.options[ 'modifiable' ] = True
    vim.current.buffer.options[ 'readonly' ] = False

    vim.current.buffer[:] = message.splitlines()

    vim.current.buffer.options[ 'buftype' ] = 'nofile'
    vim.current.buffer.options[ 'bufhidden' ] = 'wipe'
    vim.current.buffer.options[ 'buflisted' ] = False
    vim.current.buffer.options[ 'swapfile' ] = False
    vim.current.buffer.options[ 'modifiable' ] = False
    vim.current.buffer.options[ 'readonly' ] = True

    # We need to prevent closing the window causing a warning about unsaved
    # file, so we pretend to Vim that the buffer has not been changed.
    vim.current.buffer.options[ 'modified' ] = False

    JumpToPreviousWindow()
  else:
    # We couldn't get to the preview window, but we still want to give the user
    # the information we have. The only remaining option is to echo to the
    # status area.
    PostVimMessage( message, warning = False )
def BufferIsVisibleForFilename( filename ):
  """Check whether a visible buffer exists for the file |filename|."""
  return BufferIsVisible( GetBufferNumberForFilename( filename, False ) )
def CloseBuffersForFilename( filename ):
  """Wipe out every buffer associated with the file |filename|.

  Raises RuntimeError if a buffer refuses to be wiped out."""
  buffer_number = GetBufferNumberForFilename( filename, False )
  while buffer_number != -1:
    vim.command( 'silent! bwipeout! {0}'.format( buffer_number ) )
    remaining_buffer = GetBufferNumberForFilename( filename, False )
    if remaining_buffer == buffer_number:
      raise RuntimeError( "Buffer {0} for filename '{1}' should already be "
                          "wiped out.".format( buffer_number, filename ) )
    buffer_number = remaining_buffer
def OpenFilename( filename, options = None ):
  """Open a file in Vim. Following options are available:
  - command: specify which Vim command is used to open the file. Choices
    are same-buffer, horizontal-split, vertical-split, and new-tab (default:
    horizontal-split);
  - size: set the height of the window for a horizontal split or the width for
    a vertical one (default: '');
  - fix: set the winfixheight option for a horizontal split or winfixwidth for
    a vertical one (default: False). See :h winfix for details;
  - focus: focus the opened file (default: False);
  - watch: automatically watch for changes (default: False). This is useful
    for logs;
  - position: set the position where the file is opened (default: start).
    Choices are start and end."""
  # Avoid the mutable-default-argument pitfall: default to None and normalise
  # to an empty dict here. Behaviour is unchanged for all callers.
  if options is None:
    options = {}

  # Set the options.
  command = GetVimCommand( options.get( 'command', 'horizontal-split' ),
                           'horizontal-split' )
  size = ( options.get( 'size', '' ) if command in [ 'split', 'vsplit' ] else
           '' )
  focus = options.get( 'focus', False )

  # There is no command in Vim to return to the previous tab so we need to
  # remember the current tab if needed.
  if not focus and command == 'tabedit':
    previous_tab = GetIntValue( 'tabpagenr()' )
  else:
    previous_tab = None

  # Open the file.
  try:
    vim.command( '{0}{1} {2}'.format( size, command, filename ) )
  # When the file we are trying to jump to has a swap file,
  # Vim opens swap-exists-choices dialog and throws vim.error with E325 error,
  # or KeyboardInterrupt after user selects one of the options which actually
  # opens the file (Open read-only/Edit anyway).
  except vim.error as e:
    if 'E325' not in str( e ):
      raise
    # Otherwise, the user might have chosen Quit. This is detectable by the
    # current file not being the target file
    if filename != GetCurrentBufferFilepath():
      return
  except KeyboardInterrupt:
    # Raised when the user selects "Abort" after swap-exists-choices
    return

  _SetUpLoadedBuffer( command,
                      filename,
                      options.get( 'fix', False ),
                      options.get( 'position', 'start' ),
                      options.get( 'watch', False ) )

  # Vim automatically set the focus to the opened file so we need to get the
  # focus back (if the focus option is disabled) when opening a new tab or
  # window.
  if not focus:
    if command == 'tabedit':
      JumpToTab( previous_tab )
    if command in [ 'split', 'vsplit' ]:
      JumpToPreviousWindow()
def _SetUpLoadedBuffer( command, filename, fix, position, watch ):
"""After opening a buffer, configure it according to the supplied options,
which are as defined by the OpenFilename method."""
if command == 'split':
vim.current.window.options[ 'winfixheight' ] = fix
if command == 'vsplit':
vim.current.window.options[ 'winfixwidth' ] = fix
if watch:
vim.current.buffer.options[ 'autoread' ] = True
vim.command( "exec 'au BufEnter <buffer> :silent! checktime {0}'"
.format( filename ) )
if position == 'end':
vim.command( 'silent! normal! Gzz' )<|fim▁end|> | # We prefix the command with 'keepjumps' so that opening the file is not
# recorded in the jumplist. So when we open the file and move the cursor to
# a location in it, the user can use CTRL-O to jump back to the original |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Flexx setup script.
"""
import os
from os import path as op
try:
# use setuptools namespace, allows for "develop"
import setuptools # noqa, analysis:ignore
except ImportError:
pass # it's not essential for installation
from distutils.core import setup
name = 'flexx'
description = "Pure Python toolkit for creating GUI's using web technology."

# Get version and docstring by scanning the package's __init__.py so the
# package itself never has to be imported at install time.
__version__ = None
__doc__ = ''
docStatus = 0  # Not started, in progress, done
initFile = os.path.join(os.path.dirname(__file__), name, '__init__.py')
# Use a context manager so the file handle is closed deterministically; the
# original open(...).readlines() leaked the handle to the garbage collector.
with open(initFile) as f:
    for line in f:
        if (line.startswith('version_info') or line.startswith('__version__')):
            # exec of a line from our own trusted source tree, defining
            # __version__ / version_info in this module's namespace.
            exec(line.strip())
        elif line.startswith('"""'):
            if docStatus == 0:
                docStatus = 1
                line = line.lstrip('"')
            elif docStatus == 1:
                docStatus = 2
        if docStatus == 1:
            __doc__ += line
def package_tree(pkgroot):
    """Return the dotted names of every package directory under *pkgroot*
    (i.e. each directory containing an ``__init__.py``), relative to the
    directory holding this file."""
    base = os.path.dirname(__file__)
    packages = []
    for dirpath, _dirnames, filenames in os.walk(os.path.join(base, pkgroot)):
        if '__init__.py' in filenames:
            relative = os.path.relpath(dirpath, base)
            packages.append(relative.replace(os.path.sep, '.'))
    return packages
setup(
name=name,
version=__version__,
author='Flexx contributors',
author_email='[email protected]',
license='(new) BSD',
url='http://flexx.readthedocs.org',
download_url='https://pypi.python.org/pypi/flexx',
keywords="ui design, web runtime, pyscript, reactive programming, FRP",
description=description,<|fim▁hole|> install_requires=[],
packages=package_tree(name),
package_dir={name: name},
package_data={},
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Education',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
#'Programming Language :: Python :: 2.7', # not yet supported
'Programming Language :: Python :: 3.4',
],
)<|fim▁end|> | long_description=__doc__,
platforms='any',
provides=[name], |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// src/test/os/mod.rs<|fim▁hole|>
// ===========================================================================
// Imports
// ===========================================================================
// Stdlib imports
// Third-party imports
// Local imports
// use os::windows::protocol::v1::InitSession;
// ===========================================================================
// Tests
// ===========================================================================
// Tests for the default `SessionState::check_msg` implementation: malformed
// messages must be rejected with `SasdErrorKind::InvalidMessage`, and
// well-formed session requests must be forwarded to `check_msg_method`.
mod sessionstate {
    mod check_msg {
        use error::{SasdErrorKind, SasdResult};
        use os::windows::protocol::v1::SessionState;
        use protocol::v1::SessionRequest;
        use rmpv::Value;
        use rpc::v1 as rpc1;
        use siminau_rpc::message::{CodeConvert, Message, MessageType};

        // --------------------
        // Helper
        // --------------------
        // Minimal SessionState implementor. `check_msg_method` must never be
        // reached by the malformed-message tests below, hence unreachable!().
        struct Test;

        impl SessionState for Test {
            fn check_msg_method(&self, _req: SessionRequest)
                -> SasdResult<SessionRequest>
            {
                unreachable!()
            }
        }

        #[test]
        fn non_u64_method()
        {
            // --------------------------------------------------------
            // GIVEN
            // a Message instance and
            // the message has a non-u64 value for the method argument and
            // a type implementing the SessionState trait
            // --------------------------------------------------------
            let msgtype = Value::from(MessageType::Request.to_number());
            let msgid = Value::from(42);
            let msgcode = Value::from("hello");
            let msgargs = Value::Array(vec![]);
            let value = Value::Array(vec![msgtype, msgid, msgcode, msgargs]);
            let msg = Message::from(value).unwrap();
            let test = Test;

            // ----------------------------------------------------
            // WHEN
            // SessionState::check_msg() is called with the message
            // ----------------------------------------------------
            let result = test.check_msg(msg);

            // -------------------------------------
            // THEN
            // An InvalidMessage error is generated
            // -------------------------------------
            let val = match result {
                Err(e) => {
                    match e.kind() {
                        &SasdErrorKind::InvalidMessage => true,
                        _ => false,
                    }
                }
                _ => false,
            };
            assert!(val);
        }

        #[test]
        fn invalid_method_code()
        {
            // --------------------------------------------------------
            // GIVEN
            // a Message instance and
            // a u64 value that is not valid for SessionMethod and
            // a type implementing the SessionState trait
            // --------------------------------------------------------
            let msgtype = Value::from(MessageType::Request.to_number());
            let msgid = Value::from(42);
            let msgcode = Value::from(9999);
            let msgargs = Value::Array(vec![]);
            let value = Value::Array(vec![msgtype, msgid, msgcode, msgargs]);
            let msg = Message::from(value).unwrap();
            let test = Test;

            // ----------------------------------------------------
            // WHEN
            // SessionState::check_msg() is called with the message
            // ----------------------------------------------------
            let result = test.check_msg(msg);

            // -------------------------------------
            // THEN
            // An InvalidMessage error is generated
            // -------------------------------------
            let val = match result {
                Err(e) => {
                    match e.kind() {
                        &SasdErrorKind::InvalidMessage => true,
                        _ => false,
                    }
                }
                _ => false,
            };
            assert!(val);
        }

        #[test]
        fn invalid_request_message()
        {
            // --------------------------------------------------------
            // GIVEN
            // a Message instance and
            // the message's 4th parameter as a non-vector
            // --------------------------------------------------------
            let msgtype = Value::from(MessageType::Request.to_number());
            let msgid = Value::from(42);
            let msgcode = Value::from(rpc1::SessionMethod::Attach.to_number());
            let msgargs = Value::from(42);
            let value = Value::Array(vec![msgtype, msgid, msgcode, msgargs]);
            let msg = Message::from(value).unwrap();
            let test = Test;

            // ----------------------------------------------------
            // WHEN
            // SessionState::check_msg() is called with the message
            // ----------------------------------------------------
            let result = test.check_msg(msg);

            // -------------------------------------
            // THEN
            // An InvalidMessage error is generated
            // -------------------------------------
            let val = match result {
                Err(e) => {
                    match e.kind() {
                        &SasdErrorKind::InvalidMessage => true,
                        _ => false,
                    }
                }
                _ => false,
            };
            assert!(val);
        }

        #[test]
        fn call_check_msg_method()
        {
            let expected = String::from("called");

            // -----------------------------------------------------
            // GIVEN
            // a type implementing SessionState and
            // the type's check_msg_method() method raises an error and
            // a Message that can be turned into a SessionRequest
            // -----------------------------------------------------
            // Local implementor whose check_msg_method bails with a known
            // message so the test can prove the method was reached.
            struct Test;

            impl SessionState for Test {
                fn check_msg_method(&self, _req: SessionRequest)
                    -> SasdResult<SessionRequest>
                {
                    bail!("called")
                }
            }

            let test = Test;

            // Message
            let msgtype = Value::from(MessageType::Request.to_number());
            let msgid = Value::from(42);
            let msgcode = Value::from(rpc1::SessionMethod::Attach.to_number());
            let msgargs = Value::Array(vec![]);
            let value = Value::Array(vec![msgtype, msgid, msgcode, msgargs]);
            let msg = Message::from(value).unwrap();

            // ----------------------------------------------------------
            // WHEN
            // the SessionState type's check_msg() method is called with
            // the message
            // ----------------------------------------------------------
            let result = test.check_msg(msg);

            // -------------------------------
            // THEN
            // check_msg_method() is called
            // -------------------------------
            let val = match result {
                Err(e) => {
                    match e.kind() {
                        &SasdErrorKind::Msg(ref msg) => msg == &expected,
                        _ => false,
                    }
                }
                _ => false,
            };
            assert!(val);
        }
    }
}
// Tests for the v1 `InitSession` state: conversion from a wrapped
// `StateValue`, and the dispatch paths that either skip authentication
// (matching tokens) or start it (empty session store).
mod initsession {
    mod from_value {
        use error::SasdErrorKind;
        use protocol::StateValue;
        use protocol::v1::{InitSession, Session, StateValue as V1StateValue};

        #[test]
        fn is_initsession()
        {
            // ------------------------
            // GIVEN
            // a StateValue::V1(InitSession) instance
            // ------------------------
            let value =
                StateValue::V1(V1StateValue::InitSession(InitSession::new()));

            // -------------------------------
            // WHEN
            // InitSession::from_value() is called with the StateValue value
            // -------------------------------
            let result = InitSession::from_value(value);

            // --------------------------------------------------
            // THEN
            // the value wrapped in the StateValue variant is returned
            // --------------------------------------------------
            let testval = match result {
                Ok(_) => true,
                _ => false,
            };
            assert!(testval);
        }

        #[test]
        fn not_initsession()
        {
            // ------------------------
            // GIVEN
            // a StateValue::V1(Session) instance
            // ------------------------
            let value = StateValue::V1(V1StateValue::Session(Session::new()));

            // -------------------------------
            // WHEN
            // InitSession::from_value() is called with the StateValue value
            // -------------------------------
            let result = InitSession::from_value(value);

            // --------------------------------------------------
            // THEN
            // a SasdErrorKind::InvalidStateValue error is returned
            // --------------------------------------------------
            let testval = match result {
                Err(e) => {
                    match e.kind() {
                        &SasdErrorKind::InvalidStateValue(_, _) => {
                            let expected = "Invalid StateValue: expected \
                                            StateValue::V1(InitSession), got \
                                            StateValue::V1(Session(Session)) \
                                            instead"
                                .to_owned();
                            assert_eq!(e.to_string(), expected);
                            true
                            // e.to_string() == expected
                        }
                        _ => false,
                    }
                }
                _ => false,
            };
            assert!(testval);
        }
    }

    mod can_skip_auth {
        use os::windows::protocol::SessionStore;
        use protocol::{State, StateValue};
        use protocol::v1::{InitSession, SessionRequest, SessionResponse,
                           StateValue as V1StateValue};
        use rmpv::Value;
        use rpc::v1::{SessionError, SessionMethod};
        use siminau_rpc::message::response::RpcResponse;
        use state::SessionState;
        use std::fs::OpenOptions;
        use std::io::Read;
        use std::path::PathBuf;

        // Helpers
        use settings::{WindowsSection, new_settings_handle};
        use settings::test::helper::new_settings;
        use test::protocol::{cleanup_settings, dummy_session_state};

        // TODO
        // This matches on both session and auth tokens.
        // Once the state machine has been fleshed out, this should be changed
        // so that only the auth token is checked
        #[test]
        fn skip_auth_on_matching_tokens()
        {
            // -------------------------------------------------------
            // GIVEN
            // a valid SessionRequest message and
            // the message contains session and auth tokens as args and
            // a sessionstore that holds a session and auth token and
            // the sessionstore tokens match the message tokens and
            // an InitSession instance
            // -------------------------------------------------------
            // Create tokens and request message
            let auth_token = "world".to_owned();
            let msgargs = vec![Value::from(auth_token.clone())];
            let request =
                SessionRequest::new(42, SessionMethod::Attach, msgargs);

            // Create state
            let mut init = InitSession::new();

            // Create session state
            let dummy =
                StateValue::V1(V1StateValue::InitSession(InitSession::new()));
            let settings = new_settings(
                1234,
                None,
                WindowsSection {
                    token_data_dir: PathBuf::from("/does/not/exist"),
                },
            );
            let settings_handle = new_settings_handle(settings);
            let session_store = SessionStore {
                auth_token: auth_token,
                auth_file: None,
            };
            let mut session_state =
                SessionState::new(session_store, settings_handle, dummy);
            let mut handle = session_state.handle();

            // ------------------------------------------------------------
            // WHEN
            // InitSession::dispatch() is called with the sessionstore and
            // message
            // ------------------------------------------------------------
            let result = init.dispatch(&mut handle, request.into()).unwrap();
            let (state, msg) = match result {
                (Some(s), Some(m)) => (s, m),
                _ => unreachable!(),
            };

            // ----------------------------------------------------
            // THEN
            // A (State, SessionResponse) tuple is returned and
            // the state is V1StateKind::Session and
            // the response has Nil for its error and
            // the response has Nil for its result
            // ----------------------------------------------------
            let response = SessionResponse::from(msg).unwrap();
            assert!(state.is_v1());
            assert!(state.as_v1().unwrap().is_session());
            assert_eq!(response.error_code(), SessionError::Nil);
            assert_eq!(response.result(), &Value::Nil);
        }

        // TODO
        // it doesn't check if the token_data_dir already contains a
        // file with the same name as the generated file name.
        #[test]
        fn do_auth()
        {
            // -------------------------------------------------------
            // GIVEN
            // a valid SessionRequest message and
            // an empty sessionstore and
            // an InitSession instance
            // -------------------------------------------------------
            let request =
                SessionRequest::new(42, SessionMethod::Attach, vec![]);

            // Create state
            let mut init = InitSession::new();

            // Create session state
            let dummy =
                StateValue::V1(V1StateValue::InitSession(InitSession::new()));
            let mut session_state = dummy_session_state(dummy);

            // ------------------------------------------------------------
            // WHEN
            // InitSession::dispatch() is called with the sessionstore and
            // message
            // ------------------------------------------------------------
            let (state, msg) = {
                let mut handle = session_state.handle();
                let result = init.dispatch(&mut handle, request.into())
                    .unwrap();
                match result {
                    (Some(s), Some(m)) => (s, m),
                    _ => unreachable!(),
                }
            };

            // --------------------------------------------------------
            // THEN
            // A (State, SessionResponse) tuple is returned and
            // the state is V1StateValue::AuthSession and
            // the response has Nil for its error and
            // the response has session and auth tokens for its result
            // --------------------------------------------------------
            let response = SessionResponse::from(msg).unwrap();
            assert!(state.is_v1());
            assert!(state.as_v1().unwrap().is_authsession());
            assert_eq!(response.error_code(), SessionError::Nil);

            // This is a &Vec<Value>
            let result = response.result().as_array().unwrap();
            assert_eq!(result.len(), 1);

            let auth_filepath = PathBuf::from(result[0].as_str().unwrap());
            assert!(auth_filepath.exists());
            assert!(auth_filepath.is_file());

            // Read the file
            let auth_token = {
                let mut buf: Vec<u8> = Vec::new();
                let mut f = OpenOptions::new()
                    .read(true)
                    .open(auth_filepath.as_path())
                    .unwrap();
                let numbytes = f.read_to_end(&mut buf).unwrap();
                assert!(numbytes > 0);

                // Return the auth token contained in the file
                String::from_utf8(buf).unwrap()
            };
            assert_eq!(auth_token.len(), 64);

            // --------------------
            // CLEANUP
            // --------------------
            cleanup_settings(session_state);
            assert!(!auth_filepath.exists());
            assert!(!auth_filepath.parent().unwrap().exists());
        }
    }
}
mod authsession {
mod check_msg_method {
use error::SasdErrorKind;
use protocol::{State, StateValue};
use protocol::v1::{AuthSession, SessionRequest,
StateValue as V1StateValue};
use quickcheck::TestResult;
use rmpv::Value;
use rpc::v1::SessionMethod;
// Helpers
use test::protocol::dummy_session_state_nofs;
#[test]
fn non_authattach_msg_error()
{
// -------------------------------------------------------
// GIVEN
// a SessionRequest message and
// the message method is not AuthAttach and
// an empty sessionstore and
// an InitSession instance
// -------------------------------------------------------
let request =
SessionRequest::new(42, SessionMethod::Attach, vec![]);
// Create state
let mut auth = AuthSession::new();
// Create session state
let dummy =
StateValue::V1(V1StateValue::AuthSession(AuthSession::new()));
let mut session_state = dummy_session_state_nofs(dummy);
// ------------------------------------------------------------
// WHEN
// AuthSession::dispatch() is called with the sessionstore and
// message
// ------------------------------------------------------------
let result = {
let mut handle = session_state.handle();
auth.dispatch(&mut handle, request.into())
};
// ----------------------------------------------------
// THEN
// An error is returned and
// the error is UnexpectedMessage
// ----------------------------------------------------
let testval = match result {
Ok(_) => false,
Err(e) => {
match e.kind() {
&SasdErrorKind::UnexpectedMessage => true,
_ => false,
}
}
};
assert!(testval);
}
        // Property: an AuthAttach request with any argument count other than
        // exactly 1 must be rejected as InvalidMessage. The single valid
        // arity (1) is discarded so quickcheck only probes invalid inputs.
        quickcheck! {
            fn authattach_args_error(numargs: usize) -> TestResult
            {
                if numargs == 1 {
                    return TestResult::discard()
                }
                // -------------------------------------------------------
                // GIVEN
                // a SessionRequest message and
                // the message method is AuthAttach and
                // the message has a number of args != 1 and
                // an empty sessionstore and
                // an InitSession instance
                // -------------------------------------------------------
                // Setup args
                let mut args = Vec::new();
                for i in 0..numargs {
                    args.push(Value::from(i));
                }
                // Create request message
                let request =
                    SessionRequest::new(42, SessionMethod::AuthAttach, args);
                // Create state
                let mut auth = AuthSession::new();
                // Create session state
                let dummy =
                    StateValue::V1(V1StateValue::AuthSession(AuthSession::new()));
                let mut session_state = dummy_session_state_nofs(dummy);
                // ------------------------------------------------------------
                // WHEN
                // AuthSession::dispatch() is called with the sessionstore and
                // message
                // ------------------------------------------------------------
                let result = {
                    let mut handle = session_state.handle();
                    auth.dispatch(&mut handle, request.into())
                };
                // ----------------------------------------------------
                // THEN
                // An error is returned and
                // the error is InvalidMessage
                // ----------------------------------------------------
                // Collapse the Result into a bool: only Err(InvalidMessage) passes.
                let testval = match result {
                    Ok(_) => false,
                    Err(e) => {
                        match e.kind() {
                            &SasdErrorKind::InvalidMessage => true,
                            _ => false,
                        }
                    }
                };
                TestResult::from_bool(testval)
            }
        }
}
mod auth_attach {
use protocol::{State, StateValue};
use protocol::v1::{AuthSession, SessionRequest, SessionResponse,
StateValue as V1StateValue};
use rmpv::{Utf8String, Value};
use rpc::v1::{SessionError, SessionMethod};
use siminau_rpc::message::response::RpcResponse;
// Helpers
use test::protocol::dummy_session_state_nofs;
#[test]
fn auth_token_nomatch()
{
// -------------------------------------------------------
// GIVEN
// an auth token and
// a SessionRequest message and
// the message method is AuthAttach and
// a sessionstore containing a non-matching auth token and
// an InitSession instance
// -------------------------------------------------------
let auth_token = "HELLO".to_owned();
let request =
SessionRequest::new(
42,
SessionMethod::AuthAttach,
vec![Value::String(Utf8String::from(&auth_token[..]))],
);
// Create state
let mut auth = AuthSession::new();
let dummy =
StateValue::V1(V1StateValue::AuthSession(AuthSession::new()));
// Create session state
let mut session_state = dummy_session_state_nofs(dummy);
// Assign tokens to session_state
session_state.session_store().auth_token = "NOTCORRECT".to_owned();
// ------------------------------------------------------------
// WHEN
// AuthSession::dispatch() is called with the sessionstore and
// message
// ------------------------------------------------------------
let (_, msg) = {
let mut handle = session_state.handle();
auth.dispatch(&mut handle, request.into()).unwrap()
};
// ----------------------------------------------------
// THEN
// An error response is returned and
// the response's error code is InvalidAttach and
// the response's result is the str "auth token doesn't match"
// ----------------------------------------------------
let resp = SessionResponse::from(msg.unwrap()).unwrap();
assert_eq!(resp.error_code(), SessionError::InvalidAttach);
let result = resp.result().as_str().unwrap();
assert_eq!(result, "auth token doesn't match");
}
#[test]
fn auth_token_match()
{
// -------------------------------------------------------
// GIVEN
// an auth token and
// a SessionRequest message and
// the message method is AuthAttach and
// a sessionstore containing a matching auth token and
// an InitSession instance
// -------------------------------------------------------
let auth_token = "HELLO".to_owned();
let request =
SessionRequest::new(
42,
SessionMethod::AuthAttach,
vec![Value::String(Utf8String::from(&auth_token[..]))],
);
// Create state
let mut auth = AuthSession::new();
let dummy =
StateValue::V1(V1StateValue::AuthSession(AuthSession::new()));
// Create session state
let mut session_state = dummy_session_state_nofs(dummy);
// Assign tokens to session_state
session_state.session_store().auth_token = auth_token;
// ------------------------------------------------------------
// WHEN
// AuthSession::dispatch() is called with the sessionstore and
// message
// ------------------------------------------------------------
let (state, msg) = {
let mut handle = session_state.handle();
auth.dispatch(&mut handle, request.into()).unwrap()
};
// ----------------------------------------------------
// THEN
// A non-error response is returned and
// the response's error code is Nil and
// the response's result is Nil
// ----------------------------------------------------
// Check response
let resp = SessionResponse::from(msg.unwrap()).unwrap();
assert_eq!(resp.error_code(), SessionError::Nil);
assert!(resp.result().is_nil());
// Check state
let testval = match state {
Some(StateValue::V1(ref v1)) => v1.is_session(),
_ => false,
};
assert!(testval);
}
}
}
// ===========================================================================
//
// ===========================================================================<|fim▁end|> | // Copyright (C) 2017 authors and contributors (see AUTHORS file)
//
// This file is released under the MIT License.
|
<|file_name|>net.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2017 The Sarielsaz Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC calls related to net.
Tests correspond to code in rpc/net.cpp.
"""
import time
from test_framework.test_framework import SarielsazTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes_bi,
p2p_port,
)
class NetTest(SarielsazTestFramework):
    def set_test_params(self):
        """Two fresh nodes on a clean chain; the framework connects them bidirectionally."""
        self.setup_clean_chain = True
        self.num_nodes = 2
    def run_test(self):
        """Run the net RPC sub-tests in order.

        The sub-tests share node/connection state, so the order matters
        (e.g. byte totals are checked before the network is toggled off).
        """
        self._test_connection_count()
        self._test_getnettotals()
        self._test_getnetworkinginfo()
        self._test_getaddednodeinfo()
        self._test_getpeerinfo()
    def _test_connection_count(self):
        """getconnectioncount reflects the two framework-made connections."""
        # connect_nodes_bi connects each node to the other
        assert_equal(self.nodes[0].getconnectioncount(), 2)
    def _test_getnettotals(self):
        """getnettotals byte counters agree with per-peer getpeerinfo counters."""
        # check that getnettotals totalbytesrecv and totalbytessent
        # are consistent with getpeerinfo
        peer_info = self.nodes[0].getpeerinfo()
        assert_equal(len(peer_info), 2)
        net_totals = self.nodes[0].getnettotals()
        assert_equal(sum([peer['bytesrecv'] for peer in peer_info]),
                     net_totals['totalbytesrecv'])
        assert_equal(sum([peer['bytessent'] for peer in peer_info]),
                     net_totals['totalbytessent'])
        # test getnettotals and getpeerinfo by doing a ping
        # the bytes sent/received should change
        # note ping and pong are 32 bytes each
        self.nodes[0].ping()
        # NOTE(review): a fixed 0.1s sleep assumes the ping/pong round trip
        # completes in time; this could be flaky on a loaded host — confirm.
        time.sleep(0.1)
        peer_info_after_ping = self.nodes[0].getpeerinfo()
        net_totals_after_ping = self.nodes[0].getnettotals()
        # Each peer should have exactly one extra 32-byte ping sent and
        # one extra 32-byte pong received; totals grow by 32*2 (two peers).
        for before, after in zip(peer_info, peer_info_after_ping):
            assert_equal(before['bytesrecv_per_msg']['pong'] + 32, after['bytesrecv_per_msg']['pong'])
            assert_equal(before['bytessent_per_msg']['ping'] + 32, after['bytessent_per_msg']['ping'])
        assert_equal(net_totals['totalbytesrecv'] + 32*2, net_totals_after_ping['totalbytesrecv'])
        assert_equal(net_totals['totalbytessent'] + 32*2, net_totals_after_ping['totalbytessent'])
def _test_getnetworkinginfo(self):
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
self.nodes[0].setnetworkactive(False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
timeout = 3
while self.nodes[0].getnetworkinfo()['connections'] != 0:
# Wait a bit for all sockets to close
assert timeout > 0, 'not all connections closed in time'
timeout -= 0.1
time.sleep(0.1)
self.nodes[0].setnetworkactive(True)
connect_nodes_bi(self.nodes, 0, 1)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)<|fim▁hole|> assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
def _test_getaddednodeinfo(self):
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# add a node (node2) to node0
ip_port = "127.0.0.1:{}".format(p2p_port(2))
self.nodes[0].addnode(ip_port, 'add')
# check that the node has indeed been added
added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
# check that a non-existent node returns an error
assert_raises_rpc_error(-24, "Node has not been added",
self.nodes[0].getaddednodeinfo, '1.1.1.1')
def _test_getpeerinfo(self):
peer_info = [x.getpeerinfo() for x in self.nodes]
# check both sides of bidirectional connection between nodes
# the address bound to on one side will be the source address for the other node
assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr'])
if __name__ == '__main__':
NetTest().main()<|fim▁end|> | |
<|file_name|>test_buffered_pipe.py<|end_file_name|><|fim▁begin|># Copyright (C) 2006-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distrubuted in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Some unit tests for BufferedPipe.
"""
import threading
import time
import unittest
from paramiko.buffered_pipe import BufferedPipe, PipeTimeout
from paramiko import pipe
from util import ParamikoTest
def delay_thread(pipe):
    """Feed 'a' immediately, 'b' after half a second, then close the pipe.

    Runs on a background thread so the main thread can exercise timed reads.
    Note: the parameter shadows the module-level ``paramiko.pipe`` import
    inside this function.
    """
    pipe.feed('a')
    time.sleep(0.5)
    pipe.feed('b')
    pipe.close()
def close_thread(pipe):
    """Close the pipe after ~0.2s, to unblock a reader waiting on it."""
    time.sleep(0.2)
    pipe.close()
class BufferedPipeTest(ParamikoTest):
def test_1_buffered_pipe(self):
p = BufferedPipe()
self.assert_(not p.read_ready())
p.feed('hello.')
self.assert_(p.read_ready())
data = p.read(6)
self.assertEquals(b'hello.', data)
p.feed('plus/minus')<|fim▁hole|> p.close()
self.assert_(not p.read_ready())
self.assertEquals(b'', p.read(1))
def test_2_delay(self):
p = BufferedPipe()
self.assert_(not p.read_ready())
threading.Thread(target=delay_thread, args=(p,)).start()
self.assertEquals(b'a', p.read(1, 0.1))
try:
p.read(1, 0.1)
self.assert_(False)
except PipeTimeout:
pass
self.assertEquals(b'b', p.read(1, 1.0))
self.assertEquals(b'', p.read(1))
def test_3_close_while_reading(self):
p = BufferedPipe()
threading.Thread(target=close_thread, args=(p,)).start()
data = p.read(1, 1.0)
self.assertEquals(b'', data)
def test_4_or_pipe(self):
p = pipe.make_pipe()
p1, p2 = pipe.make_or_pipe(p)
self.assertFalse(p._set)
p1.set()
self.assertTrue(p._set)
p2.set()
self.assertTrue(p._set)
p1.clear()
self.assertTrue(p._set)
p2.clear()
self.assertFalse(p._set)<|fim▁end|> | self.assertEquals(b'plu', p.read(3))
self.assertEquals(b's/m', p.read(3))
self.assertEquals(b'inus', p.read(4))
|
<|file_name|>vsserror.rs<|end_file_name|><|fim▁begin|>// Licensed under the Apache License, Version 2.0
// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option.
// All files in the project carrying such notice may not be copied, modified, or distributed
// except according to those terms.
//! VSS Error header file
use um::winnt::HRESULT;
// VSS HRESULT codes, facility 0x0004 (FACILITY_ITF range used by VSS).
// VSS_E_* (0x8004xxxx) are failures; VSS_S_* (0x0004xxxx) are success codes.
// NOTE(review): 0x8xxxxxxx literals overflow i32 (HRESULT); presumably the
// crate root carries #![allow(overflowing_literals)] — confirm.
// Provider / state errors.
pub const VSS_E_BAD_STATE: HRESULT = 0x80042301;
pub const VSS_E_UNEXPECTED: HRESULT = 0x80042302;
pub const VSS_E_PROVIDER_ALREADY_REGISTERED: HRESULT = 0x80042303;
pub const VSS_E_PROVIDER_NOT_REGISTERED: HRESULT = 0x80042304;
pub const VSS_E_PROVIDER_VETO: HRESULT = 0x80042306;
pub const VSS_E_PROVIDER_IN_USE: HRESULT = 0x80042307;
pub const VSS_E_OBJECT_NOT_FOUND: HRESULT = 0x80042308;
// Async operation status (success codes).
pub const VSS_S_ASYNC_PENDING: HRESULT = 0x00042309;
pub const VSS_S_ASYNC_FINISHED: HRESULT = 0x0004230A;
pub const VSS_S_ASYNC_CANCELLED: HRESULT = 0x0004230B;
// Volume / snapshot-set errors.
pub const VSS_E_VOLUME_NOT_SUPPORTED: HRESULT = 0x8004230C;
pub const VSS_E_VOLUME_NOT_SUPPORTED_BY_PROVIDER: HRESULT = 0x8004230E;
pub const VSS_E_OBJECT_ALREADY_EXISTS: HRESULT = 0x8004230D;
pub const VSS_E_UNEXPECTED_PROVIDER_ERROR: HRESULT = 0x8004230F;
pub const VSS_E_CORRUPT_XML_DOCUMENT: HRESULT = 0x80042310;
pub const VSS_E_INVALID_XML_DOCUMENT: HRESULT = 0x80042311;
pub const VSS_E_MAXIMUM_NUMBER_OF_VOLUMES_REACHED: HRESULT = 0x80042312;
pub const VSS_E_FLUSH_WRITES_TIMEOUT: HRESULT = 0x80042313;
pub const VSS_E_HOLD_WRITES_TIMEOUT: HRESULT = 0x80042314;
pub const VSS_E_UNEXPECTED_WRITER_ERROR: HRESULT = 0x80042315;
pub const VSS_E_SNAPSHOT_SET_IN_PROGRESS: HRESULT = 0x80042316;
pub const VSS_E_MAXIMUM_NUMBER_OF_SNAPSHOTS_REACHED: HRESULT = 0x80042317;
// Writer infrastructure errors.
pub const VSS_E_WRITER_INFRASTRUCTURE: HRESULT = 0x80042318;
pub const VSS_E_WRITER_NOT_RESPONDING: HRESULT = 0x80042319;
pub const VSS_E_WRITER_ALREADY_SUBSCRIBED: HRESULT = 0x8004231A;
pub const VSS_E_UNSUPPORTED_CONTEXT: HRESULT = 0x8004231B;
pub const VSS_E_VOLUME_IN_USE: HRESULT = 0x8004231D;
pub const VSS_E_MAXIMUM_DIFFAREA_ASSOCIATIONS_REACHED: HRESULT = 0x8004231E;
pub const VSS_E_INSUFFICIENT_STORAGE: HRESULT = 0x8004231F;
// Import / transportable-snapshot errors.
pub const VSS_E_NO_SNAPSHOTS_IMPORTED: HRESULT = 0x80042320;
pub const VSS_S_SOME_SNAPSHOTS_NOT_IMPORTED: HRESULT = 0x00042321;
pub const VSS_E_SOME_SNAPSHOTS_NOT_IMPORTED: HRESULT = 0x80042321;
pub const VSS_E_MAXIMUM_NUMBER_OF_REMOTE_MACHINES_REACHED: HRESULT = 0x80042322;
pub const VSS_E_REMOTE_SERVER_UNAVAILABLE: HRESULT = 0x80042323;
pub const VSS_E_REMOTE_SERVER_UNSUPPORTED: HRESULT = 0x80042324;
// Revert / transaction errors.
pub const VSS_E_REVERT_IN_PROGRESS: HRESULT = 0x80042325;
pub const VSS_E_REVERT_VOLUME_LOST: HRESULT = 0x80042326;
pub const VSS_E_REBOOT_REQUIRED: HRESULT = 0x80042327;
pub const VSS_E_TRANSACTION_FREEZE_TIMEOUT: HRESULT = 0x80042328;
pub const VSS_E_TRANSACTION_THAW_TIMEOUT: HRESULT = 0x80042329;
pub const VSS_E_VOLUME_NOT_LOCAL: HRESULT = 0x8004232D;
pub const VSS_E_CLUSTER_TIMEOUT: HRESULT = 0x8004232E;
// Writer failure classifications (0x800423Fx).
pub const VSS_E_WRITERERROR_INCONSISTENTSNAPSHOT: HRESULT = 0x800423F0;
pub const VSS_E_WRITERERROR_OUTOFRESOURCES: HRESULT = 0x800423F1;
pub const VSS_E_WRITERERROR_TIMEOUT: HRESULT = 0x800423F2;
pub const VSS_E_WRITERERROR_RETRYABLE: HRESULT = 0x800423F3;
pub const VSS_E_WRITERERROR_NONRETRYABLE: HRESULT = 0x800423F4;
pub const VSS_E_WRITERERROR_RECOVERY_FAILED: HRESULT = 0x800423F5;
pub const VSS_E_BREAK_REVERT_ID_FAILED: HRESULT = 0x800423F6;
pub const VSS_E_LEGACY_PROVIDER: HRESULT = 0x800423F7;
pub const VSS_E_MISSING_DISK: HRESULT = 0x800423F8;
pub const VSS_E_MISSING_HIDDEN_VOLUME: HRESULT = 0x800423F9;
pub const VSS_E_MISSING_VOLUME: HRESULT = 0x800423FA;
pub const VSS_E_AUTORECOVERY_FAILED: HRESULT = 0x800423FB;
pub const VSS_E_DYNAMIC_DISK_ERROR: HRESULT = 0x800423FC;
pub const VSS_E_NONTRANSPORTABLE_BCD: HRESULT = 0x800423FD;
pub const VSS_E_CANNOT_REVERT_DISKID: HRESULT = 0x800423FE;
pub const VSS_E_RESYNC_IN_PROGRESS: HRESULT = 0x800423FF;
pub const VSS_E_CLUSTER_ERROR: HRESULT = 0x80042400;
pub const VSS_E_UNSELECTED_VOLUME: HRESULT = 0x8004232A;
pub const VSS_E_SNAPSHOT_NOT_IN_SET: HRESULT = 0x8004232B;
pub const VSS_E_NESTED_VOLUME_LIMIT: HRESULT = 0x8004232C;<|fim▁hole|>pub const VSS_E_ASRERROR_NO_ARCPATH: HRESULT = 0x80042403;
// ASR (Automated System Recovery) writer errors.
pub const VSS_E_ASRERROR_MISSING_DYNDISK: HRESULT = 0x80042404;
pub const VSS_E_ASRERROR_SHARED_CRIDISK: HRESULT = 0x80042405;
pub const VSS_E_ASRERROR_DATADISK_RDISK0: HRESULT = 0x80042406;
pub const VSS_E_ASRERROR_RDISK0_TOOSMALL: HRESULT = 0x80042407;
pub const VSS_E_ASRERROR_CRITICAL_DISKS_TOO_SMALL: HRESULT = 0x80042408;
pub const VSS_E_WRITER_STATUS_NOT_AVAILABLE: HRESULT = 0x80042409;
pub const VSS_E_ASRERROR_DYNAMIC_VHD_NOT_SUPPORTED: HRESULT = 0x8004240A;
pub const VSS_E_CRITICAL_VOLUME_ON_INVALID_DISK: HRESULT = 0x80042411;
pub const VSS_E_ASRERROR_RDISK_FOR_SYSTEM_DISK_NOT_FOUND: HRESULT = 0x80042412;
pub const VSS_E_ASRERROR_NO_PHYSICAL_DISK_AVAILABLE: HRESULT = 0x80042413;
pub const VSS_E_ASRERROR_FIXED_PHYSICAL_DISK_AVAILABLE_AFTER_DISK_EXCLUSION: HRESULT =
    0x80042414;
pub const VSS_E_ASRERROR_CRITICAL_DISK_CANNOT_BE_EXCLUDED: HRESULT = 0x80042415;
pub const VSS_E_ASRERROR_SYSTEM_PARTITION_HIDDEN: HRESULT = 0x80042416;
pub const VSS_E_FSS_TIMEOUT: HRESULT = 0x80042417;<|fim▁end|> | pub const VSS_E_NOT_SUPPORTED: HRESULT = 0x8004232F;
// Miscellaneous writer / ASR disk-layout errors.
pub const VSS_E_WRITERERROR_PARTIAL_FAILURE: HRESULT = 0x80042336;
pub const VSS_E_ASRERROR_DISK_ASSIGNMENT_FAILED: HRESULT = 0x80042401;
pub const VSS_E_ASRERROR_DISK_RECREATION_FAILED: HRESULT = 0x80042402;
<|file_name|>classes_63.js<|end_file_name|><|fim▁begin|>var searchData=
[<|fim▁hole|>];<|fim▁end|> | ['cache',['Cache',['../classCache.html',1,'']]],
['conversation',['Conversation',['../classConversation.html',1,'']]] |
<|file_name|>ImpQual.java<|end_file_name|><|fim▁begin|>import java.util.*;
import Jakarta.util.FixDosOutputStream;
import java.io.*;
public class ImpQual {
<|fim▁hole|> public String GetName() {
String result = ( ( AST_QualifiedName ) arg[0] ).GetName();
if ( arg[1].arg[0] != null )
result = result + ".*";
return result;
}
}<|fim▁end|> | /* returns name of AST_QualifiedName */
|
<|file_name|>temperature-mpl115a2.js<|end_file_name|><|fim▁begin|>var five = require("../lib/johnny-five.js");
var board = new five.Board();
<|fim▁hole|> });
temperature.on("data", function() {
console.log("temperature");
console.log(" celsius : ", this.celsius);
console.log(" fahrenheit : ", this.fahrenheit);
console.log(" kelvin : ", this.kelvin);
console.log("--------------------------------------");
});
});
// @markdown
// - [MPL115A2 - I2C Barometric Pressure/Temperature Sensor](https://www.adafruit.com/product/992)
// @markdown<|fim▁end|> | board.on("ready", function() {
var temperature = new five.Temperature({
controller: "MPL115A2" |
<|file_name|>test_loadbalance.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin.sshClient import SshClient
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
from nose.plugins.attrib import attr
#Import System modules
import time
_multiprocess_shared_ = True
class Services:
"""Test Network Services
"""
def __init__(self):
self.services = {
"ostype": "CentOS 5.3 (64-bit)",
# Cent OS 5.3 (64 bit)
"lb_switch_wait": 10,
# Time interval after which LB switches the requests
"sleep": 60,
"timeout":10,
"network_offering": {
"name": 'Test Network offering',
"displaytext": 'Test Network offering',
"guestiptype": 'Isolated',
"supportedservices": 'Dhcp,Dns,SourceNat,PortForwarding',
"traffictype": 'GUEST',
"availability": 'Optional',
"serviceProviderList" : {
"Dhcp": 'VirtualRouter',<|fim▁hole|> "Dns": 'VirtualRouter',
"SourceNat": 'VirtualRouter',
"PortForwarding": 'VirtualRouter',
},
},
"network": {
"name": "Test Network",
"displaytext": "Test Network",
},
"service_offering": {
"name": "Tiny Instance",
"displaytext": "Tiny Instance",
"cpunumber": 1,
"cpuspeed": 100,
# in MHz
"memory": 256,
# In MBs
},
"account": {
"email": "[email protected]",
"firstname": "Test",
"lastname": "User",
"username": "test",
"password": "password",
},
"server":
{
"displayname": "Small Instance",
"username": "root",
"password": "password",
"hypervisor": 'XenServer',
"privateport": 22,
"publicport": 22,
"ssh_port": 22,
"protocol": 'TCP',
},
"natrule":
{
"privateport": 22,
"publicport": 2222,
"protocol": "TCP"
},
"lbrule":
{
"name": "SSH",
"alg": "roundrobin",
# Algorithm used for load balancing
"privateport": 22,
"publicport": 2222,
"protocol": 'TCP'
}
}
class TestLoadBalance(cloudstackTestCase):
    @classmethod
    def setUpClass(cls):
        """One-time fixture: account, offering, three VMs and a public IP.

        The three VMs back the load-balancer rules exercised by the tests;
        the extra public IP (non-source-NAT) is used by test_02 and
        test_assign_and_removal_lb.
        """
        cls.api_client = super(TestLoadBalance, cls).getClsTestClient().getApiClient()
        cls.services = Services().services
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.api_client, cls.services)
        cls.zone = get_zone(cls.api_client, cls.services)
        template = get_template(
            cls.api_client,
            cls.zone.id,
            cls.services["ostype"]
        )
        cls.services["server"]["zoneid"] = cls.zone.id
        # Create an account, network, VM and IP addresses
        cls.account = Account.create(
            cls.api_client,
            cls.services["account"],
            admin=True,
            domainid=cls.domain.id
        )
        cls.service_offering = ServiceOffering.create(
            cls.api_client,
            cls.services["service_offering"]
        )
        cls.vm_1 = VirtualMachine.create(
            cls.api_client,
            cls.services["server"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls.vm_2 = VirtualMachine.create(
            cls.api_client,
            cls.services["server"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls.vm_3 = VirtualMachine.create(
            cls.api_client,
            cls.services["server"],
            templateid=template.id,
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id
        )
        cls.non_src_nat_ip = PublicIPAddress.create(
            cls.api_client,
            cls.account.name,
            cls.zone.id,
            cls.account.domainid,
            cls.services["server"]
        )
        # Open up firewall port for SSH
        cls.fw_rule = FireWallRule.create(
            cls.api_client,
            ipaddressid=cls.non_src_nat_ip.ipaddress.id,
            protocol=cls.services["lbrule"]["protocol"],
            cidrlist=['0.0.0.0/0'],
            startport=cls.services["lbrule"]["publicport"],
            endport=cls.services["lbrule"]["publicport"]
        )
        # VMs and the IP are not listed here: deleting the account in
        # tearDownClass reclaims the resources it owns.
        cls._cleanup = [
            cls.account,
            cls.service_offering
        ]
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.cleanup = []
return
    def tearDown(self):
        """Delete every resource a test registered in ``self.cleanup``."""
        cleanup_resources(self.apiclient, self.cleanup)
        return
    @classmethod
    def tearDownClass(cls):
        """Delete the class-level fixtures (account cascade-deletes its VMs/IPs)."""
        cleanup_resources(cls.api_client, cls._cleanup)
        return
def try_ssh(self, ip_addr, hostnames):
try:
self.debug(
"SSH into VM (IPaddress: %s) & NAT Rule (Public IP: %s)" %
(self.vm_1.ipaddress, ip_addr)
)
# If Round Robin Algorithm is chosen,
# each ssh command should alternate between VMs
ssh_1 = SshClient(
ip_addr,
self.services['lbrule']["publicport"],
self.vm_1.username,
self.vm_1.password
)
hostnames.append(ssh_1.execute("hostname")[0])
self.debug(hostnames)
except Exception as e:
self.fail("%s: SSH failed for VM with IP Address: %s" %
(e, ip_addr))
time.sleep(self.services["lb_switch_wait"])
return
    @attr(tags = ["advanced", "advancedns", "smoke"])
    def test_01_create_lb_rule_src_nat(self):
        """Test to create Load balancing rule with source NAT"""
        # Validate the Following:
        #1. listLoadBalancerRules should return the added rule
        #2. attempt to ssh twice on the load balanced IP
        #3. verify using the hostname of the VM
        #   that round robin is indeed happening as expected
        src_nat_ip_addrs = PublicIPAddress.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(src_nat_ip_addrs, list),
            True,
            "Check list response returns a valid list"
        )
        # First listed address is the account's source-NAT IP.
        src_nat_ip_addr = src_nat_ip_addrs[0]
        # Check if VM is in Running state before creating LB rule
        vm_response = VirtualMachine.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(vm_response, list),
            True,
            "Check list VM returns a valid list"
        )
        self.assertNotEqual(
            len(vm_response),
            0,
            "Check Port Forwarding Rule is created"
        )
        for vm in vm_response:
            self.assertEqual(
                vm.state,
                'Running',
                "VM state should be Running before creating a NAT rule."
            )
        #Create Load Balancer rule and assign VMs to rule
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            src_nat_ip_addr.id,
            accountid=self.account.name
        )
        # Registered for per-test teardown in tearDown().
        self.cleanup.append(lb_rule)
        lb_rule.assign(self.apiclient, [self.vm_1, self.vm_2])
        lb_rules = list_lb_rules(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "Check list response returns a valid list"
        )
        #verify listLoadBalancerRules lists the added load balancing rule
        self.assertNotEqual(
            len(lb_rules),
            0,
            "Check Load Balancer Rule in its List"
        )
        self.assertEqual(
            lb_rules[0].id,
            lb_rule.id,
            "Check List Load Balancer Rules returns valid Rule"
        )
        # listLoadBalancerRuleInstances should list all
        # instances associated with that LB rule
        lb_instance_rules = list_lb_instances(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_instance_rules, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(lb_instance_rules),
            0,
            "Check Load Balancer instances Rule in its List"
        )
        self.debug("lb_instance_rules Ids: %s, %s" % (
            lb_instance_rules[0].id,
            lb_instance_rules[1].id
        ))
        self.debug("VM ids: %s, %s" % (self.vm_1.id, self.vm_2.id))
        self.assertIn(
            lb_instance_rules[0].id,
            [self.vm_1.id, self.vm_2.id],
            "Check List Load Balancer instances Rules returns valid VM ID"
        )
        self.assertIn(
            lb_instance_rules[1].id,
            [self.vm_1.id, self.vm_2.id],
            "Check List Load Balancer instances Rules returns valid VM ID"
        )
        # Five attempts so round-robin should hit both backends at least once.
        hostnames = []
        self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
        self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
        self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
        self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
        self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
        self.debug("Hostnames: %s" % str(hostnames))
        self.assertIn(
            self.vm_1.name,
            hostnames,
            "Check if ssh succeeded for server1"
        )
        self.assertIn(
            self.vm_2.name,
            hostnames,
            "Check if ssh succeeded for server2"
        )
        #SSH should pass till there is a last VM associated with LB rule
        lb_rule.remove(self.apiclient, [self.vm_2])
        # making hostnames list empty (in place, since try_ssh appends to it)
        hostnames[:] = []
        try:
            self.debug("SSHing into IP address: %s after removing VM (ID: %s)" %
                       (
                           src_nat_ip_addr.ipaddress,
                           self.vm_2.id
                       ))
            self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
            self.assertIn(
                self.vm_1.name,
                hostnames,
                "Check if ssh succeeded for server1"
            )
        except Exception as e:
            self.fail("%s: SSH failed for VM with IP Address: %s" %
                      (e, src_nat_ip_addr.ipaddress))
        lb_rule.remove(self.apiclient, [self.vm_1])
        # With no VMs left behind the rule, SSH must fail; try_ssh converts
        # that failure into self.fail(), which raises and satisfies the check.
        with self.assertRaises(Exception):
            self.debug("Removed all VMs, trying to SSH")
            self.try_ssh(src_nat_ip_addr.ipaddress, hostnames)
        return
    @attr(tags = ["advanced", "advancedns", "smoke"])
    def test_02_create_lb_rule_non_nat(self):
        """Test to create Load balancing rule with non source NAT"""
        # Validate the Following:
        #1. listLoadBalancerRules should return the added rule
        #2. attempt to ssh twice on the load balanced IP
        #3. verify using the hostname of the VM that
        #   round robin is indeed happening as expected
        #Create Load Balancer rule and assign VMs to rule
        # Same flow as test_01, but against the extra (non-source-NAT)
        # public IP acquired in setUpClass.
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            self.non_src_nat_ip.ipaddress.id,
            accountid=self.account.name
        )
        self.cleanup.append(lb_rule)
        lb_rule.assign(self.apiclient, [self.vm_1, self.vm_2])
        lb_rules = list_lb_rules(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_rules, list),
            True,
            "Check list response returns a valid list"
        )
        #verify listLoadBalancerRules lists the added load balancing rule
        self.assertNotEqual(
            len(lb_rules),
            0,
            "Check Load Balancer Rule in its List"
        )
        self.assertEqual(
            lb_rules[0].id,
            lb_rule.id,
            "Check List Load Balancer Rules returns valid Rule"
        )
        # listLoadBalancerRuleInstances should list
        # all instances associated with that LB rule
        lb_instance_rules = list_lb_instances(
            self.apiclient,
            id=lb_rule.id
        )
        self.assertEqual(
            isinstance(lb_instance_rules, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(lb_instance_rules),
            0,
            "Check Load Balancer instances Rule in its List"
        )
        self.assertIn(
            lb_instance_rules[0].id,
            [self.vm_1.id, self.vm_2.id],
            "Check List Load Balancer instances Rules returns valid VM ID"
        )
        self.assertIn(
            lb_instance_rules[1].id,
            [self.vm_1.id, self.vm_2.id],
            "Check List Load Balancer instances Rules returns valid VM ID"
        )
        try:
            # Five attempts so round-robin should hit both backends.
            hostnames = []
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
            self.debug("Hostnames: %s" % str(hostnames))
            self.assertIn(
                self.vm_1.name,
                hostnames,
                "Check if ssh succeeded for server1"
            )
            self.assertIn(
                self.vm_2.name,
                hostnames,
                "Check if ssh succeeded for server2"
            )
            #SSH should pass till there is a last VM associated with LB rule
            lb_rule.remove(self.apiclient, [self.vm_2])
            self.debug("SSHing into IP address: %s after removing VM (ID: %s) from LB rule" %
                       (
                           self.non_src_nat_ip.ipaddress.ipaddress,
                           self.vm_2.id
                       ))
            # Making host list empty (in place, since try_ssh appends to it)
            hostnames[:] = []
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
            self.assertIn(
                self.vm_1.name,
                hostnames,
                "Check if ssh succeeded for server1"
            )
            self.debug("Hostnames after removing VM2: %s" % str(hostnames))
        except Exception as e:
            self.fail("%s: SSH failed for VM with IP Address: %s" %
                      (e, self.non_src_nat_ip.ipaddress.ipaddress))
        lb_rule.remove(self.apiclient, [self.vm_1])
        # With no VMs left behind the rule, SSH must fail; try_ssh converts
        # that failure into self.fail(), which raises and satisfies the check.
        with self.assertRaises(Exception):
            self.debug("SSHing into IP address: %s after removing VM (ID: %s) from LB rule" %
                       (
                           self.non_src_nat_ip.ipaddress.ipaddress,
                           self.vm_1.id
                       ))
            self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
        return
@attr(tags = ["advanced", "advancedns", "smoke"])
def test_assign_and_removal_lb(self):
    """Test assigning and removing VMs from a load balancing rule.

    Validates:
    1. listLoadBalancerRules lists all the rules with the relevant ports
    2. listLoadBalancerInstances lists the instances associated with the
       corresponding rule
    3. SSH attempts keep succeeding as long as at least one instance is
       associated with the rule
    """
    # Check if VMs are in Running state before creating the LB rule
    vm_response = VirtualMachine.list(
        self.apiclient,
        account=self.account.name,
        domainid=self.account.domainid
    )
    self.assertEqual(
        isinstance(vm_response, list),
        True,
        "Check list VM returns a valid list"
    )
    self.assertNotEqual(
        len(vm_response),
        0,
        "Check Port Forwarding Rule is created"
    )
    for vm in vm_response:
        self.assertEqual(
            vm.state,
            'Running',
            "VM state should be Running before creating a NAT rule."
        )

    lb_rule = LoadBalancerRule.create(
        self.apiclient,
        self.services["lbrule"],
        self.non_src_nat_ip.ipaddress.id,
        self.account.name
    )
    lb_rule.assign(self.apiclient, [self.vm_1, self.vm_2])

    hostnames = []
    # SSH repeatedly so round-robin balancing has a chance to hit
    # every VM behind the rule; try_ssh appends each hostname seen.
    for _ in range(5):
        self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
    self.debug("Hostnames: %s" % str(hostnames))
    self.assertIn(
        self.vm_1.name,
        hostnames,
        "Check if ssh succeeded for server1"
    )
    self.assertIn(
        self.vm_2.name,
        hostnames,
        "Check if ssh succeeded for server2"
    )

    # Removing VM and assigning another VM to LB rule
    lb_rule.remove(self.apiclient, [self.vm_2])
    # making hostnames list empty
    hostnames[:] = []
    try:
        self.debug("SSHing again into IP address: %s with VM (ID: %s) added to LB rule" %
            (
             self.non_src_nat_ip.ipaddress.ipaddress,
             self.vm_1.id,
             ))
        self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
        self.assertIn(
            self.vm_1.name,
            hostnames,
            "Check if ssh succeeded for server1"
        )
    except Exception as e:
        # Include the underlying exception so failures are diagnosable
        self.fail("%s: SSH failed for VM with IP: %s" %
                  (e, self.non_src_nat_ip.ipaddress.ipaddress))

    lb_rule.assign(self.apiclient, [self.vm_3])
    # Making hostnames list empty
    hostnames[:] = []
    # Again SSH several times so both remaining VMs are observed
    for _ in range(5):
        self.try_ssh(self.non_src_nat_ip.ipaddress.ipaddress, hostnames)
    self.debug("Hostnames: %s" % str(hostnames))
    self.assertIn(
        self.vm_1.name,
        hostnames,
        "Check if ssh succeeded for server1"
    )
    self.assertIn(
        self.vm_3.name,
        hostnames,
        "Check if ssh succeeded for server3"
    )
    return
# coding: utf-8
# Copyright (C) 2014 by Ronnie Sahlberg <[email protected]>
# Copyright (C) 2015 by Markus Rosjat <[email protected]>
# SPDX-FileCopyrightText: 2014 The python-scsi Authors
#
# SPDX-License-Identifier: LGPL-2.1-or-later
import unittest
from pyscsi.pyscsi import scsi_enum_inquiry as INQUIRY
from pyscsi.pyscsi.scsi_cdb_inquiry import Inquiry
from pyscsi.pyscsi.scsi_enum_command import sbc
from pyscsi.utils.converter import scsi_int_to_ba
from .mock_device import MockDevice, MockSCSI
class MockInquiryStandard(MockDevice):
    """Mock device that fills datain with a fixed standard INQUIRY response."""

    def execute(self, cmd):
        flag_bytes = (
            0x25,  # QUAL:1 TYPE:5
            0x80,  # RMB:1
            0x07,  # VERSION:7
            0x23,  # NORMACA:1 HISUP:0 RDF:3
            0x40,  # ADDITIONAL LENGTH:64
            0xb9,  # SCCS:1 ACC:0 TGPS:3 3PC:1 PROTECT:1
            0x71,  # ENCSERV:1 VS:1 MULTIP:1 ADDR16:1
            0x33,  # WBUS16:1 SYNC:1 CMDQUE:1 VS2:1
        )
        for offset, value in enumerate(flag_bytes):
            cmd.datain[offset] = value
        cmd.datain[8:16] = bytearray(b'abcdefgh')           # t10 vendor id
        cmd.datain[16:32] = bytearray(b'iiiiiiiijjjjjjjj')  # product id
        cmd.datain[32:36] = bytearray(b'revn')              # product revision level
        cmd.datain[56] = 0x09  # CLOCKING:2 QAS:0 IUS:1
class MockLBP(MockDevice):
    """Mock device returning a Logical Block Provisioning VPD page."""

    def execute(self, cmd):
        page = (
            0x00,  # QUAL:0 TYPE:0
            0xb2,  # logical block provisioning page code
            0x00,  #
            0x04,  # page length == 4
            0x12,  # threshold exponent
            0xe7,  # LBPU:1 LBPWS:1 LBPWS10:1 LBPRZ:1 ANC_SUP:1 DP:1
            0x02,  # Provisioning Type:2
            0x00,  #
        )
        for offset, value in enumerate(page):
            cmd.datain[offset] = value
class MockUSN(MockDevice):
    """Mock device returning a Unit Serial Number VPD page."""

    def execute(self, cmd):
        header = (
            0x00,  # QUAL:0 TYPE:0
            0x80,  # unit serial number page code
            0x00,  #
            0x04,  # page length == 4
        )
        for offset, value in enumerate(header):
            cmd.datain[offset] = value
        cmd.datain[4:8] = b"ABCD"  # the serial number itself
class MockDevId(MockDevice):
    """Mock device returning a Device Identification VPD page containing two
    designation descriptors: a T10 vendor id (ASCII) and an EUI-64 (binary).
    """

    def execute(self, cmd):
        cmd.datain[0] = 0x00  # QUAL:0 TYPE:0
        cmd.datain[1] = 0x83  # device identifier page code
        cmd.datain[2] = 0x00  # page length, patched at the end
        cmd.datain[3] = 0x00
        pos = 4

        # Designation Descriptor: T10_VENDOR_ID
        t10 = bytearray(b'Test T10')
        dd = bytearray(4)
        dd += t10
        dd[0] = 0x52  # iSCSI, ASCII
        dd[1] = 0xa1  # AssociatedWithTargetDevice, T10_VENDOR_ID
        dd[3] = len(t10)  # designator length
        cmd.datain[pos:pos + len(dd)] = dd
        pos += len(dd)

        # Designation Descriptor: EUI-64, 8 byte version
        eui = bytearray(8)
        # IEEE company identifier
        eui[0] = 0x11
        eui[1] = 0x22
        eui[2] = 0x33
        # vendor specific extension id
        eui[3:8] = b'abcde'
        dd = bytearray(4)
        dd += eui
        dd[0] = 0x01  # BINARY
        dd[1] = 0x22  # AssociatedWithTargetDevice, EUI-64
        # BUGFIX: the length field must describe this EUI designator, not the
        # earlier t10 buffer. Both happen to be 8 bytes so the emitted page is
        # unchanged, but the wrong variable was referenced.
        dd[2:4] = scsi_int_to_ba(len(eui), 2)
        cmd.datain[pos:pos + len(dd)] = dd
        pos += len(dd)

        # Patch the overall page length now that both descriptors are written
        cmd.datain[2:4] = scsi_int_to_ba(pos - 4, 2)
class MockReferrals(MockDevice):
    """Mock device returning a Referrals VPD page."""

    def execute(self, cmd):
        page = (
            (0, 0x00),   # QUAL:0 TYPE:0
            (1, 0xb3),   # referrals page code
            (2, 0x00),   #
            (3, 0x0c),   # page length: 12
            (11, 23),    # user data segment size
            (15, 37),    # user data segment multiplier
        )
        for offset, value in page:
            cmd.datain[offset] = value
class MockExtendedInquiry(MockDevice):
    """Mock device returning an Extended INQUIRY Data VPD page."""

    def execute(self, cmd):
        page = (
            (0, 0x00),   # QUAL:0 TYPE:0
            (1, 0x86),   # extended inquiry page code
            (2, 0x00),   #
            (3, 0x3c),   # page length: 60
            (4, 0x57),   # activate microcode:1 spt:2 grd_chk:1
                         # app_chk:1 ref_chk:1
            (5, 0x33),   # uask_sup:1 group_sup:1 prior_sup:0 headsup:0
                         # ordsup:1 simpsup:1
            (6, 0x05),   # wu_sup:0 crd_sup:1 nv_sup:0 v_sup:1
            (7, 0x11),   # p_i_i_sup:1 luiclr:1
            (8, 0x11),   # r_sup:1 cbcs:1
            (9, 0x03),   # multi...:3
            (11, 0x0f),  # extended...:15
            (12, 0xe0),  # poa_sup:1 hra_sup:1 vsa_sup:1
            (13, 0x05),  # maximum...:5
        )
        for offset, value in page:
            cmd.datain[offset] = value
class UnmarshallInquiryTest(unittest.TestCase):
    """Round-trip tests for INQUIRY unmarshalling.

    Each mock page is unmarshalled and checked field by field, then
    marshalled and unmarshalled again to verify the two directions are
    symmetric.
    """

    def test_main(self):
        with MockSCSI(MockInquiryStandard(sbc)) as s:
            cmd = s.inquiry()
            i = cmd.result
            self.assertEqual(i['peripheral_qualifier'], 1)
            self.assertEqual(i['peripheral_device_type'], 5)
            self.assertEqual(i['rmb'], 1)
            self.assertEqual(i['version'], 7)
            self.assertEqual(i['normaca'], 1)
            self.assertEqual(i['hisup'], 0)
            self.assertEqual(i['response_data_format'], 3)
            self.assertEqual(i['additional_length'], 64)
            self.assertEqual(i['sccs'], 1)
            self.assertEqual(i['acc'], 0)
            self.assertEqual(i['tpgs'], 3)
            self.assertEqual(i['3pc'], 1)
            self.assertEqual(i['protect'], 1)
            self.assertEqual(i['encserv'], 1)
            self.assertEqual(i['vs'], 1)
            self.assertEqual(i['multip'], 1)
            self.assertEqual(i['addr16'], 1)
            self.assertEqual(i['wbus16'], 1)
            self.assertEqual(i['sync'], 1)
            self.assertEqual(i['cmdque'], 1)
            self.assertEqual(i['vs2'], 1)
            self.assertEqual(i['clocking'], 2)
            self.assertEqual(i['qas'], 0)
            self.assertEqual(i['ius'], 1)
            self.assertEqual(i['t10_vendor_identification'].decode("utf-8"), 'abcdefgh')
            self.assertEqual(i['product_identification'].decode("utf-8"), 'iiiiiiiijjjjjjjj')
            self.assertEqual(i['product_revision_level'].decode("utf-8"), 'revn')

            d = Inquiry.unmarshall_datain(Inquiry.marshall_datain(i))
            self.assertEqual(d, i)

        with MockSCSI(MockLBP(sbc)) as s:
            cmd = s.inquiry(evpd=1, page_code=INQUIRY.VPD.LOGICAL_BLOCK_PROVISIONING)
            i = cmd.result
            self.assertEqual(i['peripheral_qualifier'], 0)
            # was a copy-pasted duplicate of the qualifier check; the mock's
            # TYPE field is 0, so verify the device type as intended
            self.assertEqual(i['peripheral_device_type'], 0)
            self.assertEqual(i['threshold_exponent'], 0x12)
            self.assertEqual(i['lbpu'], 1)
            self.assertEqual(i['lpbws'], 1)
            self.assertEqual(i['lbpws10'], 1)
            self.assertEqual(i['lbprz'], 1)
            self.assertEqual(i['anc_sup'], 1)
            self.assertEqual(i['dp'], 1)
            self.assertEqual(i['provisioning_type'], INQUIRY.PROVISIONING_TYPE.THIN_PROVISIONED)

            d = Inquiry.unmarshall_datain(Inquiry.marshall_datain(i), evpd=1)
            self.assertEqual(d, i)

        with MockSCSI(MockUSN(sbc)) as s:
            cmd = s.inquiry(evpd=1, page_code=INQUIRY.VPD.UNIT_SERIAL_NUMBER)
            i = cmd.result
            self.assertEqual(i['peripheral_qualifier'], 0)
            # was a duplicated qualifier check; verify device type instead
            self.assertEqual(i['peripheral_device_type'], 0)
            self.assertEqual(i['unit_serial_number'].decode("utf-8"), "ABCD")

            d = Inquiry.unmarshall_datain(Inquiry.marshall_datain(i), evpd=1)
            self.assertEqual(d, i)

        with MockSCSI(MockReferrals(sbc)) as s:
            cmd = s.inquiry(evpd=1, page_code=INQUIRY.VPD.REFERRALS)
            i = cmd.result
            self.assertEqual(i['peripheral_qualifier'], 0)
            # was a duplicated qualifier check; verify device type instead
            self.assertEqual(i['peripheral_device_type'], 0)
            self.assertEqual(i['user_data_segment_size'], 23)
            self.assertEqual(i['user_data_segment_multiplier'], 37)

            d = Inquiry.unmarshall_datain(Inquiry.marshall_datain(i), evpd=1)
            self.assertEqual(d, i)

        with MockSCSI(MockExtendedInquiry(sbc)) as s:
            cmd = s.inquiry(evpd=1, page_code=INQUIRY.VPD.EXTENDED_INQUIRY_DATA)
            i = cmd.result
            self.assertEqual(i['peripheral_qualifier'], 0)
            # was a duplicated qualifier check; verify device type instead
            self.assertEqual(i['peripheral_device_type'], 0)
            self.assertEqual(i['activate_microcode'], 1)
            self.assertEqual(i['spt'], 2)
            self.assertEqual(i['grd_chk'], 1)
            self.assertEqual(i['app_chk'], 1)
            self.assertEqual(i['ref_chk'], 1)
            self.assertEqual(i['uask_sup'], 1)
            self.assertEqual(i['group_sup'], 1)
            self.assertEqual(i['prior_sup'], 0)
            self.assertEqual(i['headsup'], 0)
            self.assertEqual(i['ordsup'], 1)
            self.assertEqual(i['simpsup'], 1)
            self.assertEqual(i['wu_sup'], 0)
            self.assertEqual(i['crd_sup'], 1)
            self.assertEqual(i['nv_sup'], 0)
            self.assertEqual(i['v_sup'], 1)
            self.assertEqual(i['p_i_i_sup'], 1)
            self.assertEqual(i['luiclr'], 1)
            self.assertEqual(i['r_sup'], 1)
            self.assertEqual(i['cbcs'], 1)
            self.assertEqual(i['multi_it_nexus_microcode_download'], 3)
            self.assertEqual(i['extended_self_test_completion_minutes'], 15)
            self.assertEqual(i['poa_sup'], 1)
            self.assertEqual(i['hra_sup'], 1)
            self.assertEqual(i['vsa_sup'], 1)
            self.assertEqual(i['maximum_supported_sense_data_length'], 5)

            d = Inquiry.unmarshall_datain(Inquiry.marshall_datain(i), evpd=1)
            self.assertEqual(d, i)

            s.device = MockDevId(sbc)
            cmd = s.inquiry(evpd=1, page_code=INQUIRY.VPD.DEVICE_IDENTIFICATION)
            i = cmd.result
            self.assertEqual(i['peripheral_qualifier'], 0)
            # was a duplicated qualifier check; verify device type instead
            self.assertEqual(i['peripheral_device_type'], 0)
            dd = i['designator_descriptors']
            self.assertEqual(len(dd), 2)
            # T10 designation descriptor
            self.assertEqual(dd[0]['association'], 2)
            self.assertEqual(dd[0]['code_set'], 2)
            self.assertEqual(dd[0]['designator_length'], 8)
            self.assertEqual(dd[0]['designator_type'], 1)
            self.assertEqual(dd[0]['piv'], 1)
            self.assertEqual(dd[0]['protocol_identifier'], 5)
            self.assertEqual(dd[0]['designator']['t10_vendor_id'].decode("utf-8"), 'Test T10')
            self.assertEqual(dd[0]['designator']['vendor_specific_id'].decode("utf-8"), '')
            # EUI-64 designation descriptor
            self.assertEqual(dd[1]['association'], 2)
            self.assertEqual(dd[1]['code_set'], 1)
            self.assertEqual(dd[1]['designator_length'], 8)
            self.assertEqual(dd[1]['designator_type'], 2)
            self.assertEqual(dd[1]['piv'], 0)
            # NOTE(review): dd[1] is a dict, so hasattr() is always False here
            # and this assertion is vacuous. It probably means
            # "'protocol_identifier' not in dd[1]" — confirm against the
            # unmarshaller's behavior for piv == 0 before tightening.
            self.assertFalse(hasattr(dd[1], 'protocol_identifier'))
            self.assertEqual(dd[1]['designator']['ieee_company_id'], 0x112233)
            self.assertEqual(dd[1]['designator']['vendor_specific_extension_id'].decode("utf-8"), 'abcde')

            d = Inquiry.unmarshall_datain(Inquiry.marshall_datain(i), evpd=1)
            self.assertEqual(d, i)
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.