<div class="panel category-panel">
<div class="panel-heading category-head">
<a href="{{ category.url }}">{{ category.title }}</a>
</div>
<div class="panel-body category-body">
<div class="category-meta">
<div class="col-md-5 col-sm-5 col-xs-8 forum-name">{% trans %}Forum{% endtrans %}</div>
<div class="col-md-2 col-sm-2 hidden-xs forum-stats">{% trans %}Topics{% endtrans %}</div>
<div class="col-md-2 col-sm-2 hidden-xs forum-stats">{% trans %}Posts{% endtrans %}</div>
<div class="col-md-3 col-sm-3 col-xs-4 forum-last-post">{% trans %}Last Post{% endtrans %}</div>
</div>
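{# Each loop entry pairs a forum with the current user's forumsread tracking record; forumsread is used below by forum_is_unread to pick the read/unread icon. Forums flagged as external are rendered as plain links without topic/post counts. #}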
{% for forum, forumsread in forums %}
<div class="row category-row hover">
{% if forum.external %}
<div class="col-md-5 col-sm-5 col-xs-8 forum-info">
<div class="row">
<!-- Icon -->
<div class="col-md-1 col-sm-2 col-xs-2 forum-status">
<span class="fa fa-external-link fa-fw forum-external"></span>
</div>
<div class="col-md-11 col-sm-10 col-xs-10">
<!-- Forum Name -->
<div class="forum-name">
<span class="forum-link">{% trans %}Link to{% endtrans %}:</span> <a href="{{ forum.url }}">{{ forum.title }}</a>
</div>
<!-- Forum Description -->
<div class="forum-description">
{{ forum.description|nonpost_markup }}
</div>
</div>
</div>
</div> <!-- end forum-info -->
<!-- Topic Count -->
<div class="col-md-2 col-sm-2 hidden-xs forum-topics">
-
</div>
<!-- Post Count -->
<div class="col-md-2 col-sm-2 hidden-xs forum-posts">
-
</div>
<!-- Last Post -->
<div class="col-md-3 col-sm-3 col-xs-4 forum-last-post">
---
</div>
{% else %}
<div class="col-md-5 col-sm-5 col-xs-8 forum-info">
<div class="row">
<!-- Icon -->
<div class="col-md-1 col-sm-2 col-xs-2 forum-status">
{% if forum.locked %}
<span class="fa fa-lock fa-fw forum-locked"></span>
{% elif forum|forum_is_unread(forumsread, current_user) %}
<span class="fa fa-comments fa-fw forum-unread"></span>
{% else %}
<span class="fa fa-comments-o fa-fw forum-read"></span>
{% endif %}
</div>
<div class="col-md-11 col-sm-10 col-xs-10">
<!-- Forum Name -->
<div class="forum-name">
<a href="{{ forum.url }}">{{ forum.title }}</a>
</div>
<!-- Forum Description -->
<div class="forum-description">
{{ forum.description|nonpost_markup }}
</div>
<!-- Forum Moderators -->
{% if forum.show_moderators %}
<div class="forum-moderators">
{% trans %}Moderators{% endtrans %}:
{% for moderator in forum.moderators %}
<a href="{{ url_for('user.profile', username=moderator.username) }}">{{ moderator.username }}</a>{% if not loop.last %}, {% endif %}
{% endfor %}
</div>
{% endif %}
</div>
</div>
</div> <!-- end forum-info -->
<!-- Topic Count -->
<div class="col-md-2 col-sm-2 hidden-xs forum-topics">
{{ forum.topic_count }}
</div>
<!-- Post Count -->
<div class="col-md-2 col-sm-2 hidden-xs forum-posts">
{{ forum.post_count }}
</div>
<!-- Last Post -->
<div class="col-md-3 col-sm-3 col-xs-4 forum-last-post">
{% if forum.last_post_id %}
<div class="last-post-title">
<a href="{{ forum.last_post_url }}" title="{{ forum.last_post_title }}">
{{ forum.last_post_title|crop_title }}
</a>
</div>
<div class="last-post-time">
{{ forum.last_post_created|time_since }}
</div>
<div class="last-post-author">
{% trans %}by{% endtrans %}
{% if forum.last_post_user_id %}
<a href="{{ url_for('user.profile', username=forum.last_post_username) }}">{{ forum.last_post_username }}</a>
{% else %}
{{ forum.last_post_username }}
{% endif %}
</div>
{% else %}
{% trans %}No posts.{% endtrans %}
{% endif %} {# endif forum.last_post_id #}
</div>
{% endif %} {# endif forum.external #}
</div> <!-- end category-row -->
{% endfor %}
</div>
</div>
(function(){
'use strict';
describe('Module angularSails.base', function () {
it('should be registered', function() {
var module = angular.module('angularSails.base');
expect(module).not.toBeNull();
});
beforeEach(module('angularSails.base'));
describe('$sailsRef service api', function () {
// Mock socket service.
beforeEach(module(function ($provide) {
$provide.factory('angularSailsSocket', ['$q', function ($q) {
var collectionData = [{id:1}, {id: 2}, {id: 3}];
var deferred = $q.defer();
return {
get: function() {
deferred.resolve(collectionData);
return deferred.promise;
},
post: function() {
collectionData = [{id:1}, {id: 2}, {id: 3}, {id: 4}];
deferred.resolve(collectionData);
return deferred.promise;
},
put: function(path, data) {
deferred.resolve(data);
return deferred.promise;
},
delete: function() {
return deferred.promise;
},
on: function () {}
};
}]);
}));
var $sailsRef, $rootScope, collection;
beforeEach(inject(function (_$sailsRef_, _$rootScope_) {
$sailsRef = _$sailsRef_;
$rootScope = _$rootScope_;
collection = $sailsRef('/comment');
$rootScope.$apply();
}));
it('should create a collection object from a resource', function () {
expect(angular.isObject(collection)).toBe(true);
expect(collection.$add).toBeDefined();
expect(collection.$update).toBeDefined();
expect(collection.$remove).toBeDefined();
expect(collection.$save).toBeDefined();
});
it('should contain models that are model objects', function () {
var model = collection['1'];
expect(angular.isObject(model)).toBe(true);
expect(model.$update).toBeDefined();
expect(model.$remove).toBeDefined();
});
it('should add a model to the collection when using $add', function () {
collection.$add({id: 4});
$rootScope.$apply();
var newModel = collection['4'];
expect(newModel).toBeDefined();
expect(angular.isObject(newModel)).toBe(true);
expect(newModel.$update).toBeDefined();
expect(newModel.$remove).toBeDefined();
});
it('should update the model in the collection when using $update', function () {
var collection = $sailsRef('/comment');
$rootScope.$apply();
var modelToUpdate = collection['1'];
modelToUpdate.name = 'Greg';
collection.$update(modelToUpdate);
$rootScope.$apply();
expect(collection['1'].name).toEqual('Greg');
});
});
describe('angular sails filter', function () {
var filter, data, result;
beforeEach(inject(function ($filter) {
filter = $filter('collectionToArray');
data = {
'1': {id: 1},
'4': {id: 10},
'3': {id: 5},
'2': {id: 8}
};
result = filter(data);
}));
it('should map an object to an array of objects', function () {
expect(angular.isArray(result)).toBe(true);
expect(angular.isObject(result[0])).toBe(true);
});
it('should be mutable by other angular filters', inject( function ($filter) {
var orderByResult = $filter('orderBy')(result, 'id');
expect(orderByResult).toEqual([
{id: 1},
{id: 5},
{id: 8},
{id: 10}
]);
}));
});
});
}());
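// Illustrative sketch only, not the library's actual source: a 'collectionToArray'
// filter with the behaviour the spec above asserts. It maps a keyed collection
// object to a plain array of its model objects (skipping $-prefixed helper
// functions such as $add/$update) so stock filters like orderBy can operate on it.
// Assumes the 'angularSails.base' module is already defined, as the spec does.
angular.module('angularSails.base').filter('collectionToArray', function () {
  return function (collection) {
    var models = [];
    angular.forEach(collection, function (value) {
      // angular.isObject() is false for the collection's helper functions,
      // so only the model objects end up in the resulting array.
      if (angular.isObject(value)) {
        models.push(value);
      }
    });
    return models;
  };
});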
<!DOCTYPE html>
<html>
{% include head.html %}
<body>
{% include header.html %}
<div class="page-content">
{{ content }}
</div>
{% include footer.html %}
</body>
</html>
/*! jQuery UI - v1.11.0 - 2014-06-26
* http://jqueryui.com
* Includes: core.css, accordion.css, autocomplete.css, button.css, datepicker.css, dialog.css, draggable.css, menu.css, progressbar.css, resizable.css, selectable.css, selectmenu.css, slider.css, sortable.css, spinner.css, tabs.css, tooltip.css, theme.css
* To view and modify this theme, visit http://jqueryui.com/themeroller/?ffDefault=Gill%20Sans%2CArial%2Csans-serif&fwDefault=bold&fsDefault=1.2em&cornerRadius=4px&bgColorHeader=35414f&bgTextureHeader=dots_small&bgImgOpacityHeader=35&borderColorHeader=2c4359&fcHeader=e1e463&iconColorHeader=e1e463&bgColorContent=ffffff&bgTextureContent=flat&bgImgOpacityContent=75&borderColorContent=aaaaaa&fcContent=2c4359&iconColorContent=c02669&bgColorDefault=93c3cd&bgTextureDefault=diagonals_small&bgImgOpacityDefault=50&borderColorDefault=93c3cd&fcDefault=333333&iconColorDefault=ffffff&bgColorHover=ccd232&bgTextureHover=diagonals_small&bgImgOpacityHover=75&borderColorHover=999999&fcHover=212121&iconColorHover=454545&bgColorActive=db4865&bgTextureActive=diagonals_small&bgImgOpacityActive=40&borderColorActive=ff6b7f&fcActive=ffffff&iconColorActive=ffffff&bgColorHighlight=ffff38&bgTextureHighlight=dots_medium&bgImgOpacityHighlight=80&borderColorHighlight=b4d100&fcHighlight=363636&iconColorHighlight=88a206&bgColorError=ff3853&bgTextureError=diagonals_small&bgImgOpacityError=50&borderColorError=ff6b7f&fcError=ffffff&iconColorError=ffeb33&bgColorOverlay=f7f7ba&bgTextureOverlay=white_lines&bgImgOpacityOverlay=85&opacityOverlay=80&bgColorShadow=ba9217&bgTextureShadow=flat&bgImgOpacityShadow=75&opacityShadow=20&thicknessShadow=10px&offsetTopShadow=8px&offsetLeftShadow=8px&cornerRadiusShadow=5px
* Copyright 2014 jQuery Foundation and other contributors; Licensed MIT */
/* Layout helpers
----------------------------------*/
.ui-helper-hidden {
display: none;
}
.ui-helper-hidden-accessible {
border: 0;
clip: rect(0 0 0 0);
height: 1px;
margin: -1px;
overflow: hidden;
padding: 0;
position: absolute;
width: 1px;
}
.ui-helper-reset {
margin: 0;
padding: 0;
border: 0;
outline: 0;
line-height: 1.3;
text-decoration: none;
font-size: 100%;
list-style: none;
}
.ui-helper-clearfix:before,
.ui-helper-clearfix:after {
content: "";
display: table;
border-collapse: collapse;
}
.ui-helper-clearfix:after {
clear: both;
}
.ui-helper-clearfix {
min-height: 0; /* support: IE7 */
}
.ui-helper-zfix {
width: 100%;
height: 100%;
top: 0;
left: 0;
position: absolute;
opacity: 0;
filter:Alpha(Opacity=0);
}
.ui-front {
z-index: 100;
}
/* Interaction Cues
----------------------------------*/
.ui-state-disabled {
cursor: default !important;
}
/* Icons
----------------------------------*/
/* states and images */
.ui-icon {
display: block;
text-indent: -99999px;
overflow: hidden;
background-repeat: no-repeat;
}
/* Misc visuals
----------------------------------*/
/* Overlays */
.ui-widget-overlay {
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
}
.ui-accordion .ui-accordion-header {
display: block;
cursor: pointer;
position: relative;
margin: 2px 0 0 0;
padding: .5em .5em .5em .7em;
min-height: 0; /* support: IE7 */
font-size: 100%;
}
.ui-accordion .ui-accordion-icons {
padding-left: 2.2em;
}
.ui-accordion .ui-accordion-icons .ui-accordion-icons {
padding-left: 2.2em;
}
.ui-accordion .ui-accordion-header .ui-accordion-header-icon {
position: absolute;
left: .5em;
top: 50%;
margin-top: -8px;
}
.ui-accordion .ui-accordion-content {
padding: 1em 2.2em;
border-top: 0;
overflow: auto;
}
.ui-autocomplete {
position: absolute;
top: 0;
left: 0;
cursor: default;
}
.ui-button {
display: inline-block;
position: relative;
padding: 0;
line-height: normal;
margin-right: .1em;
cursor: pointer;
vertical-align: middle;
text-align: center;
overflow: visible; /* removes extra width in IE */
}
.ui-button,
.ui-button:link,
.ui-button:visited,
.ui-button:hover,
.ui-button:active {
text-decoration: none;
}
/* to make room for the icon, a width needs to be set here */
.ui-button-icon-only {
width: 2.2em;
}
/* button elements seem to need a little more width */
button.ui-button-icon-only {
width: 2.4em;
}
.ui-button-icons-only {
width: 3.4em;
}
button.ui-button-icons-only {
width: 3.7em;
}
/* button text element */
.ui-button .ui-button-text {
display: block;
line-height: normal;
}
.ui-button-text-only .ui-button-text {
padding: .4em 1em;
}
.ui-button-icon-only .ui-button-text,
.ui-button-icons-only .ui-button-text {
padding: .4em;
text-indent: -9999999px;
}
.ui-button-text-icon-primary .ui-button-text,
.ui-button-text-icons .ui-button-text {
padding: .4em 1em .4em 2.1em;
}
.ui-button-text-icon-secondary .ui-button-text,
.ui-button-text-icons .ui-button-text {
padding: .4em 2.1em .4em 1em;
}
.ui-button-text-icons .ui-button-text {
padding-left: 2.1em;
padding-right: 2.1em;
}
/* no icon support for input elements, provide padding by default */
input.ui-button {
padding: .4em 1em;
}
/* button icon element(s) */
.ui-button-icon-only .ui-icon,
.ui-button-text-icon-primary .ui-icon,
.ui-button-text-icon-secondary .ui-icon,
.ui-button-text-icons .ui-icon,
.ui-button-icons-only .ui-icon {
position: absolute;
top: 50%;
margin-top: -8px;
}
.ui-button-icon-only .ui-icon {
left: 50%;
margin-left: -8px;
}
.ui-button-text-icon-primary .ui-button-icon-primary,
.ui-button-text-icons .ui-button-icon-primary,
.ui-button-icons-only .ui-button-icon-primary {
left: .5em;
}
.ui-button-text-icon-secondary .ui-button-icon-secondary,
.ui-button-text-icons .ui-button-icon-secondary,
.ui-button-icons-only .ui-button-icon-secondary {
right: .5em;
}
/* button sets */
.ui-buttonset {
margin-right: 7px;
}
.ui-buttonset .ui-button {
margin-left: 0;
margin-right: -.3em;
}
/* workarounds */
/* reset extra padding in Firefox, see h5bp.com/l */
input.ui-button::-moz-focus-inner,
button.ui-button::-moz-focus-inner {
border: 0;
padding: 0;
}
.ui-datepicker {
width: 17em;
padding: .2em .2em 0;
display: none;
}
.ui-datepicker .ui-datepicker-header {
position: relative;
padding: .2em 0;
}
.ui-datepicker .ui-datepicker-prev,
.ui-datepicker .ui-datepicker-next {
position: absolute;
top: 2px;
width: 1.8em;
height: 1.8em;
}
.ui-datepicker .ui-datepicker-prev-hover,
.ui-datepicker .ui-datepicker-next-hover {
top: 1px;
}
.ui-datepicker .ui-datepicker-prev {
left: 2px;
}
.ui-datepicker .ui-datepicker-next {
right: 2px;
}
.ui-datepicker .ui-datepicker-prev-hover {
left: 1px;
}
.ui-datepicker .ui-datepicker-next-hover {
right: 1px;
}
.ui-datepicker .ui-datepicker-prev span,
.ui-datepicker .ui-datepicker-next span {
display: block;
position: absolute;
left: 50%;
margin-left: -8px;
top: 50%;
margin-top: -8px;
}
.ui-datepicker .ui-datepicker-title {
margin: 0 2.3em;
line-height: 1.8em;
text-align: center;
}
.ui-datepicker .ui-datepicker-title select {
font-size: 1em;
margin: 1px 0;
}
.ui-datepicker select.ui-datepicker-month,
.ui-datepicker select.ui-datepicker-year {
width: 49%;
}
.ui-datepicker table {
width: 100%;
font-size: .9em;
border-collapse: collapse;
margin: 0 0 .4em;
}
.ui-datepicker th {
padding: .7em .3em;
text-align: center;
font-weight: bold;
border: 0;
}
.ui-datepicker td {
border: 0;
padding: 1px;
}
.ui-datepicker td span,
.ui-datepicker td a {
display: block;
padding: .2em;
text-align: right;
text-decoration: none;
}
.ui-datepicker .ui-datepicker-buttonpane {
background-image: none;
margin: .7em 0 0 0;
padding: 0 .2em;
border-left: 0;
border-right: 0;
border-bottom: 0;
}
.ui-datepicker .ui-datepicker-buttonpane button {
float: right;
margin: .5em .2em .4em;
cursor: pointer;
padding: .2em .6em .3em .6em;
width: auto;
overflow: visible;
}
.ui-datepicker .ui-datepicker-buttonpane button.ui-datepicker-current {
float: left;
}
/* with multiple calendars */
.ui-datepicker.ui-datepicker-multi {
width: auto;
}
.ui-datepicker-multi .ui-datepicker-group {
float: left;
}
.ui-datepicker-multi .ui-datepicker-group table {
width: 95%;
margin: 0 auto .4em;
}
.ui-datepicker-multi-2 .ui-datepicker-group {
width: 50%;
}
.ui-datepicker-multi-3 .ui-datepicker-group {
width: 33.3%;
}
.ui-datepicker-multi-4 .ui-datepicker-group {
width: 25%;
}
.ui-datepicker-multi .ui-datepicker-group-last .ui-datepicker-header,
.ui-datepicker-multi .ui-datepicker-group-middle .ui-datepicker-header {
border-left-width: 0;
}
.ui-datepicker-multi .ui-datepicker-buttonpane {
clear: left;
}
.ui-datepicker-row-break {
clear: both;
width: 100%;
font-size: 0;
}
/* RTL support */
.ui-datepicker-rtl {
direction: rtl;
}
.ui-datepicker-rtl .ui-datepicker-prev {
right: 2px;
left: auto;
}
.ui-datepicker-rtl .ui-datepicker-next {
left: 2px;
right: auto;
}
.ui-datepicker-rtl .ui-datepicker-prev:hover {
right: 1px;
left: auto;
}
.ui-datepicker-rtl .ui-datepicker-next:hover {
left: 1px;
right: auto;
}
.ui-datepicker-rtl .ui-datepicker-buttonpane {
clear: right;
}
.ui-datepicker-rtl .ui-datepicker-buttonpane button {
float: left;
}
.ui-datepicker-rtl .ui-datepicker-buttonpane button.ui-datepicker-current,
.ui-datepicker-rtl .ui-datepicker-group {
float: right;
}
.ui-datepicker-rtl .ui-datepicker-group-last .ui-datepicker-header,
.ui-datepicker-rtl .ui-datepicker-group-middle .ui-datepicker-header {
border-right-width: 0;
border-left-width: 1px;
}
.ui-dialog {
overflow: hidden;
position: absolute;
top: 0;
left: 0;
padding: .2em;
outline: 0;
}
.ui-dialog .ui-dialog-titlebar {
padding: .4em 1em;
position: relative;
}
.ui-dialog .ui-dialog-title {
float: left;
margin: .1em 0;
white-space: nowrap;
width: 90%;
overflow: hidden;
text-overflow: ellipsis;
}
.ui-dialog .ui-dialog-titlebar-close {
position: absolute;
right: .3em;
top: 50%;
width: 20px;
margin: -10px 0 0 0;
padding: 1px;
height: 20px;
}
.ui-dialog .ui-dialog-content {
position: relative;
border: 0;
padding: .5em 1em;
background: none;
overflow: auto;
}
.ui-dialog .ui-dialog-buttonpane {
text-align: left;
border-width: 1px 0 0 0;
background-image: none;
margin-top: .5em;
padding: .3em 1em .5em .4em;
}
.ui-dialog .ui-dialog-buttonpane .ui-dialog-buttonset {
float: right;
}
.ui-dialog .ui-dialog-buttonpane button {
margin: .5em .4em .5em 0;
cursor: pointer;
}
.ui-dialog .ui-resizable-se {
width: 12px;
height: 12px;
right: -5px;
bottom: -5px;
background-position: 16px 16px;
}
.ui-draggable .ui-dialog-titlebar {
cursor: move;
}
.ui-draggable-handle {
-ms-touch-action: none;
touch-action: none;
}
.ui-menu {
list-style: none;
padding: 0;
margin: 0;
display: block;
outline: none;
}
.ui-menu .ui-menu {
position: absolute;
}
.ui-menu .ui-menu-item {
position: relative;
margin: 0;
padding: 3px 1em 3px .4em;
cursor: pointer;
min-height: 0; /* support: IE7 */
/* support: IE10, see #8844 */
list-style-image: url("data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7");
}
.ui-menu .ui-menu-divider {
margin: 5px 0;
height: 0;
font-size: 0;
line-height: 0;
border-width: 1px 0 0 0;
}
.ui-menu .ui-state-focus,
.ui-menu .ui-state-active {
margin: -1px;
}
/* icon support */
.ui-menu-icons {
position: relative;
}
.ui-menu-icons .ui-menu-item {
padding-left: 2em;
}
/* left-aligned */
.ui-menu .ui-icon {
position: absolute;
top: 0;
bottom: 0;
left: .2em;
margin: auto 0;
}
/* right-aligned */
.ui-menu .ui-menu-icon {
left: auto;
right: 0;
}
.ui-progressbar {
height: 2em;
text-align: left;
overflow: hidden;
}
.ui-progressbar .ui-progressbar-value {
margin: -1px;
height: 100%;
}
.ui-progressbar .ui-progressbar-overlay {
background: url("images/animated-overlay.gif");
height: 100%;
filter: alpha(opacity=25);
opacity: 0.25;
}
.ui-progressbar-indeterminate .ui-progressbar-value {
background-image: none;
}
.ui-resizable {
position: relative;
}
.ui-resizable-handle {
position: absolute;
font-size: 0.1px;
display: block;
-ms-touch-action: none;
touch-action: none;
}
.ui-resizable-disabled .ui-resizable-handle,
.ui-resizable-autohide .ui-resizable-handle {
display: none;
}
.ui-resizable-n {
cursor: n-resize;
height: 7px;
width: 100%;
top: -5px;
left: 0;
}
.ui-resizable-s {
cursor: s-resize;
height: 7px;
width: 100%;
bottom: -5px;
left: 0;
}
.ui-resizable-e {
cursor: e-resize;
width: 7px;
right: -5px;
top: 0;
height: 100%;
}
.ui-resizable-w {
cursor: w-resize;
width: 7px;
left: -5px;
top: 0;
height: 100%;
}
.ui-resizable-se {
cursor: se-resize;
width: 12px;
height: 12px;
right: 1px;
bottom: 1px;
}
.ui-resizable-sw {
cursor: sw-resize;
width: 9px;
height: 9px;
left: -5px;
bottom: -5px;
}
.ui-resizable-nw {
cursor: nw-resize;
width: 9px;
height: 9px;
left: -5px;
top: -5px;
}
.ui-resizable-ne {
cursor: ne-resize;
width: 9px;
height: 9px;
right: -5px;
top: -5px;
}
.ui-selectable {
-ms-touch-action: none;
touch-action: none;
}
.ui-selectable-helper {
position: absolute;
z-index: 100;
border: 1px dotted black;
}
.ui-selectmenu-menu {
padding: 0;
margin: 0;
position: absolute;
top: 0;
left: 0;
display: none;
}
.ui-selectmenu-menu .ui-menu {
overflow: auto;
/* Support: IE7 */
overflow-x: hidden;
padding-bottom: 1px;
}
.ui-selectmenu-menu .ui-menu .ui-selectmenu-optgroup {
font-size: 1em;
font-weight: bold;
line-height: 1.5;
padding: 2px 0.4em;
margin: 0.5em 0 0 0;
height: auto;
border: 0;
}
.ui-selectmenu-open {
display: block;
}
.ui-selectmenu-button {
display: inline-block;
overflow: hidden;
position: relative;
text-decoration: none;
cursor: pointer;
}
.ui-selectmenu-button span.ui-icon {
right: 0.5em;
left: auto;
margin-top: -8px;
position: absolute;
top: 50%;
}
.ui-selectmenu-button span.ui-selectmenu-text {
text-align: left;
padding: 0.4em 2.1em 0.4em 1em;
display: block;
line-height: 1.4;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.ui-slider {
position: relative;
text-align: left;
}
.ui-slider .ui-slider-handle {
position: absolute;
z-index: 2;
width: 1.2em;
height: 1.2em;
cursor: default;
-ms-touch-action: none;
touch-action: none;
}
.ui-slider .ui-slider-range {
position: absolute;
z-index: 1;
font-size: .7em;
display: block;
border: 0;
background-position: 0 0;
}
/* For IE8 - See #6727 */
.ui-slider.ui-state-disabled .ui-slider-handle,
.ui-slider.ui-state-disabled .ui-slider-range {
filter: inherit;
}
.ui-slider-horizontal {
height: .8em;
}
.ui-slider-horizontal .ui-slider-handle {
top: -.3em;
margin-left: -.6em;
}
.ui-slider-horizontal .ui-slider-range {
top: 0;
height: 100%;
}
.ui-slider-horizontal .ui-slider-range-min {
left: 0;
}
.ui-slider-horizontal .ui-slider-range-max {
right: 0;
}
.ui-slider-vertical {
width: .8em;
height: 100px;
}
.ui-slider-vertical .ui-slider-handle {
left: -.3em;
margin-left: 0;
margin-bottom: -.6em;
}
.ui-slider-vertical .ui-slider-range {
left: 0;
width: 100%;
}
.ui-slider-vertical .ui-slider-range-min {
bottom: 0;
}
.ui-slider-vertical .ui-slider-range-max {
top: 0;
}
.ui-sortable-handle {
-ms-touch-action: none;
touch-action: none;
}
.ui-spinner {
position: relative;
display: inline-block;
overflow: hidden;
padding: 0;
vertical-align: middle;
}
.ui-spinner-input {
border: none;
background: none;
color: inherit;
padding: 0;
margin: .2em 0;
vertical-align: middle;
margin-left: .4em;
margin-right: 22px;
}
.ui-spinner-button {
width: 16px;
height: 50%;
font-size: .5em;
padding: 0;
margin: 0;
text-align: center;
position: absolute;
cursor: default;
display: block;
overflow: hidden;
right: 0;
}
/* more specificity required here to override default borders */
.ui-spinner a.ui-spinner-button {
border-top: none;
border-bottom: none;
border-right: none;
}
/* vertically center icon */
.ui-spinner .ui-icon {
position: absolute;
margin-top: -8px;
top: 50%;
left: 0;
}
.ui-spinner-up {
top: 0;
}
.ui-spinner-down {
bottom: 0;
}
/* TR overrides */
.ui-spinner .ui-icon-triangle-1-s {
/* need to fix icons sprite */
background-position: -65px -16px;
}
.ui-tabs {
position: relative;/* position: relative prevents IE scroll bug (element with position: relative inside container with overflow: auto appear as "fixed") */
padding: .2em;
}
.ui-tabs .ui-tabs-nav {
margin: 0;
padding: .2em .2em 0;
}
.ui-tabs .ui-tabs-nav li {
list-style: none;
float: left;
position: relative;
top: 0;
margin: 1px .2em 0 0;
border-bottom-width: 0;
padding: 0;
white-space: nowrap;
}
.ui-tabs .ui-tabs-nav .ui-tabs-anchor {
float: left;
padding: .5em 1em;
text-decoration: none;
}
.ui-tabs .ui-tabs-nav li.ui-tabs-active {
margin-bottom: -1px;
padding-bottom: 1px;
}
.ui-tabs .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor,
.ui-tabs .ui-tabs-nav li.ui-state-disabled .ui-tabs-anchor,
.ui-tabs .ui-tabs-nav li.ui-tabs-loading .ui-tabs-anchor {
cursor: text;
}
.ui-tabs-collapsible .ui-tabs-nav li.ui-tabs-active .ui-tabs-anchor {
cursor: pointer;
}
.ui-tabs .ui-tabs-panel {
display: block;
border-width: 0;
padding: 1em 1.4em;
background: none;
}
.ui-tooltip {
padding: 8px;
position: absolute;
z-index: 9999;
max-width: 300px;
-webkit-box-shadow: 0 0 5px #aaa;
box-shadow: 0 0 5px #aaa;
}
body .ui-tooltip {
border-width: 2px;
}
/* Component containers
----------------------------------*/
.ui-widget {
font-family: Gill Sans,Arial,sans-serif;
font-size: 1.2em;
}
.ui-widget .ui-widget {
font-size: 1em;
}
.ui-widget input,
.ui-widget select,
.ui-widget textarea,
.ui-widget button {
font-family: Gill Sans,Arial,sans-serif;
font-size: 1em;
}
.ui-widget-content {
border: 1px solid #aaaaaa;
background: #ffffff url("images/ui-bg_flat_75_ffffff_40x100.png") 50% 50% repeat-x;
color: #2c4359;
}
.ui-widget-content a {
color: #2c4359;
}
.ui-widget-header {
border: 1px solid #2c4359;
background: #35414f url("images/ui-bg_dots-small_35_35414f_2x2.png") 50% 50% repeat;
color: #e1e463;
font-weight: bold;
}
.ui-widget-header a {
color: #e1e463;
}
/* Interaction states
----------------------------------*/
.ui-state-default,
.ui-widget-content .ui-state-default,
.ui-widget-header .ui-state-default {
border: 1px solid #93c3cd;
background: #93c3cd url("images/ui-bg_diagonals-small_50_93c3cd_40x40.png") 50% 50% repeat;
font-weight: bold;
color: #333333;
}
.ui-state-default a,
.ui-state-default a:link,
.ui-state-default a:visited {
color: #333333;
text-decoration: none;
}
.ui-state-hover,
.ui-widget-content .ui-state-hover,
.ui-widget-header .ui-state-hover,
.ui-state-focus,
.ui-widget-content .ui-state-focus,
.ui-widget-header .ui-state-focus {
border: 1px solid #999999;
background: #ccd232 url("images/ui-bg_diagonals-small_75_ccd232_40x40.png") 50% 50% repeat;
font-weight: bold;
color: #212121;
}
.ui-state-hover a,
.ui-state-hover a:hover,
.ui-state-hover a:link,
.ui-state-hover a:visited,
.ui-state-focus a,
.ui-state-focus a:hover,
.ui-state-focus a:link,
.ui-state-focus a:visited {
color: #212121;
text-decoration: none;
}
.ui-state-active,
.ui-widget-content .ui-state-active,
.ui-widget-header .ui-state-active {
border: 1px solid #ff6b7f;
background: #db4865 url("images/ui-bg_diagonals-small_40_db4865_40x40.png") 50% 50% repeat;
font-weight: bold;
color: #ffffff;
}
.ui-state-active a,
.ui-state-active a:link,
.ui-state-active a:visited {
color: #ffffff;
text-decoration: none;
}
/* Interaction Cues
----------------------------------*/
.ui-state-highlight,
.ui-widget-content .ui-state-highlight,
.ui-widget-header .ui-state-highlight {
border: 1px solid #b4d100;
background: #ffff38 url("images/ui-bg_dots-medium_80_ffff38_4x4.png") 50% 50% repeat;
color: #363636;
}
.ui-state-highlight a,
.ui-widget-content .ui-state-highlight a,
.ui-widget-header .ui-state-highlight a {
color: #363636;
}
.ui-state-error,
.ui-widget-content .ui-state-error,
.ui-widget-header .ui-state-error {
border: 1px solid #ff6b7f;
background: #ff3853 url("images/ui-bg_diagonals-small_50_ff3853_40x40.png") 50% 50% repeat;
color: #ffffff;
}
.ui-state-error a,
.ui-widget-content .ui-state-error a,
.ui-widget-header .ui-state-error a {
color: #ffffff;
}
.ui-state-error-text,
.ui-widget-content .ui-state-error-text,
.ui-widget-header .ui-state-error-text {
color: #ffffff;
}
.ui-priority-primary,
.ui-widget-content .ui-priority-primary,
.ui-widget-header .ui-priority-primary {
font-weight: bold;
}
.ui-priority-secondary,
.ui-widget-content .ui-priority-secondary,
.ui-widget-header .ui-priority-secondary {
opacity: .7;
filter:Alpha(Opacity=70);
font-weight: normal;
}
.ui-state-disabled,
.ui-widget-content .ui-state-disabled,
.ui-widget-header .ui-state-disabled {
opacity: .35;
filter:Alpha(Opacity=35);
background-image: none;
}
.ui-state-disabled .ui-icon {
filter:Alpha(Opacity=35); /* For IE8 - See #6059 */
}
/* Icons
----------------------------------*/
/* states and images */
.ui-icon {
width: 16px;
height: 16px;
}
.ui-icon,
.ui-widget-content .ui-icon {
background-image: url("images/ui-icons_c02669_256x240.png");
}
.ui-widget-header .ui-icon {
background-image: url("images/ui-icons_e1e463_256x240.png");
}
.ui-state-default .ui-icon {
background-image: url("images/ui-icons_ffffff_256x240.png");
}
.ui-state-hover .ui-icon,
.ui-state-focus .ui-icon {
background-image: url("images/ui-icons_454545_256x240.png");
}
.ui-state-active .ui-icon {
background-image: url("images/ui-icons_ffffff_256x240.png");
}
.ui-state-highlight .ui-icon {
background-image: url("images/ui-icons_88a206_256x240.png");
}
.ui-state-error .ui-icon,
.ui-state-error-text .ui-icon {
background-image: url("images/ui-icons_ffeb33_256x240.png");
}
/* positioning */
.ui-icon-blank { background-position: 16px 16px; }
.ui-icon-carat-1-n { background-position: 0 0; }
.ui-icon-carat-1-ne { background-position: -16px 0; }
.ui-icon-carat-1-e { background-position: -32px 0; }
.ui-icon-carat-1-se { background-position: -48px 0; }
.ui-icon-carat-1-s { background-position: -64px 0; }
.ui-icon-carat-1-sw { background-position: -80px 0; }
.ui-icon-carat-1-w { background-position: -96px 0; }
.ui-icon-carat-1-nw { background-position: -112px 0; }
.ui-icon-carat-2-n-s { background-position: -128px 0; }
.ui-icon-carat-2-e-w { background-position: -144px 0; }
.ui-icon-triangle-1-n { background-position: 0 -16px; }
.ui-icon-triangle-1-ne { background-position: -16px -16px; }
.ui-icon-triangle-1-e { background-position: -32px -16px; }
.ui-icon-triangle-1-se { background-position: -48px -16px; }
.ui-icon-triangle-1-s { background-position: -64px -16px; }
.ui-icon-triangle-1-sw { background-position: -80px -16px; }
.ui-icon-triangle-1-w { background-position: -96px -16px; }
.ui-icon-triangle-1-nw { background-position: -112px -16px; }
.ui-icon-triangle-2-n-s { background-position: -128px -16px; }
.ui-icon-triangle-2-e-w { background-position: -144px -16px; }
.ui-icon-arrow-1-n { background-position: 0 -32px; }
.ui-icon-arrow-1-ne { background-position: -16px -32px; }
.ui-icon-arrow-1-e { background-position: -32px -32px; }
.ui-icon-arrow-1-se { background-position: -48px -32px; }
.ui-icon-arrow-1-s { background-position: -64px -32px; }
.ui-icon-arrow-1-sw { background-position: -80px -32px; }
.ui-icon-arrow-1-w { background-position: -96px -32px; }
.ui-icon-arrow-1-nw { background-position: -112px -32px; }
.ui-icon-arrow-2-n-s { background-position: -128px -32px; }
.ui-icon-arrow-2-ne-sw { background-position: -144px -32px; }
.ui-icon-arrow-2-e-w { background-position: -160px -32px; }
.ui-icon-arrow-2-se-nw { background-position: -176px -32px; }
.ui-icon-arrowstop-1-n { background-position: -192px -32px; }
.ui-icon-arrowstop-1-e { background-position: -208px -32px; }
.ui-icon-arrowstop-1-s { background-position: -224px -32px; }
.ui-icon-arrowstop-1-w { background-position: -240px -32px; }
.ui-icon-arrowthick-1-n { background-position: 0 -48px; }
.ui-icon-arrowthick-1-ne { background-position: -16px -48px; }
.ui-icon-arrowthick-1-e { background-position: -32px -48px; }
.ui-icon-arrowthick-1-se { background-position: -48px -48px; }
.ui-icon-arrowthick-1-s { background-position: -64px -48px; }
.ui-icon-arrowthick-1-sw { background-position: -80px -48px; }
.ui-icon-arrowthick-1-w { background-position: -96px -48px; }
.ui-icon-arrowthick-1-nw { background-position: -112px -48px; }
.ui-icon-arrowthick-2-n-s { background-position: -128px -48px; }
.ui-icon-arrowthick-2-ne-sw { background-position: -144px -48px; }
.ui-icon-arrowthick-2-e-w { background-position: -160px -48px; }
.ui-icon-arrowthick-2-se-nw { background-position: -176px -48px; }
.ui-icon-arrowthickstop-1-n { background-position: -192px -48px; }
.ui-icon-arrowthickstop-1-e { background-position: -208px -48px; }
.ui-icon-arrowthickstop-1-s { background-position: -224px -48px; }
.ui-icon-arrowthickstop-1-w { background-position: -240px -48px; }
.ui-icon-arrowreturnthick-1-w { background-position: 0 -64px; }
.ui-icon-arrowreturnthick-1-n { background-position: -16px -64px; }
.ui-icon-arrowreturnthick-1-e { background-position: -32px -64px; }
.ui-icon-arrowreturnthick-1-s { background-position: -48px -64px; }
.ui-icon-arrowreturn-1-w { background-position: -64px -64px; }
.ui-icon-arrowreturn-1-n { background-position: -80px -64px; }
.ui-icon-arrowreturn-1-e { background-position: -96px -64px; }
.ui-icon-arrowreturn-1-s { background-position: -112px -64px; }
.ui-icon-arrowrefresh-1-w { background-position: -128px -64px; }
.ui-icon-arrowrefresh-1-n { background-position: -144px -64px; }
.ui-icon-arrowrefresh-1-e { background-position: -160px -64px; }
.ui-icon-arrowrefresh-1-s { background-position: -176px -64px; }
.ui-icon-arrow-4 { background-position: 0 -80px; }
.ui-icon-arrow-4-diag { background-position: -16px -80px; }
.ui-icon-extlink { background-position: -32px -80px; }
.ui-icon-newwin { background-position: -48px -80px; }
.ui-icon-refresh { background-position: -64px -80px; }
.ui-icon-shuffle { background-position: -80px -80px; }
.ui-icon-transfer-e-w { background-position: -96px -80px; }
.ui-icon-transferthick-e-w { background-position: -112px -80px; }
.ui-icon-folder-collapsed { background-position: 0 -96px; }
.ui-icon-folder-open { background-position: -16px -96px; }
.ui-icon-document { background-position: -32px -96px; }
.ui-icon-document-b { background-position: -48px -96px; }
.ui-icon-note { background-position: -64px -96px; }
.ui-icon-mail-closed { background-position: -80px -96px; }
.ui-icon-mail-open { background-position: -96px -96px; }
.ui-icon-suitcase { background-position: -112px -96px; }
.ui-icon-comment { background-position: -128px -96px; }
.ui-icon-person { background-position: -144px -96px; }
.ui-icon-print { background-position: -160px -96px; }
.ui-icon-trash { background-position: -176px -96px; }
.ui-icon-locked { background-position: -192px -96px; }
.ui-icon-unlocked { background-position: -208px -96px; }
.ui-icon-bookmark { background-position: -224px -96px; }
.ui-icon-tag { background-position: -240px -96px; }
.ui-icon-home { background-position: 0 -112px; }
.ui-icon-flag { background-position: -16px -112px; }
.ui-icon-calendar { background-position: -32px -112px; }
.ui-icon-cart { background-position: -48px -112px; }
.ui-icon-pencil { background-position: -64px -112px; }
.ui-icon-clock { background-position: -80px -112px; }
.ui-icon-disk { background-position: -96px -112px; }
.ui-icon-calculator { background-position: -112px -112px; }
.ui-icon-zoomin { background-position: -128px -112px; }
.ui-icon-zoomout { background-position: -144px -112px; }
.ui-icon-search { background-position: -160px -112px; }
.ui-icon-wrench { background-position: -176px -112px; }
.ui-icon-gear { background-position: -192px -112px; }
.ui-icon-heart { background-position: -208px -112px; }
.ui-icon-star { background-position: -224px -112px; }
.ui-icon-link { background-position: -240px -112px; }
.ui-icon-cancel { background-position: 0 -128px; }
.ui-icon-plus { background-position: -16px -128px; }
.ui-icon-plusthick { background-position: -32px -128px; }
.ui-icon-minus { background-position: -48px -128px; }
.ui-icon-minusthick { background-position: -64px -128px; }
.ui-icon-close { background-position: -80px -128px; }
.ui-icon-closethick { background-position: -96px -128px; }
.ui-icon-key { background-position: -112px -128px; }
.ui-icon-lightbulb { background-position: -128px -128px; }
.ui-icon-scissors { background-position: -144px -128px; }
.ui-icon-clipboard { background-position: -160px -128px; }
.ui-icon-copy { background-position: -176px -128px; }
.ui-icon-contact { background-position: -192px -128px; }
.ui-icon-image { background-position: -208px -128px; }
.ui-icon-video { background-position: -224px -128px; }
.ui-icon-script { background-position: -240px -128px; }
.ui-icon-alert { background-position: 0 -144px; }
.ui-icon-info { background-position: -16px -144px; }
.ui-icon-notice { background-position: -32px -144px; }
.ui-icon-help { background-position: -48px -144px; }
.ui-icon-check { background-position: -64px -144px; }
.ui-icon-bullet { background-position: -80px -144px; }
.ui-icon-radio-on { background-position: -96px -144px; }
.ui-icon-radio-off { background-position: -112px -144px; }
.ui-icon-pin-w { background-position: -128px -144px; }
.ui-icon-pin-s { background-position: -144px -144px; }
.ui-icon-play { background-position: 0 -160px; }
.ui-icon-pause { background-position: -16px -160px; }
.ui-icon-seek-next { background-position: -32px -160px; }
.ui-icon-seek-prev { background-position: -48px -160px; }
.ui-icon-seek-end { background-position: -64px -160px; }
.ui-icon-seek-start { background-position: -80px -160px; }
/* ui-icon-seek-first is deprecated, use ui-icon-seek-start instead */
.ui-icon-seek-first { background-position: -80px -160px; }
.ui-icon-stop { background-position: -96px -160px; }
.ui-icon-eject { background-position: -112px -160px; }
.ui-icon-volume-off { background-position: -128px -160px; }
.ui-icon-volume-on { background-position: -144px -160px; }
.ui-icon-power { background-position: 0 -176px; }
.ui-icon-signal-diag { background-position: -16px -176px; }
.ui-icon-signal { background-position: -32px -176px; }
.ui-icon-battery-0 { background-position: -48px -176px; }
.ui-icon-battery-1 { background-position: -64px -176px; }
.ui-icon-battery-2 { background-position: -80px -176px; }
.ui-icon-battery-3 { background-position: -96px -176px; }
.ui-icon-circle-plus { background-position: 0 -192px; }
.ui-icon-circle-minus { background-position: -16px -192px; }
.ui-icon-circle-close { background-position: -32px -192px; }
.ui-icon-circle-triangle-e { background-position: -48px -192px; }
.ui-icon-circle-triangle-s { background-position: -64px -192px; }
.ui-icon-circle-triangle-w { background-position: -80px -192px; }
.ui-icon-circle-triangle-n { background-position: -96px -192px; }
.ui-icon-circle-arrow-e { background-position: -112px -192px; }
.ui-icon-circle-arrow-s { background-position: -128px -192px; }
.ui-icon-circle-arrow-w { background-position: -144px -192px; }
.ui-icon-circle-arrow-n { background-position: -160px -192px; }
.ui-icon-circle-zoomin { background-position: -176px -192px; }
.ui-icon-circle-zoomout { background-position: -192px -192px; }
.ui-icon-circle-check { background-position: -208px -192px; }
.ui-icon-circlesmall-plus { background-position: 0 -208px; }
.ui-icon-circlesmall-minus { background-position: -16px -208px; }
.ui-icon-circlesmall-close { background-position: -32px -208px; }
.ui-icon-squaresmall-plus { background-position: -48px -208px; }
.ui-icon-squaresmall-minus { background-position: -64px -208px; }
.ui-icon-squaresmall-close { background-position: -80px -208px; }
.ui-icon-grip-dotted-vertical { background-position: 0 -224px; }
.ui-icon-grip-dotted-horizontal { background-position: -16px -224px; }
.ui-icon-grip-solid-vertical { background-position: -32px -224px; }
.ui-icon-grip-solid-horizontal { background-position: -48px -224px; }
.ui-icon-gripsmall-diagonal-se { background-position: -64px -224px; }
.ui-icon-grip-diagonal-se { background-position: -80px -224px; }
/* Misc visuals
----------------------------------*/
/* Corner radius */
.ui-corner-all,
.ui-corner-top,
.ui-corner-left,
.ui-corner-tl {
border-top-left-radius: 4px;
}
.ui-corner-all,
.ui-corner-top,
.ui-corner-right,
.ui-corner-tr {
border-top-right-radius: 4px;
}
.ui-corner-all,
.ui-corner-bottom,
.ui-corner-left,
.ui-corner-bl {
border-bottom-left-radius: 4px;
}
.ui-corner-all,
.ui-corner-bottom,
.ui-corner-right,
.ui-corner-br {
border-bottom-right-radius: 4px;
}
/* Overlays */
.ui-widget-overlay {
background: #f7f7ba url("images/ui-bg_white-lines_85_f7f7ba_40x100.png") 50% 50% repeat;
opacity: .8;
filter: Alpha(Opacity=80);
}
.ui-widget-shadow {
margin: 8px 0 0 8px;
padding: 10px;
background: #ba9217 url("images/ui-bg_flat_75_ba9217_40x100.png") 50% 50% repeat-x;
opacity: .2;
filter: Alpha(Opacity=20);
border-radius: 5px;
}
#content-container
margin: auto
width: 960px
#formatting-container
background-color: #f5f5f5
border-bottom: 1px solid #ccc
padding: 5px 12px
.ql-active, button:hover
color: green
font-weight: bold
#editor
height: 330px
.quill-wrapper
border: 1px solid #ccc
border-radius: 2px
box-shadow: inset 0 1px 1px rgba(0,0,0,.075)
.basic-wrapper, .advanced-wrapper
border: 1px solid #ccc
float: left
margin-left: 3%
margin-top: 15px
margin-bottom: 15px
.basic-wrapper
width: 40%
.advanced-wrapper
width: 50%
.editor-container
height: 400px
.toolbar-container
border-bottom: 1px solid #ccc
.basic-wrapper .toolbar-container
padding: 8px 14px
.ql-active, button:hover
color: green
font-weight: bold
.ql-editor
ul
list-style-type: disc !important /* Override form style */
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>en</string>
<key>CFBundleExecutable</key>
<string>${EXECUTABLE_NAME}</string>
<key>CFBundleIdentifier</key>
<string>org.cocoapods.${PRODUCT_NAME:rfc1034identifier}</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>${PRODUCT_NAME}</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>1.0.0</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${CURRENT_PROJECT_VERSION}</string>
<key>NSPrincipalClass</key>
<string></string>
</dict>
</plist>
///////////////////////////////////////////////////////////////////////////////////
/// OpenGL Mathematics (glm.g-truc.net)
///
/// Copyright (c) 2005 - 2014 G-Truc Creation (www.g-truc.net)
/// Permission is hereby granted, free of charge, to any person obtaining a copy
/// of this software and associated documentation files (the "Software"), to deal
/// in the Software without restriction, including without limitation the rights
/// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
/// copies of the Software, and to permit persons to whom the Software is
/// furnished to do so, subject to the following conditions:
///
/// The above copyright notice and this permission notice shall be included in
/// all copies or substantial portions of the Software.
///
/// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
/// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
/// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
/// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
/// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
/// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/// THE SOFTWARE.
///
/// @ref gtx_handed_coordinate_space
/// @file glm/gtx/handed_coordinate_space.hpp
/// @date 2005-12-21 / 2011-06-07
/// @author Christophe Riccio
///
/// @see core (dependence)
///
/// @defgroup gtx_handed_coordinate_space GLM_GTX_handed_coordinate_space
/// @ingroup gtx
///
/// @brief Determine whether a set of three basis vectors defines a right- or left-handed coordinate system.
///
/// <glm/gtx/handed_coordinate_space.hpp> needs to be included to use these functionalities.
///////////////////////////////////////////////////////////////////////////////////
#ifndef GLM_GTX_handed_coordinate_space
#define GLM_GTX_handed_coordinate_space
// Dependency:
#include "../glm.hpp"
#if(defined(GLM_MESSAGES) && !defined(GLM_EXT_INCLUDED))
# pragma message("GLM: GLM_GTX_handed_coordinate_space extension included")
#endif
namespace glm
{
/// @addtogroup gtx_handed_coordinate_space
/// @{
//! Return whether a trihedron is right-handed or not.
//! From GLM_GTX_handed_coordinate_space extension.
template <typename T, precision P>
GLM_FUNC_DECL bool rightHanded(
detail::tvec3<T, P> const & tangent,
detail::tvec3<T, P> const & binormal,
detail::tvec3<T, P> const & normal);
//! Return whether a trihedron is left-handed or not.
//! From GLM_GTX_handed_coordinate_space extension.
template <typename T, precision P>
GLM_FUNC_DECL bool leftHanded(
detail::tvec3<T, P> const & tangent,
detail::tvec3<T, P> const & binormal,
detail::tvec3<T, P> const & normal);
/// @}
}// namespace glm
#include "handed_coordinate_space.inl"
#endif//GLM_GTX_handed_coordinate_space
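// Illustrative usage sketch (not part of the original header): exercising the
// declarations above on the standard basis. Assumes GLM is on the include path;
// the function names and argument order follow the declarations in this file.
#include <glm/glm.hpp>
#include <glm/gtx/handed_coordinate_space.hpp>
#include <cassert>

void handedness_example()
{
	glm::vec3 const tangent (1.0f, 0.0f, 0.0f); // X axis
	glm::vec3 const binormal(0.0f, 1.0f, 0.0f); // Y axis
	glm::vec3 const normal  (0.0f, 0.0f, 1.0f); // Z axis

	// X, Y, Z in this order form a right-handed trihedron.
	assert(glm::rightHanded(tangent, binormal, normal));
	assert(!glm::leftHanded(tangent, binormal, normal));
}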
// ThreadTest.cs - NUnit Test Cases for the System.Threading.Thread class
//
// Authors
// Eduardo Garcia Cebollero ([email protected])
// Sebastien Pouliot <[email protected]>
//
// (C) Eduardo Garcia Cebollero.
// (C) Ximian, Inc. http://www.ximian.com
// (C) 2004 Novell (http://www.novell.com)
//
using System;
using System.Globalization;
using System.Security.Principal;
using System.Threading;
using System.Threading.Tasks;
using System.Reflection;
using System.Collections.Generic;
using SD = System.Diagnostics;
using NUnit.Framework;
namespace MonoTests.System.Threading
{
#if !DISABLE_SECURITY
// These tests seem to hang the 2.0 framework. So they are disabled for now
// Don't reenable them until you can run a few thousand times on an SMP box.
[Category ("NotWorking")]
public class ThreadedPrincipalTest
{
public static void NoPrincipal ()
{
AppDomain.CurrentDomain.SetPrincipalPolicy (PrincipalPolicy.NoPrincipal);
IPrincipal p = Thread.CurrentPrincipal;
Assert.IsNull (p, "#1");
Thread.CurrentPrincipal = new GenericPrincipal (new GenericIdentity ("mono"), null);
Assert.IsNotNull (Thread.CurrentPrincipal, "#2");
Thread.CurrentPrincipal = null;
Assert.IsNull (Thread.CurrentPrincipal, "#3");
// in this case we can return to null
}
public static void UnauthenticatedPrincipal ()
{
AppDomain.CurrentDomain.SetPrincipalPolicy (PrincipalPolicy.UnauthenticatedPrincipal);
IPrincipal p = Thread.CurrentPrincipal;
Assert.IsNotNull (p, "#1");
Assert.IsTrue ((p is GenericPrincipal), "#2");
Assert.AreEqual (String.Empty, p.Identity.Name, "#3");
Assert.AreEqual (String.Empty, p.Identity.AuthenticationType, "#4");
Assert.IsFalse (p.Identity.IsAuthenticated, "#5");
Thread.CurrentPrincipal = new GenericPrincipal (new GenericIdentity ("mono"), null);
Assert.IsNotNull (Thread.CurrentPrincipal, "#6");
Thread.CurrentPrincipal = null;
Assert.IsNotNull (Thread.CurrentPrincipal, "#7");
// in this case we can't return to null
}
public static void WindowsPrincipal ()
{
AppDomain.CurrentDomain.SetPrincipalPolicy (PrincipalPolicy.WindowsPrincipal);
IPrincipal p = Thread.CurrentPrincipal;
Assert.IsNotNull (p, "#1");
Assert.IsTrue ((p is WindowsPrincipal), "#2");
Assert.IsNotNull (p.Identity.Name, "#3");
Assert.IsNotNull (p.Identity.AuthenticationType, "#4");
Assert.IsTrue (p.Identity.IsAuthenticated, "#5");
// note: we can switch from a WindowsPrincipal to a GenericPrincipal
Thread.CurrentPrincipal = new GenericPrincipal (new GenericIdentity ("mono"), null);
Assert.IsNotNull (Thread.CurrentPrincipal, "#6");
Thread.CurrentPrincipal = null;
Assert.IsNotNull (Thread.CurrentPrincipal, "#7");
// in this case we can't return to null
}
public static void CopyOnNewThread ()
{
Assert.IsNotNull (Thread.CurrentPrincipal, "#1");
Assert.AreEqual ("good", Thread.CurrentPrincipal.Identity.Name, "#2");
}
}
#endif
[TestFixture]
[Category("MobileNotWorking")] // Abort #10240
public class ThreadTest
{
//TimeSpan Infinite = new TimeSpan (-10000); // -10000 ticks == -1 ms
TimeSpan SmallNegative = new TimeSpan (-2); // between 0 and -1.0 (infinite) ms
TimeSpan Negative = new TimeSpan (-20000); // really negative
//TimeSpan MaxValue = TimeSpan.FromMilliseconds ((long) Int32.MaxValue);
TimeSpan TooLarge = TimeSpan.FromMilliseconds ((long) Int32.MaxValue + 1);
//Some Classes to test as threads
private class C1Test
{
public int cnt;
public Thread thread1;
public bool endm1;
public bool endm2;
public C1Test()
{
thread1 = (Thread)null;
this.cnt = 0;
endm1 = endm2 = false;
}
public void TestMethod()
{
while (cnt < 10)
{
cnt++;
}
endm1 = true;
}
public void TestMethod2()
{
if (!(thread1==(Thread)null) )
{
thread1.Join();
}
endm2 = true;
}
}
private class C2Test
{
public int cnt;
public bool run = false;
public C2Test()
{
this.cnt = 0;
}
public void TestMethod()
{
run = true;
while (true)
{
if (cnt < 1000)
cnt++;
else
cnt = 0;
}
}
}
private class C3Test
{
public C1Test sub_class;
public Thread sub_thread;
public C3Test()
{
sub_class = new C1Test();
sub_thread = new Thread(new ThreadStart(sub_class.TestMethod));
}
public void TestMethod1()
{
sub_thread.Start();
Thread.Sleep (100);
#if MONO_FEATURE_THREAD_ABORT
sub_thread.Abort();
#else
sub_thread.Interrupt ();
#endif
}
}
private class C4Test
{
public C1Test class1;
public C1Test class2;
public Thread thread1;
public Thread thread2;
public bool T1ON ;
public bool T2ON ;
public C4Test()
{
T1ON = false;
T2ON = false;
class1 = new C1Test();
class2 = new C1Test();
thread1 = new Thread(new ThreadStart(class1.TestMethod));
thread2 = new Thread(new ThreadStart(class2.TestMethod));
}
public void TestMethod1()
{
thread1.Start();
TestUtil.WaitForAlive (thread1, "wait1");
T1ON = true;
thread2.Start();
TestUtil.WaitForAlive (thread2, "wait2");
T2ON = true;
#if MONO_FEATURE_THREAD_ABORT
thread1.Abort();
#else
thread1.Interrupt ();
#endif
TestUtil.WaitForNotAlive (thread1, "wait3");
T1ON = false;
#if MONO_FEATURE_THREAD_ABORT
thread2.Abort();
#else
thread2.Interrupt ();
#endif
TestUtil.WaitForNotAlive (thread2, "wait4");
T2ON = false;
}
public void TestMethod2()
{
thread1.Start();
thread1.Join();
}
}
[Test]
public void TestCtor1()
{
C1Test test1 = new C1Test();
Thread t = new Thread (new ThreadStart (test1.TestMethod));
Assert.IsTrue (t.CurrentCulture.IsReadOnly, "CurrentCulture.IsReadOnly");
Assert.IsFalse (t.IsAlive, "IsAlive");
Assert.IsFalse (t.IsBackground, "IsBackground");
Assert.IsNull (t.Name, "Name");
Assert.AreEqual (ThreadState.Unstarted, t.ThreadState, "ThreadState");
}
[Test]
[Category ("NotWorking")] // we're not sharing (read-only) CultureInfo
public void CultureInfo_Shared_Across_Threads ()
{
Thread t = new Thread (TestCtor1);
Assert.AreSame (t.CurrentCulture, t.CurrentUICulture, "Culture");
Assert.AreSame (t.CurrentCulture, CultureInfo.CurrentCulture, "CultureInfo.CurrentCulture");
Assert.AreSame (t.CurrentUICulture, CultureInfo.CurrentUICulture, "CultureInfo.CurrentUICulture");
Assert.AreSame (t.CurrentCulture, Thread.CurrentThread.CurrentCulture, "Thread.CurrentThread.CurrentCulture");
Assert.AreSame (t.CurrentUICulture, Thread.CurrentThread.CurrentUICulture, "Thread.CurrentThread.CurrentUICulture");
}
[Test] // bug #325566
[Category ("MultiThreaded")]
public void GetHashCodeTest ()
{
C1Test test1 = new C1Test ();
Thread tA = new Thread (new ThreadStart (test1.TestMethod));
int hA1 = tA.GetHashCode ();
Assert.IsTrue (hA1 > 0, "#A1");
tA.Start ();
int hA2 = tA.GetHashCode ();
Assert.AreEqual (hA1, hA2, "#A2");
tA.Join ();
int hA3 = tA.GetHashCode ();
Assert.AreEqual (hA1, hA3, "#A3");
Assert.AreEqual (hA1, tA.ManagedThreadId, "#A4");
test1 = new C1Test ();
Thread tB = new Thread (new ThreadStart (test1.TestMethod));
int hB1 = tB.GetHashCode ();
Assert.IsTrue (hB1 > 0, "#B1");
tB.Start ();
int hB2 = tB.GetHashCode ();
Assert.AreEqual (hB1, hB2, "#B2");
tB.Join ();
int hB3 = tB.GetHashCode ();
Assert.AreEqual (hB1, hB3, "#B3");
Assert.AreEqual (hB1, tB.ManagedThreadId, "#B4");
Assert.IsFalse (hA2 == hB2, "#B5");
}
[Test] // bug #82700
[Category ("MultiThreaded")]
public void ManagedThreadId ()
{
C1Test test1 = new C1Test ();
Thread t1 = new Thread (new ThreadStart (test1.TestMethod));
int mtA1 = t1.ManagedThreadId;
t1.Start ();
int mtA2 = t1.ManagedThreadId;
t1.Join ();
int mtA3 = t1.ManagedThreadId;
Assert.AreEqual (mtA1, mtA2, "#A1");
Assert.AreEqual (mtA2, mtA3, "#A2");
test1 = new C1Test ();
Thread t2 = new Thread (new ThreadStart (test1.TestMethod));
int mtB1 = t2.ManagedThreadId;
t2.Start ();
int mtB2 = t2.ManagedThreadId;
t2.Join ();
int mtB3 = t2.ManagedThreadId;
Assert.AreEqual (mtB1, mtB2, "#B1");
Assert.AreEqual (mtB2, mtB3, "#B2");
Assert.IsFalse (mtB1 == mtA1, "#B3");
}
[Test]
[Category ("NotDotNet")] // it hangs.
[Category ("MultiThreaded")]
public void TestStart()
{
{
C1Test test1 = new C1Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
TestThread.Start();
TestThread.Join();
Assert.AreEqual (10, test1.cnt, "#1");
}
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
TestThread.Start();
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
try {
TestThread.Start();
Assert.Fail ("#2");
} catch (ThreadStateException) {
}
}
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
TestThread.Start();
while (!test1.run) {
}
bool started = (TestThread.ThreadState == ThreadState.Running);
Assert.AreEqual (started, test1.run, "#15 Thread Is not in the correct state: ");
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
[Test]
[Category ("MultiThreaded")]
public void TestApartmentState ()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
Assert.AreEqual (ApartmentState.Unknown, TestThread.ApartmentState, "#1");
TestThread.Start();
TestUtil.WaitForAlive (TestThread, "wait5");
Assert.AreEqual (ApartmentState.MTA, TestThread.ApartmentState, "#2");
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
[Test]
[Category ("NotWorking")] // setting the priority of a Thread before it is started isn't implemented in Mono yet
public void TestPriority1()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
try {
TestThread.Priority=ThreadPriority.BelowNormal;
ThreadPriority before = TestThread.Priority;
Assert.AreEqual (ThreadPriority.BelowNormal, before, "#40 Unexpected priority before thread start: ");
TestThread.Start();
TestUtil.WaitForAlive (TestThread, "wait7");
ThreadPriority after = TestThread.Priority;
Assert.AreEqual (before, after, "#41 Unexpected Priority Change: ");
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
#if MONO_FEATURE_THREAD_ABORT
[Test]
[Category ("NotDotNet")] // on MS, Thread is still in AbortRequested state when Start is invoked
public void AbortUnstarted ()
{
C2Test test1 = new C2Test();
Thread th = new Thread (new ThreadStart (test1.TestMethod));
th.Abort ();
th.Start ();
}
#endif
[Test]
[Category ("NotDotNet")] // on MS, ThreadState is immediately Stopped after Abort
[Category ("NotWorking")] // this is a MonoTODO -> no support for Priority
public void TestPriority2()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
try {
Assert.AreEqual (ThreadPriority.Normal, TestThread.Priority, "#42 Incorrect Priority in New thread: ");
TestThread.Start();
TestUtil.WaitForAliveOrStop (TestThread, "wait8");
Assert.AreEqual (ThreadPriority.Normal, TestThread.Priority, "#43 Incorrect Priority in Started thread: ");
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
Assert.AreEqual (ThreadPriority.Normal, TestThread.Priority, "#44 Incorrect Priority in Aborted thread: ");
}
[Test]
[Category ("NotWorking")] // this is a MonoTODO -> no support for Priority
public void TestPriority3()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
try {
TestThread.Start();
TestThread.Priority = ThreadPriority.Lowest;
Assert.AreEqual (ThreadPriority.Lowest, TestThread.Priority, "#45A Incorrect Priority:");
TestThread.Priority = ThreadPriority.BelowNormal;
Assert.AreEqual (ThreadPriority.BelowNormal, TestThread.Priority, "#45B Incorrect Priority:");
TestThread.Priority = ThreadPriority.Normal;
Assert.AreEqual (ThreadPriority.Normal, TestThread.Priority, "#45C Incorrect Priority:");
TestThread.Priority = ThreadPriority.AboveNormal;
Assert.AreEqual (ThreadPriority.AboveNormal, TestThread.Priority, "#45D Incorrect Priority:");
TestThread.Priority = ThreadPriority.Highest;
Assert.AreEqual (ThreadPriority.Highest, TestThread.Priority, "#45E Incorrect Priority:");
}
finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
[Test]
[Category ("MultiThreaded")]
public void TestUndivisibleByPageSizeMaxStackSize ()
{
const int undivisible_stacksize = 1048573;
var thread = new Thread (new ThreadStart (delegate {}), undivisible_stacksize);
thread.Start ();
thread.Join ();
}
[Test]
[Category ("MultiThreaded")]
public void TestIsBackground1 ()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
try {
TestThread.Start();
TestUtil.WaitForAlive (TestThread, "wait9");
bool state = TestThread.IsBackground;
Assert.IsFalse (state, "#51 IsBackground not set at the default state: ");
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
[Test]
[Category ("MultiThreaded")]
public void TestIsBackground2 ()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
TestThread.IsBackground = true;
try {
TestThread.Start();
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
if (TestThread.IsAlive) {
try {
Assert.IsTrue (TestThread.IsBackground, "#52 Is Background Changed to Start ");
} catch (ThreadStateException) {
// Ignore if thread died meantime
}
}
}
[Test]
[Category ("MultiThreaded")]
public void TestName()
{
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
try {
TestThread.Start();
TestUtil.WaitForAlive (TestThread, "wait10");
string name = TestThread.Name;
Assert.IsNull (name, "#61 Name set when mustn't be set: ");
string newname = "Testing....";
TestThread.Name = newname;
Assert.AreEqual (newname, TestThread.Name, "#62 Name not set when must be set: ");
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
[Test]
public void Name ()
{
Thread t = new Thread (new ThreadStart (Name));
Assert.IsNull (t.Name, "Name-1");
t.Name = null;
Assert.IsNull (t.Name, "Name-2");
}
[Test]
[ExpectedException (typeof (InvalidOperationException))]
public void Rename ()
{
Thread t = new Thread (new ThreadStart (Rename));
t.Name = "a";
t.Name = "b";
}
[Test]
[Category ("MultiThreaded")]
public void TestNestedThreads1()
{
C3Test test1 = new C3Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod1));
try {
TestThread.Start();
TestUtil.WaitForAlive (TestThread, "wait11");
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
[Test]
[Category ("MultiThreaded")]
public void TestNestedThreads2()
{
C4Test test1 = new C4Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod1));
try {
TestThread.Start();
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
}
[Test]
[Category ("MultiThreaded")]
public void TestJoin1()
{
C1Test test1 = new C1Test();
C1Test test2 = new C1Test();
Thread thread1 = new Thread(new ThreadStart(test1.TestMethod));
Thread thread2 = new Thread(new ThreadStart(test1.TestMethod2));
try {
thread1.Start();
thread2.Start();
thread2.Join();
} finally {
#if MONO_FEATURE_THREAD_ABORT
thread1.Abort();
thread2.Abort();
#else
thread1.Interrupt ();
thread2.Interrupt ();
#endif
}
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void Join_Int32_Negative ()
{
// -1 is Timeout.Infinite
Thread.CurrentThread.Join (-2);
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void Join_TimeSpan_Negative ()
{
Thread.CurrentThread.Join (Negative);
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void Join_TimeSpan_TooLarge ()
{
Thread.CurrentThread.Join (TooLarge);
}
[Test]
public void Join_TimeSpan_SmallNegative ()
{
Thread.CurrentThread.Join (SmallNegative);
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void Sleep_Int32_Negative ()
{
// -1 is Timeout.Infinite
Thread.Sleep (-2);
}
[Test]
public void Sleep_TimeSpan_SmallNegative ()
{
Thread.Sleep (SmallNegative);
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void Sleep_TimeSpan_Negative ()
{
Thread.Sleep (Negative);
}
[Test]
[ExpectedException (typeof (ArgumentOutOfRangeException))]
public void Sleep_TimeSpan_TooLarge ()
{
Thread.Sleep (TooLarge);
}
[Test]
public void SpinWait ()
{
// no exception for negative numbers
Thread.SpinWait (Int32.MinValue);
Thread.SpinWait (0);
}
[Test]
[Category ("MultiThreaded")]
public void TestThreadState ()
{
//TODO: Test The rest of the possible transitions
C2Test test1 = new C2Test();
Thread TestThread = new Thread(new ThreadStart(test1.TestMethod));
Assert.AreEqual (ThreadState.Unstarted, TestThread.ThreadState, "#101 Wrong Thread State");
try {
TestThread.Start();
//while(!TestThread.IsAlive); //In the MS Documentation this is not necessary
//but in the MS SDK it is
Assert.IsTrue (TestThread.ThreadState == ThreadState.Running || (TestThread.ThreadState & ThreadState.Unstarted) != 0,
"#102 Wrong Thread State: " + TestThread.ThreadState.ToString ());
} finally {
#if MONO_FEATURE_THREAD_ABORT
TestThread.Abort();
#else
TestThread.Interrupt ();
#endif
}
TestUtil.WaitForNotAlive (TestThread, "wait12");
// Docs say state will be Stopped, but Aborted happens sometimes (?)
Assert.IsTrue ((ThreadState.Stopped & TestThread.ThreadState) != 0 || (ThreadState.Aborted & TestThread.ThreadState) != 0,
"#103 Wrong Thread State: " + TestThread.ThreadState.ToString ());
}
#if !DISABLE_SECURITY
[Test]
[Ignore ("see comment below.")]
public void CurrentPrincipal_PrincipalPolicy_NoPrincipal ()
{
// note: switching from PrincipalPolicy won't work inside the same thread
// because as soon as a Principal object is created the Policy doesn't matter anymore
Thread t = new Thread (new ThreadStart (ThreadedPrincipalTest.NoPrincipal));
try {
t.Start ();
t.Join ();
} catch {
#if MONO_FEATURE_THREAD_ABORT
t.Abort ();
#else
t.Interrupt ();
#endif
}
}
[Test]
[Ignore ("see comment below.")]
public void CurrentPrincipal_PrincipalPolicy_UnauthenticatedPrincipal ()
{
// note: switching from PrincipalPolicy won't work inside the same thread
// because as soon as a Principal object is created the Policy doesn't matter anymore
Thread t = new Thread (new ThreadStart (ThreadedPrincipalTest.UnauthenticatedPrincipal));
try {
t.Start ();
t.Join ();
} catch {
#if MONO_FEATURE_THREAD_ABORT
t.Abort ();
#else
t.Interrupt ();
#endif
}
}
[Test]
public void CurrentPrincipal_PrincipalPolicy_WindowsPrincipal ()
{
// note: switching from PrincipalPolicy won't work inside the same thread
// because as soon as a Principal object is created the Policy doesn't matter anymore
Thread t = new Thread (new ThreadStart (ThreadedPrincipalTest.WindowsPrincipal));
try {
t.Start ();
t.Join ();
} catch {
#if MONO_FEATURE_THREAD_ABORT
t.Abort ();
#else
t.Interrupt ();
#endif
}
}
[Test]
public void IPrincipal_CopyOnNewThread ()
{
Thread.CurrentPrincipal = new GenericPrincipal (new GenericIdentity ("bad"), null);
Thread t = new Thread (new ThreadStart (ThreadedPrincipalTest.CopyOnNewThread));
try {
Thread.CurrentPrincipal = new GenericPrincipal (new GenericIdentity ("good"), null);
t.Start ();
t.Join ();
} catch {
#if MONO_FEATURE_THREAD_ABORT
t.Abort ();
#else
t.Interrupt ();
#endif
}
}
#endif
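// Shared counter incremented by DoCount; the suspend/resume tests below sample it to tell whether the worker thread is making progress.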
int counter = 0;
#if MONO_FEATURE_THREAD_SUSPEND_RESUME
[Test]
[Category ("MultiThreaded")]
public void TestSuspend ()
{
Thread t = new Thread (new ThreadStart (DoCount));
t.IsBackground = true;
t.Start ();
CheckIsRunning ("t1", t);
t.Suspend ();
WaitSuspended ("t2", t);
CheckIsNotRunning ("t3", t);
t.Resume ();
WaitResumed ("t4", t);
CheckIsRunning ("t5", t);
t.Abort ();
TestUtil.WaitForNotAlive (t, "wait13");
CheckIsNotRunning ("t6", t);
}
#endif
#if MONO_FEATURE_THREAD_SUSPEND_RESUME && MONO_FEATURE_THREAD_ABORT
[Test]
[Category("NotDotNet")] // On MS, ThreadStateException is thrown on Abort: "Thread is suspended; attempting to abort"
[Category ("MultiThreaded")]
public void TestSuspendAbort ()
{
Thread t = new Thread (new ThreadStart (DoCount));
t.IsBackground = true;
t.Start ();
CheckIsRunning ("t1", t);
t.Suspend ();
WaitSuspended ("t2", t);
CheckIsNotRunning ("t3", t);
t.Abort ();
int n=0;
while (t.IsAlive && n < 200) {
Thread.Sleep (10);
n++;
}
Assert.IsTrue (n < 200, "Timeout while waiting for abort");
CheckIsNotRunning ("t6", t);
}
#endif
[Test]
[Category ("MultiThreaded")]
public void Test_Interrupt ()
{
ManualResetEvent mre = new ManualResetEvent (false);
bool interruptedExceptionThrown = false;
ThreadPool.QueueUserWorkItem (Test_Interrupt_Worker, Thread.CurrentThread);
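// Test_Interrupt_Worker interrupts this (the test) thread after ~100 ms; the interrupt should surface as a single ThreadInterruptedException from the waits below, never twice.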
try {
try {
mre.WaitOne (3000);
} finally {
try {
mre.WaitOne (0);
} catch (ThreadInterruptedException) {
Assert.Fail ("ThreadInterruptedException thrown twice");
}
}
} catch (ThreadInterruptedException) {
interruptedExceptionThrown = true;
}
Assert.IsTrue (interruptedExceptionThrown, "ThreadInterruptedException expected.");
}
[Test]
[ExpectedException (typeof (ArgumentNullException))]
public void TestQueueUserWorkItemNullCallback ()
{
ThreadPool.QueueUserWorkItem (null, null);
}
private void Test_Interrupt_Worker (object o)
{
Thread t = o as Thread;
Thread.Sleep (100);
t.Interrupt ();
}
[Test]
public void Test_InterruptCurrentThread ()
{
ManualResetEvent mre = new ManualResetEvent (false);
bool interruptedExceptionThrown = false;
Thread.CurrentThread.Interrupt ();
try {
mre.WaitOne (0);
Assert.Fail ();
} catch (ThreadInterruptedException) {
}
}
[Test]
public void GetNamedDataSlotTest ()
{
Assert.IsNotNull (Thread.GetNamedDataSlot ("te#st"), "#1");
Assert.AreSame (Thread.GetNamedDataSlot ("te#st"), Thread.GetNamedDataSlot ("te#st"), "#2");
}
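// Helper marshalled into a separate AppDomain: Run() starts a thread and ThreadProc checks that Thread.CurrentThread inside it is the very Thread instance Run() created.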
class DomainClass : MarshalByRefObject {
Thread m_thread;
bool success;
public bool Run () {
m_thread = new Thread(ThreadProc);
m_thread.Start(Thread.CurrentThread);
m_thread.Join();
return success;
}
public void ThreadProc (object arg) {
success = m_thread == Thread.CurrentThread;
}
}
#if MONO_FEATURE_MULTIPLE_APPDOMAINS
[Test]
[Category ("NotDotNet")]
public void CurrentThread_Domains ()
{
AppDomain ad = AppDomain.CreateDomain ("foo");
ad.Load (typeof (DomainClass).Assembly.GetName ());
var o = (DomainClass)ad.CreateInstanceAndUnwrap (typeof (DomainClass).Assembly.FullName, typeof (DomainClass).FullName);
Assert.IsTrue (o.Run ());
AppDomain.Unload (ad);
}
#endif // MONO_FEATURE_MULTIPLE_APPDOMAINS
[Test]
public void SetNameInThreadPoolThread ()
{
Task t = Task.Run (delegate () {
Thread.CurrentThread.Name = "ThreadName1";
Assert.AreEqual (Thread.CurrentThread.Name, "ThreadName1", "#1");
try {
Thread.CurrentThread.Name = "ThreadName2";
Assert.Fail ("#2");
} catch (InvalidOperationException) {
}
});
t.Wait ();
}
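// CheckIsRunning/CheckIsNotRunning sample the shared counter across a 100 ms sleep to verify whether DoCount is (or is not) advancing.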
void CheckIsRunning (string s, Thread t)
{
int c = counter;
Thread.Sleep (100);
Assert.IsTrue (counter > c, s);
}
void CheckIsNotRunning (string s, Thread t)
{
int c = counter;
Thread.Sleep (100);
Assert.AreEqual (counter, c, s);
}
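// Polls until the Suspended bit appears in the thread state; while waiting the thread must report SuspendRequested, and the loop fails the test after roughly one second.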
void WaitSuspended (string s, Thread t)
{
int n=0;
ThreadState state = t.ThreadState;
while ((state & ThreadState.Suspended) == 0) {
Assert.IsTrue ((state & ThreadState.SuspendRequested) != 0, s + ": expected SuspendRequested state");
Thread.Sleep (10);
n++;
Assert.IsTrue (n < 100, s + ": failed to suspend");
state = t.ThreadState;
}
Assert.IsTrue ((state & ThreadState.SuspendRequested) == 0, s + ": SuspendRequested state not expected");
}
void WaitResumed (string s, Thread t)
{
int n=0;
while ((t.ThreadState & ThreadState.Suspended) != 0) {
Thread.Sleep (10);
n++;
Assert.IsTrue (n < 100, s + ": failed to resume");
}
}
public void DoCount ()
{
while (true) {
counter++;
Thread.Sleep (1);
}
}
}
[TestFixture]
public class ThreadStateTest {
void Start ()
{
}
[Test] // bug #81720
[Category ("MultiThreaded")]
public void IsBackGround ()
{
Thread t1 = new Thread (new ThreadStart (Start));
Assert.AreEqual (ThreadState.Unstarted, t1.ThreadState, "#A1");
Assert.IsFalse (t1.IsBackground, "#A2");
t1.Start ();
t1.Join ();
Assert.AreEqual (ThreadState.Stopped, t1.ThreadState, "#A3");
try {
bool isBackGround = t1.IsBackground;
Assert.Fail ("#A4: " + isBackGround.ToString ());
} catch (ThreadStateException ex) {
Assert.AreEqual (typeof (ThreadStateException), ex.GetType (), "#A5");
Assert.IsNull (ex.InnerException, "#A6");
Assert.IsNotNull (ex.Message, "#A7");
}
Thread t2 = new Thread (new ThreadStart (Start));
Assert.AreEqual (ThreadState.Unstarted, t2.ThreadState, "#B1");
t2.IsBackground = true;
Assert.AreEqual (ThreadState.Unstarted | ThreadState.Background, t2.ThreadState, "#B2");
Assert.IsTrue (t2.IsBackground, "#B3");
t2.Start ();
t2.Join ();
Assert.AreEqual (ThreadState.Stopped, t2.ThreadState, "#B4");
try {
bool isBackGround = t2.IsBackground;
Assert.Fail ("#B5: " + isBackGround.ToString ());
} catch (ThreadStateException ex) {
Assert.AreEqual (typeof (ThreadStateException), ex.GetType (), "#B6");
Assert.IsNull (ex.InnerException, "#B7");
Assert.IsNotNull (ex.Message, "#B8");
}
}
[Test] // bug #60031
[Category ("MultiThreaded")]
public void StoppedThreadsThrowThreadStateException ()
{
var t = new Thread (() => { });
t.Start ();
t.Join ();
Assert.Throws<ThreadStateException> (() => { var isb = t.IsBackground; }, "IsBackground getter");
Assert.Throws<ThreadStateException> (() => { var isb = t.ApartmentState; }, "ApartmentState getter");
Assert.Throws<ThreadStateException> (() => t.ApartmentState = ApartmentState.MTA, "ApartmentState setter");
Assert.Throws<ThreadStateException> (() => t.IsBackground = false, "IsBackground setter");
Assert.Throws<ThreadStateException> (() => t.Start (), "Start ()");
#if MONO_FEATURE_THREAD_SUSPEND_RESUME
Assert.Throws<ThreadStateException> (() => t.Resume (), "Resume ()");
Assert.Throws<ThreadStateException> (() => t.Suspend (), "Suspend ()");
#endif
Assert.Throws<ThreadStateException> (() => t.GetApartmentState (), "GetApartmentState ()");
Assert.Throws<ThreadStateException> (() => t.SetApartmentState (ApartmentState.MTA), "SetApartmentState ()");
Assert.Throws<ThreadStateException> (() => t.TrySetApartmentState (ApartmentState.MTA), "TrySetApartmentState ()");
}
}
[TestFixture]
[Serializable]
public class ThreadTest_ManagedThreadId
{
AppDomain ad1;
AppDomain ad2;
MBRO mbro = new MBRO ();
class MBRO : MarshalByRefObject {
public int id_a1;
public int id_b1;
public int id_b2;
public string ad_a1;
public string ad_b1;
public string ad_b2;
public string message;
}
#if !MOBILE
[Test]
public void ManagedThreadId_AppDomains ()
{
AppDomain currentDomain = AppDomain.CurrentDomain;
ad1 = AppDomain.CreateDomain ("AppDomain 1", currentDomain.Evidence, currentDomain.SetupInformation);
ad2 = AppDomain.CreateDomain ("AppDomain 2", currentDomain.Evidence, currentDomain.SetupInformation);
Thread a = new Thread (ThreadA);
Thread b = new Thread (ThreadB);
// execute on AppDomain 1 thread A
// execute on AppDomain 2 thread B
// execute on AppDomain 1 thread B - must have same ManagedThreadId as Ad 2 on thread B
a.Start ();
a.Join ();
b.Start ();
b.Join ();
AppDomain.Unload (ad1);
AppDomain.Unload (ad2);
if (mbro.message != null)
Assert.Fail (mbro.message);
// Console.WriteLine ("Done id_a1: {0} id_b1: {1} id_b2: {2} ad_a1: {3} ad_b1: {4} ad_b2: {5}", mbro.id_a1, mbro.id_b1, mbro.id_b2, mbro.ad_a1, mbro.ad_b1, mbro.ad_b2);
Assert.AreEqual ("AppDomain 1", mbro.ad_a1, "Name #1");
Assert.AreEqual ("AppDomain 1", mbro.ad_b1, "Name #2");
Assert.AreEqual ("AppDomain 2", mbro.ad_b2, "Name #3");
Assert.AreNotEqual (mbro.id_a1, mbro.id_b1, "Id #1");
Assert.AreNotEqual (mbro.id_a1, mbro.id_b2, "Id #2");
Assert.AreEqual (mbro.id_b1, mbro.id_b2, "Id #3");
Assert.AreNotEqual (mbro.id_a1, Thread.CurrentThread.ManagedThreadId, "Id #4");
Assert.AreNotEqual (mbro.id_b1, Thread.CurrentThread.ManagedThreadId, "Id #5");
Assert.AreNotEqual (mbro.id_b2, Thread.CurrentThread.ManagedThreadId, "Id #6");
Assert.AreNotEqual (mbro.ad_a1, AppDomain.CurrentDomain.FriendlyName, "Name #4");
Assert.AreNotEqual (mbro.ad_b1, AppDomain.CurrentDomain.FriendlyName, "Name #5");
Assert.AreNotEqual (mbro.ad_b2, AppDomain.CurrentDomain.FriendlyName, "Name #6");
}
#endif
void A1 ()
{
mbro.id_a1 = Thread.CurrentThread.ManagedThreadId;
mbro.ad_a1 = AppDomain.CurrentDomain.FriendlyName;
}
void B2 ()
{
mbro.id_b2 = Thread.CurrentThread.ManagedThreadId;
mbro.ad_b2 = AppDomain.CurrentDomain.FriendlyName;
}
void B1 ()
{
mbro.id_b1 = Thread.CurrentThread.ManagedThreadId;
mbro.ad_b1 = AppDomain.CurrentDomain.FriendlyName;
}
void ThreadA (object obj)
{
// Console.WriteLine ("ThreadA");
try {
ad1.DoCallBack (A1);
} catch (Exception ex) {
mbro.message = string.Format ("ThreadA exception: {0}", ex);
}
// Console.WriteLine ("ThreadA Done");
}
void ThreadB (object obj)
{
// Console.WriteLine ("ThreadB");
try {
ad2.DoCallBack (B2);
ad1.DoCallBack (B1);
} catch (Exception ex) {
mbro.message = string.Format ("ThreadB exception: {0}", ex);
}
// Console.WriteLine ("ThreadB Done");
}
}
[TestFixture]
public class ThreadApartmentTest
{
void Start ()
{
}
[Test] // bug #81658
[Category ("MultiThreaded")]
public void ApartmentState_StoppedThread ()
{
Thread t1 = new Thread (new ThreadStart (Start));
t1.Start ();
t1.Join ();
try {
ApartmentState state = t1.ApartmentState;
Assert.Fail ("#A1: " + state.ToString ());
} catch (ThreadStateException ex) {
Assert.AreEqual (typeof (ThreadStateException), ex.GetType (), "#A2");
Assert.IsNull (ex.InnerException, "#A3");
Assert.IsNotNull (ex.Message, "#A4");
}
Thread t2 = new Thread (new ThreadStart (Start));
t2.IsBackground = true;
t2.Start ();
t2.Join ();
try {
ApartmentState state = t2.ApartmentState;
Assert.Fail ("#B1: " + state.ToString ());
} catch (ThreadStateException ex) {
Assert.AreEqual (typeof (ThreadStateException), ex.GetType (), "#B2");
Assert.IsNull (ex.InnerException, "#B3");
Assert.IsNotNull (ex.Message, "#B4");
}
}
[Test]
public void ApartmentState_BackGround ()
{
Thread t1 = new Thread (new ThreadStart (Start));
t1.IsBackground = true;
Assert.AreEqual (ApartmentState.Unknown, t1.ApartmentState, "#1");
t1.ApartmentState = ApartmentState.STA;
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "#2");
}
[Test]
[Category ("MultiThreaded")]
public void TestApartmentState ()
{
Thread t1 = new Thread (new ThreadStart (Start));
Thread t2 = new Thread (new ThreadStart (Start));
Thread t3 = new Thread (new ThreadStart (Start));
Assert.AreEqual (ApartmentState.Unknown, t1.ApartmentState, "Thread1 Default");
Assert.AreEqual (ApartmentState.Unknown, t2.ApartmentState, "Thread2 Default");
Assert.AreEqual (ApartmentState.Unknown, t3.ApartmentState, "Thread3 Default");
t1.ApartmentState = ApartmentState.STA;
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "Thread1 Set Once");
t1.ApartmentState = ApartmentState.MTA;
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "Thread1 Set Twice");
t2.ApartmentState = ApartmentState.MTA;
Assert.AreEqual (ApartmentState.MTA, t2.ApartmentState, "Thread2 Set Once");
t2.ApartmentState = ApartmentState.STA;
Assert.AreEqual (ApartmentState.MTA, t2.ApartmentState, "Thread2 Set Twice");
bool exception_occured = false;
try {
t3.ApartmentState = ApartmentState.Unknown;
}
catch (Exception) {
exception_occured = true;
}
Assert.AreEqual (ApartmentState.Unknown, t3.ApartmentState, "Thread3 Set Invalid");
Assert.IsFalse (exception_occured, "Thread3 Set Invalid Exception Occurred");
t1.Start ();
exception_occured = false;
try {
t1.ApartmentState = ApartmentState.STA;
}
catch (Exception) {
exception_occured = true;
}
Assert.IsTrue (exception_occured, "Thread1 Started Invalid Exception Occurred");
}
[Test]
public void TestSetApartmentStateSameState ()
{
Thread t1 = new Thread (new ThreadStart (Start));
t1.SetApartmentState (ApartmentState.STA);
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "Thread1 Set Once");
t1.SetApartmentState (ApartmentState.STA);
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "Thread1 Set twice");
}
[Test]
[ExpectedException(typeof(InvalidOperationException))]
public void TestSetApartmentStateDiffState ()
{
Thread t1 = new Thread (new ThreadStart (Start));
t1.SetApartmentState (ApartmentState.STA);
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "Thread1 Set Once");
t1.SetApartmentState (ApartmentState.MTA);
}
[Test]
[Category ("MultiThreaded")]
public void TestTrySetApartmentState ()
{
Thread t1 = new Thread (new ThreadStart (Start));
t1.SetApartmentState (ApartmentState.STA);
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "#1");
bool result = t1.TrySetApartmentState (ApartmentState.MTA);
Assert.IsFalse (result, "#2");
result = t1.TrySetApartmentState (ApartmentState.STA);
Assert.IsTrue (result, "#3");
}
[Test]
[Category ("MultiThreaded")]
public void TestTrySetApartmentStateRunning ()
{
Thread t1 = new Thread (new ThreadStart (Start));
t1.SetApartmentState (ApartmentState.STA);
Assert.AreEqual (ApartmentState.STA, t1.ApartmentState, "#1");
t1.Start ();
try {
t1.TrySetApartmentState (ApartmentState.STA);
Assert.Fail ("#2");
} catch (ThreadStateException) {
}
t1.Join ();
}
[Test]
public void Volatile () {
double v3 = 55667;
Thread.VolatileWrite (ref v3, double.MaxValue);
Assert.AreEqual (v3, double.MaxValue);
object o = "ABC";
Assert.AreEqual ("ABC", Thread.VolatileRead (ref o));
float v4 = 1;
Thread.VolatileWrite (ref v4, float.MaxValue);
Assert.AreEqual (v4, float.MaxValue);
}
[Test]
public void Culture ()
{
Assert.IsNotNull (Thread.CurrentThread.CurrentCulture, "CurrentCulture");
Assert.IsNotNull (Thread.CurrentThread.CurrentUICulture, "CurrentUICulture");
}
[Test]
[Category ("MultiThreaded")]
public void ThreadStartSimple ()
{
int i = 0;
Thread t = new Thread (delegate () {
// ensure the NSAutoreleasePool works
i++;
});
t.Start ();
t.Join ();
Assert.AreEqual (1, i, "ThreadStart");
}
[Test]
[Category ("MultiThreaded")]
public void ParametrizedThreadStart ()
{
int i = 0;
object arg = null;
Thread t = new Thread (delegate (object obj) {
// ensure the NSAutoreleasePool works
i++;
arg = obj;
});
t.Start (this);
t.Join ();
Assert.AreEqual (1, i, "ParametrizedThreadStart");
Assert.AreEqual (this, arg, "obj");
}
[Test]
public void SetNameTpThread () {
ThreadPool.QueueUserWorkItem(new WaitCallback(ThreadProc));
}
static void ThreadProc(Object stateInfo) {
Thread.CurrentThread.Name = "My Worker";
}
[Test]
public void GetStackTraces () {
var m = typeof (Thread).GetMethod ("Mono_GetStackTraces", BindingFlags.NonPublic|BindingFlags.Static);
if (m != null) {
var res = (Dictionary<Thread,SD.StackTrace>)typeof (Thread).GetMethod ("Mono_GetStackTraces", BindingFlags.NonPublic|BindingFlags.Static).Invoke (null, null);
foreach (var t in res.Keys) {
var st = res [t].ToString ();
}
}
}
}
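// Polling helpers shared by the thread tests above; each wait gives up and fails the test after ten seconds instead of hanging the suite.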
public class TestUtil
{
public static void WaitForNotAlive (Thread t, string s)
{
WhileAlive (t, true, s);
}
public static void WaitForAlive (Thread t, string s)
{
WhileAlive (t, false, s);
}
public static bool WaitForAliveOrStop (Thread t, string s)
{
return WhileAliveOrStop (t, false, s);
}
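// Spins until t.IsAlive no longer equals the requested value, failing the calling test on a ten second timeout.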
public static void WhileAlive (Thread t, bool alive, string s)
{
var sw = SD.Stopwatch.StartNew ();
while (t.IsAlive == alive) {
if (sw.Elapsed.TotalSeconds > 10) {
if (alive) Assert.Fail ("Timeout while waiting for not alive state. " + s);
else Assert.Fail ("Timeout while waiting for alive state. " + s);
}
}
}
public static bool WhileAliveOrStop (Thread t, bool alive, string s)
{
var sw = SD.Stopwatch.StartNew ();
while (t.IsAlive == alive) {
if (t.ThreadState == ThreadState.Stopped)
return false;
if (sw.Elapsed.TotalSeconds > 10) {
if (alive) Assert.Fail ("Timeout while waiting for not alive state. " + s);
else Assert.Fail ("Timeout while waiting for alive state. " + s);
}
}
return true;
}
}
}
/* eslint no-unused-expressions:0 */
/* globals afterEach, beforeEach, describe, it */
'use strict';
var chai = require('chai');
var sinon = require('sinon');
var Buildmail = require('../lib/buildmail');
var http = require('http');
var stream = require('stream');
var Transform = stream.Transform;
var PassThrough = stream.PassThrough;
var expect = chai.expect;
chai.config.includeStack = true;
describe('Buildmail', function () {
it('should create Buildmail object', function () {
expect(new Buildmail()).to.exist;
});
describe('#createChild', function () {
it('should create child', function () {
var mb = new Buildmail('multipart/mixed');
var child = mb.createChild('multipart/mixed');
expect(child.parentNode).to.equal(mb);
expect(child.rootNode).to.equal(mb);
var subchild1 = child.createChild('text/html');
expect(subchild1.parentNode).to.equal(child);
expect(subchild1.rootNode).to.equal(mb);
var subchild2 = child.createChild('text/html');
expect(subchild2.parentNode).to.equal(child);
expect(subchild2.rootNode).to.equal(mb);
});
});
describe('#appendChild', function () {
it('should append child node', function () {
var mb = new Buildmail('multipart/mixed');
var child = new Buildmail('text/plain');
mb.appendChild(child);
expect(child.parentNode).to.equal(mb);
expect(child.rootNode).to.equal(mb);
expect(mb.childNodes.length).to.equal(1);
expect(mb.childNodes[0]).to.equal(child);
});
});
describe('#replace', function () {
it('should replace node', function () {
var mb = new Buildmail(),
child = mb.createChild('text/plain'),
replacement = new Buildmail('image/png');
child.replace(replacement);
expect(mb.childNodes.length).to.equal(1);
expect(mb.childNodes[0]).to.equal(replacement);
});
});
describe('#remove', function () {
it('should remove node', function () {
var mb = new Buildmail(),
child = mb.createChild('text/plain');
child.remove();
expect(mb.childNodes.length).to.equal(0);
expect(child.parentNode).to.not.exist;
});
});
describe('#setHeader', function () {
it('should set header', function () {
var mb = new Buildmail();
mb.setHeader('key', 'value');
mb.setHeader('key', 'value1');
expect(mb.getHeader('Key')).to.equal('value1');
mb.setHeader([{
key: 'key',
value: 'value2'
}, {
key: 'key2',
value: 'value3'
}]);
expect(mb._headers).to.deep.equal([{
key: 'Key',
value: 'value2'
}, {
key: 'Key2',
value: 'value3'
}]);
mb.setHeader({
key: 'value4',
key2: 'value5'
});
expect(mb._headers).to.deep.equal([{
key: 'Key',
value: 'value4'
}, {
key: 'Key2',
value: 'value5'
}]);
});
it('should set multiple headers with the same key', function () {
var mb = new Buildmail();
mb.setHeader('key', ['value1', 'value2', 'value3']);
expect(mb._headers).to.deep.equal([{
key: 'Key',
value: ['value1', 'value2', 'value3']
}]);
});
});
describe('#addHeader', function () {
it('should add header', function () {
var mb = new Buildmail();
mb.addHeader('key', 'value1');
mb.addHeader('key', 'value2');
mb.addHeader([{
key: 'key',
value: 'value2'
}, {
key: 'key2',
value: 'value3'
}]);
mb.addHeader({
key: 'value4',
key2: 'value5'
});
expect(mb._headers).to.deep.equal([{
key: 'Key',
value: 'value1'
}, {
key: 'Key',
value: 'value2'
}, {
key: 'Key',
value: 'value2'
}, {
key: 'Key2',
value: 'value3'
}, {
key: 'Key',
value: 'value4'
}, {
key: 'Key2',
value: 'value5'
}]);
});
it('should set multiple headers with the same key', function () {
var mb = new Buildmail();
mb.addHeader('key', ['value1', 'value2', 'value3']);
expect(mb._headers).to.deep.equal([{
key: 'Key',
value: 'value1'
}, {
key: 'Key',
value: 'value2'
}, {
key: 'Key',
value: 'value3'
}]);
});
});
describe('#getHeader', function () {
it('should return first matching header value', function () {
var mb = new Buildmail();
mb._headers = [{
key: 'Key',
value: 'value4'
}, {
key: 'Key2',
value: 'value5'
}];
expect(mb.getHeader('KEY')).to.equal('value4');
});
});
describe('#setContent', function () {
it('should set the contents for a node', function () {
var mb = new Buildmail();
mb.setContent('abc');
expect(mb.content).to.equal('abc');
});
});
describe('#build', function () {
it('should build root node', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
date: '12345',
'message-id': '67890'
}).
setContent('Hello world!'),
expected = 'Content-Type: text/plain\r\n' +
'Date: 12345\r\n' +
'Message-ID: <67890>\r\n' +
'Content-Transfer-Encoding: 7bit\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'Hello world!';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should build child node', function (done) {
var mb = new Buildmail('multipart/mixed'),
childNode = mb.createChild('text/plain').
setContent('Hello world!'),
expected = 'Content-Type: text/plain\r\n' +
'Content-Transfer-Encoding: 7bit\r\n' +
'\r\n' +
'Hello world!';
childNode.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should build multipart node', function (done) {
var mb = new Buildmail('multipart/mixed', {
baseBoundary: 'test'
}).setHeader({
date: '12345',
'message-id': '67890'
}),
expected = 'Content-Type: multipart/mixed; boundary="----sinikael-?=_1-test"\r\n' +
'Date: 12345\r\n' +
'Message-ID: <67890>\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'------sinikael-?=_1-test\r\n' +
'Content-Type: text/plain\r\n' +
'Content-Transfer-Encoding: 7bit\r\n' +
'\r\n' +
'Hello world!\r\n' +
'------sinikael-?=_1-test--\r\n';
mb.createChild('text/plain').setContent('Hello world!');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should build root with generated headers', function (done) {
var mb = new Buildmail('text/plain');
mb.hostname = 'abc';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Date:\s/m.test(msg)).to.be.true;
expect(/^Message\-ID:\s/m.test(msg)).to.be.true;
expect(/^MIME-Version: 1\.0$/m.test(msg)).to.be.true;
done();
});
});
it('should not include bcc in output, but should keep it in the envelope', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
from: '[email protected]',
to: '[email protected]',
bcc: '[email protected]'
});
var envelope = mb.getEnvelope();
expect(envelope).to.deep.equal({
from: '[email protected]',
to: ['[email protected]', '[email protected]']
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^From: [email protected]$/m.test(msg)).to.be.true;
expect(/^To: [email protected]$/m.test(msg)).to.be.true;
expect(!/^Bcc:/m.test(msg)).to.be.true;
done();
});
});
it('should include bcc in output and in envelope when keepBcc is set', function (done) {
var mb = new Buildmail(
'text/plain', {
keepBcc: true
}).
setHeader({
from: '[email protected]',
to: '[email protected]',
bcc: '[email protected]'
});
var envelope = mb.getEnvelope();
expect(envelope).to.deep.equal({
from: '[email protected]',
to: ['[email protected]', '[email protected]']
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^From: [email protected]$/m.test(msg)).to.be.true;
expect(/^To: [email protected]$/m.test(msg)).to.be.true;
expect(/^Bcc: [email protected]$/m.test(msg)).to.be.true;
done();
});
});
it('should use set envelope', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
from: '[email protected]',
to: '[email protected]',
bcc: '[email protected]'
}).setEnvelope({
from: 'U Name, A Name <[email protected]>',
to: 'B Name <[email protected]>, [email protected]',
bcc: 'P P P, <[email protected]>',
fooField: {
barValue: 'foobar'
}
});
var envelope = mb.getEnvelope();
expect(envelope).to.deep.equal({
from: '[email protected]',
to: ['[email protected]', '[email protected]', '[email protected]'],
fooField: {
barValue: 'foobar'
}
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^From: [email protected]$/m.test(msg)).to.be.true;
expect(/^To: [email protected]$/m.test(msg)).to.be.true;
expect(!/^Bcc:/m.test(msg)).to.be.true;
done();
});
});
it('should have unicode subject', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
subject: 'jõgeval istus kägu metsas'
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Subject: =\?UTF\-8\?Q\?j=C3=B5geval_istus_k=C3=A4gu\?= metsas$/m.test(msg)).to.be.true;
done();
});
});
it('should have unicode subject with strange characters', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
subject: 'ˆ¸ÁÌÓıÏˇÁÛ^¸\\ÁıˆÌÁÛØ^\\˜Û˝™ˇıÓ¸^\\˜fi^\\·\\˜Ø^£˜#fi^\\£fi^\\£fi^\\'
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg.match(/\bSubject: [^\r]*\r\n( [^\r]*\r\n)*/)[0]).to.equal('Subject: =?UTF-8?B?y4bCuMOBw4zDk8Sxw4/Lh8OBw5tewrhcw4HEscuG?=\r\n =?UTF-8?B?w4zDgcObw5heXMucw5vLneKEosuHxLHDk8K4Xlw=?=\r\n =?UTF-8?B?y5zvrIFeXMK3XMucw5hewqPLnCPvrIFeXMKj76yB?=\r\n =?UTF-8?B?XlzCo++sgV5c?=\r\n');
done();
});
});
it('should keep 7bit text as is', function (done) {
var mb = new Buildmail('text/plain').
setContent('tere tere');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/\r\n\r\ntere tere$/.test(msg)).to.be.true;
expect(/^Content-Type: text\/plain$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: 7bit$/m.test(msg)).to.be.true;
done();
});
});
it('should prefer base64', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
subject: 'õõõõ'
}).
setContent('õõõõõõõõ');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/plain; charset=utf-8$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: base64$/m.test(msg)).to.be.true;
expect(/^Subject: =\?UTF-8\?B\?w7XDtcO1w7U=\?=$/m.test(msg)).to.be.true;
msg = msg.split('\r\n\r\n');
msg.shift();
msg = msg.join('\r\n\r\n');
expect(msg).to.equal('w7XDtcO1w7XDtcO1w7XDtQ==');
done();
});
});
it('should force quoted-printable', function (done) {
var mb = new Buildmail('text/plain', {
textEncoding: 'quoted-printable'
}).setHeader({
subject: 'õõõõ'
}).
setContent('õõõõõõõõ');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/plain; charset=utf-8$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: quoted-printable$/m.test(msg)).to.be.true;
expect(/^Subject: =\?UTF-8\?Q\?=C3=B5=C3=B5=C3=B5=C3=B5\?=$/m.test(msg)).to.be.true;
msg = msg.split('\r\n\r\n');
msg.shift();
msg = msg.join('\r\n\r\n');
expect(msg).to.equal('=C3=B5=C3=B5=C3=B5=C3=B5=C3=B5=C3=B5=C3=B5=C3=B5');
done();
});
});
it('should prefer quoted-printable', function (done) {
var mb = new Buildmail('text/plain').
setContent('ooooooooõ');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/plain; charset=utf-8$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: quoted-printable$/m.test(msg)).to.be.true;
msg = msg.split('\r\n\r\n');
msg.shift();
msg = msg.join('\r\n\r\n');
expect(msg).to.equal('oooooooo=C3=B5');
done();
});
});
it('should not flow text', function (done) {
var mb = new Buildmail('text/plain').
setContent('a b c d e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 0 a b c d e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 0');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/plain$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: quoted-printable$/m.test(msg)).to.be.true;
msg = msg.split('\r\n\r\n');
msg.shift();
msg = msg.join('\r\n\r\n');
expect(msg).to.equal('a b c d e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 0 a b c d=\r\n e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 =\r\n0');
done();
});
});
it('should not flow html', function (done) {
var mb = new Buildmail('text/html').
setContent('a b c d e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 0 a b c d e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 0');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/html$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: quoted-printable$/m.test(msg)).to.be.true;
msg = msg.split('\r\n\r\n');
msg.shift();
msg = msg.join('\r\n\r\n');
expect(msg).to.equal('a b c d e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 0 a b c d=\r\n e f g h i j k l m o p q r s t u w x y z 1 2 3 4 5 6 7 8 9 =\r\n0');
done();
});
});
it('should use 7bit for html', function (done) {
var mb = new Buildmail('text/html').
setContent('a b c d e f g h i j k l m o p\r\nq r s t u w x y z 1 2 3 4 5 6\r\n7 8 9 0 a b c d e f g h i j k\r\nl m o p q r s t u w x y z\r\n1 2 3 4 5 6 7 8 9 0');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/html$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: 7bit$/m.test(msg)).to.be.true;
msg = msg.split('\r\n\r\n');
msg.shift();
msg = msg.join('\r\n\r\n');
expect(msg).to.equal('a b c d e f g h i j k l m o p\r\nq r s t u w x y z 1 2 3 4 5 6\r\n7 8 9 0 a b c d e f g h i j k\r\nl m o p q r s t u w x y z\r\n1 2 3 4 5 6 7 8 9 0');
done();
});
});
it('should fetch ascii filename', function (done) {
var mb = new Buildmail('text/plain', {
filename: 'jogeva.txt'
}).
setContent('jogeva');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/\r\n\r\njogeva$/.test(msg)).to.be.true;
expect(/^Content-Type: text\/plain; name=jogeva.txt$/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: 7bit$/m.test(msg)).to.be.true;
expect(/^Content-Disposition: attachment; filename=jogeva.txt$/m.test(msg)).to.be.true;
done();
});
});
it('should set unicode filename', function (done) {
var mb = new Buildmail('text/plain', {
filename: 'jõgeva.txt'
}).
setContent('jõgeva');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/plain; charset=utf-8;/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: quoted-printable$/m.test(msg)).to.be.true;
expect(/^Content-Disposition: attachment; filename\*0\*=utf-8''j%C3%B5geva.txt$/m.test(msg)).to.be.true;
done();
});
});
it('should set dashed filename', function (done) {
var mb = new Buildmail('text/plain', {
filename: 'Ɣ------Ɣ------Ɣ------Ɣ------Ɣ------Ɣ------Ɣ------.pdf'
}).
setContent('jõgeva');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg.indexOf('Content-Disposition: attachment;\r\n' +
' filename*0*=utf-8\'\'%C6%94------%C6%94------%C6%94------%C6%94;\r\n' +
' filename*1*=------%C6%94------%C6%94------%C6%94------.pdf')).to.be.gte(0);
done();
});
});
it('should encode filename with a space', function (done) {
var mb = new Buildmail('text/plain', {
filename: 'document a.test.pdf'
}).
setContent('jõgeva');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: text\/plain; charset=utf-8;/m.test(msg)).to.be.true;
expect(/^Content-Transfer-Encoding: quoted-printable$/m.test(msg)).to.be.true;
expect(/^Content-Disposition: attachment; filename="document a.test.pdf"$/m.test(msg)).to.be.true;
done();
});
});
it('should detect content type from filename', function (done) {
var mb = new Buildmail(false, {
filename: 'jogeva.zip'
}).
setContent('jogeva');
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Content-Type: application\/zip;/m.test(msg)).to.be.true;
done();
});
});
it('should convert address objects', function (done) {
var mb = new Buildmail(false).
setHeader({
from: [{
name: 'the safewithme õ testuser',
address: 'safewithme.testuser@jõgeva.com'
}],
cc: [{
name: 'the safewithme testuser',
address: 'safewithme.testuser@jõgeva.com'
}]
});
expect(mb.getEnvelope()).to.deep.equal({
from: '[email protected]',
to: [
'[email protected]'
]
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^From: =\?UTF\-8\?Q\?the_safewithme_=C3=B5_testuser\?=$/m.test(msg)).to.be.true;
expect(/^\s+<safewithme.testuser@xn\-\-jgeva-dua.com>$/m.test(msg)).to.be.true;
expect(/^Cc: the safewithme testuser <safewithme.testuser@xn\-\-jgeva-dua.com>$/m.test(msg)).to.be.true;
done();
});
});
it('should skip empty header', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
a: 'b',
cc: '',
dd: [],
o: false,
date: 'zzz',
'message-id': '67890'
}).
setContent('Hello world!'),
expected = 'Content-Type: text/plain\r\n' +
'A: b\r\n' +
'Date: zzz\r\n' +
'Message-ID: <67890>\r\n' +
'Content-Transfer-Encoding: 7bit\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'Hello world!';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should not process prepared headers', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
unprepared: {
value: new Array(100).join('a b')
},
prepared: {
value: new Array(100).join('a b'),
prepared: true
},
unicode: {
value: 'õäöü',
prepared: true
},
date: 'zzz',
'message-id': '67890'
}).
setContent('Hello world!'),
expected = 'Content-Type: text/plain\r\n' +
// long folded value
'Unprepared: a ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba\r\n' +
' ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba\r\n' +
' ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba\r\n' +
' ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba\r\n' +
' ba ba ba b\r\n' +
// long unfolded value
'Prepared: a ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba ba b\r\n' +
// non-ascii value
'Unicode: õäöü\r\n' +
'Date: zzz\r\n' +
'Message-ID: <67890>\r\n' +
'Content-Transfer-Encoding: 7bit\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'Hello world!';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should set default transfer encoding for application content', function (done) {
var mb = new Buildmail('application/x-my-stuff').
setHeader({
date: '12345',
'message-id': '67890'
}).
setContent('Hello world!'),
expected = 'Content-Type: application/x-my-stuff\r\n' +
'Date: 12345\r\n' +
'Message-ID: <67890>\r\n' +
'Content-Transfer-Encoding: base64\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'SGVsbG8gd29ybGQh';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should not set transfer encoding for multipart content', function (done) {
var mb = new Buildmail('multipart/global').
setHeader({
date: '12345',
'message-id': '67890'
}).
setContent('Hello world!'),
expected = 'Content-Type: multipart/global; boundary=abc\r\n' +
'Date: 12345\r\n' +
'Message-ID: <67890>\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'Hello world!\r\n' +
'--abc--' +
'\r\n';
mb.boundary = 'abc';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should not set transfer encoding for message/ content', function (done) {
var mb = new Buildmail('message/rfc822').
setHeader({
date: '12345',
'message-id': '67890'
}).
setContent('Hello world!'),
expected = 'Content-Type: message/rfc822\r\n' +
'Date: 12345\r\n' +
'Message-ID: <67890>\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'Hello world!';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should use from domain for message-id', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
from: '[email protected]'
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Message-ID: <[0-9a-f\-]+@example\.com>$/m.test(msg)).to.be.true;
done();
});
});
it('should fallback to hostname for message-id', function (done) {
var mb = new Buildmail('text/plain');
mb.hostname = 'abc';
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^Message-ID: <[0-9a-f\-]+@abc>$/m.test(msg)).to.be.true;
done();
});
});
});
describe('#getEnvelope', function () {
it('should get envelope', function () {
expect(new Buildmail().addHeader({
from: 'From <[email protected]>',
sender: 'Sender <[email protected]>',
to: '[email protected]'
}).addHeader({
to: '[email protected]',
cc: '[email protected], [email protected]',
bcc: '[email protected], Rec5 <[email protected]>'
}).getEnvelope()).to.deep.equal({
from: '[email protected]',
to: ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']
});
expect(new Buildmail().addHeader({
sender: 'Sender <[email protected]>',
to: '[email protected]'
}).addHeader({
to: '[email protected]',
cc: '[email protected], [email protected]',
bcc: '[email protected], Rec5 <[email protected]>'
}).getEnvelope()).to.deep.equal({
from: '[email protected]',
to: ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]']
});
});
});
describe('#messageId', function () {
it('should create and return message-Id', function () {
var mail = new Buildmail().addHeader({
from: 'From <[email protected]>'
});
var messageId = mail.messageId();
expect(/^<[\w\-]+@example\.com>$/.test(messageId)).to.be.true;
expect(messageId).to.equal(mail.messageId());
});
});
describe('#getAddresses', function () {
it('should get address object', function () {
expect(new Buildmail().addHeader({
from: 'From <[email protected]>',
sender: 'Sender <[email protected]>',
to: '[email protected]'
}).addHeader({
to: '[email protected]',
cc: '[email protected], [email protected]',
bcc: '[email protected], Rec5 <[email protected]>'
}).getAddresses()).to.deep.equal({
from: [{
address: '[email protected]',
name: 'From'
}],
sender: [{
address: '[email protected]',
name: 'Sender'
}],
to: [{
address: '[email protected]',
name: ''
}, {
address: '[email protected]',
name: ''
}],
cc: [{
address: '[email protected]',
name: ''
}, {
address: '[email protected]',
name: ''
}],
bcc: [{
address: '[email protected]',
name: ''
}, {
address: '[email protected]',
name: 'Rec5'
}]
});
expect(new Buildmail().addHeader({
sender: 'Sender <[email protected]>',
to: '[email protected]'
}).addHeader({
to: '[email protected]',
cc: '[email protected], [email protected]',
bcc: '[email protected], Rec5 <[email protected]>'
}).getAddresses()).to.deep.equal({
sender: [{
address: '[email protected]',
name: 'Sender'
}],
to: [{
address: '[email protected]',
name: ''
}, {
address: '[email protected]',
name: ''
}],
cc: [{
address: '[email protected]',
name: ''
}],
bcc: [{
address: '[email protected]',
name: ''
}, {
address: '[email protected]',
name: 'Rec5'
}]
});
});
});
describe('#_parseAddresses', function () {
it('should normalize header key', function () {
var mb = new Buildmail();
expect(mb._parseAddresses('test [email protected]')).to.deep.equal([{
address: '[email protected]',
name: 'test'
}]);
expect(mb._parseAddresses(['test [email protected]'])).to.deep.equal([{
address: '[email protected]',
name: 'test'
}]);
expect(mb._parseAddresses([
['test [email protected]']
])).to.deep.equal([{
address: '[email protected]',
name: 'test'
}]);
expect(mb._parseAddresses([{
address: '[email protected]',
name: 'test'
}])).to.deep.equal([{
address: '[email protected]',
name: 'test'
}]);
});
});
describe('#_normalizeHeaderKey', function () {
it('should normalize header key', function () {
var mb = new Buildmail();
expect(mb._normalizeHeaderKey('key')).to.equal('Key');
expect(mb._normalizeHeaderKey('mime-vERSION')).to.equal('MIME-Version');
expect(mb._normalizeHeaderKey('-a-long-name')).to.equal('-A-Long-Name');
expect(mb._normalizeHeaderKey('some-spf')).to.equal('Some-SPF');
expect(mb._normalizeHeaderKey('dkim-some')).to.equal('DKIM-Some');
expect(mb._normalizeHeaderKey('x-smtpapi')).to.equal('X-SMTPAPI');
expect(mb._normalizeHeaderKey('message-id')).to.equal('Message-ID');
expect(mb._normalizeHeaderKey('CONTENT-FEATUres')).to.equal('Content-features');
});
});
describe('#_handleContentType', function () {
it('should do nothing on non multipart', function () {
var mb = new Buildmail();
expect(mb.boundary).to.not.exist;
mb._handleContentType({
value: 'text/plain'
});
expect(mb.boundary).to.be.false;
expect(mb.multipart).to.be.false;
});
it('should use provided boundary', function () {
var mb = new Buildmail();
expect(mb.boundary).to.not.exist;
mb._handleContentType({
value: 'multipart/mixed',
params: {
boundary: 'abc'
}
});
expect(mb.boundary).to.equal('abc');
expect(mb.multipart).to.equal('mixed');
});
it('should generate boundary', function () {
var mb = new Buildmail();
sinon.stub(mb, '_generateBoundary').returns('def');
expect(mb.boundary).to.not.exist;
mb._handleContentType({
value: 'multipart/mixed',
params: {}
});
expect(mb.boundary).to.equal('def');
expect(mb.multipart).to.equal('mixed');
mb._generateBoundary.restore();
});
});
describe('#_generateBoundary ', function () {
it('should generate boundary string', function () {
var mb = new Buildmail();
mb._nodeId = 'abc';
mb.rootNode.baseBoundary = 'def';
expect(mb._generateBoundary()).to.equal('----sinikael-?=_abc-def');
});
});
describe('#_encodeHeaderValue', function () {
it('should do nothing if possible', function () {
var mb = new Buildmail();
expect(mb._encodeHeaderValue('x-my', 'test value')).to.equal('test value');
});
it('should encode non ascii characters', function () {
var mb = new Buildmail();
expect(mb._encodeHeaderValue('x-my', 'test jõgeva value')).to.equal('test =?UTF-8?Q?j=C3=B5geva?= value');
});
it('should format references', function () {
var mb = new Buildmail();
expect(mb._encodeHeaderValue('references', 'abc def')).to.equal('<abc> <def>');
expect(mb._encodeHeaderValue('references', ['abc', 'def'])).to.equal('<abc> <def>');
});
it('should format message-id', function () {
var mb = new Buildmail();
expect(mb._encodeHeaderValue('message-id', 'abc')).to.equal('<abc>');
});
it('should format addresses', function () {
var mb = new Buildmail();
expect(mb._encodeHeaderValue('from', {
name: 'the safewithme testuser',
address: 'safewithme.testuser@jõgeva.com'
})).to.equal('the safewithme testuser <[email protected]>');
});
});
describe('#_convertAddresses', function () {
it('should convert address object to a string', function () {
var mb = new Buildmail();
expect(mb._convertAddresses([{
name: 'Jõgeva Ants',
address: 'ants@jõgeva.ee'
}, {
name: 'Composers',
group: [{
address: '[email protected]',
name: 'Bach, Sebastian'
}, {
address: '[email protected]',
name: 'Mozzie'
}]
}])).to.equal('=?UTF-8?Q?J=C3=B5geva_Ants?= <[email protected]>, Composers:"Bach, Sebastian" <[email protected]>, Mozzie <[email protected]>;');
});
it('should keep ascii name as is', function () {
var mb = new Buildmail();
expect(mb._convertAddresses([{
name: 'O\'Vigala Sass',
address: '[email protected]'
}])).to.equal('O\'Vigala Sass <[email protected]>');
});
it('should include name in quotes for special symbols', function () {
var mb = new Buildmail();
expect(mb._convertAddresses([{
name: 'Sass, Vigala',
address: '[email protected]'
}])).to.equal('"Sass, Vigala" <[email protected]>');
});
it('should escape quotes', function () {
var mb = new Buildmail();
expect(mb._convertAddresses([{
name: '"Vigala Sass"',
address: '[email protected]'
}])).to.equal('"\\"Vigala Sass\\"" <[email protected]>');
});
it('should mime encode unicode names', function () {
var mb = new Buildmail();
expect(mb._convertAddresses([{
name: '"Jõgeva Sass"',
address: '[email protected]'
}])).to.equal('=?UTF-8?Q?=22J=C3=B5geva_Sass=22?= <[email protected]>');
});
});
describe('#_generateMessageId', function () {
it('should generate uuid-looking message-id', function () {
var mb = new Buildmail();
var mid = mb._generateMessageId();
expect(/^<[0-9a-f]{8}\-[0-9a-f]{4}\-[0-9a-f]{4}\-[0-9a-f]{4}\-[0-9a-f]{12}@.*>/.test(mid)).to.be.true;
});
});
describe('Attachment streaming', function () {
var port = 10337;
var server;
beforeEach(function (done) {
server = http.createServer(function (req, res) {
res.writeHead(200, {
'Content-Type': 'text/plain'
});
var data = new Buffer(new Array(1024 + 1).join('ä'), 'utf-8');
var i = 0;
var sendByte = function () {
if (i >= data.length) {
return res.end();
}
res.write(new Buffer([data[i++]]));
setImmediate(sendByte);
};
sendByte();
});
server.listen(port, done);
});
afterEach(function (done) {
server.close(done);
});
it('should pipe URL as an attachment', function (done) {
var mb = new Buildmail('text/plain').
setContent({
href: 'http://localhost:' + port
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^=C3=A4/m.test(msg)).to.be.true;
done();
});
});
it('should reject URL attachment', function (done) {
var mb = new Buildmail('text/plain', {disableUrlAccess: true}).
setContent({
href: 'http://localhost:' + port
});
mb.build(function (err, msg) {
expect(err).to.exist;
expect(msg).to.not.exist;
done();
});
});
it('should return an error on invalid url', function (done) {
var mb = new Buildmail('text/plain').
setContent({
href: 'http://__should_not_exist:58888'
});
mb.build(function (err) {
expect(err).to.exist;
done();
});
});
it('should pipe file as an attachment', function (done) {
var mb = new Buildmail('application/octet-stream').
setContent({
path: __dirname + '/fixtures/attachment.bin'
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(/^w7VrdmEK$/m.test(msg)).to.be.true;
done();
});
});
it('should reject file as an attachment', function (done) {
var mb = new Buildmail('application/octet-stream', {disableFileAccess: true}).
setContent({
path: __dirname + '/fixtures/attachment.bin'
});
mb.build(function (err, msg) {
expect(err).to.exist;
expect(msg).to.not.exist;
done();
});
});
it('should return an error on invalid file path', function (done) {
var mb = new Buildmail('text/plain').
setContent({
path: '/ASfsdfsdf/Sdgsgdfg/SDFgdfgdfg'
});
mb.build(function (err) {
expect(err).to.exist;
done();
});
});
it('should return an error for an errored stream', function (done) {
var s = new PassThrough();
var mb = new Buildmail('text/plain').
setContent(s);
s.write('abc');
s.emit('error', new Error('Stream error'));
setTimeout(function () {
mb.build(function (err) {
expect(err).to.exist;
done();
});
}, 100);
});
it('should return a stream error', function (done) {
var s = new PassThrough();
var mb = new Buildmail('text/plain').
setContent(s);
mb.build(function (err) {
expect(err).to.exist;
done();
});
s.write('abc');
setTimeout(function () {
s.emit('error', new Error('Stream error'));
}, 100);
});
});
describe('#transform', function () {
it('should pipe through provided stream', function (done) {
var mb = new Buildmail('text/plain').
setHeader({
date: '12345',
'message-id': '67890'
}).
setContent('Hello world!');
var expected = 'Content-Type:\ttext/plain\r\n' +
'Date:\t12345\r\n' +
'Message-ID:\t<67890>\r\n' +
'Content-Transfer-Encoding:\t7bit\r\n' +
'MIME-Version:\t1.0\r\n' +
'\r\n' +
'Hello\tworld!';
// Transform stream that replaces all spaces with tabs
var transform = new Transform();
transform._transform = function (chunk, encoding, done) {
if (encoding !== 'buffer') {
chunk = new Buffer(chunk, encoding);
}
for (var i = 0, len = chunk.length; i < len; i++) {
if (chunk[i] === 0x20) {
chunk[i] = 0x09;
}
}
this.push(chunk);
done();
};
mb.transform(transform);
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
});
describe('Raw content', function () {
it('should return pregenerated content', function (done) {
var expected = new Array(100).join('Test\n');
var mb = new Buildmail('text/plain').setRaw(expected);
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should return pregenerated content for a child node', function (done) {
var expected = new Array(100).join('Test\n');
var mb = new Buildmail('multipart/mixed', {
baseBoundary: 'test'
}).setHeader({
date: '12345',
'message-id': '67890'
});
var child = mb.createChild();
child.setRaw(expected);
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal('Content-Type: multipart/mixed; boundary="----sinikael-?=_1-test"\r\n' +
'Date: 12345\r\n' +
'Message-ID: <67890>\r\n' +
'MIME-Version: 1.0\r\n' +
'\r\n' +
'------sinikael-?=_1-test\r\n' +
expected +
'\r\n' +
'------sinikael-?=_1-test--\r\n');
done();
});
});
it('should return pregenerated content from a stream', function (done) {
var expected = new Array(100).join('Test\n');
var raw = new PassThrough();
var mb = new Buildmail('text/plain').setRaw(raw);
setImmediate(function () {
raw.end(expected);
});
mb.build(function (err, msg) {
expect(err).to.not.exist;
msg = msg.toString();
expect(msg).to.equal(expected);
done();
});
});
it('should catch error from a raw stream 1', function (done) {
var raw = new PassThrough();
var mb = new Buildmail('text/plain').setRaw(raw);
raw.emit('error', new Error('Stream error'));
mb.build(function (err) {
expect(err).to.exist;
done();
});
});
it('should catch error from a raw stream 2', function (done) {
var raw = new PassThrough();
var mb = new Buildmail('text/plain').setRaw(raw);
mb.build(function (err) {
expect(err).to.exist;
done();
});
setImmediate(function () {
raw.emit('error', new Error('Stream error'));
});
});
});
});
/**_________________________________________________________________
class: RawPCCProducer.cc
description: Creates a LumiInfo object that will contain the luminosity per bunch crossing,
along with the total luminosity and the statistical error.
authors: Sam Higginbotham ([email protected]) and Chris Palmer ([email protected])
________________________________________________________________**/
#include <string>
#include <iostream>
#include <fstream>
#include <vector>
#include <mutex>
#include <cmath>
#include "DataFormats/Luminosity/interface/PixelClusterCounts.h"
#include "DataFormats/Luminosity/interface/LumiInfo.h"
#include "DataFormats/Luminosity/interface/LumiConstants.h"
#include "CondFormats/Luminosity/interface/LumiCorrections.h"
#include "CondFormats/DataRecord/interface/LumiCorrectionsRcd.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/Framework/interface/ConsumesCollector.h"
#include "FWCore/Framework/interface/ESHandle.h"
#include "FWCore/Framework/interface/EventSetup.h"
#include "FWCore/Framework/interface/Frameworkfwd.h"
#include "FWCore/Framework/interface/global/EDProducer.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/Utilities/interface/EDGetToken.h"
#include "FWCore/ServiceRegistry/interface/Service.h"
#include "FWCore/Framework/interface/LuminosityBlock.h"
class RawPCCProducer : public edm::global::EDProducer<edm::EndLuminosityBlockProducer> {
public:
explicit RawPCCProducer(const edm::ParameterSet&);
~RawPCCProducer() override;
private:
void globalEndLuminosityBlockProduce(edm::LuminosityBlock& lumiSeg, const edm::EventSetup& iSetup) const final;
void produce(edm::StreamID, edm::Event& iEvent, const edm::EventSetup& iSetup) const final;
edm::EDGetTokenT<reco::PixelClusterCounts> pccToken_;
const edm::EDPutTokenT<LumiInfo> putToken_;
const std::string takeAverageValue_; //Output average values
const std::vector<int> modVeto_; //The list of modules to skip in the lumi calc.
const std::string csvOutLabel_;
mutable std::mutex fileLock_;
const bool saveCSVFile_;
const bool applyCorr_;
};
//--------------------------------------------------------------------------------------------------
RawPCCProducer::RawPCCProducer(const edm::ParameterSet& iConfig)
: putToken_{produces<LumiInfo, edm::Transition::EndLuminosityBlock>(
iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters")
.getUntrackedParameter<std::string>("outputProductName", "alcaLumi"))},
takeAverageValue_{iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters")
.getUntrackedParameter<std::string>("OutputValue", std::string("Totals"))},
modVeto_{
iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters").getParameter<std::vector<int>>("modVeto")},
csvOutLabel_{iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters")
.getUntrackedParameter<std::string>("label", std::string("rawPCC.csv"))},
saveCSVFile_{iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters")
.getUntrackedParameter<bool>("saveCSVFile", false)},
applyCorr_{iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters")
.getUntrackedParameter<bool>("ApplyCorrections", false)} {
auto pccSource =
iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters").getParameter<std::string>("inputPccLabel");
auto prodInst =
iConfig.getParameter<edm::ParameterSet>("RawPCCProducerParameters").getParameter<std::string>("ProdInst");
pccToken_ = consumes<reco::PixelClusterCounts, edm::InLumi>(edm::InputTag(pccSource, prodInst));
}
//--------------------------------------------------------------------------------------------------
RawPCCProducer::~RawPCCProducer() {}
//--------------------------------------------------------------------------------------------------
void RawPCCProducer::produce(edm::StreamID, edm::Event& iEvent, const edm::EventSetup& iSetup) const {}
//--------------------------------------------------------------------------------------------------
void RawPCCProducer::globalEndLuminosityBlockProduce(edm::LuminosityBlock& lumiSeg,
const edm::EventSetup& iSetup) const {
float totalLumi = 0.0; //The total raw luminosity from the pixel clusters - not scaled
  float statErrOnLumi = 0.0;  //the statistical error on the lumi - initially the total event count N; error scales as sqrt(N)
//new vector containing clusters per bxid
std::vector<int> clustersPerBXOutput(LumiConstants::numBX, 0);
//new vector containing clusters per bxid with afterglow corrections
std::vector<float> corrClustersPerBXOutput(LumiConstants::numBX, 0);
  //The indices of all the good modules - not vetoed
std::vector<int> goodMods;
edm::Handle<reco::PixelClusterCounts> pccHandle;
lumiSeg.getByToken(pccToken_, pccHandle);
const reco::PixelClusterCounts& inputPcc = *(pccHandle.product());
  //vector with module IDs; maps 1-1 to the per-bunch-crossing cluster counts
auto modID = inputPcc.readModID();
//vector with total events at each bxid.
auto events = inputPcc.readEvents();
auto clustersPerBXInput = inputPcc.readCounts();
//making list of modules to sum over
for (unsigned int i = 0; i < modID.size(); i++) {
if (std::find(modVeto_.begin(), modVeto_.end(), modID.at(i)) == modVeto_.end()) {
goodMods.push_back(i);
}
}
//summing over good modules only
for (int bx = 0; bx < int(LumiConstants::numBX); bx++) {
for (unsigned int i = 0; i < goodMods.size(); i++) {
clustersPerBXOutput.at(bx) += clustersPerBXInput.at(goodMods.at(i) * int(LumiConstants::numBX) + bx);
}
}
std::vector<float> correctionScaleFactors;
if (applyCorr_) {
edm::ESHandle<LumiCorrections> corrHandle;
iSetup.get<LumiCorrectionsRcd>().get(corrHandle);
const LumiCorrections* pccCorrections = corrHandle.product();
correctionScaleFactors = pccCorrections->getCorrectionsBX();
} else {
correctionScaleFactors.resize(LumiConstants::numBX, 1.0);
}
for (unsigned int i = 0; i < clustersPerBXOutput.size(); i++) {
if (events.at(i) != 0) {
corrClustersPerBXOutput[i] = clustersPerBXOutput[i] * correctionScaleFactors[i];
} else {
corrClustersPerBXOutput[i] = 0.0;
}
totalLumi += corrClustersPerBXOutput[i];
statErrOnLumi += float(events[i]);
}
std::vector<float> errorPerBX; //Stat error (or number of events)
errorPerBX.assign(events.begin(), events.end());
if (takeAverageValue_ == "Average") {
unsigned int NActiveBX = 0;
for (int bx = 0; bx < int(LumiConstants::numBX); bx++) {
if (events[bx] > 0) {
NActiveBX++;
// Counting where events are will only work
// for ZeroBias or AlwaysTrue triggers.
// Random triggers will get all BXs.
corrClustersPerBXOutput[bx] /= float(events[bx]);
errorPerBX[bx] = 1 / sqrt(float(events[bx]));
}
}
if (statErrOnLumi != 0) {
totalLumi = totalLumi / statErrOnLumi * float(NActiveBX);
statErrOnLumi = 1 / sqrt(statErrOnLumi) * totalLumi;
}
}
LumiInfo outputLumiInfo;
outputLumiInfo.setTotalInstLumi(totalLumi);
outputLumiInfo.setTotalInstStatError(statErrOnLumi);
outputLumiInfo.setErrorLumiAllBX(errorPerBX);
outputLumiInfo.setInstLumiAllBX(corrClustersPerBXOutput);
if (saveCSVFile_) {
std::lock_guard<std::mutex> lock(fileLock_);
std::ofstream csfile(csvOutLabel_, std::ios_base::app);
csfile << std::to_string(lumiSeg.run()) << ",";
csfile << std::to_string(lumiSeg.luminosityBlock()) << ",";
csfile << std::to_string(totalLumi);
if (totalLumi > 0) {
for (unsigned int bx = 0; bx < LumiConstants::numBX; bx++) {
csfile << "," << std::to_string(corrClustersPerBXOutput[bx]);
}
csfile << std::endl;
} else if (totalLumi < 0) {
      edm::LogInfo("WARNING") << "Total luminosity is negative: " << totalLumi;
}
csfile.close();
}
lumiSeg.emplace(putToken_, std::move(outputLumiInfo));
}
DEFINE_FWK_MODULE(RawPCCProducer);
/* ssl/tls1.h */
/* Copyright (C) 1995-1998 Eric Young ([email protected])
* All rights reserved.
*
* This package is an SSL implementation written
* by Eric Young ([email protected]).
* The implementation was written so as to conform with Netscapes SSL.
*
* This library is free for commercial and non-commercial use as long as
* the following conditions are aheared to. The following conditions
* apply to all code found in this distribution, be it the RC4, RSA,
* lhash, DES, etc., code; not just the SSL code. The SSL documentation
* included with this distribution is covered by the same copyright terms
* except that the holder is Tim Hudson ([email protected]).
*
* Copyright remains Eric Young's, and as such any Copyright notices in
* the code are not to be removed.
* If this package is used in a product, Eric Young should be given attribution
* as the author of the parts of the library used.
* This can be in the form of a textual message at program startup or
* in documentation (online or textual) provided with the package.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* "This product includes cryptographic software written by
* Eric Young ([email protected])"
* The word 'cryptographic' can be left out if the rouines from the library
* being used are not cryptographic related :-).
* 4. If you include any Windows specific code (or a derivative thereof) from
* the apps directory (application code) you must include an acknowledgement:
* "This product includes software written by Tim Hudson ([email protected])"
*
* THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* The licence and distribution terms for any publically available version or
* derivative of this code cannot be changed. i.e. this code cannot simply be
* copied and put under another distribution licence
* [including the GNU Public Licence.]
*/
/* ====================================================================
* Copyright (c) 1998-2006 The OpenSSL Project. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
*
* 3. All advertising materials mentioning features or use of this
* software must display the following acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
*
* 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
* endorse or promote products derived from this software without
* prior written permission. For written permission, please contact
* [email protected].
*
* 5. Products derived from this software may not be called "OpenSSL"
* nor may "OpenSSL" appear in their names without prior written
* permission of the OpenSSL Project.
*
* 6. Redistributions of any form whatsoever must retain the following
* acknowledgment:
* "This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
*
* THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
* EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
* ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* ====================================================================
*
* This product includes cryptographic software written by Eric Young
* ([email protected]). This product includes software written by Tim
* Hudson ([email protected]).
*
*/
/* ====================================================================
* Copyright 2002 Sun Microsystems, Inc. ALL RIGHTS RESERVED.
*
* Portions of the attached software ("Contribution") are developed by
* SUN MICROSYSTEMS, INC., and are contributed to the OpenSSL project.
*
* The Contribution is licensed pursuant to the OpenSSL open source
* license provided above.
*
* ECC cipher suite support in OpenSSL originally written by
* Vipul Gupta and Sumit Gupta of Sun Microsystems Laboratories.
*
*/
/* ====================================================================
* Copyright 2005 Nokia. All rights reserved.
*
* The portions of the attached software ("Contribution") is developed by
* Nokia Corporation and is licensed pursuant to the OpenSSL open source
* license.
*
* The Contribution, originally written by Mika Kousa and Pasi Eronen of
* Nokia Corporation, consists of the "PSK" (Pre-Shared Key) ciphersuites
* support (see RFC 4279) to OpenSSL.
*
* No patent licenses or other rights except those expressly stated in
* the OpenSSL open source license shall be deemed granted or received
* expressly, by implication, estoppel, or otherwise.
*
* No assurances are provided by Nokia that the Contribution does not
* infringe the patent or other intellectual property rights of any third
* party or that the license provides you with all the necessary rights
* to make use of the Contribution.
*
* THE SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND. IN
* ADDITION TO THE DISCLAIMERS INCLUDED IN THE LICENSE, NOKIA
* SPECIFICALLY DISCLAIMS ANY LIABILITY FOR CLAIMS BROUGHT BY YOU OR ANY
* OTHER ENTITY BASED ON INFRINGEMENT OF INTELLECTUAL PROPERTY RIGHTS OR
* OTHERWISE.
*/
#ifndef HEADER_TLS1_H
# define HEADER_TLS1_H
# include <openssl/buffer.h>
#ifdef __cplusplus
extern "C" {
#endif
# define TLS1_ALLOW_EXPERIMENTAL_CIPHERSUITES 0
# define TLS1_VERSION 0x0301
# define TLS1_1_VERSION 0x0302
# define TLS1_2_VERSION 0x0303
# define TLS_MAX_VERSION TLS1_2_VERSION
# define TLS1_VERSION_MAJOR 0x03
# define TLS1_VERSION_MINOR 0x01
# define TLS1_1_VERSION_MAJOR 0x03
# define TLS1_1_VERSION_MINOR 0x02
# define TLS1_2_VERSION_MAJOR 0x03
# define TLS1_2_VERSION_MINOR 0x03
# define TLS1_get_version(s) \
((s->version >> 8) == TLS1_VERSION_MAJOR ? s->version : 0)
# define TLS1_get_client_version(s) \
((s->client_version >> 8) == TLS1_VERSION_MAJOR ? s->client_version : 0)
# define TLS1_AD_DECRYPTION_FAILED 21
# define TLS1_AD_RECORD_OVERFLOW 22
# define TLS1_AD_UNKNOWN_CA 48/* fatal */
# define TLS1_AD_ACCESS_DENIED 49/* fatal */
# define TLS1_AD_DECODE_ERROR 50/* fatal */
# define TLS1_AD_DECRYPT_ERROR 51
# define TLS1_AD_EXPORT_RESTRICTION 60/* fatal */
# define TLS1_AD_PROTOCOL_VERSION 70/* fatal */
# define TLS1_AD_INSUFFICIENT_SECURITY 71/* fatal */
# define TLS1_AD_INTERNAL_ERROR 80/* fatal */
# define TLS1_AD_INAPPROPRIATE_FALLBACK 86/* fatal */
# define TLS1_AD_USER_CANCELLED 90
# define TLS1_AD_NO_RENEGOTIATION 100
/* codes 110-114 are from RFC3546 */
# define TLS1_AD_UNSUPPORTED_EXTENSION 110
# define TLS1_AD_CERTIFICATE_UNOBTAINABLE 111
# define TLS1_AD_UNRECOGNIZED_NAME 112
# define TLS1_AD_BAD_CERTIFICATE_STATUS_RESPONSE 113
# define TLS1_AD_BAD_CERTIFICATE_HASH_VALUE 114
# define TLS1_AD_UNKNOWN_PSK_IDENTITY 115/* fatal */
/* ExtensionType values from RFC3546 / RFC4366 / RFC6066 */
# define TLSEXT_TYPE_server_name 0
# define TLSEXT_TYPE_max_fragment_length 1
# define TLSEXT_TYPE_client_certificate_url 2
# define TLSEXT_TYPE_trusted_ca_keys 3
# define TLSEXT_TYPE_truncated_hmac 4
# define TLSEXT_TYPE_status_request 5
/* ExtensionType values from RFC4681 */
# define TLSEXT_TYPE_user_mapping 6
/* ExtensionType values from RFC5878 */
# define TLSEXT_TYPE_client_authz 7
# define TLSEXT_TYPE_server_authz 8
/* ExtensionType values from RFC6091 */
# define TLSEXT_TYPE_cert_type 9
/* ExtensionType values from RFC4492 */
# define TLSEXT_TYPE_elliptic_curves 10
# define TLSEXT_TYPE_ec_point_formats 11
/* ExtensionType value from RFC5054 */
# define TLSEXT_TYPE_srp 12
/* ExtensionType values from RFC5246 */
# define TLSEXT_TYPE_signature_algorithms 13
/* ExtensionType value from RFC5764 */
# define TLSEXT_TYPE_use_srtp 14
/* ExtensionType value from RFC5620 */
# define TLSEXT_TYPE_heartbeat 15
/* ExtensionType value from RFC7301 */
# define TLSEXT_TYPE_application_layer_protocol_negotiation 16
/*
* ExtensionType value for TLS padding extension.
* http://tools.ietf.org/html/draft-agl-tls-padding
*/
# define TLSEXT_TYPE_padding 21
/* ExtensionType value from RFC4507 */
# define TLSEXT_TYPE_session_ticket 35
/* ExtensionType value from draft-rescorla-tls-opaque-prf-input-00.txt */
# if 0
/*
 * will have to be provided externally for now,
* i.e. build with -DTLSEXT_TYPE_opaque_prf_input=38183
* using whatever extension number you'd like to try
*/
# define TLSEXT_TYPE_opaque_prf_input ??
# endif
/* Temporary extension type */
# define TLSEXT_TYPE_renegotiate 0xff01
# ifndef OPENSSL_NO_NEXTPROTONEG
/* This is not an IANA defined extension number */
# define TLSEXT_TYPE_next_proto_neg 13172
# endif
/* NameType value from RFC3546 */
# define TLSEXT_NAMETYPE_host_name 0
/* status request value from RFC3546 */
# define TLSEXT_STATUSTYPE_ocsp 1
/* ECPointFormat values from RFC4492 */
# define TLSEXT_ECPOINTFORMAT_first 0
# define TLSEXT_ECPOINTFORMAT_uncompressed 0
# define TLSEXT_ECPOINTFORMAT_ansiX962_compressed_prime 1
# define TLSEXT_ECPOINTFORMAT_ansiX962_compressed_char2 2
# define TLSEXT_ECPOINTFORMAT_last 2
/* Signature and hash algorithms from RFC5246 */
# define TLSEXT_signature_anonymous 0
# define TLSEXT_signature_rsa 1
# define TLSEXT_signature_dsa 2
# define TLSEXT_signature_ecdsa 3
/* Total number of different signature algorithms */
# define TLSEXT_signature_num 4
# define TLSEXT_hash_none 0
# define TLSEXT_hash_md5 1
# define TLSEXT_hash_sha1 2
# define TLSEXT_hash_sha224 3
# define TLSEXT_hash_sha256 4
# define TLSEXT_hash_sha384 5
# define TLSEXT_hash_sha512 6
/* Total number of different digest algorithms */
# define TLSEXT_hash_num 7
/* Flag set for unrecognised algorithms */
# define TLSEXT_nid_unknown 0x1000000
/* ECC curves */
# define TLSEXT_curve_P_256 23
# define TLSEXT_curve_P_384 24
# ifndef OPENSSL_NO_TLSEXT
# define TLSEXT_MAXLEN_host_name 255
const char *SSL_get_servername(const SSL *s, const int type);
int SSL_get_servername_type(const SSL *s);
/*
* SSL_export_keying_material exports a value derived from the master secret,
* as specified in RFC 5705. It writes |olen| bytes to |out| given a label and
* optional context. (Since a zero length context is allowed, the |use_context|
* flag controls whether a context is included.) It returns 1 on success and
* zero otherwise.
*/
int SSL_export_keying_material(SSL *s, unsigned char *out, size_t olen,
const char *label, size_t llen,
const unsigned char *p, size_t plen,
int use_context);
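/*
 * A minimal usage sketch for SSL_export_keying_material, assuming an
 * already-established SSL connection |ssl|; the label string and output
 * length below are illustrative choices by the caller, not part of this
 * header.
 *
 *     unsigned char out[32];
 *     const char label[] = "EXPERIMENTAL my-app keying material";
 *     if (SSL_export_keying_material(ssl, out, sizeof(out),
 *                                    label, sizeof(label) - 1,
 *                                    NULL, 0, 0) != 1) {
 *         // handle failure
 *     }
 */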
int SSL_get_sigalgs(SSL *s, int idx,
int *psign, int *phash, int *psignandhash,
unsigned char *rsig, unsigned char *rhash);
int SSL_get_shared_sigalgs(SSL *s, int idx,
int *psign, int *phash, int *psignandhash,
unsigned char *rsig, unsigned char *rhash);
int SSL_check_chain(SSL *s, X509 *x, EVP_PKEY *pk, STACK_OF(X509) *chain);
# define SSL_set_tlsext_host_name(s,name) \
SSL_ctrl(s,SSL_CTRL_SET_TLSEXT_HOSTNAME,TLSEXT_NAMETYPE_host_name,(char *)name)
# define SSL_set_tlsext_debug_callback(ssl, cb) \
SSL_callback_ctrl(ssl,SSL_CTRL_SET_TLSEXT_DEBUG_CB,(void (*)(void))cb)
# define SSL_set_tlsext_debug_arg(ssl, arg) \
SSL_ctrl(ssl,SSL_CTRL_SET_TLSEXT_DEBUG_ARG,0, (void *)arg)
# define SSL_set_tlsext_status_type(ssl, type) \
SSL_ctrl(ssl,SSL_CTRL_SET_TLSEXT_STATUS_REQ_TYPE,type, NULL)
# define SSL_get_tlsext_status_exts(ssl, arg) \
SSL_ctrl(ssl,SSL_CTRL_GET_TLSEXT_STATUS_REQ_EXTS,0, (void *)arg)
# define SSL_set_tlsext_status_exts(ssl, arg) \
SSL_ctrl(ssl,SSL_CTRL_SET_TLSEXT_STATUS_REQ_EXTS,0, (void *)arg)
# define SSL_get_tlsext_status_ids(ssl, arg) \
SSL_ctrl(ssl,SSL_CTRL_GET_TLSEXT_STATUS_REQ_IDS,0, (void *)arg)
# define SSL_set_tlsext_status_ids(ssl, arg) \
SSL_ctrl(ssl,SSL_CTRL_SET_TLSEXT_STATUS_REQ_IDS,0, (void *)arg)
# define SSL_get_tlsext_status_ocsp_resp(ssl, arg) \
SSL_ctrl(ssl,SSL_CTRL_GET_TLSEXT_STATUS_REQ_OCSP_RESP,0, (void *)arg)
# define SSL_set_tlsext_status_ocsp_resp(ssl, arg, arglen) \
SSL_ctrl(ssl,SSL_CTRL_SET_TLSEXT_STATUS_REQ_OCSP_RESP,arglen, (void *)arg)
# define SSL_CTX_set_tlsext_servername_callback(ctx, cb) \
SSL_CTX_callback_ctrl(ctx,SSL_CTRL_SET_TLSEXT_SERVERNAME_CB,(void (*)(void))cb)
# define SSL_TLSEXT_ERR_OK 0
# define SSL_TLSEXT_ERR_ALERT_WARNING 1
# define SSL_TLSEXT_ERR_ALERT_FATAL 2
# define SSL_TLSEXT_ERR_NOACK 3
# define SSL_CTX_set_tlsext_servername_arg(ctx, arg) \
SSL_CTX_ctrl(ctx,SSL_CTRL_SET_TLSEXT_SERVERNAME_ARG,0, (void *)arg)
# define SSL_CTX_get_tlsext_ticket_keys(ctx, keys, keylen) \
SSL_CTX_ctrl((ctx),SSL_CTRL_GET_TLSEXT_TICKET_KEYS,(keylen),(keys))
# define SSL_CTX_set_tlsext_ticket_keys(ctx, keys, keylen) \
SSL_CTX_ctrl((ctx),SSL_CTRL_SET_TLSEXT_TICKET_KEYS,(keylen),(keys))
# define SSL_CTX_set_tlsext_status_cb(ssl, cb) \
SSL_CTX_callback_ctrl(ssl,SSL_CTRL_SET_TLSEXT_STATUS_REQ_CB,(void (*)(void))cb)
# define SSL_CTX_set_tlsext_status_arg(ssl, arg) \
SSL_CTX_ctrl(ssl,SSL_CTRL_SET_TLSEXT_STATUS_REQ_CB_ARG,0, (void *)arg)
# define SSL_set_tlsext_opaque_prf_input(s, src, len) \
SSL_ctrl(s,SSL_CTRL_SET_TLSEXT_OPAQUE_PRF_INPUT, len, src)
# define SSL_CTX_set_tlsext_opaque_prf_input_callback(ctx, cb) \
SSL_CTX_callback_ctrl(ctx,SSL_CTRL_SET_TLSEXT_OPAQUE_PRF_INPUT_CB, (void (*)(void))cb)
# define SSL_CTX_set_tlsext_opaque_prf_input_callback_arg(ctx, arg) \
SSL_CTX_ctrl(ctx,SSL_CTRL_SET_TLSEXT_OPAQUE_PRF_INPUT_CB_ARG, 0, arg)
# define SSL_CTX_set_tlsext_ticket_key_cb(ssl, cb) \
SSL_CTX_callback_ctrl(ssl,SSL_CTRL_SET_TLSEXT_TICKET_KEY_CB,(void (*)(void))cb)
# ifndef OPENSSL_NO_HEARTBEATS
# define SSL_TLSEXT_HB_ENABLED 0x01
# define SSL_TLSEXT_HB_DONT_SEND_REQUESTS 0x02
# define SSL_TLSEXT_HB_DONT_RECV_REQUESTS 0x04
# define SSL_get_tlsext_heartbeat_pending(ssl) \
SSL_ctrl((ssl),SSL_CTRL_GET_TLS_EXT_HEARTBEAT_PENDING,0,NULL)
# define SSL_set_tlsext_heartbeat_no_requests(ssl, arg) \
SSL_ctrl((ssl),SSL_CTRL_SET_TLS_EXT_HEARTBEAT_NO_REQUESTS,arg,NULL)
# endif
# endif
/* PSK ciphersuites from 4279 */
# define TLS1_CK_PSK_WITH_RC4_128_SHA 0x0300008A
# define TLS1_CK_PSK_WITH_3DES_EDE_CBC_SHA 0x0300008B
# define TLS1_CK_PSK_WITH_AES_128_CBC_SHA 0x0300008C
# define TLS1_CK_PSK_WITH_AES_256_CBC_SHA 0x0300008D
/*
* Additional TLS ciphersuites from expired Internet Draft
* draft-ietf-tls-56-bit-ciphersuites-01.txt (available if
* TLS1_ALLOW_EXPERIMENTAL_CIPHERSUITES is defined, see s3_lib.c). We
* actually treat them like SSL 3.0 ciphers, which we probably shouldn't.
* Note that the first two are actually not in the IDs.
*/
# define TLS1_CK_RSA_EXPORT1024_WITH_RC4_56_MD5 0x03000060/* not in
* ID */
# define TLS1_CK_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5 0x03000061/* not in
* ID */
# define TLS1_CK_RSA_EXPORT1024_WITH_DES_CBC_SHA 0x03000062
# define TLS1_CK_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA 0x03000063
# define TLS1_CK_RSA_EXPORT1024_WITH_RC4_56_SHA 0x03000064
# define TLS1_CK_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA 0x03000065
# define TLS1_CK_DHE_DSS_WITH_RC4_128_SHA 0x03000066
/* AES ciphersuites from RFC3268 */
# define TLS1_CK_RSA_WITH_AES_128_SHA 0x0300002F
# define TLS1_CK_DH_DSS_WITH_AES_128_SHA 0x03000030
# define TLS1_CK_DH_RSA_WITH_AES_128_SHA 0x03000031
# define TLS1_CK_DHE_DSS_WITH_AES_128_SHA 0x03000032
# define TLS1_CK_DHE_RSA_WITH_AES_128_SHA 0x03000033
# define TLS1_CK_ADH_WITH_AES_128_SHA 0x03000034
# define TLS1_CK_RSA_WITH_AES_256_SHA 0x03000035
# define TLS1_CK_DH_DSS_WITH_AES_256_SHA 0x03000036
# define TLS1_CK_DH_RSA_WITH_AES_256_SHA 0x03000037
# define TLS1_CK_DHE_DSS_WITH_AES_256_SHA 0x03000038
# define TLS1_CK_DHE_RSA_WITH_AES_256_SHA 0x03000039
# define TLS1_CK_ADH_WITH_AES_256_SHA 0x0300003A
/* TLS v1.2 ciphersuites */
# define TLS1_CK_RSA_WITH_NULL_SHA256 0x0300003B
# define TLS1_CK_RSA_WITH_AES_128_SHA256 0x0300003C
# define TLS1_CK_RSA_WITH_AES_256_SHA256 0x0300003D
# define TLS1_CK_DH_DSS_WITH_AES_128_SHA256 0x0300003E
# define TLS1_CK_DH_RSA_WITH_AES_128_SHA256 0x0300003F
# define TLS1_CK_DHE_DSS_WITH_AES_128_SHA256 0x03000040
/* Camellia ciphersuites from RFC4132 */
# define TLS1_CK_RSA_WITH_CAMELLIA_128_CBC_SHA 0x03000041
# define TLS1_CK_DH_DSS_WITH_CAMELLIA_128_CBC_SHA 0x03000042
# define TLS1_CK_DH_RSA_WITH_CAMELLIA_128_CBC_SHA 0x03000043
# define TLS1_CK_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA 0x03000044
# define TLS1_CK_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA 0x03000045
# define TLS1_CK_ADH_WITH_CAMELLIA_128_CBC_SHA 0x03000046
/* TLS v1.2 ciphersuites */
# define TLS1_CK_DHE_RSA_WITH_AES_128_SHA256 0x03000067
# define TLS1_CK_DH_DSS_WITH_AES_256_SHA256 0x03000068
# define TLS1_CK_DH_RSA_WITH_AES_256_SHA256 0x03000069
# define TLS1_CK_DHE_DSS_WITH_AES_256_SHA256 0x0300006A
# define TLS1_CK_DHE_RSA_WITH_AES_256_SHA256 0x0300006B
# define TLS1_CK_ADH_WITH_AES_128_SHA256 0x0300006C
# define TLS1_CK_ADH_WITH_AES_256_SHA256 0x0300006D
/* Camellia ciphersuites from RFC4132 */
# define TLS1_CK_RSA_WITH_CAMELLIA_256_CBC_SHA 0x03000084
# define TLS1_CK_DH_DSS_WITH_CAMELLIA_256_CBC_SHA 0x03000085
# define TLS1_CK_DH_RSA_WITH_CAMELLIA_256_CBC_SHA 0x03000086
# define TLS1_CK_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA 0x03000087
# define TLS1_CK_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA 0x03000088
# define TLS1_CK_ADH_WITH_CAMELLIA_256_CBC_SHA 0x03000089
/* SEED ciphersuites from RFC4162 */
# define TLS1_CK_RSA_WITH_SEED_SHA 0x03000096
# define TLS1_CK_DH_DSS_WITH_SEED_SHA 0x03000097
# define TLS1_CK_DH_RSA_WITH_SEED_SHA 0x03000098
# define TLS1_CK_DHE_DSS_WITH_SEED_SHA 0x03000099
# define TLS1_CK_DHE_RSA_WITH_SEED_SHA 0x0300009A
# define TLS1_CK_ADH_WITH_SEED_SHA 0x0300009B
/* TLS v1.2 GCM ciphersuites from RFC5288 */
# define TLS1_CK_RSA_WITH_AES_128_GCM_SHA256 0x0300009C
# define TLS1_CK_RSA_WITH_AES_256_GCM_SHA384 0x0300009D
# define TLS1_CK_DHE_RSA_WITH_AES_128_GCM_SHA256 0x0300009E
# define TLS1_CK_DHE_RSA_WITH_AES_256_GCM_SHA384 0x0300009F
# define TLS1_CK_DH_RSA_WITH_AES_128_GCM_SHA256 0x030000A0
# define TLS1_CK_DH_RSA_WITH_AES_256_GCM_SHA384 0x030000A1
# define TLS1_CK_DHE_DSS_WITH_AES_128_GCM_SHA256 0x030000A2
# define TLS1_CK_DHE_DSS_WITH_AES_256_GCM_SHA384 0x030000A3
# define TLS1_CK_DH_DSS_WITH_AES_128_GCM_SHA256 0x030000A4
# define TLS1_CK_DH_DSS_WITH_AES_256_GCM_SHA384 0x030000A5
# define TLS1_CK_ADH_WITH_AES_128_GCM_SHA256 0x030000A6
# define TLS1_CK_ADH_WITH_AES_256_GCM_SHA384 0x030000A7
/*
* ECC ciphersuites from draft-ietf-tls-ecc-12.txt with changes soon to be in
* draft 13
*/
# define TLS1_CK_ECDH_ECDSA_WITH_NULL_SHA 0x0300C001
# define TLS1_CK_ECDH_ECDSA_WITH_RC4_128_SHA 0x0300C002
# define TLS1_CK_ECDH_ECDSA_WITH_DES_192_CBC3_SHA 0x0300C003
# define TLS1_CK_ECDH_ECDSA_WITH_AES_128_CBC_SHA 0x0300C004
# define TLS1_CK_ECDH_ECDSA_WITH_AES_256_CBC_SHA 0x0300C005
# define TLS1_CK_ECDHE_ECDSA_WITH_NULL_SHA 0x0300C006
# define TLS1_CK_ECDHE_ECDSA_WITH_RC4_128_SHA 0x0300C007
# define TLS1_CK_ECDHE_ECDSA_WITH_DES_192_CBC3_SHA 0x0300C008
# define TLS1_CK_ECDHE_ECDSA_WITH_AES_128_CBC_SHA 0x0300C009
# define TLS1_CK_ECDHE_ECDSA_WITH_AES_256_CBC_SHA 0x0300C00A
# define TLS1_CK_ECDH_RSA_WITH_NULL_SHA 0x0300C00B
# define TLS1_CK_ECDH_RSA_WITH_RC4_128_SHA 0x0300C00C
# define TLS1_CK_ECDH_RSA_WITH_DES_192_CBC3_SHA 0x0300C00D
# define TLS1_CK_ECDH_RSA_WITH_AES_128_CBC_SHA 0x0300C00E
# define TLS1_CK_ECDH_RSA_WITH_AES_256_CBC_SHA 0x0300C00F
# define TLS1_CK_ECDHE_RSA_WITH_NULL_SHA 0x0300C010
# define TLS1_CK_ECDHE_RSA_WITH_RC4_128_SHA 0x0300C011
# define TLS1_CK_ECDHE_RSA_WITH_DES_192_CBC3_SHA 0x0300C012
# define TLS1_CK_ECDHE_RSA_WITH_AES_128_CBC_SHA 0x0300C013
# define TLS1_CK_ECDHE_RSA_WITH_AES_256_CBC_SHA 0x0300C014
# define TLS1_CK_ECDH_anon_WITH_NULL_SHA 0x0300C015
# define TLS1_CK_ECDH_anon_WITH_RC4_128_SHA 0x0300C016
# define TLS1_CK_ECDH_anon_WITH_DES_192_CBC3_SHA 0x0300C017
# define TLS1_CK_ECDH_anon_WITH_AES_128_CBC_SHA 0x0300C018
# define TLS1_CK_ECDH_anon_WITH_AES_256_CBC_SHA 0x0300C019
/* SRP ciphersuites from RFC 5054 */
# define TLS1_CK_SRP_SHA_WITH_3DES_EDE_CBC_SHA 0x0300C01A
# define TLS1_CK_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA 0x0300C01B
# define TLS1_CK_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA 0x0300C01C
# define TLS1_CK_SRP_SHA_WITH_AES_128_CBC_SHA 0x0300C01D
# define TLS1_CK_SRP_SHA_RSA_WITH_AES_128_CBC_SHA 0x0300C01E
# define TLS1_CK_SRP_SHA_DSS_WITH_AES_128_CBC_SHA 0x0300C01F
# define TLS1_CK_SRP_SHA_WITH_AES_256_CBC_SHA 0x0300C020
# define TLS1_CK_SRP_SHA_RSA_WITH_AES_256_CBC_SHA 0x0300C021
# define TLS1_CK_SRP_SHA_DSS_WITH_AES_256_CBC_SHA 0x0300C022
/* ECDH HMAC based ciphersuites from RFC5289 */
# define TLS1_CK_ECDHE_ECDSA_WITH_AES_128_SHA256 0x0300C023
# define TLS1_CK_ECDHE_ECDSA_WITH_AES_256_SHA384 0x0300C024
# define TLS1_CK_ECDH_ECDSA_WITH_AES_128_SHA256 0x0300C025
# define TLS1_CK_ECDH_ECDSA_WITH_AES_256_SHA384 0x0300C026
# define TLS1_CK_ECDHE_RSA_WITH_AES_128_SHA256 0x0300C027
# define TLS1_CK_ECDHE_RSA_WITH_AES_256_SHA384 0x0300C028
# define TLS1_CK_ECDH_RSA_WITH_AES_128_SHA256 0x0300C029
# define TLS1_CK_ECDH_RSA_WITH_AES_256_SHA384 0x0300C02A
/* ECDH GCM based ciphersuites from RFC5289 */
# define TLS1_CK_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 0x0300C02B
# define TLS1_CK_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 0x0300C02C
# define TLS1_CK_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 0x0300C02D
# define TLS1_CK_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 0x0300C02E
# define TLS1_CK_ECDHE_RSA_WITH_AES_128_GCM_SHA256 0x0300C02F
# define TLS1_CK_ECDHE_RSA_WITH_AES_256_GCM_SHA384 0x0300C030
# define TLS1_CK_ECDH_RSA_WITH_AES_128_GCM_SHA256 0x0300C031
# define TLS1_CK_ECDH_RSA_WITH_AES_256_GCM_SHA384 0x0300C032
/*
 * XXX
 * Backward compatibility alert:
 * Older versions of OpenSSL gave some DHE ciphers names with "EDH"
 * instead of "DHE". Going forward, we should be using DHE
 * everywhere, though we may indefinitely maintain aliases for users
 * or configurations that used "EDH"
 */
# define TLS1_TXT_RSA_EXPORT1024_WITH_RC4_56_MD5 "EXP1024-RC4-MD5"
# define TLS1_TXT_RSA_EXPORT1024_WITH_RC2_CBC_56_MD5 "EXP1024-RC2-CBC-MD5"
# define TLS1_TXT_RSA_EXPORT1024_WITH_DES_CBC_SHA "EXP1024-DES-CBC-SHA"
# define TLS1_TXT_DHE_DSS_EXPORT1024_WITH_DES_CBC_SHA "EXP1024-DHE-DSS-DES-CBC-SHA"
# define TLS1_TXT_RSA_EXPORT1024_WITH_RC4_56_SHA "EXP1024-RC4-SHA"
# define TLS1_TXT_DHE_DSS_EXPORT1024_WITH_RC4_56_SHA "EXP1024-DHE-DSS-RC4-SHA"
# define TLS1_TXT_DHE_DSS_WITH_RC4_128_SHA "DHE-DSS-RC4-SHA"
/* AES ciphersuites from RFC3268 */
# define TLS1_TXT_RSA_WITH_AES_128_SHA "AES128-SHA"
# define TLS1_TXT_DH_DSS_WITH_AES_128_SHA "DH-DSS-AES128-SHA"
# define TLS1_TXT_DH_RSA_WITH_AES_128_SHA "DH-RSA-AES128-SHA"
# define TLS1_TXT_DHE_DSS_WITH_AES_128_SHA "DHE-DSS-AES128-SHA"
# define TLS1_TXT_DHE_RSA_WITH_AES_128_SHA "DHE-RSA-AES128-SHA"
# define TLS1_TXT_ADH_WITH_AES_128_SHA "ADH-AES128-SHA"
# define TLS1_TXT_RSA_WITH_AES_256_SHA "AES256-SHA"
# define TLS1_TXT_DH_DSS_WITH_AES_256_SHA "DH-DSS-AES256-SHA"
# define TLS1_TXT_DH_RSA_WITH_AES_256_SHA "DH-RSA-AES256-SHA"
# define TLS1_TXT_DHE_DSS_WITH_AES_256_SHA "DHE-DSS-AES256-SHA"
# define TLS1_TXT_DHE_RSA_WITH_AES_256_SHA "DHE-RSA-AES256-SHA"
# define TLS1_TXT_ADH_WITH_AES_256_SHA "ADH-AES256-SHA"
/* ECC ciphersuites from RFC4492 */
# define TLS1_TXT_ECDH_ECDSA_WITH_NULL_SHA "ECDH-ECDSA-NULL-SHA"
# define TLS1_TXT_ECDH_ECDSA_WITH_RC4_128_SHA "ECDH-ECDSA-RC4-SHA"
# define TLS1_TXT_ECDH_ECDSA_WITH_DES_192_CBC3_SHA "ECDH-ECDSA-DES-CBC3-SHA"
# define TLS1_TXT_ECDH_ECDSA_WITH_AES_128_CBC_SHA "ECDH-ECDSA-AES128-SHA"
# define TLS1_TXT_ECDH_ECDSA_WITH_AES_256_CBC_SHA "ECDH-ECDSA-AES256-SHA"
# define TLS1_TXT_ECDHE_ECDSA_WITH_NULL_SHA "ECDHE-ECDSA-NULL-SHA"
# define TLS1_TXT_ECDHE_ECDSA_WITH_RC4_128_SHA "ECDHE-ECDSA-RC4-SHA"
# define TLS1_TXT_ECDHE_ECDSA_WITH_DES_192_CBC3_SHA "ECDHE-ECDSA-DES-CBC3-SHA"
# define TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_CBC_SHA "ECDHE-ECDSA-AES128-SHA"
# define TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_CBC_SHA "ECDHE-ECDSA-AES256-SHA"
# define TLS1_TXT_ECDH_RSA_WITH_NULL_SHA "ECDH-RSA-NULL-SHA"
# define TLS1_TXT_ECDH_RSA_WITH_RC4_128_SHA "ECDH-RSA-RC4-SHA"
# define TLS1_TXT_ECDH_RSA_WITH_DES_192_CBC3_SHA "ECDH-RSA-DES-CBC3-SHA"
# define TLS1_TXT_ECDH_RSA_WITH_AES_128_CBC_SHA "ECDH-RSA-AES128-SHA"
# define TLS1_TXT_ECDH_RSA_WITH_AES_256_CBC_SHA "ECDH-RSA-AES256-SHA"
# define TLS1_TXT_ECDHE_RSA_WITH_NULL_SHA "ECDHE-RSA-NULL-SHA"
# define TLS1_TXT_ECDHE_RSA_WITH_RC4_128_SHA "ECDHE-RSA-RC4-SHA"
# define TLS1_TXT_ECDHE_RSA_WITH_DES_192_CBC3_SHA "ECDHE-RSA-DES-CBC3-SHA"
# define TLS1_TXT_ECDHE_RSA_WITH_AES_128_CBC_SHA "ECDHE-RSA-AES128-SHA"
# define TLS1_TXT_ECDHE_RSA_WITH_AES_256_CBC_SHA "ECDHE-RSA-AES256-SHA"
# define TLS1_TXT_ECDH_anon_WITH_NULL_SHA "AECDH-NULL-SHA"
# define TLS1_TXT_ECDH_anon_WITH_RC4_128_SHA "AECDH-RC4-SHA"
# define TLS1_TXT_ECDH_anon_WITH_DES_192_CBC3_SHA "AECDH-DES-CBC3-SHA"
# define TLS1_TXT_ECDH_anon_WITH_AES_128_CBC_SHA "AECDH-AES128-SHA"
# define TLS1_TXT_ECDH_anon_WITH_AES_256_CBC_SHA "AECDH-AES256-SHA"
/* PSK ciphersuites from RFC 4279 */
# define TLS1_TXT_PSK_WITH_RC4_128_SHA "PSK-RC4-SHA"
# define TLS1_TXT_PSK_WITH_3DES_EDE_CBC_SHA "PSK-3DES-EDE-CBC-SHA"
# define TLS1_TXT_PSK_WITH_AES_128_CBC_SHA "PSK-AES128-CBC-SHA"
# define TLS1_TXT_PSK_WITH_AES_256_CBC_SHA "PSK-AES256-CBC-SHA"
/* SRP ciphersuite from RFC 5054 */
# define TLS1_TXT_SRP_SHA_WITH_3DES_EDE_CBC_SHA "SRP-3DES-EDE-CBC-SHA"
# define TLS1_TXT_SRP_SHA_RSA_WITH_3DES_EDE_CBC_SHA "SRP-RSA-3DES-EDE-CBC-SHA"
# define TLS1_TXT_SRP_SHA_DSS_WITH_3DES_EDE_CBC_SHA "SRP-DSS-3DES-EDE-CBC-SHA"
# define TLS1_TXT_SRP_SHA_WITH_AES_128_CBC_SHA "SRP-AES-128-CBC-SHA"
# define TLS1_TXT_SRP_SHA_RSA_WITH_AES_128_CBC_SHA "SRP-RSA-AES-128-CBC-SHA"
# define TLS1_TXT_SRP_SHA_DSS_WITH_AES_128_CBC_SHA "SRP-DSS-AES-128-CBC-SHA"
# define TLS1_TXT_SRP_SHA_WITH_AES_256_CBC_SHA "SRP-AES-256-CBC-SHA"
# define TLS1_TXT_SRP_SHA_RSA_WITH_AES_256_CBC_SHA "SRP-RSA-AES-256-CBC-SHA"
# define TLS1_TXT_SRP_SHA_DSS_WITH_AES_256_CBC_SHA "SRP-DSS-AES-256-CBC-SHA"
/* Camellia ciphersuites from RFC4132 */
# define TLS1_TXT_RSA_WITH_CAMELLIA_128_CBC_SHA "CAMELLIA128-SHA"
# define TLS1_TXT_DH_DSS_WITH_CAMELLIA_128_CBC_SHA "DH-DSS-CAMELLIA128-SHA"
# define TLS1_TXT_DH_RSA_WITH_CAMELLIA_128_CBC_SHA "DH-RSA-CAMELLIA128-SHA"
# define TLS1_TXT_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA "DHE-DSS-CAMELLIA128-SHA"
# define TLS1_TXT_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA "DHE-RSA-CAMELLIA128-SHA"
# define TLS1_TXT_ADH_WITH_CAMELLIA_128_CBC_SHA "ADH-CAMELLIA128-SHA"
# define TLS1_TXT_RSA_WITH_CAMELLIA_256_CBC_SHA "CAMELLIA256-SHA"
# define TLS1_TXT_DH_DSS_WITH_CAMELLIA_256_CBC_SHA "DH-DSS-CAMELLIA256-SHA"
# define TLS1_TXT_DH_RSA_WITH_CAMELLIA_256_CBC_SHA "DH-RSA-CAMELLIA256-SHA"
# define TLS1_TXT_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA "DHE-DSS-CAMELLIA256-SHA"
# define TLS1_TXT_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA "DHE-RSA-CAMELLIA256-SHA"
# define TLS1_TXT_ADH_WITH_CAMELLIA_256_CBC_SHA "ADH-CAMELLIA256-SHA"
/* SEED ciphersuites from RFC4162 */
# define TLS1_TXT_RSA_WITH_SEED_SHA "SEED-SHA"
# define TLS1_TXT_DH_DSS_WITH_SEED_SHA "DH-DSS-SEED-SHA"
# define TLS1_TXT_DH_RSA_WITH_SEED_SHA "DH-RSA-SEED-SHA"
# define TLS1_TXT_DHE_DSS_WITH_SEED_SHA "DHE-DSS-SEED-SHA"
# define TLS1_TXT_DHE_RSA_WITH_SEED_SHA "DHE-RSA-SEED-SHA"
# define TLS1_TXT_ADH_WITH_SEED_SHA "ADH-SEED-SHA"
/* TLS v1.2 ciphersuites */
# define TLS1_TXT_RSA_WITH_NULL_SHA256 "NULL-SHA256"
# define TLS1_TXT_RSA_WITH_AES_128_SHA256 "AES128-SHA256"
# define TLS1_TXT_RSA_WITH_AES_256_SHA256 "AES256-SHA256"
# define TLS1_TXT_DH_DSS_WITH_AES_128_SHA256 "DH-DSS-AES128-SHA256"
# define TLS1_TXT_DH_RSA_WITH_AES_128_SHA256 "DH-RSA-AES128-SHA256"
# define TLS1_TXT_DHE_DSS_WITH_AES_128_SHA256 "DHE-DSS-AES128-SHA256"
# define TLS1_TXT_DHE_RSA_WITH_AES_128_SHA256 "DHE-RSA-AES128-SHA256"
# define TLS1_TXT_DH_DSS_WITH_AES_256_SHA256 "DH-DSS-AES256-SHA256"
# define TLS1_TXT_DH_RSA_WITH_AES_256_SHA256 "DH-RSA-AES256-SHA256"
# define TLS1_TXT_DHE_DSS_WITH_AES_256_SHA256 "DHE-DSS-AES256-SHA256"
# define TLS1_TXT_DHE_RSA_WITH_AES_256_SHA256 "DHE-RSA-AES256-SHA256"
# define TLS1_TXT_ADH_WITH_AES_128_SHA256 "ADH-AES128-SHA256"
# define TLS1_TXT_ADH_WITH_AES_256_SHA256 "ADH-AES256-SHA256"
/* TLS v1.2 GCM ciphersuites from RFC5288 */
# define TLS1_TXT_RSA_WITH_AES_128_GCM_SHA256 "AES128-GCM-SHA256"
# define TLS1_TXT_RSA_WITH_AES_256_GCM_SHA384 "AES256-GCM-SHA384"
# define TLS1_TXT_DHE_RSA_WITH_AES_128_GCM_SHA256 "DHE-RSA-AES128-GCM-SHA256"
# define TLS1_TXT_DHE_RSA_WITH_AES_256_GCM_SHA384 "DHE-RSA-AES256-GCM-SHA384"
# define TLS1_TXT_DH_RSA_WITH_AES_128_GCM_SHA256 "DH-RSA-AES128-GCM-SHA256"
# define TLS1_TXT_DH_RSA_WITH_AES_256_GCM_SHA384 "DH-RSA-AES256-GCM-SHA384"
# define TLS1_TXT_DHE_DSS_WITH_AES_128_GCM_SHA256 "DHE-DSS-AES128-GCM-SHA256"
# define TLS1_TXT_DHE_DSS_WITH_AES_256_GCM_SHA384 "DHE-DSS-AES256-GCM-SHA384"
# define TLS1_TXT_DH_DSS_WITH_AES_128_GCM_SHA256 "DH-DSS-AES128-GCM-SHA256"
# define TLS1_TXT_DH_DSS_WITH_AES_256_GCM_SHA384 "DH-DSS-AES256-GCM-SHA384"
# define TLS1_TXT_ADH_WITH_AES_128_GCM_SHA256 "ADH-AES128-GCM-SHA256"
# define TLS1_TXT_ADH_WITH_AES_256_GCM_SHA384 "ADH-AES256-GCM-SHA384"
/* ECDH HMAC based ciphersuites from RFC5289 */
# define TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_SHA256 "ECDHE-ECDSA-AES128-SHA256"
# define TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_SHA384 "ECDHE-ECDSA-AES256-SHA384"
# define TLS1_TXT_ECDH_ECDSA_WITH_AES_128_SHA256 "ECDH-ECDSA-AES128-SHA256"
# define TLS1_TXT_ECDH_ECDSA_WITH_AES_256_SHA384 "ECDH-ECDSA-AES256-SHA384"
# define TLS1_TXT_ECDHE_RSA_WITH_AES_128_SHA256 "ECDHE-RSA-AES128-SHA256"
# define TLS1_TXT_ECDHE_RSA_WITH_AES_256_SHA384 "ECDHE-RSA-AES256-SHA384"
# define TLS1_TXT_ECDH_RSA_WITH_AES_128_SHA256 "ECDH-RSA-AES128-SHA256"
# define TLS1_TXT_ECDH_RSA_WITH_AES_256_SHA384 "ECDH-RSA-AES256-SHA384"
/* ECDH GCM based ciphersuites from RFC5289 */
# define TLS1_TXT_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 "ECDHE-ECDSA-AES128-GCM-SHA256"
# define TLS1_TXT_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 "ECDHE-ECDSA-AES256-GCM-SHA384"
# define TLS1_TXT_ECDH_ECDSA_WITH_AES_128_GCM_SHA256 "ECDH-ECDSA-AES128-GCM-SHA256"
# define TLS1_TXT_ECDH_ECDSA_WITH_AES_256_GCM_SHA384 "ECDH-ECDSA-AES256-GCM-SHA384"
# define TLS1_TXT_ECDHE_RSA_WITH_AES_128_GCM_SHA256 "ECDHE-RSA-AES128-GCM-SHA256"
# define TLS1_TXT_ECDHE_RSA_WITH_AES_256_GCM_SHA384 "ECDHE-RSA-AES256-GCM-SHA384"
# define TLS1_TXT_ECDH_RSA_WITH_AES_128_GCM_SHA256 "ECDH-RSA-AES128-GCM-SHA256"
# define TLS1_TXT_ECDH_RSA_WITH_AES_256_GCM_SHA384 "ECDH-RSA-AES256-GCM-SHA384"
# define TLS_CT_RSA_SIGN 1
# define TLS_CT_DSS_SIGN 2
# define TLS_CT_RSA_FIXED_DH 3
# define TLS_CT_DSS_FIXED_DH 4
# define TLS_CT_ECDSA_SIGN 64
# define TLS_CT_RSA_FIXED_ECDH 65
# define TLS_CT_ECDSA_FIXED_ECDH 66
# define TLS_CT_GOST94_SIGN 21
# define TLS_CT_GOST01_SIGN 22
/*
* when correcting this number, correct also SSL3_CT_NUMBER in ssl3.h (see
* comment there)
*/
# define TLS_CT_NUMBER 9
# define TLS1_FINISH_MAC_LENGTH 12
# define TLS_MD_MAX_CONST_SIZE 20
# define TLS_MD_CLIENT_FINISH_CONST "client finished"
# define TLS_MD_CLIENT_FINISH_CONST_SIZE 15
# define TLS_MD_SERVER_FINISH_CONST "server finished"
# define TLS_MD_SERVER_FINISH_CONST_SIZE 15
# define TLS_MD_SERVER_WRITE_KEY_CONST "server write key"
# define TLS_MD_SERVER_WRITE_KEY_CONST_SIZE 16
# define TLS_MD_KEY_EXPANSION_CONST "key expansion"
# define TLS_MD_KEY_EXPANSION_CONST_SIZE 13
# define TLS_MD_CLIENT_WRITE_KEY_CONST "client write key"
# define TLS_MD_CLIENT_WRITE_KEY_CONST_SIZE 16
# define TLS_MD_SERVER_WRITE_KEY_CONST "server write key"
# define TLS_MD_SERVER_WRITE_KEY_CONST_SIZE 16
# define TLS_MD_IV_BLOCK_CONST "IV block"
# define TLS_MD_IV_BLOCK_CONST_SIZE 8
# define TLS_MD_MASTER_SECRET_CONST "master secret"
# define TLS_MD_MASTER_SECRET_CONST_SIZE 13
# ifdef CHARSET_EBCDIC
# undef TLS_MD_CLIENT_FINISH_CONST
/*
* client finished
*/
# define TLS_MD_CLIENT_FINISH_CONST "\x63\x6c\x69\x65\x6e\x74\x20\x66\x69\x6e\x69\x73\x68\x65\x64"
# undef TLS_MD_SERVER_FINISH_CONST
/*
* server finished
*/
# define TLS_MD_SERVER_FINISH_CONST "\x73\x65\x72\x76\x65\x72\x20\x66\x69\x6e\x69\x73\x68\x65\x64"
# undef TLS_MD_SERVER_WRITE_KEY_CONST
/*
* server write key
*/
# define TLS_MD_SERVER_WRITE_KEY_CONST "\x73\x65\x72\x76\x65\x72\x20\x77\x72\x69\x74\x65\x20\x6b\x65\x79"
# undef TLS_MD_KEY_EXPANSION_CONST
/*
* key expansion
*/
# define TLS_MD_KEY_EXPANSION_CONST "\x6b\x65\x79\x20\x65\x78\x70\x61\x6e\x73\x69\x6f\x6e"
# undef TLS_MD_CLIENT_WRITE_KEY_CONST
/*
* client write key
*/
# define TLS_MD_CLIENT_WRITE_KEY_CONST "\x63\x6c\x69\x65\x6e\x74\x20\x77\x72\x69\x74\x65\x20\x6b\x65\x79"
# undef TLS_MD_SERVER_WRITE_KEY_CONST
/*
* server write key
*/
# define TLS_MD_SERVER_WRITE_KEY_CONST "\x73\x65\x72\x76\x65\x72\x20\x77\x72\x69\x74\x65\x20\x6b\x65\x79"
# undef TLS_MD_IV_BLOCK_CONST
/*
* IV block
*/
# define TLS_MD_IV_BLOCK_CONST "\x49\x56\x20\x62\x6c\x6f\x63\x6b"
# undef TLS_MD_MASTER_SECRET_CONST
/*
* master secret
*/
# define TLS_MD_MASTER_SECRET_CONST "\x6d\x61\x73\x74\x65\x72\x20\x73\x65\x63\x72\x65\x74"
# endif
/* TLS Session Ticket extension struct */
struct tls_session_ticket_ext_st {
unsigned short length;
void *data;
};
#ifdef __cplusplus
}
#endif
#endif
package org.stepik.android.remote.lesson.service
import io.reactivex.Single
import org.stepik.android.remote.lesson.model.LessonResponse
import retrofit2.Call
import retrofit2.http.GET
import retrofit2.http.Query
interface LessonService {
@GET("api/lessons")
fun getLessons(@Query("ids[]") lessons: LongArray): Call<LessonResponse>
@GET("api/lessons")
fun getLessonsRx(@Query("ids[]") lessons: LongArray): Single<LessonResponse>
}
<?php
/**
* This file is part of the Carbon package.
*
* (c) Brian Nesbitt <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
return array_replace_recursive(require __DIR__.'/ca.php', [
]);
using System;
using System.Web.Mvc;
namespace FluentSecurity.Specification.TestData
{
public class DefaultPolicyViolationHandler : IPolicyViolationHandler
{
public ActionResult Handle(PolicyViolationException exception)
{
throw new NotImplementedException();
}
}
}
package clockwork
import (
"reflect"
"testing"
"time"
)
func TestFakeClockAfter(t *testing.T) {
fc := &fakeClock{}
zero := fc.After(0)
select {
case <-zero:
default:
t.Errorf("zero did not return!")
}
one := fc.After(1)
two := fc.After(2)
six := fc.After(6)
ten := fc.After(10)
fc.Advance(1)
select {
case <-one:
default:
t.Errorf("one did not return!")
}
select {
case <-two:
t.Errorf("two returned prematurely!")
case <-six:
t.Errorf("six returned prematurely!")
case <-ten:
t.Errorf("ten returned prematurely!")
default:
}
fc.Advance(1)
select {
case <-two:
default:
t.Errorf("two did not return!")
}
select {
case <-six:
t.Errorf("six returned prematurely!")
case <-ten:
t.Errorf("ten returned prematurely!")
default:
}
fc.Advance(1)
select {
case <-six:
t.Errorf("six returned prematurely!")
case <-ten:
t.Errorf("ten returned prematurely!")
default:
}
fc.Advance(3)
select {
case <-six:
default:
t.Errorf("six did not return!")
}
select {
case <-ten:
t.Errorf("ten returned prematurely!")
default:
}
fc.Advance(100)
select {
case <-ten:
default:
t.Errorf("ten did not return!")
}
}
func TestNotifyBlockers(t *testing.T) {
b1 := &blocker{1, make(chan struct{})}
b2 := &blocker{2, make(chan struct{})}
b3 := &blocker{5, make(chan struct{})}
b4 := &blocker{10, make(chan struct{})}
b5 := &blocker{10, make(chan struct{})}
bs := []*blocker{b1, b2, b3, b4, b5}
bs1 := notifyBlockers(bs, 2)
if n := len(bs1); n != 4 {
t.Fatalf("got %d blockers, want %d", n, 4)
}
select {
case <-b2.ch:
case <-time.After(time.Second):
t.Fatalf("timed out waiting for channel close!")
}
bs2 := notifyBlockers(bs1, 10)
if n := len(bs2); n != 2 {
t.Fatalf("got %d blockers, want %d", n, 2)
}
select {
case <-b4.ch:
case <-time.After(time.Second):
t.Fatalf("timed out waiting for channel close!")
}
select {
case <-b5.ch:
case <-time.After(time.Second):
t.Fatalf("timed out waiting for channel close!")
}
}
func TestNewFakeClock(t *testing.T) {
fc := NewFakeClock()
now := fc.Now()
if now.IsZero() {
t.Fatalf("fakeClock.Now() fulfills IsZero")
}
now2 := fc.Now()
if !reflect.DeepEqual(now, now2) {
t.Fatalf("fakeClock.Now() returned different value: want=%#v got=%#v", now, now2)
}
}
func TestNewFakeClockAt(t *testing.T) {
t1 := time.Date(1999, time.February, 3, 4, 5, 6, 7, time.UTC)
fc := NewFakeClockAt(t1)
now := fc.Now()
if !reflect.DeepEqual(now, t1) {
t.Fatalf("fakeClock.Now() returned unexpected non-initialised value: want=%#v, got %#v", t1, now)
}
}
{
"name": "Shepherd",
"version": "0.1.3",
"summary": "A collection of protocol and extensions to aid with `NSUserActivity`s",
"description": "A collection of protocol and extensions to aid with the handling and creation of `NSUserActivity`s",
"homepage": "https://github.com/JosephDuffy/Shepherd",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": "Joseph Duffy",
"source": {
"git": "https://github.com/JosephDuffy/Shepherd.git",
"tag": "v0.1.3"
},
"source_files": "Sources/**/*.swift",
"platforms": {
"osx": "10.10",
"ios": "8.0",
"tvos": "9.0",
"watchos": "2.0"
},
"swift_versions": "5.0",
"swift_version": "5.0"
}
package com.datastax.tutorial;
import me.prettyprint.hector.api.Keyspace;
import me.prettyprint.hector.api.beans.ColumnSlice;
import me.prettyprint.hector.api.factory.HFactory;
import me.prettyprint.hector.api.query.QueryResult;
import me.prettyprint.hector.api.query.SliceQuery;
/**
* Get all the columns for a single Npanxx row.
* Shows the construction of a {@link SliceQuery} with the
* {@link StringSerializer} defined in the parent class.
*
* Thrift API: http://wiki.apache.org/cassandra/API#get_slice
* SlicePredicate: http://wiki.apache.org/cassandra/API#SlicePredicate
*
* To run this example from maven:
* mvn -e exec:java -Dexec.args="get_slice" -Dexec.mainClass="com.datastax.tutorial.TutorialRunner"
*
*/
public class GetSliceForNpanxx extends TutorialCommand {
public GetSliceForNpanxx(Keyspace keyspace) {
super(keyspace);
}
@Override
public QueryResult<ColumnSlice<String,String>> execute() {
SliceQuery<String, String, String> sliceQuery =
HFactory.createSliceQuery(keyspace, stringSerializer, stringSerializer, stringSerializer);
sliceQuery.setColumnFamily("Npanxx");
sliceQuery.setKey("512202");
// We only ever have these four columns on Npanxx
sliceQuery.setColumnNames("city","state","lat","lng");
        // The following would do exactly the same as the above,
        // except here we ask for the first 4 columns according to comparator order
// sliceQuery.setRange("", "", false, 4);
QueryResult<ColumnSlice<String, String>> result = sliceQuery.execute();
return result;
}
}
# frozen_string_literal: true
class RemoveKubernetesJob < ActiveRecord::Migration[5.1]
def change
remove_column :builds, :kubernetes_job, :boolean, default: false, null: false
end
end
/*
* Copyright (c) 2016 VMware, Inc. All Rights Reserved.
*
* This product is licensed to you under the Apache License, Version 2.0 (the "License").
* You may not use this product except in compliance with the License.
*
* This product may include a number of subcomponents with separate copyright notices
* and license terms. Your use of these subcomponents is subject to the terms and
* conditions of the subcomponent's license, as noted in the LICENSE file.
*/
import ClosureListItemVue from 'components/containers/ClosureListItemVue.html';
import DeleteConfirmationSupportMixin from 'components/common/DeleteConfirmationSupportMixin';
import constants from 'core/constants';
import utils from 'core/utils';
import {
ContainerActions, NavigationActions, AppActions
} from 'actions/Actions';
var ClosureListItem = Vue.extend({
template: ClosureListItemVue,
mixins: [DeleteConfirmationSupportMixin],
props: {
model: {
required: true
}
},
computed: {
documentId: function() {
return utils.getDocumentId(this.model.documentSelfLink);
},
inputs: function() {
return JSON.stringify(this.model.inputs);
},
outputs: function() {
return JSON.stringify(this.model.outputs);
}
},
methods: {
containerStatusDisplay: utils.containerStatusDisplay,
getClosureRunId: function() {
return this.model.documentId;
},
openClosureDetails: function($event) {
$event.stopPropagation();
$event.preventDefault();
NavigationActions.openClosureDetails(this.getClosureRunId());
},
removeClosureRun: function() {
this.confirmRemoval(ContainerActions.removeClosureRun, [this.model.documentSelfLink]);
AppActions.openView(constants.VIEWS.RESOURCES.VIEWS.CLOSURES.name);
ContainerActions.openContainers({
'$category': 'closures'
}, true);
},
operationSupported: function(op) {
return utils.operationSupported(op, this.model);
}
}
});
Vue.component('closure-grid-item', ClosureListItem);
export default ClosureListItem;
/*
* Copyright (C) 2015-2016 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#pragma once
#include <wtf/HashSet.h>
namespace WTF {
template<typename Node, typename Set = HashSet<Node>>
class GraphNodeWorklist {
public:
GraphNodeWorklist() { }
~GraphNodeWorklist() { }
// Returns true if we didn't know about the node before.
bool push(Node node)
{
if (!m_seen.add(node))
return false;
m_stack.append(node);
return true;
}
template<typename Iterable>
void pushAll(const Iterable& iterable)
{
for (Node node : iterable)
push(node);
}
bool isEmpty() const { return m_stack.isEmpty(); }
bool notEmpty() const { return !m_stack.isEmpty(); }
Node pop()
{
if (m_stack.isEmpty())
return Node();
return m_stack.takeLast();
}
bool saw(Node node) { return m_seen.contains(node); }
const Set& seen() const { return m_seen; }
private:
Set m_seen;
Vector<Node, 16> m_stack;
};
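// A minimal usage sketch (not part of WebKit): a generic reachability walk
// driven by GraphNodeWorklist. The Graph type and its successors() accessor
// are hypothetical stand-ins for whatever graph the caller uses, and Node is
// assumed to be a pointer-like type whose default value tests false.
//
//     template<typename Graph, typename Node>
//     HashSet<Node> reachableFrom(Graph& graph, Node root)
//     {
//         GraphNodeWorklist<Node> worklist;
//         worklist.push(root);
//         while (Node node = worklist.pop()) {
//             for (Node successor : graph.successors(node))
//                 worklist.push(successor); // push() ignores nodes already seen
//         }
//         return worklist.seen();
//     }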
template<typename Node, typename T>
struct GraphNodeWith {
GraphNodeWith()
: node()
, data()
{
}
GraphNodeWith(Node node, const T& data)
: node(node)
, data(data)
{
}
explicit operator bool() const { return !!node; }
Node node;
T data;
};
template<typename Node, typename T, typename Set = HashSet<Node>>
class ExtendedGraphNodeWorklist {
public:
ExtendedGraphNodeWorklist() { }
void forcePush(const GraphNodeWith<Node, T>& entry)
{
m_stack.append(entry);
}
void forcePush(Node node, const T& data)
{
forcePush(GraphNodeWith<Node, T>(node, data));
}
bool push(const GraphNodeWith<Node, T>& entry)
{
if (!m_seen.add(entry.node))
return false;
forcePush(entry);
return true;
}
bool push(Node node, const T& data)
{
return push(GraphNodeWith<Node, T>(node, data));
}
bool notEmpty() const { return !m_stack.isEmpty(); }
GraphNodeWith<Node, T> pop()
{
if (m_stack.isEmpty())
return GraphNodeWith<Node, T>();
return m_stack.takeLast();
}
private:
Set m_seen;
Vector<GraphNodeWith<Node, T>> m_stack;
};
enum class GraphVisitOrder : uint8_t {
Pre,
Post
};
template<typename Node>
struct GraphNodeWithOrder {
GraphNodeWithOrder()
: node()
, order(GraphVisitOrder::Pre)
{
}
GraphNodeWithOrder(Node node, GraphVisitOrder order)
: node(node)
, order(order)
{
}
explicit operator bool() const { return node; }
Node node;
GraphVisitOrder order;
};
template<typename Node, typename Set = HashSet<Node>>
class PostOrderGraphNodeWorklist {
public:
PostOrderGraphNodeWorklist()
{
}
~PostOrderGraphNodeWorklist()
{
}
bool pushPre(Node node)
{
return m_worklist.push(node, GraphVisitOrder::Pre);
}
void pushPost(Node node)
{
m_worklist.forcePush(node, GraphVisitOrder::Post);
}
bool push(Node node, GraphVisitOrder order = GraphVisitOrder::Pre)
{
switch (order) {
case GraphVisitOrder::Pre:
return pushPre(node);
case GraphVisitOrder::Post:
pushPost(node);
return true;
}
RELEASE_ASSERT_NOT_REACHED();
return false;
}
bool push(const GraphNodeWithOrder<Node>& data)
{
return push(data.node, data.order);
}
bool notEmpty() const { return m_worklist.notEmpty(); }
GraphNodeWithOrder<Node> pop()
{
GraphNodeWith<Node, GraphVisitOrder> result = m_worklist.pop();
return GraphNodeWithOrder<Node>(result.node, result.data);
}
private:
ExtendedGraphNodeWorklist<Node, GraphVisitOrder, Set> m_worklist;
};
} // namespace WTF
using WTF::GraphNodeWorklist;
using WTF::GraphNodeWith;
using WTF::ExtendedGraphNodeWorklist;
using WTF::GraphVisitOrder;
using WTF::GraphNodeWithOrder;
using WTF::PostOrderGraphNodeWorklist;
<?php
declare(strict_types=1);
namespace GraphQL\Executor;
use GraphQL\Error\Error;
use GraphQL\Language\AST\ArgumentNode;
use GraphQL\Language\AST\BooleanValueNode;
use GraphQL\Language\AST\DirectiveNode;
use GraphQL\Language\AST\EnumValueDefinitionNode;
use GraphQL\Language\AST\EnumValueNode;
use GraphQL\Language\AST\FieldDefinitionNode;
use GraphQL\Language\AST\FieldNode;
use GraphQL\Language\AST\FloatValueNode;
use GraphQL\Language\AST\FragmentSpreadNode;
use GraphQL\Language\AST\InlineFragmentNode;
use GraphQL\Language\AST\IntValueNode;
use GraphQL\Language\AST\ListValueNode;
use GraphQL\Language\AST\Node;
use GraphQL\Language\AST\NodeList;
use GraphQL\Language\AST\NullValueNode;
use GraphQL\Language\AST\ObjectValueNode;
use GraphQL\Language\AST\StringValueNode;
use GraphQL\Language\AST\ValueNode;
use GraphQL\Language\AST\VariableDefinitionNode;
use GraphQL\Language\AST\VariableNode;
use GraphQL\Language\Printer;
use GraphQL\Type\Definition\Directive;
use GraphQL\Type\Definition\EnumType;
use GraphQL\Type\Definition\FieldDefinition;
use GraphQL\Type\Definition\InputObjectType;
use GraphQL\Type\Definition\InputType;
use GraphQL\Type\Definition\ListOfType;
use GraphQL\Type\Definition\NonNull;
use GraphQL\Type\Definition\ScalarType;
use GraphQL\Type\Definition\Type;
use GraphQL\Type\Schema;
use GraphQL\Utils\AST;
use GraphQL\Utils\TypeInfo;
use GraphQL\Utils\Utils;
use GraphQL\Utils\Value;
use stdClass;
use Throwable;
use function array_key_exists;
use function array_map;
use function count;
use function sprintf;
class Values
{
/**
* Prepares an object map of variables of the correct type based on the provided
* variable definitions and arbitrary input. If the input cannot be coerced
     * to match the variable definitions, an Error will be thrown.
*
* @param VariableDefinitionNode[] $varDefNodes
* @param mixed[] $inputs
*
* @return mixed[]
*/
public static function getVariableValues(Schema $schema, $varDefNodes, array $inputs)
{
$errors = [];
$coercedValues = [];
foreach ($varDefNodes as $varDefNode) {
$varName = $varDefNode->variable->name->value;
/** @var InputType|Type $varType */
$varType = TypeInfo::typeFromAST($schema, $varDefNode->type);
if (! Type::isInputType($varType)) {
                // Must use input types for variables. This should be caught during
                // validation; however, it is checked again here for safety.
$errors[] = new Error(
sprintf(
'Variable "$%s" expected value of type "%s" which cannot be used as an input type.',
$varName,
Printer::doPrint($varDefNode->type)
),
[$varDefNode->type]
);
} else {
$hasValue = array_key_exists($varName, $inputs);
$value = $hasValue ? $inputs[$varName] : Utils::undefined();
if (! $hasValue && ($varDefNode->defaultValue !== null)) {
// If no value was provided to a variable with a default value,
// use the default value.
$coercedValues[$varName] = AST::valueFromAST($varDefNode->defaultValue, $varType);
} elseif ((! $hasValue || $value === null) && ($varType instanceof NonNull)) {
// If no value or a nullish value was provided to a variable with a
// non-null type (required), produce an error.
$errors[] = new Error(
sprintf(
$hasValue
? 'Variable "$%s" of non-null type "%s" must not be null.'
: 'Variable "$%s" of required type "%s" was not provided.',
$varName,
Utils::printSafe($varType)
),
[$varDefNode]
);
} elseif ($hasValue) {
if ($value === null) {
// If the explicit value `null` was provided, an entry in the coerced
// values must exist as the value `null`.
$coercedValues[$varName] = null;
} else {
// Otherwise, a non-null value was provided, coerce it to the expected
// type or report an error if coercion fails.
$coerced = Value::coerceValue($value, $varType, $varDefNode);
/** @var Error[] $coercionErrors */
$coercionErrors = $coerced['errors'];
if (count($coercionErrors ?? []) > 0) {
$messagePrelude = sprintf(
'Variable "$%s" got invalid value %s; ',
$varName,
Utils::printSafeJson($value)
);
foreach ($coercionErrors as $error) {
$errors[] = new Error(
$messagePrelude . $error->getMessage(),
$error->getNodes(),
$error->getSource(),
$error->getPositions(),
$error->getPath(),
$error->getPrevious(),
$error->getExtensions()
);
}
} else {
$coercedValues[$varName] = $coerced['value'];
}
}
}
}
}
if (count($errors) > 0) {
return [$errors, null];
}
return [null, $coercedValues];
}
/**
* Prepares an object map of argument values given a directive definition
* and an AST node which may contain directives. Optionally also accepts a map
* of variable values.
*
* If the directive does not exist on the node, returns null.
*
* @param FragmentSpreadNode|FieldNode|InlineFragmentNode|EnumValueDefinitionNode|FieldDefinitionNode $node
* @param mixed[]|null $variableValues
*
* @return mixed[]|null
*/
public static function getDirectiveValues(Directive $directiveDef, $node, $variableValues = null)
{
if (isset($node->directives) && $node->directives instanceof NodeList) {
$directiveNode = Utils::find(
$node->directives,
static function (DirectiveNode $directive) use ($directiveDef) : bool {
return $directive->name->value === $directiveDef->name;
}
);
if ($directiveNode !== null) {
return self::getArgumentValues($directiveDef, $directiveNode, $variableValues);
}
}
return null;
}
/**
* Prepares an object map of argument values given a list of argument
* definitions and list of argument AST nodes.
*
* @param FieldDefinition|Directive $def
* @param FieldNode|DirectiveNode $node
* @param mixed[] $variableValues
*
* @return mixed[]
*
* @throws Error
*/
public static function getArgumentValues($def, $node, $variableValues = null)
{
if (count($def->args) === 0) {
return [];
}
$argumentNodes = $node->arguments;
$argumentValueMap = [];
foreach ($argumentNodes as $argumentNode) {
$argumentValueMap[$argumentNode->name->value] = $argumentNode->value;
}
return static::getArgumentValuesForMap($def, $argumentValueMap, $variableValues, $node);
}
/**
* @param FieldDefinition|Directive $fieldDefinition
* @param ArgumentNode[] $argumentValueMap
* @param mixed[] $variableValues
* @param Node|null $referenceNode
*
* @return mixed[]
*
* @throws Error
*/
public static function getArgumentValuesForMap($fieldDefinition, $argumentValueMap, $variableValues = null, $referenceNode = null)
{
$argumentDefinitions = $fieldDefinition->args;
$coercedValues = [];
foreach ($argumentDefinitions as $argumentDefinition) {
$name = $argumentDefinition->name;
$argType = $argumentDefinition->getType();
$argumentValueNode = $argumentValueMap[$name] ?? null;
if ($argumentValueNode instanceof VariableNode) {
$variableName = $argumentValueNode->name->value;
$hasValue = array_key_exists($variableName, $variableValues ?? []);
$isNull = $hasValue ? $variableValues[$variableName] === null : false;
} else {
$hasValue = $argumentValueNode !== null;
$isNull = $argumentValueNode instanceof NullValueNode;
}
if (! $hasValue && $argumentDefinition->defaultValueExists()) {
// If no argument was provided where the definition has a default value,
// use the default value.
$coercedValues[$name] = $argumentDefinition->defaultValue;
} elseif ((! $hasValue || $isNull) && ($argType instanceof NonNull)) {
// If no argument or a null value was provided to an argument with a
// non-null type (required), produce a field error.
if ($isNull) {
throw new Error(
'Argument "' . $name . '" of non-null type ' .
'"' . Utils::printSafe($argType) . '" must not be null.',
$referenceNode
);
}
if ($argumentValueNode instanceof VariableNode) {
$variableName = $argumentValueNode->name->value;
throw new Error(
'Argument "' . $name . '" of required type "' . Utils::printSafe($argType) . '" was ' .
'provided the variable "$' . $variableName . '" which was not provided ' .
'a runtime value.',
[$argumentValueNode]
);
}
throw new Error(
'Argument "' . $name . '" of required type ' .
'"' . Utils::printSafe($argType) . '" was not provided.',
$referenceNode
);
} elseif ($hasValue) {
if ($argumentValueNode instanceof NullValueNode) {
// If the explicit value `null` was provided, an entry in the coerced
// values must exist as the value `null`.
$coercedValues[$name] = null;
} elseif ($argumentValueNode instanceof VariableNode) {
$variableName = $argumentValueNode->name->value;
Utils::invariant($variableValues !== null, 'Must exist for hasValue to be true.');
// Note: This does no further checking that this variable is correct.
// This assumes that this query has been validated and the variable
// usage here is of the correct type.
$coercedValues[$name] = $variableValues[$variableName] ?? null;
} else {
$valueNode = $argumentValueNode;
$coercedValue = AST::valueFromAST($valueNode, $argType, $variableValues);
if (Utils::isInvalid($coercedValue)) {
// Note: ValuesOfCorrectType validation should catch this before
// execution. This is a runtime check to ensure execution does not
// continue with an invalid argument value.
throw new Error(
'Argument "' . $name . '" has invalid value ' . Printer::doPrint($valueNode) . '.',
[$argumentValueNode]
);
}
$coercedValues[$name] = $coercedValue;
}
}
}
return $coercedValues;
}
/**
* @deprecated as of 8.0 (Moved to \GraphQL\Utils\AST::valueFromAST)
*
* @param VariableNode|NullValueNode|IntValueNode|FloatValueNode|StringValueNode|BooleanValueNode|EnumValueNode|ListValueNode|ObjectValueNode $valueNode
* @param ScalarType|EnumType|InputObjectType|ListOfType|NonNull $type
* @param mixed[]|null $variables
*
* @return mixed[]|stdClass|null
*
* @codeCoverageIgnore
*/
public static function valueFromAST(ValueNode $valueNode, InputType $type, ?array $variables = null)
{
return AST::valueFromAST($valueNode, $type, $variables);
}
/**
* @deprecated as of 0.12 (Use coerceValue() directly for richer information)
*
* @param mixed[] $value
* @param ScalarType|EnumType|InputObjectType|ListOfType|NonNull $type
*
* @return string[]
*
* @codeCoverageIgnore
*/
public static function isValidPHPValue($value, InputType $type)
{
$errors = Value::coerceValue($value, $type)['errors'];
return $errors
? array_map(
static function (Throwable $error) : string {
return $error->getMessage();
},
$errors
)
: [];
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2016 Smith AR <[email protected]>
* Michail Vourlakos <[email protected]>
*
* This file is part of Latte-Dock
*
* Latte-Dock is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of
* the License, or (at your option) any later version.
*
* Latte-Dock is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import QtQuick 2.1
import QtGraphicalEffects 1.0
import org.kde.plasma.plasmoid 2.0
import org.kde.plasma.core 2.0 as PlasmaCore
import org.kde.plasma.components 2.0 as PlasmaComponents
import org.kde.latte.core 0.2 as LatteCore
import org.kde.latte.components 1.0 as LatteComponents
import "../applet" as Applet
Item{
id: editVisual
width: root.isHorizontal ? (latteView ? latteView.width : root.width) :
visibilityManager.thicknessEditMode
height: root.isVertical ? (latteView ? latteView.height : root.height) :
visibilityManager.thicknessEditMode
visible: editVisual.inEditMode
readonly property int settingsThickness: settingsOverlay.thickness
property int speed: LatteCore.WindowSystem.compositingActive ? animations.speedFactor.normal*3.6*animations.duration.large : 10
property int thickness: visibilityManager.thicknessEditMode + root.editShadow
property int rootThickness: visibilityManager.thicknessZoomOriginal + root.editShadow //- visibilityManager.thicknessEditMode
property int editLength: root.isHorizontal ? (root.behaveAsPlasmaPanel ? root.width - metrics.maxIconSize/4 : root.width)://root.maxLength) :
(root.behaveAsPlasmaPanel ? root.height - metrics.maxIconSize/4 : root.height)
property bool farEdge: (plasmoid.location===PlasmaCore.Types.BottomEdge) || (plasmoid.location===PlasmaCore.Types.RightEdge)
property bool editAnimationEnded: false
property bool editAnimationInFullThickness: false
property bool editAnimationRunning: false
property bool plasmaEditMode: plasmoid.userConfiguring
property bool inEditMode: false
property rect efGeometry
readonly property real appliedOpacity: imageTiler.opacity
readonly property real maxOpacity: root.inConfigureAppletsMode || !LatteCore.WindowSystem.compositingActive ?
1 : plasmoid.configuration.editBackgroundOpacity
LatteComponents.ExternalShadow{
id: editExternalShadow
width: root.isHorizontal ? imageTiler.width : root.editShadow
height: root.isHorizontal ? root.editShadow : imageTiler.height
visible: !editTransition.running && root.editMode && LatteCore.WindowSystem.compositingActive
shadowSize: root.editShadow
shadowOpacity: Math.max(0.35, imageTiler.opacity)
shadowDirection: plasmoid.location
states: [
///topShadow
State {
name: "topShadow"
when: (plasmoid.location === PlasmaCore.Types.BottomEdge)
AnchorChanges {
target: editExternalShadow
anchors{ top:undefined; bottom:imageTiler.top; left:undefined; right:undefined;
horizontalCenter:imageTiler.horizontalCenter; verticalCenter:undefined}
}
},
///bottomShadow
State {
name: "bottomShadow"
when: (plasmoid.location === PlasmaCore.Types.TopEdge)
AnchorChanges {
target: editExternalShadow
anchors{ top:imageTiler.bottom; bottom:undefined; left:undefined; right:undefined;
horizontalCenter:imageTiler.horizontalCenter; verticalCenter:undefined}
}
},
///leftShadow
State {
name: "leftShadow"
when: (plasmoid.location === PlasmaCore.Types.RightEdge)
AnchorChanges {
target: editExternalShadow
anchors{ top:undefined; bottom:undefined; left:undefined; right:imageTiler.left;
horizontalCenter:undefined; verticalCenter:imageTiler.verticalCenter}
}
},
///rightShadow
State {
name: "rightShadow"
when: (plasmoid.location === PlasmaCore.Types.LeftEdge)
AnchorChanges {
target: editExternalShadow
anchors{ top:undefined; bottom:undefined; left:imageTiler.right; right:undefined;
horizontalCenter:undefined; verticalCenter:imageTiler.verticalCenter}
}
}
]
}
Image{
id: imageTiler
anchors.centerIn: parent
width: parent.width
height: parent.height
opacity: 0
fillMode: Image.Tile
source: {
if (hasBackground) {
return viewLayout.background;
}
return viewLayout ? "../../icons/"+viewLayout.background+"print.jpg" : "../../icons/blueprint.jpg"
}
readonly property bool hasBackground: (viewLayout && viewLayout.background.startsWith("/")) ? true : false
Connections {
target: editVisual
onMaxOpacityChanged: {
if (editVisual.editAnimationEnded) {
imageTiler.opacity = editVisual.maxOpacity;
}
}
}
Behavior on opacity {
enabled: editVisual.editAnimationEnded
NumberAnimation {
duration: 0.8 * animations.duration.proposed
easing.type: Easing.OutCubic
}
}
}
MouseArea {
id: editBackMouseArea
anchors.fill: imageTiler
visible: editModeVisual.editAnimationEnded && !root.inConfigureAppletsMode
hoverEnabled: true
property bool wheelIsBlocked: false;
readonly property double opacityStep: 0.1
readonly property string tooltip: i18nc("opacity for background under edit mode, %0% is opacity percentage",
"You can use mouse wheel to change background opacity of %0%").arg(Math.round(plasmoid.configuration.editBackgroundOpacity * 100))
onWheel: {
processWheel(wheel);
}
function processWheel(wheel) {
if (wheelIsBlocked) {
return;
}
wheelIsBlocked = true;
scrollDelayer.start();
var angle = wheel.angleDelta.y / 8;
if (angle > 10) {
plasmoid.configuration.editBackgroundOpacity = Math.min(100, plasmoid.configuration.editBackgroundOpacity + opacityStep)
} else if (angle < -10) {
plasmoid.configuration.editBackgroundOpacity = Math.max(0, plasmoid.configuration.editBackgroundOpacity - opacityStep)
}
}
Connections {
target: root
onEmptyAreasWheel: {
if (root.editMode && !root.inConfigureAppletsMode) {
editBackMouseArea.processWheel(wheel);
}
}
}
//! A timer is needed in order to also handle touchpads that probably
//! send too many signals very fast. This way the signals per second are limited.
//! The user needs to have a steady, normal scroll in order not to
//! notice an annoying delay
Timer{
id: scrollDelayer
interval: 80
onTriggered: editBackMouseArea.wheelIsBlocked = false;
}
}
PlasmaComponents.Button {
anchors.fill: editBackMouseArea
opacity: 0
tooltip: editBackMouseArea.tooltip
}
//! Settings Overlay
SettingsOverlay {
id: settingsOverlay
anchors.fill: parent
visible: root.editMode
}
Applet.TitleTooltipParent {
id: titleTooltipParent
metrics: root.metrics
parabolic: root.parabolic
minimumThickness: visibilityManager.thicknessEditMode
maximumThickness: root.inConfigureAppletsMode ? visibilityManager.thicknessEditMode : 9999
}
Connections{
target: root
onThemeColorsChanged: imageTiler.opacity = editVisual.maxOpacity
}
Connections{
target: plasmoid
onLocationChanged: initializeEditPosition();
}
onInEditModeChanged: {
if (inEditMode) {
latteView.visibility.addBlockHidingEvent("EditVisual[qml]::inEditMode()");
} else {
latteView.visibility.removeBlockHidingEvent("EditVisual[qml]::inEditMode()");
if (latteView.visibility.isHidden) {
latteView.visibility.mustBeShown();
}
}
}
onRootThicknessChanged: {
initializeEditPosition();
}
onThicknessChanged: {
initializeEditPosition();
}
onXChanged: updateEffectsArea();
onYChanged: updateEffectsArea();
onWidthChanged: {
/*if (root.isHorizontal) {
initializeEditPosition();
}*/
updateEffectsArea();
}
onHeightChanged: {
/* if (root.isVertical) {
initializeEditPosition();
}*/
updateEffectsArea();
}
function updateEffectsArea(){
if (LatteCore.WindowSystem.compositingActive ||
!latteView || state !== "edit" || !editAnimationEnded) {
return;
}
var rootGeometry = mapToItem(root, 0, 0);
efGeometry.x = rootGeometry.x;
efGeometry.y = rootGeometry.y;
efGeometry.width = width;
efGeometry.height = height;
latteView.effects.rect = efGeometry;
}
function initializeNormalPosition() {
if (plasmoid.location === PlasmaCore.Types.BottomEdge) {
y = rootThickness;
x = 0;
} else if (plasmoid.location === PlasmaCore.Types.RightEdge) {
x = rootThickness;
y = 0;
} else if (plasmoid.location === PlasmaCore.Types.LeftEdge) {
x = -editVisual.thickness;
y = 0;
} else if (plasmoid.location === PlasmaCore.Types.TopEdge) {
y = -editVisual.thickness;
x = 0;
}
}
function initializeEditPosition() {
if (root.editMode) {
if (plasmoid.location === PlasmaCore.Types.LeftEdge){
x = 0;
y = 0;
} else if (plasmoid.location === PlasmaCore.Types.TopEdge) {
x = 0;
y = 0;
} else if (plasmoid.location === PlasmaCore.Types.BottomEdge) {
x = 0;
y = rootThickness - thickness;
} else if (plasmoid.location === PlasmaCore.Types.RightEdge) {
x = rootThickness - thickness;
y = 0;
}
}
}
}
| {
"pile_set_name": "Github"
} |
zh_cn:
ohm:
errors:
messages:
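# English glosses of the zh_cn messages below (reference only):
#   format / not_url / not_email / not_decimal: "is invalid"
#   not_present: "can't be blank"
#   not_numeric: "is not a number"
#   not_valid / not_in_range: "is not included in the list"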
format: "是无效的"
not_present: "不能是空白字元"
not_numeric: "不是数字"
not_url: "是无效的"
not_email: "是无效的"
not_valid: "没有包含在列表中"
not_in_range: "没有包含在列表中"
not_decimal: "是无效的"
| {
"pile_set_name": "Github"
} |
package dataturks.response;
import bonsai.dropwizard.dao.d.DUsers;
public class UserDetails {
public String uid;
public String firstName = "Jane";
public String secondName;
public String profilePic;
public String email;
public UserDetails(){
}
public UserDetails(DUsers user) {
if (user != null) {
this.uid = user.getId();
setFirstName(user.getFirstName());
setSecondName(user.getSecondName());
setProfilePic(user.getProfilePic());
setEmail(user.getEmail());
}
}
public UserDetails(String uid, String firstName, String secondName) {
this();
this.uid = uid;
this.firstName = firstName;
this.secondName = secondName;
}
public String getUid() {
return uid;
}
public void setUid(String uid) {
this.uid = uid;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getSecondName() {
return secondName;
}
public void setSecondName(String secondName) {
this.secondName = secondName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getProfilePic() {
return profilePic;
}
public void setProfilePic(String profilePic) {
this.profilePic = profilePic;
}
}
| {
"pile_set_name": "Github"
} |
# Swag [](https://travis-ci.org/go-openapi/swag) [](https://codecov.io/gh/go-openapi/swag) [](https://slackin.goswagger.io)
[](https://raw.githubusercontent.com/go-openapi/swag/master/LICENSE) [](http://godoc.org/github.com/go-openapi/swag)
Contains a bunch of helper functions:
* convert between value and pointers for builtins
* convert from string to builtin
* fast json concatenation
* search in path
* load from file or http
* name mangling | {
"pile_set_name": "Github"
} |
# Webdriver Manager for Python

[](https://pypi.org/project/webdriver-manager)
[](https://pypi.org/project/webdriver-manager/)
[](https://codecov.io/gh/SergeyPirogov/webdriver_manager)
[Patreon](https://www.patreon.com/automation_remarks)
The main idea is to simplify management of binary drivers for different browsers.
For now it supports:
- ChromeDriver
- GeckoDriver
- IEDriver
- OperaDriver
- EdgeChromiumDriver
Before:
You should download the chromedriver binary, unzip it somewhere on your PC and set the path to this driver like this:
```python
webdriver.Chrome('/home/user/drivers/chromedriver')
ChromeDriverManager(path=custom_path).install()
```
It’s boring!!! Moreover, every time a new version of the driver is released, you have to go and repeat all the steps again and again.
With webdriver manager, you just need to do two simple steps:
Install manager:
```bash
pip install webdriver_manager
```
Use with Chrome:
```python
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
driver = webdriver.Chrome(ChromeDriverManager().install())
```
Use with Chromium:
```python
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from webdriver_manager.utils import ChromeType
driver = webdriver.Chrome(ChromeDriverManager(chrome_type=ChromeType.CHROMIUM).install())
```
Use with FireFox:
```python
from selenium import webdriver
from webdriver_manager.firefox import GeckoDriverManager
driver = webdriver.Firefox(executable_path=GeckoDriverManager().install())
```
Use with IE
```python
from selenium import webdriver
from webdriver_manager.microsoft import IEDriverManager
driver = webdriver.Ie(IEDriverManager().install())
```
Use with Edge
```python
from selenium import webdriver
from webdriver_manager.microsoft import EdgeChromiumDriverManager
driver = webdriver.Edge(EdgeChromiumDriverManager().install())
```
Use with Opera
```python
from selenium import webdriver
from webdriver_manager.opera import OperaDriverManager
driver = webdriver.Opera(executable_path=OperaDriverManager().install())
```
If the Opera browser is installed in a location other than C:/Program Files or C:/Program Files (x86) on Windows,
or /usr/bin/opera on all Unix variants and macOS, then use the code below:
```python
from selenium import webdriver
from webdriver_manager.opera import OperaDriverManager
options = webdriver.ChromeOptions()
options.add_argument('allow-elevated-browser')
options.binary_location = "C:\\Users\\USERNAME\\FOLDERLOCATION\\Opera\\VERSION\\opera.exe"
driver = webdriver.Opera(executable_path=OperaDriverManager().install(), options=options)
```
## Configuration
If you face an error related to GitHub credentials, you need to provide a GitHub token: (\*)
Example:
```bash
export GH_TOKEN="asdasdasdasd"
```
(\*) An access_token is required to work with the GitHub API; more info: <https://help.github.com/articles/creating-an-access-token-for-command-line-use/>.
It is also possible to set the same variables via ENV VARIABLES.
To silence `webdriver_manager` logs and remove them from the console, initialize the env variable `WDM_LOG_LEVEL` with the value `'0'` before your selenium tests:
```python
import os
os.environ['WDM_LOG_LEVEL'] = '0'
```
or via constructor:
```python
ChromeDriverManager("2.26", log_level=0).install()
```
By default webdriver manager prints a blank space before its log output if logging is enabled. If you want to disable this, initialize `WDM_PRINT_FIRST_LINE` with `'False'` before your tests:
```python
import os
os.environ['WDM_PRINT_FIRST_LINE'] = 'False'
```
or via constructor:
```python
ChromeDriverManager("2.26", print_first_line=False).install()
```
By default all driver binaries are saved to the user.home/.wdm folder. You can override this setting and save binaries to project.root/.wdm.
```python
import os
os.environ['WDM_LOCAL'] = '1'
```
The driver cache is valid for 1 day by default. You can change this value using a constructor parameter:
```python
ChromeDriverManager("2.26", cache_valid_range=1).install()
```
This will make your test automation more elegant and robust!
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<data>
<value type="string" key="pages">100</value>
</data> | {
"pile_set_name": "Github"
} |
<%@ page pageEncoding="UTF-8"%>
<%@ taglib prefix="c" uri="http://java.sun.com/jsp/jstl/core" %>
<%
response.setHeader("Pragma", "No-cache");
response.setHeader("Cache-Control", "no-cache");
response.setDateHeader("Expires", 0);
%>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta http-equiv="pragma" content="no-cache">
<title>JdonMVC demo</title>
</head>
<body>
<div style="text-align: center;">
<h3>JdonFramework Application DEMO
<br />note: add more than 5 rows and you will see more pages (Auto Pagination).
<br/>Source Download: <a href="sourceforge.net/projects/jdon/files/JdonFramework-App/samples-6.6/">click here</a>
</h3>
<table width="550" cellpadding=6 cellspacing=0 border=1 align="center">
<tr bgcolor="#C3C3C3">
<td bgcolor="#D9D9D9">image</td>
<td bgcolor="#D9D9D9">userId</td>
<td bgcolor="#D9D9D9">name</td>
<td bgcolor="#D9D9D9">operation</td>
</tr>
<c:forEach var="user" items="${userList}">
<tr bgcolor="#ffffff">
<td><c:if test="${not empty user.uploadFile}">
pic :<img src="<%=request.getContextPath() %>/showUpload?pid=${user.userId}" border='0' width="20" height="20"/>
</c:if>
</td>
<td>${user.userId}</td><td>${user.username}</td>
<td><a href="user/${user.userId}">edit</a>
<form action="user/${user.userId}" method="post">
<input type="hidden" name="_method" value="DELETE" />
<input type="submit" value="delete" />
</form>
</td>
</tr>
</c:forEach>
</table>
<a href="newUser.jsp">add</a>
</div>
* if you can't find any items that you just added, please modify the database config in META-INF/context.xml
<p>
* reload this page and you will find the pictures you just uploaded.
</body>
</html>
| {
"pile_set_name": "Github"
} |
#include "AndroidBuilder.h"
namespace Upp {
AndroidBuilderCommands::AndroidBuilderCommands(
AndroidProject* projectPtr, AndroidSDK* sdkPtr, Jdk* jdkPtr)
: projectPtr(projectPtr)
, sdkPtr(sdkPtr)
, jdkPtr(jdkPtr)
{
ASSERT_(projectPtr, "Project is not initialized.");
ASSERT_(sdkPtr, "Android sdk is not initialized.");
ASSERT_(jdkPtr, "JDK is not initialized.");
}
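// Builds a javac command line roughly of the form (paths below are illustrative only):
//   javac [-source 1.7 -target 1.7] -g|-g:none -d <classesDir>
//         -classpath <android.jar><delim><buildDir> -sourcepath <javaDir> <sources...>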
String AndroidBuilderCommands::PreperCompileJavaSourcesCommand(
const Vector<String>& sources)
{
String cmd;
cmd << NormalizeExePath(jdkPtr->GetJavacPath());
if (jdkPtr->GetVersion().IsGreaterOrEqual(1, 8)) {
cmd << " -source 1.7 -target 1.7";
}
cmd << (projectPtr->IsDebug() ? " -g" : " -g:none");
cmd << " -d "<< projectPtr->GetClassesDir();
cmd << " -classpath ";
cmd << NormalizeExePath(sdkPtr->AndroidJarPath()) << Java::GetDelimiter();
cmd << projectPtr->GetBuildDir();
cmd << " -sourcepath ";
cmd << projectPtr->GetJavaDir() << " ";
for(int i = 0; i < sources.GetCount(); ++i) {
cmd << sources[i];
if(i < sources.GetCount() - 1)
cmd << " ";
}
return cmd;
}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package btcec
import (
"compress/zlib"
"encoding/base64"
"encoding/binary"
"io/ioutil"
"strings"
)
//go:generate go run -tags gensecp256k1 genprecomps.go
// loadS256BytePoints decompresses and deserializes the pre-computed byte points
// used to accelerate scalar base multiplication for the secp256k1 curve. This
// approach is used since it allows the compiler to use significantly less RAM
// and compile much faster than it does with hard-coding the final in-memory
// data structure. At the same time, it is quite fast to generate the in-memory
// data structure at init time with this approach versus computing the table.
func loadS256BytePoints() error {
// There will be no byte points to load when generating them.
bp := secp256k1BytePoints
if len(bp) == 0 {
return nil
}
// Decompress the pre-computed table used to accelerate scalar base
// multiplication.
decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(bp))
r, err := zlib.NewReader(decoder)
if err != nil {
return err
}
serialized, err := ioutil.ReadAll(r)
if err != nil {
return err
}
// Deserialize the precomputed byte points and set the curve to them.
offset := 0
var bytePoints [32][256][3]fieldVal
for byteNum := 0; byteNum < 32; byteNum++ {
// All points in this window.
for i := 0; i < 256; i++ {
px := &bytePoints[byteNum][i][0]
py := &bytePoints[byteNum][i][1]
pz := &bytePoints[byteNum][i][2]
for i := 0; i < 10; i++ {
px.n[i] = binary.LittleEndian.Uint32(serialized[offset:])
offset += 4
}
for i := 0; i < 10; i++ {
py.n[i] = binary.LittleEndian.Uint32(serialized[offset:])
offset += 4
}
for i := 0; i < 10; i++ {
pz.n[i] = binary.LittleEndian.Uint32(serialized[offset:])
offset += 4
}
}
}
secp256k1.bytePoints = &bytePoints
return nil
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include <math.h>
#include <string.h>
#include <algorithm>
#include <bitset>
#include <vector>
#include "modules/audio_processing/residual_echo_detector.h"
#include "rtc_base/checks.h"
#include "rtc_base/ref_counted_object.h"
namespace webrtc {
void FuzzOneInput(const uint8_t* data, size_t size) {
// Number of times to update the echo detector.
constexpr size_t kNrOfUpdates = 7;
// Each round of updates requires a call to both AnalyzeRender and
// AnalyzeCapture, so the amount of needed input bytes doubles. Also, two
// bytes are used to set the call order.
constexpr size_t kNrOfNeededInputBytes = 2 * kNrOfUpdates * sizeof(float) + 2;
// The maximum audio energy that an audio frame can have is equal to the
// number of samples in the frame multiplied by 2^30. We use a single sample
// to represent an audio frame in this test, so it should have a maximum value
// equal to the square root of that value.
const float maxFuzzedValue = sqrtf(20 * 48) * 32768;
if (size < kNrOfNeededInputBytes) {
return;
}
size_t read_idx = 0;
// Use the first two bytes to choose the call order.
uint16_t call_order_int;
memcpy(&call_order_int, &data[read_idx], 2);
read_idx += 2;
std::bitset<16> call_order(call_order_int);
rtc::scoped_refptr<ResidualEchoDetector> echo_detector =
new rtc::RefCountedObject<ResidualEchoDetector>();
std::vector<float> input(1);
// Call AnalyzeCaptureAudio once to prevent the flushing of the buffer.
echo_detector->AnalyzeCaptureAudio(input);
for (size_t i = 0; i < 2 * kNrOfUpdates; ++i) {
// Convert 4 input bytes to a float.
RTC_DCHECK_LE(read_idx + sizeof(float), size);
memcpy(input.data(), &data[read_idx], sizeof(float));
read_idx += sizeof(float);
if (!isfinite(input[0]) || fabs(input[0]) > maxFuzzedValue) {
// Ignore infinity, nan values and values that are unrealistically large.
continue;
}
if (call_order[i]) {
echo_detector->AnalyzeRenderAudio(input);
} else {
echo_detector->AnalyzeCaptureAudio(input);
}
}
}
} // namespace webrtc
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<!--
Any copyright is dedicated to the Public Domain.
http://creativecommons.org/publicdomain/zero/1.0/
-->
<html>
<head>
<meta charset="utf-8">
<title>CSS Test: Testing min-height:auto & 'overflow' interaction</title>
<link rel="author" title="Daniel Holbert" href="mailto:[email protected]">
<link rel="help" href="http://www.w3.org/TR/css-flexbox-1/#min-size-auto">
<link rel="match" href="flexbox-min-height-auto-003-ref.html">
<!--
This testcase checks how "overflow-y" impacts the sizing behavior of flex
items with "min-height:auto" (the new initial value for "min-height").
In particular, the flex-item-specific "min-height:auto" behavior is
supposed to be disabled (e.g. we end up with min-height:0) when
"overflow-y" is non-"visible".
-->
<style>
.flexbox {
display: flex;
flex-direction: column;
height: 30px; /* Shrink flex items below min-height */
margin-right: 2px; /* (Just for spacing things out, visually) */
float: left;
}
.flexbox > * {
/* Flex items have purple border: */
border: 2px dotted purple;
}
.flexbox > * > * {
/* Flex items' contents are gray & fixed-size: */
background: gray;
width: 40px;
height: 80px;
}
.yvisible { overflow-y: visible; }
.yhidden { overflow-y: hidden; }
.yscroll { overflow-y: scroll; }
.yauto { overflow-y: auto; }
</style>
</head>
<body>
<!-- min-height:auto should prevent shrinking below intrinsic height when
the flex item has "overflow-y: visible", but not for any other
overflow-y values. -->
<div class="flexbox"><div class="yvisible"><div></div></div></div>
<div class="flexbox"><div class="yhidden"><div></div></div></div>
<div class="flexbox"><div class="yscroll"><div></div></div></div>
<div class="flexbox"><div class="yauto"><div></div></div></div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
---
# Source: olm/templates/0000_50_olm_06-catalogsource.crd.yaml
apiVersion: apiextensions.k8s.io/v1beta1
kind: CustomResourceDefinition
metadata:
name: catalogsources.operators.coreos.com
annotations:
displayName: CatalogSource
description: A source configured to find packages and updates.
spec:
group: operators.coreos.com
version: v1alpha1
versions:
- name: v1alpha1
served: true
storage: true
scope: Namespaced
names:
plural: catalogsources
singular: catalogsource
kind: CatalogSource
listKind: CatalogSourceList
shortNames:
- catsrc
categories:
- olm
additionalPrinterColumns:
- name: Name
type: string
description: The pretty name of the catalog
JSONPath: .spec.displayName
- name: Type
type: string
description: The type of the catalog
JSONPath: .spec.sourceType
- name: Publisher
type: string
description: The publisher of the catalog
JSONPath: .spec.publisher
- name: Age
type: date
JSONPath: .metadata.creationTimestamp
subresources:
# status enables the status subresource.
status: {}
validation:
openAPIV3Schema:
properties:
spec:
type: object
description: Spec for a catalog source.
required:
- sourceType
properties:
sourceType:
type: string
description: The type of the source. `configmap` is the new name for `internal`
enum:
- internal # deprecated
- configmap
- grpc
configMap:
type: string
description: The name of a ConfigMap that holds the entries for an in-memory catalog.
address:
type: string
description: An optional address. When set, directs OLM to connect to use a pre-existing registry server at this address.
image:
type: string
description: An image that serves a grpc registry. Only valid for `grpc` sourceType. If both image and address are set, OLM does not use the address field.
displayName:
type: string
description: Pretty name for display
publisher:
type: string
description: The name of an entity that publishes this catalog
secrets:
type: array
description: A set of secrets that can be used to access the contents of the catalog. It is best to keep this list small, since each will need to be tried for every catalog entry.
items:
type: string
description: A name of a secret in the namespace where the CatalogSource is defined.
status:
type: object
description: The status of the CatalogSource
properties:
configMapReference:
type: object
description: If sourceType is `internal` or `configmap`, then this holds a reference to the configmap associated with this CatalogSource.
properties:
name:
type: string
description: name of the configmap
namespace:
type: string
description: namespace of the configmap
resourceVersion:
type: string
description: resourceVersion of the configmap
uid:
type: string
description: uid of the configmap
registryService:
type: object
properties:
protocol:
type: string
description: protocol of the registry service
enum:
- grpc
serviceName:
type: string
description: name of the registry service
serviceNamespace:
type: string
description: namespace of the registry service
port:
type: string
description: port of the registry service
lastSync:
type: string
description: the last time the catalog was updated. If this time is less than the last updated time on the object, the catalog will be re-cached.
| {
"pile_set_name": "Github"
} |
<img id="input-image" src="">
<img id="output-image">
<script>
function main(args, md) {
setHeader('Content-Type', 'text/html')
return new Promise((resolve, reject) => {
// load an image, then resize and apply some CSS filters
let img = document.getElementById("input-image");
img.onerror = function(err) {
reject(err)
}
img.onload = function() {
const result = getBase64Image(
document.getElementById('input-image'),
'The watermark');
// display result for debugging purposes, not needed in prod
document.getElementById('output-image').src = result
resolve(`<img src="${result}" >`); // can also return raw data
};
img.src = args.img || 'nelson-mandela-bridge.jpg';
});
}
function getBase64Image(img, watermark) {
var canvas = document.createElement("canvas");
canvas.width = img.width;
canvas.height = img.height;
var ctx = canvas.getContext("2d");
ctx.drawImage(img, 0, 0, img.width, img.height);
ctx.fillStyle = "white";
ctx.font = "bold 32px Arial";
ctx.fillText(watermark, 10, canvas.height - 42);
var dataURL = canvas.toDataURL("image/png");
return dataURL;
}
</script>
| {
"pile_set_name": "Github"
} |
// Code generated by smithy-go-codegen DO NOT EDIT.
package types
// A key-value pair that identifies or specifies metadata about an AWS CloudHSM
// resource.
type Tag struct {
// The value of the tag.
Value *string
// The key of the tag.
Key *string
}
| {
"pile_set_name": "Github"
} |
#ifndef HEADER_COMP_H
#define HEADER_COMP_H
#include <openssl/crypto.h>
#ifdef __cplusplus
extern "C" {
#endif
typedef struct comp_ctx_st COMP_CTX;
typedef struct comp_method_st
{
int type; /* NID for compression library */
const char *name; /* A text string to identify the library */
int (*init)(COMP_CTX *ctx);
void (*finish)(COMP_CTX *ctx);
int (*compress)(COMP_CTX *ctx,
unsigned char *out, unsigned int olen,
unsigned char *in, unsigned int ilen);
int (*expand)(COMP_CTX *ctx,
unsigned char *out, unsigned int olen,
unsigned char *in, unsigned int ilen);
/* The following two do NOTHING, but are kept for backward compatibility */
long (*ctrl)(void);
long (*callback_ctrl)(void);
} COMP_METHOD;
struct comp_ctx_st
{
COMP_METHOD *meth;
unsigned long compress_in;
unsigned long compress_out;
unsigned long expand_in;
unsigned long expand_out;
CRYPTO_EX_DATA ex_data;
};
COMP_CTX *COMP_CTX_new(COMP_METHOD *meth);
void COMP_CTX_free(COMP_CTX *ctx);
int COMP_compress_block(COMP_CTX *ctx, unsigned char *out, int olen,
unsigned char *in, int ilen);
int COMP_expand_block(COMP_CTX *ctx, unsigned char *out, int olen,
unsigned char *in, int ilen);
COMP_METHOD *COMP_rle(void );
COMP_METHOD *COMP_zlib(void );
void COMP_zlib_cleanup(void);
#ifdef HEADER_BIO_H
#ifdef ZLIB
BIO_METHOD *BIO_f_zlib(void);
#endif
#endif
/* BEGIN ERROR CODES */
/* The following lines are auto generated by the script mkerr.pl. Any changes
* made after this point may be overwritten when the script is next run.
*/
void ERR_load_COMP_strings(void);
/* Error codes for the COMP functions. */
/* Function codes. */
#define COMP_F_BIO_ZLIB_FLUSH 99
#define COMP_F_BIO_ZLIB_NEW 100
#define COMP_F_BIO_ZLIB_READ 101
#define COMP_F_BIO_ZLIB_WRITE 102
/* Reason codes. */
#define COMP_R_ZLIB_DEFLATE_ERROR 99
#define COMP_R_ZLIB_INFLATE_ERROR 100
#define COMP_R_ZLIB_NOT_SUPPORTED 101
#ifdef __cplusplus
}
#endif
#endif
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (c) 2018 Zhang Hai <[email protected]>
~ All Rights Reserved.
-->
<vector
xmlns:android="http://schemas.android.com/apk/res/android"
android:width="40dp"
android:height="40dp"
android:viewportWidth="24"
android:viewportHeight="24">
<path
android:fillColor="#FFFFFFFF"
android:pathData="M18,2H6c-1.1,0 -2,0.9 -2,2v16c0,1.1 0.9,2 2,2h12c1.1,0 2,-0.9 2,-2V4c0,-1.1 -0.9,-2 -2,-2zM6,4h5v8l-2.5,-1.5L6,12V4z" />
</vector>
| {
"pile_set_name": "Github"
} |
.\" **************************************************************************
.\" * _ _ ____ _
.\" * Project ___| | | | _ \| |
.\" * / __| | | | |_) | |
.\" * | (__| |_| | _ <| |___
.\" * \___|\___/|_| \_\_____|
.\" *
.\" * Copyright (C) 1998 - 2014, Daniel Stenberg, <[email protected]>, et al.
.\" *
.\" * This software is licensed as described in the file COPYING, which
.\" * you should have received as part of this distribution. The terms
.\" * are also available at https://curl.haxx.se/docs/copyright.html.
.\" *
.\" * You may opt to use, copy, modify, merge, publish, distribute and/or sell
.\" * copies of the Software, and permit persons to whom the Software is
.\" * furnished to do so, under the terms of the COPYING file.
.\" *
.\" * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
.\" * KIND, either express or implied.
.\" *
.\" **************************************************************************
.\"
.TH CURLOPT_CONV_FROM_NETWORK_FUNCTION 3 "February 03, 2016" "libcurl 7.54.0" "curl_easy_setopt options"
.SH NAME
CURLOPT_CONV_FROM_NETWORK_FUNCTION \- convert data from network to host encoding
.SH SYNOPSIS
.nf
#include <curl/curl.h>
CURLcode conv_callback(char *ptr, size_t length);
CURLcode curl_easy_setopt(CURL *handle, CURLOPT_CONV_FROM_NETWORK_FUNCTION,
conv_callback);
.SH DESCRIPTION
Pass a pointer to your callback function, which should match the prototype
shown above.
Applies to non-ASCII platforms. \fIcurl_version_info(3)\fP will return the
CURL_VERSION_CONV feature bit set if this option is provided.
The data to be converted is in a buffer pointed to by the \fIptr\fP parameter.
The amount of data to convert is indicated by the \fIlength\fP parameter. The
converted data overlays the input data in the buffer pointed to by the ptr
parameter. \fICURLE_OK\fP must be returned upon successful conversion. A
CURLcode return value defined by curl.h, such as \fICURLE_CONV_FAILED\fP,
should be returned if an error was encountered.
\fBCURLOPT_CONV_FROM_NETWORK_FUNCTION\fP converts to host encoding from the
network encoding. It is used when commands or ASCII data are received over
the network.
If you set a callback pointer to NULL, or don't set it at all, the built-in
libcurl iconv functions will be used. If HAVE_ICONV was not defined when
libcurl was built, and no callback has been established, conversion will
return the CURLE_CONV_REQD error code.
If HAVE_ICONV is defined, CURL_ICONV_CODESET_OF_HOST must also be defined.
For example:
\&#define CURL_ICONV_CODESET_OF_HOST "IBM-1047"
The iconv code in libcurl will default the network and UTF8 codeset names as
follows:
\&#define CURL_ICONV_CODESET_OF_NETWORK "ISO8859-1"
\&#define CURL_ICONV_CODESET_FOR_UTF8 "UTF-8"
You will need to override these definitions if they are different on your
system.
.SH DEFAULT
NULL
.SH PROTOCOLS
FTP, SMTP, IMAP, POP3
.SH EXAMPLE
A minimal sketch of installing a conversion callback (the in-place conversion itself is platform specific and is only hinted at here by the hypothetical convert_to_host() helper):
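.nf
/* sketch only: convert_to_host() stands in for whatever platform-specific
   routine (iconv, for example) converts the buffer in place */
static CURLcode my_conv_from_network(char *ptr, size_t length)
{
  if(convert_to_host(ptr, length))
    return CURLE_CONV_FAILED;
  return CURLE_OK;
}

CURL *curl = curl_easy_init();
if(curl) {
  curl_easy_setopt(curl, CURLOPT_CONV_FROM_NETWORK_FUNCTION,
                   my_conv_from_network);
  /* ... perform the transfer ... */
}
.fi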
.SH AVAILABILITY
Available only if \fBCURL_DOES_CONVERSIONS\fP was defined when libcurl was built.
.SH RETURN VALUE
Returns CURLE_OK if the option is supported, and CURLE_UNKNOWN_OPTION if not.
.SH "SEE ALSO"
.BR CURLOPT_CONV_TO_NETWORK_FUNCTION "(3), " CURLOPT_CONV_FROM_UTF8_FUNCTION "(3), "
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<install type="component" version="1.5.0">
<name>Mass Mail</name>
<author>Joomla! Project</author>
<creationDate>April 2006</creationDate>
<copyright>Copyright (C) 2005 - 2008 Open Source Matters. All rights reserved.</copyright>
<license>http://www.gnu.org/licenses/gpl-2.0.html GNU/GPL</license>
<authorEmail>[email protected]</authorEmail>
<authorUrl>www.joomla.org</authorUrl>
<version>1.5.0</version>
<description>DESCMASSMAIL</description>
</install>
| {
"pile_set_name": "Github"
} |
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
/*
* DatabaseLoader.java
* Copyright (C) 2004 University of Waikato, Hamilton, New Zealand
*
*/
package weka.core.converters;
import java.io.File;
import java.io.IOException;
import java.sql.DatabaseMetaData;
import java.sql.Date;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Time;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.StringTokenizer;
import java.util.Vector;
import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Environment;
import weka.core.EnvironmentHandler;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.SparseInstance;
import weka.core.Utils;
import weka.experiment.InstanceQuery;
/**
<!-- globalinfo-start -->
* Reads Instances from a Database. Can read a database in batch or incremental mode.<br/>
* In incremental mode MySQL and HSQLDB are supported.<br/>
* For all other DBMSs a pseudoincremental mode is used:<br/>
* In pseudo incremental mode the instances are read into main memory all at once and then incrementally provided to the user.<br/>
* For incremental loading the rows in the database table have to be ordered uniquely.<br/>
* The reason for this is that every time only a single row is fetched by extending the user query by a LIMIT clause.<br/>
* If this extension is impossible instances will be loaded pseudoincrementally. To ensure that every row is fetched exactly once, they have to be ordered.<br/>
* Therefore a (primary) key is necessary. This approach is chosen, instead of using JDBC driver facilities, because the latter differ between drivers.<br/>
* If you use the DatabaseSaver and save instances by automatically generating a primary key (its name is defined in DatabaseUtils), this primary key will be used for ordering but will not be part of the output. The user defined SQL query to extract the instances should not contain LIMIT and ORDER BY clauses (see -Q option).<br/>
* In addition, for incremental loading, you can define in the DatabaseUtils file how many distinct values a nominal attribute is allowed to have. If this number is exceeded, the column will become a string attribute.<br/>
* In batch mode no string attributes will be created.
* <p/>
<!-- globalinfo-end -->
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -url <JDBC URL>
* The JDBC URL to connect to.
* (default: from DatabaseUtils.props file)</pre>
*
* <pre> -user <name>
* The user to connect with to the database.
* (default: none)</pre>
*
* <pre> -password <password>
* The password to connect with to the database.
* (default: none)</pre>
*
* <pre> -Q <query>
* SQL query of the form
* SELECT <list of columns>|* FROM <table> [WHERE]
* to execute.
* (default: Select * From Results0)</pre>
*
* <pre> -P <list of column names>
* List of column names uniquely defining a DB row
* (separated by ', ').
* Used for incremental loading.
* If not specified, the key will be determined automatically,
* if possible with the used JDBC driver.
* The auto ID column created by the DatabaseSaver won't be loaded.</pre>
*
* <pre> -I
* Sets incremental loading</pre>
*
<!-- options-end -->
*
* @author Stefan Mutter ([email protected])
* @version $Revision: 7489 $
* @see Loader
*/
public class DatabaseLoader
extends AbstractLoader
implements BatchConverter, IncrementalConverter, DatabaseConverter,
OptionHandler, EnvironmentHandler {
/** for serialization */
static final long serialVersionUID = -7936159015338318659L;
/** The header information that is retrieved in the beginning of incremental loading */
protected Instances m_structure;
/** Used in pseudoincremental mode. The whole dataset from which instances will be read incrementally.*/
protected Instances m_datasetPseudoInc;
/** Set of instances that equals m_structure except that the auto_generated_id column is not included as an attribute*/
protected Instances m_oldStructure;
/** The database connection */
protected DatabaseConnection m_DataBaseConnection;
/** The user defined query to load instances. (form: SELECT *|<column-list> FROM <table> [WHERE <condition>]) */
protected String m_query = "Select * from Results0";
/** Flag indicating that pseudo incremental mode is used (all instances are loaded at once into main memory and then read incrementally from main memory instead of the database) */
protected boolean m_pseudoIncremental;
/** If true it checks whether or not the table exists in the database before loading depending on jdbc metadata information.
* Set flag to false if no check is required or if jdbc metadata is not complete. */
protected boolean m_checkForTable;
/** Limit when an attribute is treated as a string attribute and not as a nominal one because it has too many values. */
protected int m_nominalToStringLimit;
/** The number of rows obtained by m_query, e.g. the size of the ResultSet to load*/
protected int m_rowCount;
/** Indicates how many rows have already been loaded incrementally */
protected int m_counter;
/** Decides which SQL statement should be used to limit the number of rows. DBMS dependent. The algorithm just tries several possibilities. */
protected int m_choice;
/** Flag indicating that the incremental process wants to read the first instance*/
protected boolean m_firstTime;
/** Flag indicating that incremental mode is chosen (for command line use only)*/
protected boolean m_inc;
/** Contains the name of the columns that uniquely define a row in the ResultSet. Ensures a unique ordering of instances for incremental loading.*/
protected ArrayList<String> m_orderBy;
/** Stores the index of a nominal value */
protected Hashtable<String,Double> [] m_nominalIndexes;
/** Stores the nominal value*/
protected ArrayList<String> [] m_nominalStrings;
/** Name of the primary key column that will allow unique ordering necessary for incremental loading. The name is specified in the DatabaseUtils file.*/
protected String m_idColumn;
/** the JDBC URL to use */
protected String m_URL = null;
/** the database user to use */
protected String m_User = "";
/** the database password to use */
protected String m_Password = "";
/** the keys for unique ordering */
protected String m_Keys = "";
/** the custom props file to use instead of default one. */
protected File m_CustomPropsFile = null;
/** Determines whether sparse data is created */
protected boolean m_CreateSparseData = false;
/** Environment variables */
protected transient Environment m_env;
/**
* Constructor
*
* @throws Exception if initialization fails
*/
public DatabaseLoader() throws Exception{
resetOptions();
}
/**
* Returns a string describing this Loader
*
* @return a description of the Loader suitable for
* displaying in the explorer/experimenter gui
*/
public String globalInfo() {
return
"Reads Instances from a Database. "
+ "Can read a database in batch or incremental mode.\n"
+ "In incremental mode MySQL and HSQLDB are supported.\n"
+ "For all other DBMSs a pseudoincremental mode is used:\n"
+ "In pseudo incremental mode the instances are read into main memory all at once and then incrementally provided to the user.\n"
+ "For incremental loading the rows in the database table have to be ordered uniquely.\n"
+ "The reason for this is that every time only a single row is fetched by extending the user query by a LIMIT clause.\n"
+ "If this extension is impossible instances will be loaded pseudoincrementally. To ensure that every row is fetched exactly once, they have to be ordered.\n"
+ "Therefore a (primary) key is necessary. This approach is chosen, instead of using JDBC driver facilities, because the latter differ between drivers.\n"
+ "If you use the DatabaseSaver and save instances by automatically generating a primary key (its name is defined in DatabaseUtils), this primary key will "
+ "be used for ordering but will not be part of the output. The user defined SQL query to extract the instances should not contain LIMIT and ORDER BY clauses (see -Q option).\n"
+ "In addition, for incremental loading, you can define in the DatabaseUtils file how many distinct values a nominal attribute is allowed to have. If this number is exceeded, the column will become a string attribute.\n"
+ "In batch mode no string attributes will be created.";
}
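/*
 * A rough usage sketch (illustrative only; the JDBC URL and credentials are
 * placeholders, and getDataSet()/getStructure()/getNextInstance() come from
 * the weka.core.converters.Loader interface):
 *
 *   DatabaseLoader loader = new DatabaseLoader();
 *   loader.setSource("jdbc:mysql://localhost/test", "user", "pass");
 *   loader.setQuery("SELECT * FROM Results0");
 *   Instances data = loader.getDataSet();              // batch mode
 *   // incremental mode instead:
 *   // Instances structure = loader.getStructure();
 *   // Instance inst;
 *   // while ((inst = loader.getNextInstance(structure)) != null) { ... }
 */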
/**
* Set the environment variables to use.
*
* @param env the environment variables to use
*/
public void setEnvironment(Environment env) {
m_env = env;
try {
// force a new connection and setting of all parameters
// with environment variables resolved
m_DataBaseConnection = newDatabaseConnection();
setUrl(m_URL);
setUser(m_User);
setPassword(m_Password);
} catch (Exception ex) {
// we won't complain about it here...
}
}
private void checkEnv() {
if (m_env == null) {
m_env = Environment.getSystemWide();
}
}
/**
* Initializes a new DatabaseConnection object, either default one or from
* custom props file.
*
* @return the DatabaseConnection object
* @see #m_CustomPropsFile
*/
protected DatabaseConnection newDatabaseConnection() throws Exception {
DatabaseConnection result;
checkEnv();
if (m_CustomPropsFile != null) {
File pFile = new File(m_CustomPropsFile.getPath());
String pPath = m_CustomPropsFile.getPath();
try {
pPath = m_env.substitute(pPath);
pFile = new File(pPath);
} catch (Exception ex) { }
result = new DatabaseConnection(pFile);
} else {
result = new DatabaseConnection();
}
m_pseudoIncremental = false;
m_checkForTable = true;
String props = result.getProperties().getProperty("nominalToStringLimit");
m_nominalToStringLimit = Integer.parseInt(props);
m_idColumn = result.getProperties().getProperty("idColumn");
if (result.getProperties().getProperty("checkForTable", "").equalsIgnoreCase("FALSE"))
m_checkForTable = false;
return result;
}
/**
* Resets the Loader to the settings in either the default DatabaseUtils.props
* or any property file that the user has specified via setCustomPropsFile().
*/
public void resetOptions() {
resetStructure();
try {
if(m_DataBaseConnection != null && m_DataBaseConnection.isConnected())
m_DataBaseConnection.disconnectFromDatabase();
m_DataBaseConnection = newDatabaseConnection();
} catch (Exception ex) {
printException(ex);
}
m_URL = m_DataBaseConnection.getDatabaseURL();
if (m_URL == null) {
m_URL = "none set!";
}
m_User = m_DataBaseConnection.getUsername();
if (m_User == null) {
m_User = "";
}
m_Password = m_DataBaseConnection.getPassword();
if (m_Password == null) {
m_Password = "";
}
m_orderBy = new ArrayList<String>();
}
/** Resets the Loader ready to read a new data set using set options
* @throws Exception if an error occurs while disconnecting from the database
*/
public void reset() {
resetStructure();
try {
if(m_DataBaseConnection != null && m_DataBaseConnection.isConnected())
m_DataBaseConnection.disconnectFromDatabase();
m_DataBaseConnection = newDatabaseConnection();
} catch (Exception ex) {
printException(ex);
}
// don't lose previously set connection data!
if (m_URL != null) {
setUrl(m_URL);
}
if (m_User != null) {
setUser(m_User);
}
if (m_Password != null) {
setPassword(m_Password);
}
m_orderBy = new ArrayList<String>();
// don't lose previously set key columns!
if (m_Keys != null) {
String k = m_Keys;
try {
k = m_env.substitute(k);
} catch (Exception ex) { }
setKeys(k);
}
m_inc = false;
}
/**
* Resets the structure of instances
*/
public void resetStructure(){
m_structure = null;
m_datasetPseudoInc = null;
m_oldStructure = null;
m_rowCount = 0;
m_counter = 0;
m_choice = 0;
m_firstTime = true;
setRetrieval(NONE);
}
/**
* Sets the query to execute against the database
*
* @param q the query to execute
*/
public void setQuery(String q) {
q = q.replaceAll("[fF][rR][oO][mM]","FROM");
q = q.replaceFirst("[sS][eE][lL][eE][cC][tT]","SELECT");
m_query = q;
}
/**
* Gets the query to execute against the database
*
* @return the query
*/
public String getQuery() {
return m_query;
}
/**
* the tip text for this property
*
* @return the tip text
*/
public String queryTipText(){
return "The query that should load the instances."
+"\n The query has to be of the form SELECT <column-list>|* FROM <table> [WHERE <conditions>]";
}
/**
* Sets the key columns of a database table
*
* @param keys a String containing the key columns in a comma separated list.
*/
public void setKeys(String keys){
m_Keys = keys;
m_orderBy.clear();
StringTokenizer st = new StringTokenizer(keys, ",");
while (st.hasMoreTokens()) {
String column = st.nextToken();
column = column.replaceAll(" ","");
m_orderBy.add(column);
}
}
/**
* Gets the key columns' name
*
* @return name of the key columns'
*/
public String getKeys(){
StringBuffer key = new StringBuffer();
for(int i = 0;i < m_orderBy.size(); i++){
key.append((String)m_orderBy.get(i));
if(i != m_orderBy.size()-1)
key.append(", ");
}
return key.toString();
}
/**
* the tip text for this property
*
* @return the tip text
*/
public String keysTipText(){
return "For incremental loading a unique identifier has to be specified."
+"\nIf the query includes all columns of a table (SELECT *...) a primary key"
+"\ncan be detected automatically depending on the JDBC driver. If that is not possible"
+"\nspecify the key columns here in a comma separated list.";
}
/**
* Sets the custom properties file to use.
*
* @param value the custom props file to load database parameters from,
* use null or directory to disable custom properties.
*/
public void setCustomPropsFile(File value) {
m_CustomPropsFile = value;
}
/**
* Returns the custom properties file in use, if any.
*
* @return the custom props file, null if none used
*/
public File getCustomPropsFile() {
return m_CustomPropsFile;
}
/**
* The tip text for this property.
*
* @return the tip text
*/
public String customPropsFileTipText(){
return "The custom properties that the user can use to override the default ones.";
}
/**
* Sets the database URL
*
* @param url string with the database URL
*/
public void setUrl(String url) {
checkEnv();
m_URL = url;
String dbU = m_URL;
try {
dbU = m_env.substitute(dbU);
} catch (Exception ex) { }
m_DataBaseConnection.setDatabaseURL(dbU);
}
/**
* Gets the URL
*
* @return the URL
*/
public String getUrl(){
//return m_DataBaseConnection.getDatabaseURL();
return m_URL;
}
/**
* the tip text for this property
*
* @return the tip text
*/
public String urlTipText(){
return "The URL of the database";
}
/**
* Sets the database user
*
* @param user the database user name
*/
public void setUser(String user){
checkEnv();
m_User = user;
String userCopy = user;
try {
userCopy = m_env.substitute(userCopy);
} catch (Exception ex) {
}
m_DataBaseConnection.setUsername(userCopy);
}
/**
* Gets the user name
*
* @return name of database user
*/
public String getUser(){
//return m_DataBaseConnection.getUsername();
return m_User;
}
/**
* the tip text for this property
*
* @return the tip text
*/
public String userTipText(){
return "The user name for the database";
}
/**
* Sets user password for the database
*
* @param password the password
*/
public void setPassword(String password) {
checkEnv();
m_Password = password;
String passCopy = password;
try {
passCopy = m_env.substitute(passCopy);
} catch (Exception ex) { }
m_DataBaseConnection.setPassword(passCopy);
}
/**
* Returns the database password
*
* @return the database password
*/
public String getPassword() {
// return m_DataBaseConnection.getPassword();
return m_Password;
}
/**
* the tip text for this property
*
* @return the tip text
*/
public String passwordTipText(){
return "The database password";
}
/**
* Returns the tip text for this property
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String sparseDataTipText() {
return "Encode data as sparse instances.";
}
/**
* Sets whether data should be encoded as sparse instances
* @param s true if data should be encoded as a set of sparse instances
*/
public void setSparseData(boolean s) {
m_CreateSparseData = s;
}
/**
* Gets whether data is to be returned as a set of sparse instances
* @return true if data is to be encoded as sparse instances
*/
public boolean getSparseData() {
return m_CreateSparseData;
}
/**
* Sets the database url, user and pw
*
* @param url the database url
* @param userName the user name
* @param password the password
*/
public void setSource(String url, String userName, String password){
try{
m_DataBaseConnection = newDatabaseConnection();
setUrl(url);
setUser(userName);
setPassword(password);
} catch(Exception ex) {
printException(ex);
}
}
/**
* Sets the database url
*
* @param url the database url
*/
public void setSource(String url){
try{
m_DataBaseConnection = newDatabaseConnection();
setUrl(url);
m_User = m_DataBaseConnection.getUsername();
m_Password = m_DataBaseConnection.getPassword();
} catch(Exception ex) {
printException(ex);
}
}
/**
* Sets the database url using the DatabaseUtils file
*
* @throws Exception if something goes wrong
*/
public void setSource() throws Exception{
m_DataBaseConnection = newDatabaseConnection();
m_URL = m_DataBaseConnection.getDatabaseURL();
m_User = m_DataBaseConnection.getUsername();
m_Password = m_DataBaseConnection.getPassword();
}
/**
* Opens a connection to the database
*/
public void connectToDatabase() {
try{
if(!m_DataBaseConnection.isConnected()){
m_DataBaseConnection.connectToDatabase();
}
} catch(Exception ex) {
printException(ex);
}
}
/**
* Returns the table name, or everything after the FROM clause of the
* user-specified query used to retrieve instances.
*
* @param onlyTableName true if only the table name should be returned, false otherwise
* @return the end of the query
*/
private String endOfQuery(boolean onlyTableName){
String table;
int beginIndex, endIndex;
beginIndex = m_query.indexOf("FROM ")+5;
while(m_query.charAt(beginIndex) == ' ')
beginIndex++;
endIndex = m_query.indexOf(" ",beginIndex);
if(endIndex != -1 && onlyTableName)
table = m_query.substring(beginIndex,endIndex);
else
table = m_query.substring(beginIndex);
if(m_DataBaseConnection.getUpperCase())
table = table.toUpperCase();
return table;
}
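// Example (illustration only): with m_query = "SELECT * FROM employee WHERE age > 30",
//   endOfQuery(true)  returns "employee"                 (table name only)
//   endOfQuery(false) returns "employee WHERE age > 30"  (everything after FROM)
// With getUpperCase() enabled the result is additionally upper-cased.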
/**
* Checks for a unique key using the JDBC driver's methods
* getPrimaryKeys() and getBestRowIdentifier().
* Depending on their implementation a key can be detected.
* The key is needed to order the instances uniquely for incremental loading.
* If an existing key cannot be detected, use the -P option.
*
* @throws Exception if a database error occurs
* @return true if a key could be detected, false otherwise
*/
private boolean checkForKey() throws Exception {
String query = m_query;
query = query.replaceAll(" +"," ");
//query has to use all columns
if(!query.startsWith("SELECT *"))
return false;
m_orderBy.clear();
if(!m_DataBaseConnection.isConnected())
m_DataBaseConnection.connectToDatabase();
DatabaseMetaData dmd = m_DataBaseConnection.getMetaData();
String table = endOfQuery(true);
//System.out.println(table);
//check for primary keys
ResultSet rs = dmd.getPrimaryKeys(null,null,table);
while(rs.next()){
m_orderBy.add(rs.getString(4));
}
rs.close();
if(m_orderBy.size() != 0)
return true;
//check for unique keys
rs = dmd.getBestRowIdentifier(null,null,table,DatabaseMetaData.bestRowSession,false);
ResultSetMetaData rmd = rs.getMetaData();
int help = 0;
while(rs.next()){
m_orderBy.add(rs.getString(2));
help++;
}
rs.close();
if(help == rmd.getColumnCount()){
m_orderBy.clear();
}
if(m_orderBy.size() != 0)
return true;
return false;
}
/**
* Converts string attributes into nominal ones for an instance read during
* incremental loading
*
* @param rs the result set
* @param i the index of the nominal attribute
* @throws Exception if it cannot be converted
*/
private void stringToNominal(ResultSet rs, int i) throws Exception{
while(rs.next()){
String str = rs.getString(1);
if(!rs.wasNull()){
Double index = (Double)m_nominalIndexes[i - 1].get(str);
if (index == null) {
index = new Double(m_nominalStrings[i - 1].size());
m_nominalIndexes[i - 1].put(str, index);
m_nominalStrings[i - 1].add(str);
}
}
}
}
/**
* Used in incremental loading. Modifies the SQL statement
* so that only one instance at a time is retrieved and the instances are ordered
* uniquely.
*
* @param query the query to modify for incremental loading
* @param offset sets which tuple out of the uniquely ordered ones should be returned
* @param choice the kind of query that is suitable for the used DBMS
* @return the modified query that returns only one result tuple.
*/
private String limitQuery(String query, int offset, int choice){
String limitedQuery;
StringBuffer order = new StringBuffer();
String orderByString = "";
if(m_orderBy.size() != 0){
order.append(" ORDER BY ");
for(int i = 0; i < m_orderBy.size()-1; i++){
if(m_DataBaseConnection.getUpperCase())
order.append(((String)m_orderBy.get(i)).toUpperCase());
else
order.append((String)m_orderBy.get(i));
order.append(", ");
}
if(m_DataBaseConnection.getUpperCase())
order.append(((String)m_orderBy.get(m_orderBy.size()-1)).toUpperCase());
else
order.append((String)m_orderBy.get(m_orderBy.size()-1));
orderByString = order.toString();
}
if(choice == 0){
limitedQuery = query.replaceFirst("SELECT","SELECT LIMIT "+offset+" 1");
limitedQuery = limitedQuery.concat(orderByString);
return limitedQuery;
}
if(choice == 1){
limitedQuery = query.concat(orderByString+" LIMIT 1 OFFSET "+offset);
return limitedQuery;
}
limitedQuery = query.concat(orderByString+" LIMIT "+offset+", 1");
//System.out.println(limitedQuery);
return limitedQuery;
}
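// Example (illustration only): for query "SELECT * FROM employee", key column "id"
// and offset 4, the three DBMS-specific variants produced are roughly:
//   choice 0: SELECT LIMIT 4 1 * FROM employee ORDER BY id        (HSQLDB style)
//   choice 1: SELECT * FROM employee ORDER BY id LIMIT 1 OFFSET 4 (PostgreSQL style)
//   choice 2: SELECT * FROM employee ORDER BY id LIMIT 4, 1       (MySQL style)
// getStructure() probes these in order until the driver accepts one.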
/**
* Counts the number of rows that are loaded from the database
*
* @throws Exception if the number of rows cannot be calculated
* @return the total number of rows
*/
private int getRowCount() throws Exception{
String query = "SELECT COUNT(*) FROM "+endOfQuery(false);
if(m_DataBaseConnection.execute(query) == false) {
throw new Exception("Cannot count results tuples.");
}
ResultSet rs = m_DataBaseConnection.getResultSet();
rs.next();
int i = rs.getInt(1);
rs.close();
return i;
}
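// Example (illustration only): with m_query = "SELECT * FROM employee WHERE age > 30"
// this executes "SELECT COUNT(*) FROM employee WHERE age > 30" and returns the
// single integer value from the first column of the result.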
/**
* Determines and returns (if possible) the structure (internally the
* header) of the data set as an empty set of instances.
*
* @return the structure of the data set as an empty set of Instances
* @throws IOException if an error occurs
*/
public Instances getStructure() throws IOException {
if (m_DataBaseConnection == null) {
throw new IOException("No source database has been specified");
}
connectToDatabase();
pseudo:
try{
if(m_pseudoIncremental && m_structure == null){
if (getRetrieval() == BATCH) {
throw new IOException("Cannot mix getting instances in both incremental and batch modes");
}
setRetrieval(NONE);
m_datasetPseudoInc = getDataSet();
m_structure = new Instances(m_datasetPseudoInc,0);
setRetrieval(NONE);
return m_structure;
}
if (m_structure == null) {
if(m_checkForTable) {
if(!m_DataBaseConnection.tableExists(endOfQuery(true)))
throw new IOException(
"Table does not exist according to metadata from JDBC driver. "
+ "If you are convinced the table exists, set 'checkForTable' "
+ "to 'False' in your DatabaseUtils.props file and try again.");
}
//finds out which SQL statement to use for the DBMS to limit the number of resulting rows to one
int choice = 0;
boolean rightChoice = false;
while (!rightChoice){
try{
String limitQ = limitQuery(m_query,0,choice);
if (m_DataBaseConnection.execute(limitQ) == false) {
throw new IOException("Query didn't produce results");
}
m_choice = choice;
rightChoice = true;
}
catch (SQLException ex) {
choice++;
if(choice == 3){
System.out.println("Incremental loading not supported for that DBMS. Pseudoincremental mode is used if you use incremental loading.\nAll rows are loaded into memory once and retrieved incrementally from memory instead of from the database.");
m_pseudoIncremental = true;
break pseudo;
}
}
}
String end = endOfQuery(false);
ResultSet rs = m_DataBaseConnection.getResultSet();
ResultSetMetaData md = rs.getMetaData();
// rs.close();
int numAttributes = md.getColumnCount();
int [] attributeTypes = new int [numAttributes];
m_nominalIndexes = Utils.cast(new Hashtable [numAttributes]);
m_nominalStrings = Utils.cast(new ArrayList [numAttributes]);
for (int i = 1; i <= numAttributes; i++) {
switch (m_DataBaseConnection.translateDBColumnType(md.getColumnTypeName(i))) {
case DatabaseConnection.STRING :
String columnName = md.getColumnLabel(i);
if(m_DataBaseConnection.getUpperCase())
columnName = columnName.toUpperCase();
m_nominalIndexes[i - 1] = new Hashtable<String,Double>();
m_nominalStrings[i - 1] = new ArrayList<String>();
// fast incomplete structure for batch mode - actual
// structure is determined by InstanceQuery in getDataSet()
if (getRetrieval() != INCREMENTAL) {
attributeTypes[i - 1] = Attribute.STRING;
break;
}
//System.err.println("String --> nominal");
ResultSet rs1;
String query = "SELECT COUNT(DISTINCT( "+columnName+" )) FROM " + end;
if (m_DataBaseConnection.execute(query) == true){
rs1 = m_DataBaseConnection.getResultSet();
rs1.next();
int count = rs1.getInt(1);
rs1.close();
// if(count > m_nominalToStringLimit || m_DataBaseConnection.execute("SELECT DISTINCT ( "+columnName+" ) FROM "+ end) == false){
if(count > m_nominalToStringLimit ||
m_DataBaseConnection.execute("SELECT DISTINCT ( "
+ columnName
+ " ) FROM "
+ end
+ " ORDER BY "
+ columnName) == false){
attributeTypes[i - 1] = Attribute.STRING;
break;
}
rs1 = m_DataBaseConnection.getResultSet();
}
else{
//System.err.println("Count for nominal values cannot be calculated. Attribute "+columnName+" treated as String.");
attributeTypes[i - 1] = Attribute.STRING;
break;
}
attributeTypes[i - 1] = Attribute.NOMINAL;
stringToNominal(rs1,i);
rs1.close();
break;
case DatabaseConnection.TEXT:
//System.err.println("boolean --> string");
columnName = md.getColumnLabel(i);
if(m_DataBaseConnection.getUpperCase())
columnName = columnName.toUpperCase();
m_nominalIndexes[i - 1] = new Hashtable<String,Double>();
m_nominalStrings[i - 1] = new ArrayList<String>();
// fast incomplete structure for batch mode - actual
// structure is determined by InstanceQuery in getDataSet()
if (getRetrieval() != INCREMENTAL) {
attributeTypes[i - 1] = Attribute.STRING;
break;
}
query = "SELECT COUNT(DISTINCT( "+columnName+" )) FROM " + end;
if (m_DataBaseConnection.execute(query) == true){
rs1 = m_DataBaseConnection.getResultSet();
stringToNominal(rs1,i);
rs1.close();
}
attributeTypes[i - 1] = Attribute.STRING;
break;
case DatabaseConnection.BOOL:
//System.err.println("boolean --> nominal");
attributeTypes[i - 1] = Attribute.NOMINAL;
m_nominalIndexes[i - 1] = new Hashtable<String,Double>();
m_nominalIndexes[i - 1].put("false", new Double(0));
m_nominalIndexes[i - 1].put("true", new Double(1));
m_nominalStrings[i - 1] = new ArrayList<String>();
m_nominalStrings[i - 1].add("false");
m_nominalStrings[i - 1].add("true");
break;
case DatabaseConnection.DOUBLE:
//System.err.println("BigDecimal --> numeric");
attributeTypes[i - 1] = Attribute.NUMERIC;
break;
case DatabaseConnection.BYTE:
//System.err.println("byte --> numeric");
attributeTypes[i - 1] = Attribute.NUMERIC;
break;
case DatabaseConnection.SHORT:
//System.err.println("short --> numeric");
attributeTypes[i - 1] = Attribute.NUMERIC;
break;
case DatabaseConnection.INTEGER:
//System.err.println("int --> numeric");
attributeTypes[i - 1] = Attribute.NUMERIC;
break;
case DatabaseConnection.LONG:
//System.err.println("long --> numeric");
attributeTypes[i - 1] = Attribute.NUMERIC;
break;
case DatabaseConnection.FLOAT:
//System.err.println("float --> numeric");
attributeTypes[i - 1] = Attribute.NUMERIC;
break;
case DatabaseConnection.DATE:
attributeTypes[i - 1] = Attribute.DATE;
break;
case DatabaseConnection.TIME:
attributeTypes[i - 1] = Attribute.DATE;
break;
default:
//System.err.println("Unknown column type");
attributeTypes[i - 1] = Attribute.STRING;
}
}
ArrayList<Attribute> attribInfo = new ArrayList<Attribute>();
for (int i = 0; i < numAttributes; i++) {
/* Fix for databases that uppercase column names */
//String attribName = attributeCaseFix(md.getColumnName(i + 1));
String attribName = md.getColumnLabel(i + 1);
switch (attributeTypes[i]) {
case Attribute.NOMINAL:
attribInfo.add(new Attribute(attribName, m_nominalStrings[i]));
break;
case Attribute.NUMERIC:
attribInfo.add(new Attribute(attribName));
break;
case Attribute.STRING:
Attribute att = new Attribute(attribName, (ArrayList<String>)null);
for (int n = 0; n < m_nominalStrings[i].size(); n++) {
att.addStringValue((String) m_nominalStrings[i].get(n));
}
attribInfo.add(att);
break;
case Attribute.DATE:
attribInfo.add(new Attribute(attribName, (String)null));
break;
default:
throw new IOException("Unknown attribute type");
}
}
m_structure = new Instances(endOfQuery(true), attribInfo,0);
//get rid of m_idColumn
if(m_DataBaseConnection.getUpperCase())
m_idColumn = m_idColumn.toUpperCase();
//System.out.println(m_structure.attribute(0).name().equals(idColumn));
if(m_structure.attribute(0).name().equals(m_idColumn)){
m_oldStructure = new Instances(m_structure,0);
m_oldStructure.deleteAttributeAt(0);
//System.out.println(m_structure);
}
else
m_oldStructure = new Instances(m_structure,0);
if (m_DataBaseConnection.getResultSet() != null) {
rs.close();
}
}
else{
if(m_oldStructure == null)
m_oldStructure = new Instances(m_structure,0);
}
m_DataBaseConnection.disconnectFromDatabase();
}
catch(Exception ex) {
ex.printStackTrace();
printException(ex);
}
return m_oldStructure;
}
/**
* Return the full data set in batch mode (header and all instances at once).
*
* @return the full data set as a set of Instances
* @throws IOException if there is no source or parsing fails
*/
public Instances getDataSet() throws IOException {
if (m_DataBaseConnection == null) {
throw new IOException("No source database has been specified");
}
if (getRetrieval() == INCREMENTAL) {
throw new IOException("Cannot mix getting Instances in both incremental and batch modes");
}
setRetrieval(BATCH);
Instances result = null;
checkEnv();
try {
InstanceQuery iq = new InstanceQuery();
iq.initialize(m_CustomPropsFile);
String realURL = m_URL;
try {
realURL = m_env.substitute(realURL);
} catch (Exception ex) { }
iq.setDatabaseURL(realURL);
String realUser = m_User;
try {
realUser = m_env.substitute(realUser);
} catch (Exception ex) { }
iq.setUsername(realUser);
String realPass = m_Password;
try {
realPass = m_env.substitute(realPass);
} catch (Exception ex) { }
iq.setPassword(realPass);
String realQuery = m_query;
try {
realQuery = m_env.substitute(realQuery);
} catch (Exception ex) { }
iq.setQuery(realQuery);
iq.setSparseData(m_CreateSparseData);
result = iq.retrieveInstances();
if(m_DataBaseConnection.getUpperCase()) {
m_idColumn = m_idColumn.toUpperCase();
}
if(result.attribute(0).name().equals(m_idColumn)){
result.deleteAttributeAt(0);
}
m_structure = new Instances(result,0);
} catch (Exception ex) {
printException(ex);
StringBuffer text = new StringBuffer();
if(m_query.equals("Select * from Results0")){
text.append("\n\nDatabaseLoader options:\n");
Enumeration enumi = listOptions();
while (enumi.hasMoreElements()) {
Option option = (Option)enumi.nextElement();
text.append(option.synopsis()+'\n');
text.append(option.description()+'\n');
}
System.out.println(text);
}
}
return result;
}
/**
* Reads an instance from a database.
*
* @param rs the ResultSet to load
* @throws Exception if instance cannot be read
* @return an instance read from the database
*/
private Instance readInstance(ResultSet rs) throws Exception{
ResultSetMetaData md = rs.getMetaData();
int numAttributes = md.getColumnCount();
double[] vals = new double[numAttributes];
m_structure.delete();
for(int i = 1; i <= numAttributes; i++) {
switch (m_DataBaseConnection.translateDBColumnType(md.getColumnTypeName(i))) {
case DatabaseConnection.STRING :
String str = rs.getString(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
Double index = (Double)m_nominalIndexes[i - 1].get(str);
if (index == null) {
index = new Double(m_structure.attribute(i-1).addStringValue(str));
}
vals[i - 1] = index.doubleValue();
}
break;
case DatabaseConnection.TEXT:
str = rs.getString(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
}
else {
Double index = (Double)m_nominalIndexes[i - 1].get(str);
if (index == null) {
index = new Double(m_structure.attribute(i-1).addStringValue(str));
}
vals[i - 1] = index.doubleValue();
}
break;
case DatabaseConnection.BOOL:
boolean boo = rs.getBoolean(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
vals[i - 1] = (boo ? 1.0 : 0.0);
}
break;
case DatabaseConnection.DOUBLE:
// BigDecimal bd = rs.getBigDecimal(i, 4);
double dd = rs.getDouble(i);
// Use the column precision instead of 4?
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
// newInst.setValue(i - 1, bd.doubleValue());
vals[i - 1] = dd;
}
break;
case DatabaseConnection.BYTE:
byte by = rs.getByte(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
vals[i - 1] = (double)by;
}
break;
case DatabaseConnection.SHORT:
short sh = rs.getShort(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
vals[i - 1] = (double)sh;
}
break;
case DatabaseConnection.INTEGER:
int in = rs.getInt(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
vals[i - 1] = (double)in;
}
break;
case DatabaseConnection.LONG:
long lo = rs.getLong(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
vals[i - 1] = (double)lo;
}
break;
case DatabaseConnection.FLOAT:
float fl = rs.getFloat(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
vals[i - 1] = (double)fl;
}
break;
case DatabaseConnection.DATE:
Date date = rs.getDate(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
// TODO: Do a value check here.
vals[i - 1] = (double)date.getTime();
}
break;
case DatabaseConnection.TIME:
Time time = rs.getTime(i);
if (rs.wasNull()) {
vals[i - 1] = Utils.missingValue();
} else {
// TODO: Do a value check here.
vals[i - 1] = (double) time.getTime();
}
break;
default:
vals[i - 1] = Utils.missingValue();
}
}
Instance inst;
if (m_CreateSparseData)
inst = new SparseInstance(1.0, vals);
else
inst = new DenseInstance(1.0, vals);
//get rid of m_idColumn
if(m_DataBaseConnection.getUpperCase())
m_idColumn = m_idColumn.toUpperCase();
if(m_structure.attribute(0).name().equals(m_idColumn)){
inst.deleteAttributeAt(0);
m_oldStructure.add(inst);
inst = m_oldStructure.instance(0);
m_oldStructure.delete(0);
}
else{
//the instance is added to and deleted from the structure to get the true nominal values instead of the index of the values.
m_structure.add(inst);
inst = m_structure.instance(0);
m_structure.delete(0);
}
return inst;
}
/**
* Reads the data set incrementally---gets the next instance in the data
* set or returns null if there are no
* more instances to get. If the structure hasn't yet been
* determined by a call to getStructure, then this method does so before
* returning the next instance in the data set.
*
* @param structure the dataset header information, will get updated in
* case of string or relational attributes
* @return the next instance in the data set as an Instance object or null
* if there are no more instances to be read
* @throws IOException if there is an error during parsing
*/
public Instance getNextInstance(Instances structure) throws IOException {
m_structure = structure;
if (m_DataBaseConnection == null)
throw new IOException("No source database has been specified");
if (getRetrieval() == BATCH) {
throw new IOException("Cannot mix getting Instances in both incremental and batch modes");
}
//pseudoIncremental: load all instances into main memory in batch mode and hand them to the user incrementally
if(m_pseudoIncremental){
setRetrieval(INCREMENTAL);
if(m_datasetPseudoInc.numInstances() > 0){
Instance current = m_datasetPseudoInc.instance(0);
m_datasetPseudoInc.delete(0);
return current;
}
else{
resetStructure();
return null;
}
}
//real incremental mode. At the moment (version 1.0) only for MySQL and HSQLDB (Postgres not tested, should work)
setRetrieval(INCREMENTAL);
try{
if(!m_DataBaseConnection.isConnected())
connectToDatabase();
//if no key columns specified by user, try to detect automatically
if(m_firstTime && m_orderBy.size() == 0){
if(!checkForKey())
throw new Exception("A unique order cannot be detected automatically.\nYou have to use SELECT * in your query to enable this feature.\nMaybe JDBC driver is not able to detect key.\nDefine primary key in your database or use -P option (command line) or enter key columns in the GUI.");
}
if(m_firstTime){
m_firstTime = false;
m_rowCount = getRowCount();
}
//as long as not all rows have been loaded
if(m_counter < m_rowCount){
if (m_DataBaseConnection.execute(limitQuery(m_query,m_counter,m_choice)) == false) {
throw new Exception("Tuple could not be retrieved.");
}
m_counter++;
ResultSet rs = m_DataBaseConnection.getResultSet();
rs.next();
Instance current = readInstance(rs);
rs.close();
return current;
}
else{
m_DataBaseConnection.disconnectFromDatabase();
resetStructure();
return null;
}
}catch(Exception ex) {
printException(ex);
}
return null;
}
/**
* Gets the current option settings
*
* @return the current option settings as an array of strings
*/
public String[] getOptions() {
Vector<String> options = new Vector<String>();
if ( (getUrl() != null) && (getUrl().length() != 0) ) {
options.add("-url");
options.add(getUrl());
}
if ( (getUser() != null) && (getUser().length() != 0) ) {
options.add("-user");
options.add(getUser());
}
if ( (getPassword() != null) && (getPassword().length() != 0) ) {
options.add("-password");
options.add(getPassword());
}
options.add("-Q");
options.add(getQuery());
StringBuffer text = new StringBuffer();
for (int i = 0; i < m_orderBy.size(); i++) {
if (i > 0)
text.append(", ");
text.append((String) m_orderBy.get(i));
}
options.add("-P");
options.add(text.toString());
if (m_inc)
options.add("-I");
if ((m_CustomPropsFile != null) && !m_CustomPropsFile.isDirectory()) {
options.add("-custom-props");
options.add(m_CustomPropsFile.toString());
}
return (String[]) options.toArray(new String[options.size()]);
}
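// Example (illustration only, hypothetical values): with a URL and user set,
// query "SELECT * FROM employee", key column "id" and incremental loading
// enabled, getOptions() returns roughly
//   {"-url", "jdbc:...", "-user", "me", "-Q", "SELECT * FROM employee",
//    "-P", "id", "-I"}
// (-password and -custom-props entries only appear when they have been set).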
/**
* Lists the available options
*
* @return an enumeration of the available options
*/
public java.util.Enumeration listOptions() {
Vector<Option> newVector = new Vector<Option>();
newVector.add(new Option(
"\tThe JDBC URL to connect to.\n"
+ "\t(default: from DatabaseUtils.props file)",
"url", 1, "-url <JDBC URL>"));
newVector.add(new Option(
"\tThe user to connect with to the database.\n"
+ "\t(default: none)",
"user", 1, "-user <name>"));
newVector.add(new Option(
"\tThe password to connect with to the database.\n"
+ "\t(default: none)",
"password", 1, "-password <password>"));
newVector.add(new Option(
"\tSQL query of the form\n"
+ "\t\tSELECT <list of columns>|* FROM <table> [WHERE]\n"
+ "\tto execute.\n"
+ "\t(default: Select * From Results0)",
"Q",1,"-Q <query>"));
newVector.add(new Option(
"\tList of column names uniquely defining a DB row\n"
+ "\t(separated by ', ').\n"
+ "\tUsed for incremental loading.\n"
+ "\tIf not specified, the key will be determined automatically,\n"
+ "\tif possible with the used JDBC driver.\n"
+ "\tThe auto ID column created by the DatabaseSaver won't be loaded.",
"P",1,"-P <list of column names>"));
newVector.add(new Option(
"\tSets incremental loading",
"I", 0, "-I"));
newVector.addElement(new Option(
"\tReturn sparse rather than normal instances.",
"S", 0, "-S"));
newVector.add(new Option(
"\tThe custom properties file to use instead of default ones,\n"
+ "\tcontaining the database parameters.\n"
+ "\t(default: none)",
"custom-props", 1, "-custom-props <file>"));
return newVector.elements();
}
/**
* Sets the options.
*
<!-- options-start -->
* Valid options are: <p/>
*
* <pre> -url <JDBC URL>
* The JDBC URL to connect to.
* (default: from DatabaseUtils.props file)</pre>
*
* <pre> -user <name>
* The user to connect with to the database.
* (default: none)</pre>
*
* <pre> -password <password>
* The password to connect with to the database.
* (default: none)</pre>
*
* <pre> -Q <query>
* SQL query of the form
* SELECT <list of columns>|* FROM <table> [WHERE]
* to execute.
* (default: Select * From Results0)</pre>
*
* <pre> -P <list of column names>
* List of column names uniquely defining a DB row
* (separated by ', ').
* Used for incremental loading.
* If not specified, the key will be determined automatically,
* if possible with the used JDBC driver.
* The auto ID column created by the DatabaseSaver won't be loaded.</pre>
*
* <pre> -I
* Sets incremental loading</pre>
*
<!-- options-end -->
*
* @param options the options
* @throws Exception if options cannot be set
*/
public void setOptions(String[] options) throws Exception {
String optionString, keyString, tmpStr;
optionString = Utils.getOption('Q', options);
keyString = Utils.getOption('P', options);
reset();
tmpStr = Utils.getOption("url", options);
if (tmpStr.length() != 0)
setUrl(tmpStr);
tmpStr = Utils.getOption("user", options);
if (tmpStr.length() != 0)
setUser(tmpStr);
tmpStr = Utils.getOption("password", options);
if (tmpStr.length() != 0)
setPassword(tmpStr);
if (optionString.length() != 0)
setQuery(optionString);
m_orderBy.clear();
m_inc = Utils.getFlag('I', options);
if(m_inc){
StringTokenizer st = new StringTokenizer(keyString, ",");
while (st.hasMoreTokens()) {
String column = st.nextToken();
column = column.replaceAll(" ","");
m_orderBy.add(column);
}
}
tmpStr = Utils.getOption("custom-props", options);
if (tmpStr.length() == 0)
setCustomPropsFile(null);
else
setCustomPropsFile(new File(tmpStr));
}
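// Example command line (sketch; the fully qualified class name is assumed,
// adjust it to the actual package):
//   java weka.core.converters.DatabaseLoader \
//     -url jdbc:mysql://localhost/mydb -user me -password secret \
//     -Q "SELECT * FROM employee" -P "id" -I
// This configures incremental loading ordered by the key column "id";
// main() below parses exactly these options via setOptions().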
/**Prints an exception
* @param ex the exception to print
*/
private void printException(Exception ex){
System.out.println("\n--- Exception caught ---\n");
while (ex != null) {
System.out.println("Message: "
+ ex.getMessage ());
if(ex instanceof SQLException){
System.out.println("SQLState: "
+ ((SQLException)ex).getSQLState ());
System.out.println("ErrorCode: "
+ ((SQLException)ex).getErrorCode ());
ex = ((SQLException)ex).getNextException();
}
else
ex = null;
System.out.println("");
}
}
/**
* Returns the revision string.
*
* @return the revision
*/
public String getRevision() {
return RevisionUtils.extract("$Revision: 7489 $");
}
/** Main method.
* @param options the options
*/
public static void main(String [] options) {
DatabaseLoader atf;
try {
atf = new DatabaseLoader();
atf.setOptions(options);
atf.setSource(atf.getUrl(), atf.getUser(), atf.getPassword());
if(!atf.m_inc)
System.out.println(atf.getDataSet());
else{
Instances structure = atf.getStructure();
System.out.println(structure);
Instance temp;
do {
temp = atf.getNextInstance(structure);
if (temp != null) {
System.out.println(temp);
}
} while (temp != null);
}
} catch (Exception e) {
e.printStackTrace();
System.out.println("\n"+e.getMessage());
}
}
}
/*******************************************************************************
* Copyright 2012-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* *****************************************************************************
*
* AWS Tools for Windows (TM) PowerShell (TM)
*
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
using System.Text;
using Amazon.PowerShell.Common;
using Amazon.Runtime;
using Amazon.WAF;
using Amazon.WAF.Model;
namespace Amazon.PowerShell.Cmdlets.WAF
{
/// <summary>
/// <note><para>
/// This is <b>AWS WAF Classic</b> documentation. For more information, see <a href="https://docs.aws.amazon.com/waf/latest/developerguide/classic-waf-chapter.html">AWS
/// WAF Classic</a> in the developer guide.
/// </para><para><b>For the latest version of AWS WAF</b>, use the AWS WAFV2 API and see the <a href="https://docs.aws.amazon.com/waf/latest/developerguide/waf-chapter.html">AWS
/// WAF Developer Guide</a>. With the latest version, AWS WAF has a single set of endpoints
/// for regional and global use.
/// </para></note><para>
/// Returns an array of <a>IPSetSummary</a> objects in the response.
/// </para><br/><br/>This cmdlet automatically pages all available results to the pipeline - parameters related to iteration are only needed if you want to manually control the paginated output. To disable autopagination, use -NoAutoIteration.
/// </summary>
[Cmdlet("Get", "WAFIPSetList")]
[OutputType("Amazon.WAF.Model.IPSetSummary")]
[AWSCmdlet("Calls the AWS WAF ListIPSets API operation.", Operation = new[] {"ListIPSets"}, SelectReturnType = typeof(Amazon.WAF.Model.ListIPSetsResponse))]
[AWSCmdletOutput("Amazon.WAF.Model.IPSetSummary or Amazon.WAF.Model.ListIPSetsResponse",
"This cmdlet returns a collection of Amazon.WAF.Model.IPSetSummary objects.",
"The service call response (type Amazon.WAF.Model.ListIPSetsResponse) can also be referenced from properties attached to the cmdlet entry in the $AWSHistory stack."
)]
public partial class GetWAFIPSetListCmdlet : AmazonWAFClientCmdlet, IExecutor
{
#region Parameter Limit
/// <summary>
/// <para>
/// <para>Specifies the number of <code>IPSet</code> objects that you want AWS WAF to return
/// for this request. If you have more <code>IPSet</code> objects than the number you
/// specify for <code>Limit</code>, the response includes a <code>NextMarker</code> value
/// that you can use to get another batch of <code>IPSet</code> objects.</para>
/// </para>
/// <para>
/// <br/><b>Note:</b> In AWSPowerShell and AWSPowerShell.NetCore this parameter is used to limit the total number of items returned by the cmdlet.
/// <br/>In AWS.Tools this parameter is simply passed to the service to specify how many items should be returned by each service call.
/// <br/>Pipe the output of this cmdlet into Select-Object -First to terminate retrieving data pages early and control the number of items returned.
/// </para>
/// <para>If a value for this parameter is not specified the cmdlet will use a default value of '<b>100</b>'.</para>
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
[Alias("MaxItems")]
public int? Limit { get; set; }
#endregion
#region Parameter NextMarker
/// <summary>
/// <para>
/// <para>AWS WAF returns a <code>NextMarker</code> value in the response that allows you to
/// list another group of <code>IPSets</code>. For the second and subsequent <code>ListIPSets</code>
/// requests, specify the value of <code>NextMarker</code> from the previous response
/// to get information about another batch of <code>IPSets</code>.</para>
/// </para>
/// <para>
/// <br/><b>Note:</b> This parameter is only used if you are manually controlling output pagination of the service API call.
/// <br/>In order to manually control output pagination, use '-NextMarker $null' for the first call and '-NextMarker $AWSHistory.LastServiceResponse.NextMarker' for subsequent calls.
/// </para>
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
[Alias("NextToken")]
public System.String NextMarker { get; set; }
#endregion
#region Parameter Select
/// <summary>
/// Use the -Select parameter to control the cmdlet output. The default value is 'IPSets'.
/// Specifying -Select '*' will result in the cmdlet returning the whole service response (Amazon.WAF.Model.ListIPSetsResponse).
/// Specifying the name of a property of type Amazon.WAF.Model.ListIPSetsResponse will result in that property being returned.
/// Specifying -Select '^ParameterName' will result in the cmdlet returning the selected cmdlet parameter value.
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public string Select { get; set; } = "IPSets";
#endregion
#region Parameter NoAutoIteration
/// <summary>
/// By default the cmdlet will auto-iterate and retrieve all results to the pipeline by performing multiple
/// service calls. If set, the cmdlet will retrieve only the next 'page' of results using the value of NextMarker
/// as the start point.
/// </summary>
[System.Management.Automation.Parameter(ValueFromPipelineByPropertyName = true)]
public SwitchParameter NoAutoIteration { get; set; }
#endregion
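// Illustrative usage from PowerShell (not part of the generated source); the
// cmdlet name comes from the [Cmdlet("Get", "WAFIPSetList")] attribute above:
//
//   # auto-paginate through all IPSet summaries
//   Get-WAFIPSetList
//
//   # fetch a single page of up to 10 entries and control paging manually
//   Get-WAFIPSetList -Limit 10 -NextMarker $null -NoAutoIteration
//
// Credential/region parameters inherited from the base cmdlet are omitted here.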
protected override void ProcessRecord()
{
base.ProcessRecord();
var context = new CmdletContext();
// allow for manipulation of parameters prior to loading into context
PreExecutionContextLoad(context);
if (ParameterWasBound(nameof(this.Select)))
{
context.Select = CreateSelectDelegate<Amazon.WAF.Model.ListIPSetsResponse, GetWAFIPSetListCmdlet>(Select) ??
throw new System.ArgumentException("Invalid value for -Select parameter.", nameof(this.Select));
}
context.Limit = this.Limit;
#if MODULAR
if (!ParameterWasBound(nameof(this.Limit)))
{
WriteVerbose("Limit parameter unset, using default value of '100'");
context.Limit = 100;
}
#endif
#if !MODULAR
if (ParameterWasBound(nameof(this.Limit)) && this.Limit.HasValue)
{
WriteWarning("AWSPowerShell and AWSPowerShell.NetCore use the Limit parameter to limit the total number of items returned by the cmdlet." +
" This behavior is obsolete and will be removed in a future version of these modules. Pipe the output of this cmdlet into Select-Object -First to terminate" +
" retrieving data pages early and control the number of items returned. AWS.Tools already implements the new behavior of simply passing Limit" +
" to the service to specify how many items should be returned by each service call.");
}
#endif
context.NextMarker = this.NextMarker;
// allow further manipulation of loaded context prior to processing
PostExecutionContextLoad(context);
var output = Execute(context) as CmdletOutput;
ProcessOutput(output);
}
#region IExecutor Members
#if MODULAR
public object Execute(ExecutorContext context)
{
var cmdletContext = context as CmdletContext;
var useParameterSelect = this.Select.StartsWith("^");
// create request and set iteration invariants
var request = new Amazon.WAF.Model.ListIPSetsRequest();
if (cmdletContext.Limit != null)
{
request.Limit = AutoIterationHelpers.ConvertEmitLimitToServiceTypeInt32(cmdletContext.Limit.Value);
}
// Initialize loop variant and commence piping
var _nextToken = cmdletContext.NextMarker;
var _userControllingPaging = this.NoAutoIteration.IsPresent || ParameterWasBound(nameof(this.NextMarker));
var client = Client ?? CreateClient(_CurrentCredentials, _RegionEndpoint);
do
{
request.NextMarker = _nextToken;
CmdletOutput output;
try
{
var response = CallAWSServiceOperation(client, request);
object pipelineOutput = null;
if (!useParameterSelect)
{
pipelineOutput = cmdletContext.Select(response, this);
}
output = new CmdletOutput
{
PipelineOutput = pipelineOutput,
ServiceResponse = response
};
_nextToken = response.NextMarker;
}
catch (Exception e)
{
output = new CmdletOutput { ErrorResponse = e };
}
ProcessOutput(output);
} while (!_userControllingPaging && AutoIterationHelpers.HasValue(_nextToken));
if (useParameterSelect)
{
WriteObject(cmdletContext.Select(null, this));
}
return null;
}
#else
public object Execute(ExecutorContext context)
{
var cmdletContext = context as CmdletContext;
var useParameterSelect = this.Select.StartsWith("^");
// create request and set iteration invariants
var request = new Amazon.WAF.Model.ListIPSetsRequest();
// Initialize loop variants and commence piping
System.String _nextToken = null;
int? _emitLimit = null;
int _retrievedSoFar = 0;
if (AutoIterationHelpers.HasValue(cmdletContext.NextMarker))
{
_nextToken = cmdletContext.NextMarker;
}
if (cmdletContext.Limit.HasValue)
{
// The service has a maximum page size of 100. If the user has
// asked for more items than page max, and there is no page size
// configured, we rely on the service ignoring the set maximum
// and giving us 100 items back. If a page size is set, that will
// be used to configure the pagination.
// We'll make further calls to satisfy the user's request.
_emitLimit = cmdletContext.Limit;
}
var _userControllingPaging = this.NoAutoIteration.IsPresent || ParameterWasBound(nameof(this.NextMarker));
var client = Client ?? CreateClient(_CurrentCredentials, _RegionEndpoint);
do
{
request.NextMarker = _nextToken;
if (_emitLimit.HasValue)
{
int correctPageSize = Math.Min(100, _emitLimit.Value);
request.Limit = AutoIterationHelpers.ConvertEmitLimitToInt32(correctPageSize);
}
else if (!ParameterWasBound(nameof(this.Limit)))
{
request.Limit = AutoIterationHelpers.ConvertEmitLimitToInt32(100);
}
CmdletOutput output;
try
{
var response = CallAWSServiceOperation(client, request);
object pipelineOutput = null;
if (!useParameterSelect)
{
pipelineOutput = cmdletContext.Select(response, this);
}
output = new CmdletOutput
{
PipelineOutput = pipelineOutput,
ServiceResponse = response
};
int _receivedThisCall = response.IPSets.Count;
_nextToken = response.NextMarker;
_retrievedSoFar += _receivedThisCall;
if (_emitLimit.HasValue)
{
_emitLimit -= _receivedThisCall;
}
}
catch (Exception e)
{
if (_retrievedSoFar == 0 || !_emitLimit.HasValue)
{
output = new CmdletOutput { ErrorResponse = e };
}
else
{
break;
}
}
ProcessOutput(output);
} while (!_userControllingPaging && AutoIterationHelpers.HasValue(_nextToken) && (!_emitLimit.HasValue || _emitLimit.Value >= 0));
if (useParameterSelect)
{
WriteObject(cmdletContext.Select(null, this));
}
return null;
}
#endif
public ExecutorContext CreateContext()
{
return new CmdletContext();
}
#endregion
#region AWS Service Operation Call
private Amazon.WAF.Model.ListIPSetsResponse CallAWSServiceOperation(IAmazonWAF client, Amazon.WAF.Model.ListIPSetsRequest request)
{
Utils.Common.WriteVerboseEndpointMessage(this, client.Config, "AWS WAF", "ListIPSets");
try
{
#if DESKTOP
return client.ListIPSets(request);
#elif CORECLR
return client.ListIPSetsAsync(request).GetAwaiter().GetResult();
#else
#error "Unknown build edition"
#endif
}
catch (AmazonServiceException exc)
{
var webException = exc.InnerException as System.Net.WebException;
if (webException != null)
{
throw new Exception(Utils.Common.FormatNameResolutionFailureMessage(client.Config, webException.Message), webException);
}
throw;
}
}
#endregion
internal partial class CmdletContext : ExecutorContext
{
public int? Limit { get; set; }
public System.String NextMarker { get; set; }
public System.Func<Amazon.WAF.Model.ListIPSetsResponse, GetWAFIPSetListCmdlet, object> Select { get; set; } =
(response, cmdlet) => response.IPSets;
}
}
}
-- Created by: Peter KURNEV
-- Copyright (c) 1999-2014 OPEN CASCADE SAS
--
-- This file is part of Open CASCADE Technology software library.
--
-- This library is free software; you can redistribute it and/or modify it under
-- the terms of the GNU Lesser General Public License version 2.1 as published
-- by the Free Software Foundation, with special exception defined in the file
-- OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
-- distribution for complete text of the license and disclaimer of any warranty.
--
-- Alternatively, this file may be used under the terms of Open CASCADE
-- commercial license or contractual agreement.
class SetMapHasher from BOPTools
---Purpose:
uses
Set from BOPTools
--raises
is
HashCode(myclass;
aSet : Set from BOPTools;
Upper : Integer from Standard)
returns Integer from Standard;
---C++: inline
IsEqual(myclass;
aSet1 : Set from BOPTools;
aSet2 : Set from BOPTools)
returns Boolean from Standard;
---C++: inline
--fields
end SetMapHasher;
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#pragma mark Blocks
typedef void (^CDUnknownBlockType)(void); // return type and parameters are unknown
#pragma mark -
//
// File: /Applications/Xcode.app/Contents/PlugIns/IDEInstrumentsService.ideplugin/Contents/MacOS/IDEInstrumentsService
// UUID: C45D8F63-7C80-31E7-AEEB-D5158A18E1D9
//
// Arch: x86_64
// Current version: 6244.0.0
// Compatibility version: 1.0.0
// Source version: 6244.0.0.0.0
// Minimum Mac OS X version: 10.9.0
// SDK version: 10.10.0
//
// Objective-C Garbage Collection: Unsupported
//
// Run path: @loader_path/../../../
// = /Applications/Xcode.app/Contents/PlugIns
//
@protocol IDEAnalysisToolService <NSObject>
+ (IDEAnalysisTool *)analysisToolWithIdentifier:(NSString *)arg1 platformIdentifier:(NSString *)arg2;
+ (NSArray *)analysisToolsForPlatformIdentifier:(NSString *)arg1;
@property(retain) IDELaunchParametersSnapshot *launchParameters;
@property(copy) IDERunDestination *runDestination;
@property(copy) DVTFilePath *runnableBuildProductPath;
@property(copy) DVTFilePath *runnableLocation;
@property(retain) IDEAnalysisTool *analysisTool;
- (IDEProfileOperation *)operationWithWorkingDirectory:(DVTFilePath *)arg1 workspaceFilePath:(DVTFilePath *)arg2 projectFilePath:(DVTFilePath *)arg3 outError:(id *)arg4;
- (void)setAttachRunnablePID:(int)arg1 applicationName:(NSString *)arg2;
- (id)initWithRunnableUTIType:(DVTFileDataType *)arg1;
@end
@protocol NSObject
@property(readonly, copy) NSString *description;
@property(readonly) Class superclass;
@property(readonly) unsigned long long hash;
- (struct _NSZone *)zone;
- (unsigned long long)retainCount;
- (id)autorelease;
- (oneway void)release;
- (id)retain;
- (BOOL)respondsToSelector:(SEL)arg1;
- (BOOL)conformsToProtocol:(Protocol *)arg1;
- (BOOL)isMemberOfClass:(Class)arg1;
- (BOOL)isKindOfClass:(Class)arg1;
- (BOOL)isProxy;
- (id)performSelector:(SEL)arg1 withObject:(id)arg2 withObject:(id)arg3;
- (id)performSelector:(SEL)arg1 withObject:(id)arg2;
- (id)performSelector:(SEL)arg1;
- (id)self;
- (Class)class;
- (BOOL)isEqual:(id)arg1;
@optional
@property(readonly, copy) NSString *debugDescription;
@end
@interface XRXcodeAnalysisService : NSObject <IDEAnalysisToolService>
{
DVTFileDataType *_runnableUTIType;
id _runnableLocation;
DVTFilePath *_runnableBuildProductPath;
IDERunDestination *_runDestination;
int _pidForAttaching;
NSString *_appNameForAttaching;
IDEAnalysisTool *_currentTool;
NSMutableDictionary *_currentToolForPlatformDict;
IDEAnalysisTool *_analysisTool;
IDELaunchParametersSnapshot *_launchParameters;
}
+ (id)_apppleInternalTemplatesPath;
+ (id)_homePrefsTemplatesPath;
+ (id)_instrumentsAppTemplatesPath;
+ (id)_platformsPath;
+ (id)_developerAppsPath;
+ (id)_analysisToolsForPlugin:(id)arg1 platform:(id)arg2;
+ (id)_analysisToolsForPlatform:(id)arg1;
+ (id)_analysisToolsDictForAllPlatforms;
+ (id)_analysisToolsAtPath:(id)arg1 type:(int)arg2;
+ (id)sharedPlatformToAnalysisToolsMap;
+ (id)_sharedGraphicsDetectiveTool;
+ (id)_anyPlatformID;
+ (id)_graphicsPerformanceDetectivePath;
+ (id)analysisToolWithIdentifier:(id)arg1 platformIdentifier:(id)arg2;
+ (id)analysisToolsForPlatformIdentifier:(id)arg1;
+ (void)addUniqueTools:(id)arg1 toArray:(id)arg2;
@property(copy) DVTFilePath *runnableLocation; // @synthesize runnableLocation=_runnableLocation;
@property(copy) DVTFilePath *runnableBuildProductPath; // @synthesize runnableBuildProductPath=_runnableBuildProductPath;
@property(copy) IDERunDestination *runDestination; // @synthesize runDestination=_runDestination;
@property(retain) IDELaunchParametersSnapshot *launchParameters; // @synthesize launchParameters=_launchParameters;
- (void).cxx_destruct;
- (void)setAttachRunnablePID:(int)arg1 applicationName:(id)arg2;
- (id)operationWithWorkingDirectory:(id)arg1 workspaceFilePath:(id)arg2 projectFilePath:(id)arg3 outError:(id *)arg4;
- (CDUnknownBlockType)_operationBlockWithWorkingDirectory:(id)arg1 workspaceFilePath:(id)arg2 projectFilePath:(id)arg3;
- (void)_launch:(id)arg1 WithConfigFile:(id)arg2;
- (id)_analysisAppPathForType:(int)arg1;
- (id)_instrumentsPath;
@property(retain) IDEAnalysisTool *analysisTool; // @synthesize analysisTool=_analysisTool;
- (id)_currentToolForPlatformDict;
- (id)_currentPlatformIdentifier;
- (id)initWithRunnableUTIType:(id)arg1;
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
<?xml version="1.0" encoding="UTF-8"?>
<!--
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
-->
<config xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="urn:magento:framework:Mview/etc/mview.xsd">
<view id="view_two" class="Ogogo\Class\Two" group="default">
<subscriptions>
<table name="some_entity" entity_column="entity_id" />
<table name="some_product_relation_other" entity_column="product_id" />
</subscriptions>
</view>
<view id="view_three" class="Ogogo\Class\Three" group="default">
<subscriptions>
<table name="some_entity" entity_column="entity_id" />
<table name="some_product_relation" entity_column="product_id" />
</subscriptions>
</view>
</config>
*quickfix.txt*	For Vim version 8.2.  Last change: 2020 May 31
		VIM REFERENCE MANUAL	by Bram Moolenaar
This subject is also introduced in section |30.1| of the user manual.
1. Using QuickFix commands		|quickfix|
2. The error window			|quickfix-window|
3. Using more than one list of errors	|quickfix-error-lists|
4. Using :make				|:make_makeprg|
5. Using :grep				|grep|
6. Selecting a compiler			|compiler-select|
7. The error format			|error-file-format|
8. The directory stack			|quickfix-directory-stack|
9. Specific error file formats		|errorformats|
10. Customizing the quickfix window	|quickfix-window-function|
The quickfix commands are not available when the |+quickfix| feature was
disabled at compile time.
=============================================================================
1. Using QuickFix commands			*quickfix* *Quickfix* *E42*
Vim has a special mode to speed up the edit-compile-edit cycle.  It was
inspired by the quickfix option of Manx's Aztec C compiler on the Amiga.  The
idea is to save the error messages produced by the C compiler in a file and
let Vim jump to those errors.  You can then examine and fix each problem
without having to remember all the error messages.
More generally, the quickfix commands can be used to make a list of positions
in files and to jump to them.  For example, |:vimgrep| lists the positions
where a pattern matches.  A script can access those positions with the
|getqflist()| function.  So this can be used for many things besides the
edit-compile-fix cycle.
If the error messages are in a file, start Vim with "vim -q filename": >
	vim -q filename
<
One way to run a command from within Vim and capture its output is the
|:make| command (described below).  To have the error messages of your
compiler interpreted, set the 'errorformat' option (see |errorformat| below).
							*quickfix-ID*
Each quickfix list has a unique identifier called the quickfix ID and this
number will not change within a Vim session.  The |getqflist()| function can
be used to get the identifier assigned to a list.  There is also a quickfix
list number, which may change whenever more than ten lists are added to the
quickfix stack.
						*location-list* *E776*
A location list is a window-local quickfix list.  Commands such as
`:vimgrep`, `:grep`, `:helpgrep` and `:make` create a quickfix list, while
the corresponding commands `:lvimgrep`, `:lgrep`, `:lhelpgrep`, `:lmake`,
etc. create a location list instead.
						*location-list-file-window*
A location list is associated with a window and each window can have a
separate location list.  A location list can be associated with only one
window.  The location list is independent of the quickfix list.
When a window with a location list is split, the new window gets a copy of
the location list.  When there are no longer any references to a location
list, the location list is destroyed.
*quickfix-changedtick*
Every quickfix and location list has a read-only changedtick variable that
tracks the total number of changes made to the list.  Every time the quickfix
list is changed, this count is incremented.  This can be used to perform an
action only when the list has changed.  The |getqflist()| and |getloclist()|
functions can be used to query the current value of changedtick.  The
changedtick variable cannot be changed.
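For example, the dictionary form of |getqflist()| can be used to query the
current value (a minimal sketch): >
	echo getqflist({'changedtick' : 1}).changedtick
<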
The following quickfix commands can be used.  The location list commands are
similar to the quickfix commands, replacing the 'c' prefix in the quickfix
command with 'l'.
							*E924*
If the current window was closed by an |autocommand| while processing a
location list command, it will be aborted.
							*E925* *E926*
If the current quickfix or location list was changed by an |autocommand|
while processing a location list command, it will be aborted.
							*:cc*
:cc[!] [nr]		Display error [nr].  If [nr] is omitted, the same
:[nr]cc[!]		error is displayed again.  Without [!] this does not
			jump to another buffer when the current buffer has
			been changed, there is only one window and neither
			'hidden' nor 'autowrite' is set.  When jumping to
			another buffer with [!], any changes to the current
			buffer are lost unless 'hidden' is set or there is
			another window open on the buffer.
			The 'switchbuf' settings are respected when jumping
			to a buffer.
			When used in the quickfix window a line number can be
			given, including "." for the current line and "$" for
			the last line.
							*:ll*
:ll[!] [nr]		Same as ":cc", except the location list for the
:[nr]ll[!]		current window is used instead of the quickfix list.
					*:cn* *:cne* *:cnext* *E553*
:[count]cn[ext][!]	Display the [count] next error in the list that
			includes a file name.  If there are no file names at
			all, go to the [count] next error.  See |:cc| for [!]
			and 'switchbuf'.
							*:lne* *:lnext*
:[count]lne[xt][!]	Same as ":cnext", except the location list for the
			current window is used instead of the quickfix list.
:[count]cN[ext][!]		*:cp* *:cprevious* *:cprev* *:cN* *:cNext*
:[count]cp[revious][!]	Display the [count] previous error in the list that
			includes a file name.  If there are no file names at
			all, go to the [count] previous error.  See |:cc| for
			[!] and 'switchbuf'.
:[count]lN[ext][!]		*:lp* *:lprevious* *:lprev* *:lN* *:lNext*
:[count]lp[revious][!]	Same as ":cNext" and ":cprevious", except the
			location list for the current window is used instead
			of the quickfix list.
:[count]cabo[ve] カレントバッファの現在行の [count] 上のエラーに移動す
る。[count] を省略すると、1が使用される。エラーがない
場合は、エラーメッセージが表示される。quickfixリストの
エントリは、バッファ番号と行番号でソートされていると仮
定する。同じ行に複数のエラーがある場合は、最初のエント
リだけが使用される。[count] が現在の行より上のエントリ
の数を超えると、ファイル内の最初のエラーが選択される。
*:lab* *:labove*
:[count]lab[ove] quickfixリストの代わりにカレントウィンドウのlocationリ
ストが使用されることを除けば、":cabove" と同じ。
*:cbel* *:cbelow*
:[count]cbel[ow] カレントバッファの現在行の [count] 下にあるエラーに移
動する。[count] を省略すると、1が使用される。エラーが
ない場合は、エラーメッセージが表示される。quickfixリス
トのエントリは、バッファ番号と行番号でソートされている
と仮定する。同じ行に複数のエラーがある場合は、最初のエ
ントリだけが使用される。[count] が現在の行より下のエン
トリの数を超えると、ファイル内の最後のエラーが選択され
る。
*:lbel* *:lbelow*
:[count]lbel[ow] quickfixリストの代わりにカレントウィンドウのlocationリ
ストが使用されることを除けば、":cbelow" と同じ。
*:cbe* *:cbefore*
:[count]cbe[fore] カレントバッファ内の現在のカーソル位置の [count] 前の
エラーに移動する。[count] を省略すると、1が使用される。
エラーがない場合は、エラーメッセージが表示される。
quickfixリストのエントリは、バッファ、行番号および桁番
号でソートされていると仮定する。[count] が現在位置より
前のエントリ数を超えると、ファイル内の最初のエラーが選
択される。
*:lbe* *:lbefore*
:[count]lbe[fore] quickfixリストの代わりにカレントウィンドウのlocationリ
ストが使用されることを除けば、":cbefore" と同じ。
*:caf* *:cafter*
:[count]caf[ter] カレントバッファ内の現在のカーソル位置の [count] 後の
エラーに移動する。[count] を省略すると、1が使用される。
エラーがない場合は、エラーメッセージが表示される。
quickfixリストのエントリは、バッファ、行番号および桁番
号でソートされていると仮定する。[count] が現在位置以降
のエントリ数を超えると、ファイル内の最後のエラーが選択
される。
*:laf* *:lafter*
:[count]laf[ter] quickfixリストの代わりにカレントウィンドウのlocationリ
ストが使用されることを除けば、":cafter" と同じ。
							*:cnf* *:cnfile*
:[count]cnf[ile][!]	Display the first error in the [count] next file in
			the list that includes file names.  If there are no
			file names at all or if there is no next file, go to
			the [count] next error.  See |:cc| for [!] and
			'switchbuf'.
							*:lnf* *:lnfile*
:[count]lnf[ile][!]	Same as ":cnfile", except the location list for the
			current window is used instead of the quickfix list.
:[count]cNf[ile][!]			*:cpf* *:cpfile* *:cNf* *:cNfile*
:[count]cpf[ile][!]	Display the last error in the [count] previous file
			in the list that includes file names.  If there are
			no file names at all or if there is no previous file,
			go to the [count] previous error.  See |:cc| for [!]
			and 'switchbuf'.
:[count]lNf[ile][!]			*:lpf* *:lpfile* *:lNf* *:lNfile*
:[count]lpf[ile][!]	Same as ":cNfile" and ":cpfile", except the location
			list for the current window is used instead of the
			quickfix list.
							*:crewind* *:cr*
:cr[ewind][!] [nr]	Display error [nr].  If [nr] is omitted, the FIRST
			error is displayed.  See |:cc|.
							*:lrewind* *:lr*
:lr[ewind][!] [nr]	Same as ":crewind", except the location list for the
			current window is used instead of the quickfix list.
							*:cfirst* *:cfir*
:cfir[st][!] [nr]	Same as ":crewind".
							*:lfirst* *:lfir*
:lfir[st][!] [nr]	Same as ":lrewind".
							*:clast* *:cla*
:cla[st][!] [nr]	Display error [nr].  If [nr] is omitted, the LAST
			error is displayed.  See |:cc|.
							*:llast* *:lla*
:lla[st][!] [nr]	Same as ":clast", except the location list for the
			current window is used instead of the quickfix list.
							*:cq* *:cquit*
:cq[uit][!]
:{N}cq[uit][!]
:cq[uit][!] {N}		Quit Vim with error code {N}.  {N} defaults to 1.
			Useful when Vim is called from another program:
			e.g., a compiler will not compile the same file
			again, `git commit` will abort the committing
			process, `fc` (built-in for shells like bash and zsh)
			will not execute the command, etc.
			{N} can also be zero, in which case Vim exits
			normally.
			WARNING: All changes in files are lost (even when [!]
			is not used)!  It works like ":qall!" |:qall|, except
			that Vim returns a non-zero exit code.
							*:cf* *:cfile*
:cf[ile][!] [errorfile]	Read the error file and jump to the first error.
			This is done automatically when Vim is started with
			the -q option.  You can use this command when you
			keep Vim running while compiling.  If you give the
			name of the errorfile, the 'errorfile' option will be
			set to [errorfile].  See |:cc| for [!].
			If the encoding of the error file differs from the
			'encoding' option, you can use the 'makeencoding'
			option to specify the encoding.
							*:lf* *:lfi* *:lfile*
:lf[ile][!] [errorfile]	Same as ":cfile", except the location list for the
			current window is used instead of the quickfix list.
			You can not use the -q command-line option to set
			the location list.
:cg[etfile] [errorfile]				*:cg* *:cgetfile*
			Read the error file.  Just like ":cfile" but don't
			jump to the first error.
			If the encoding of the error file differs from the
			'encoding' option, you can use the 'makeencoding'
			option to specify the encoding.
:lg[etfile] [errorfile]				*:lg* *:lge* *:lgetfile*
			Same as ":cgetfile", except the location list for the
			current window is used instead of the quickfix list.
							*:caddf* *:caddfile*
:caddf[ile] [errorfile]	Read the error file and add the errors from the
			errorfile to the current quickfix list.  If a
			quickfix list is not present, then a new list is
			created.
			If the encoding of the error file differs from the
			'encoding' option, you can use the 'makeencoding'
			option to specify the encoding.
							*:laddf* *:laddfile*
:laddf[ile] [errorfile]	Same as ":caddfile", except the location list for
			the current window is used instead of the quickfix
			list.
						*:cb* *:cbuffer* *E681*
:cb[uffer][!] [bufnr]	Read the error list from the current buffer.  When
			[bufnr] is given, that buffer is used instead of the
			current buffer; it must be the number of a loaded
			buffer.  A range can be specified for the lines to be
			used.  Otherwise all lines in the buffer are used.
			See |:cc| for [!].
							*:lb* *:lbuffer*
:lb[uffer][!] [bufnr]	Same as ":cbuffer", except the location list for the
			current window is used instead of the quickfix list.
							*:cgetb* *:cgetbuffer*
:cgetb[uffer] [bufnr]	Read the error list from the current buffer.  Just
			like ":cbuffer" but don't jump to the first error.
							*:lgetb* *:lgetbuffer*
:lgetb[uffer] [bufnr]	Same as ":cgetbuffer", except the location list for
			the current window is used instead of the quickfix
			list.
						*:cad* *:cadd* *:caddbuffer*
:cad[dbuffer] [bufnr]	Read the error list from the current buffer and add
			the errors to the current quickfix list.  If a
			quickfix list is not present, then a new list is
			created.  Otherwise, same as ":cbuffer".
							*:laddb* *:laddbuffer*
:laddb[uffer] [bufnr]	Same as ":caddbuffer", except the location list for
			the current window is used instead of the quickfix
			list.
							*:cex* *:cexpr* *E777*
:cex[pr][!] {expr}	Create a quickfix list using the result of {expr} and
			jump to the first error.
			If {expr} is a String, then each newline separated
			line in the String is processed using the global
			value of 'errorformat' and the result is added to the
			quickfix list.
			If {expr} is a List, then each String item in the
			list is processed and added to the quickfix list.
			Non-String items in the List are ignored.
			See |:cc| for [!].
			Examples: >
	:cexpr system('grep -n xyz *')
	:cexpr getline(1, '$')
<
							*:lex* *:lexpr*
:lex[pr][!] {expr}	Same as |:cexpr|, except the location list for the
			current window is used instead of the quickfix list.
							*:cgete* *:cgetexpr*
:cgete[xpr] {expr}	Create a quickfix list using the result of {expr}.
			Just like |:cexpr|, but don't jump to the first
			error.
							*:lgete* *:lgetexpr*
:lgete[xpr] {expr}	Same as |:cgetexpr|, except the location list for the
			current window is used instead of the quickfix list.
							*:cadde* *:caddexpr*
:cadde[xpr] {expr}	Evaluate {expr} and add the resulting lines to the
			current quickfix list.  If a quickfix list is not
			present, then a new list is created.  The current
			cursor position will not be changed.  See |:cexpr|
			for more information.
			Example: >
	:g/mypattern/caddexpr expand("%") . ":" . line(".") . ":" . getline(".")
<
						*:lad* *:addd* *:laddexpr*
:lad[dexpr] {expr}	Same as ":caddexpr", except the location list for the
			current window is used instead of the quickfix list.
							*:cl* *:clist*
:cl[ist] [from] [, [to]]
			List all errors that are valid |quickfix-valid|.
			If numbers [from] and/or [to] are given, the
			respective range of errors is listed.  A negative
			number counts from the last error backwards, -1 being
			the last error.  The 'switchbuf' settings are
			respected when jumping to a buffer.
			The |:filter| command can be used to display only the
			quickfix entries matching a supplied pattern.  The
			pattern is matched against the filename, module name,
			pattern and text of the entry.
:cl[ist]! [from] [, [to]]
			List all errors.
							*:lli* *:llist*
:lli[st] [from] [, [to]]
			Same as ":clist", except the location list for the
			current window is used instead of the quickfix list.
:lli[st]! [from] [, [to]]
			List all the entries in the location list for the
			current window.
If you insert or delete lines, mostly the correct error location is still
found, because hidden marks are used (Manx's Z editor does not do this).
Sometimes the marks are deleted for some reason, and the message "line
changed" warns you about it.  If you quit Vim and start again, the marks are
lost and the error locations can no longer be determined exactly.
Two autocommands are available for running commands before and after a
quickfix command (':make', ':grep' and so on) is executed.  See
|QuickFixCmdPre| and |QuickFixCmdPost| for details.
						*QuickFixCmdPost-example*
When 'encoding' differs from the locale, the error messages from the compiler
may have a different encoding from what Vim uses internally.  To convert the
messages you can use this code: >
function QfMakeConv()
let qflist = getqflist()
for i in qflist
let i.text = iconv(i.text, "cp936", "utf-8")
endfor
call setqflist(qflist)
endfunction
au QuickfixCmdPost make call QfMakeConv()
Another option is to use the 'makeencoding' option instead.
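For example (a minimal sketch), letting the system locale determine the
encoding of the command output: >
	:set makeencoding=char
<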
							*quickfix-title*
Every quickfix and location list has a title.  By default the title is set to
the command that created the list.  The |getqflist()| and |getloclist()|
functions can be used to get the title of a quickfix and a location list
respectively.  The |setqflist()| and |setloclist()| functions can be used to
modify the title of a quickfix and a location list respectively.  Examples: >
call setqflist([], 'a', {'title' : 'Cmd output'})
echo getqflist({'title' : 1})
call setloclist(3, [], 'a', {'title' : 'Cmd output'})
echo getloclist(3, {'title' : 1})
<
*quickfix-index*
いずれかのquickfixコマンド(例えば |:cc|, |:cnext|, |:cprev| 等)を使用して
quickfix/locationリストエントリに移動すると、そのエントリが現在選択されている
エントリになる。quickfix/locationリストで現在選択されているエントリのインデッ
クスは、getqflist()/getloclist()関数を使って取得できる。例: >
echo getqflist({'idx' : 0}).idx
echo getqflist({'id' : qfid, 'idx' : 0}).idx
echo getloclist(2, {'idx' : 0}).idx
<
新しいquickfixリストの場合、最初のエントリが選択され、インデックスは1になる。
すべてのquickfix/locationリスト内のすべてのエントリは、setqflist()関数を使用し
て現在選択されているエントリとして設定できる。例: >
call setqflist([], 'a', {'idx' : 12})
call setqflist([], 'a', {'id' : qfid, 'idx' : 7})
call setloclist(1, [], 'a', {'idx' : 7})
<
*quickfix-size*
quickfixリストとlocationリストの中の項目の数をそれぞれ |getqflist()| と
|getloclist()| 関数を使用して取得できる。例: >
echo getqflist({'size' : 1})
echo getloclist(5, {'size' : 1})
<
*quickfix-context*
任意のVimの型はquickfixリストまたはlocationリストとのコンテキストとして関連付
けることができる。|setqflist()| と |setloclist()| 関数はコンテキストをquickfix
とlocationリストにそれぞれ関連付けることに使用できる。|getqflist()| と
|getloclist()| 関数はそれぞれquickfixとlocationリストのコンテキストの取得に使
用できる。これは複数のquickfix/locationリストを取り扱うVimプラグインにとって便
利である。
例: >
let somectx = {'name' : 'Vim', 'type' : 'Editor'}
call setqflist([], 'a', {'context' : somectx})
echo getqflist({'context' : 1})
let newctx = ['red', 'green', 'blue']
call setloclist(2, [], 'a', {'id' : qfid, 'context' : newctx})
echo getloclist(2, {'id' : qfid, 'context' : 1})
<
*quickfix-parse*
|getqflist()| 関数を使用してquickfixリストを作成または変更せずに、'errorformat'
を使用して行のリストをパースできる。例: >
echo getqflist({'lines' : ["F1:10:Line10", "F2:20:Line20"]})
echo getqflist({'lines' : systemlist('grep -Hn quickfix *')})
これは 'items' キーに行からパースされたquickfixの項目のリストが含まれている辞
書を返す。以下は 'errorformat' オプションを変更せずにカスタム 'errorformat' を
使って行をパースする方法を示している。 >
echo getqflist({'efm' : '%f#%l#%m', 'lines' : ['F1#10#Line']})
<
quickfixリストやlocationリスト中のバッファに対してコマンドを実行:
*:cdo*
:cdo[!] {cmd} quickfix リスト中の有効な各項目に対して {cmd} を実行す
る。以下の操作と同様に動作する: >
:cfirst
:{cmd}
:cnext
:{cmd}
etc.
< カレントファイルが破棄できず(|abandon|)、かつ[!] が与
えられない場合、このコマンドは失敗する。
次のエントリへの移動が失敗すると、実行は停止する。
最後のバッファ (またはエラーが起こったバッファ) がカレ
ントウィンドウになる。
{cmd} の中では '|' で複数のコマンドを連結できる。
quickfixリスト中の有効な項目のみが使われる。
「範囲」を指定して項目を選択することができる。例: >
:10,$cdo cmd
< こうすることで1番目から9番目までをスキップできる。
Note: このコマンドを実行している間、Syntax 自動コマン
ドイベントが 'eventignore' に追加され、無効化される。
これは各バッファの編集を大幅にスピードアップさせる。
|:bufdo|、|:tabdo|、|:argdo|、|:windo|、|:ldo|、
|:cfdo|、|:lfdo| も参照。
*:cfdo*
:cfdo[!] {cmd} quickfixリスト中の各ファイルに対して {cmd} を実行する。
以下の操作と同様に動作する: >
:cfirst
:{cmd}
:cnfile
:{cmd}
etc.
< それ以外は `:cdo` の場合と同様である。
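例: quickfixリスト中の各ファイルで置換を行い、変更があれば保存する (置換パター
ンは仮のもの): >
:cfdo %s/OldName/NewName/ge | update
<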
*:ldo*
:ld[o][!] {cmd} カレントウィンドウのlocationリスト中の有効な各項目に対
して {cmd} を実行する。
以下の操作と同様に動作する: >
:lfirst
:{cmd}
:lnext
:{cmd}
etc.
< locationリスト中の有効な項目のみが使われる。
それ以外は `:cdo` の場合と同様である。
*:lfdo*
:lfdo[!] {cmd} カレントウィンドウのlocationリスト中の各ファイルに対し
て {cmd} を実行する
以下の操作と同様に動作する: >
:lfirst
:{cmd}
:lnfile
:{cmd}
etc.
< それ以外は `:cdo` の場合と同様である。
QUICKFIX または LOCATIONリストのフィルタリング:
*cfilter-plugin* *:Cfilter* *:Lfilter*
quickfixリストのエントリ数が多すぎる場合は、cfilterプラグインを使用してエント
リ数を減らすことができる。プラグインをロードする: >
packadd cfilter
その後で、以下のコマンドを使って、quickfix/locationリストをフィルタすることが
できる: >
:Cfilter[!] /{pat}/
:Lfilter[!] /{pat}/
|:Cfilter| コマンドは、現在のquickfixリストの {pat} に一致するエントリから新し
いquickfixリストを作成する。{pat} はVimの正規表現 |regular-expression| パター
ンである。ファイル名とエントリのテキストの両方が {pat}と照合される。オプション
の ! が指定された場合、{pat} と一致しないエントリが使用される。パターンは、次
のいずれかの文字を使用して任意に囲むことができる: ', ", /。パターンが空の場合
は、最後に使用された検索パターンが使用される。
|:Lfilter| コマンドは |:Cfilter| と同じ動作をするが、カレントlocationリストを
操作する。
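例 (パターンは仮のもの): >
:Cfilter /undefined/
:Cfilter! /test_/
<
1つ目は "undefined" に一致するエントリだけを残し、2つ目は "test_" に一致するエ
ントリを除外した新しいquickfixリストを作成する。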
=============================================================================
2. エラーウィンドウ *quickfix-window*
*:cope* *:copen* *w:quickfix_title*
:cope[n] [height] 現在のエラーリストを表示するウィンドウを開く。
[height]が与えられたとき、(余地があれば) ウィンドウの
高さがその値になる。[height] を省略したときはウィンド
ウの高さは 10 行になる。
すでにquickfixウィンドウがある場合はそれがカレントウィ
ンドウになる。2 個目のquickfixウィンドウを開くことはで
きない。[height] が指定されたとき、既存のウィンドウは
その高さにリサイズされる。
*quickfix-buffer*
ウィンドウは 'buftype' の値が "quickfix" である特別な
バッファを含んでいる。これを変更してはならない。
ウィンドウは w:quickfix_title 変数を持っている。この変
数はquickfixリストを生成したコマンドを示している。変数
の値は 'statusline' が適切に調整されていればステータス
ラインに表示される。このバッファがquickfixコマンドまた
は関数により変更されるたびに、|b:changedtick| 変数はイ
ンクリメントされる。このバッファにおける変数の値は
getqflist() や getloclist() 関数を用い 'qfbufnr'
アイテムを通して取得できる。locationリストの場合は、
locationリストが削除されるときに、このバッファも削除さ
れる。
*:lop* *:lopen*
:lop[en] [height] カレントウィンドウのlocationリストを表示するウィンドウ
を開く。カレントウィンドウにlocationリストが存在すると
きだけ動作する。一度に2個以上のlocationリストを開くこ
とができる。それ以外は ":copen" と同様。
*:ccl* *:cclose*
:ccl[ose] quickfixウィンドウを閉じる。
*:lcl* *:lclose*
:lcl[ose] カレントウィンドウのlocationリストを表示しているウィン
ドウを閉じる。
*:cw* *:cwindow*
:cw[indow] [height] 認識されたエラーがあるときquickfixウィンドウを開く。
ウィンドウがすでに開いていて認識されたエラーがない場合
はウィンドウを閉じる。
*:lw* *:lwindow*
:lw[indow] [height] ":cwindow" と同様だが、quickfixリストでなく、カレント
ウィンドウのlocationリストが使われる。
*:cbo* *:cbottom*
:cbo[ttom] quickfix ウィンドウの最後の行にカーソルを移動し、見え
る様にスクロールする。これは非同期コールバックでエラー
を追加するのに便利である。
大量の更新に伴う多くの再描画を避けるには一度限り呼び出
すこと。
*:lbo* *:lbottom*
:lbo[ttom] 現在のウィンドウに関するlocationリストが表示されている
ウィンドウである事を除いて ":cbottom" と同じ。
通常、quickfixウィンドウはスクリーンの一番下に現れる。垂直分割したウィンドウが
ある場合は、一番右下に現れる。常に最大幅を占めるようにさせたい場合は >
:botright cwindow
とする。このウィンドウを|window-moving|コマンドで移動させることもできる。
例えば、一番上に移動させたければCTRL-W Kとする。
'winfixheight' オプションが設定されれば、'winheight' と 'equalalways' を無視
し、たいていその高さを維持する。高さを手動で変更することもできる (例えばステー
タスラインをマウスで上へドラッグするなど)。
quickfixウィンドウには各行に1個ずつエラーが表示される。その行数はエラー番号に
等しい。現在の項目はQuickFixLineハイライティングでハイライトされる。あなたはそ
れをあなたの好みに変更することができる。例: >
:hi QuickFixLine ctermbg=Yellow guibg=Yellow
":.cc" でカーソル下のエラーに移動できる。
<Enter>キーを押すのと行をダブルクリックするのは同じ効果がある。そのエラーを含
むファイルがquickfixウィンドウの上に開かれる。そのファイルがすでにウィンドウに
開かれていたらそのウィンドウがアクティブになる。そのウィンドウにあるバッファが
変更されていて、そのエラーが別のファイルにある場合はエラーへの移動は失敗する。
まず、そのウィンドウが破棄してもよいバッファを含んでいることを確かめなければな
らない。
*CTRL-W_<Enter>* *CTRL-W_<CR>*
CTRL-W <Enter>を使うと、新しいウィンドウを開いてそこでエラーにジャンプできる。
quickfixウィンドウが一杯になったとき、2つの自動コマンドイベントが発生する。第
一は 'filetype' オプションが "qf" にセットされ、FileTypeイベントが発生する
(|qf.vim| も参照)。それからBufReadPostイベントが発生する。そのときのバッファ名
は "quickfix" となる。これを使ってエラーリストに対して操作を行うことができる。
例: >
au BufReadPost quickfix setlocal modifiable
\ | silent exe 'g/^/s//\=line(".")." "/'
\ | setlocal nomodifiable
これは各行に行番号を追加する。文字列の置換 ":s" コマンドの中で使われている "\="
に注目。これは式を評価するのに使われる。
BufWinEnterイベントも発生する。ここでもバッファ名は "quickfix" になる。
Note: 存在する quickfix リストに追加される場合、autocommand イベントは発生しない。
Note: quickfixウィンドウ内で変更を加えてもエラーのリストには何の影響もない。変
更を防ぐために 'modifiable' がオフになっている。それでも行を削除や挿入した場合
は、テキストとエラー番号の関係がめちゃくちゃになる。本当にエラーリストを変更し
たいのなら、quickfixウィンドウの内容をファイルに保存し、":cfile" を実行、ファ
イルをパースさせ、新しいエラーリストとして使うこと。
*location-list-window*
locationリストウィンドウはlocationリストの中身を表示する。locationリストウィン
ドウを開くと、カレントウィンドウの下に開かれ、カレントウィンドウのlocationリス
トが表示される。locationリストはquickfixウィンドウに似ているが、一度に2個以上
のlocationリストウィンドウを開ける点が異なる。このウィンドウ内でlocationリスト
コマンドを使うと、表示されているlocationリストが使われる。
locationリストウィンドウからファイルを選択すると、以下のステップによって、その
ファイルを編集するウィンドウが探される。
1. locationリストウィンドウに表示されているlocationリストに関連付けられている
ウィンドウがあるなら、そのウィンドウが使われる。
2. 上のステップが失敗した場合、そのファイルが既に他のウィンドウで開かれている
なら、そのウィンドウが使われる。
3. 上のステップが失敗した場合、'buftype' がセットされていないバッファを表示し
ているウィンドウが存在するなら、そのウィンドウが使われる。
4. 上のステップが失敗した場合、新しいウィンドウでファイルが開かれる。
上の全ての場合において、選択されたウィンドウに対してまだlocationリストが関連付
けられていなかった場合、locationリストウィンドウに表示されているlocationリスト
が関連づけられる。
*quickfix-window-ID*
|getqflist()| と |getloclist()| 関数を使用して、quickfixウィンドウとlocationリ
ストウィンドウのウィンドウIDをそれぞれ取得できる(もしあれば)。例: >
echo getqflist({'winid' : 1}).winid
echo getloclist(2, {'winid' : 1}).winid
<
*getqflist-examples*
|getqflist()| 関数と |getloclist()| 関数は、それぞれquickfixとlocationリストの
さまざまな属性を取得するのに使用できる。これらの関数の使用例を以下に示す:
>
" 現在のquickfixリストのタイトルを取得する
:echo getqflist({'title' : 0}).title
" 現在のquickfixリストの識別子を取得する
:let qfid = getqflist({'id' : 0}).id
" スタックの中の4番目のquickfixリストの識別子を取得する
:let qfid = getqflist({'nr' : 4, 'id' : 0}).id
" 特定の識別子を持つquickfixリストが存在するかどうかを検査する
:if getqflist({'id' : qfid}).id == qfid
" スタックの中の現在のquickfixリストのインデックスを取得する
:let qfnum = getqflist({'nr' : 0}).nr
" 識別子により指定されたquickfixリストのitemsを取得する
:echo getqflist({'id' : qfid, 'items' : 0}).items
" idにより指定されたquickfixリストの中の項目の数を取得する
:echo getqflist({'id' : qfid, 'size' : 0}).size
" スタックの中の3番目のquickfixリストのcontextを取得する
:echo getqflist({'nr' : 3, 'context' : 0}).context
" スタックの中のquickfixリストの数を取得する
:echo getqflist({'nr' : '$'}).nr
" 現在のquickfixリストが変更された回数を取得する
:echo getqflist({'changedtick' : 0}).changedtick
" 識別子により指定されたquickfixリストの中の現在の項目を取得する
:echo getqflist({'id' : qfid, 'idx' : 0}).idx
" 識別子を使用して全てのquickfixリスト属性を取得する
:echo getqflist({'id' : qfid, 'all' : 0})
" linesのリストの値からテキストをパースしてquickfixリストを返す
:let myList = ["a.java:10:L10", "b.java:20:L20"]
:echo getqflist({'lines' : myList}).items
" カスタム 'efm' を使用してテキストをパースしてquickfixリストを返す
:echo getqflist({'lines' : ['a.c#10#Line 10'], 'efm':'%f#%l#%m'}).items
" quickfixリストのウィンドウIDを取得する
:echo getqflist({'winid' : 0}).winid
" quickfixリストのウィンドウのバッファ番号を取得する
:echo getqflist({'qfbufnr' : 0}).qfbufnr
" 現在のlocationリストのcontextを取得する
:echo getloclist(0, {'context' : 0}).context
" 3番目のウィンドウのlocationリストのウィンドウIDを取得する
:echo getloclist(3, {'winid' : 0}).winid
" 3番目のウィンドウのlocationリストのバッファ番号を取得する
:echo getloclist(3, {'qfbufnr' : 0}).qfbufnr
" locationリストウィンドウ(winnr: 4)のファイルウィンドウIDを取得する
:echo getloclist(4, {'filewinid' : 0}).filewinid
<
*setqflist-examples*
|setqflist()| 関数と |setloclist()| 関数は、それぞれquickfixとlocationリストの
さまざまな属性を設定するのに使用できる。これらの関数の使用例を以下に示す:
>
" titleとcontextを持つ空のquickfixリストを作成する
:let t = 'Search results'
:let c = {'cmd' : 'grep'}
:call setqflist([], ' ', {'title' : t, 'context' : c})
" 現在のquickfixリストのtitleを設定する
:call setqflist([], 'a', {'title' : 'Mytitle'})
" 識別子により指定されたquickfixリストの現在のエントリを変更する
:call setqflist([], 'a', {'id' : qfid, 'idx' : 10})
" 識別子により指定されたquickfixリストのcontextを設定する
:call setqflist([], 'a', {'id' : qfid, 'context' : {'val' : 100}})
" コマンド出力から新しいquickfixリストを作成する
:call setqflist([], ' ', {'lines' : systemlist('grep -Hn main *.c')})
" カスタムefmを使用してテキストをパースして特定のquickfixリストに追加する
:call setqflist([], 'a', {'id' : qfid,
\ 'lines' : ["a.c#10#L10", "b.c#20#L20"], 'efm':'%f#%l#%m'})
" 識別子により指定されたquickfixリストにitemsを追加する
:let newItems = [{'filename' : 'a.txt', 'lnum' : 10, 'text' : "Apple"},
\ {'filename' : 'b.txt', 'lnum' : 20, 'text' : "Orange"}]
:call setqflist([], 'a', {'id' : qfid, 'items' : newItems})
" 識別子により指定されたquickfixリストを空にする
:call setqflist([], 'r', {'id' : qfid, 'items' : []})
" スタックの中の全てのquickfixリストを開放する
:call setqflist([], 'f')
" 4番目のquickfixリストのtitleを設定する
:call setqflist([], 'a', {'nr' : 4, 'title' : 'SomeTitle'})
" スタックの最後に新しいquickfixリストを作成する
:call setqflist([], ' ', {'nr' : '$',
\ 'lines' : systemlist('grep -Hn class *.java')})
" コマンド出力から新しいlocationリストを作成する
:call setloclist(0, [], ' ', {'lines' : systemlist('grep -Hn main *.c')})
" 3番目のウィンドウのlocationリスト項目を置き換える
:call setloclist(3, [], 'r', {'items' : newItems})
<
=============================================================================
3. 複数のエラーリストを使う *quickfix-error-lists*
これまでは一つだけのエラーリストがあると仮定してきた。実際は最後に使った10個
迄のエラーリストが記憶される。新しいリストではじめた時には、以前のリストは自動
的に保存される。古いエラーリストにアクセスするために、2つのコマンドが用意され
ている。これらは存在するエラーリストの内1つを現在のエラーリストに設定する。
*:colder* *:col* *E380*
:col[der] [count] 古いエラーリストへ移動する。[count]が与えられると、そ
の回数繰り返し移動する。既に一番古いエラーリストにいる
場合、エラーメッセージが表示される。
*:lolder* *:lol*
:lol[der] [count] `:colder` と同様だが、quickfixリストでなく、カレント
ウィンドウのlocationリストが使われる。
*:cnewer* *:cnew* *E381*
:cnew[er] [count] 新しいエラーリストへ移動する。[count]が与えられると、
その回数繰り返し移動する。既に一番新しいエラーリストに
いる場合、エラーメッセージが表示される。
*:lnewer* *:lnew*
:lnew[er] [count] `:cnewer` と同様だが、quickfixリストでなく、カレント
ウィンドウのlocationリストが使われる。
*:chistory* *:chi*
:[count]chi[story] エラーリストの一覧を表示する。現在のリストは ">" でマー
クされる。出力は以下の様になる:
error list 1 of 3; 43 errors ~
> error list 2 of 3; 0 errors ~
error list 3 of 3; 15 errors ~
[count] が与えられると、count番目のquickfixリストがカ
レントリストになる。例: >
" 4番目のquickfixリストをカレントにする
:4chistory
<
*:lhistory* *:lhi*
:[count]lhi[story] locationリストの一覧を表示する。`:chistory` の様に。
新しいエラーリストが追加された時には、それがカレントリストとなる。
":colder" が実行された後で ":make" や ":grep" が実行され新しいエラーリストが追
加されたときは1個新しいリストが上書きされる。これは ":grep" |grep| でブラウジ
ングしているときに特に便利である。もっと最近のエラーリストを残しておきたい場合
は初めに ":cnewer 99" を行うこと。
quickfixやlocationリストのスタック数を取得するには、特別な値 '$' に設定された
リスト番号でそれぞれ、|getqflist()| 関数と |getloclist()| 関数を使用できる。例:
>
echo getqflist({'nr' : '$'}).nr
echo getloclist(3, {'nr' : '$'}).nr
スタック内の現在のリスト番号を取得するには: >
echo getqflist({'nr' : 0}).nr
<
=============================================================================
4. :makeの使い方 *:make_makeprg*
*:mak* *:make*
:mak[e][!] [arguments] 1. |QuickFixCmdPre| に関連付けられた自動コマンドが全て
実行される。
2. オプション 'autowrite' がonならば変更のあるバッファ
は保存される。
3. 'makeef' からエラーファイルの名前が生成される。
'makeef' が "##" を含まずかつ既に名前が存在する場合
それは削除される。
4. オプション 'makeprg' で与えられたプログラム (省略時
"make") が [argument]をオプションにして実行され、出
力がerrorfileに保存される (Unixではそれも画面にecho
される)。
5. 'errorformat' を使ってerrorfileが読みこまれる。
6. |QuickFixCmdPost| に関連付けられた自動コマンドが全
て実行される。後述のサンプルを参照。
7. [!]が与えられていないときは最初のエラーに移動する。
8. エラーファイルが削除される。
9. |:cnext|や|:cprevious| などのコマンドでエラー間を移
動できる。上を参照。
このコマンドは如何なるコメントも受けつけず、どんな "
という文字もargumentの一部とみなされる。
プログラム出力のエンコーディングが 'encoding' と異なる
場合には、'makeencoding' オプションでエンコーディング
を指定できる。
*:lmak* *:lmake*
:lmak[e][!] [arguments]
":make" と同様だが、quickfixリストでなく、カレントウィ
ンドウのlocationリストが使われる。
コマンド ":make" はオプション 'makeprg' で与えられるコマンドを実行する。これは
オプション 'shell' で与えられたシェルにコマンドを渡す事で実行されている。以下
をタイピングするのとほぼ同じである。
":!{makeprg} [arguments] {shellpipe} {errorfile}".
{makeprg}は 'makeprg' オプションで指定された文字列である。"make" に限らず、ど
んなコマンドでも使用できる。'%' と '#' の文字は通常通りコマンドライン中で展開
される。拡張子無しの現在ファイル名を表すのに "%<"、拡張子無しの代替ファイル名
を表すのに "#<" が使える。例えば: >
:set makeprg=make\ #<.o
[arguments] ":make" より後に入力した全て。
{shellpipe} オプション 'shellpipe'
{errorfile} オプション 'makeef'。"##" は一意な名前にする
コマンドがargumentsの後にオプションを必要とするならば、{makeprg}の中で引数リス
トに展開される置換子 "$*" が使用できる。$*は引数全てに置換えられる。例: >
:set makeprg=latex\ \\\\nonstopmode\ \\\\input\\{$*}
またはより単純に >
:let &mp = 'latex \\nonstopmode \\input\{$*}'
"$*" は次の例のように何度でも与える事ができる: >
:set makeprg=gcc\ -o\ $*\ $*
オプション 'shellpipe' の省略値はAmigaでは ">" で、Win32では ">%s 2>&1" であ
る。これはコンパイラの出力が直接ファイルに出力されスクリーンには出力されないこ
とを意味する。Unixでは "| tee" が使用される。コンパイラがファイルに出力すると
同時にスクリーンにも表示される。使っているシェルに応じて標準エラーへの出力も含
めるために "|& tee" や "2>&1| tee" が省略値となる。
'shellpipe' が空の場合、{errorfile}が省略される。これはコンパイラ自身がエラー
ファイルを作成する場合 (Manx's Amiga C) に便利である。
QuickFixCmdPost を使ってエンコーディングを修正する ~
ビルドプログラムが出力するメッセージと 'encoding' の値が異なる場合がある。この
例は、Vim がそのエラーメッセージを読み込んだ後でエンコーディングを変換する方法
を示している: >
function QfMakeConv()
let qflist = getqflist()
for i in qflist
let i.text = iconv(i.text, "cp936", "utf-8")
endfor
call setqflist(qflist)
endfunction
au QuickfixCmdPost make call QfMakeConv()
(Faque Cheng による例)
代わりに 'makeencoding' オプションを使うこともできる。
==============================================================================
5. :vimgrepと:grepの使い方 *grep* *lid*
Vimにはパターンを検索する方法が2つある: 内部grepと外部grepである。内部grepの利
点は、全てのシステム上で動作し、Vimの強力な検索パターンを使えることである。内
部grepが目的に合わない場合は外部grepを使うことができる。
内部grepはファイルをメモリに読み込むため、より遅い。利点は:
- ファイルを開くときと同様に改行コードとエンコーディングが自動的に認識される。
- Vimの検索パターンを使う。複数行にわたるパターンが使える。
- プラグインが有効になっていれば、圧縮ファイル、リモートファイルを検索できる。
|gzip| |netrw|
これを行うために、Vimは各ファイルを編集するときと同じように読み込む。そのファ
イルにマッチがなかったら、そのバッファは消去 (wiped out) される。多数のファイ
ルを扱うときのメモリ不足やファイル記述子不足を避けるために、ここではオプショ
ン 'hidden' は無視される。しかし、コマンド修飾子|:hide|が使われたときは、バッ
ファが読み込まれたままになる。これによって、同じファイルを続けて検索するのがと
ても高速になる。
Note: 検索結果へのリンク一覧を開くには |:copen| (|:lgrep| なら |:lopen|) が使
われる。|:silent| コマンドを使うことで grep の出力が画面いっぱいに表示されるの
を防ぐことができる。|:grep| コマンドを ":grep!" 形式で使うと最初のマッチに自動
的にジャンプしなくなる。これらのコマンドを組み合わせて NewGrep コマンドを作る
と次のようになる: >
command! -nargs=+ NewGrep execute 'silent grep! <args>' | copen 42
5.1 Vimの内部grepの使い方
*:vim* *:vimgrep* *E682* *E683*
:vim[grep][!] /{pattern}/[g][j] {file} ...
ファイル{file}から{pattern}を検索し、マッチ位置をエラー
リストに追加する。'wildignore' にマッチしたファイルは
無視される。'suffixes' にマッチしたファイルは最後に検
索される。
フラグ 'g' がない場合、各行は1度だけ追加される。
'g' がある場合、マッチ位置が毎回追加される。
{pattern}はVimの検索パターンである。/ で囲まない場合、
それが{pattern}中に現れない限り、どんな非ID文字
(|'isident'| を参照) でも使える。
'ignorecase' が適用される。パターン中に|/\c|を含めると
大文字小文字を区別しなくなり、|/\C|を含めると区別する
ようになる。これは 'ignorecase' より優先される。
'smartcase' は適用されない。
{pattern} が空のときは(つまり // が指定されたときは)、
最後に使われた検索パターンが使用される。|last-pattern|
:{count}vim[grep] ...
このコマンドの前に数字が置かれると、その数が検索する
マッチの最大数となる。":1vimgrep pattern file" とする
と最初のマッチだけを検索する。マッチが存在するかどうか
だけをチェックしたく、それが見つかったらすぐに終了して
ほしい場合に便利である。
フラグ 'j' がない場合、最初のマッチへジャンプする。
'j' がある場合はquickfixリストが更新されるだけである。
[!]がついた場合、カレントバッファに対する変更は全て失
われる。
進行状況を示すため、1秒程度ごとに検索されたファイル名
が表示される。
例: >
:vimgrep /an error/ *.c
:vimgrep /\<FileName\>/ *.h include/*
:vimgrep /myfunc/ **/*.c
< "**" の使い方については|starstar-wildcard|を参照。
:vim[grep][!] {pattern} {file} ...
上と同様だが、パターンを非ID文字で囲むのでなく、空白で
パターンを区切る。パターンはID文字で始まらねばならな
い。
例: >
:vimgrep Error *.c
<
*:lv* *:lvimgrep*
:lv[imgrep][!] /{pattern}/[g][j] {file} ...
:lv[imgrep][!] {pattern} {file} ...
":vimgrep" と同様だが、quickfixリストでなく、カレント
ウィンドウのlocationリストが使われる。
*:vimgrepa* *:vimgrepadd*
:vimgrepa[dd][!] /{pattern}/[g][j] {file} ...
:vimgrepa[dd][!] {pattern} {file} ...
":vimgrep" と同様だが、新しくエラーリストを作る代わり
に、現在のリストに追加する。
*:lvimgrepa* *:lvimgrepadd*
:lvimgrepa[dd][!] /{pattern}/[g][j] {file} ...
:lvimgrepa[dd][!] {pattern} {file} ...
":vimgrepadd" と同様だが、quickfixリストでなく、カレン
トウィンドウのlocationリストが使われる。
5.2 外部grep
Vimはコンパイラに対するのと同じ方法 (|:make|参照) で "grep" やGNU id-utilsなどの
grepライクなプログラムと連携できる。
[Unix豆知識: Unixのコマンド "grep" の名前は ":g/re/p" に由来している。"re" は
Regular Expression (正規表現) を意味する。]
*:gr* *:grep*
:gr[ep][!] [arguments] ":make" と同じようにしかし 'makeprg' の代わりに
'grepprg' が、'errorformat' の代わりに 'grepformat' が
使われる。'grepprg' が "internal" の場合、|:vimgrep|と
同様に機能する。その場合、パターンが区切り文字で囲まれ
ていなければならないことに注意。
プログラム出力のエンコーディングが 'encoding' と異なる
場合には、'makeencoding' オプションでエンコーディング
を指定できる。
*:lgr* *:lgrep*
:lgr[ep][!] [arguments] ":grep" と同様だが、quickfixリストでなく、カレントウィ
ンドウのlocationリストが使われる。
*:grepa* *:grepadd*
:grepa[dd][!] [arguments]
":grep" と似ているが、新しいエラーリストを作らず、解釈
されたエラーが現在のリストに追加される。
例: >
:call setqflist([])
:bufdo grepadd! something %
< 1番目のコマンドは新しい空のエラーリストを作成する。2番
目のコマンドはバッファリスト内の各バッファに対し
"grepadd" を実行する。最初のエラーへジャンプするのを避
けるために ! を使っていることに注意。|:bufdo| でジャン
プすることはできない。
引数リスト内のファイルに対して実行し、マッチがないファ
イルでのエラーを回避する例: >
:silent argdo try
\ | grepadd! something %
\ | catch /E480:/
\ | endtry"
<
プログラム出力のエンコーディングが 'encoding' と異なる
場合には、'makeencoding' オプションでエンコーディング
を指定できる。
*:lgrepa* *:lgrepadd*
:lgrepa[dd][!] [arguments]
":grepadd" と同様だが、quickfixリストでなく、カレント
ウィンドウのlocationリストが使われる。
5.3 grepをセットアップする
標準的な "grep" プログラムがインストールされていれば :grep コマンドはデフォル
トのままで動くだろう。使い方は標準的なコマンドにとてもよく似ている: >
:grep foo *.c
これは拡張子.cの全てのファイルの中から部分文字列 "foo" を検索する。:grepへの引
数はそのまま "grep" プログラムに渡されるので、その "grep" がサポートするオプ
ションはなんでも使うことができる。
デフォルトでは :grep は grep を -n オプションつきで呼び出す (これはファイル名
と行番号を出力させる)。これは 'grepprg' オプションで変更できる。次のような場合
に 'grepprg' を変更する必要があるだろう:
a) "grep" 以外の名前のプログラムを使っているとき
b) grepをフルパスで呼ばなければならないとき
c) 他のオプションを自動的に渡したいとき (例: 大文字・小文字の無視)
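例えば c) のように常に大文字・小文字を無視させたい場合は、次のように設定できる
(Unixの既定値に -i を加えた一例): >
:set grepprg=grep\ -ni\ $*\ /dev/null
<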
"grep" が実行されると、Vimはその結果を 'grepformat' オプションに従って解釈す
る。このオプションは 'errorformat' オプションと同様に働くので詳細はそちらを参
照すること。あなたのgrepが標準的でない書式で出力したり、あるいは特別な書式を持
つ他のプログラムを使っている場合は 'grepformat' をデフォルト値から変更する必要
があるだろう。
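例えば列番号も出力するgrepライクなツールを使う場合 (ここでは ripgrep がインス
トールされていると仮定する)、次のように設定できる: >
:set grepprg=rg\ --vimgrep
:set grepformat=%f:%l:%c:%m
<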
結果が解釈されると、|quickfix|モードにおけるコンパイルエラーと同様に、Vim は
マッチした部分を含む最初のファイルを読み込み、対応した行へジャンプする。その後
は |:cnext|, |:clist| などのコマンドを使って他のマッチにジャンプすることができ
る。
5.4 id-utilsと共に:grepを使う
:grepをGNU id-utilsと共に使うにはこのようにする: >
:set grepprg=lid\ -Rgrep\ -s
:set grepformat=%f:%l:%m
そして >
:grep (regexp)
これで期待通りの動作をする。
(最初にmkidをするのを忘れていなければ)
5.5 :vimgrepや:grepを使ってソースコードをわたり歩く
Vimが保存するエラーリストのスタックを使うことによって、ファイルをわたり歩き、
関数とその関数が呼んでいる関数を探すことができる。例えば、read_file()関数に引
数を加えたいとする。次のようにコマンドを打てばよい: >
:vimgrep /\<read_file\>/ *.c
":cn" でマッチのリストを巡り、引数を加えることができる。またあるとき上位の関数
msg()から新しい引数を得て、それを変更しなければならないとする。ならばこうする
とよい: >
:vimgrep /\<msg\>/ *.c
msg()関数を変更しているときに、上位から引数を得なければならない関数をもう1個見
つけたとする。ならばその関数を見つけるのにまた ":vimgrep" を使えばよい。1つの
関数が終わったら、 >
:colder
とすれば1つ前に戻ることができる。
これはツリーをわたるのに似ている: ":vimgrep" が1レベル深く進むにつれて、分岐の
リストが1つ作られる。":colder" は1つ上に戻る。":vimgrep" と ":colder" を使って
ツリーに似た方法ですべての場所をわたることができる。これを一貫して行えば、
"todo" のリストを書き留めることなく、すべての場所に行くことができる。
=============================================================================
6. コンパイラを選ぶ *compiler-select*
*:comp* *:compiler* *E666*
:comp[iler][!] {name} コンパイラ{name}を使うときに機能するオプション
を設定する。"!" オプションがない場合は現在の
バッファに対して設定される。"!" がある場合はグ
ローバルオプションが設定される。
"file.foo" で ":compiler foo" とし、その後別の
バッファで ":compiler! bar" としたとき、Vimは
"file.foo" では "foo" を使い続ける。
{|+eval|機能なしでコンパイルされた場合には使用
できない}
"compiler" ディレクトリ内にあるVimプラグインによって、選択されたコンパイラを使
うためのオプションが設定される。`:compiler` はローカルオプションを設定し、
`:compiler!` はグローバルオプションを設定する。
*current_compiler*
Vimの古いバージョンをサポートするために、それらのプラグインは常に
"b:current_compiler" でなく "current_compiler" を使う。このコマンドが実際に行
うことは次の通り:
- 変数 "current_compiler" と "b:current_compiler" を削除する
- ユーザーコマンド "CompilerSet" を定義する。"!" がついた場合は ":set" を行い、
"!" が無い場合は ":setlocal" を実行する。
- ":runtime! compiler/{name}.vim" を実行する。このプラグインは "CompilerSet"
に伴うオプションを設定し、変数 "current_compiler" をそのコンパイラの名前に設
定すると期待される。
- ユーザーコマンド "CompilerSet" を削除する。
- "b:current_compiler" を "current_compiler" の値に設定する。
- "!" が無い場合は "current_compiler" の元の値を復元する。
コンパイラプラグインを書くためには|write-compiler-plugin|を参照せよ。
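参考までに、仮のツール "mytool" 用コンパイラプラグインの最小限の骨子を示す (ファ
イル名 compiler/mytool.vim と出力書式は仮定したもの): >
" compiler/mytool.vim の一例
if exists("current_compiler")
  finish
endif
let current_compiler = "mytool"
CompilerSet makeprg=mytool\ %:S
CompilerSet errorformat=%f:%l:%c:\ %m
<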
GCC *quickfix-gcc* *compiler-gcc*
GCC用に設定できる変数は1つある:
g:compiler_gcc_ignore_unmatched_lines
GCC用に定義されたどのパターンにもマッチしない
行を無視する。makeから起動されたコマンドの出力
のせいで誤検出 (false positive) が発生してしま
うときに有用である。
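例: >
:let g:compiler_gcc_ignore_unmatched_lines = 1
:compiler gcc
<
通常、この変数は ":compiler gcc" を実行する前に設定しておく。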
MANX AZTEC C *quickfix-manx* *compiler-manx*
Amiga上でManx's Aztec C compilerとともにVimを使うには次のようにする:
- 環境変数CCEDITを次のコマンドで設定する: >
mset "CCEDIT=vim -q"
- -qfオプションをつけてコンパイルする。もしコンパイラがエラーを見つけたらVimが
カーソルを最初のエラーの上に置いた状態で起動する。エラーメッセージは最後の行
に表示される。上で述べたコマンドを使って他のエラーへ移動することができる。エ
ラーを修正し、ファイルを保存できる。
- Vimを普通に終了するとコンパイラが同じファイルを再コンパイルする。:cqコマンド
で終了した場合はコンパイラは終了する。エラーを修正できないときや、まず他の
ファイルをコンパイルする必要があるときはそうするとよい。
AmigaにおけるQuickfixモードには他にも制限がある。コンパイラは最初の25個のエ
ラーしか出力しない (Manx'sのドキュメントにはそれ以上出力する方法が書かれていな
い)。それ以上のエラーを探したいのならば、幾つかのエラーを修正しエディタを抜け
る必要がある。再コンパイルの後、残りの25個のエラーが出てくる。
Vimがコンパイラから起動された場合、:shやいくつかの:!コマンドは機能しない。Vim
がコンパイラと同じプロセスの中で動いているため、標準出力が利用できないからで
ある。
PERL *quickfix-perl* *compiler-perl*
Perl コンパイラプラグインはコンパイルはしないが、Perl 内部の構文チェック機能を
呼び出し、その出力を解析してエラーを quickfix モードで修正できるようにする。
チェックするファイルの中に "no warnings" または "$^W = 0" と書いてあっても関係
なく警告が表示される。これを無効にするには g:perl_compiler_force_warnings に 0
を代入する。例: >
let g:perl_compiler_force_warnings = 0
PYUNIT COMPILER *compiler-pyunit*
これは実際にはコンパイラではなく、Python言語用のユニットテストフレームワークで
ある。PYUNITはバージョン2.0からPython標準ディストリビューションに含まれるよう
になった。それより古いバージョンは
http://pyunit.sourceforge.net
で入手できる。
フレームワークの助けを借りてテストを走らせるとき、エラーがあればVimによって解
釈され、quickfixモードで表示される。
残念ながら、テストを走らせる標準的な方法はない。alltests.pyスクリプトがよく使
われると思われるが、それだけである。
よって、'makeprg' に対する実用的な値は
setlocal makeprg=./alltests.py " テストスイートを走らせる
setlocal makeprg=python\ %:S " 1つのテストケースを走らせる
となる。
次も参照。
http://vim.sourceforge.net/tip_view.php?tip_id=280.
TEX COMPILER *compiler-tex*
ディストリビューションに含まれているTeX用のコンパイラスクリプト
($VIMRUNTIME/compiler/tex.vim) は、可能ならmakeコマンドを使う。コンパイラがカ
レントディレクトリに "Makefile" または "makefile" というファイルを見つけたら、
*TeXファイルをmakeを使って処理しようとし、そのmakefile通りの動作をする。この場
合コンパイラは 'errorformat' を*TeX出力用にセットし、'makeprg' は触らずにその
ままにしておく。"Makefile" も "makefile" も見つからない場合はコンパイラはmakeを
使わない。makefileを無視するように指定することもできる。変数
b:tex_ignore_makefileかg:tex_ignore_makefileを設定すればよい (これらは存在する
かのみチェックされる)。
コンパイラがmakeを使わないことになったら、コンパイラは入力を処理するプログラム
を選択する。変数b:tex_flavorかg:tex_flavor (この順で探される) が存在すれば、そ
れが:makeコマンドのためのオプションを定義する。もし両方とも存在しなければ、既
定値 "latex" になる。例えば、AMS-TeXで書かれたmypaper.texから\inputされた
chapter2.texを編集中に >
:let b:tex_flavor = 'amstex'
:compiler tex
< [editing...] >
:make mypaper
処理するファイルの名前を引数に指定しなければならないことに注意 (\inputか
\includeされたファイルを編集中に正しいファイルを処理するため; %を引数なしに置
換するポータブルな方法もよい)。これはソースではなく、ターゲットを指定するとい
うmakeの意味論ではないが、拡張子 ".tex" を除いたファイル名を指定してもよい。そ
の場合、「filename.dviまたはfilename.pdfまたは filename.[コンパイラに応じた何
らかの結果の拡張子] をメイクしろ」ということを意味する。
Note: tex コマンドライン文法はMikTex (Srinath Avadhanulaによって提案された) と
teTeX (Artem Chuprinaによってチェックされた) の両方で使えるように設定されてい
る。|errorformat-LaTeX|からの提案は他のシェルやOSで動かせるようにするには複雑
すぎるし、他のTeXオプションを使うことも許さない。もしあなたのTeXが
"-interaction=nonstopmode" をサポートしていなければ、コマンドラインから
\nonstopmodeを表現する他の方法とともにその旨を報告してください。
=============================================================================
7. エラーフォーマット *error-file-format*
*errorformat* *E372* *E373* *E374*
*E375* *E376* *E377* *E378*
'errorformat' オプションは認識されるエラーフォーマットのリストを指定する。その
中からエラーメッセージにマッチした最初のフォーマットが使われる。複数のフォーマッ
トを指定して、数種類のメッセージに対応したり、複数のコンパイラに対応したりする
ことができる。|efm-entries|を参照。
'errorformat' の各要素は、scanfに似たフォーマットを記述する文字列である。はじ
めに、scanfがどのように働くか知る必要がある。Cコンパイラのドキュメントを読むこ
と。以下はVimが理解する%の項目である。他は無効になる。
'errorformat' 中の特別な文字はコンマとバックスラッシュである。それがどう扱われ
るかは|efm-entries|を参照。"%%" はリテラル "%" にマッチする。よってこれはバッ
クスラッシュでエスケープしない。
`:make`と `:grep` の出力のすべての NUL 文字は SOH (0x01) に置換されるので注
意。
Note: デフォルトでは大文字と小文字の違いは無視される。もし大文字・小文字の区別
をしたいなら "\C" をパターンに付け加える|/\C|。
Vimは任意の長さの行を読み取るが、最初の4095バイトのみが使用され、残りは無視さ
れる。要素の長さは1023バイトまでである。
基本要素
%f ファイル名 (文字列を検索)
%o モジュール名 (文字列を検索)
%l 行番号 (数字を検索)
%c 桁番号 (エラーの桁を表す数字 (<Tab>1個は1桁と数える))
%v 画面上の桁番号 (エラーの画面上の桁を表す番号 (<Tab>1個
はスクリーン上8桁と数える))
%t エラーの種類 (1文字を検索):
e - エラーメッセージ
w - 警告メッセージ
i - 情報メッセージ
n - ノートメッセージ
%n エラー番号 (数字を検索)
%m エラーメッセージ (文字列を検索)
%r その行の残り全部 %O/%P/%Q
%p ポインタ行 ('-', '.', ' ' またはタブの列を検索し、その
長さを桁番号とする)
%*{conv} scanfに割り当てられない変換
%% 1個のリテラル '%'
%s テキスト検索 (文字列を検索)
"%f" の変換は現在の 'isfname' の設定に依存する。"~/" はホームディレクトリ名に
展開され、環境変数も展開される。
変換 "%f" と "%m" はその文字列の終端を検出しなければならない。通常は、後に続く
文字と要素がマッチすれば、そこが終端になる。もし後に続く要素がなかったら、その
行の残りの部分がマッチする。"%f" の後に '%' かバックスラッシュが続いているな
ら、それは 'isfname' 文字の列を検索する。
MS-Windowsでは、"C:" で始まる部分は "%f" に含まれる。"%f:" と指定したときでも
そうなる。これはアルファベット1文字の名前のファイルは検出されないことを意味す
る。
"%p" の後には通常 "^" をつける。これは、以下のような出力によってエラーの桁を示
すコンパイラ用に使える: >
^
または >
---------^
これは複数行のエラーメッセージでも使える。実用的なサンプルとしては
|errorformat-javac|を参照。
"%s" はエラー行の位置を探すためのテキストを指定する。そのテキストは文字列リテ
ラルして使われる。検索テキストに正確にマッチするエラー行を探すために、"^" と
"$" がテキストに加えられる。また、テキストの先頭に "\V" が追加され、"very
nomagic" とされる。"%s" はエラー出力中の行番号がない行を探すために使うことがで
きる。シェルコマンド "grep" の出力のように。
パターンがある場合は行番号は使われない。
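例えば、行番号を含まない "ファイル名: テキスト" という形式の出力 (仮の書式) は
次のようにパースできる: >
:set errorformat=%f:\ %s
<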
"%o" はQuickfix項目の中のモジュール名を指定する。もし指定があればそれがファイ
ル名の代わりにQuickfixエラーウィンドウの中で使われる。モジュール名は結果を表示
するためだけに使われ、ファイル名はそのファイルにジャンプするときに使われる。
ディレクトリを変更する
次の大文字の変換文字は、特別なフォーマット文字列のタイプを指定する。これらのう
ち高々1つをコンマ区切りのフォーマットパターンの先頭につけることができる。
"%f" によって読まれるファイル名の前に付け足す必要があるディレクトリ名を出力す
るコンパイラがある (例: GNU make)。以下のコードはそれらのディレクトリ名を解釈
するのに使われる。そのディレクトリ名は内部のディレクトリスタックに保存される。
*E379*
%D "enter directory" フォーマット文字列。これ以下の%fはそ
のディレクトリ名を検索する。
%X "leave directory" フォーマット文字列。これ以下の%fは
ディレクトリスタックの1つ前のディレクトリを検索する。
"enter directory" や "leave directory" フォーマットを定義する場合、"%D" や
"%X" は部分文字列の最初に置かれなけれならない。Vimはディレクトリ変更を追跡し相
対パスによって指定されたファイル名の前にカレントディレクトリ名を付け足す。
Tipsや制限など詳細は|quickfix-directory-stack|を参照。
複数行にわたるメッセージ *errorformat-multi-line*
複数行にわたるメッセージを解釈することも可能である。取りうるプリフィックスは:
%E 複数行エラーメッセージの開始
%W 複数行警告メッセージの開始
%I 複数行情報メッセージの開始
%N 複数行ノートメッセージの開始
%A 複数行メッセージの開始 (種類指定なし)
%> 現在と同じパターンで始まっている次行 |efm-%>|
%C 複数行メッセージの継続
%Z 複数行メッセージの終了
これらに対して '+' と '-' をつけることもできる。|efm-ignore|を参照。
パターンに "\n" を含めても、複数行のメッセージにはマッチしない。
例: コンパイラが次のフォーマットでエラーを出力したとする。
(行頭の行番号は実際の出力の一部ではない):
1 Error 275 ~
2 line 42 ~
3 column 3 ~
4 ' ' expected after '--' ~
適切なエラーフォーマット文字列はこのようになる: >
:set efm=%EError\ %n,%Cline\ %l,%Ccolumn\ %c,%Z%m
すると、このエラーに対し|:clist|が表示するエラーメッセージはこのようになる:
1:42 col 3 error 275: ' ' expected after '--'
別の例: 次のエラーメッセージを出力するPythonインタープリターを考える。
(行頭の行番号は実際の出力の一部ではない):
1 ==============================================================
2 FAIL: testGetTypeIdCachesResult (dbfacadeTest.DjsDBFacadeTest)
3 --------------------------------------------------------------
4 Traceback (most recent call last):
5 File "unittests/dbfacadeTest.py", line 89, in testFoo
6 self.assertEquals(34, dtid)
7 File "/usr/lib/python2.2/unittest.py", line 286, in
8 failUnlessEqual
9 raise self.failureException, \
10 AssertionError: 34 != 33
11
12 --------------------------------------------------------------
13 Ran 27 tests in 0.063s
このメッセージに関する情報だけを|:clist|で表示させたいところだろう。
このように:
5 unittests/dbfacadeTest.py:89: AssertionError: 34 != 33
そのためにはエラーフォーマット文字列を次のように定義する: >
:set efm=%C\ %.%#,%A\ \ File\ \"%f\"\\,\ line\ %l%.%#,%Z%[%^\ ]%\\@=%m
"%C" を "%A" の前に置いていることに注意: ' %.%#' (これは正規表現 ' .*' を意味
する) がスペースで始まるすべての行にマッチするので、それが7行目を隠してくれる。
そうでないと7行目は別のエラーメッセージの始まりと解釈されてしまう。エ
ラーフォーマットは常に、リストの中から1つ1つ、最初のマッチが起こるまで試されて
いく。
*efm-%>*
要素 %> は 'errorformat' の最初の方に出てくるパターンを試すのを避けるために使
える。これはほとんど何にでもマッチするパターンに便利である。例えば、エラーが
このようなら:
Error in line 123 of foo.c: ~
unknown variable "i" ~
これは以下でマッチできる: >
:set efm=xxx,%E%>Error in line %l of %f:,%Z%m
ここで "xxx" には2番目の行にもマッチするパターンが入るとする。
重要: エラーフォーマットのどの部分が以前にマッチしたかは記憶されていない。すな
わち、エラーファイルの各行が毎回エラーフォーマットの各行に対してテストされる。
例えば、次のようになっているとする: >
setlocal efm=aa,bb,cc,dd,ee
ここでaa, bbなどはエラーフォーマット文字列とする。エラーファイルの各行がパター
ンaa,次にbb,次にcc…とテストされる。ccがエラーの1つ前の行にマッチしたからといっ
て、ddが現在行に対して最初にテストされるということにはならない。ccとddが複数行
エラーフォーマット文字列だったとしても、である。
ファイル名を分割する *errorformat-separate-filename*
1度現れたファイル名を複数のメッセージが参照する場合には、これらのプリフィック
スが有効である。
%O 1行ファイルメッセージ: マッチ部分を読み込む (それ以前
に記憶されていたものは消去される)
%P 1行ファイルメッセージ: ファイル%fをスタックにプッシュ
する。
%Q 1行ファイルメッセージ: スタックから最後のファイル名を
ポップする。
例: 次のエラーログファイルを出力するコンパイラがあるとする (行番号は実際の出力
ではない)
1 [a1.tt]
2 (1,17) error: ';' missing
3 (21,2) warning: variable 'z' not defined
4 (67,3) error: end of file found before string ended
5
6 [a2.tt]
7
8 [a3.tt]
9 NEW compiler v1.1
10 (2,2) warning: variable 'x' not defined
11 (67,3) warning: 's' already defined
このログファイルは[...]で囲まれたファイルに対し複数のメッセージを示している。
これは次のエラーフォーマットで適切に解釈できる: >
:set efm=%+P[%f],(%l\\,%c)%*[\ ]%t%*[^:]:\ %m,%-Q
|:clist|を呼ぶとこれらをファイル名とともに適切に表示してくれる:
2 a1.tt:1 col 17 error: ';' missing
3 a1.tt:21 col 2 warning: variable 'z' not defined
4 a1.tt:67 col 3 error: end of file found before string ended
8 a3.tt:2 col 2 warning: variable 'x' not defined
9 a3.tt:67 col 3 warning: 's' already defined
行全体にマッチする他のプリフィックスとは違い、%P, %Q, %Oは同一行の複数のパター
ンにマッチさせるのに使える。それゆえ、次のようにファイルがネストした場合を解釈
することもできる:
{"file1" {"file2" error1} error2 {"file3" error3 {"file4" error4 error5}}}
%Oはファイル名情報のプッシュ・ポップを含まない文字列を解釈する。発展例について
は|errorformat-LaTeX|を参照。
メッセージ全体を無視する・使う *efm-ignore*
'+', '-' は大文字の指定文字と組み合わせて使う。'%+A' や '%-G' のように指定文字
の前につける。
%- 複数行のマッチを含まない。
%+ エラー文字列%m中でマッチした行全体
プリフィックス%Gだけは '+' か '-' と組み合わせたときのみ意味を持つ。これはコン
パイラバージョンのような一般的な情報を含む行か、無視するべきヘッダーを読み込む。
%-G このメッセージを無視する
%+G 一般的なメッセージ
パターンマッチング
古いバージョンのVimとの下位互換性の為にscanf()と同じ "%*[]" という記法がサポー
トされている。しかし、フォーマット文字列にVimがサポートするほぼ全ての正規表現
を用いる事も可能である。正規表現言語のメタ文字は普通の文字列やファイル検索の一
部と重なってしまうから (従って内部的にはエスケープされる必要がある)、メタシン
ボルは '%' を付加して表記される必要がある:
%\ 単体の '\' という文字。これは ":set errorformat=" の定
義の中ではエスケープされて ("%\\") 書かれなければなら
ない。
%. 単体の '.' という文字。
%# 単体の '*' (!) という文字。
%^ 単体の '^' という文字。注意: これなしでも行頭にはマッ
チするので、これは特に便利ではない。
%$ 単体の '$' という文字。注意: これなしでも行末にはマッ
チするので、これは特に便利ではない。
%[ 単体の '[' という文字。文字の範囲[]のために使われる。
%~ 単体の '~' という文字。
表現の中でキャラクタクラスを使用する場合 (概要は|/\i|を参照)、数量子 "\+" を含む
語はscanf()の中に "%*" という記法で書くことができる。例: "%\\d%\\+" ("\d\+",
"どんな数字でも") は "%*\\d" と等価である。重要: \(...\)のグループ表現は、内部
変換に使うため予約されているからフォーマット指定内では使用することができない。
'errorformat' 内の複数の要素 *efm-entries*
複数のコンパイラからの出力を見つけることを可能にするために、コンマで区切って複
数のフォーマットパターンを 'errorformat' に設定することができるだろう (note:
コンマ直後の空白は無視される)。完全にマッチした最初のパターンが採択される。マッ
チするものが無い場合、最後にマッチした部分が使われるが、ファイルネームは除外さ
れエラーメッセージは全体のメッセージとして設定される。複数のコンパイラからの出
力メッセージにマッチしてしまうパターンがあった (しかし正確には一致しない) 時に
は、より制限されたもの {訳注: 他のメッセージにマッチし難いもの} の後に置く。パ
ターンの先頭にコンマを含めるにはバックスラッシュ (":set" コマンド中では2度タイ
プするべきだ) を添える。バックスラッシュを含めるためには2つ与える (つまり
":set" コマンドの中では4つタイプする)。また、":set" コマンド内のスペースの前に
はバックスラッシュを置く必要がある。
有効なマッチ *quickfix-valid*
もし 'errorformat' に完全には一致しない行が現れた場合、エラーメッセージ全体が
表示され、エントリは無効とされコマンド ":cn" や ":cp" 使用時にはスキップされる
(有効なエントリが全く無い場合で無い限り)。エラーメッセージの全てはコマンド
":cl!" で表示する事ができる。
エラーフォーマットがファイル名を含んでいないとVimは正しいファイルへジャンプ
することができない。手動でやる必要がある。
例
Aztec compilerのファイルの書式は:
ファイル名>行:列:エラータイプ:識別番号:メッセージ
ファイル名 エラーが見つかったファイルの名前
行 エラーが見つかった行の通し番号
列 エラーが見つかった場所の列数 (行先頭からの文字数)
タイプ エラーの種類、通常は一文字で 'E' か 'W'
識別番号 エラーの番号 (マニュアルの検索用)
メッセージ エラーの説明
これは 'errorformat' をこのように設定すればマッチできる:
%f>%l:%c:%t:%n:%m
単行エラーを出力するCコンパイラのための幾つかの例:
%f:%l:\ %t%*[^0123456789]%n:\ %m Manx/Aztec C エラーメッセージ
(scanf()は[0-9]を理解しない)
%f\ %l\ %t%*[^0-9]%n:\ %m SAS C用
\"%f\"\\,%*[^0-9]%l:\ %m generic C compilers用
%f:%l:\ %m GCC用
%f:%l:\ %m,%Dgmake[%*\\d]:\ Entering\ directory\ `%f',
%Dgmake[%*\\d]:\ Leaving\ directory\ `%f'
GCC with gmake用 (行を連結すること!)
%f(%l)\ :\ %*[^:]:\ %m old SCO C compiler (pre-OS5)
%f(%l)\ :\ %t%*[^0-9]%n:\ %m idem, エラーの種類と番号つき
%f:%l:\ %m,In\ file\ included\ from\ %f:%l:,\^I\^Ifrom\ %f:%l%m
いくつかの拡張つきGCC
複数行メッセージを扱うために拡張した例が次の所で与えられる。
|errorformat-Jikes|と|errorformat-LaTeX|を参照。
:setコマンドで使うときにはスペースとダブルクォートの前にバックスラッシュが必要
なことに注意。コンマの前には2つのバックスラッシュを置く。1つは:setコマンドのた
め、もう1つはコンマがエラーフォーマットの区切りと認識されるのを避けるためであ
る。
メッセージをフィルタリングする
もしコンパイラがフォーマットに合わないエラーメッセージを作成する場合、エラー
メッセージをこのフォーマットに変換するプログラムを書く方法もある。その時は
コマンド ":make" によって起動されるプログラムオプション 'makeprg' を変更するこ
とで指定できる。例: >
:set mp=make\ \\\|&\ error_filter
パイプ (|) の前のバックスラッシュはコマンドセパレータとして認識されないために
必要。コマンド "set" では空白の前にバックスラッシュが必要。
=============================================================================
8. ディレクトリスタック *quickfix-directory-stack*
Quickfixはmakeの出力を解釈し、使われたディレクトリ全てをスタックで保持する。
GNU-Makeではディレクトリに入ったり出たりすると常に絶対パスで表示されるので、
これはむしろシンプルである。これはmakefile中のcdコマンドか、起動パラメーター
"-C dir" (makefileの読みこみ前にディレクトリを変更) なのかには因らない。
GNU-Makeに強制的に処理の前後にワーキングディレクトリを表示させるためにスイッ
チ "-w" を使用するのは便利かもしれない。
GNU-makeを使用しない場合、正しいディレクトリを管理する事はもっと複雑になる。
例えばAIX-makeはワーキングディレクトリに関してなんの情報も表示しない。
よってmakefileに細工が必要となる。LessTifのmakefileには "Making {target} in
{dir}" と表示するコマンドがある。ここにはディレクトリを出る時の情報とその相対
パスが表示されないという重要な問題もある。
パスの関係とメッセージ "leave directory" が現れない問題のためにVimでは次の
アルゴリズムで対処している:
1) 与えられたディレクトリがカレントディレクトリの子か調べる。真ならばそれを
カレントディレクトリとする。
2) カレントディレクトリの子ディレクトリでなかった場合、上のディレクトリの子
ディレクトリか (つまり兄弟ディレクトリ) を調べる。
3) まだディレクトリが見つからない場合、これはVimのカレントディレクトリの子
ディレクトリだと仮定される。
付け加えて、全てのファイルについて認識されたディレクトリに実際に存在するのか調
べられる。もしもなければディレクトリスタックの中の全てのディレクトリ (サブディ
レクトリではない) について探す。これでも見つからなければVimのカレントディレク
トリにあるものと仮定される。
このアルゴリズムには制限がある。この例はmakeがディレクトリに入った時に
"Making all in dir" の形で情報を表示すると仮定している。
1) 次のようなディレクトリとファイルがあったとする
./dir1
./dir1/file1.c
./file1.c
カレントディレクトリの前にmakeが "./dir1" を処理し "./file1.c" にエラーがあ
るとVimは "./dir1/file1.c" をロードしてしまう。
これはメッセージ "leave directory" があれば解決する事ができる。
2) 次のようなディレクトリとファイルがあったとする
./dir1
./dir1/dir2
./dir2
次のようになる:
Makeの出力 Vimが解釈するディレクトリ
------------------------ ----------------------------
Making all in dir1 ./dir1
Making all in dir2 ./dir1/dir2
Making all in dir2 ./dir1/dir2
これはメッセージ "enter directory" に絶対パスが記述されるか、メッセージ
"leave directory" が表示されれば解決される。
この問題を避けるため、ディレクトリの絶対パスとメッセージ "leave directory"
が表示されるようにすればよい。
Makefileの例:
Unix:
libs:
for dn in $(LIBDIRS); do \
(cd $$dn; echo "Entering dir '$$(pwd)'"; make); \
echo "Leaving dir"; \
done
上の出力を取り扱うために
%DEntering\ dir\ '%f',%XLeaving\ dir
を 'errorformat' につけ加える。
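":set" コマンドで指定する場合は、例えば次のようになる (空白はエスケープする): >
:set efm+=%DEntering\ dir\ '%f',%XLeaving\ dir
<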
注意: Vimはメッセージ "leave directory" の中のディレクトリ名がカレント
ディレクトリかどうかはチェックしない。これが何故メッセージ "Leaving dir" だけ
で良いかの理由だ。
=============================================================================
9. 具体的なエラーファイルフォーマット *errorformats*
*errorformat-Jikes*
IBM Researchによって公開されているJavaコンパイラJikes(TM)はシンプルなマルチラ
インエラーメッセージを出力する。
このメッセージにマッチする 'errorformat' の文字列を下に示す。これをユーザーの
|vimrc|に書くことでVimがデフォルトで認識するフォーマットを上書きする事が
できる。またデフォルトに追加インストールする方法は|:set+=|を参照。 >
:set efm=%A%f:%l:%c:%*\\d:%*\\d:,
\%C%*\\s%trror:%m,
\%+C%*[^:]%trror:%m,
\%C%*\\s%tarning:%m,
\%C%m
Jikes(TM)はオプション "+E" とともに起動されたときは1行エラーメッセージを出力す
る。これは次によってマッチできる。 >
:setl efm=%f:%l:%v:%*\\d:%*\\d:%*\\s%m
<
*errorformat-javac*
この 'errorformat' は、エラーの桁を示すのに "^" の行を出力するjavac用にうまく
動作すると報告されている: >
:setl efm=%A%f:%l:\ %m,%-Z%p^,%-C%.%#
または: >
:setl efm=%A%f:%l:\ %m,%+Z%p^,%+C%.%#,%-G%.%#
<
Michael F. Lambが考案した別の方法を以下に示す。これはUnix用で、最初にエラーを
フィルタリングする: >
:setl errorformat=%Z%f:%l:\ %m,%A%p^,%-G%*[^sl]%.%#
:setl makeprg=javac\ %:S\ 2>&1\ \\\|\ vim-javac-filter
以下の行を "vim-javac-filter" というファイルに書いて、PATHの通ったディレクトリ
(例えば~/bin) に置き、実行可能にしておく必要がある: >
#!/bin/sed -f
/\^$/s/\t/\ /g;/:[0-9]\+:/{h;d};/^[ \t]*\^/G;
{訳注: BSD sed では動作しないようです。GNU sed では動作します。}
このsedスクリプトを言葉で説明すると次のようになる:
- 1つのタブを1つのスペースに置換し、
- ファイル名・行番号・エラーメッセージを含む行をポインタ行 ("^" の行のこと) の
直後に移動する。これによって、エラーメッセージ行とポインタ行の間の使われない
テキストが無視され、vimの「複数行メッセージ」の記法にマッチするようになり、
また、それを「複数行メッセージの継続」として含めなくてもよいようになる。
*errorformat-ant*
ant (http://jakarta.apache.org/) 用には、各javacの出力行の前につく[javac]を受
け取るために、上のエラーフォーマットを修正しなければならない: >
:set efm=%A\ %#[javac]\ %f:%l:\ %m,%-Z\ %#[javac]\ %p^,%-C%.%#
javacやjikesとantをともに扱うためにこの 'errorformat' を調整することができる。
jikesを使っているなら、jikesの+Eコマンドラインスイッチを使うことをantに教えな
ければならない (このスイッチはjikesに1行エラーメッセージを生成させる)。これが
build.xmlファイルの2行目が行っていることである: >
<property name = "build.compiler" value = "jikes"/>
<property name = "build.compiler.emacs" value = "true"/>
javac、jikesと組み合わせたantを扱う 'errorformat' はこうである: >
:set efm=\ %#[javac]\ %#%f:%l:%c:%*\\d:%*\\d:\ %t%[%^:]%#:%m,
\%A\ %#[javac]\ %f:%l:\ %m,%-Z\ %#[javac]\ %p^,%-C%.%#
<
*errorformat-jade*
jade (http://www.jclark.com/ 参照) のエラーを解釈するのは簡単である: >
:set efm=jade:%f:%l:%c:%t:%m
<
*errorformat-LaTeX*
次のは複数行に渡ってエラーメッセージを表示する (La)TeX タイプセッティング
システム用の 'errorformat' 文字列を指定する一つの例である。":clist" や ":cc"
等々のコマンドは先行する空白を削除して複数行のものを一行にまとめて表示する。
以下のLaTeX用errorformatはマルチラインエラーを出力する他のコンパイラへ応用する
のは簡単だろう。
コマンドは|vimrc|ファイルか別のVim scriptファイルに書ける。例えばLaTeXに関連し
た内容を含むスクリプトをLaTeXソースの編集時にだけ読みこまれるようにする。
サンプルの全行をコピーしたことを確認する (順番もそのまま)。行の始まりに見るこ
とのできる '\' の表記は|line-continuation|を参照。
まず 'makeprg' をLaTeXが最初のエラーで止まることなく複数のエ
ラーを返すように準備する。 >
:set makeprg=latex\ \\\\nonstopmode\ \\\\input\\{$*}
<
マルチラインエラーメッセージの始まり: >
:set efm=%E!\ LaTeX\ %trror:\ %m,
\%E!\ %m,
< マルチライン警告メッセージの始まり;最初の2つは行番号も含んで
いる。幾つかの正規表現の意味:
- "%.%#" (".*") 文字列 (空文字列も含む) にマッチ
- "%*\\d" ("\d\+") 数字にマッチ >
\%+WLaTeX\ %.%#Warning:\ %.%#line\ %l%.%#,
\%+W%.%#\ at\ lines\ %l--%*\\d,
\%WLaTeX\ %.%#Warning:\ %m,
< エラー/警告メッセージが続く可能性;最初の一つは行番号も含んで
いる: >
\%Cl.%l\ %m,
\%+C\ \ %m.,
\%+C%.%#-%.%#,
\%+C%.%#[]%.%#,
\%+C[]%.%#,
\%+C%.%#%[{}\\]%.%#,
\%+C<%.%#>%.%#,
\%C\ \ %m,
< 次のパターンにマッチする行には重要な情報は含まれていない;
よってメッセージに含まないようにする。 >
\%-GSee\ the\ LaTeX%m,
\%-GType\ \ H\ <return>%m,
\%-G\ ...%.%#,
\%-G%.%#\ (C)\ %.%#,
\%-G(see\ the\ transcript%.%#),
< 通常、空白文字だけの行は表示しない: >
\%-G%*\\s,
< LaTeXの出力ログには個々のライン毎にエラーのあったファイル名が
特定(記述)されているわけではない;ログのあらゆる所で与えられ、
括弧にくくられている。
続くパターンはそれらの名前を取り出し内部スタックに保存しようと
試みる。パターンは時として一つの行を複数回走査 (一つ目を
見つけた後、同じ行に次のを発見しようと) するので、パターンの
末尾の "%r" が行の残りの部分が次の試行で解釈の対象になることと、
行の末尾に達するまでそれが繰り返されることを示す。
'('...')' でくくられたファイル名を読み飛ばす;明らかにエラーを
含まないファイルはスタックに積まない: >
\%+O(%f)%r,
< ファイル名をスタックに積む。名前は '(' の後に与えられる。 >
\%+P(%f%r,
\%+P\ %\\=(%f%r,
\%+P%*[^()](%f%r,
\%+P[%\\d%[^()]%#(%f%r,
< ')' が見つかったらファイル名をスタックから取り崩す。 >
\%+Q)%r,
\%+Q%*[^()])%r,
\%+Q[%\\d%*[^()])%r
幾つかのケースにおいてLaTeXの出力したログの中のファイル名を正確に取り出す事が
できないことに注意。括弧の対応が正しくつかない時パーサーは混乱してしまう。上記
のサンプルはもっとも一般的なケースだけ検出できるようにしてある。目的に合わせて
このサンプルを変える事はできる。例えば全てのいまいましい "Overfull ..." という
警告メッセージがエラーとして認識されてしまう事を防ぐ事ができる。
付け加えてLaTeXコンパイラの出力をフィルタリングするには、[La]TeXコンパイラに
よって生成されるファイル*.logを直接読むことも可能である。これは起こる可能性の
あるエラーについてより便利な情報を沢山含んでいる。しかしそのように複雑なファイ
ルを正確に解釈するには、外部フィルタを使うほうが良い。そのようなVimに識別され
るフィルタの作り方はずっと以前に述べたので参照。
*errorformat-Perl*
$VIMRUNTIME/tools にefm_perl.plスクリプトがある。これはPerlのエラーメッセージ
をフィルタし、quickfixモードが理解できるフォーマットに変換する。使い方はファイ
ルの先頭を参照。(このスクリプトはもう非推奨で、今は |compiler-perl| を参照のこ
と)
=============================================================================
10. Quickfix ウィンドウのカスタマイズ *quickfix-window-function*
quickfix ウィンドウおよび location リストウィンドウ内で各行を表示するデフォル
トのフォーマットは:
<filename>|<lnum> col <col>|<text>
各行で表示されるのは |getqflist()| 関数が返す、"bufnr", "lnum", "col", "text"
の各フィールドに相当する値。
いくつかの quickfix/location リストでは、表示テキストのカスタマイズが必要になる。
例えば、quickfixのエントリにファイル名しか含まれておらず、ファイル名の後に続く
フィールド区切りの2つの "|" が不要な場合や、ファイル名の表示方法を変えたい場合
である。デフォルトではカレントディレクトリからの相対パスではなく完全なパス (長
すぎるかもしれない) が表示されるため、共通の親ディレクトリを省いてパスを簡略化
したいこともあるだろう。
表示するテキストは 'quickfixtextfunc' オプションに Vim の関数を設定することで
カスタマイズできる。この関数は辞書を引数として呼ばれ、quickfix か location ウィ
ンドウで表示される文字列のリストを返さなくてはならない。引数の辞書は以下の
フィールドを持っている:
quickfix quickfix リストを呼ぶときは1が設定され、location リストのとき
は0が設定される。
winid location リストの時、そのウィンドウIDが設定される。quickfix
リストの時、0が設定される。getloclist() で location リストの項
目を取得するのに使える。
id quickfix か location リストの識別子
start_idx 返されたテキストの最初の項目のインデックス
end_idx 返されたテキストの最後の項目のインデックス
関数は項目の start_idx から end_idx について quickfix ウィンドウに表示する単一
の行の表示テキストを返さなくてはならない。関数は項目の情報を |getqflist()| 関
数から quickfix リストを識別する識別子 "id" で取れる。location リストの時は、
'winid' 引数付きで getloclist() 関数を使う。空のリストが返された場合、全エント
リの表示にデフォルトのフォーマットが使われる。返されたリストの項目が空文字列の
場合、そのエントリに一致する項目の表示にデフォルトのフォーマットが使われる。
もし quickfix か location リストで固有のカスタマイズが必要ならば、利用する
|setqflist()| か |setloclist()| 関数で 'quickfixtextfunc' 属性をリストに設定
できる。これは 'quickfixtextfunc' を上書きする。
下の例では履歴ファイル (|v:oldfiles|) を quickfix ウィンドウに表示している。そ
こでは各項目の行番号、カラム番号、関連するエラーのテキストはなく、
'quickfixtextfunc' の関数はファイル名だけを返している。
例: >
" quickfix リストを v:oldfiles から作る
call setqflist([], ' ', {'lines' : v:oldfiles, 'efm' : '%f',
\ 'quickfixtextfunc' : 'QfOldFiles'})
func QfOldFiles(info)
" quickfix の項目の対象範囲から情報を取得する
let items = getqflist({'id' : a:info.id, 'items' : 1}).items
let l = []
for idx in range(a:info.start_idx - 1, a:info.end_idx - 1)
" ファイル名をシンプルにして利用する
call add(l, fnamemodify(bufname(items[idx].bufnr), ':p:.'))
endfor
return l
endfunc
<
vim:tw=78:ts=8:noet:ft=help:norl:
#!/bin/bash
FN="moe430b.db_3.2.3.tar.gz"
URLS=(
"https://bioconductor.org/packages/3.11/data/annotation/src/contrib/moe430b.db_3.2.3.tar.gz"
"https://bioarchive.galaxyproject.org/moe430b.db_3.2.3.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-moe430b.db/bioconductor-moe430b.db_3.2.3_src_all.tar.gz"
"https://depot.galaxyproject.org/software/bioconductor-moe430b.db/bioconductor-moe430b.db_3.2.3_src_all.tar.gz"
)
MD5="74027eafdb3815a8310e84ade3a43dff"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING=$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM
mkdir -p $STAGING
TARBALL=$STAGING/$FN
SUCCESS=0
for URL in ${URLS[@]}; do
curl $URL > $TARBALL
[[ $? == 0 ]] || continue
# Platform-specific md5sum checks.
if [[ $(uname -s) == "Linux" ]]; then
if md5sum -c <<<"$MD5 $TARBALL"; then
SUCCESS=1
break
fi
else if [[ $(uname -s) == "Darwin" ]]; then
if [[ $(md5 $TARBALL | cut -f4 -d " ") == "$MD5" ]]; then
SUCCESS=1
break
fi
fi
fi
done
if [[ $SUCCESS != 1 ]]; then
echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
printf '%s\n' "${URLS[@]}"
exit 1
fi
# Install and clean up
R CMD INSTALL --library=$PREFIX/lib/R/library $TARBALL
rm $TARBALL
rmdir $STAGING
:103E000001C0B6C0112484B790E89093610010926D
:103E10006100882361F0982F9A70923041F081FF01
:103E200002C097EF94BF282E80E0C5D0E9C085E09E
:103E30008093810092E09093C00098E19093C1003C
:103E40008093C40086E08093C2008EE0B4D0209AB4
:103E500084E024E23DEF91E03093850020938400DC
:103E600096BBB09BFECF189AA8954091C00047FD25
:103E700002C0815089F793D0813479F490D0182F03
:103E8000A0D0123811F480E004C088E0113809F0A5
:103E900083E07ED080E17CD0EECF823419F484E1DF
:103EA00098D0F8CF853411F485E0FACF853541F408
:103EB00076D0C82F74D0D82FCC0FDD1F82D0EACF98
:103EC000863519F484E085D0DECF843691F567D04D
:103ED00066D0F82E64D0D82E00E011E058018FEFA4
:103EE000A81AB80A5CD0F80180838501FA10F6CFD1
:103EF00068D0F5E4DF1201C0FFCF50E040E063E09E
:103F0000CE0136D08E01E0E0F1E06F0182E0C80E14
:103F1000D11C4081518161E0C8012AD00E5F1F4F42
:103F2000F601FC10F2CF50E040E065E0CE0120D079
:103F3000B1CF843771F433D032D0F82E30D041D0A5
:103F40008E01F80185918F0123D0FA94F110F9CFF9
:103F5000A1CF853739F435D08EE11AD084E918D055
:103F60008AE097CF813509F0A9CF88E024D0A6CF89
:103F7000FC010A0167BFE895112407B600FCFDCFDC
:103F8000667029F0452B19F481E187BFE895089503
:103F90009091C00095FFFCCF8093C600089580915A
:103FA000C00087FFFCCF8091C00084FD01C0A895B0
:103FB0008091C6000895E0E6F0E098E19083808368
:103FC0000895EDDF803219F088E0F5DFFFCF84E15E
:103FD000DFCFCF93C82FE3DFC150E9F7CF91F1CF07
:023FFE000008B9
:0400000300003E00BB
:00000001FF
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <mosquitto.h>
static int run = -1;
void on_connect(struct mosquitto *mosq, void *obj, int rc)
{
if(rc){
printf("Connect error: %d\n", rc);
exit(1);
}
}
int main(int argc, char *argv[])
{
int rc;
struct mosquitto *mosq;
int port = atoi(argv[1]);
mosquitto_lib_init();
mosq = mosquitto_new("publish-qos1-test", true, &run);
mosquitto_connect_callback_set(mosq, on_connect);
rc = mosquitto_connect(mosq, "localhost", port, 5);
while(run == -1){
rc = mosquitto_loop(mosq, 300, 1);
if(rc){
exit(0);
}
}
mosquitto_lib_cleanup();
return 0;
}
from __future__ import print_function
import numpy as np
import numba.unittest_support as unittest
from numba.ctypes_support import *
from numba import _helperlib
class ArrayStruct3D(Structure):
# Mimick the structure defined in numba.targets.arrayobj's make_array()
_fields_ = [
("meminfo", c_void_p),
("parent", c_void_p),
("nitems", c_ssize_t),
("itemsize", c_ssize_t),
("data", c_void_p),
("shape", (c_ssize_t * 3)),
("strides", (c_ssize_t * 3)),
]
class TestArrayAdaptor(unittest.TestCase):
def test_array_adaptor(self):
arystruct = ArrayStruct3D()
adaptorptr = _helperlib.c_helpers['adapt_ndarray']
adaptor = PYFUNCTYPE(c_int, py_object, c_void_p)(adaptorptr)
ary = np.arange(60).reshape(2, 3, 10)
status = adaptor(ary, byref(arystruct))
self.assertEqual(status, 0)
self.assertEqual(arystruct.data, ary.ctypes.data)
self.assertNotEqual(arystruct.meminfo, 0)
self.assertEqual(arystruct.parent, id(ary))
self.assertEqual(arystruct.nitems, 60)
self.assertEqual(arystruct.itemsize, ary.itemsize)
for i in range(3):
self.assertEqual(arystruct.shape[i], ary.ctypes.shape[i])
self.assertEqual(arystruct.strides[i], ary.ctypes.strides[i])
if __name__ == '__main__':
unittest.main()
<?php
declare(strict_types=1);
/*
* This file is part of the Sonata Project package.
*
* (c) Thomas Rabaix <[email protected]>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Sonata\BlockBundle\DependencyInjection\Compiler;
use Symfony\Component\DependencyInjection\Compiler\CompilerPassInterface;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\DependencyInjection\Definition;
use Symfony\Component\DependencyInjection\Reference;
/**
* Link the block service to the Page Manager.
*
* @final since sonata-project/block-bundle 3.0
*
* @author Thomas Rabaix <[email protected]>
*/
class TweakCompilerPass implements CompilerPassInterface
{
public function process(ContainerBuilder $container)
{
$manager = $container->getDefinition('sonata.block.manager');
$registry = $container->getDefinition('sonata.block.menu.registry');
$blocks = $container->getParameter('sonata_block.blocks');
$blockTypes = $container->getParameter('sonata_blocks.block_types');
$cacheBlocks = $container->getParameter('sonata_block.cache_blocks');
$defaultContexs = $container->getParameter('sonata_blocks.default_contexts');
foreach ($container->findTaggedServiceIds('sonata.block') as $id => $tags) {
$definition = $container->getDefinition($id);
$definition->setPublic(true);
if (!$definition->isAutowired()) {
$this->replaceBlockName($container, $definition, $id);
}
$settings = $this->createBlockSettings($id, $tags, $defaultContexs);
// Register blocks dynamicaly
if (!\array_key_exists($id, $blocks)) {
$blocks[$id] = $settings;
}
if (!\in_array($id, $blockTypes, true)) {
$blockTypes[] = $id;
}
if (isset($cacheBlocks['by_type']) && !\array_key_exists($id, $cacheBlocks['by_type'])) {
$cacheBlocks['by_type'][$id] = $settings['cache'];
}
$manager->addMethodCall('add', [$id, $id, $settings['contexts']]);
}
foreach ($container->findTaggedServiceIds('knp_menu.menu') as $id => $tags) {
foreach ($tags as $attributes) {
if (empty($attributes['alias'])) {
throw new \InvalidArgumentException(sprintf('The alias is not defined in the "knp_menu.menu" tag for the service "%s"', $id));
}
$registry->addMethodCall('add', [$attributes['alias']]);
}
}
$services = [];
foreach ($container->findTaggedServiceIds('sonata.block.loader') as $id => $tags) {
$services[] = new Reference($id);
}
$container->setParameter('sonata_block.blocks', $blocks);
$container->setParameter('sonata_blocks.block_types', $blockTypes);
$container->setParameter('sonata_block.cache_blocks', $cacheBlocks);
$container->getDefinition('sonata.block.loader.service')->replaceArgument(0, $blockTypes);
$container->getDefinition('sonata.block.loader.chain')->replaceArgument(0, $services);
$this->applyContext($container);
}
/**
* Apply configurations to the context manager.
*/
public function applyContext(ContainerBuilder $container)
{
$definition = $container->findDefinition('sonata.block.context_manager');
foreach ($container->getParameter('sonata_block.blocks') as $service => $settings) {
if (\count($settings['settings']) > 0) {
$definition->addMethodCall('addSettingsByType', [$service, $settings['settings'], true]);
}
}
foreach ($container->getParameter('sonata_block.blocks_by_class') as $class => $settings) {
if (\count($settings['settings']) > 0) {
$definition->addMethodCall('addSettingsByClass', [$class, $settings['settings'], true]);
}
}
}
private function createBlockSettings(string $id, array $tags = [], array $defaultContexts = []): array
{
$contexts = $this->getContextFromTags($tags);
if (0 === \count($contexts)) {
$contexts = $defaultContexts;
}
return [
'contexts' => $contexts,
'templates' => [],
'cache' => 'sonata.cache.noop',
'settings' => [],
];
}
/**
* Replaces the empty service name with the service id.
*/
private function replaceBlockName(ContainerBuilder $container, Definition $definition, $id)
{
$arguments = $definition->getArguments();
// Replace empty block id with service id
if ($this->serviceDefinitionNeedsFirstArgument($definition)) {
// NEXT_MAJOR: Remove the if block when Symfony 2.8 support will be dropped.
if (method_exists($definition, 'setArgument')) {
$definition->setArgument(0, $id);
return;
}
$definition->replaceArgument(0, $id);
}
}
private function serviceDefinitionNeedsFirstArgument(Definition $definition): bool
{
$arguments = $definition->getArguments();
return empty($arguments) ||
null === ($arguments[0]) ||
\is_string($arguments[0]) && 0 === \strlen($arguments[0]);
}
/**
* @param string[][]
*
* @return string[]
*/
private function getContextFromTags(array $tags)
{
return array_filter(array_map(static function (array $attribute) {
if (\array_key_exists('context', $attribute) && \is_string($attribute['context'])) {
return $attribute['context'];
}
return null;
}, $tags));
}
}
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="15.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
<ProjectGuid>{A943CE44-82B8-4000-A0A0-A86FE0051240}</ProjectGuid>
<OutputType>Exe</OutputType>
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>ThoughtWorks.CruiseControl.CCCmd</RootNamespace>
<AssemblyName>CCCmd</AssemblyName>
<TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<Deterministic>true</Deterministic>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>DEBUG;TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<NoWarn>1591</NoWarn>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Build|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>true</Optimize>
<OutputPath>..\..\Build\CCCmd\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<NoWarn>1591</NoWarn>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugType>pdbonly</DebugType>
<Optimize>true</Optimize>
<OutputPath>..\..\Build\CCCmd\</OutputPath>
<DefineConstants>TRACE</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<NoWarn>1591</NoWarn>
<CodeAnalysisRuleSet>AllRules.ruleset</CodeAnalysisRuleSet>
</PropertyGroup>
<ItemGroup>
<Reference Include="System" />
<Reference Include="System.Core" />
<Reference Include="System.Data" />
<Reference Include="System.Xml" />
</ItemGroup>
<ItemGroup>
<Compile Include="CommandType.cs" />
<Compile Include="Program.cs" />
<Compile Include="Properties\AssemblyInfo.cs" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Help.txt" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Remote\Remote.csproj">
<Project>{E820CF3B-8C5A-4002-BC16-B7818D3D54A8}</Project>
<Name>Remote</Name>
</ProjectReference>
</ItemGroup>
<ItemGroup>
<None Include="app.config" />
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
<Target Name="BeforeBuild">
</Target>
<Target Name="AfterBuild">
</Target>
-->
</Project>
package com.grace.zhihunews.network.entity.video;
/**
* Created by Administrator on 2016/9/10.
*/
public class Category {
private int id;
private String name;
private Object alias;
private String bgPicture;
private String bgColor;
public int getId() {
return id;
}
public String getName() {
return name;
}
public Object getAlias() {
return alias;
}
public String getBgPicture() {
return bgPicture;
}
public String getBgColor() {
return bgColor;
}
public void setId(int id) {
this.id = id;
}
public void setName(String name) {
this.name = name;
}
public void setAlias(Object alias) {
this.alias = alias;
}
public void setBgPicture(String bgPicture) {
this.bgPicture = bgPicture;
}
public void setBgColor(String bgColor) {
this.bgColor = bgColor;
}
}
<html><head>
<title>Substring matching attribute value selector on end with declared namespace</title>
<style type="text/css">@namespace a url(http://www.example.org/a);
@namespace b url(http://www.example.org/b);
@namespace html url(http://www.w3.org/1999/xhtml);
*|p, *|q, *|r, *|s { display : block ; margin-bottom : 1em }
*|p, *|r { background-color : lime ! important }
*|*[a|title$="tait"], *|*[html|title$="tait"] { background-color : red }
</style>
<link rel="author" title="Daniel Glazman" href="http://glazman.org/">
<link rel="author" title="Ian Hickson" href="mailto:[email protected]">
<link rel="help" href="https://www.w3.org/TR/css3-selectors/#selectors"> <!-- bogus link to make sure it gets found -->
<meta name="flags" content=" namespace">
</head>
<body>
<p title="si on chantait">This paragraph should have a green background</p>
<q xmlns="http://www.example.org/a" xmlns:a="http://www.example.org/a" a:title="si nous chantions">This paragraph should be unstyled.</q>
<r xmlns="http://www.example.org/a" xmlns:a="http://www.example.org/a" a:title="si on chantait">This paragraph should have a green background.</r>
<s xmlns="http://www.example.org/b" xmlns:b="http://www.example.org/b" b:title="si on chantait">This paragraph should be unstyled.</s>
</body></html>
#################################
# Wrapper for rpl_insert_id.test#
#################################
-- source include/not_ndb_default.inc
-- source include/have_tokudb.inc
-- source include/not_group_replication_plugin.inc
let $engine_type=tokudb;
-- source extra/rpl_tests/rpl_insert_id.test
import { Vector2 } from '../../../../src/Three';
import { NodeFrame } from '../core/NodeFrame';
import { Vector2Node } from '../inputs/Vector2Node';
export class ResolutionNode extends Vector2Node {
constructor();
size: Vector2;
nodeType: string;
updateFrame( frame: NodeFrame ): void;
copy( source: ResolutionNode ): this;
}
rem create reference
set MSCORE=..\msvc.install_x64\bin\musescore.exe
set DPI=130
%MSCORE% %1.mscz -r %DPI% -o %1.png
del %1-ref.png 2>nul
rename %1-1.png %1-ref.png
module A3 where
import Control.Parallel.Strategies (rpar, runEval)
f = n1_2 + n2 + 1
where
n1 = fib 23
n2 = fib 24
fib n | n <= 1 = 1
| otherwise = fib (n-1) + fib (n-2) + 1
n1_2
=
runEval
(do n1_2 <- rpar n1
return n1_2)
/*
* Copyright (C) 2011 Red Hat, Inc.
*
* This file is released under the GPL.
*/
#ifndef _LINUX_DM_TRANSACTION_MANAGER_H
#define _LINUX_DM_TRANSACTION_MANAGER_H
#include "dm-block-manager.h"
struct dm_transaction_manager;
struct dm_space_map;
/*----------------------------------------------------------------*/
/*
* This manages the scope of a transaction. It also enforces immutability
* of the on-disk data structures by limiting access to writeable blocks.
*
* Clients should not fiddle with the block manager directly.
*/
void dm_tm_destroy(struct dm_transaction_manager *tm);
/*
* The non-blocking version of a transaction manager is intended for use in
* fast path code that needs to do lookups e.g. a dm mapping function.
* You create the non-blocking variant from a normal tm. The interface is
* the same, except that most functions will just return -EWOULDBLOCK.
* Methods that return void yet may block should not be called on a clone
* viz. dm_tm_inc, dm_tm_dec. Call dm_tm_destroy() as you would with a normal
* tm when you've finished with it. You may not destroy the original prior
* to clones.
*/
struct dm_transaction_manager *dm_tm_create_non_blocking_clone(struct dm_transaction_manager *real);
/*
* We use a 2-phase commit here.
*
* i) Make all changes for the transaction *except* for the superblock.
* Then call dm_tm_pre_commit() to flush them to disk.
*
* ii) Lock your superblock. Update. Then call dm_tm_commit() which will
* unlock the superblock and flush it. No other blocks should be updated
* during this period. Care should be taken to never unlock a partially
* updated superblock; perform any operations that could fail *before* you
* take the superblock lock.
*/
int dm_tm_pre_commit(struct dm_transaction_manager *tm);
int dm_tm_commit(struct dm_transaction_manager *tm, struct dm_block *superblock);
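/*
 * Illustrative sketch only (not part of the original header): the usual
 * two-phase commit sequence.  The function name example_commit is
 * hypothetical, and 'superblock' is assumed to already be write-locked
 * by the caller.
 */
static inline int example_commit(struct dm_transaction_manager *tm,
				 struct dm_block *superblock)
{
	int r;

	/* Phase i: flush every change except the superblock. */
	r = dm_tm_pre_commit(tm);
	if (r < 0)
		return r;

	/* Phase ii: dm_tm_commit() unlocks the superblock and flushes it. */
	return dm_tm_commit(tm, superblock);
}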
/*
* These methods are the only way to get hold of a writeable block.
*/
/*
* dm_tm_new_block() is pretty self-explanatory. Make sure you do actually
* write to the whole of @data before you unlock, otherwise you could get
* a data leak. (The other option is for tm_new_block() to zero new blocks
* before handing them out, which will be redundant in most, if not all,
* cases).
* Zeroes the new block and returns with write lock held.
*/
int dm_tm_new_block(struct dm_transaction_manager *tm,
struct dm_block_validator *v,
struct dm_block **result);
/*
* dm_tm_shadow_block() allocates a new block and copies the data from @orig
* to it. It then decrements the reference count on original block. Use
* this to update the contents of a block in a data structure, don't
* confuse this with a clone - you shouldn't access the orig block after
* this operation. Because the tm knows the scope of the transaction it
* can optimise requests for a shadow of a shadow to a no-op. Don't forget
* to unlock when you've finished with the shadow.
*
* The @inc_children flag is used to tell the caller whether it needs to
* adjust reference counts for children. (Data in the block may refer to
* other blocks.)
*
* Shadowing implicitly drops a reference on @orig so you must not have
* it locked when you call this.
*/
int dm_tm_shadow_block(struct dm_transaction_manager *tm, dm_block_t orig,
struct dm_block_validator *v,
struct dm_block **result, int *inc_children);
/*
* Read access. You can lock any block you want. If there's a write lock
* on it outstanding then it'll block.
*/
int dm_tm_read_lock(struct dm_transaction_manager *tm, dm_block_t b,
struct dm_block_validator *v,
struct dm_block **result);
int dm_tm_unlock(struct dm_transaction_manager *tm, struct dm_block *b);
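/*
 * Illustrative sketch only (not part of the original header): typical use
 * of dm_tm_shadow_block() when updating a block in place.  The function
 * name example_update_block is hypothetical.
 */
static inline int example_update_block(struct dm_transaction_manager *tm,
				       dm_block_t orig,
				       struct dm_block_validator *v)
{
	struct dm_block *shadow;
	int inc_children;
	int r;

	r = dm_tm_shadow_block(tm, orig, v, &shadow, &inc_children);
	if (r < 0)
		return r;

	/*
	 * Modify the shadow's data here; if inc_children is non-zero, the
	 * reference counts of any blocks it refers to must also be adjusted.
	 */

	return dm_tm_unlock(tm, shadow);
}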
/*
* Functions for altering the reference count of a block directly.
*/
void dm_tm_inc(struct dm_transaction_manager *tm, dm_block_t b);
void dm_tm_dec(struct dm_transaction_manager *tm, dm_block_t b);
int dm_tm_ref(struct dm_transaction_manager *tm, dm_block_t b,
uint32_t *result);
struct dm_block_manager *dm_tm_get_bm(struct dm_transaction_manager *tm);
/*
* If you're using a non-blocking clone the tm will build up a list of
* requested blocks that weren't in core. This call will request those
* blocks to be prefetched.
*/
void dm_tm_issue_prefetches(struct dm_transaction_manager *tm);
/*
* A little utility that ties the knot by producing a transaction manager
* that has a space map managed by the transaction manager...
*
* Returns a tm that has an open transaction to write the new disk sm.
* Caller should store the new sm root and commit.
*
* The superblock location is passed so the metadata space map knows it
* shouldn't be used.
*/
int dm_tm_create_with_sm(struct dm_block_manager *bm, dm_block_t sb_location,
struct dm_transaction_manager **tm,
struct dm_space_map **sm);
int dm_tm_open_with_sm(struct dm_block_manager *bm, dm_block_t sb_location,
void *sm_root, size_t root_len,
struct dm_transaction_manager **tm,
struct dm_space_map **sm);
#endif /* _LINUX_DM_TRANSACTION_MANAGER_H */
| {
"pile_set_name": "Github"
} |
package com.snowalker;
import org.apache.catalina.connector.Connector;
import org.apache.coyote.http11.Http11NioProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.context.embedded.tomcat.TomcatConnectorCustomizer;
import org.springframework.scheduling.annotation.EnableScheduling;
@EnableScheduling
@SpringBootApplication(exclude={DataSourceAutoConfiguration.class})
public class AlipayGatewayServerBootstrap {
private static final Logger LOGGER = LoggerFactory.getLogger(AlipayGatewayServerBootstrap.class);
public static void main(String[] args) {
SpringApplication.run(AlipayGatewayServerBootstrap.class, args);
LOGGER.info("redis-distributed-lock-demo-spring启动完成......");
}
/**
* @author snowalker
* @date 2017-3-17
* @describe 优化tomcat线程数目
*/
class MyTomcatConnectorCustomizer implements TomcatConnectorCustomizer {
@Override
public void customize(Connector connector) {
Http11NioProtocol protocol = (Http11NioProtocol) connector
.getProtocolHandler();
// 设置最大连接数
protocol.setMaxConnections(2000);
// 设置最大线程数
protocol.setMaxThreads(2000);
protocol.setConnectionTimeout(30000);
}
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2004-2007 The Trustees of Indiana University and Indiana
* University Research and Technology
* Corporation. All rights reserved.
* Copyright (c) 2004-2020 The University of Tennessee and The University
* of Tennessee Research Foundation. All rights
* reserved.
* Copyright (c) 2004-2008 High Performance Computing Center Stuttgart,
* University of Stuttgart. All rights reserved.
* Copyright (c) 2004-2005 The Regents of the University of California.
* All rights reserved.
* Copyright (c) 2009-2014 Cisco Systems, Inc. All rights reserved.
* Copyright (c) 2015 Research Organization for Information Science
* and Technology (RIST). All rights reserved.
* $COPYRIGHT$
*
* Additional copyrights may follow
*
* $HEADER$
*/
#include "ompi_config.h"
#include <stdio.h>
#include "ompi/mpi/c/bindings.h"
#include "ompi/runtime/params.h"
#include "ompi/communicator/communicator.h"
#include "ompi/errhandler/errhandler.h"
#include "ompi/op/op.h"
#if OMPI_BUILD_MPI_PROFILING
#if OPAL_HAVE_WEAK_SYMBOLS
#pragma weak MPI_Op_commutative = PMPI_Op_commutative
#endif
#define MPI_Op_commutative PMPI_Op_commutative
#endif
static const char FUNC_NAME[] = "MPI_Op_commutative";
int MPI_Op_commutative(MPI_Op op, int *commute)
{
OPAL_CR_NOOP_PROGRESS();
/* Error checking */
if (MPI_PARAM_CHECK) {
OMPI_ERR_INIT_FINALIZE(FUNC_NAME);
if (NULL == op || MPI_OP_NULL == op) {
return OMPI_ERRHANDLER_NOHANDLE_INVOKE(MPI_ERR_OP,
FUNC_NAME);
}
if (NULL == commute) {
return OMPI_ERRHANDLER_NOHANDLE_INVOKE(MPI_ERR_ARG,
FUNC_NAME);
}
}
/* We have a valid op, get the flag */
*commute = ompi_op_is_commute(op);
/* All done */
return MPI_SUCCESS;
}
| {
"pile_set_name": "Github"
} |
package com.jcandksolutions.gradle.androidunittest
import com.android.build.gradle.api.ApplicationVariant
import com.android.build.gradle.api.BaseVariant
import org.junit.Before
import org.junit.Test
import static org.fest.assertions.api.Assertions.assertThat
import static org.mockito.Mockito.mock
public class AppHandlerTest {
private AppHandler mTarget
private MockProvider mProvider
private VariantWrapper mWrapper
@Before
public void setUp() {
mProvider = new MockProvider()
mWrapper = mProvider.provideAppVariantWrapper(null)
mTarget = new AppHandler(mProvider)
}
@Test
public void testIsVariantInvalid() {
BaseVariant variant = mock(BaseVariant.class)
assertThat(mTarget.isVariantInvalid(variant)).isFalse()
}
@Test
public void testCreateVariantWrapper() {
ApplicationVariant variant = mock(ApplicationVariant.class)
VariantWrapper wrapper = mTarget.createVariantWrapper(variant)
assertThat(wrapper).isEqualTo(mWrapper)
}
}
| {
"pile_set_name": "Github"
} |
#include "libm.h"
#if LDBL_MANT_DIG == 53 && LDBL_MAX_EXP == 1024
long double complex cacoshl(long double complex z)
{
return cacosh(z);
}
#else
long double complex cacoshl(long double complex z)
{
z = cacosl(z);
return CMPLXL(-cimagl(z), creall(z));
}
#endif
| {
"pile_set_name": "Github"
} |
# Layer Geometry
In Chapter 2 we looked at the backing image of a layer and at some of the properties that control a layer's coordinates and rotation. In this chapter we look at how a layer's position and size are determined relative to its parent and sibling layers. We will also cover how to manage a layer's geometry and how it is affected by autoresizing and autolayout.
## Layout
`UIView` has three important layout properties: `frame`, `bounds`, and `center`; `CALayer` has equivalents called `frame`, `bounds`, and `position`. To distinguish them clearly, layers use `position` and views use `center`, but they both represent the same value.
`frame` describes the layer's external coordinates (that is, the space it occupies within its parent layer), and `bounds` describes its internal coordinates ({0, 0} is normally the top-left corner). `center` and `position` both specify the location of the layer's `anchorPoint` relative to the parent layer. The `anchorPoint` property is covered later in this chapter; for now, just think of it as the center of the layer.

A view's `frame`, `bounds`, and `center` properties are really just accessors: when you manipulate a view's `frame`, you are actually changing the `frame` of the `CALayer` underneath it, so you cannot change a view's `frame` independently of its layer.
Remember that when you apply a transform to a layer, such as a rotation or a scale, the frame actually represents the axis-aligned rectangle that covers the whole transformed layer, which means the frame's width and height may no longer match those of the bounds.

## The Anchor Point
As mentioned earlier, a view's center property and a layer's position property both specify the location of the anchorPoint relative to the parent layer. **A layer's anchorPoint controls where its frame is positioned via position; you can think of the anchorPoint as the handle used to move the layer around.**
By default the anchorPoint sits at the midpoint of the layer, so the layer is placed centered on that point. The anchorPoint property is not exposed through the UIView interface, which is why the view's equivalent of position is called "center". A layer's anchorPoint can be moved, though; for example, if you place it at the top-left corner of the layer's frame, the layer's content shifts down and to the right relative to position (Figure 3.3) instead of being centered.

Like the contentsRect and contentsCenter properties from Chapter 2, anchorPoint is specified in unit coordinates, that is, coordinates relative to the layer itself: the top-left corner of the layer is {0, 0}, the bottom-right corner is {1, 1}, and the default value is {0.5, 0.5}. The anchorPoint can even be placed outside the layer's bounds by giving it x and y values smaller than 0 or greater than 1.
So when would you ever need to change the anchorPoint? Since we can already place the layer anywhere we like using position, wouldn't moving the anchorPoint just cause confusion? To show where it is useful, let's build a practical example: a project that simulates an analog clock.
The clock face and hands are made up of four images (Figure 3.4). To keep things simple we load and display them in the traditional way, using four UIImageView instances (you could equally well use plain views and set their layers' contents images).
```
@interface ViewController ()
@property (nonatomic, weak) IBOutlet UIImageView *hourHand;
@property (nonatomic, weak) IBOutlet UIImageView *minuteHand;
@property (nonatomic, weak) IBOutlet UIImageView *secondHand;
@property (nonatomic, weak) NSTimer *timer;
@end
@implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];
//start timer
self.timer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(tick) userInfo:nil repeats:YES];

//set initial hand positions
[self tick];
}
- (void)tick
{
//convert time to hours, minutes and seconds
NSCalendar *calendar = [[NSCalendar alloc] initWithCalendarIdentifier:NSGregorianCalendar];
NSUInteger units = NSHourCalendarUnit | NSMinuteCalendarUnit | NSSecondCalendarUnit;
NSDateComponents *components = [calendar components:units fromDate:[NSDate date]];
CGFloat hoursAngle = (components.hour / 12.0) * M_PI * 2.0;
//calculate hour hand angle //calculate minute hand angle
CGFloat minsAngle = (components.minute / 60.0) * M_PI * 2.0;
//calculate second hand angle
CGFloat secsAngle = (components.second / 60.0) * M_PI * 2.0;
//rotate hands
self.hourHand.transform = CGAffineTransformMakeRotation(hoursAngle);
self.minuteHand.transform = CGAffineTransformMakeRotation(minsAngle);
self.secondHand.transform = CGAffineTransformMakeRotation(secsAngle);
}
@end
```

```
- (void)viewDidLoad
{
[super viewDidLoad];
// adjust anchor points
self.secondHand.layer.anchorPoint = CGPointMake(0.5f, 0.9f);
self.minuteHand.layer.anchorPoint = CGPointMake(0.5f, 0.9f);
self.hourHand.layer.anchorPoint = CGPointMake(0.5f, 0.9f);
// start timer
}
```

## Coordinate Systems
Like views, layers are positioned hierarchically in the layer tree relative to their parents. A layer's position depends on its parent's bounds, so if the parent layer moves, all of its sublayers move with it.
This makes positioning layers convenient, because you can move a root layer and its sublayers follow as a single unit. Sometimes, though, you need to know a layer's absolute position, or its position relative to another layer rather than to its current parent.
CALayer provides some utility methods for converting between the coordinate systems of different layers:
```
- (CGPoint)convertPoint:(CGPoint)point fromLayer:(CALayer *)layer;
- (CGPoint)convertPoint:(CGPoint)point toLayer:(CALayer *)layer;
- (CGRect)convertRect:(CGRect)rect fromLayer:(CALayer *)layer;
- (CGRect)convertRect:(CGRect)rect toLayer:(CALayer *)layer;
```
These methods let you convert a point or rectangle defined in one layer's coordinate system into the coordinate system of another layer.
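For example, assuming two sibling layers stored in hypothetical variables `layerA` and `layerB`, a point defined in `layerA`'s coordinate system could be converted into `layerB`'s coordinate system like this (a minimal sketch, not code from the book's sample project):
```
//convert a point from layerA's coordinates into layerB's coordinates
CGPoint point = CGPointMake(10.0f, 10.0f);
point = [layerB convertPoint:point fromLayer:layerA];
```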
### Flipped Geometry
On iOS a layer's position is conventionally measured relative to the top-left corner of its parent layer, whereas on Mac OS it is usually measured relative to the bottom-left corner. Core Animation can accommodate both conventions through the geometryFlipped property, a BOOL that determines whether a layer's coordinates are vertically flipped relative to its parent. Setting it to YES on iOS means the layer's sublayers are flipped vertically, that is, laid out from the bottom of the layer rather than the usual top (and so are all of their sublayers in turn, unless their own geometryFlipped properties are also set to YES).
### The Z Axis
*Unlike UIView, which is strictly two-dimensional, CALayer exists in three-dimensional space.* In addition to the position and anchorPoint properties we have already discussed, CALayer has two more: zPosition and anchorPointZ, both floating-point values that describe the layer's position on the Z axis.
Note that there is no depth counterpart to the width and height of the bounds: a layer is a fundamentally flat object. You can think of layers as stiff two-dimensional sheets of paper that are glued together to build hollow, origami-like three-dimensional structures.
The *zPosition* property is not used much in practice. In Chapter 5 we will meet CATransform3D and see how to move and rotate layers in three dimensions. Outside of transforms, the most useful thing zPosition can do is change the display order of your layers.

```
@interface ViewController ()
@property (nonatomic, weak) IBOutlet UIView *greenView;
@property (nonatomic, weak) IBOutlet UIView *redView;
@end
@implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];

//move the green view zPosition nearer to the camera
self.greenView.layer.zPosition = 1.0f;
}
@end
```
## Hit Testing
Chapter 1, "The Layer Tree," made the case for using layer-backed views rather than standalone layer hierarchies. One of the reasons is the extra complexity involved in handling touch events.
`CALayer` knows nothing about the responder chain, so it cannot handle touch events or gestures directly. It does, however, provide a couple of methods to help you implement touch handling yourself: `-containsPoint:` and `-hitTest:`.
`-containsPoint:` takes a CGPoint in the layer's own coordinate system and returns YES if the point lies inside the layer's frame. Listing 3.4 shows a suitably adapted version of the Chapter 1 project that uses -containsPoint: to determine whether the white or the blue layer was touched. This requires converting the touch position into each layer's coordinate system in turn, which quickly becomes inconvenient.
```
@interface ViewController ()
@property (nonatomic, weak) IBOutlet UIView *layerView;
@property (nonatomic, weak) CALayer *blueLayer;
@end
@implementation ViewController
- (void)viewDidLoad
{
[super viewDidLoad];
//create sublayer
self.blueLayer = [CALayer layer];
self.blueLayer.frame = CGRectMake(50.0f, 50.0f, 100.0f, 100.0f);
self.blueLayer.backgroundColor = [UIColor blueColor].CGColor;
//add it to our view
[self.layerView.layer addSublayer:self.blueLayer];
}
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
//get touch position relative to main view
CGPoint point = [[touches anyObject] locationInView:self.view];
//convert point to the white layer's coordinates
point = [self.layerView.layer convertPoint:point fromLayer:self.view.layer];
//get layer using containsPoint:
if ([self.layerView.layer containsPoint:point]) {
//convert point to blueLayer’s coordinates
point = [self.blueLayer convertPoint:point fromLayer:self.layerView.layer];
if ([self.blueLayer containsPoint:point]) {
[[[UIAlertView alloc] initWithTitle:@"Inside Blue Layer"
message:nil
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
} else {
[[[UIAlertView alloc] initWithTitle:@"Inside White Layer"
message:nil
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
}
}
}
@end
```
The `-hitTest:` method also takes a CGPoint, but instead of a BOOL it returns either the layer itself or the deepest descendant layer that contains the point. This means you do not have to convert the point and test it against each sublayer by hand, as you do with -containsPoint:. If the point lies outside the outermost layer's bounds, it returns nil.
```
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
{
//get touch position
CGPoint point = [[touches anyObject] locationInView:self.view];
//get touched layer
CALayer *layer = [self.layerView.layer hitTest:point];
//get layer using hitTest
if (layer == self.blueLayer) {
[[[UIAlertView alloc] initWithTitle:@"Inside Blue Layer"
message:nil
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
} else if (layer == self.layerView.layer) {
[[[UIAlertView alloc] initWithTitle:@"Inside White Layer"
message:nil
delegate:nil
cancelButtonTitle:@"OK"
otherButtonTitles:nil] show];
}
}
```
Note that when you call a layer's -hitTest: method, the testing order depends strictly on the order of the layers within the layer tree (just as it does for UIView event handling). The zPosition property discussed earlier can visibly change the front-to-back order of layers on screen, but it does not change the order in which events are processed.
This means that if you change a layer's z-order, you may find that you cannot detect touches on the layer that appears frontmost, because for hit-testing purposes it is obscured by a layer with a lower zPosition that comes ahead of it in the layer tree order. We discuss this problem in more detail in Chapter 5.
## Automatic Layout
On Mac OS, `CALayer` has a `layoutManager` property that supports automatic layout via the `CALayoutManager` protocol and the `CAConstraintLayoutManager` class. For whatever reason, this is not available on iOS.
When you use views you can take full advantage of the `UIViewAutoresizingMask` and `NSLayoutConstraint` APIs exposed by the UIView class, but if you want to control the layout of arbitrary CALayers you have to do it by hand. The simplest approach is to implement the following CALayerDelegate method:
```
- (void)layoutSublayersOfLayer:(CALayer *)layer;
```
This method is called whenever the layer's bounds change or its -setNeedsLayout method is called. It gives you the opportunity to reposition and resize your sublayers programmatically, but it does not provide any way to make them adapt automatically on screen rotation the way UIView's autoresizingMask and constraints properties do.
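As a minimal sketch (reusing the `blueLayer` sublayer from the earlier hit-testing example, and assuming the layer's delegate has been wired up), the delegate could keep that sublayer centered whenever the host layer is laid out:
```
- (void)layoutSublayersOfLayer:(CALayer *)layer
{
    //keep blueLayer centered within its superlayer
    self.blueLayer.position = CGPointMake(layer.bounds.size.width / 2.0f,
                                          layer.bounds.size.height / 2.0f);
}
```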
This is another important reason to prefer building your apps with views rather than with hosted standalone layers. | {
"pile_set_name": "Github"
} |
//
// Take.swift
// RxSwift
//
// Created by Krunoslav Zaher on 6/12/15.
// Copyright © 2015 Krunoslav Zaher. All rights reserved.
//
import Foundation
// count version
class TakeCountSink<ElementType, O: ObserverType where O.E == ElementType> : Sink<O>, ObserverType {
typealias Parent = TakeCount<ElementType>
typealias E = ElementType
private let _parent: Parent
private var _remaining: Int
init(parent: Parent, observer: O) {
_parent = parent
_remaining = parent._count
super.init(observer: observer)
}
func on(event: Event<E>) {
switch event {
case .Next(let value):
if _remaining > 0 {
_remaining -= 1
forwardOn(.Next(value))
if _remaining == 0 {
forwardOn(.Completed)
dispose()
}
}
case .Error:
forwardOn(event)
dispose()
case .Completed:
forwardOn(event)
dispose()
}
}
}
class TakeCount<Element>: Producer<Element> {
private let _source: Observable<Element>
private let _count: Int
init(source: Observable<Element>, count: Int) {
if count < 0 {
rxFatalError("count can't be negative")
}
_source = source
_count = count
}
override func run<O : ObserverType where O.E == Element>(observer: O) -> Disposable {
let sink = TakeCountSink(parent: self, observer: observer)
sink.disposable = _source.subscribe(sink)
return sink
}
}
// time version
class TakeTimeSink<ElementType, O: ObserverType where O.E == ElementType>
: Sink<O>
, LockOwnerType
, ObserverType
, SynchronizedOnType {
typealias Parent = TakeTime<ElementType>
typealias E = ElementType
private let _parent: Parent
let _lock = NSRecursiveLock()
init(parent: Parent, observer: O) {
_parent = parent
super.init(observer: observer)
}
func on(event: Event<E>) {
synchronizedOn(event)
}
func _synchronized_on(event: Event<E>) {
switch event {
case .Next(let value):
forwardOn(.Next(value))
case .Error:
forwardOn(event)
dispose()
case .Completed:
forwardOn(event)
dispose()
}
}
func tick() {
_lock.lock(); defer { _lock.unlock() }
forwardOn(.Completed)
dispose()
}
func run() -> Disposable {
let disposeTimer = _parent._scheduler.scheduleRelative((), dueTime: _parent._duration) {
self.tick()
return NopDisposable.instance
}
let disposeSubscription = _parent._source.subscribe(self)
return BinaryDisposable(disposeTimer, disposeSubscription)
}
}
class TakeTime<Element> : Producer<Element> {
typealias TimeInterval = RxTimeInterval
private let _source: Observable<Element>
private let _duration: TimeInterval
private let _scheduler: SchedulerType
init(source: Observable<Element>, duration: TimeInterval, scheduler: SchedulerType) {
_source = source
_scheduler = scheduler
_duration = duration
}
override func run<O : ObserverType where O.E == Element>(observer: O) -> Disposable {
let sink = TakeTimeSink(parent: self, observer: observer)
sink.disposable = sink.run()
return sink
}
} | {
"pile_set_name": "Github"
} |
(NOTE: mmltex.xsl was modified slightly to put an `\equation` tag around
mathml that is in a paragraph by itself, as well as to allow labels for
equations -- Fletcher T. Penney)
README for the XSLT MathML Library
MultiMarkdown Version - 2.0.b6
XSLT MathML Library is a set of XSLT stylesheets to transform
MathML 2.0 to LaTeX.
For more information, see
http://www.raleigh.ru/MathML/mmltex/index.php?lang=en
Manifest
--------
README this file
mmltex.xsl
tokens.xsl
glayout.xsl
scripts.xsl
tables.xsl
entities.xsl
cmarkup.xsl
Use
---
There are two ways of using the library:
* Use a local copy of the library.
1. Download the distribution (see below).
2. Unpack the distribution, using unzip.
3. In your stylesheet import or include either the main
stylesheet, mmltex.xsl, or the stylesheet module you
wish to use, such as tokens.xsl. This example assumes
that the distribution has been extracted into the same
directory as your own stylesheet:
<xsl:import href="mmltex.xsl"/>
* Import or include either the main stylesheet, or the
stylesheet module you wish to use, directly from the library
website; http://www.raleigh.ru/MathML/mmltex/. For example:
<xsl:import href="http://www.raleigh.ru/MathML/mmltex/mmltex.xsl"/>
Obtaining The Library
---------------------
The XSLT MathML Library is available for download as:
* Zip file: http://www.raleigh.ru/MathML/mmltex/xsltml_2.1.2.zip
Copyright
---------
Copyright (C) 2001-2003 Vasil Yaroshevich
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the ``Software''), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
Except as contained in this notice, the names of individuals
credited with contribution to this software shall not be used in
advertising or otherwise to promote the sale, use or other
dealings in this Software without prior written authorization
from the individuals in question.
Any stylesheet derived from this Software that is publically
distributed will be identified with a different name and the
version strings in any derived Software will be changed so that
no possibility of confusion between the derived package and this
Software will exist.
Warranty
--------
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL NORMAN WALSH OR ANY OTHER
CONTRIBUTOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Contacting the Author
---------------------
These stylesheets are maintained by Vasil Yaroshevich, <[email protected]>.
| {
"pile_set_name": "Github"
} |
{{- if .Values.podSecurityPolicy.enabled}}
apiVersion: extensions/v1beta1
kind: PodSecurityPolicy
metadata:
name: {{ template "nginx-ingress.fullname" . }}
labels:
app: {{ template "nginx-ingress.name" . }}
chart: {{ .Chart.Name }}-{{ .Chart.Version }}
heritage: {{ .Release.Service }}
release: {{ .Release.Name }}
spec:
allowedCapabilities:
- NET_BIND_SERVICE
privileged: false
allowPrivilegeEscalation: true
# Allow core volume types.
volumes:
- 'configMap'
#- 'emptyDir'
#- 'projected'
- 'secret'
#- 'downwardAPI'
hostNetwork: {{ .Values.controller.hostNetwork }}
hostIPC: false
hostPID: false
runAsUser:
# Require the container to run without root privileges.
rule: 'MustRunAsNonRoot'
supplementalGroups:
rule: 'MustRunAs'
ranges:
# Forbid adding the root group.
- min: 1
max: 65535
fsGroup:
rule: 'MustRunAs'
ranges:
# Forbid adding the root group.
- min: 1
max: 65535
readOnlyRootFilesystem: false
seLinux:
rule: 'RunAsAny'
hostPorts:
- max: 65535
min: 1
{{- end }}
| {
"pile_set_name": "Github"
} |
// (C) Copyright Edward Diener 2012,2013
// Use, modification and distribution are subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt).
#if !defined(BOOST_TTI_DETAIL_LAMBDA_HPP)
#define BOOST_TTI_DETAIL_LAMBDA_HPP
#include <boost/mpl/or.hpp>
#include <boost/tti/detail/dmetafunc.hpp>
#include <boost/tti/detail/dplaceholder.hpp>
#include <boost/tti/gen/namespace_gen.hpp>
namespace boost
{
namespace tti
{
namespace detail
{
template <class BOOST_TTI_DETAIL_TP_T>
struct is_lambda_expression :
boost::mpl::or_
<
BOOST_TTI_NAMESPACE::detail::is_metafunction_class<BOOST_TTI_DETAIL_TP_T>,
BOOST_TTI_NAMESPACE::detail::is_placeholder_expression<BOOST_TTI_DETAIL_TP_T>
>
{
};
}
}
}
#endif // BOOST_TTI_DETAIL_LAMBDA_HPP
| {
"pile_set_name": "Github"
} |
opam-version: "2.0"
maintainer: "[email protected]"
authors: ["Jane Street Group, LLC <[email protected]>"]
homepage: "https://github.com/janestreet/splittable_random"
bug-reports: "https://github.com/janestreet/splittable_random/issues"
dev-repo: "git+https://github.com/janestreet/splittable_random.git"
doc: "https://ocaml.janestreet.com/ocaml-core/latest/doc/splittable_random/index.html"
license: "MIT"
build: [
["dune" "build" "-p" name "-j" jobs]
]
depends: [
"ocaml" {>= "4.04.2"}
"base" {>= "v0.13" & < "v0.14"}
"ppx_assert" {>= "v0.13" & < "v0.14"}
"ppx_bench" {>= "v0.13" & < "v0.14"}
"ppx_inline_test" {>= "v0.13" & < "v0.14"}
"ppx_sexp_message" {>= "v0.13" & < "v0.14"}
"dune" {>= "1.5.1"}
]
synopsis: "PRNG that can be split into independent streams"
description: "
PRNG that can be split into independent streams
A splittable pseudo-random number generator (SPRNG) functions like a PRNG in that it can
be used as a stream of random values; it can also be \"split\" to produce a second,
independent stream of random values.
This library implements a splittable pseudo-random number generator that sacrifices
cryptographic-quality randomness in favor of performance.
"
url {
src: "https://ocaml.janestreet.com/ocaml-core/v0.13/files/splittable_random-v0.13.0.tar.gz"
checksum: "md5=3894b41a18a6b011c3b7e32e0d645044"
}
| {
"pile_set_name": "Github"
} |
**Added:**
* R2S example files
**Changed:**
* Update r2s documentation. Add example usage.
**Deprecated:** None
**Removed:** None
**Fixed:** None
**Security:** None
| {
"pile_set_name": "Github"
} |
sha256:585525751159a976347e340731b48b887ad3464942ee4b2f7af8d7f4c4b6fbeb
| {
"pile_set_name": "Github"
} |
/*
* /MathJax/localization/pt/pt.js
*
* Copyright (c) 2009-2018 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
MathJax.Localization.addTranslation("pt",null,{menuTitle:"portugu\u00EAs",version:"2.7.5",isLoaded:true,domains:{_:{version:"2.7.5",isLoaded:true,strings:{MathProcessingError:"Erro no processamento das f\u00F3rmulas",MathError:"Erro de matem\u00E1tica",LoadFile:"A carregar %1",Loading:"A carregar",LoadFailed:"O ficheiro n\u00E3o pode ser carregado: %1",ProcessMath:"A processar f\u00F3rmula: %1%%",Processing:"A processar",TypesetMath:"A formatar f\u00F3rmulas: %1%%",Typesetting:"A formatar",MathJaxNotSupported:"O seu navegador n\u00E3o suporta MathJax",ErrorTips:"Dicas de depura\u00E7\u00E3o: use %%1, para inspecionar %%2 no console do navegador"}},FontWarnings:{},"HTML-CSS":{},HelpDialog:{},MathML:{},MathMenu:{},TeX:{}},plural:function(a){if(a===1){return 1}return 2},number:function(a){return String(a).replace(".",",")}});MathJax.Ajax.loadComplete("[MathJax]/localization/pt/pt.js");
| {
"pile_set_name": "Github"
} |
var convert = require('./convert'),
func = convert('stubObject', require('../stubObject'), require('./_falseOptions'));
func.placeholder = require('./placeholder');
module.exports = func;
| {
"pile_set_name": "Github"
} |
module.exports = require('./flowRight');
| {
"pile_set_name": "Github"
} |
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build gccgo,linux,sparc64
package unix
import "syscall"
//extern sysconf
func realSysconf(name int) int64
func sysconf(name int) (n int64, err syscall.Errno) {
r := realSysconf(name)
if r < 0 {
return 0, syscall.GetErrno()
}
return r, 0
}
| {
"pile_set_name": "Github"
} |
{
"Resources": {
"testlambdaelasticsearchkibanaLambdaFunctionServiceRole3AFFEAA2": {
"Type": "AWS::IAM::Role",
"Properties": {
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": "sts:AssumeRole",
"Effect": "Allow",
"Principal": {
"Service": "lambda.amazonaws.com"
}
}
],
"Version": "2012-10-17"
},
"Policies": [
{
"PolicyDocument": {
"Statement": [
{
"Action": [
"logs:CreateLogGroup",
"logs:CreateLogStream",
"logs:PutLogEvents"
],
"Effect": "Allow",
"Resource": {
"Fn::Join": [
"",
[
"arn:",
{
"Ref": "AWS::Partition"
},
":logs:",
{
"Ref": "AWS::Region"
},
":",
{
"Ref": "AWS::AccountId"
},
":log-group:/aws/lambda/*"
]
]
}
}
],
"Version": "2012-10-17"
},
"PolicyName": "LambdaFunctionServiceRolePolicy"
}
]
}
},
"testlambdaelasticsearchkibanaLambdaFunctionServiceRoleDefaultPolicy199413EB": {
"Type": "AWS::IAM::Policy",
"Properties": {
"PolicyDocument": {
"Statement": [
{
"Action": [
"xray:PutTraceSegments",
"xray:PutTelemetryRecords"
],
"Effect": "Allow",
"Resource": "*"
}
],
"Version": "2012-10-17"
},
"PolicyName": "testlambdaelasticsearchkibanaLambdaFunctionServiceRoleDefaultPolicy199413EB",
"Roles": [
{
"Ref": "testlambdaelasticsearchkibanaLambdaFunctionServiceRole3AFFEAA2"
}
]
},
"Metadata": {
"cfn_nag": {
"rules_to_suppress": [
{
"id": "W12",
"reason": "Lambda needs the following minimum required permissions to send trace data to X-Ray."
}
]
}
}
},
"testlambdaelasticsearchkibanaLambdaFunction601D26D3": {
"Type": "AWS::Lambda::Function",
"Properties": {
"Code": {
"S3Bucket": {
"Ref": "AssetParameters67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682dbS3BucketBAF5BF3A"
},
"S3Key": {
"Fn::Join": [
"",
[
{
"Fn::Select": [
0,
{
"Fn::Split": [
"||",
{
"Ref": "AssetParameters67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682dbS3VersionKeyADB3CCA3"
}
]
}
]
},
{
"Fn::Select": [
1,
{
"Fn::Split": [
"||",
{
"Ref": "AssetParameters67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682dbS3VersionKeyADB3CCA3"
}
]
}
]
}
]
]
}
},
"Handler": "index.handler",
"Role": {
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaLambdaFunctionServiceRole3AFFEAA2",
"Arn"
]
},
"Runtime": "nodejs12.x",
"Environment": {
"Variables": {
"AWS_NODEJS_CONNECTION_REUSE_ENABLED": "1",
"DOMAIN_ENDPOINT": {
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaElasticsearchDomain50D5F86E",
"DomainEndpoint"
]
}
}
},
"TracingConfig": {
"Mode": "Active"
}
},
"DependsOn": [
"testlambdaelasticsearchkibanaLambdaFunctionServiceRoleDefaultPolicy199413EB",
"testlambdaelasticsearchkibanaLambdaFunctionServiceRole3AFFEAA2"
],
"Metadata": {
"cfn_nag": {
"rules_to_suppress": [
{
"id": "W58",
"reason": "Lambda functions has the required permission to write CloudWatch Logs. It uses custom policy instead of arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole with more tighter permissions."
}
]
}
}
},
"testlambdaelasticsearchkibanaCognitoUserPool9537802B": {
"Type": "AWS::Cognito::UserPool",
"Properties": {
"AccountRecoverySetting": {
"RecoveryMechanisms": [
{
"Name": "verified_phone_number",
"Priority": 1
},
{
"Name": "verified_email",
"Priority": 2
}
]
},
"AdminCreateUserConfig": {
"AllowAdminCreateUserOnly": true
},
"EmailVerificationMessage": "The verification code to your new account is {####}",
"EmailVerificationSubject": "Verify your new account",
"SmsVerificationMessage": "The verification code to your new account is {####}",
"UserPoolAddOns": {
"AdvancedSecurityMode": "ENFORCED"
},
"VerificationMessageTemplate": {
"DefaultEmailOption": "CONFIRM_WITH_CODE",
"EmailMessage": "The verification code to your new account is {####}",
"EmailSubject": "Verify your new account",
"SmsMessage": "The verification code to your new account is {####}"
}
}
},
"testlambdaelasticsearchkibanaCognitoUserPoolClient8F70A2AA": {
"Type": "AWS::Cognito::UserPoolClient",
"Properties": {
"UserPoolId": {
"Ref": "testlambdaelasticsearchkibanaCognitoUserPool9537802B"
},
"AllowedOAuthFlows": [
"implicit",
"code"
],
"AllowedOAuthFlowsUserPoolClient": true,
"AllowedOAuthScopes": [
"profile",
"phone",
"email",
"openid",
"aws.cognito.signin.user.admin"
],
"CallbackURLs": [
"https://example.com"
],
"SupportedIdentityProviders": [
"COGNITO"
]
}
},
"testlambdaelasticsearchkibanaCognitoIdentityPoolC48068F0": {
"Type": "AWS::Cognito::IdentityPool",
"Properties": {
"AllowUnauthenticatedIdentities": false,
"CognitoIdentityProviders": [
{
"ClientId": {
"Ref": "testlambdaelasticsearchkibanaCognitoUserPoolClient8F70A2AA"
},
"ProviderName": {
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaCognitoUserPool9537802B",
"ProviderName"
]
},
"ServerSideTokenCheck": true
}
]
}
},
"testlambdaelasticsearchkibanaUserPoolDomainB9BDF063": {
"Type": "AWS::Cognito::UserPoolDomain",
"Properties": {
"Domain": "myconstructsdomain",
"UserPoolId": {
"Ref": "testlambdaelasticsearchkibanaCognitoUserPool9537802B"
}
},
"DependsOn": [
"testlambdaelasticsearchkibanaCognitoUserPool9537802B"
]
},
"testlambdaelasticsearchkibanaCognitoAuthorizedRole88FAFCFA": {
"Type": "AWS::IAM::Role",
"Properties": {
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": "sts:AssumeRoleWithWebIdentity",
"Condition": {
"StringEquals": {
"cognito-identity.amazonaws.com:aud": {
"Ref": "testlambdaelasticsearchkibanaCognitoIdentityPoolC48068F0"
}
},
"ForAnyValue:StringLike": {
"cognito-identity.amazonaws.com:amr": "authenticated"
}
},
"Effect": "Allow",
"Principal": {
"Federated": "cognito-identity.amazonaws.com"
}
}
],
"Version": "2012-10-17"
},
"Policies": [
{
"PolicyDocument": {
"Statement": [
{
"Action": "es:ESHttp*",
"Effect": "Allow",
"Resource": {
"Fn::Join": [
"",
[
"arn:",
{
"Ref": "AWS::Partition"
},
":es:",
{
"Ref": "AWS::Region"
},
":",
{
"Ref": "AWS::AccountId"
},
":domain/myconstructsdomain/*"
]
]
}
}
],
"Version": "2012-10-17"
},
"PolicyName": "CognitoAccessPolicy"
}
]
}
},
"testlambdaelasticsearchkibanaIdentityPoolRoleMappingBD0A239B": {
"Type": "AWS::Cognito::IdentityPoolRoleAttachment",
"Properties": {
"IdentityPoolId": {
"Ref": "testlambdaelasticsearchkibanaCognitoIdentityPoolC48068F0"
},
"Roles": {
"authenticated": {
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaCognitoAuthorizedRole88FAFCFA",
"Arn"
]
}
}
}
},
"testlambdaelasticsearchkibanaCognitoKibanaConfigureRole8F40C1A1": {
"Type": "AWS::IAM::Role",
"Properties": {
"AssumeRolePolicyDocument": {
"Statement": [
{
"Action": "sts:AssumeRole",
"Effect": "Allow",
"Principal": {
"Service": "es.amazonaws.com"
}
}
],
"Version": "2012-10-17"
}
}
},
"testlambdaelasticsearchkibanaCognitoKibanaConfigureRolePolicyB7090E91": {
"Type": "AWS::IAM::Policy",
"Properties": {
"PolicyDocument": {
"Statement": [
{
"Action": [
"cognito-idp:DescribeUserPool",
"cognito-idp:CreateUserPoolClient",
"cognito-idp:DeleteUserPoolClient",
"cognito-idp:DescribeUserPoolClient",
"cognito-idp:AdminInitiateAuth",
"cognito-idp:AdminUserGlobalSignOut",
"cognito-idp:ListUserPoolClients",
"cognito-identity:DescribeIdentityPool",
"cognito-identity:UpdateIdentityPool",
"cognito-identity:SetIdentityPoolRoles",
"cognito-identity:GetIdentityPoolRoles",
"es:UpdateElasticsearchDomainConfig"
],
"Effect": "Allow",
"Resource": [
{
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaCognitoUserPool9537802B",
"Arn"
]
},
{
"Fn::Join": [
"",
[
"arn:aws:cognito-identity:",
{
"Ref": "AWS::Region"
},
":",
{
"Ref": "AWS::AccountId"
},
":identitypool/",
{
"Ref": "testlambdaelasticsearchkibanaCognitoIdentityPoolC48068F0"
}
]
]
},
{
"Fn::Join": [
"",
[
"arn:aws:es:",
{
"Ref": "AWS::Region"
},
":",
{
"Ref": "AWS::AccountId"
},
":domain/myconstructsdomain"
]
]
}
]
},
{
"Action": "iam:PassRole",
"Condition": {
"StringLike": {
"iam:PassedToService": "cognito-identity.amazonaws.com"
}
},
"Effect": "Allow",
"Resource": {
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaCognitoKibanaConfigureRole8F40C1A1",
"Arn"
]
}
}
],
"Version": "2012-10-17"
},
"PolicyName": "testlambdaelasticsearchkibanaCognitoKibanaConfigureRolePolicyB7090E91",
"Roles": [
{
"Ref": "testlambdaelasticsearchkibanaCognitoKibanaConfigureRole8F40C1A1"
}
]
}
},
"testlambdaelasticsearchkibanaElasticsearchDomain50D5F86E": {
"Type": "AWS::Elasticsearch::Domain",
"Properties": {
"AccessPolicies": {
"Statement": [
{
"Action": "es:ESHttp*",
"Effect": "Allow",
"Principal": {
"AWS": [
{
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaCognitoAuthorizedRole88FAFCFA",
"Arn"
]
},
{
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaLambdaFunctionServiceRole3AFFEAA2",
"Arn"
]
}
]
},
"Resource": {
"Fn::Join": [
"",
[
"arn:aws:es:",
{
"Ref": "AWS::Region"
},
":",
{
"Ref": "AWS::AccountId"
},
":domain/myconstructsdomain/*"
]
]
}
}
],
"Version": "2012-10-17"
},
"CognitoOptions": {
"Enabled": true,
"IdentityPoolId": {
"Ref": "testlambdaelasticsearchkibanaCognitoIdentityPoolC48068F0"
},
"RoleArn": {
"Fn::GetAtt": [
"testlambdaelasticsearchkibanaCognitoKibanaConfigureRole8F40C1A1",
"Arn"
]
},
"UserPoolId": {
"Ref": "testlambdaelasticsearchkibanaCognitoUserPool9537802B"
}
},
"DomainName": "myconstructsdomain",
"EBSOptions": {
"EBSEnabled": true,
"VolumeSize": 10
},
"ElasticsearchClusterConfig": {
"DedicatedMasterCount": 3,
"DedicatedMasterEnabled": true,
"InstanceCount": 3,
"ZoneAwarenessConfig": {
"AvailabilityZoneCount": 3
},
"ZoneAwarenessEnabled": true
},
"ElasticsearchVersion": "6.3",
"EncryptionAtRestOptions": {
"Enabled": true
},
"NodeToNodeEncryptionOptions": {
"Enabled": true
},
"SnapshotOptions": {
"AutomatedSnapshotStartHour": 1
}
},
"Metadata": {
"cfn_nag": {
"rules_to_suppress": [
{
"id": "W28",
"reason": "The ES Domain is passed dynamically as as parameter and explicitly specified to ensure that IAM policies are configured to lockdown access to this specific ES instance only"
}
]
}
}
},
"testlambdaelasticsearchkibanaStatusRedAlarmCFCDB629": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "At least one primary shard and its replicas are not allocated to a node. ",
"MetricName": "ClusterStatus.red",
"Namespace": "AWS/ES",
"Period": 60,
"Statistic": "Maximum",
"Threshold": 1
}
},
"testlambdaelasticsearchkibanaStatusYellowAlarm24B9D1CB": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "At least one replica shard is not allocated to a node.",
"MetricName": "ClusterStatus.yellow",
"Namespace": "AWS/ES",
"Period": 60,
"Statistic": "Maximum",
"Threshold": 1
}
},
"testlambdaelasticsearchkibanaFreeStorageSpaceTooLowAlarm0B4D4E35": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "LessThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "A node in your cluster is down to 20 GiB of free storage space.",
"MetricName": "FreeStorageSpace",
"Namespace": "AWS/ES",
"Period": 60,
"Statistic": "Minimum",
"Threshold": 2000
}
},
"testlambdaelasticsearchkibanaIndexWritesBlockedTooHighAlarmB8C0E99C": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "Your cluster is blocking write requests.",
"MetricName": "ClusterIndexWritesBlocked",
"Namespace": "AWS/ES",
"Period": 300,
"Statistic": "Maximum",
"Threshold": 1
}
},
"testlambdaelasticsearchkibanaAutomatedSnapshotFailureTooHighAlarm75F2676B": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "An automated snapshot failed. This failure is often the result of a red cluster health status.",
"MetricName": "AutomatedSnapshotFailure",
"Namespace": "AWS/ES",
"Period": 60,
"Statistic": "Maximum",
"Threshold": 1
}
},
"testlambdaelasticsearchkibanaCPUUtilizationTooHighAlarmF16BA5D9": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 3,
"AlarmDescription": "100% CPU utilization is not uncommon, but sustained high usage is problematic. Consider using larger instance types or adding instances.",
"MetricName": "CPUUtilization",
"Namespace": "AWS/ES",
"Period": 900,
"Statistic": "Average",
"Threshold": 80
}
},
"testlambdaelasticsearchkibanaJVMMemoryPressureTooHighAlarm18224533": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "Average JVM memory pressure over last 15 minutes too high. Consider scaling vertically.",
"MetricName": "JVMMemoryPressure",
"Namespace": "AWS/ES",
"Period": 900,
"Statistic": "Average",
"Threshold": 80
}
},
"testlambdaelasticsearchkibanaMasterCPUUtilizationTooHighAlarmE5E5999F": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 3,
"AlarmDescription": "Average CPU utilization over last 45 minutes too high. Consider using larger instance types for your dedicated master nodes.",
"MetricName": "MasterCPUUtilization",
"Namespace": "AWS/ES",
"Period": 900,
"Statistic": "Average",
"Threshold": 50
}
},
"testlambdaelasticsearchkibanaMasterJVMMemoryPressureTooHighAlarm297FF1BE": {
"Type": "AWS::CloudWatch::Alarm",
"Properties": {
"ComparisonOperator": "GreaterThanOrEqualToThreshold",
"EvaluationPeriods": 1,
"AlarmDescription": "Average JVM memory pressure over last 15 minutes too high. Consider scaling vertically.",
"MetricName": "MasterJVMMemoryPressure",
"Namespace": "AWS/ES",
"Period": 900,
"Statistic": "Average",
"Threshold": 50
}
}
},
"Parameters": {
"AssetParameters67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682dbS3BucketBAF5BF3A": {
"Type": "String",
"Description": "S3 bucket for asset \"67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682db\""
},
"AssetParameters67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682dbS3VersionKeyADB3CCA3": {
"Type": "String",
"Description": "S3 key for asset version \"67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682db\""
},
"AssetParameters67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682dbArtifactHash322F5E2F": {
"Type": "String",
"Description": "Artifact hash for asset \"67a9971e29baab2bde3043bb70ce5b53318b95429a1ce9b189cf65223e8682db\""
}
}
} | {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 420e88a40988d7a46b97ca92ebd14e54
DefaultImporter:
userData:
| {
"pile_set_name": "Github"
} |
--[[
compat_env - see README for details.
(c) 2012 David Manura. Licensed under Lua 5.1/5.2 terms (MIT license).
--]]
local M = {_TYPE='module', _NAME='compat_env', _VERSION='0.2.2.20120406'}
local function check_chunk_type(s, mode)
local nmode = mode or 'bt'
local is_binary = s and #s > 0 and s:byte(1) == 27
if is_binary and not nmode:match'b' then
return nil, ("attempt to load a binary chunk (mode is '%s')"):format(mode)
elseif not is_binary and not nmode:match't' then
return nil, ("attempt to load a text chunk (mode is '%s')"):format(mode)
end
return true
end
local IS_52_LOAD = pcall(load, '')
if IS_52_LOAD then
M.load = _G.load
M.loadfile = _G.loadfile
else
-- 5.2 style `load` implemented in 5.1
function M.load(ld, source, mode, env)
local f
if type(ld) == 'string' then
local s = ld
local ok, err = check_chunk_type(s, mode)
if not ok then return ok, err end
local err; f, err = loadstring(s, source)
if not f then return f, err end
elseif type(ld) == 'function' then
local ld2 = ld
if (mode or 'bt') ~= 'bt' then
local first = ld()
local ok, err = check_chunk_type(first, mode)
if not ok then return ok, err end
ld2 = function()
if first then
local chunk=first; first=nil; return chunk
else return ld() end
end
end
local err; f, err = load(ld2, source); if not f then return f, err end
else
error(("bad argument #1 to 'load' (function expected, got %s)")
:format(type(ld)), 2)
end
if env then setfenv(f, env) end
return f
end
-- 5.2 style `loadfile` implemented in 5.1
function M.loadfile(filename, mode, env)
if (mode or 'bt') ~= 'bt' then
local ioerr
local fh, err = io.open(filename, 'rb'); if not fh then return fh,err end
local function ld()
local chunk; chunk,ioerr = fh:read(4096); return chunk
end
local f, err = M.load(ld, filename and '@'..filename, mode, env)
fh:close()
if not f then return f, err end
if ioerr then return nil, ioerr end
return f
else
local f, err = loadfile(filename); if not f then return f, err end
if env then setfenv(f, env) end
return f
end
end
end
if _G.setfenv then -- Lua 5.1
M.setfenv = _G.setfenv
M.getfenv = _G.getfenv
else -- >= Lua 5.2
-- helper function for `getfenv`/`setfenv`
local function envlookup(f)
local name, val
local up = 0
local unknown
repeat
up=up+1; name, val = debug.getupvalue(f, up)
if name == '' then unknown = true end
until name == '_ENV' or name == nil
if name ~= '_ENV' then
up = nil
if unknown then
error("upvalues not readable in Lua 5.2 when debug info missing", 3)
end
end
return (name == '_ENV') and up, val, unknown
end
-- helper function for `getfenv`/`setfenv`
local function envhelper(f, name)
if type(f) == 'number' then
if f < 0 then
error(("bad argument #1 to '%s' (level must be non-negative)")
:format(name), 3)
elseif f < 1 then
error("thread environments unsupported in Lua 5.2", 3) --[*]
end
f = debug.getinfo(f+2, 'f').func
elseif type(f) ~= 'function' then
error(("bad argument #1 to '%s' (number expected, got %s)")
:format(type(name, f)), 2)
end
return f
end
-- [*] might simulate with table keyed by coroutine.running()
-- 5.1 style `setfenv` implemented in 5.2
function M.setfenv(f, t)
local f = envhelper(f, 'setfenv')
local up, val, unknown = envlookup(f)
if up then
debug.upvaluejoin(f, up, function() return up end, 1) --unique upval[*]
debug.setupvalue(f, up, t)
else
local what = debug.getinfo(f, 'S').what
if what ~= 'Lua' and what ~= 'main' then -- not Lua func
error("'setfenv' cannot change environment of given object", 2)
end -- else ignore no _ENV upvalue (warning: incompatible with 5.1)
end
return f -- invariant: original f ~= 0
end
-- [*] http://lua-users.org/lists/lua-l/2010-06/msg00313.html
-- 5.1 style `getfenv` implemented in 5.2
function M.getfenv(f)
if f == 0 or f == nil then return _G end -- simulated behavior
local f = envhelper(f, 'setfenv')
local up, val = envlookup(f)
if not up then return _G end -- simulated behavior [**]
return val
end
-- [**] possible reasons: no _ENV upvalue, C function
end
return M
| {
"pile_set_name": "Github"
} |
/* This file is (c) 2008-2012 Konstantin Isakov <[email protected]>
* Part of GoldenDict. Licensed under GPLv3 or later, see the LICENSE file */
#ifndef __WEBSITE_HH_INCLUDED__
#define __WEBSITE_HH_INCLUDED__
#include "dictionary.hh"
#include "config.hh"
#include <QNetworkAccessManager>
#include <QNetworkReply>
/// Support for any web sites via a templated url.
namespace WebSite {
using std::vector;
using std::string;
vector< sptr< Dictionary::Class > > makeDictionaries( Config::WebSites const &,
QNetworkAccessManager & )
THROW_SPEC( std::exception );
/// Exposed here for moc
class WebSiteDataRequestSlots: public Dictionary::DataRequest
{
Q_OBJECT
protected slots:
virtual void requestFinished( QNetworkReply * )
{}
};
}
#endif
| {
"pile_set_name": "Github"
} |
/**
******************************************************************************
* @file stm32f30x_rcc.h
* @author MCD Application Team
* @version V1.0.1
* @date 23-October-2012
* @brief This file contains all the functions prototypes for the RCC
* firmware library.
******************************************************************************
* @attention
*
* <h2><center>© COPYRIGHT 2012 STMicroelectronics</center></h2>
*
* Licensed under MCD-ST Liberty SW License Agreement V2, (the "License");
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.st.com/software_license_agreement_liberty_v2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef __STM32F30x_RCC_H
#define __STM32F30x_RCC_H
#ifdef __cplusplus
extern "C" {
#endif
/* Includes ------------------------------------------------------------------*/
#include "stm32f30x.h"
/** @addtogroup STM32F30x_StdPeriph_Driver
* @{
*/
/** @addtogroup RCC
* @{
*/
/* Exported types ------------------------------------------------------------*/
typedef struct
{
uint32_t SYSCLK_Frequency;
uint32_t HCLK_Frequency;
uint32_t PCLK1_Frequency;
uint32_t PCLK2_Frequency;
uint32_t ADC12CLK_Frequency;
uint32_t ADC34CLK_Frequency;
uint32_t I2C1CLK_Frequency;
uint32_t I2C2CLK_Frequency;
uint32_t TIM1CLK_Frequency;
uint32_t TIM8CLK_Frequency;
uint32_t USART1CLK_Frequency;
uint32_t USART2CLK_Frequency;
uint32_t USART3CLK_Frequency;
uint32_t UART4CLK_Frequency;
uint32_t UART5CLK_Frequency;
}RCC_ClocksTypeDef;
/* Exported constants --------------------------------------------------------*/
/** @defgroup RCC_Exported_Constants
* @{
*/
/** @defgroup RCC_HSE_configuration
* @{
*/
#define RCC_HSE_OFF ((uint8_t)0x00)
#define RCC_HSE_ON ((uint8_t)0x01)
#define RCC_HSE_Bypass ((uint8_t)0x05)
#define IS_RCC_HSE(HSE) (((HSE) == RCC_HSE_OFF) || ((HSE) == RCC_HSE_ON) || \
((HSE) == RCC_HSE_Bypass))
/**
* @}
*/
/** @defgroup RCC_PLL_Clock_Source
* @{
*/
#define RCC_PLLSource_HSI_Div2 RCC_CFGR_PLLSRC_HSI_Div2
#define RCC_PLLSource_PREDIV1 RCC_CFGR_PLLSRC_PREDIV1
#define IS_RCC_PLL_SOURCE(SOURCE) (((SOURCE) == RCC_PLLSource_HSI_Div2) || \
((SOURCE) == RCC_PLLSource_PREDIV1))
/**
* @}
*/
/** @defgroup RCC_PLL_Multiplication_Factor
* @{
*/
#define RCC_PLLMul_2 RCC_CFGR_PLLMULL2
#define RCC_PLLMul_3 RCC_CFGR_PLLMULL3
#define RCC_PLLMul_4 RCC_CFGR_PLLMULL4
#define RCC_PLLMul_5 RCC_CFGR_PLLMULL5
#define RCC_PLLMul_6 RCC_CFGR_PLLMULL6
#define RCC_PLLMul_7 RCC_CFGR_PLLMULL7
#define RCC_PLLMul_8 RCC_CFGR_PLLMULL8
#define RCC_PLLMul_9 RCC_CFGR_PLLMULL9
#define RCC_PLLMul_10 RCC_CFGR_PLLMULL10
#define RCC_PLLMul_11 RCC_CFGR_PLLMULL11
#define RCC_PLLMul_12 RCC_CFGR_PLLMULL12
#define RCC_PLLMul_13 RCC_CFGR_PLLMULL13
#define RCC_PLLMul_14 RCC_CFGR_PLLMULL14
#define RCC_PLLMul_15 RCC_CFGR_PLLMULL15
#define RCC_PLLMul_16 RCC_CFGR_PLLMULL16
#define IS_RCC_PLL_MUL(MUL) (((MUL) == RCC_PLLMul_2) || ((MUL) == RCC_PLLMul_3) || \
((MUL) == RCC_PLLMul_4) || ((MUL) == RCC_PLLMul_5) || \
((MUL) == RCC_PLLMul_6) || ((MUL) == RCC_PLLMul_7) || \
((MUL) == RCC_PLLMul_8) || ((MUL) == RCC_PLLMul_9) || \
((MUL) == RCC_PLLMul_10) || ((MUL) == RCC_PLLMul_11) || \
((MUL) == RCC_PLLMul_12) || ((MUL) == RCC_PLLMul_13) || \
((MUL) == RCC_PLLMul_14) || ((MUL) == RCC_PLLMul_15) || \
((MUL) == RCC_PLLMul_16))
/**
* @}
*/
/** @defgroup RCC_PREDIV1_division_factor
* @{
*/
#define RCC_PREDIV1_Div1 RCC_CFGR2_PREDIV1_DIV1
#define RCC_PREDIV1_Div2 RCC_CFGR2_PREDIV1_DIV2
#define RCC_PREDIV1_Div3 RCC_CFGR2_PREDIV1_DIV3
#define RCC_PREDIV1_Div4 RCC_CFGR2_PREDIV1_DIV4
#define RCC_PREDIV1_Div5 RCC_CFGR2_PREDIV1_DIV5
#define RCC_PREDIV1_Div6 RCC_CFGR2_PREDIV1_DIV6
#define RCC_PREDIV1_Div7 RCC_CFGR2_PREDIV1_DIV7
#define RCC_PREDIV1_Div8 RCC_CFGR2_PREDIV1_DIV8
#define RCC_PREDIV1_Div9 RCC_CFGR2_PREDIV1_DIV9
#define RCC_PREDIV1_Div10 RCC_CFGR2_PREDIV1_DIV10
#define RCC_PREDIV1_Div11 RCC_CFGR2_PREDIV1_DIV11
#define RCC_PREDIV1_Div12 RCC_CFGR2_PREDIV1_DIV12
#define RCC_PREDIV1_Div13 RCC_CFGR2_PREDIV1_DIV13
#define RCC_PREDIV1_Div14 RCC_CFGR2_PREDIV1_DIV14
#define RCC_PREDIV1_Div15 RCC_CFGR2_PREDIV1_DIV15
#define RCC_PREDIV1_Div16 RCC_CFGR2_PREDIV1_DIV16
#define IS_RCC_PREDIV1(PREDIV1) (((PREDIV1) == RCC_PREDIV1_Div1) || ((PREDIV1) == RCC_PREDIV1_Div2) || \
((PREDIV1) == RCC_PREDIV1_Div3) || ((PREDIV1) == RCC_PREDIV1_Div4) || \
((PREDIV1) == RCC_PREDIV1_Div5) || ((PREDIV1) == RCC_PREDIV1_Div6) || \
((PREDIV1) == RCC_PREDIV1_Div7) || ((PREDIV1) == RCC_PREDIV1_Div8) || \
((PREDIV1) == RCC_PREDIV1_Div9) || ((PREDIV1) == RCC_PREDIV1_Div10) || \
((PREDIV1) == RCC_PREDIV1_Div11) || ((PREDIV1) == RCC_PREDIV1_Div12) || \
((PREDIV1) == RCC_PREDIV1_Div13) || ((PREDIV1) == RCC_PREDIV1_Div14) || \
((PREDIV1) == RCC_PREDIV1_Div15) || ((PREDIV1) == RCC_PREDIV1_Div16))
/**
* @}
*/
/** @defgroup RCC_System_Clock_Source
* @{
*/
#define RCC_SYSCLKSource_HSI RCC_CFGR_SW_HSI
#define RCC_SYSCLKSource_HSE RCC_CFGR_SW_HSE
#define RCC_SYSCLKSource_PLLCLK RCC_CFGR_SW_PLL
#define IS_RCC_SYSCLK_SOURCE(SOURCE) (((SOURCE) == RCC_SYSCLKSource_HSI) || \
((SOURCE) == RCC_SYSCLKSource_HSE) || \
((SOURCE) == RCC_SYSCLKSource_PLLCLK))
/**
* @}
*/
/** @defgroup RCC_AHB_Clock_Source
* @{
*/
#define RCC_SYSCLK_Div1 RCC_CFGR_HPRE_DIV1
#define RCC_SYSCLK_Div2 RCC_CFGR_HPRE_DIV2
#define RCC_SYSCLK_Div4 RCC_CFGR_HPRE_DIV4
#define RCC_SYSCLK_Div8 RCC_CFGR_HPRE_DIV8
#define RCC_SYSCLK_Div16 RCC_CFGR_HPRE_DIV16
#define RCC_SYSCLK_Div64 RCC_CFGR_HPRE_DIV64
#define RCC_SYSCLK_Div128 RCC_CFGR_HPRE_DIV128
#define RCC_SYSCLK_Div256 RCC_CFGR_HPRE_DIV256
#define RCC_SYSCLK_Div512 RCC_CFGR_HPRE_DIV512
#define IS_RCC_HCLK(HCLK) (((HCLK) == RCC_SYSCLK_Div1) || ((HCLK) == RCC_SYSCLK_Div2) || \
((HCLK) == RCC_SYSCLK_Div4) || ((HCLK) == RCC_SYSCLK_Div8) || \
((HCLK) == RCC_SYSCLK_Div16) || ((HCLK) == RCC_SYSCLK_Div64) || \
((HCLK) == RCC_SYSCLK_Div128) || ((HCLK) == RCC_SYSCLK_Div256) || \
((HCLK) == RCC_SYSCLK_Div512))
/**
* @}
*/
/** @defgroup RCC_APB1_APB2_clock_source
* @{
*/
#define RCC_HCLK_Div1 ((uint32_t)0x00000000)
#define RCC_HCLK_Div2 ((uint32_t)0x00000400)
#define RCC_HCLK_Div4 ((uint32_t)0x00000500)
#define RCC_HCLK_Div8 ((uint32_t)0x00000600)
#define RCC_HCLK_Div16 ((uint32_t)0x00000700)
#define IS_RCC_PCLK(PCLK) (((PCLK) == RCC_HCLK_Div1) || ((PCLK) == RCC_HCLK_Div2) || \
((PCLK) == RCC_HCLK_Div4) || ((PCLK) == RCC_HCLK_Div8) || \
((PCLK) == RCC_HCLK_Div16))
/**
* @}
*/
/** @defgroup RCC_ADC_clock_source
* @{
*/
/* ADC1 & ADC2 */
#define RCC_ADC12PLLCLK_OFF ((uint32_t)0x00000000)
#define RCC_ADC12PLLCLK_Div1 ((uint32_t)0x00000100)
#define RCC_ADC12PLLCLK_Div2 ((uint32_t)0x00000110)
#define RCC_ADC12PLLCLK_Div4 ((uint32_t)0x00000120)
#define RCC_ADC12PLLCLK_Div6 ((uint32_t)0x00000130)
#define RCC_ADC12PLLCLK_Div8 ((uint32_t)0x00000140)
#define RCC_ADC12PLLCLK_Div10 ((uint32_t)0x00000150)
#define RCC_ADC12PLLCLK_Div12 ((uint32_t)0x00000160)
#define RCC_ADC12PLLCLK_Div16 ((uint32_t)0x00000170)
#define RCC_ADC12PLLCLK_Div32 ((uint32_t)0x00000180)
#define RCC_ADC12PLLCLK_Div64 ((uint32_t)0x00000190)
#define RCC_ADC12PLLCLK_Div128 ((uint32_t)0x000001A0)
#define RCC_ADC12PLLCLK_Div256 ((uint32_t)0x000001B0)
/* ADC3 & ADC4 */
#define RCC_ADC34PLLCLK_OFF ((uint32_t)0x10000000)
#define RCC_ADC34PLLCLK_Div1 ((uint32_t)0x10002000)
#define RCC_ADC34PLLCLK_Div2 ((uint32_t)0x10002200)
#define RCC_ADC34PLLCLK_Div4 ((uint32_t)0x10002400)
#define RCC_ADC34PLLCLK_Div6 ((uint32_t)0x10002600)
#define RCC_ADC34PLLCLK_Div8 ((uint32_t)0x10002800)
#define RCC_ADC34PLLCLK_Div10 ((uint32_t)0x10002A00)
#define RCC_ADC34PLLCLK_Div12 ((uint32_t)0x10002C00)
#define RCC_ADC34PLLCLK_Div16 ((uint32_t)0x10002E00)
#define RCC_ADC34PLLCLK_Div32 ((uint32_t)0x10003000)
#define RCC_ADC34PLLCLK_Div64 ((uint32_t)0x10003200)
#define RCC_ADC34PLLCLK_Div128 ((uint32_t)0x10003400)
#define RCC_ADC34PLLCLK_Div256 ((uint32_t)0x10003600)
#define IS_RCC_ADCCLK(ADCCLK) (((ADCCLK) == RCC_ADC12PLLCLK_OFF) || ((ADCCLK) == RCC_ADC12PLLCLK_Div1) || \
((ADCCLK) == RCC_ADC12PLLCLK_Div2) || ((ADCCLK) == RCC_ADC12PLLCLK_Div4) || \
((ADCCLK) == RCC_ADC12PLLCLK_Div6) || ((ADCCLK) == RCC_ADC12PLLCLK_Div8) || \
((ADCCLK) == RCC_ADC12PLLCLK_Div10) || ((ADCCLK) == RCC_ADC12PLLCLK_Div12) || \
((ADCCLK) == RCC_ADC12PLLCLK_Div16) || ((ADCCLK) == RCC_ADC12PLLCLK_Div32) || \
((ADCCLK) == RCC_ADC12PLLCLK_Div64) || ((ADCCLK) == RCC_ADC12PLLCLK_Div128) || \
((ADCCLK) == RCC_ADC12PLLCLK_Div256) || ((ADCCLK) == RCC_ADC34PLLCLK_OFF) || \
((ADCCLK) == RCC_ADC34PLLCLK_Div1) || ((ADCCLK) == RCC_ADC34PLLCLK_Div2) || \
((ADCCLK) == RCC_ADC34PLLCLK_Div4) || ((ADCCLK) == RCC_ADC34PLLCLK_Div6) || \
((ADCCLK) == RCC_ADC34PLLCLK_Div8) || ((ADCCLK) == RCC_ADC34PLLCLK_Div10) || \
((ADCCLK) == RCC_ADC34PLLCLK_Div12) || ((ADCCLK) == RCC_ADC34PLLCLK_Div16) || \
((ADCCLK) == RCC_ADC34PLLCLK_Div32) || ((ADCCLK) == RCC_ADC34PLLCLK_Div64) || \
((ADCCLK) == RCC_ADC34PLLCLK_Div128) || ((ADCCLK) == RCC_ADC34PLLCLK_Div256))
/**
* @}
*/
/** @defgroup RCC_TIM_clock_source
* @{
*/
#define RCC_TIM1CLK_HCLK ((uint32_t)0x00000000)
#define RCC_TIM1CLK_PLLCLK RCC_CFGR3_TIM1SW
#define RCC_TIM8CLK_HCLK ((uint32_t)0x10000000)
#define RCC_TIM8CLK_PLLCLK ((uint32_t)0x10000200)
#define IS_RCC_TIMCLK(TIMCLK) (((TIMCLK) == RCC_TIM1CLK_HCLK) || ((TIMCLK) == RCC_TIM1CLK_PLLCLK) || \
((TIMCLK) == RCC_TIM8CLK_HCLK) || ((TIMCLK) == RCC_TIM8CLK_PLLCLK))
/**
* @}
*/
/** @defgroup RCC_I2C_clock_source
* @{
*/
#define RCC_I2C1CLK_HSI ((uint32_t)0x00000000)
#define RCC_I2C1CLK_SYSCLK RCC_CFGR3_I2C1SW
#define RCC_I2C2CLK_HSI ((uint32_t)0x10000000)
#define RCC_I2C2CLK_SYSCLK ((uint32_t)0x10000020)
#define IS_RCC_I2CCLK(I2CCLK) (((I2CCLK) == RCC_I2C1CLK_HSI) || ((I2CCLK) == RCC_I2C1CLK_SYSCLK) || \
((I2CCLK) == RCC_I2C2CLK_HSI) || ((I2CCLK) == RCC_I2C2CLK_SYSCLK))
/**
* @}
*/
/** @defgroup RCC_USART_clock_source
* @{
*/
#define RCC_USART1CLK_PCLK ((uint32_t)0x10000000)
#define RCC_USART1CLK_SYSCLK ((uint32_t)0x10000001)
#define RCC_USART1CLK_LSE ((uint32_t)0x10000002)
#define RCC_USART1CLK_HSI ((uint32_t)0x10000003)
#define RCC_USART2CLK_PCLK ((uint32_t)0x20000000)
#define RCC_USART2CLK_SYSCLK ((uint32_t)0x20010000)
#define RCC_USART2CLK_LSE ((uint32_t)0x20020000)
#define RCC_USART2CLK_HSI ((uint32_t)0x20030000)
#define RCC_USART3CLK_PCLK ((uint32_t)0x30000000)
#define RCC_USART3CLK_SYSCLK ((uint32_t)0x30040000)
#define RCC_USART3CLK_LSE ((uint32_t)0x30080000)
#define RCC_USART3CLK_HSI ((uint32_t)0x300C0000)
#define RCC_UART4CLK_PCLK ((uint32_t)0x40000000)
#define RCC_UART4CLK_SYSCLK ((uint32_t)0x40100000)
#define RCC_UART4CLK_LSE ((uint32_t)0x40200000)
#define RCC_UART4CLK_HSI ((uint32_t)0x40300000)
#define RCC_UART5CLK_PCLK ((uint32_t)0x50000000)
#define RCC_UART5CLK_SYSCLK ((uint32_t)0x50400000)
#define RCC_UART5CLK_LSE ((uint32_t)0x50800000)
#define RCC_UART5CLK_HSI ((uint32_t)0x50C00000)
#define IS_RCC_USARTCLK(USARTCLK) (((USARTCLK) == RCC_USART1CLK_PCLK) || ((USARTCLK) == RCC_USART1CLK_SYSCLK) || \
((USARTCLK) == RCC_USART1CLK_LSE) || ((USARTCLK) == RCC_USART1CLK_HSI) ||\
((USARTCLK) == RCC_USART2CLK_PCLK) || ((USARTCLK) == RCC_USART2CLK_SYSCLK) || \
((USARTCLK) == RCC_USART2CLK_LSE) || ((USARTCLK) == RCC_USART2CLK_HSI) || \
((USARTCLK) == RCC_USART3CLK_PCLK) || ((USARTCLK) == RCC_USART3CLK_SYSCLK) || \
((USARTCLK) == RCC_USART3CLK_LSE) || ((USARTCLK) == RCC_USART3CLK_HSI) || \
((USARTCLK) == RCC_UART4CLK_PCLK) || ((USARTCLK) == RCC_UART4CLK_SYSCLK) || \
((USARTCLK) == RCC_UART4CLK_LSE) || ((USARTCLK) == RCC_UART4CLK_HSI) || \
((USARTCLK) == RCC_UART5CLK_PCLK) || ((USARTCLK) == RCC_UART5CLK_SYSCLK) || \
((USARTCLK) == RCC_UART5CLK_LSE) || ((USARTCLK) == RCC_UART5CLK_HSI))
/**
* @}
*/
/** @defgroup RCC_Interrupt_Source
* @{
*/
#define RCC_IT_LSIRDY ((uint8_t)0x01)
#define RCC_IT_LSERDY ((uint8_t)0x02)
#define RCC_IT_HSIRDY ((uint8_t)0x04)
#define RCC_IT_HSERDY ((uint8_t)0x08)
#define RCC_IT_PLLRDY ((uint8_t)0x10)
#define RCC_IT_CSS ((uint8_t)0x80)
#define IS_RCC_IT(IT) ((((IT) & (uint8_t)0xC0) == 0x00) && ((IT) != 0x00))
#define IS_RCC_GET_IT(IT) (((IT) == RCC_IT_LSIRDY) || ((IT) == RCC_IT_LSERDY) || \
((IT) == RCC_IT_HSIRDY) || ((IT) == RCC_IT_HSERDY) || \
((IT) == RCC_IT_PLLRDY) || ((IT) == RCC_IT_CSS))
#define IS_RCC_CLEAR_IT(IT) ((((IT) & (uint8_t)0x40) == 0x00) && ((IT) != 0x00))
/**
* @}
*/
/** @defgroup RCC_LSE_configuration
* @{
*/
#define RCC_LSE_OFF ((uint32_t)0x00000000)
#define RCC_LSE_ON RCC_BDCR_LSEON
#define RCC_LSE_Bypass ((uint32_t)(RCC_BDCR_LSEON | RCC_BDCR_LSEBYP))
#define IS_RCC_LSE(LSE) (((LSE) == RCC_LSE_OFF) || ((LSE) == RCC_LSE_ON) || \
((LSE) == RCC_LSE_Bypass))
/**
* @}
*/
/** @defgroup RCC_RTC_Clock_Source
* @{
*/
#define RCC_RTCCLKSource_LSE RCC_BDCR_RTCSEL_LSE
#define RCC_RTCCLKSource_LSI RCC_BDCR_RTCSEL_LSI
#define RCC_RTCCLKSource_HSE_Div32 RCC_BDCR_RTCSEL_HSE
#define IS_RCC_RTCCLK_SOURCE(SOURCE) (((SOURCE) == RCC_RTCCLKSource_LSE) || \
((SOURCE) == RCC_RTCCLKSource_LSI) || \
((SOURCE) == RCC_RTCCLKSource_HSE_Div32))
/**
* @}
*/
/** @defgroup RCC_I2S_Clock_Source
* @{
*/
#define RCC_I2S2CLKSource_SYSCLK ((uint8_t)0x00)
#define RCC_I2S2CLKSource_Ext ((uint8_t)0x01)
#define IS_RCC_I2SCLK_SOURCE(SOURCE) (((SOURCE) == RCC_I2S2CLKSource_SYSCLK) || ((SOURCE) == RCC_I2S2CLKSource_Ext))
/** @defgroup RCC_LSE_Drive_Configuration
* @{
*/
#define RCC_LSEDrive_Low ((uint32_t)0x00000000)
#define RCC_LSEDrive_MediumLow RCC_BDCR_LSEDRV_0
#define RCC_LSEDrive_MediumHigh RCC_BDCR_LSEDRV_1
#define RCC_LSEDrive_High RCC_BDCR_LSEDRV
#define IS_RCC_LSE_DRIVE(DRIVE) (((DRIVE) == RCC_LSEDrive_Low) || ((DRIVE) == RCC_LSEDrive_MediumLow) || \
((DRIVE) == RCC_LSEDrive_MediumHigh) || ((DRIVE) == RCC_LSEDrive_High))
/**
* @}
*/
/** @defgroup RCC_AHB_Peripherals
* @{
*/
#define RCC_AHBPeriph_ADC34 RCC_AHBENR_ADC34EN
#define RCC_AHBPeriph_ADC12 RCC_AHBENR_ADC12EN
#define RCC_AHBPeriph_GPIOA RCC_AHBENR_GPIOAEN
#define RCC_AHBPeriph_GPIOB RCC_AHBENR_GPIOBEN
#define RCC_AHBPeriph_GPIOC RCC_AHBENR_GPIOCEN
#define RCC_AHBPeriph_GPIOD RCC_AHBENR_GPIODEN
#define RCC_AHBPeriph_GPIOE RCC_AHBENR_GPIOEEN
#define RCC_AHBPeriph_GPIOF RCC_AHBENR_GPIOFEN
#define RCC_AHBPeriph_TS RCC_AHBENR_TSEN
#define RCC_AHBPeriph_CRC RCC_AHBENR_CRCEN
#define RCC_AHBPeriph_FLITF RCC_AHBENR_FLITFEN
#define RCC_AHBPeriph_SRAM RCC_AHBENR_SRAMEN
#define RCC_AHBPeriph_DMA2 RCC_AHBENR_DMA2EN
#define RCC_AHBPeriph_DMA1 RCC_AHBENR_DMA1EN
#define IS_RCC_AHB_PERIPH(PERIPH) ((((PERIPH) & 0xCE81FFA8) == 0x00) && ((PERIPH) != 0x00))
#define IS_RCC_AHB_RST_PERIPH(PERIPH) ((((PERIPH) & 0xCE81FFFF) == 0x00) && ((PERIPH) != 0x00))
/**
* @}
*/
/** @defgroup RCC_APB2_Peripherals
* @{
*/
#define RCC_APB2Periph_SYSCFG ((uint32_t)0x00000001)
#define RCC_APB2Periph_TIM1 ((uint32_t)0x00000800)
#define RCC_APB2Periph_SPI1 ((uint32_t)0x00001000)
#define RCC_APB2Periph_TIM8 ((uint32_t)0x00002000)
#define RCC_APB2Periph_USART1 ((uint32_t)0x00004000)
#define RCC_APB2Periph_TIM15 ((uint32_t)0x00010000)
#define RCC_APB2Periph_TIM16 ((uint32_t)0x00020000)
#define RCC_APB2Periph_TIM17 ((uint32_t)0x00040000)
#define IS_RCC_APB2_PERIPH(PERIPH) ((((PERIPH) & 0xFFF887FE) == 0x00) && ((PERIPH) != 0x00))
/**
* @}
*/
/** @defgroup RCC_APB1_Peripherals
* @{
*/
#define RCC_APB1Periph_TIM2 ((uint32_t)0x00000001)
#define RCC_APB1Periph_TIM3 ((uint32_t)0x00000002)
#define RCC_APB1Periph_TIM4 ((uint32_t)0x00000004)
#define RCC_APB1Periph_TIM6 ((uint32_t)0x00000010)
#define RCC_APB1Periph_TIM7 ((uint32_t)0x00000020)
#define RCC_APB1Periph_WWDG ((uint32_t)0x00000800)
#define RCC_APB1Periph_SPI2 ((uint32_t)0x00004000)
#define RCC_APB1Periph_SPI3 ((uint32_t)0x00008000)
#define RCC_APB1Periph_USART2 ((uint32_t)0x00020000)
#define RCC_APB1Periph_USART3 ((uint32_t)0x00040000)
#define RCC_APB1Periph_UART4 ((uint32_t)0x00080000)
#define RCC_APB1Periph_UART5 ((uint32_t)0x00100000)
#define RCC_APB1Periph_I2C1 ((uint32_t)0x00200000)
#define RCC_APB1Periph_I2C2 ((uint32_t)0x00400000)
#define RCC_APB1Periph_USB ((uint32_t)0x00800000)
#define RCC_APB1Periph_CAN1 ((uint32_t)0x02000000)
#define RCC_APB1Periph_PWR ((uint32_t)0x10000000)
#define RCC_APB1Periph_DAC ((uint32_t)0x20000000)
#define IS_RCC_APB1_PERIPH(PERIPH) ((((PERIPH) & 0xCD0137C8) == 0x00) && ((PERIPH) != 0x00))
/**
* @}
*/
/** @defgroup RCC_MCO_Clock_Source
* @{
*/
#define RCC_MCOSource_NoClock ((uint8_t)0x00)
#define RCC_MCOSource_LSI ((uint8_t)0x02)
#define RCC_MCOSource_LSE ((uint8_t)0x03)
#define RCC_MCOSource_SYSCLK ((uint8_t)0x04)
#define RCC_MCOSource_HSI ((uint8_t)0x05)
#define RCC_MCOSource_HSE ((uint8_t)0x06)
#define RCC_MCOSource_PLLCLK_Div2 ((uint8_t)0x07)
#define IS_RCC_MCO_SOURCE(SOURCE) (((SOURCE) == RCC_MCOSource_NoClock) ||((SOURCE) == RCC_MCOSource_SYSCLK) ||\
((SOURCE) == RCC_MCOSource_HSI) || ((SOURCE) == RCC_MCOSource_HSE) || \
((SOURCE) == RCC_MCOSource_LSI) || ((SOURCE) == RCC_MCOSource_LSE) || \
((SOURCE) == RCC_MCOSource_PLLCLK_Div2))
/**
* @}
*/
/** @defgroup RCC_USB_Device_clock_source
* @{
*/
#define RCC_USBCLKSource_PLLCLK_1Div5 ((uint8_t)0x00)
#define RCC_USBCLKSource_PLLCLK_Div1 ((uint8_t)0x01)
#define IS_RCC_USBCLK_SOURCE(SOURCE) (((SOURCE) == RCC_USBCLKSource_PLLCLK_1Div5) || \
((SOURCE) == RCC_USBCLKSource_PLLCLK_Div1))
/**
* @}
*/
/** @defgroup RCC_Flag
* @{
*/
#define RCC_FLAG_HSIRDY ((uint8_t)0x01)
#define RCC_FLAG_HSERDY ((uint8_t)0x11)
#define RCC_FLAG_PLLRDY ((uint8_t)0x19)
#define RCC_FLAG_MCOF ((uint8_t)0x9C)
#define RCC_FLAG_LSERDY ((uint8_t)0x21)
#define RCC_FLAG_LSIRDY ((uint8_t)0x41)
#define RCC_FLAG_OBLRST ((uint8_t)0x59)
#define RCC_FLAG_PINRST ((uint8_t)0x5A)
#define RCC_FLAG_PORRST ((uint8_t)0x5B)
#define RCC_FLAG_SFTRST ((uint8_t)0x5C)
#define RCC_FLAG_IWDGRST ((uint8_t)0x5D)
#define RCC_FLAG_WWDGRST ((uint8_t)0x5E)
#define RCC_FLAG_LPWRRST ((uint8_t)0x5F)
#define IS_RCC_FLAG(FLAG) (((FLAG) == RCC_FLAG_HSIRDY) || ((FLAG) == RCC_FLAG_HSERDY) || \
((FLAG) == RCC_FLAG_PLLRDY) || ((FLAG) == RCC_FLAG_LSERDY) || \
((FLAG) == RCC_FLAG_LSIRDY) || ((FLAG) == RCC_FLAG_OBLRST) || \
((FLAG) == RCC_FLAG_PINRST) || ((FLAG) == RCC_FLAG_PORRST) || \
((FLAG) == RCC_FLAG_SFTRST) || ((FLAG) == RCC_FLAG_IWDGRST)|| \
((FLAG) == RCC_FLAG_WWDGRST)|| ((FLAG) == RCC_FLAG_LPWRRST)|| \
((FLAG) == RCC_FLAG_MCOF))
#define IS_RCC_HSI_CALIBRATION_VALUE(VALUE) ((VALUE) <= 0x1F)
/**
* @}
*/
/**
* @}
*/
/* Exported macro ------------------------------------------------------------*/
/* Exported functions ------------------------------------------------------- */
/* Function used to set the RCC clock configuration to the default reset state */
void RCC_DeInit(void);
/* Internal/external clocks, PLL, CSS and MCO configuration functions *********/
void RCC_HSEConfig(uint8_t RCC_HSE);
ErrorStatus RCC_WaitForHSEStartUp(void);
void RCC_AdjustHSICalibrationValue(uint8_t HSICalibrationValue);
void RCC_HSICmd(FunctionalState NewState);
void RCC_LSEConfig(uint32_t RCC_LSE);
void RCC_LSEDriveConfig(uint32_t RCC_LSEDrive);
void RCC_LSICmd(FunctionalState NewState);
void RCC_PLLConfig(uint32_t RCC_PLLSource, uint32_t RCC_PLLMul);
void RCC_PLLCmd(FunctionalState NewState);
void RCC_PREDIV1Config(uint32_t RCC_PREDIV1_Div);
void RCC_ClockSecuritySystemCmd(FunctionalState NewState);
void RCC_MCOConfig(uint8_t RCC_MCOSource);
/* System, AHB and APB busses clocks configuration functions ******************/
void RCC_SYSCLKConfig(uint32_t RCC_SYSCLKSource);
uint8_t RCC_GetSYSCLKSource(void);
void RCC_HCLKConfig(uint32_t RCC_SYSCLK);
void RCC_PCLK1Config(uint32_t RCC_HCLK);
void RCC_PCLK2Config(uint32_t RCC_HCLK);
void RCC_GetClocksFreq(RCC_ClocksTypeDef* RCC_Clocks);
/* Peripheral clocks configuration functions **********************************/
void RCC_ADCCLKConfig(uint32_t RCC_PLLCLK);
void RCC_I2CCLKConfig(uint32_t RCC_I2CCLK);
void RCC_TIMCLKConfig(uint32_t RCC_TIMCLK);
void RCC_I2SCLKConfig(uint32_t RCC_I2SCLKSource);
void RCC_USARTCLKConfig(uint32_t RCC_USARTCLK);
void RCC_USBCLKConfig(uint32_t RCC_USBCLKSource);
void RCC_RTCCLKConfig(uint32_t RCC_RTCCLKSource);
void RCC_RTCCLKCmd(FunctionalState NewState);
void RCC_BackupResetCmd(FunctionalState NewState);
void RCC_AHBPeriphClockCmd(uint32_t RCC_AHBPeriph, FunctionalState NewState);
void RCC_APB2PeriphClockCmd(uint32_t RCC_APB2Periph, FunctionalState NewState);
void RCC_APB1PeriphClockCmd(uint32_t RCC_APB1Periph, FunctionalState NewState);
void RCC_AHBPeriphResetCmd(uint32_t RCC_AHBPeriph, FunctionalState NewState);
void RCC_APB2PeriphResetCmd(uint32_t RCC_APB2Periph, FunctionalState NewState);
void RCC_APB1PeriphResetCmd(uint32_t RCC_APB1Periph, FunctionalState NewState);
/* Interrupts and flags management functions **********************************/
void RCC_ITConfig(uint8_t RCC_IT, FunctionalState NewState);
FlagStatus RCC_GetFlagStatus(uint8_t RCC_FLAG);
void RCC_ClearFlag(void);
ITStatus RCC_GetITStatus(uint8_t RCC_IT);
void RCC_ClearITPendingBit(uint8_t RCC_IT);
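/* Usage sketch (an added illustration, not part of the original ST header): a
   minimal, hypothetical sequence that gates two peripheral clocks, selects a
   kernel clock for USART1 and reads back the resulting bus frequencies. The
   chosen peripherals and clock source are examples only.
     RCC_AHBPeriphClockCmd(RCC_AHBPeriph_GPIOA, ENABLE);
     RCC_APB2PeriphClockCmd(RCC_APB2Periph_USART1, ENABLE);
     RCC_USARTCLKConfig(RCC_USART1CLK_SYSCLK);
     RCC_ClocksTypeDef RCC_Clocks;
     RCC_GetClocksFreq(&RCC_Clocks);
   All functions and constants used above are declared in this header; ENABLE
   comes from the device header (stm32f30x.h). */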
#ifdef __cplusplus
}
#endif
#endif /* __STM32F30x_RCC_H */
/**
* @}
*/
/**
* @}
*/
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
企業再建整備法施行令
(昭和二十一年十月二十九日勅令第五百一号)最終改正:平成一七年二月一八日政令第二四号
第一条
この勅令で、特別経理会社、特別経理株式会社、旧債権、旧債権者、旧勘定、新勘定、仮勘定、指定時、特別管理人、特別損失、整備計画又は決定整備計画といふのは、企業再建整備法
(以下法といふ。)の特別経理会社、特別経理株式会社、旧債権、旧債権者、旧勘定、新勘定、仮勘定、指定時、特別管理人、特別損失、整備計画又は決定整備計画をいひ、金融機関といふのは、金融機関再建整備法
の金融機関をいふ。
○2
この勅令で、資本の負担すべき特別損失の額とは、法第七条の規定により、特別損失の額について、株主の負担額として計算した額(整備計画の定めるところにより、指定時後整備計画立案の時までに新勘定に生じた利益金に相当する額を超えない額の特別損失の額を繰越欠損として処理しようとするときには、その額を控除した額とする。)をいふ。
○3
この勅令で、信託株式とは、信託法第三条第二項
の規定により株主名簿に信託財産である旨の記載のある株式又は金融機関経理応急措置法第八条第一項
の規定により公証人の認証を受けた信託会社若しくは信託業務を兼営する銀行の指定時における信託勘定の新勘定に属する資産の目録に記載のある株式をいふ。
第二条
法第七条第一項第二号の規定により、左に掲げる債権を除くの外、会社経理応急措置法
(以下措置法といふ。)第十四条第一項
の旧債権(同項
但書の債権を除く。以下同じ。)のうち知れたる債権を知れたる特別損失負担債権とする。
一
特別経理株式会社に対する債権であつて外国(主務大臣の指定する地域を含む。以下同じ。)を履行地とするもの
二
前号に掲げるものを除くの外、当該債権の債権者について、会社経理応急措置法施行令
(以下措置法施行令といふ。)第二十五条第九号
の規定により在外資産となる債権
第三条
特別経理株式会社が、新勘定に所属する資産(法第三十四条の四第三項又は法第三十四条の五第一項の規定により譲渡する資産を除く。本条に於て以下同じ。)の全部を一の者に出資(法第十条第二項の規定による譲渡を含む。本条において以下同じ。)する場合においては、その出資を受ける者は、当該会社の新勘定に所属するすべての債務を承継しなければならない。
○2
特別経理株式会社が、新勘定に所属する資産の全部を二以上の者に出資する場合においては、その出資を受ける者は、左の各号に規定する分担の方法に従ひ、当該会社の新勘定に所属する債務を分担して承継しなければならない。但し、特定の資産を担保とする場合等であつて、決定整備計画に左の各号に規定する分担の方法と異なる方法を定めたときには、その方法による。
一
特定の資産の取得(特定の資産である設備の新設、拡張又は改良を含む。)、管理又は運営に因り生じた債務は、当該資産の出資を受ける者が、これを承継する。
二
前号以外の債務は、出資を受ける資産の額(前号の規定によつて債務を承継する場合には、その債務の額を控除した額とする。)の割合に応じて出資を受ける者が、これを按分して承継する。
○3
前号但書の規定による方法を定める整備計画の認可を申請する場合には、その理由を附記しなければならない。
○4
前二項の規定は、特別経理株式会社が新勘定に所属する資産の一部を出資する場合の当該会社の新勘定に所属する債務の一部の承継の場合に、これを準用する。
第三条の二
特別経理株式会社が、決定整備計画の定めるところにより、その資産の全部又は一部を出資し、又は譲渡(法第十条第二項、法第三十四条の四第三項又は法第三十四条の五第一項の規定による譲渡を含む。)する場合において、その出資又は譲渡を受ける会社の定款に、商法第百六十八条第一項第五号又は第六号の規定により当該出資又は譲渡の目的たる財産及びその価格を記載するときには、その財産及びその価格の記載は、同項第五号又は第六号の規定にかかはらず、命令の定めるところにより、その種類及び数量並びに価格を記載すれば足りる。
○2
前項の規定は、同項に規定する場合において、出資又は譲渡を受ける会社が商法第百七十五条第二項第七号
又は第二百八十条ノ六第三号
の規定により株式申込証に当該出資又は譲渡の目的たる財産及びその価格を記載するときに、これを準用する。
第四条
法第十一条第一項の規定による議決権のない株式の議決権のある株式への転換の請求をなすことのできる期間は、当該議決権のない株式を発行する場合の登記の日から開始する。
○2
前項の期間は、二年を下ることができない。
○3
会社経理応急措置法第十四条第一項
の旧債権(同項
但書の債権を除く。)を有した金融機関経理応急措置法第二十七条
の金融機関はその債権を出資して与へられた当該特別経理株式会社の議決権のない株式については、前二項の規定にかかはらず、転換の請求をなすことができない。
第四条の二
措置法第十四条第一項の旧債権は、第五条、第六条、第二十条及び第二十一条の二に規定する場合を除くの外、決定整備計画に定める法第六条第一項第十号の割合を乗じた額に相当する額だけ、法第十五条第一項乃至第三項の規定による認可を受けた日に消滅し、その債権の額は、その認可に因り確定する。
第五条
措置法第十四条第一項の旧債権の連帯債務者の一部又は全部が特別経理株式会社である場合において、各債務者について法第十九条第一項の規定によつて確定すべき額(連帯債務者中に特別経理株式会社でない者のあるとき、指定時後連帯債務を負担した特別経理株式会社のあるとき又は法第七条の規定により旧債権の負担額の計算を行はない特別経理株式会社のあるときは、当該債務者については当該債権の全額。以下残存額という。)が異なるときは、最も小額の残存額に相当する部分の債権についてはすべての債権者が連帯して債務を負担するものとし、最も小額の残存額と次に小額の残存額との差額に相当する部分の債権については次に小額の残存額以上の残存額の債務を負担する債務者が連帯して債務を負担するものとし、順次に小額の残存額の差額に相当する部分の債権について当該残存額以上の残存額を負担する債務者が連帯して債務を負担するものとする。
○2
前項の場合において、各連帯債務者は、同項の規定によつて負担する各連帯債務について、従前の負担部分の割合の負担部分を負担するものとする。
○3
第一項の場合において、債権者は、最も多額の残存額に達するまで各債務者の残存額の範囲内において、各債務者に履行を請求することができる。
○4
債務者が、その残存額に満たない額の弁済をしたときには、その残存額について第一項の規定によつて連帯して債務を負担する債務者の多数ある部分から、その弁済を充当する。
○5
第一項、第三項及び前項の規定は、手形又は小切手上の債務者の一部又は全部が特別経理株式会社である場合に、これを準用する。
第六条
第二条に掲げる債権は、法第十九条第一項の規定にかかはらず、法第十五条第一項乃至第三項の規定による認可を受けた日に消滅せず、その債権の額は認可に因り確定しないものとする。
第六条の二
第二会社に出資又は譲渡された資産につき工場財団その他の財団を設ける場合において、財団目録を調製しようとするときは、左に掲げる物件は、法第二十九条の五第一項の規定により、これを一括して表示することができる。
一
鉄道抵当法第三条第一項第一号
乃至第四号
の器具機械並びに同項第六号
及び第七号
の物件
二
工場抵当法第十一条第二号
の物件
三
鉱業抵当法第二条第五号
の物件
四
明治四十二年法律第二十八号第二条第一項第一号乃至第四号の器具機械並びに同項第六号及び第七号の物件
五
運河法第十四条第一号
乃至第三号
及び第五号
の器具機械並びに同条第六号
の物件
六
漁業財団抵当法第二条第一項第二号
の属具及び附属設備並びに同項第五号
及び第六号
の物件
七
自動車交通事業法第三十九条第一号乃至第四号の器具機械並びに同条第六号及び第七号の物件
○2
前項の規定により財団目録に一括して表示することのできる物件であつて、その財団に属させないものがあるときは、命令の定めるところにより、財団目録にその旨を記載することを要する。
○3
前二項の規定は、工場抵当法第三十九条
(鉱業抵当法第三条
、漁業財団抵当法第六条
及び自動車交通事業法第四十七条第一項
において準用する場合を含む。)の目録に、これを準用する。
第六条の三
法第三十四条の八第二項の規定による第二会社特別勘定の償却は、毎決算期において生ずる利益の全額(当該利益に対して法人税及び地方税法による事業税を課せられる場合においては、当該利益の額から当該利益に対し課せられるべき法人税及び地方税法による事業税の額に相当する額を控除した額)をもつて、これをなさなければならない。
○2
商法第二百八十八条
の規定は、前項の規定により第二会社特別勘定の償却に充てられるべき毎決算期の利益については、これを適用しない。
第六条の四
法第三十四条の九第二項の規定により損金に算入される金額は、同項に規定する特別経理株式会社の事業年度において生じた損金に相当する金額(当該損金のうち第二会社の設立の日の前日を含む事業年度までに当該特別経理株式会社において法人税法第九条第五項
の規定により損金に算入された額があるときは、その額を控除した額に相当する金額)に第二会社特別勘定の額の当該特別経理株式会社において当該第二会社の設立の日までに生じた新勘定の損失の額に対する割合を乗じて得た金額(当該第二会社においてすでに本条の規定の適用を受けた額があるときは、その額を控除した額)とする。
第七条
法第三十九条第二項に規定する会社の資産の譲渡に因る益金は、整備計画立案の時までに会社財産を譲渡した場合の当該譲渡に因る益金(商品、原料品、半製品その他財務大臣の指定する資産については、当該譲渡に因る益金のうち財務大臣の定めるものを除く。)とする。
○2
法第三十九条第二項に規定する益金で、特別経理株式会社の納付すべき戦時補償特別税額(戦時補償請求権に因る益金に相当する金額を除く。)、指定時において納付すべき指定時を以て終了する事業年度以前の各事業年度の法人税額及び臨時利得税額、措置法施行令第八条の二の規定により旧勘定の負担として経理される非戦災者特別税法による非戦災家屋税額及び非戦災者税額並びに指定時において指定時以前から繰り越した損金(指定時以前一年以内に開始した事業年度において生じたものを除く。)の合計額から指定時における法人税法第十六条第一項
に規定する積立金額(法第三十四条の四第一項の規定により定められる金額のある場合には、当該金額を控除した額)を控除した金額に達するまでの金額は、法人税法
による各事業年度の普通所得、旧営業税法による各事業年度の純益又は地方税法
により事業税を課する場合における各事業年度の純益の計算上、これを益金に算入しない。
○3
法第三十九条第二項の規定の適用を受けようとする特別経理株式会社は、法人税法第十八条
乃至第二十一条
に規定する申告書に財務大臣の定める事項を記載しなければならない。
○4
前項の申告書には、財務大臣の定める明細書を添附しなければならない。
○5
法第三十九条第二項の規定は、法人税法第十八条
乃至第二十一条
に規定する申告書に、第三項に規定する事項の記載がない場合には、これを適用しない。
○6
税務署長は、特別の事情があると認めたときは、財務大臣の定めるところにより、第三項の申請書に同項に規定する事項の記載がなかつた場合においても、法第三十九条第二項の規定を適用することができる。
第八条
法第四十条の二第一項の規定により旧勘定及び新勘定の併合の日(法第三十六条第一項第一号但書の規定に該当する場合においては、法第十五条第一項乃至第三項の規定による認可の日)を以て終了する事業年度に関する定時総会は、他の法令又は定款の規定にかかはらず、当該日から三箇月以内に、これを招集しなければならない。
○2
特別経理株式会社の取締役又は監査役の任期は、商法第二百五十六条第三項
(同法第二百八十条
において準用する場合を含む。)の規定によりこれを伸長することができる場合においては、前項の定時総会の終結に至るまで、これを伸長する。
第九条
法第五十一条の規定により、日本銀行が取扱ふ事務に要する費用は、日本銀行の負担とする。
第十条
法第三十条第一項の規定により効力を失つた強制執行、仮差押え、仮処分又は担保権の実行としての競売の費用は、特別経理株式会社の負担とする。ただし、当該手続の程度において、権利の実行に必要でなかつたものは、この限りでない。
第十一条
第三条の規定は、法第五十四条の三の規定による債務の承継の場合に、これを準用する。但し、この場合において「新勘定に所属する資産」とあるのは「当該会社の資産」と、「新勘定に所属する債務」とあるのは「当該会社の債務」と読み替へるものとする。
第十二条
特別経理株式会社は、資本の負担すべき特別損失の額について、左の各号に定めるところにより各株式(指定時後あらたに発行した株式を除く。)につき、株主の負担額を計算しなければならない。
一
払ひ込みたる株金額(以下払込額といふ。)の異なる株式がある場合において、資本の負担すべき特別損失の額が指定時現在の資本金の額の十分の九に相当する額を超えるとき又は各株式の払込額が均一であるとき(資本の負担すべき特別損失の額÷株式の総数)
二
払込額の異なる株式がある場合において、資本の負担すべき特別損失の額が払ひ込みたる株金総額の十分の九に相当する額以下であるとき資本の負担すべき特別損失の額×(当該株式一株の払込額÷払ひ込みたる株金総額)
三
払込額の異なる株式がある場合において、資本の負担すべき特別損失の額が払ひ込みたる株金総額の十分の九に相当する額を超え、指定時現在の資本金の額の十分の九に相当する額以下であるときイ 株金の全額の払込ある株式については当該株式一株の払込額×(9÷10)
ロ 未払込株金を有する株式(以下未払込株式といふ。)については当該株式一株の払込額×(9÷10)+〔{資本の負担すべき特別損失の額−払ひ込みたる株金総額×(9÷10)}÷未払込株式の総数〕
○2
前項第三号ロの規定により株主の負担額として計算した額が株式の金額の十分の九を超える株式については、その十分の九を負担額として計算する。この場合において各株式ごとの超過額を合計し、その総額を同号ロの規定により株主の負担額として計算した額が株式の金額の十分の九に満たない株式の総数で除した額を当該株式の同号ロの規定による負担額に加算した額をその負担額として計算しなければならない。この場合において株主の負担額が株式の金額の十分の九を超えるに至つたときも同様に計算する。その以後においても同様とする。
第十三条
特別経理株式会社は、その発行する未払込株式(指定時後あらたに発行した株式を除く。)のうちでその払込額の十分の九に相当する額が、前条の規定により各株式につき株主の負担額として計算した額に満たないものがあるときは、その株式につき、その差額に相当する額以上の未払込株金の払込を催告しなければならない。但し、資本の負担すべき特別損失の額が、指定時現在の資本金の額の十分の九に相当する額を超える場合においては、左の算式により計算した額以上の未払込株金の払込を催告しなければならない。当該株式一株の株主の負担額−当該株式一株の払込額×(当該株式一株の株主の負担額÷当該株式一株の金額)
第十四条
特別経理株式会社は、命令の定めるところにより、第十二条の規定による株主の負担額、前条の規定による未払込株金の払込催告額及び第三十条第二項の規定による株金減少額を明かならしめる書類を作成し、特別管理人の承認を受けなければならない。
○2
特別経理株式会社は、命令の定めるところにより、遅滞なく前項の規定による承認を受けた書類を公告するとともに指定時において株主として株主名簿に記載された者に提出し、且つその書類を本店及び支店に備え置き、利害関係人の閲覧に供しなければならない。
第十五条
資本の減少を行はなければならない特別経理株式会社は、法第十五条第一項乃至第三項(法第二十条第二項、法第二十一条第二項及び法第三十五条第四項において準用する場合を含む。第十六条第一項の場合を除くの外以下同じ。)の認可を受けた後法第十八条(法第二十条第二項及び法第二十一条第二項において準用する場合を含む。以下同じ。)又は法第三十五条の四の規定による公告とともに当該特別経理株式会社の株主及び株主名簿に記載された質権者は資本の減少に係る株券を一定期間内に当該特別経理株式会社に提出すべき旨の公告をしなければならない。
○2
前項の一定期間は、同項の公告の日から一箇月以上二箇月の範囲内で、これを定めなければならない。
第十五条の二
法第三十四条第四項(法第三十五条第四項において準用する場合を含む。以下同じ。)の規定により株式の併合をする特別経理株式会社は、その旨並びに当該特別経理株式会社の株主及び株主名簿に記載された質権者は株券を一定期間内に当該特別経理株式会社に提出すべき旨の公告をしなければならない。
○2
前項の一定期間は、同項の公告の日から一箇月以上二箇月の範囲内で、これを定めなければならない。
○3
法第三十四条第二項(法第三十五条第四項において準用する場合を含む。以下同じ。)の規定による資本の減少とともに法第三十四条第四項の規定による株式の併合をしようとする特別経理株式会社は、法第十八条又は法第三十五条の四の規定による公告とともに、第一項の規定による公告をしなければならない。
○4
前項の場合においては、第一項の一定期間は、前条第一項の一定期間と同一に、これを定めなければならない。
第十六条
第十三条の規定により未払込株金の払込を催告しなければならない特別経理株式会社(以下未払込株金徴収会社といふ。)は、同条の規定による催告により未払込株金の払込をなさしめる株式について、法第十五条第一項乃至第三項(法第二十条第二項及び法第二十一条第二項において準用する場合を含む。)の認可を受けた後遅滞なく、指定時において株主として株主名簿に記載された者(その者について相続若しくは包括遺贈又は分割若しくは合併のあつた場合においてはその一般承継人とする。以下指定時株主といふ。)以外の株主(指定時株主で当該株式を指定時後譲り受けた株主を含む。)に対し期日を定め決定整備計画に定める当該株式の未払込株金の払込をなすべき旨を催告し、同時に、その株主及びその株主の株式につき株主名簿に記載のある質権者に対し株主がその払込をしないときは、その催告は効力を失ひ、その株主はその株式につき株主の権利を失ふ旨を通知しなければならない。
○2
前項の期日は、法第十八条の規定による公告の日から一箇月後二箇月内に、これを定めなければならない。
○3
第一項の規定による催告を受けた者が同項の規定による払込をしないときは、その催告は効力を失ひ、その株主はその株式につき株主の権利を失ひ、その株式は指定時株主(指定時において信託株式であつた株式については、その際その株式につき信託の委託者であつた者とする。以下同じ。)に帰属する。
○4
前項の規定により株式が帰属すべき者が存しないときは、その株式は、未払込株金徴収会社に帰属する。
○5
特別経理会社(措置法第三十九条の規定により、同法の規定を準用する者を含む。以下同じ。)である株主が旧勘定に所属する株式につき第一項又は金融機関再建整備法第二十五条の四第一項
の規定による催告に基き払込をなし、又は払込をしないときは、特別管理人の承認を受けなければならない。
○6
措置法第十四条第三項の規定は、前項の規定による払込の場合に、これを準用する。
第十七条
未払込株金徴収会社は、前条第一項の期日後二週間以内に、決定整備計画の定めるところにより、払込期日を定め、指定時株主(前条第一項の規定による払込のあつた株式の指定時株主及び外国に住所を有する指定時株主を除く。)に対し、未払込株金の払込をなすべき旨を催告しなければならない。
○2
前項の場合において、前条第三項の規定により株式の帰属した指定時株主(指定時株主で当該株式を指定時後譲り受けた株主を除く。)に対する催告は、指定時においてその株式の株主として株主名簿に記載された者に対し、株主名簿に記載されたその者の住所に宛てて、これをなせば足りる。但し、指定時株主がその氏名及び住所を会社に通知したときはこの限りでない。
○3
第一項の払込期日は、前条第一項の期日後二週間を経過した日から一箇月後二箇月内に、これを定めなければならない。
○4
金融機関又は特別経理会社が、その所有する株式について、第一項の規定により未払込株金の払込をなすべき旨の催告を受けた場合において、同項の払込催告が当該金融機関(金融機関が信託の委託者である場合における信託株式については委託者たる金融機関とする。本条において以下同じ。)の新勘定及び旧勘定の区分の消滅の日又は当該特別経理会社(特別経理会社が信託の委託者である場合における信託株式については委託者たる特別経理会社とする。本条において以下同じ。)の旧勘定及び新勘定の併合(旧勘定のみを設ける特別経理会社については、旧勘定の廃止とする。以下同じ。)の日以前なるときは、当該株主に対する払込期日は、第一項の規定にかかはらず、当該金融機関の新勘定及び旧勘定の区分の消滅の日又は当該特別経理会社の旧勘定及び新勘定の併合の日後一箇月を経過した日とする。
第十八条
前条第一項の規定により催告があつた株式が、左の各号の一に該当するものである場合において、その株主が払込期日までに払込をしないときは、その株主は、同項の催告に係る株金払込の義務を免れるとともに、払込をしないその株式につき株主の権利を失ふ。
一
法人(国を含み、民法第千五十一条
の法人を除く。以下同じ。)以外の者の所有する株式
二
閉鎖機関令第一条
に規定する閉鎖機関(以下閉鎖機関といふ。)の所有する株式
三
信託株式で前二号に掲げる者がその信託の委託者であるもの
第十九条
第十七条第一項の規定により催告があつた株式が前条各号に掲げるもの以外のものである場合において、その株主が払込期日までに払込みをしないときは、未払込株金徴収会社は、決定整備計画の定めるところによりその株主が未払込株金の払込みをしない株式を、換価のため競売し、又は他の方法により売却することができる。この場合において、損害賠償及び定款をもつて定めた違約金の請求をなすことは、これを妨げない。
○2
商法第二百十四条第二項
及び第三項
の規定(譲渡人の責任に関する部分を除く。)は、前項の場合にこれを準用する。
○3
商法第三百九十二条及び第三百九十三条並びに非訟事件手続法第百三十五条ノ二十四及び第百三十五条ノ四十三乃至第百三十五条ノ四十六の規定は、未払込株金徴収会社が第一項の規定の適用を受ける法人に株金の払込をなさしめる場合に、これを準用する。
○4
第一項の規定により競売をなすもその結果を得られなかつたとき、又は同項の規定により売却ができなかつたときは、未払込株金徴収会社は、同項の株主に対しその旨を通知することができる。
○5
前項の通知があつたときは、当該株主はその権利を失ふ。この場合においては、商法第二百十四条第三項
の規定(譲渡人の責任に関する部分を除く。)を準用する。
○6
第十七条第二項の規定は、第四項の通知について、これを準用する。
第二十条
第十七条第一項の規定により催告を受けた株主(信託株式についてはその委託者とする。)が特別経理会社である場合において、当該特別経理会社に対し法第十九条の規定の適用又は準用があるときは、その催告のあつた株式を、株式を発行した者、株式の種類及び株式の払込額の異なるごとに区分し、当該区分に属する株式の数に決定整備計画に定める法第六条第一項第十号の割合を乗じて得た数(一未満の端数があるときはその端数は切り上げる。)の当該区分に属する株式については、その株主は当該特別経理会社の旧勘定及び新勘定の併合の日(法第三十六条第一項第一号及び同号の規定を準用する場合の特別経理会社が旧勘定及び新勘定の併合の日後整備計画の全部の実行を終る日前にその催告を受けた場合においては払込期日とする。)において、第十七条第一項の催告に係る株金払込の義務を免れるとともに、株主の権利を失ふ。この場合においては、同項の規定による催告のあつたその他の株式に係る株金払込請求権は、法第十九条第一項の規定にかかはらず消滅しない。
○2
前項の場合において、当該株主がいづれの株式について株主の権利を失ふかを確定するために必要な事項は、主務大臣がこれを定める。
第二十一条
第十七条第一項の規定により催告を受けた株主(信託株式についてはその委託者とする。)が金融機関である場合において、当該金融機関に対し金融機関再建整備法第二十四条第一項第七号
又は第九号
の規定の適用があるときは、その催告のあつた株式を、株式を発行した者、株式の種類及び株式の払込額の異なるものごとに区分しその区分の異なるごとに、同項第七号
又は第九号
の規定により確定損の整理負担額を計算し、その計算額を当該区分に属する株式の一株当り払込催告額で除して得た数(一未満の端数があるときは、その端数は切り上げる。)の当該区分に属する株式について、その株主は当該金融機関の新勘定及び旧勘定の区分の消滅の日において第十七条第一項の催告に係る株金払込の義務を免れるとともに、株主の権利を失ふ。
○2
前条第二項の規定は、前項の場合に、これを準用する。
第二十一条の二
金融機関再建整備法第二十五条の五第一項
の規定による催告のあつた株式のうち、同法第二十五条の九第一項
の規定により特別経理会社が株金払込の義務を免れるとともに株主の権利を失つた株式以外の株式に係る株金払込請求権は、法第十九条第一項の規定にかかはらず消滅しない。
第二十二条
金融機関(金融機関が信託の委託者である場合における信託株式については受託者とする。)が、当該金融機関(金融機関が信託の委託者である場合における信託株式については委託者たる金融機関とする。本条において以下同じ。)の新勘定及び旧勘定の区分の消滅後に第十七条第一項の規定により催告を受けた場合において、当該金融機関に対し前に金融機関再建整備法第二十四条第一項第七号
又は第九号
の規定の適用があつたときは、若し当該催告が当該金融機関の新勘定及び旧勘定の区分消滅前にあつたならば第二十一条第一項の規定によりその株主が株主の権利を失ふべきであつた株式について、その株主は、その払込期日において第十七条第一項の催告に係る株金払込の義務を免れるとともに株主の権利を失ふ。
○2
第二十条第二項の規定は、前項の場合に、これを準用する。
第二十三条
特別経理会社(特別経理会社が信託の委託者である場合における信託株式については受託者とする。)が、当該特別経理会社(特別経理会社が信託の委託者である場合における信託株式については委託者たる特別経理会社とする。本条において以下同じ。)の旧勘定及び新勘定の併合の日(法第三十六条第一項第一号及び同号の規定を準用する場合の特別経理会社については法第四十一条第一項の規定による決定整備計画の実行を終つた日とする。本条において以下同じ。)後に第十七条第一項の規定により催告を受けた場合において、当該特別経理会社に対し前に法第十九条の規定の適用又は準用があつたときは、若し当該催告がその旧勘定及び新勘定の併合の日前にあつたならば第二十条第一項の規定により当該特別経理会社が株主の権利を失ふべきであつた株式について、その株主は、その払込期日において第十七条第一項の催告に係る株金払込の義務を免れるとともに株主の権利を失ふ。
○2
第二十条第二項の規定は、前項の場合に、これを準用する。
第二十四条
第十八条乃至前条の規定により株主がその権利を失つた株式は、株主がその権利を失つた日において、未払込株金徴収会社に帰属する。
○2
閉鎖機関が、第十八条の規定により株主の権利を失つた株式について主務大臣の指定する日までに第十七条第一項の規定による当該株式の払込催告額に相当する金額を提供してこれを買ひ受けることを申し出たときは、未払込株金徴収会社は、その金額を以て、当該閉鎖機関にその株式を譲渡しなければならない。
○3
第一項又は第十六条第四項の規定により未払込株金徴収会社に帰属した株式は、前項に規定する株式については同項の規定により主務大臣の指定する日後、その他の株式については当該特別経理株式会社に帰属した日後の相当の時期に、決定整備計画に定めるところにより、換価のため競売その他の方法によりこれを処分しなければならない。第十六条第三項の規定により未払込株金徴収会社に帰属した株式があつた場合においてその株式についてもまた同様とする。
○4
第二項に規定する株式については、同項に定める場合を除くの外同項の規定により主務大臣の指定する日以前になした処分は効力を有しない。
第二十五条
閉鎖機関が第十八条の規定により、株主の権利を失つた場合においては、商法第二百四十一条第二項
の規定にかかはらず未払込株金徴収会社は、前条第一項の規定により、当該特別経理株式会社に帰属した株式については同条第二項の規定による主務大臣の指定する日(同日以前に閉鎖機関に譲渡された株式については、その譲渡のあつた日)まで議決権を有する。
○2
前項の場合においては、未払込株金徴収会社は、主務大臣の定めるところにより、同項の株式についてその議決権の行使を閉鎖機関令第九条
の規定による特殊清算人に委任しなければならない。この場合においては、当該特殊清算人はその委任を受けることを拒むことができない。
第二十六条
第十六条第三項の規定により株主の権利を失つた株主が、その権利を失つた株式を有償で取得した者である場合においては、当該株主は、当該株式の譲渡人(その者が指定時において信託株式の受託者であつた場合にはその委託者とする。本条において以下同じ。)に対し、当該株式の対価に相当する金額の返還を請求することができる。但し、当該株式を有償で取得した者が左の各号の一に該当する場合はこの限りでない。
一
法人
二
証券取引法第二条第九項に規定する証券業者
三
当該株式について第十三条の規定による未払込株金の払込の催告のあるべきことを知ることができる地位にある者で命令で定める者
○2
前項の場合において譲渡人が当該株式の対価に相当する金額を返還したときは、その者は当該株式を有償で取得した者である場合に限り当該株式の譲渡人に対しその者が請求に応じて返還した金額の範囲内において当該株式を取得した場合における対価に相当する金額の返還を請求することができる。但し、指定時株主又は前項但書各号の一に該当する者であつて昭和二十二年五月十三日以後当該株式を譲渡したものは、その対価の返還を請求することができない。
○3
第一項の規定による請求権は、その株主の権利を失つた日から、前項の規定による請求権は、請求に応じて返還をした日から、一年間これを行はないときは時効に因つて消滅する。
第二十七条
第十六条第一項又は第十七条第一項の規定により払込の催告を受けた株主は、商法第二百条第二項
の規定にかかはらず株金の払込につき、相殺をなすことができる。
○2
第十六条第一項又は第十七条第一項の規定により払込の催告を受けた株主が未払込株金徴収会社に対する債権で担保権の目的たるもの以外のものを有するときは、その弁済期前においても、未払込株金の払込につきその債権を以て相殺をなすことができる。この場合においては、当該債権及び未払込株金の払込請求権は相殺の意思表示をなしたときにおいて、その対等額につき消滅する。
○3
商法第百二十五条第二項
及び第三項
の規定は、前項の場合に、これを準用する。
○4
未払込株金の払込請求権その他主務大臣の指定する債権は第一項及び第二項の規定にかかわらずこれを以て株金払込につき相殺をなすことができない。
○5
第一項及び第二項の規定により相殺した債権に係る債務が未払込株金徴収会社の新勘定に所属する債務であるときは未払込株金徴収会社は、相殺した債権の額と同じ金額を旧勘定の貸借対照表の資産の部の未整理受取勘定に計上した金額及び新勘定の貸借対照表の負債の部の未整理支払勘定に計上した金額に夫々加算しなければならない。
第二十八条
未払込株金徴収会社の株主は、株金の払込に代へ当該未払込株金徴収会社に、国債、地方債その他主務大臣の指定する有価証券を交付することができる。この場合においては、その交付は未払込株金の払込と同一の効力を有する。
○2
前項の場合における国債、地方債その他有価証券の評価額は、主務大臣の定めるところによる。
第二十九条
第十三条の規定による催告によりなす未払込株金の払込の場合に関しては、商法第二百十三条
乃至第二百二十条
の規定は、これを適用しない。
第三十条
法第三十四条第二項の規定により資本を減少しなければならない額(以下資本減少額といふ。)は、左の各号に掲げる額の合計額とする。
一
資本の負担すべき特別損失の額
二
未払込株金の総額、但し決定整備計画に定めるところにより未払込株金の払込の催告をなす場合はその催告額の総額を控除した額
○2
前項の規定により資本を減少する場合において、各株式(指定時後あらたに発行した株式を除く。)の株金減少額は第十二条の規定により各株式につき計算された各株主の負担額とする。但し、未払込株式については、未払込株金額より決定整備計画の定めるところにより未払込株金の払込を催告しなければならない金額を控除した額を当該負担額に加算した額とする。
○3
前項の規定により各株式につき株金減少額を計算する場合において株金減少後の各株式につき一円未満の端数を生ずるときは、前二項の規定にかかはらず、その端数が五十銭以上のものについては一円に切り上げ各株式の株金減少額を計算し、その切り上げた額に当該各株式の総数を乗じて得た額に相当する額を第一項第一号及び第二号の合計額から控除した額を資本減少額とし、その端数が五十銭未満のものについては、これを切り捨て各株式の株金減少額を計算し、その端数に当該株式の総株数を乗じて得た額を同項第一号及び第二号の合計額に加算した額を資本減少額とすることができる。
○4
第一項の資本の減少については、商法第三百七十六条第二項
及び第三項
の規定はこれを適用しない。
第三十一条
法第三十四条第二項の規定による資本の減少又は同条第四項の規定による株式の併合がその効力を生ずる日は、夫々第十五条第一項又は第十五条の二第一項の一定期間満了の日とする。但し、未払込株金徴収会社について、第十七条第一項の払込期日が、又は資本減少額が資本の総額に相当する特別経理株式会社であつて決定整備計画の定めるところによりその発行する株式の総数を増加し、新株を発行するものについて、その最初に発行する株式の払込期日が、当該一定期間満了の日より遅いときは、その最も遅い日とする。
○2
法第三十四条第二項の規定による資本の減少又は同条第四項の規定による株式の併合があつた場合において交付すべき新株券は、第十五条第一項又は第十五条の二第一項の規定により提出のあつた株券につき、これに記載された一株の金額その他の事項に所要の変更を加へたものを以て、これに充てるものとする。
第三十一条の二
法第三十四条第二項の規定による資本の減少又は同条第四項の規定による株式の併合のあつた場合において、旧株券を提出することのできない者があるときは、特別経理株式会社は、その者の請求によつて、利害関係人に対して、異議があれば、一定の期間内にこれを述べるべき旨を公告し、その期間経過後において新株券を交付することができる。但し、その期間は、一箇月以上二箇月の範囲内で、これを定めなければならない。
○2
前項の公告の費用は、請求者の負担とする。
第三十一条の三
法第三十四条第四項の規定による併合に適しない数の株式があるときは、その併合に適しない部分について、新たに発行した株式を換価のため競売その他の方法により処分し、かつ、株数に応じてその代金を従前の株主に交付しなければならない。
○2
前条の規定は、前項の場合に、これを準用する。
○3
前二項の規定は、無記名式の株券であつて第十五条の二第一項の規定による提出のなかつたものに、これを準用する。
第三十二条
特別経理株式会社が法第三十四条第二項の規定により資本を減少した場合において、金額の異なる株式あるときは、各株主は商法第二百四十一条第一項
本文の規定にかかはらず、株式の最低金額ごとに一個の議決権を有するものとする。
第三十三条
特別経理株式会社が、決定整備計画に定のある事項のうち株主総会の決議を要すべき事項について登記を申請する場合においては、その登記の申請書には、決定整備計画書又はその認証ある謄本若しくは抄本を添附しなければならない。第二会社の設立登記の申請書についても、同様である。
第三十四条
特別経理株式会社が、決定整備計画の定めるところにより合併若しくは資本の減少をし、又は法第三十五条第四項において準用する法第三十四条第二項の規定により資本の減少をする場合においては、当該合併による解散、変更若しくは設立又は資本減少の登記の申請書には、法第十八条の二第三項において準用する同条第一項の規定により異議を述べた債権者があつたときは、これに対し、弁済し、若しくは担保を供し、若しくは信託をしたこと又は合併若しくは資本の減少をしてもその債権者を害するおそれがないことを証する書面を添付しなければならない。法第十条第一項の規定により債務を承継する第二会社の設立の登記又は新株発行による変更の登記の申請書についても、同様である。
第三十四条の二
法第四十二条の三第一項に規定する会社(以下本条乃至第三十四条の四において単に会社という。)は、決定整備計画の実行を終つた日において、政府以外の旧債権者であつて当該会社の業務を執行する役員でない者のうちその負担した特別損失の額の最も多額な者から順次同条同項に規定する代表者(以下旧債権者代表者という。)を選任しなければならない。但し、負担した特別損失の額が同額の場合においては、くじによる。
○2
前項の場合において、同項の規定により旧債権者代表者となるべき者が法人であるときは、当該法人が指名する当該法人の代表者を旧債権者代表者として選任するものとする。
○3
会社は、旧債権者代表者を選任しようとするときは、その旨を当該選任しようとする者に通知しなければならない。
○4
前項の通知を受けた者は、その通知を受けた日から二週間以内に、会社に対して旧債権者代表者に就任するか否かを通知しなければならない。
○5
第一項の規定により旧債権者代表者として選任されるべき者が就任せず、又は前項の期間内に同項の通知をしない場合には、会社は、第一項の規定によつて次の順位を有する者を旧債権者代表者として選任しなければならない。
第三十四条の三
旧債権者代表者は、その職務を行うについて、代理人を選任することができる。この場合においては、旧債権者代表者は、代理人の住所及び氏名を会社に対して通知しなければならない。
○2
旧債権者代表者は、病気その他正当な事由によりその職務を遂行することができないときは、その任務を辞することができる。この場合においては、遅滞なく、その旨を会社に対して通知しなければならない。
○3
会社は、旧債権者代表者がその任務を辞し、死亡し、又は当該会社の業務を執行する役員となつたときは、遅滞なく、前条の規定に準じ、欠員となつた旧債権者代表者を補充しなければならない。
第三十四条の四
会社は、旧債権者代表者がその職務の執行のために要した費用を旧債権者代表者に支払わなければならない。
○2
会社は、前項の費用及び旧債権者代表者に支払つた報酬を仮勘定の資産の部に計上することができる。
第三十五条
破産手続中の特別経理株式会社については、法の規定は、第三十七条、第四十二条、第五十四条及び第六十条第四号の規定を除くの外、これを適用しない。
第三十六条
この勅令における主務大臣は、法第五十五条の二に規定する主務大臣とする。
附 則 抄
○1
この勅令は、法施行の日から、これを施行する。
附 則 (昭和二二年五月二四日政令第七四号) 抄
○1
この政令は、公布の日から、これを施行する。
附 則 (昭和二二年六月二五日政令第一〇四号)
この政令は、公布の日から、これを施行する。
附 則 (昭和二三年四月九日政令第八一号)
この政令は、公布の日から、これを施行する。
附 則 (昭和二三年八月二一日政令第二五三号)
この政令は、公布の日から、これを施行する。
附 則 (昭和二四年五月一〇日政令第九二号)
この政令は、公布の日から施行する。
附 則 (昭和二六年六月三〇日政令第二四八号)
1
この政令は、昭和二十六年七月一日から施行する。
2
この政令施行前に整備計画の認可を受けた特別経理会社の整備計画に定める事項の実行については、企業再建整備法施行令第三条の二及び第三十一条の改正規定にかかわらず、なお従前の例による。
附 則 (昭和二九年六月一五日政令第一四二号) 抄
1
この政令は、公布の日から施行する。
附 則 (昭和五五年八月三〇日政令第二三一号) 抄
(施行期日)
1
この政令は、民事執行法の施行の日(昭和五十五年十月一日)から施行する。
附 則 (平成九年九月一九日政令第二八八号)
この政令は、商法等の一部を改正する法律の施行の日(平成九年十月一日)から施行する。
附 則 (平成一二年六月七日政令第三〇七号) 抄
(施行期日)
第一条
この政令は、平成十三年一月六日から施行する。
附 則 (平成一七年二月一八日政令第二四号) 抄
(施行期日)
第一条
この政令は、不動産登記法の施行の日(平成十七年三月七日)から施行する。
/*
Copyright 2014-2015 Glen Joseph Fernandes
([email protected])
Distributed under the Boost Software License, Version 1.0.
(http://www.boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_ALIGN_HPP
#define BOOST_ALIGN_HPP
#include <boost/align/align.hpp>
#include <boost/align/align_down.hpp>
#include <boost/align/align_up.hpp>
#include <boost/align/aligned_alloc.hpp>
#include <boost/align/aligned_allocator.hpp>
#include <boost/align/aligned_allocator_adaptor.hpp>
#include <boost/align/aligned_delete.hpp>
#include <boost/align/alignment_of.hpp>
#include <boost/align/assume_aligned.hpp>
#include <boost/align/is_aligned.hpp>
#endif
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1020"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "607FACCF1AFB9204008FA782"
BuildableName = "SearchTextField_Example.app"
BlueprintName = "SearchTextField_Example"
ReferencedContainer = "container:SearchTextField.xcodeproj">
</BuildableReference>
</BuildActionEntry>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "NO"
buildForArchiving = "NO"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "607FACE41AFB9204008FA782"
BuildableName = "SearchTextField_Tests.xctest"
BlueprintName = "SearchTextField_Tests"
ReferencedContainer = "container:SearchTextField.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
<TestableReference
skipped = "NO">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "607FACE41AFB9204008FA782"
BuildableName = "SearchTextField_Tests.xctest"
BlueprintName = "SearchTextField_Tests"
ReferencedContainer = "container:SearchTextField.xcodeproj">
</BuildableReference>
</TestableReference>
</Testables>
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "607FACCF1AFB9204008FA782"
BuildableName = "SearchTextField_Example.app"
BlueprintName = "SearchTextField_Example"
ReferencedContainer = "container:SearchTextField.xcodeproj">
</BuildableReference>
</MacroExpansion>
<AdditionalOptions>
</AdditionalOptions>
</TestAction>
<LaunchAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "607FACCF1AFB9204008FA782"
BuildableName = "SearchTextField_Example.app"
BlueprintName = "SearchTextField_Example"
ReferencedContainer = "container:SearchTextField.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
<AdditionalOptions>
</AdditionalOptions>
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<BuildableProductRunnable
runnableDebuggingMode = "0">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "607FACCF1AFB9204008FA782"
BuildableName = "SearchTextField_Example.app"
BlueprintName = "SearchTextField_Example"
ReferencedContainer = "container:SearchTextField.xcodeproj">
</BuildableReference>
</BuildableProductRunnable>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>
eval_ddad
=========
.. code:: yaml
model:
name: 'SelfSupModel'
depth_net:
name: 'PackNet01'
version: '1A'
pose_net:
name: 'PoseNet'
version: ''
params:
crop: ''
min_depth: 0.0
max_depth: 200.0
datasets:
augmentation:
image_shape: (384, 640)
test:
dataset: ['DGP']
path: ['/data/datasets/DDAD/ddad.json']
split: ['val']
depth_type: ['lidar']
cameras: ['camera_01']
save:
folder: '/data/save'
viz: True
npz: True
/*
Copyright 2020 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package recorder_vent implements an observer.EventLog backed by Kubernetes
// Events using an event recorder.
package recorder_vent
'use strict';
var NativeElement$ReactNative = require("../elements/NativeElement.bs.js");
/* NativeElement-ReactNative Not a pure module */
// Boost.Units - A C++ library for zero-overhead dimensional analysis and
// unit/quantity manipulation and conversion
//
// Copyright (C) 2003-2008 Matthias Christian Schabel
// Copyright (C) 2008 Steven Watanabe
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_UNITS_SI_ABSORBED_DOSE_HPP
#define BOOST_UNITS_SI_ABSORBED_DOSE_HPP
#include <boost/units/systems/si/base.hpp>
#include <boost/units/physical_dimensions/absorbed_dose.hpp>
namespace boost {
namespace units {
namespace si {
typedef unit<absorbed_dose_dimension,si::system> absorbed_dose;
BOOST_UNITS_STATIC_CONSTANT(gray,absorbed_dose);
BOOST_UNITS_STATIC_CONSTANT(grays,absorbed_dose);
} // namespace si
} // namespace units
} // namespace boost
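// Usage sketch (an added illustration, not part of the original header): with
// the quantity facilities from <boost/units/quantity.hpp> also included, the
// constants defined above can be used to build typed absorbed-dose values, e.g.
//   boost::units::quantity<boost::units::si::absorbed_dose> dose = 2.0 * boost::units::si::grays;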
#endif // BOOST_UNITS_SI_ABSORBED_DOSE_HPP
/*******************************************************************************
* You may amend and distribute as you like, but don't remove this header!
*
* EPPlus provides server-side generation of Excel 2007/2010 spreadsheets.
* See http://www.codeplex.com/EPPlus for details.
*
* Copyright (C) 2011 Jan Källman
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The GNU Lesser General Public License can be viewed at http://www.opensource.org/licenses/lgpl-license.php
 * If you are unfamiliar with this license or have questions about it, see the FAQ at http://www.gnu.org/licenses/gpl-faq.html
*
* All code and executables are provided "as is" with no warranty either express or implied.
* The author accepts no liability for any damage or loss of business that this product may cause.
*
* Code change notes:
*
* Author Change Date
* ******************************************************************************
* Eyal Seagull Added 2012-04-03
*******************************************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Drawing;
using System.Xml;
using OfficeOpenXml.ConditionalFormatting.Contracts;
namespace OfficeOpenXml.ConditionalFormatting
{
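  // Usage sketch (an added illustration, not part of the original source): rules
  // of this type are normally created through a worksheet's conditional
  // formatting collection rather than by calling the constructors below directly,
  // e.g. something along the lines of
  //   worksheet.ConditionalFormatting.AddNotContainsErrors(new ExcelAddress("A1:A10"));
  // The helper-method name is an assumption here and may differ between EPPlus versions.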
/// <summary>
/// ExcelConditionalFormattingNotContainsErrors
/// </summary>
public class ExcelConditionalFormattingNotContainsErrors
: ExcelConditionalFormattingRule,
IExcelConditionalFormattingNotContainsErrors
{
/****************************************************************************************/
#region Constructors
/// <summary>
///
/// </summary>
/// <param name="address"></param>
/// <param name="priority"></param>
/// <param name="worksheet"></param>
/// <param name="itemElementNode"></param>
/// <param name="namespaceManager"></param>
internal ExcelConditionalFormattingNotContainsErrors(
ExcelAddress address,
int priority,
ExcelWorksheet worksheet,
XmlNode itemElementNode,
XmlNamespaceManager namespaceManager)
: base(
eExcelConditionalFormattingRuleType.NotContainsErrors,
address,
priority,
worksheet,
itemElementNode,
(namespaceManager == null) ? worksheet.NameSpaceManager : namespaceManager)
{
if (itemElementNode==null) //Set default values and create attributes if needed
{
Formula = string.Format(
"NOT(ISERROR({0}))",
Address.Start.Address);
}
}
/// <summary>
///
/// </summary>
/// <param name="priority"></param>
/// <param name="address"></param>
/// <param name="worksheet"></param>
/// <param name="itemElementNode"></param>
internal ExcelConditionalFormattingNotContainsErrors(
ExcelAddress address,
int priority,
ExcelWorksheet worksheet,
XmlNode itemElementNode)
: this(
address,
priority,
worksheet,
itemElementNode,
null)
{
}
/// <summary>
///
/// </summary>
/// <param name="priority"></param>
/// <param name="address"></param>
/// <param name="worksheet"></param>
internal ExcelConditionalFormattingNotContainsErrors(
ExcelAddress address,
int priority,
ExcelWorksheet worksheet)
: this(
address,
priority,
worksheet,
null,
null)
{
}
#endregion Constructors
/****************************************************************************************/
}
}
Configuration
=============
Your Hoodie back-end can be configured using default options that are part of
your repository as well as using hidden files, CLI arguments and environment variables.
Options
~~~~~~~
Here is a list of all available options
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| Option | Default value | CLI argument | ENV variable | description |
+=================+==================================+==========================+==========================+=================================================================================================================================================================+
| address | ``'127.0.0.1'`` | ``--address`` | ``hoodie_address`` | Address to which Hoodie binds |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| data | ``'.hoodie'`` | ``--data`` | ``hoodie_data`` | Data path |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| dbUrl | – | ``--dbUrl`` | ``hoodie_dbUrl`` | If provided, uses external CouchDB. Include credentials in `dbUrl`, or use `dbUrlUsername` and `dbUrlPassword`. Sets ``dbAdapter`` to ``pouchdb-adapter-http`` |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| dbUrlUsername | – | ``dbUrlUsername`` | ``hoodie_dbUrlUsername`` | If ``dbUrl`` is set, you can use ``dbUrlUsername`` to set the username to use when making requests to CouchDB |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| dbUrlPassword | – | ``dbUrlPassword`` | ``hoodie_dbUrlPassword`` | If ``dbUrl`` is set, you can use ``dbUrlPassword`` to set the password to use when making requests to CouchDB |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| dbAdapter | ``'pouchdb-adapter-fs'`` | ``--dbAdapter`` | ``hoodie_dbAdapter`` | Sets default `PouchDB adapter <https://pouchdb.com/adapters.html>` unless ``inMemory`` or ``dbUrl`` set |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| loglevel | ``'warn'`` | ``--loglevel`` | ``hoodie_loglevel`` | One of: silent, error, warn, http, info, verbose, silly |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| inMemory | ``false`` | ``-m``, ``--inMemory`` | ``hoodie_inMemory`` | Whether to start the PouchDB Server in memory. Sets ``dbAdapter`` to ``pouchdb-adapter-memory`` |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| port | ``8080`` | ``--port`` | ``hoodie_port`` | Port-number to run the Hoodie App on |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| public | ``'public'`` | ``--public`` | ``hoodie_public`` | path to static assets |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| url | - | ``--url`` | ``hoodie_url`` | Optional: external URL at which Hoodie Server is accessible (e.g. ``http://myhoodieapp.com``) |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| adminPassword | - | ``--adminPassword`` | ``hoodie_adminPassword`` | Password to login to Admin Dashboard. Login is not possible unless set |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
| name | ``package.json``'s name property | ``--name`` | ``hoodie_name`` | Name your application. |
+-----------------+----------------------------------+--------------------------+--------------------------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------+
Defaults
--------
Default options are set in your app’s ``package.json`` file, using the
``"hoodie"`` key. Here is an example with all available options and their
default values
.. code:: json
{
"hoodie": {
"address": "127.0.0.1",
"port": 8080,
"data": ".hoodie",
"public": "public",
"dbUrl": "",
"dbAdapter": "pouchdb-adapter-fs",
"inMemory": false,
"loglevel": "warn",
"url": "",
"adminPassword": "",
"name": "my-hoodie-app"
}
}
.hoodierc
~~~~~~~~~
The ``.hoodierc`` can be used to set configuration when running your Hoodie
backend in that folder. It should not be committed to your repository.
The content can be in JSON or INI format. See the `rc package on npm <https://www.npmjs.com/package/rc>`__
for more information.
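For example, a JSON ``.hoodierc`` overriding a few of the options listed above
could look like this (the values are purely illustrative):
.. code:: json
    {
      "port": 8090,
      "inMemory": true,
      "loglevel": "info"
    }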
CLI arguments and environment variables
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To pass CLI options when starting Hoodie, you have to separate them with ``--``, for example:
.. code:: bash
$ npm start -- --port=8090 --inMemory
All environment variables are prefixed with ``hoodie_``. So to set the port to
``8090`` and to start Hoodie in memory mode, you have to
- set the ``hoodie_port`` environment variable to ``8090``
- set the ``hoodie_inMemory`` environment variable to ``true``
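For example, both variables can be set inline for a single run from a Unix shell
(the values shown are illustrative):
.. code:: bash
    $ hoodie_port=8090 hoodie_inMemory=true npm start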
Hoodie CLI uses `rc <https://www.npmjs.com/package/rc>`__ for configuration,
so the same options can be set with environment variables and config files.
Environment variables are prefixed with ``hoodie_``.
The priority of configuration
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Command line arguments
2. Environment variables
3. ``.hoodierc`` files
4. Your app’s defaults form the ``"hoodie"`` key in ``"package.json"``
5. Hoodie’s default values as shown in table above
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Web.UI.WebControls.Table.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Web.UI.WebControls
{
public partial class Table : WebControl, System.Web.UI.IPostBackEventHandler
{
#region Methods and constructors
protected override void AddAttributesToRender (System.Web.UI.HtmlTextWriter writer)
{
}
protected override System.Web.UI.ControlCollection CreateControlCollection ()
{
return default(System.Web.UI.ControlCollection);
}
protected override Style CreateControlStyle ()
{
return default(Style);
}
protected virtual new void RaisePostBackEvent (string argument)
{
}
public override void RenderBeginTag (System.Web.UI.HtmlTextWriter writer)
{
}
protected internal override void RenderContents (System.Web.UI.HtmlTextWriter writer)
{
}
void System.Web.UI.IPostBackEventHandler.RaisePostBackEvent (string eventArgument)
{
}
public Table ()
{
}
#endregion
#region Properties and indexers
public virtual new string BackImageUrl
{
get
{
return default(string);
}
set
{
}
}
public virtual new string Caption
{
get
{
return default(string);
}
set
{
}
}
public virtual new TableCaptionAlign CaptionAlign
{
get
{
return default(TableCaptionAlign);
}
set
{
}
}
public virtual new int CellPadding
{
get
{
return default(int);
}
set
{
}
}
public virtual new int CellSpacing
{
get
{
return default(int);
}
set
{
}
}
public virtual new GridLines GridLines
{
get
{
return default(GridLines);
}
set
{
}
}
public virtual new HorizontalAlign HorizontalAlign
{
get
{
return default(HorizontalAlign);
}
set
{
}
}
public virtual new TableRowCollection Rows
{
get
{
Contract.Ensures(Contract.Result<TableRowCollection>() != null);
return default(TableRowCollection);
}
}
#endregion
}
}
// go run mksyscall.go -tags linux,ppc64le syscall_linux.go syscall_linux_ppc64x.go
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build linux,ppc64le
package unix
import (
"syscall"
"unsafe"
)
var _ syscall.Errno
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func fanotifyMark(fd int, flags uint, mask uint64, dirFd int, pathname *byte) (err error) {
_, _, e1 := Syscall6(SYS_FANOTIFY_MARK, uintptr(fd), uintptr(flags), uintptr(mask), uintptr(dirFd), uintptr(unsafe.Pointer(pathname)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fallocate(fd int, mode uint32, off int64, len int64) (err error) {
_, _, e1 := Syscall6(SYS_FALLOCATE, uintptr(fd), uintptr(mode), uintptr(off), uintptr(len), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) {
r0, _, e1 := Syscall6(SYS_TEE, uintptr(rfd), uintptr(wfd), uintptr(len), uintptr(flags), 0, 0)
n = int64(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func dup2(oldfd int, newfd int) (err error) {
_, _, e1 := Syscall(SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollCreate(size int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE, uintptr(size), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
var _p0 unsafe.Pointer
if len(events) > 0 {
_p0 = unsafe.Pointer(&events[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_EPOLL_WAIT, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fadvise(fd int, offset int64, length int64, advice int) (err error) {
_, _, e1 := Syscall6(SYS_FADVISE64, uintptr(fd), uintptr(offset), uintptr(length), uintptr(advice), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchown(fd int, uid int, gid int) (err error) {
_, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstat(fd int, stat *Stat_t) (err error) {
_, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_NEWFSTATAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatfs(fd int, buf *Statfs_t) (err error) {
_, _, e1 := Syscall(SYS_FSTATFS, uintptr(fd), uintptr(unsafe.Pointer(buf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ftruncate(fd int, length int64) (err error) {
_, _, e1 := Syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getegid() (egid int) {
r0, _ := RawSyscallNoError(SYS_GETEGID, 0, 0, 0)
egid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Geteuid() (euid int) {
r0, _ := RawSyscallNoError(SYS_GETEUID, 0, 0, 0)
euid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getgid() (gid int) {
r0, _ := RawSyscallNoError(SYS_GETGID, 0, 0, 0)
gid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getrlimit(resource int, rlim *Rlimit) (err error) {
_, _, e1 := RawSyscall(SYS_UGETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getuid() (uid int) {
r0, _ := RawSyscallNoError(SYS_GETUID, 0, 0, 0)
uid = int(r0)
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func InotifyInit() (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT, 0, 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ioperm(from int, num int, on int) (err error) {
_, _, e1 := Syscall(SYS_IOPERM, uintptr(from), uintptr(num), uintptr(on))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Iopl(level int) (err error) {
_, _, e1 := Syscall(SYS_IOPL, uintptr(level), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lchown(path string, uid int, gid int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Listen(s int, n int) (err error) {
_, _, e1 := Syscall(SYS_LISTEN, uintptr(s), uintptr(n), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Lstat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_LSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pause() (err error) {
_, _, e1 := Syscall(SYS_PAUSE, 0, 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pread(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PREAD64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_PWRITE64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Seek(fd int, offset int64, whence int) (off int64, err error) {
r0, _, e1 := Syscall(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(whence))
off = int64(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) {
r0, _, e1 := Syscall6(SYS__NEWSELECT, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
r0, _, e1 := Syscall6(SYS_SENDFILE, uintptr(outfd), uintptr(infd), uintptr(unsafe.Pointer(offset)), uintptr(count), 0, 0)
written = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setfsgid(gid int) (prev int, err error) {
r0, _, e1 := Syscall(SYS_SETFSGID, uintptr(gid), 0, 0)
prev = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setfsuid(uid int) (prev int, err error) {
r0, _, e1 := Syscall(SYS_SETFSUID, uintptr(uid), 0, 0)
prev = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setregid(rgid int, egid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setresgid(rgid int, egid int, sgid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESGID, uintptr(rgid), uintptr(egid), uintptr(sgid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setresuid(ruid int, euid int, suid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETRESUID, uintptr(ruid), uintptr(euid), uintptr(suid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setrlimit(resource int, rlim *Rlimit) (err error) {
_, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Setreuid(ruid int, euid int) (err error) {
_, _, e1 := RawSyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Shutdown(fd int, how int) (err error) {
_, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(fd), uintptr(how), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error) {
r0, _, e1 := Syscall6(SYS_SPLICE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags))
n = int64(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Stat(path string, stat *Stat_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Statfs(path string, buf *Statfs_t) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_STATFS, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Truncate(path string, length int64) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_TRUNCATE, uintptr(unsafe.Pointer(_p0)), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ustat(dev int, ubuf *Ustat_t) (err error) {
_, _, e1 := Syscall(SYS_USTAT, uintptr(dev), uintptr(unsafe.Pointer(ubuf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) {
r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func accept4(s int, rsa *RawSockaddrAny, addrlen *_Socklen, flags int) (fd int, err error) {
r0, _, e1 := Syscall6(SYS_ACCEPT4, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)), uintptr(flags), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
_, _, e1 := Syscall(SYS_BIND, uintptr(s), uintptr(addr), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) {
_, _, e1 := Syscall(SYS_CONNECT, uintptr(s), uintptr(addr), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getgroups(n int, list *_Gid_t) (nn int, err error) {
r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
nn = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setgroups(n int, list *_Gid_t) (err error) {
_, _, e1 := RawSyscall(SYS_SETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) {
_, _, e1 := Syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) {
_, _, e1 := Syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func socket(domain int, typ int, proto int) (fd int, err error) {
r0, _, e1 := RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto))
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) {
_, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
_, _, e1 := RawSyscall(SYS_GETPEERNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) {
_, _, e1 := RawSyscall(SYS_GETSOCKNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) {
var _p0 unsafe.Pointer
if len(p) > 0 {
_p0 = unsafe.Pointer(&p[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_RECVFROM, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen)))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
_, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) {
r0, _, e1 := Syscall(SYS_RECVMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) {
r0, _, e1 := Syscall(SYS_SENDMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func mmap(addr uintptr, length uintptr, prot int, flags int, fd int, offset int64) (xaddr uintptr, err error) {
r0, _, e1 := Syscall6(SYS_MMAP, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flags), uintptr(fd), uintptr(offset))
xaddr = uintptr(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func futimesat(dirfd int, path string, times *[2]Timeval) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_FUTIMESAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Gettimeofday(tv *Timeval) (err error) {
_, _, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Time(t *Time_t) (tt Time_t, err error) {
r0, _, e1 := RawSyscall(SYS_TIME, uintptr(unsafe.Pointer(t)), 0, 0)
tt = Time_t(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Utime(path string, buf *Utimbuf) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimes(path string, times *[2]Timeval) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func pipe(p *[2]_C_int) (err error) {
_, _, e1 := RawSyscall(SYS_PIPE, uintptr(unsafe.Pointer(p)), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func poll(fds *PollFd, nfds int, timeout int) (n int, err error) {
r0, _, e1 := Syscall(SYS_POLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(timeout))
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func syncFileRange2(fd int, flags int, off int64, n int64) (err error) {
_, _, e1 := Syscall6(SYS_SYNC_FILE_RANGE2, uintptr(fd), uintptr(flags), uintptr(off), uintptr(n), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func kexecFileLoad(kernelFd int, initrdFd int, cmdlineLen int, cmdline string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(cmdline)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_KEXEC_FILE_LOAD, uintptr(kernelFd), uintptr(initrdFd), uintptr(cmdlineLen), uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
# Copyright 2013 The Go Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
CLEANFILES+=maketables
maketables: maketables.go
	go build $^
tables: maketables
	./maketables > tables.go
	gofmt -w -s tables.go
# Build (but do not run) maketables during testing,
# just to make sure it still compiles.
testshort: maketables
#!/usr/bin/env bash
StatusContinue=100 # RFC 7231, 6.2.1
StatusSwitchingProtocols=101 # RFC 7231, 6.2.2
StatusProcessing=102 # RFC 2518, 10.1
StatusEarlyHints=103 # RFC 8297
StatusOK=200 # RFC 7231, 6.3.1
StatusCreated=201 # RFC 7231, 6.3.2
StatusAccepted=202 # RFC 7231, 6.3.3
StatusNonAuthoritativeInfo=203 # RFC 7231, 6.3.4
StatusNoContent=204 # RFC 7231, 6.3.5
StatusResetContent=205 # RFC 7231, 6.3.6
StatusPartialContent=206 # RFC 7233, 4.1
StatusMultiStatus=207 # RFC 4918, 11.1
StatusAlreadyReported=208 # RFC 5842, 7.1
StatusIMUsed=226 # RFC 3229, 10.4.1
StatusMultipleChoices=300 # RFC 7231, 6.4.1
StatusMovedPermanently=301 # RFC 7231, 6.4.2
StatusFound=302 # RFC 7231, 6.4.3
StatusSeeOther=303 # RFC 7231, 6.4.4
StatusNotModified=304 # RFC 7232, 4.1
StatusUseProxy=305 # RFC 7231, 6.4.5
StatusTemporaryRedirect=307 # RFC 7231, 6.4.7
StatusPermanentRedirect=308 # RFC 7538, 3
StatusBadRequest=400 # RFC 7231, 6.5.1
StatusUnauthorized=401 # RFC 7235, 3.1
StatusPaymentRequired=402 # RFC 7231, 6.5.2
StatusForbidden=403 # RFC 7231, 6.5.3
StatusNotFound=404 # RFC 7231, 6.5.4
StatusMethodNotAllowed=405 # RFC 7231, 6.5.5
StatusNotAcceptable=406 # RFC 7231, 6.5.6
StatusProxyAuthRequired=407 # RFC 7235, 3.2
StatusRequestTimeout=408 # RFC 7231, 6.5.7
StatusConflict=409 # RFC 7231, 6.5.8
StatusGone=410 # RFC 7231, 6.5.9
StatusLengthRequired=411 # RFC 7231, 6.5.10
StatusPreconditionFailed=412 # RFC 7232, 4.2
StatusRequestEntityTooLarge=413 # RFC 7231, 6.5.11
StatusRequestURITooLong=414 # RFC 7231, 6.5.12
StatusUnsupportedMediaType=415 # RFC 7231, 6.5.13
StatusRequestedRangeNotSatisfiable=416 # RFC 7233, 4.4
StatusExpectationFailed=417 # RFC 7231, 6.5.14
StatusTeapot=418 # RFC 7168, 2.3.3
StatusMisdirectedRequest=421 # RFC 7540, 9.1.2
StatusUnprocessableEntity=422 # RFC 4918, 11.2
StatusLocked=423 # RFC 4918, 11.3
StatusFailedDependency=424 # RFC 4918, 11.4
StatusTooEarly=425 # RFC 8470, 5.2.
StatusUpgradeRequired=426 # RFC 7231, 6.5.15
StatusPreconditionRequired=428 # RFC 6585, 3
StatusTooManyRequests=429 # RFC 6585, 4
StatusRequestHeaderFieldsTooLarge=431 # RFC 6585, 5
StatusUnavailableForLegalReasons=451 # RFC 7725, 3
StatusInternalServerError=500 # RFC 7231, 6.6.1
StatusNotImplemented=501 # RFC 7231, 6.6.2
StatusBadGateway=502 # RFC 7231, 6.6.3
StatusServiceUnavailable=503 # RFC 7231, 6.6.4
StatusGatewayTimeout=504 # RFC 7231, 6.6.5
StatusHTTPVersionNotSupported=505 # RFC 7231, 6.6.6
StatusVariantAlsoNegotiates=506 # RFC 2295, 8.1
StatusInsufficientStorage=507 # RFC 4918, 11.5
StatusLoopDetected=508 # RFC 5842, 7.2
StatusNotExtended=510 # RFC 2774, 7
StatusNetworkAuthenticationRequired=511 # RFC 6585, 6
declare -a HTTP_RESPONSE_MESSAGE=(
[100]="Continue"
[101]="Switching Protocols"
[102]="Processing"
[103]="Early Hints"
[200]="OK"
[201]="Created"
[202]="Accepted"
[203]="Non-Authoritative Information"
[204]="No Content"
[205]="Reset Content"
[206]="Partial Content"
[207]="Multi-Status"
[208]="Already Reported"
[226]="IM Used"
[300]="Multiple Choices"
[301]="Moved Permanently"
[302]="Found"
[303]="See Other"
[304]="Not Modified"
[305]="Use Proxy"
[307]="Temporary Redirect"
[308]="Permanent Redirect"
[400]="Bad Request"
[401]="Unauthorized"
[402]="Payment Required"
[403]="Forbidden"
[404]="Not Found"
[405]="Method Not Allowed"
[406]="Not Acceptable"
[407]="Proxy Authentication Required"
[408]="Request Timeout"
[409]="Conflict"
[410]="Gone"
[411]="Length Required"
[412]="Precondition Failed"
[413]="Request Entity Too Large"
[414]="Request URI Too Long"
[415]="Unsupported Media Type"
[416]="Requested Range Not Satisfiable"
[417]="Expectation Failed"
[418]="I'm a teapot"
[421]="Misdirected Request"
[422]="Unprocessable Entity"
[423]="Locked"
[424]="Failed Dependency"
[425]="Too Early"
[426]="Upgrade Required"
[428]="Precondition Required"
[429]="Too Many Requests"
[431]="Request Header Fields Too Large"
[451]="Unavailable For Legal Reasons"
[500]="Internal Server Error"
[501]="Not Implemented"
[502]="Bad Gateway"
[503]="Service Unavailable"
[504]="Gateway Timeout"
[505]="HTTP Version Not Supported"
[506]="Variant Also Negotiates"
[507]="Insufficient Storage"
[508]="Loop Detected"
[510]="Not Extended"
[511]="Network Authentication Required"
)
function http_add_header() {
echo "$1: $2" >&1;
}
# map an http status code to its reason phrase and emit the status line
function http_message() {
code=$1
msg=${HTTP_RESPONSE_MESSAGE[$1]}
echo "HTTP/1.0 $code $msg" >&1;
}
function http_set_content_type() {
http_add_header "Content-Type" $1
}
function http_set_content_length() {
http_add_header "Content-Length" $1
}
function http_message_body() {
echo '' >&1;
}
# TODO: fix OK
function http_response {
http_message $1
http_add_header "Server" "vesper"
http_set_content_type $2
http_message_body
}
# TODO: fix http message
function fail() {
http_message $1
http_add_header "Server" "vesper"
http_set_content_type "text/plain"
http_message_body
echo $2
exit 1
}
HTTP_REQUEST_METHOD=""
HTTP_REQUEST_URI=""
HTTP_REQUEST_HTTP_VERSION=""
declare -a HTTP_REQUEST_HEADERS
function http_request() {
recv() { echo "< $@" >&2; }
# HTTP RFC 2616 $5.1 request line
# https://tools.ietf.org/html/rfc2616#section-5.1
read -r raw
# if there is any trailing CR, strip it
raw=${raw%%$'\r'}
recv "$raw"
read -r HTTP_REQUEST_METHOD HTTP_REQUEST_URI HTTP_REQUEST_HTTP_VERSION <<<"$raw"
while read -r raw; do
raw=${raw%%$'\r'}
recv "$raw"
# check if we reached the end of the headers
[ -z "$raw" ] && break
HTTP_REQUEST_HEADERS+=("$raw")
done
}
function file_size() {
echo $(stat -f%z $1)
}
function file_mime() {
echo $(file --mime-type -b $1)
}
function http_sendfile() {
file=$1
http_message $StatusOK
http_add_header "Server" "vesper"
http_set_content_type $(file_mime $file)
http_set_content_length $(file_size $file)
http_message_body
cat ${file} >&1
}
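# --- Example wiring (illustrative sketch, not part of the original script) ---
# Assuming this script is run once per connection with the socket on stdin/stdout
# (for example: socat TCP-LISTEN:8080,reuseaddr,fork EXEC:./vesper.sh), the helpers
# above could be combined like this; DOCROOT is a hypothetical setting:
#
# DOCROOT="./public"
#
# function handle_connection() {
#     http_request
#     local target="${DOCROOT}${HTTP_REQUEST_URI}"
#     if [ "$HTTP_REQUEST_METHOD" != "GET" ]; then
#         fail $StatusMethodNotAllowed "only GET is supported"
#     elif [ -f "$target" ]; then
#         http_sendfile "$target"
#     else
#         fail $StatusNotFound "not found: $HTTP_REQUEST_URI"
#     fi
# }
#
# handle_connection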
# dummy
// (C) Copyright 2009-2011 Frederic Bron.
//
// Use, modification and distribution are subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt).
//
// See http://www.boost.org/libs/type_traits for most recent version including documentation.
#ifndef BOOST_TT_HAS_LOGICAL_OR_HPP_INCLUDED
#define BOOST_TT_HAS_LOGICAL_OR_HPP_INCLUDED
#define BOOST_TT_TRAIT_NAME has_logical_or
#define BOOST_TT_TRAIT_OP ||
#define BOOST_TT_FORBIDDEN_IF\
/* pointer with fundamental non convertible to bool */\
(\
(\
::boost::is_pointer< Lhs_noref >::value && \
(\
::boost::is_fundamental< Rhs_nocv >::value && \
(! ::boost::is_convertible< Rhs_nocv, bool >::value )\
)\
)||\
(\
::boost::is_pointer< Rhs_noref >::value && \
(\
::boost::is_fundamental< Lhs_nocv >::value && \
(! ::boost::is_convertible< Lhs_nocv, bool >::value )\
)\
)\
)
#include <boost/type_traits/detail/has_binary_operator.hpp>
#undef BOOST_TT_TRAIT_NAME
#undef BOOST_TT_TRAIT_OP
#undef BOOST_TT_FORBIDDEN_IF
#endif
---
title: Advice to People Nurturing a Career in Computering
date: 2019-06-18
tags:
- career
---
# Advice to People Nurturing a Career in Computering
Computering, or making computers do things in exchange for money, can be a
surprisingly hard field to break into as an outsider. There's lots of jargon,
tool holy wars, flamewars about the "right" way to do things and a whole host
of overhead that can make it feel difficult or impossible when starting from
scratch. I'm a college dropout, I know what it's like to be turned down over
and over because of the lack of that blessed square paper. In this post I
hope to give some general advice based on what has and hasn't worked for me
over the years.
Hopefully this can help you too.
## Make a Portfolio Site
When you are breaking into the industry, there is a huge initial "brand" issue.
You're nobody. This is both a very good thing and a very bad thing. It's a very
good thing because you have a clean slate to start from. It's also a very bad
thing because you have nothing to refer to yourself with.
Part of establishing a brand for yourself in this day and age is to make a website
(like the one you are probably reading this off of right now). This website can
be powered by anything. [GitHub Pages](https://pages.github.com) with the `github.io`
domain works, but it's probably a better idea to make your website backend from scratch.
Your website should include at least the following things:
- Your name
- A few buzzwords relating to the kind of thing you'd like to do with computers (example: I have myself listed as a "Backend Services and Devops Specialist" which sounds really impressive yet doesn't really mean much of anything)
- Tools or soft skills you are experienced with
- Links to yourself on other social media platforms (GitHub, Twitter, LinkedIn, etc.)
- Links to or words about projects of yours that you are proud of
- Some contact information (an email address is a good idea too)
If you feel comfortable doing so, I'd also suggest putting your [resume](https://christine.website/resume)
on this site too. Even if it's just got your foodservice jobs or education
history (including your high school diploma if need be).
This website can then be used as a landing page for other things in the future
too. It's _your_ space on the internet. _You_ get to decide what's up there or
not.
## Make a Tech Blog On That Site
This has been the single biggest thing to help me grow professionally. I regularly
put [articles](https://christine.website/blog) on my blog, sometimes not even about
technology topics. Even if you are writing about your take on something people have
already written about, it's still good practice. Your early posts are going to be
rough. It's normal to not be an expert when starting out in a new skill.
This helps you stand out in the interview process. I've actually managed to skip
interviews with companies purely because of the contents of my blog. One of them
had the interviewer almost word for word say the following:
> I've read your blog, you don't need to prove technical understanding to me.
It was one of the most awestruck feelings I've ever had in the hiring process.
## Find People to Mentor You
Starting out you are going to not be very skilled in anything. One good way you
can help yourself get good at things is to go out into communities and ask for
help understanding things. As you get involved in communities, naturally you will
end up finding people who are giving a lot of advice about things. Don't be
afraid to ask people for more details.
Get involved in niche communities (like unpopular Linux distros) and help them
out, even if it's just doing spellcheck over the documentation. This kind of
stuff really makes you stand out and people will remember it.
Formal mentorship is a very hard thing to try and define. It's probably better
to surround yourself with experts in various niche topics rather than looking
for that one magic mentor. Mentorship can be a very time consuming thing on the
expert's side. Be thankful for what you can get and try and give back by helping
other people too.
Seriously though, don't be afraid to email or DM people for more information about
topics that don't make sense in group chats. I have found that people really
appreciate that kind of stuff, even if they don't immediately have the time to
respond in detail.
## Do Stuff with Computers, Post the Results Somewhere
Repository hosting sites like GitHub and Gitlab allow you to show potential
employers exactly what you can do by example. Put your code up on them, even
if you think it's "bad" or the solution could have been implemented better by
someone more technically skilled. The best way to get experience in this industry
is by doing. The best way to do things is to just do them and then let other
people see the results.
Your first programs will be inelegant, but that's okay.
Your first repositories will be bloated or inefficient, but that's okay.
Nobody expects perfection out of the gate, and honestly even for skilled experts
perfection is probably too high of a bar. We're human. We make mistakes. Our job
is to turn the results of these mistakes into the products and services that
people rely on.
## You Don't Need 100% Of The Job Requirements
Many companies put job requirements as soft guidelines, not hard ones. It's easy
to see requirements for jobs like this:
> Applicants must have:
>
> - 1 year managing a distributed Flopnax system
> - Experience using Rilkef across multiple regions
> - Ropjar, HTML/CSS
and feel really disheartened. That "must" there seldom actually is a hard
requirement. Many companies will be willing to hire someone for a junior
level. You can learn the skills you miss as a natural part of doing your job.
There's support structures at nearly every company for things like this. You
don't need to be perfect out of the gate.
## Interviews
This one is a bit of a weird one to give advice for. Each company ends up having
their own interviewing style, and even then individual interviewers have their
own views on how to do it. My advice here is trying to be as generic as possible.
### Know the Things You Have Listed on Your Resume
If you say you know how to use a language, brush up on that language. If you say
you know how to use a tool, be able to explain to someone what that tool does and
why people should care about it.
Don't misrepresent your skills on your resume either. It's similar to lying. It's
also a good idea to go back and prune out skills you don't feel as fresh with over
time.
### Be Yourself
It's tempting to put on a persona or try to present yourself as larger than life.
Resist this temptation. They want to see _you_, not a caricature of yourself. It's
scary to do interviews at times. It feels like you are being judged. It's not
personal. Everything in interviews is aimed at making the best decision for the
company.
Also, don't be afraid to say you don't know things. You don't need to have API
documentation memorized. They aren't looking for that. API documentation will be
available to you while you write code at your job. Interviews are usually there
to help the interviewer verify that you know how to break larger problems into
more understandable chunks. Ask questions. Ensure you understand what they are
and are not asking you. Nearly every interview that I've had that's resulted in
a job offer has had me ask questions about what they are asking.
### "Do You Have Any Questions?"
A few things I've found work really well for this:
- "Do you know of anyone who left this company and then came back?"
- "What is your favorite part of your workday?"
- "What is your least favorite part of your workday?"
- "Do postmortems have formal blame as a part of the process?"
- "Does code get reviewed before it ships into production?"
- "Are there any employee run interest groups for things like mindfulness?"
And then finally as your last question:
- "What are the next steps?"
This question in particular tends to signal interest to the person interviewing
you. I don't completely understand why, but it seems to be one of the most
useful questions to ask; especially with initial interviews with hiring managers
or human resources.
### Meditate Before Interviews
Even if it's just [watching your breath for 5 minutes](https://when-then-zen.christine.website/meditation/anapana).
I find that doing this helps reset the mind and reduces subjective experiences of
anxiety.
## Persistence
Getting the first few real jobs is tough, but after you get a year or two at any
employer things get a lot easier. Your first job is going to give you a lot of
experience. You are going to learn things about things you didn't even think
would be possible to learn about. People, processes and the like are going to
surprise or shock you.
At the end of the day though, it's just a job. It's impermanent. You might not
fit in. You might have to find another. Don't panic about it, even though it's
really, really tempting to. You can always find another job.
---
I hope this is able to help. Thanks for reading this and be well.
// Copyright 2018 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package debug provides the portable interface to a program being debugged.
package debug
import (
"fmt"
"io"
"strings"
)
// Program is the interface to a (possibly remote) program being debugged.
// The process (if any) and text file associated with it may change during
// the session, but many resources are associated with the Program rather
// than process or text file so they persist across debugging runs.
type Program interface {
// Open opens a virtual file associated with the process.
// Names are things like "text", "mem", "fd/2".
// Mode is one of "r", "w", "rw".
// Return values are open File and error.
// When the target binary is re-run, open files are
// automatically updated to refer to the corresponding
// file in the new process.
Open(name string, mode string) (File, error)
// Run abandons the current running process, if any,
// and execs a new instance of the target binary file
// (which may have changed underfoot).
// Breakpoints and open files are re-established.
// The call hangs until the program stops executing,
// at which point it returns the program status.
// args contains the command-line arguments for the process.
Run(args ...string) (Status, error)
// Stop stops execution of the current process but
// does not kill it.
Stop() (Status, error)
// Resume resumes execution of a stopped process.
// The call hangs until the program stops executing,
// at which point it returns the program status.
Resume() (Status, error)
// TODO: Step(). Where does the granularity happen,
// on the proxy end or the debugging control end?
// Kill kills the current process.
Kill() (Status, error)
// Breakpoint sets a breakpoint at the specified address.
Breakpoint(address uint64) (PCs []uint64, err error)
// BreakpointAtFunction sets a breakpoint at the start of the specified function.
BreakpointAtFunction(name string) (PCs []uint64, err error)
// BreakpointAtLine sets a breakpoint at the specified source line.
BreakpointAtLine(file string, line uint64) (PCs []uint64, err error)
// DeleteBreakpoints removes the breakpoints at the specified addresses.
// Addresses where no breakpoint is set are ignored.
DeleteBreakpoints(pcs []uint64) error
// Eval evaluates the expression (typically an address) and returns
// its string representation(s). Multivalued expressions such as
// matches for regular expressions return multiple values.
// TODO: change this to multiple functions with more specific names.
// Syntax:
// re:regexp
// Returns a list of symbol names that match the expression
// addr:symbol
// Returns a one-element list holding the hexadecimal
// ("0x1234") value of the address of the symbol
// val:symbol
// Returns a one-element list holding the formatted
// value of the symbol
// 0x1234, 01234, 467
// Returns a one-element list holding the name of the
// symbol ("main.foo") at that address (hex, octal, decimal).
Eval(expr string) ([]string, error)
// Evaluate evaluates an expression. Accepts a subset of Go expression syntax:
// basic literals, identifiers, parenthesized expressions, and most operators.
// Only the len function call is available.
//
// The expression can refer to local variables and function parameters of the
// function where the program is stopped.
//
// On success, the type of the value returned will be one of:
// int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64,
// complex64, complex128, bool, Pointer, Array, Slice, String, Map, Struct,
// Channel, Func, or Interface.
Evaluate(e string) (Value, error)
// Frames returns up to count stack frames from where the program
// is currently stopped.
Frames(count int) ([]Frame, error)
// VarByName returns a Var referring to a global variable with the given name.
// TODO: local variables
VarByName(name string) (Var, error)
// Value gets the value of a variable by reading the program's memory.
Value(v Var) (Value, error)
// MapElement returns Vars for the key and value of a map element specified by
// a 0-based index.
MapElement(m Map, index uint64) (Var, Var, error)
// Goroutines gets the current goroutines.
Goroutines() ([]*Goroutine, error)
}
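// A minimal usage sketch (an illustration, not part of the package API),
// assuming prog is some concrete Program implementation, the target binary
// defines main.main, and fmt/log are imported by the caller:
//
//	pcs, err := prog.BreakpointAtFunction("main.main")
//	if err != nil {
//		log.Fatal(err)
//	}
//	log.Printf("breakpoints set at %#x", pcs)
//	status, err := prog.Run()      // blocks until the program stops again
//	if err != nil {
//		log.Fatal(err)
//	}
//	log.Printf("stopped at PC %#x", status.PC)
//	frames, err := prog.Frames(10) // stack frames at the stop point
//	if err != nil {
//		log.Fatal(err)
//	}
//	for _, f := range frames {
//		fmt.Println(f)
//	}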
type Goroutine struct {
ID int64
Status GoroutineStatus
StatusString string // A human-readable string explaining the status in more detail.
Function string // Name of the goroutine function.
Caller string // Name of the function that created this goroutine.
StackFrames []Frame
}
type GoroutineStatus byte
const (
Running GoroutineStatus = iota
Queued
Blocked
)
func (g GoroutineStatus) String() string {
switch g {
case Running:
return "running"
case Queued:
return "queued"
case Blocked:
return "blocked"
}
return "invalid status"
}
func (g *Goroutine) String() string {
return fmt.Sprintf("goroutine %d [%s] %s -> %s", g.ID, g.StatusString, g.Caller, g.Function)
}
// A reference to a variable in a program.
// TODO: handle variables stored in registers
type Var struct {
TypeID uint64 // A type identifier, opaque to the user.
Address uint64 // The address of the variable.
}
// A value read from a remote program.
type Value interface{}
// Pointer is a Value representing a pointer.
// Note that the TypeID field will be the type of the variable being pointed to,
// not the type of this pointer.
type Pointer struct {
TypeID uint64 // A type identifier, opaque to the user.
Address uint64 // The address of the variable.
}
// Array is a Value representing an array.
type Array struct {
ElementTypeID uint64
Address uint64
Length uint64 // Number of elements in the array
StrideBits uint64 // Number of bits between array entries
}
// Len returns the number of elements in the array.
func (a Array) Len() uint64 {
return a.Length
}
// Element returns a Var referring to the given element of the array.
func (a Array) Element(index uint64) Var {
return Var{
TypeID: a.ElementTypeID,
Address: a.Address + index*(a.StrideBits/8),
}
}
// Slice is a Value representing a slice.
type Slice struct {
Array
Capacity uint64
}
// String is a Value representing a string.
// TODO: a method to access more of a truncated string.
type String struct {
// Length contains the length of the remote string, in bytes.
Length uint64
// String contains the string itself; it may be truncated to fewer bytes than the value of the Length field.
String string
}
// Map is a Value representing a map.
type Map struct {
TypeID uint64
Address uint64
Length uint64 // Number of elements in the map.
}
// Struct is a Value representing a struct.
type Struct struct {
Fields []StructField
}
// StructField represents a field in a struct object.
type StructField struct {
Name string
Var Var
}
// Channel is a Value representing a channel.
type Channel struct {
ElementTypeID uint64
Address uint64 // Location of the channel struct in memory.
Buffer uint64 // Location of the buffer; zero for nil channels.
Length uint64 // Number of elements stored in the channel buffer.
Capacity uint64 // Capacity of the buffer; zero for unbuffered channels.
Stride uint64 // Number of bytes between buffer entries.
BufferStart uint64 // Index in the buffer of the element at the head of the queue.
}
// Element returns a Var referring to the given element of the channel's queue.
// If the channel is unbuffered, nil, or if the index is too large, returns a Var with Address == 0.
func (m Channel) Element(index uint64) Var {
if index >= m.Length {
return Var{
TypeID: m.ElementTypeID,
Address: 0,
}
}
if index < m.Capacity-m.BufferStart {
// The element is in the part of the queue that occurs later in the buffer
// than the head of the queue.
return Var{
TypeID: m.ElementTypeID,
Address: m.Buffer + (m.BufferStart+index)*m.Stride,
}
}
// The element is in the part of the queue that has wrapped around to the
// start of the buffer.
return Var{
TypeID: m.ElementTypeID,
Address: m.Buffer + (m.BufferStart+index-m.Capacity)*m.Stride,
}
}
// Func is a Value representing a func.
type Func struct {
Address uint64
}
// Interface is a Value representing an interface.
type Interface struct{}
// The File interface provides access to file-like resources in the program.
// It implements only ReaderAt and WriterAt, not Reader and Writer, because
// random access is a far more common pattern for things like symbol tables,
// and because the enormous address space of virtual memory makes routines
// like io.Copy dangerous.
type File interface {
io.ReaderAt
io.WriterAt
io.Closer
}
type Status struct {
PC, SP uint64
}
type Frame struct {
// PC is the hardware program counter.
PC uint64
// SP is the hardware stack pointer.
SP uint64
// File and Line are the source code location of the PC.
File string
Line uint64
// Function is the name of this frame's function.
Function string
// FunctionStart is the starting PC of the function.
FunctionStart uint64
// Params contains the function's parameters.
Params []Param
// Vars contains the function's local variables.
Vars []LocalVar
}
func (f Frame) String() string {
params := make([]string, len(f.Params))
for i, p := range f.Params {
params[i] = p.Name // TODO: more information
}
p := strings.Join(params, ", ")
off := f.PC - f.FunctionStart
return fmt.Sprintf("%s(%s)\n\t%s:%d +0x%x", f.Function, p, f.File, f.Line, off)
}
// Param is a parameter of a function.
type Param struct {
Name string
Var Var
}
// LocalVar is a local variable of a function.
type LocalVar struct {
Name string
Var Var
}
The MIT License (MIT)
Copyright (c) Sindre Sorhus <[email protected]> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
// mkerrors.sh -Wall -Werror -static -I/tmp/include -m64
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build amd64,linux
// Created by cgo -godefs - DO NOT EDIT
// cgo -godefs -- -Wall -Werror -static -I/tmp/include -m64 _const.go
package unix
import "syscall"
const (
AF_ALG = 0x26
AF_APPLETALK = 0x5
AF_ASH = 0x12
AF_ATMPVC = 0x8
AF_ATMSVC = 0x14
AF_AX25 = 0x3
AF_BLUETOOTH = 0x1f
AF_BRIDGE = 0x7
AF_CAIF = 0x25
AF_CAN = 0x1d
AF_DECnet = 0xc
AF_ECONET = 0x13
AF_FILE = 0x1
AF_IB = 0x1b
AF_IEEE802154 = 0x24
AF_INET = 0x2
AF_INET6 = 0xa
AF_IPX = 0x4
AF_IRDA = 0x17
AF_ISDN = 0x22
AF_IUCV = 0x20
AF_KCM = 0x29
AF_KEY = 0xf
AF_LLC = 0x1a
AF_LOCAL = 0x1
AF_MAX = 0x2c
AF_MPLS = 0x1c
AF_NETBEUI = 0xd
AF_NETLINK = 0x10
AF_NETROM = 0x6
AF_NFC = 0x27
AF_PACKET = 0x11
AF_PHONET = 0x23
AF_PPPOX = 0x18
AF_QIPCRTR = 0x2a
AF_RDS = 0x15
AF_ROSE = 0xb
AF_ROUTE = 0x10
AF_RXRPC = 0x21
AF_SECURITY = 0xe
AF_SMC = 0x2b
AF_SNA = 0x16
AF_TIPC = 0x1e
AF_UNIX = 0x1
AF_UNSPEC = 0x0
AF_VSOCK = 0x28
AF_WANPIPE = 0x19
AF_X25 = 0x9
ALG_OP_DECRYPT = 0x0
ALG_OP_ENCRYPT = 0x1
ALG_SET_AEAD_ASSOCLEN = 0x4
ALG_SET_AEAD_AUTHSIZE = 0x5
ALG_SET_IV = 0x2
ALG_SET_KEY = 0x1
ALG_SET_OP = 0x3
ARPHRD_6LOWPAN = 0x339
ARPHRD_ADAPT = 0x108
ARPHRD_APPLETLK = 0x8
ARPHRD_ARCNET = 0x7
ARPHRD_ASH = 0x30d
ARPHRD_ATM = 0x13
ARPHRD_AX25 = 0x3
ARPHRD_BIF = 0x307
ARPHRD_CAIF = 0x336
ARPHRD_CAN = 0x118
ARPHRD_CHAOS = 0x5
ARPHRD_CISCO = 0x201
ARPHRD_CSLIP = 0x101
ARPHRD_CSLIP6 = 0x103
ARPHRD_DDCMP = 0x205
ARPHRD_DLCI = 0xf
ARPHRD_ECONET = 0x30e
ARPHRD_EETHER = 0x2
ARPHRD_ETHER = 0x1
ARPHRD_EUI64 = 0x1b
ARPHRD_FCAL = 0x311
ARPHRD_FCFABRIC = 0x313
ARPHRD_FCPL = 0x312
ARPHRD_FCPP = 0x310
ARPHRD_FDDI = 0x306
ARPHRD_FRAD = 0x302
ARPHRD_HDLC = 0x201
ARPHRD_HIPPI = 0x30c
ARPHRD_HWX25 = 0x110
ARPHRD_IEEE1394 = 0x18
ARPHRD_IEEE802 = 0x6
ARPHRD_IEEE80211 = 0x321
ARPHRD_IEEE80211_PRISM = 0x322
ARPHRD_IEEE80211_RADIOTAP = 0x323
ARPHRD_IEEE802154 = 0x324
ARPHRD_IEEE802154_MONITOR = 0x325
ARPHRD_IEEE802_TR = 0x320
ARPHRD_INFINIBAND = 0x20
ARPHRD_IP6GRE = 0x337
ARPHRD_IPDDP = 0x309
ARPHRD_IPGRE = 0x30a
ARPHRD_IRDA = 0x30f
ARPHRD_LAPB = 0x204
ARPHRD_LOCALTLK = 0x305
ARPHRD_LOOPBACK = 0x304
ARPHRD_METRICOM = 0x17
ARPHRD_NETLINK = 0x338
ARPHRD_NETROM = 0x0
ARPHRD_NONE = 0xfffe
ARPHRD_PHONET = 0x334
ARPHRD_PHONET_PIPE = 0x335
ARPHRD_PIMREG = 0x30b
ARPHRD_PPP = 0x200
ARPHRD_PRONET = 0x4
ARPHRD_RAWHDLC = 0x206
ARPHRD_ROSE = 0x10e
ARPHRD_RSRVD = 0x104
ARPHRD_SIT = 0x308
ARPHRD_SKIP = 0x303
ARPHRD_SLIP = 0x100
ARPHRD_SLIP6 = 0x102
ARPHRD_TUNNEL = 0x300
ARPHRD_TUNNEL6 = 0x301
ARPHRD_VOID = 0xffff
ARPHRD_VSOCKMON = 0x33a
ARPHRD_X25 = 0x10f
B0 = 0x0
B1000000 = 0x1008
B110 = 0x3
B115200 = 0x1002
B1152000 = 0x1009
B1200 = 0x9
B134 = 0x4
B150 = 0x5
B1500000 = 0x100a
B1800 = 0xa
B19200 = 0xe
B200 = 0x6
B2000000 = 0x100b
B230400 = 0x1003
B2400 = 0xb
B2500000 = 0x100c
B300 = 0x7
B3000000 = 0x100d
B3500000 = 0x100e
B38400 = 0xf
B4000000 = 0x100f
B460800 = 0x1004
B4800 = 0xc
B50 = 0x1
B500000 = 0x1005
B57600 = 0x1001
B576000 = 0x1006
B600 = 0x8
B75 = 0x2
B921600 = 0x1007
B9600 = 0xd
BLKBSZGET = 0x80081270
BLKBSZSET = 0x40081271
BLKFLSBUF = 0x1261
BLKFRAGET = 0x1265
BLKFRASET = 0x1264
BLKGETSIZE = 0x1260
BLKGETSIZE64 = 0x80081272
BLKPBSZGET = 0x127b
BLKRAGET = 0x1263
BLKRASET = 0x1262
BLKROGET = 0x125e
BLKROSET = 0x125d
BLKRRPART = 0x125f
BLKSECTGET = 0x1267
BLKSECTSET = 0x1266
BLKSSZGET = 0x1268
BOTHER = 0x1000
BPF_A = 0x10
BPF_ABS = 0x20
BPF_ADD = 0x0
BPF_ALU = 0x4
BPF_AND = 0x50
BPF_B = 0x10
BPF_DIV = 0x30
BPF_H = 0x8
BPF_IMM = 0x0
BPF_IND = 0x40
BPF_JA = 0x0
BPF_JEQ = 0x10
BPF_JGE = 0x30
BPF_JGT = 0x20
BPF_JMP = 0x5
BPF_JSET = 0x40
BPF_K = 0x0
BPF_LD = 0x0
BPF_LDX = 0x1
BPF_LEN = 0x80
BPF_LL_OFF = -0x200000
BPF_LSH = 0x60
BPF_MAJOR_VERSION = 0x1
BPF_MAXINSNS = 0x1000
BPF_MEM = 0x60
BPF_MEMWORDS = 0x10
BPF_MINOR_VERSION = 0x1
BPF_MISC = 0x7
BPF_MOD = 0x90
BPF_MSH = 0xa0
BPF_MUL = 0x20
BPF_NEG = 0x80
BPF_NET_OFF = -0x100000
BPF_OR = 0x40
BPF_RET = 0x6
BPF_RSH = 0x70
BPF_ST = 0x2
BPF_STX = 0x3
BPF_SUB = 0x10
BPF_TAX = 0x0
BPF_TXA = 0x80
BPF_W = 0x0
BPF_X = 0x8
BPF_XOR = 0xa0
BRKINT = 0x2
BS0 = 0x0
BS1 = 0x2000
BSDLY = 0x2000
CAN_BCM = 0x2
CAN_EFF_FLAG = 0x80000000
CAN_EFF_ID_BITS = 0x1d
CAN_EFF_MASK = 0x1fffffff
CAN_ERR_FLAG = 0x20000000
CAN_ERR_MASK = 0x1fffffff
CAN_INV_FILTER = 0x20000000
CAN_ISOTP = 0x6
CAN_MAX_DLC = 0x8
CAN_MAX_DLEN = 0x8
CAN_MCNET = 0x5
CAN_MTU = 0x10
CAN_NPROTO = 0x7
CAN_RAW = 0x1
CAN_RAW_FILTER_MAX = 0x200
CAN_RTR_FLAG = 0x40000000
CAN_SFF_ID_BITS = 0xb
CAN_SFF_MASK = 0x7ff
CAN_TP16 = 0x3
CAN_TP20 = 0x4
CBAUD = 0x100f
CBAUDEX = 0x1000
CFLUSH = 0xf
CIBAUD = 0x100f0000
CLOCAL = 0x800
CLOCK_BOOTTIME = 0x7
CLOCK_BOOTTIME_ALARM = 0x9
CLOCK_DEFAULT = 0x0
CLOCK_EXT = 0x1
CLOCK_INT = 0x2
CLOCK_MONOTONIC = 0x1
CLOCK_MONOTONIC_COARSE = 0x6
CLOCK_MONOTONIC_RAW = 0x4
CLOCK_PROCESS_CPUTIME_ID = 0x2
CLOCK_REALTIME = 0x0
CLOCK_REALTIME_ALARM = 0x8
CLOCK_REALTIME_COARSE = 0x5
CLOCK_TAI = 0xb
CLOCK_THREAD_CPUTIME_ID = 0x3
CLOCK_TXFROMRX = 0x4
CLOCK_TXINT = 0x3
CLONE_CHILD_CLEARTID = 0x200000
CLONE_CHILD_SETTID = 0x1000000
CLONE_DETACHED = 0x400000
CLONE_FILES = 0x400
CLONE_FS = 0x200
CLONE_IO = 0x80000000
CLONE_NEWCGROUP = 0x2000000
CLONE_NEWIPC = 0x8000000
CLONE_NEWNET = 0x40000000
CLONE_NEWNS = 0x20000
CLONE_NEWPID = 0x20000000
CLONE_NEWUSER = 0x10000000
CLONE_NEWUTS = 0x4000000
CLONE_PARENT = 0x8000
CLONE_PARENT_SETTID = 0x100000
CLONE_PTRACE = 0x2000
CLONE_SETTLS = 0x80000
CLONE_SIGHAND = 0x800
CLONE_SYSVSEM = 0x40000
CLONE_THREAD = 0x10000
CLONE_UNTRACED = 0x800000
CLONE_VFORK = 0x4000
CLONE_VM = 0x100
CMSPAR = 0x40000000
CR0 = 0x0
CR1 = 0x200
CR2 = 0x400
CR3 = 0x600
CRDLY = 0x600
CREAD = 0x80
CRTSCTS = 0x80000000
CS5 = 0x0
CS6 = 0x10
CS7 = 0x20
CS8 = 0x30
CSIGNAL = 0xff
CSIZE = 0x30
CSTART = 0x11
CSTATUS = 0x0
CSTOP = 0x13
CSTOPB = 0x40
CSUSP = 0x1a
DT_BLK = 0x6
DT_CHR = 0x2
DT_DIR = 0x4
DT_FIFO = 0x1
DT_LNK = 0xa
DT_REG = 0x8
DT_SOCK = 0xc
DT_UNKNOWN = 0x0
DT_WHT = 0xe
ECHO = 0x8
ECHOCTL = 0x200
ECHOE = 0x10
ECHOK = 0x20
ECHOKE = 0x800
ECHONL = 0x40
ECHOPRT = 0x400
EFD_CLOEXEC = 0x80000
EFD_NONBLOCK = 0x800
EFD_SEMAPHORE = 0x1
ENCODING_DEFAULT = 0x0
ENCODING_FM_MARK = 0x3
ENCODING_FM_SPACE = 0x4
ENCODING_MANCHESTER = 0x5
ENCODING_NRZ = 0x1
ENCODING_NRZI = 0x2
EPOLLERR = 0x8
EPOLLET = 0x80000000
EPOLLEXCLUSIVE = 0x10000000
EPOLLHUP = 0x10
EPOLLIN = 0x1
EPOLLMSG = 0x400
EPOLLONESHOT = 0x40000000
EPOLLOUT = 0x4
EPOLLPRI = 0x2
EPOLLRDBAND = 0x80
EPOLLRDHUP = 0x2000
EPOLLRDNORM = 0x40
EPOLLWAKEUP = 0x20000000
EPOLLWRBAND = 0x200
EPOLLWRNORM = 0x100
EPOLL_CLOEXEC = 0x80000
EPOLL_CTL_ADD = 0x1
EPOLL_CTL_DEL = 0x2
EPOLL_CTL_MOD = 0x3
ETH_P_1588 = 0x88f7
ETH_P_8021AD = 0x88a8
ETH_P_8021AH = 0x88e7
ETH_P_8021Q = 0x8100
ETH_P_80221 = 0x8917
ETH_P_802_2 = 0x4
ETH_P_802_3 = 0x1
ETH_P_802_3_MIN = 0x600
ETH_P_802_EX1 = 0x88b5
ETH_P_AARP = 0x80f3
ETH_P_AF_IUCV = 0xfbfb
ETH_P_ALL = 0x3
ETH_P_AOE = 0x88a2
ETH_P_ARCNET = 0x1a
ETH_P_ARP = 0x806
ETH_P_ATALK = 0x809b
ETH_P_ATMFATE = 0x8884
ETH_P_ATMMPOA = 0x884c
ETH_P_AX25 = 0x2
ETH_P_BATMAN = 0x4305
ETH_P_BPQ = 0x8ff
ETH_P_CAIF = 0xf7
ETH_P_CAN = 0xc
ETH_P_CANFD = 0xd
ETH_P_CONTROL = 0x16
ETH_P_CUST = 0x6006
ETH_P_DDCMP = 0x6
ETH_P_DEC = 0x6000
ETH_P_DIAG = 0x6005
ETH_P_DNA_DL = 0x6001
ETH_P_DNA_RC = 0x6002
ETH_P_DNA_RT = 0x6003
ETH_P_DSA = 0x1b
ETH_P_ECONET = 0x18
ETH_P_EDSA = 0xdada
ETH_P_FCOE = 0x8906
ETH_P_FIP = 0x8914
ETH_P_HDLC = 0x19
ETH_P_HSR = 0x892f
ETH_P_IBOE = 0x8915
ETH_P_IEEE802154 = 0xf6
ETH_P_IEEEPUP = 0xa00
ETH_P_IEEEPUPAT = 0xa01
ETH_P_IP = 0x800
ETH_P_IPV6 = 0x86dd
ETH_P_IPX = 0x8137
ETH_P_IRDA = 0x17
ETH_P_LAT = 0x6004
ETH_P_LINK_CTL = 0x886c
ETH_P_LOCALTALK = 0x9
ETH_P_LOOP = 0x60
ETH_P_LOOPBACK = 0x9000
ETH_P_MACSEC = 0x88e5
ETH_P_MOBITEX = 0x15
ETH_P_MPLS_MC = 0x8848
ETH_P_MPLS_UC = 0x8847
ETH_P_MVRP = 0x88f5
ETH_P_NCSI = 0x88f8
ETH_P_PAE = 0x888e
ETH_P_PAUSE = 0x8808
ETH_P_PHONET = 0xf5
ETH_P_PPPTALK = 0x10
ETH_P_PPP_DISC = 0x8863
ETH_P_PPP_MP = 0x8
ETH_P_PPP_SES = 0x8864
ETH_P_PRP = 0x88fb
ETH_P_PUP = 0x200
ETH_P_PUPAT = 0x201
ETH_P_QINQ1 = 0x9100
ETH_P_QINQ2 = 0x9200
ETH_P_QINQ3 = 0x9300
ETH_P_RARP = 0x8035
ETH_P_SCA = 0x6007
ETH_P_SLOW = 0x8809
ETH_P_SNAP = 0x5
ETH_P_TDLS = 0x890d
ETH_P_TEB = 0x6558
ETH_P_TIPC = 0x88ca
ETH_P_TRAILER = 0x1c
ETH_P_TR_802_2 = 0x11
ETH_P_TSN = 0x22f0
ETH_P_WAN_PPP = 0x7
ETH_P_WCCP = 0x883e
ETH_P_X25 = 0x805
ETH_P_XDSA = 0xf8
EXTA = 0xe
EXTB = 0xf
EXTPROC = 0x10000
FALLOC_FL_COLLAPSE_RANGE = 0x8
FALLOC_FL_INSERT_RANGE = 0x20
FALLOC_FL_KEEP_SIZE = 0x1
FALLOC_FL_NO_HIDE_STALE = 0x4
FALLOC_FL_PUNCH_HOLE = 0x2
FALLOC_FL_UNSHARE_RANGE = 0x40
FALLOC_FL_ZERO_RANGE = 0x10
FD_CLOEXEC = 0x1
FD_SETSIZE = 0x400
FF0 = 0x0
FF1 = 0x8000
FFDLY = 0x8000
FLUSHO = 0x1000
FS_ENCRYPTION_MODE_AES_128_CBC = 0x5
FS_ENCRYPTION_MODE_AES_128_CTS = 0x6
FS_ENCRYPTION_MODE_AES_256_CBC = 0x3
FS_ENCRYPTION_MODE_AES_256_CTS = 0x4
FS_ENCRYPTION_MODE_AES_256_GCM = 0x2
FS_ENCRYPTION_MODE_AES_256_XTS = 0x1
FS_ENCRYPTION_MODE_INVALID = 0x0
FS_IOC_GET_ENCRYPTION_POLICY = 0x400c6615
FS_IOC_GET_ENCRYPTION_PWSALT = 0x40106614
FS_IOC_SET_ENCRYPTION_POLICY = 0x800c6613
FS_KEY_DESCRIPTOR_SIZE = 0x8
FS_KEY_DESC_PREFIX = "fscrypt:"
FS_KEY_DESC_PREFIX_SIZE = 0x8
FS_MAX_KEY_SIZE = 0x40
FS_POLICY_FLAGS_PAD_16 = 0x2
FS_POLICY_FLAGS_PAD_32 = 0x3
FS_POLICY_FLAGS_PAD_4 = 0x0
FS_POLICY_FLAGS_PAD_8 = 0x1
FS_POLICY_FLAGS_PAD_MASK = 0x3
FS_POLICY_FLAGS_VALID = 0x3
F_DUPFD = 0x0
F_DUPFD_CLOEXEC = 0x406
F_EXLCK = 0x4
F_GETFD = 0x1
F_GETFL = 0x3
F_GETLEASE = 0x401
F_GETLK = 0x5
F_GETLK64 = 0x5
F_GETOWN = 0x9
F_GETOWN_EX = 0x10
F_GETPIPE_SZ = 0x408
F_GETSIG = 0xb
F_LOCK = 0x1
F_NOTIFY = 0x402
F_OFD_GETLK = 0x24
F_OFD_SETLK = 0x25
F_OFD_SETLKW = 0x26
F_OK = 0x0
F_RDLCK = 0x0
F_SETFD = 0x2
F_SETFL = 0x4
F_SETLEASE = 0x400
F_SETLK = 0x6
F_SETLK64 = 0x6
F_SETLKW = 0x7
F_SETLKW64 = 0x7
F_SETOWN = 0x8
F_SETOWN_EX = 0xf
F_SETPIPE_SZ = 0x407
F_SETSIG = 0xa
F_SHLCK = 0x8
F_TEST = 0x3
F_TLOCK = 0x2
F_ULOCK = 0x0
F_UNLCK = 0x2
F_WRLCK = 0x1
GENL_ADMIN_PERM = 0x1
GENL_CMD_CAP_DO = 0x2
GENL_CMD_CAP_DUMP = 0x4
GENL_CMD_CAP_HASPOL = 0x8
GENL_HDRLEN = 0x4
GENL_ID_CTRL = 0x10
GENL_ID_PMCRAID = 0x12
GENL_ID_VFS_DQUOT = 0x11
GENL_MAX_ID = 0x3ff
GENL_MIN_ID = 0x10
GENL_NAMSIZ = 0x10
GENL_START_ALLOC = 0x13
GENL_UNS_ADMIN_PERM = 0x10
GRND_NONBLOCK = 0x1
GRND_RANDOM = 0x2
HUPCL = 0x400
IBSHIFT = 0x10
ICANON = 0x2
ICMPV6_FILTER = 0x1
ICRNL = 0x100
IEXTEN = 0x8000
IFA_F_DADFAILED = 0x8
IFA_F_DEPRECATED = 0x20
IFA_F_HOMEADDRESS = 0x10
IFA_F_MANAGETEMPADDR = 0x100
IFA_F_MCAUTOJOIN = 0x400
IFA_F_NODAD = 0x2
IFA_F_NOPREFIXROUTE = 0x200
IFA_F_OPTIMISTIC = 0x4
IFA_F_PERMANENT = 0x80
IFA_F_SECONDARY = 0x1
IFA_F_STABLE_PRIVACY = 0x800
IFA_F_TEMPORARY = 0x1
IFA_F_TENTATIVE = 0x40
IFA_MAX = 0x8
IFF_ALLMULTI = 0x200
IFF_ATTACH_QUEUE = 0x200
IFF_AUTOMEDIA = 0x4000
IFF_BROADCAST = 0x2
IFF_DEBUG = 0x4
IFF_DETACH_QUEUE = 0x400
IFF_DORMANT = 0x20000
IFF_DYNAMIC = 0x8000
IFF_ECHO = 0x40000
IFF_LOOPBACK = 0x8
IFF_LOWER_UP = 0x10000
IFF_MASTER = 0x400
IFF_MULTICAST = 0x1000
IFF_MULTI_QUEUE = 0x100
IFF_NOARP = 0x80
IFF_NOFILTER = 0x1000
IFF_NOTRAILERS = 0x20
IFF_NO_PI = 0x1000
IFF_ONE_QUEUE = 0x2000
IFF_PERSIST = 0x800
IFF_POINTOPOINT = 0x10
IFF_PORTSEL = 0x2000
IFF_PROMISC = 0x100
IFF_RUNNING = 0x40
IFF_SLAVE = 0x800
IFF_TAP = 0x2
IFF_TUN = 0x1
IFF_TUN_EXCL = 0x8000
IFF_UP = 0x1
IFF_VNET_HDR = 0x4000
IFF_VOLATILE = 0x70c5a
IFNAMSIZ = 0x10
IGNBRK = 0x1
IGNCR = 0x80
IGNPAR = 0x4
IMAXBEL = 0x2000
INLCR = 0x40
INPCK = 0x10
IN_ACCESS = 0x1
IN_ALL_EVENTS = 0xfff
IN_ATTRIB = 0x4
IN_CLASSA_HOST = 0xffffff
IN_CLASSA_MAX = 0x80
IN_CLASSA_NET = 0xff000000
IN_CLASSA_NSHIFT = 0x18
IN_CLASSB_HOST = 0xffff
IN_CLASSB_MAX = 0x10000
IN_CLASSB_NET = 0xffff0000
IN_CLASSB_NSHIFT = 0x10
IN_CLASSC_HOST = 0xff
IN_CLASSC_NET = 0xffffff00
IN_CLASSC_NSHIFT = 0x8
IN_CLOEXEC = 0x80000
IN_CLOSE = 0x18
IN_CLOSE_NOWRITE = 0x10
IN_CLOSE_WRITE = 0x8
IN_CREATE = 0x100
IN_DELETE = 0x200
IN_DELETE_SELF = 0x400
IN_DONT_FOLLOW = 0x2000000
IN_EXCL_UNLINK = 0x4000000
IN_IGNORED = 0x8000
IN_ISDIR = 0x40000000
IN_LOOPBACKNET = 0x7f
IN_MASK_ADD = 0x20000000
IN_MODIFY = 0x2
IN_MOVE = 0xc0
IN_MOVED_FROM = 0x40
IN_MOVED_TO = 0x80
IN_MOVE_SELF = 0x800
IN_NONBLOCK = 0x800
IN_ONESHOT = 0x80000000
IN_ONLYDIR = 0x1000000
IN_OPEN = 0x20
IN_Q_OVERFLOW = 0x4000
IN_UNMOUNT = 0x2000
IOCTL_VM_SOCKETS_GET_LOCAL_CID = 0x7b9
IPPROTO_AH = 0x33
IPPROTO_BEETPH = 0x5e
IPPROTO_COMP = 0x6c
IPPROTO_DCCP = 0x21
IPPROTO_DSTOPTS = 0x3c
IPPROTO_EGP = 0x8
IPPROTO_ENCAP = 0x62
IPPROTO_ESP = 0x32
IPPROTO_FRAGMENT = 0x2c
IPPROTO_GRE = 0x2f
IPPROTO_HOPOPTS = 0x0
IPPROTO_ICMP = 0x1
IPPROTO_ICMPV6 = 0x3a
IPPROTO_IDP = 0x16
IPPROTO_IGMP = 0x2
IPPROTO_IP = 0x0
IPPROTO_IPIP = 0x4
IPPROTO_IPV6 = 0x29
IPPROTO_MH = 0x87
IPPROTO_MPLS = 0x89
IPPROTO_MTP = 0x5c
IPPROTO_NONE = 0x3b
IPPROTO_PIM = 0x67
IPPROTO_PUP = 0xc
IPPROTO_RAW = 0xff
IPPROTO_ROUTING = 0x2b
IPPROTO_RSVP = 0x2e
IPPROTO_SCTP = 0x84
IPPROTO_TCP = 0x6
IPPROTO_TP = 0x1d
IPPROTO_UDP = 0x11
IPPROTO_UDPLITE = 0x88
IPV6_2292DSTOPTS = 0x4
IPV6_2292HOPLIMIT = 0x8
IPV6_2292HOPOPTS = 0x3
IPV6_2292PKTINFO = 0x2
IPV6_2292PKTOPTIONS = 0x6
IPV6_2292RTHDR = 0x5
IPV6_ADDRFORM = 0x1
IPV6_ADDR_PREFERENCES = 0x48
IPV6_ADD_MEMBERSHIP = 0x14
IPV6_AUTHHDR = 0xa
IPV6_AUTOFLOWLABEL = 0x46
IPV6_CHECKSUM = 0x7
IPV6_DONTFRAG = 0x3e
IPV6_DROP_MEMBERSHIP = 0x15
IPV6_DSTOPTS = 0x3b
IPV6_HDRINCL = 0x24
IPV6_HOPLIMIT = 0x34
IPV6_HOPOPTS = 0x36
IPV6_IPSEC_POLICY = 0x22
IPV6_JOIN_ANYCAST = 0x1b
IPV6_JOIN_GROUP = 0x14
IPV6_LEAVE_ANYCAST = 0x1c
IPV6_LEAVE_GROUP = 0x15
IPV6_MINHOPCOUNT = 0x49
IPV6_MTU = 0x18
IPV6_MTU_DISCOVER = 0x17
IPV6_MULTICAST_HOPS = 0x12
IPV6_MULTICAST_IF = 0x11
IPV6_MULTICAST_LOOP = 0x13
IPV6_NEXTHOP = 0x9
IPV6_ORIGDSTADDR = 0x4a
IPV6_PATHMTU = 0x3d
IPV6_PKTINFO = 0x32
IPV6_PMTUDISC_DO = 0x2
IPV6_PMTUDISC_DONT = 0x0
IPV6_PMTUDISC_INTERFACE = 0x4
IPV6_PMTUDISC_OMIT = 0x5
IPV6_PMTUDISC_PROBE = 0x3
IPV6_PMTUDISC_WANT = 0x1
IPV6_RECVDSTOPTS = 0x3a
IPV6_RECVERR = 0x19
IPV6_RECVFRAGSIZE = 0x4d
IPV6_RECVHOPLIMIT = 0x33
IPV6_RECVHOPOPTS = 0x35
IPV6_RECVORIGDSTADDR = 0x4a
IPV6_RECVPATHMTU = 0x3c
IPV6_RECVPKTINFO = 0x31
IPV6_RECVRTHDR = 0x38
IPV6_RECVTCLASS = 0x42
IPV6_ROUTER_ALERT = 0x16
IPV6_RTHDR = 0x39
IPV6_RTHDRDSTOPTS = 0x37
IPV6_RTHDR_LOOSE = 0x0
IPV6_RTHDR_STRICT = 0x1
IPV6_RTHDR_TYPE_0 = 0x0
IPV6_RXDSTOPTS = 0x3b
IPV6_RXHOPOPTS = 0x36
IPV6_TCLASS = 0x43
IPV6_TRANSPARENT = 0x4b
IPV6_UNICAST_HOPS = 0x10
IPV6_UNICAST_IF = 0x4c
IPV6_V6ONLY = 0x1a
IPV6_XFRM_POLICY = 0x23
IP_ADD_MEMBERSHIP = 0x23
IP_ADD_SOURCE_MEMBERSHIP = 0x27
IP_BIND_ADDRESS_NO_PORT = 0x18
IP_BLOCK_SOURCE = 0x26
IP_CHECKSUM = 0x17
IP_DEFAULT_MULTICAST_LOOP = 0x1
IP_DEFAULT_MULTICAST_TTL = 0x1
IP_DF = 0x4000
IP_DROP_MEMBERSHIP = 0x24
IP_DROP_SOURCE_MEMBERSHIP = 0x28
IP_FREEBIND = 0xf
IP_HDRINCL = 0x3
IP_IPSEC_POLICY = 0x10
IP_MAXPACKET = 0xffff
IP_MAX_MEMBERSHIPS = 0x14
IP_MF = 0x2000
IP_MINTTL = 0x15
IP_MSFILTER = 0x29
IP_MSS = 0x240
IP_MTU = 0xe
IP_MTU_DISCOVER = 0xa
IP_MULTICAST_ALL = 0x31
IP_MULTICAST_IF = 0x20
IP_MULTICAST_LOOP = 0x22
IP_MULTICAST_TTL = 0x21
IP_NODEFRAG = 0x16
IP_OFFMASK = 0x1fff
IP_OPTIONS = 0x4
IP_ORIGDSTADDR = 0x14
IP_PASSSEC = 0x12
IP_PKTINFO = 0x8
IP_PKTOPTIONS = 0x9
IP_PMTUDISC = 0xa
IP_PMTUDISC_DO = 0x2
IP_PMTUDISC_DONT = 0x0
IP_PMTUDISC_INTERFACE = 0x4
IP_PMTUDISC_OMIT = 0x5
IP_PMTUDISC_PROBE = 0x3
IP_PMTUDISC_WANT = 0x1
IP_RECVERR = 0xb
IP_RECVFRAGSIZE = 0x19
IP_RECVOPTS = 0x6
IP_RECVORIGDSTADDR = 0x14
IP_RECVRETOPTS = 0x7
IP_RECVTOS = 0xd
IP_RECVTTL = 0xc
IP_RETOPTS = 0x7
IP_RF = 0x8000
IP_ROUTER_ALERT = 0x5
IP_TOS = 0x1
IP_TRANSPARENT = 0x13
IP_TTL = 0x2
IP_UNBLOCK_SOURCE = 0x25
IP_UNICAST_IF = 0x32
IP_XFRM_POLICY = 0x11
ISIG = 0x1
ISTRIP = 0x20
IUCLC = 0x200
IUTF8 = 0x4000
IXANY = 0x800
IXOFF = 0x1000
IXON = 0x400
KEYCTL_ASSUME_AUTHORITY = 0x10
KEYCTL_CHOWN = 0x4
KEYCTL_CLEAR = 0x7
KEYCTL_DESCRIBE = 0x6
KEYCTL_DH_COMPUTE = 0x17
KEYCTL_GET_KEYRING_ID = 0x0
KEYCTL_GET_PERSISTENT = 0x16
KEYCTL_GET_SECURITY = 0x11
KEYCTL_INSTANTIATE = 0xc
KEYCTL_INSTANTIATE_IOV = 0x14
KEYCTL_INVALIDATE = 0x15
KEYCTL_JOIN_SESSION_KEYRING = 0x1
KEYCTL_LINK = 0x8
KEYCTL_NEGATE = 0xd
KEYCTL_READ = 0xb
KEYCTL_REJECT = 0x13
KEYCTL_RESTRICT_KEYRING = 0x1d
KEYCTL_REVOKE = 0x3
KEYCTL_SEARCH = 0xa
KEYCTL_SESSION_TO_PARENT = 0x12
KEYCTL_SETPERM = 0x5
KEYCTL_SET_REQKEY_KEYRING = 0xe
KEYCTL_SET_TIMEOUT = 0xf
KEYCTL_UNLINK = 0x9
KEYCTL_UPDATE = 0x2
KEY_REQKEY_DEFL_DEFAULT = 0x0
KEY_REQKEY_DEFL_GROUP_KEYRING = 0x6
KEY_REQKEY_DEFL_NO_CHANGE = -0x1
KEY_REQKEY_DEFL_PROCESS_KEYRING = 0x2
KEY_REQKEY_DEFL_REQUESTOR_KEYRING = 0x7
KEY_REQKEY_DEFL_SESSION_KEYRING = 0x3
KEY_REQKEY_DEFL_THREAD_KEYRING = 0x1
KEY_REQKEY_DEFL_USER_KEYRING = 0x4
KEY_REQKEY_DEFL_USER_SESSION_KEYRING = 0x5
KEY_SPEC_GROUP_KEYRING = -0x6
KEY_SPEC_PROCESS_KEYRING = -0x2
KEY_SPEC_REQKEY_AUTH_KEY = -0x7
KEY_SPEC_REQUESTOR_KEYRING = -0x8
KEY_SPEC_SESSION_KEYRING = -0x3
KEY_SPEC_THREAD_KEYRING = -0x1
KEY_SPEC_USER_KEYRING = -0x4
KEY_SPEC_USER_SESSION_KEYRING = -0x5
LINUX_REBOOT_CMD_CAD_OFF = 0x0
LINUX_REBOOT_CMD_CAD_ON = 0x89abcdef
LINUX_REBOOT_CMD_HALT = 0xcdef0123
LINUX_REBOOT_CMD_KEXEC = 0x45584543
LINUX_REBOOT_CMD_POWER_OFF = 0x4321fedc
LINUX_REBOOT_CMD_RESTART = 0x1234567
LINUX_REBOOT_CMD_RESTART2 = 0xa1b2c3d4
LINUX_REBOOT_CMD_SW_SUSPEND = 0xd000fce2
LINUX_REBOOT_MAGIC1 = 0xfee1dead
LINUX_REBOOT_MAGIC2 = 0x28121969
LOCK_EX = 0x2
LOCK_NB = 0x4
LOCK_SH = 0x1
LOCK_UN = 0x8
MADV_DODUMP = 0x11
MADV_DOFORK = 0xb
MADV_DONTDUMP = 0x10
MADV_DONTFORK = 0xa
MADV_DONTNEED = 0x4
MADV_FREE = 0x8
MADV_HUGEPAGE = 0xe
MADV_HWPOISON = 0x64
MADV_MERGEABLE = 0xc
MADV_NOHUGEPAGE = 0xf
MADV_NORMAL = 0x0
MADV_RANDOM = 0x1
MADV_REMOVE = 0x9
MADV_SEQUENTIAL = 0x2
MADV_UNMERGEABLE = 0xd
MADV_WILLNEED = 0x3
MAP_32BIT = 0x40
MAP_ANON = 0x20
MAP_ANONYMOUS = 0x20
MAP_DENYWRITE = 0x800
MAP_EXECUTABLE = 0x1000
MAP_FILE = 0x0
MAP_FIXED = 0x10
MAP_GROWSDOWN = 0x100
MAP_HUGETLB = 0x40000
MAP_HUGE_MASK = 0x3f
MAP_HUGE_SHIFT = 0x1a
MAP_LOCKED = 0x2000
MAP_NONBLOCK = 0x10000
MAP_NORESERVE = 0x4000
MAP_POPULATE = 0x8000
MAP_PRIVATE = 0x2
MAP_SHARED = 0x1
MAP_STACK = 0x20000
MAP_TYPE = 0xf
MCL_CURRENT = 0x1
MCL_FUTURE = 0x2
MCL_ONFAULT = 0x4
MNT_DETACH = 0x2
MNT_EXPIRE = 0x4
MNT_FORCE = 0x1
MSG_BATCH = 0x40000
MSG_CMSG_CLOEXEC = 0x40000000
MSG_CONFIRM = 0x800
MSG_CTRUNC = 0x8
MSG_DONTROUTE = 0x4
MSG_DONTWAIT = 0x40
MSG_EOR = 0x80
MSG_ERRQUEUE = 0x2000
MSG_FASTOPEN = 0x20000000
MSG_FIN = 0x200
MSG_MORE = 0x8000
MSG_NOSIGNAL = 0x4000
MSG_OOB = 0x1
MSG_PEEK = 0x2
MSG_PROXY = 0x10
MSG_RST = 0x1000
MSG_SYN = 0x400
MSG_TRUNC = 0x20
MSG_TRYHARD = 0x4
MSG_WAITALL = 0x100
MSG_WAITFORONE = 0x10000
MS_ACTIVE = 0x40000000
MS_ASYNC = 0x1
MS_BIND = 0x1000
MS_BORN = 0x20000000
MS_DIRSYNC = 0x80
MS_INVALIDATE = 0x2
MS_I_VERSION = 0x800000
MS_KERNMOUNT = 0x400000
MS_LAZYTIME = 0x2000000
MS_MANDLOCK = 0x40
MS_MGC_MSK = 0xffff0000
MS_MGC_VAL = 0xc0ed0000
MS_MOVE = 0x2000
MS_NOATIME = 0x400
MS_NODEV = 0x4
MS_NODIRATIME = 0x800
MS_NOEXEC = 0x8
MS_NOREMOTELOCK = 0x8000000
MS_NOSEC = 0x10000000
MS_NOSUID = 0x2
MS_NOUSER = -0x80000000
MS_POSIXACL = 0x10000
MS_PRIVATE = 0x40000
MS_RDONLY = 0x1
MS_REC = 0x4000
MS_RELATIME = 0x200000
MS_REMOUNT = 0x20
MS_RMT_MASK = 0x2800051
MS_SHARED = 0x100000
MS_SILENT = 0x8000
MS_SLAVE = 0x80000
MS_STRICTATIME = 0x1000000
MS_SUBMOUNT = 0x4000000
MS_SYNC = 0x4
MS_SYNCHRONOUS = 0x10
MS_UNBINDABLE = 0x20000
MS_VERBOSE = 0x8000
NAME_MAX = 0xff
NETLINK_ADD_MEMBERSHIP = 0x1
NETLINK_AUDIT = 0x9
NETLINK_BROADCAST_ERROR = 0x4
NETLINK_CAP_ACK = 0xa
NETLINK_CONNECTOR = 0xb
NETLINK_CRYPTO = 0x15
NETLINK_DNRTMSG = 0xe
NETLINK_DROP_MEMBERSHIP = 0x2
NETLINK_ECRYPTFS = 0x13
NETLINK_EXT_ACK = 0xb
NETLINK_FIB_LOOKUP = 0xa
NETLINK_FIREWALL = 0x3
NETLINK_GENERIC = 0x10
NETLINK_INET_DIAG = 0x4
NETLINK_IP6_FW = 0xd
NETLINK_ISCSI = 0x8
NETLINK_KOBJECT_UEVENT = 0xf
NETLINK_LISTEN_ALL_NSID = 0x8
NETLINK_LIST_MEMBERSHIPS = 0x9
NETLINK_NETFILTER = 0xc
NETLINK_NFLOG = 0x5
NETLINK_NO_ENOBUFS = 0x5
NETLINK_PKTINFO = 0x3
NETLINK_RDMA = 0x14
NETLINK_ROUTE = 0x0
NETLINK_RX_RING = 0x6
NETLINK_SCSITRANSPORT = 0x12
NETLINK_SELINUX = 0x7
NETLINK_SMC = 0x16
NETLINK_SOCK_DIAG = 0x4
NETLINK_TX_RING = 0x7
NETLINK_UNUSED = 0x1
NETLINK_USERSOCK = 0x2
NETLINK_XFRM = 0x6
NL0 = 0x0
NL1 = 0x100
NLA_ALIGNTO = 0x4
NLA_F_NESTED = 0x8000
NLA_F_NET_BYTEORDER = 0x4000
NLA_HDRLEN = 0x4
NLDLY = 0x100
NLMSG_ALIGNTO = 0x4
NLMSG_DONE = 0x3
NLMSG_ERROR = 0x2
NLMSG_HDRLEN = 0x10
NLMSG_MIN_TYPE = 0x10
NLMSG_NOOP = 0x1
NLMSG_OVERRUN = 0x4
NLM_F_ACK = 0x4
NLM_F_ACK_TLVS = 0x200
NLM_F_APPEND = 0x800
NLM_F_ATOMIC = 0x400
NLM_F_CAPPED = 0x100
NLM_F_CREATE = 0x400
NLM_F_DUMP = 0x300
NLM_F_DUMP_FILTERED = 0x20
NLM_F_DUMP_INTR = 0x10
NLM_F_ECHO = 0x8
NLM_F_EXCL = 0x200
NLM_F_MATCH = 0x200
NLM_F_MULTI = 0x2
NLM_F_REPLACE = 0x100
NLM_F_REQUEST = 0x1
NLM_F_ROOT = 0x100
NOFLSH = 0x80
OCRNL = 0x8
OFDEL = 0x80
OFILL = 0x40
OLCUC = 0x2
ONLCR = 0x4
ONLRET = 0x20
ONOCR = 0x10
OPOST = 0x1
O_ACCMODE = 0x3
O_APPEND = 0x400
O_ASYNC = 0x2000
O_CLOEXEC = 0x80000
O_CREAT = 0x40
O_DIRECT = 0x4000
O_DIRECTORY = 0x10000
O_DSYNC = 0x1000
O_EXCL = 0x80
O_FSYNC = 0x101000
O_LARGEFILE = 0x0
O_NDELAY = 0x800
O_NOATIME = 0x40000
O_NOCTTY = 0x100
O_NOFOLLOW = 0x20000
O_NONBLOCK = 0x800
O_PATH = 0x200000
O_RDONLY = 0x0
O_RDWR = 0x2
O_RSYNC = 0x101000
O_SYNC = 0x101000
O_TMPFILE = 0x410000
O_TRUNC = 0x200
O_WRONLY = 0x1
PACKET_ADD_MEMBERSHIP = 0x1
PACKET_AUXDATA = 0x8
PACKET_BROADCAST = 0x1
PACKET_COPY_THRESH = 0x7
PACKET_DROP_MEMBERSHIP = 0x2
PACKET_FANOUT = 0x12
PACKET_FANOUT_CBPF = 0x6
PACKET_FANOUT_CPU = 0x2
PACKET_FANOUT_DATA = 0x16
PACKET_FANOUT_EBPF = 0x7
PACKET_FANOUT_FLAG_DEFRAG = 0x8000
PACKET_FANOUT_FLAG_ROLLOVER = 0x1000
PACKET_FANOUT_FLAG_UNIQUEID = 0x2000
PACKET_FANOUT_HASH = 0x0
PACKET_FANOUT_LB = 0x1
PACKET_FANOUT_QM = 0x5
PACKET_FANOUT_RND = 0x4
PACKET_FANOUT_ROLLOVER = 0x3
PACKET_FASTROUTE = 0x6
PACKET_HDRLEN = 0xb
PACKET_HOST = 0x0
PACKET_KERNEL = 0x7
PACKET_LOOPBACK = 0x5
PACKET_LOSS = 0xe
PACKET_MR_ALLMULTI = 0x2
PACKET_MR_MULTICAST = 0x0
PACKET_MR_PROMISC = 0x1
PACKET_MR_UNICAST = 0x3
PACKET_MULTICAST = 0x2
PACKET_ORIGDEV = 0x9
PACKET_OTHERHOST = 0x3
PACKET_OUTGOING = 0x4
PACKET_QDISC_BYPASS = 0x14
PACKET_RECV_OUTPUT = 0x3
PACKET_RESERVE = 0xc
PACKET_ROLLOVER_STATS = 0x15
PACKET_RX_RING = 0x5
PACKET_STATISTICS = 0x6
PACKET_TIMESTAMP = 0x11
PACKET_TX_HAS_OFF = 0x13
PACKET_TX_RING = 0xd
PACKET_TX_TIMESTAMP = 0x10
PACKET_USER = 0x6
PACKET_VERSION = 0xa
PACKET_VNET_HDR = 0xf
PARENB = 0x100
PARITY_CRC16_PR0 = 0x2
PARITY_CRC16_PR0_CCITT = 0x4
PARITY_CRC16_PR1 = 0x3
PARITY_CRC16_PR1_CCITT = 0x5
PARITY_CRC32_PR0_CCITT = 0x6
PARITY_CRC32_PR1_CCITT = 0x7
PARITY_DEFAULT = 0x0
PARITY_NONE = 0x1
PARMRK = 0x8
PARODD = 0x200
PENDIN = 0x4000
PERF_EVENT_IOC_DISABLE = 0x2401
PERF_EVENT_IOC_ENABLE = 0x2400
PERF_EVENT_IOC_ID = 0x80082407
PERF_EVENT_IOC_PAUSE_OUTPUT = 0x40042409
PERF_EVENT_IOC_PERIOD = 0x40082404
PERF_EVENT_IOC_REFRESH = 0x2402
PERF_EVENT_IOC_RESET = 0x2403
PERF_EVENT_IOC_SET_BPF = 0x40042408
PERF_EVENT_IOC_SET_FILTER = 0x40082406
PERF_EVENT_IOC_SET_OUTPUT = 0x2405
PRIO_PGRP = 0x1
PRIO_PROCESS = 0x0
PRIO_USER = 0x2
PROT_EXEC = 0x4
PROT_GROWSDOWN = 0x1000000
PROT_GROWSUP = 0x2000000
PROT_NONE = 0x0
PROT_READ = 0x1
PROT_WRITE = 0x2
PR_CAPBSET_DROP = 0x18
PR_CAPBSET_READ = 0x17
PR_CAP_AMBIENT = 0x2f
PR_CAP_AMBIENT_CLEAR_ALL = 0x4
PR_CAP_AMBIENT_IS_SET = 0x1
PR_CAP_AMBIENT_LOWER = 0x3
PR_CAP_AMBIENT_RAISE = 0x2
PR_ENDIAN_BIG = 0x0
PR_ENDIAN_LITTLE = 0x1
PR_ENDIAN_PPC_LITTLE = 0x2
PR_FPEMU_NOPRINT = 0x1
PR_FPEMU_SIGFPE = 0x2
PR_FP_EXC_ASYNC = 0x2
PR_FP_EXC_DISABLED = 0x0
PR_FP_EXC_DIV = 0x10000
PR_FP_EXC_INV = 0x100000
PR_FP_EXC_NONRECOV = 0x1
PR_FP_EXC_OVF = 0x20000
PR_FP_EXC_PRECISE = 0x3
PR_FP_EXC_RES = 0x80000
PR_FP_EXC_SW_ENABLE = 0x80
PR_FP_EXC_UND = 0x40000
PR_FP_MODE_FR = 0x1
PR_FP_MODE_FRE = 0x2
PR_GET_CHILD_SUBREAPER = 0x25
PR_GET_DUMPABLE = 0x3
PR_GET_ENDIAN = 0x13
PR_GET_FPEMU = 0x9
PR_GET_FPEXC = 0xb
PR_GET_FP_MODE = 0x2e
PR_GET_KEEPCAPS = 0x7
PR_GET_NAME = 0x10
PR_GET_NO_NEW_PRIVS = 0x27
PR_GET_PDEATHSIG = 0x2
PR_GET_SECCOMP = 0x15
PR_GET_SECUREBITS = 0x1b
PR_GET_THP_DISABLE = 0x2a
PR_GET_TID_ADDRESS = 0x28
PR_GET_TIMERSLACK = 0x1e
PR_GET_TIMING = 0xd
PR_GET_TSC = 0x19
PR_GET_UNALIGN = 0x5
PR_MCE_KILL = 0x21
PR_MCE_KILL_CLEAR = 0x0
PR_MCE_KILL_DEFAULT = 0x2
PR_MCE_KILL_EARLY = 0x1
PR_MCE_KILL_GET = 0x22
PR_MCE_KILL_LATE = 0x0
PR_MCE_KILL_SET = 0x1
PR_MPX_DISABLE_MANAGEMENT = 0x2c
PR_MPX_ENABLE_MANAGEMENT = 0x2b
PR_SET_CHILD_SUBREAPER = 0x24
PR_SET_DUMPABLE = 0x4
PR_SET_ENDIAN = 0x14
PR_SET_FPEMU = 0xa
PR_SET_FPEXC = 0xc
PR_SET_FP_MODE = 0x2d
PR_SET_KEEPCAPS = 0x8
PR_SET_MM = 0x23
PR_SET_MM_ARG_END = 0x9
PR_SET_MM_ARG_START = 0x8
PR_SET_MM_AUXV = 0xc
PR_SET_MM_BRK = 0x7
PR_SET_MM_END_CODE = 0x2
PR_SET_MM_END_DATA = 0x4
PR_SET_MM_ENV_END = 0xb
PR_SET_MM_ENV_START = 0xa
PR_SET_MM_EXE_FILE = 0xd
PR_SET_MM_MAP = 0xe
PR_SET_MM_MAP_SIZE = 0xf
PR_SET_MM_START_BRK = 0x6
PR_SET_MM_START_CODE = 0x1
PR_SET_MM_START_DATA = 0x3
PR_SET_MM_START_STACK = 0x5
PR_SET_NAME = 0xf
PR_SET_NO_NEW_PRIVS = 0x26
PR_SET_PDEATHSIG = 0x1
PR_SET_PTRACER = 0x59616d61
PR_SET_PTRACER_ANY = 0xffffffffffffffff
PR_SET_SECCOMP = 0x16
PR_SET_SECUREBITS = 0x1c
PR_SET_THP_DISABLE = 0x29
PR_SET_TIMERSLACK = 0x1d
PR_SET_TIMING = 0xe
PR_SET_TSC = 0x1a
PR_SET_UNALIGN = 0x6
PR_TASK_PERF_EVENTS_DISABLE = 0x1f
PR_TASK_PERF_EVENTS_ENABLE = 0x20
PR_TIMING_STATISTICAL = 0x0
PR_TIMING_TIMESTAMP = 0x1
PR_TSC_ENABLE = 0x1
PR_TSC_SIGSEGV = 0x2
PR_UNALIGN_NOPRINT = 0x1
PR_UNALIGN_SIGBUS = 0x2
PTRACE_ARCH_PRCTL = 0x1e
PTRACE_ATTACH = 0x10
PTRACE_CONT = 0x7
PTRACE_DETACH = 0x11
PTRACE_EVENT_CLONE = 0x3
PTRACE_EVENT_EXEC = 0x4
PTRACE_EVENT_EXIT = 0x6
PTRACE_EVENT_FORK = 0x1
PTRACE_EVENT_SECCOMP = 0x7
PTRACE_EVENT_STOP = 0x80
PTRACE_EVENT_VFORK = 0x2
PTRACE_EVENT_VFORK_DONE = 0x5
PTRACE_GETEVENTMSG = 0x4201
PTRACE_GETFPREGS = 0xe
PTRACE_GETFPXREGS = 0x12
PTRACE_GETREGS = 0xc
PTRACE_GETREGSET = 0x4204
PTRACE_GETSIGINFO = 0x4202
PTRACE_GETSIGMASK = 0x420a
PTRACE_GET_THREAD_AREA = 0x19
PTRACE_INTERRUPT = 0x4207
PTRACE_KILL = 0x8
PTRACE_LISTEN = 0x4208
PTRACE_OLDSETOPTIONS = 0x15
PTRACE_O_EXITKILL = 0x100000
PTRACE_O_MASK = 0x3000ff
PTRACE_O_SUSPEND_SECCOMP = 0x200000
PTRACE_O_TRACECLONE = 0x8
PTRACE_O_TRACEEXEC = 0x10
PTRACE_O_TRACEEXIT = 0x40
PTRACE_O_TRACEFORK = 0x2
PTRACE_O_TRACESECCOMP = 0x80
PTRACE_O_TRACESYSGOOD = 0x1
PTRACE_O_TRACEVFORK = 0x4
PTRACE_O_TRACEVFORKDONE = 0x20
PTRACE_PEEKDATA = 0x2
PTRACE_PEEKSIGINFO = 0x4209
PTRACE_PEEKSIGINFO_SHARED = 0x1
PTRACE_PEEKTEXT = 0x1
PTRACE_PEEKUSR = 0x3
PTRACE_POKEDATA = 0x5
PTRACE_POKETEXT = 0x4
PTRACE_POKEUSR = 0x6
PTRACE_SECCOMP_GET_FILTER = 0x420c
PTRACE_SEIZE = 0x4206
PTRACE_SETFPREGS = 0xf
PTRACE_SETFPXREGS = 0x13
PTRACE_SETOPTIONS = 0x4200
PTRACE_SETREGS = 0xd
PTRACE_SETREGSET = 0x4205
PTRACE_SETSIGINFO = 0x4203
PTRACE_SETSIGMASK = 0x420b
PTRACE_SET_THREAD_AREA = 0x1a
PTRACE_SINGLEBLOCK = 0x21
PTRACE_SINGLESTEP = 0x9
PTRACE_SYSCALL = 0x18
PTRACE_SYSEMU = 0x1f
PTRACE_SYSEMU_SINGLESTEP = 0x20
PTRACE_TRACEME = 0x0
RLIMIT_AS = 0x9
RLIMIT_CORE = 0x4
RLIMIT_CPU = 0x0
RLIMIT_DATA = 0x2
RLIMIT_FSIZE = 0x1
RLIMIT_LOCKS = 0xa
RLIMIT_MEMLOCK = 0x8
RLIMIT_MSGQUEUE = 0xc
RLIMIT_NICE = 0xd
RLIMIT_NOFILE = 0x7
RLIMIT_NPROC = 0x6
RLIMIT_RSS = 0x5
RLIMIT_RTPRIO = 0xe
RLIMIT_RTTIME = 0xf
RLIMIT_SIGPENDING = 0xb
RLIMIT_STACK = 0x3
RLIM_INFINITY = 0xffffffffffffffff
RTAX_ADVMSS = 0x8
RTAX_CC_ALGO = 0x10
RTAX_CWND = 0x7
RTAX_FEATURES = 0xc
RTAX_FEATURE_ALLFRAG = 0x8
RTAX_FEATURE_ECN = 0x1
RTAX_FEATURE_MASK = 0xf
RTAX_FEATURE_SACK = 0x2
RTAX_FEATURE_TIMESTAMP = 0x4
RTAX_HOPLIMIT = 0xa
RTAX_INITCWND = 0xb
RTAX_INITRWND = 0xe
RTAX_LOCK = 0x1
RTAX_MAX = 0x10
RTAX_MTU = 0x2
RTAX_QUICKACK = 0xf
RTAX_REORDERING = 0x9
RTAX_RTO_MIN = 0xd
RTAX_RTT = 0x4
RTAX_RTTVAR = 0x5
RTAX_SSTHRESH = 0x6
RTAX_UNSPEC = 0x0
RTAX_WINDOW = 0x3
RTA_ALIGNTO = 0x4
RTA_MAX = 0x1a
RTCF_DIRECTSRC = 0x4000000
RTCF_DOREDIRECT = 0x1000000
RTCF_LOG = 0x2000000
RTCF_MASQ = 0x400000
RTCF_NAT = 0x800000
RTCF_VALVE = 0x200000
RTF_ADDRCLASSMASK = 0xf8000000
RTF_ADDRCONF = 0x40000
RTF_ALLONLINK = 0x20000
RTF_BROADCAST = 0x10000000
RTF_CACHE = 0x1000000
RTF_DEFAULT = 0x10000
RTF_DYNAMIC = 0x10
RTF_FLOW = 0x2000000
RTF_GATEWAY = 0x2
RTF_HOST = 0x4
RTF_INTERFACE = 0x40000000
RTF_IRTT = 0x100
RTF_LINKRT = 0x100000
RTF_LOCAL = 0x80000000
RTF_MODIFIED = 0x20
RTF_MSS = 0x40
RTF_MTU = 0x40
RTF_MULTICAST = 0x20000000
RTF_NAT = 0x8000000
RTF_NOFORWARD = 0x1000
RTF_NONEXTHOP = 0x200000
RTF_NOPMTUDISC = 0x4000
RTF_POLICY = 0x4000000
RTF_REINSTATE = 0x8
RTF_REJECT = 0x200
RTF_STATIC = 0x400
RTF_THROW = 0x2000
RTF_UP = 0x1
RTF_WINDOW = 0x80
RTF_XRESOLVE = 0x800
RTM_BASE = 0x10
RTM_DELACTION = 0x31
RTM_DELADDR = 0x15
RTM_DELADDRLABEL = 0x49
RTM_DELLINK = 0x11
RTM_DELMDB = 0x55
RTM_DELNEIGH = 0x1d
RTM_DELNETCONF = 0x51
RTM_DELNSID = 0x59
RTM_DELQDISC = 0x25
RTM_DELROUTE = 0x19
RTM_DELRULE = 0x21
RTM_DELTCLASS = 0x29
RTM_DELTFILTER = 0x2d
RTM_F_CLONED = 0x200
RTM_F_EQUALIZE = 0x400
RTM_F_FIB_MATCH = 0x2000
RTM_F_LOOKUP_TABLE = 0x1000
RTM_F_NOTIFY = 0x100
RTM_F_PREFIX = 0x800
RTM_GETACTION = 0x32
RTM_GETADDR = 0x16
RTM_GETADDRLABEL = 0x4a
RTM_GETANYCAST = 0x3e
RTM_GETDCB = 0x4e
RTM_GETLINK = 0x12
RTM_GETMDB = 0x56
RTM_GETMULTICAST = 0x3a
RTM_GETNEIGH = 0x1e
RTM_GETNEIGHTBL = 0x42
RTM_GETNETCONF = 0x52
RTM_GETNSID = 0x5a
RTM_GETQDISC = 0x26
RTM_GETROUTE = 0x1a
RTM_GETRULE = 0x22
RTM_GETSTATS = 0x5e
RTM_GETTCLASS = 0x2a
RTM_GETTFILTER = 0x2e
RTM_MAX = 0x63
RTM_NEWACTION = 0x30
RTM_NEWADDR = 0x14
RTM_NEWADDRLABEL = 0x48
RTM_NEWCACHEREPORT = 0x60
RTM_NEWLINK = 0x10
RTM_NEWMDB = 0x54
RTM_NEWNDUSEROPT = 0x44
RTM_NEWNEIGH = 0x1c
RTM_NEWNEIGHTBL = 0x40
RTM_NEWNETCONF = 0x50
RTM_NEWNSID = 0x58
RTM_NEWPREFIX = 0x34
RTM_NEWQDISC = 0x24
RTM_NEWROUTE = 0x18
RTM_NEWRULE = 0x20
RTM_NEWSTATS = 0x5c
RTM_NEWTCLASS = 0x28
RTM_NEWTFILTER = 0x2c
RTM_NR_FAMILIES = 0x15
RTM_NR_MSGTYPES = 0x54
RTM_SETDCB = 0x4f
RTM_SETLINK = 0x13
RTM_SETNEIGHTBL = 0x43
RTNH_ALIGNTO = 0x4
RTNH_COMPARE_MASK = 0x19
RTNH_F_DEAD = 0x1
RTNH_F_LINKDOWN = 0x10
RTNH_F_OFFLOAD = 0x8
RTNH_F_ONLINK = 0x4
RTNH_F_PERVASIVE = 0x2
RTNH_F_UNRESOLVED = 0x20
RTN_MAX = 0xb
RTPROT_BABEL = 0x2a
RTPROT_BIRD = 0xc
RTPROT_BOOT = 0x3
RTPROT_DHCP = 0x10
RTPROT_DNROUTED = 0xd
RTPROT_GATED = 0x8
RTPROT_KERNEL = 0x2
RTPROT_MROUTED = 0x11
RTPROT_MRT = 0xa
RTPROT_NTK = 0xf
RTPROT_RA = 0x9
RTPROT_REDIRECT = 0x1
RTPROT_STATIC = 0x4
RTPROT_UNSPEC = 0x0
RTPROT_XORP = 0xe
RTPROT_ZEBRA = 0xb
RT_CLASS_DEFAULT = 0xfd
RT_CLASS_LOCAL = 0xff
RT_CLASS_MAIN = 0xfe
RT_CLASS_MAX = 0xff
RT_CLASS_UNSPEC = 0x0
RUSAGE_CHILDREN = -0x1
RUSAGE_SELF = 0x0
RUSAGE_THREAD = 0x1
SCM_CREDENTIALS = 0x2
SCM_RIGHTS = 0x1
SCM_TIMESTAMP = 0x1d
SCM_TIMESTAMPING = 0x25
SCM_TIMESTAMPING_OPT_STATS = 0x36
SCM_TIMESTAMPING_PKTINFO = 0x3a
SCM_TIMESTAMPNS = 0x23
SCM_WIFI_STATUS = 0x29
SECCOMP_MODE_DISABLED = 0x0
SECCOMP_MODE_FILTER = 0x2
SECCOMP_MODE_STRICT = 0x1
SHUT_RD = 0x0
SHUT_RDWR = 0x2
SHUT_WR = 0x1
SIOCADDDLCI = 0x8980
SIOCADDMULTI = 0x8931
SIOCADDRT = 0x890b
SIOCATMARK = 0x8905
SIOCBONDCHANGEACTIVE = 0x8995
SIOCBONDENSLAVE = 0x8990
SIOCBONDINFOQUERY = 0x8994
SIOCBONDRELEASE = 0x8991
SIOCBONDSETHWADDR = 0x8992
SIOCBONDSLAVEINFOQUERY = 0x8993
SIOCBRADDBR = 0x89a0
SIOCBRADDIF = 0x89a2
SIOCBRDELBR = 0x89a1
SIOCBRDELIF = 0x89a3
SIOCDARP = 0x8953
SIOCDELDLCI = 0x8981
SIOCDELMULTI = 0x8932
SIOCDELRT = 0x890c
SIOCDEVPRIVATE = 0x89f0
SIOCDIFADDR = 0x8936
SIOCDRARP = 0x8960
SIOCETHTOOL = 0x8946
SIOCGARP = 0x8954
SIOCGHWTSTAMP = 0x89b1
SIOCGIFADDR = 0x8915
SIOCGIFBR = 0x8940
SIOCGIFBRDADDR = 0x8919
SIOCGIFCONF = 0x8912
SIOCGIFCOUNT = 0x8938
SIOCGIFDSTADDR = 0x8917
SIOCGIFENCAP = 0x8925
SIOCGIFFLAGS = 0x8913
SIOCGIFHWADDR = 0x8927
SIOCGIFINDEX = 0x8933
SIOCGIFMAP = 0x8970
SIOCGIFMEM = 0x891f
SIOCGIFMETRIC = 0x891d
SIOCGIFMTU = 0x8921
SIOCGIFNAME = 0x8910
SIOCGIFNETMASK = 0x891b
SIOCGIFPFLAGS = 0x8935
SIOCGIFSLAVE = 0x8929
SIOCGIFTXQLEN = 0x8942
SIOCGIFVLAN = 0x8982
SIOCGMIIPHY = 0x8947
SIOCGMIIREG = 0x8948
SIOCGPGRP = 0x8904
SIOCGRARP = 0x8961
SIOCGSKNS = 0x894c
SIOCGSTAMP = 0x8906
SIOCGSTAMPNS = 0x8907
SIOCINQ = 0x541b
SIOCOUTQ = 0x5411
SIOCOUTQNSD = 0x894b
SIOCPROTOPRIVATE = 0x89e0
SIOCRTMSG = 0x890d
SIOCSARP = 0x8955
SIOCSHWTSTAMP = 0x89b0
SIOCSIFADDR = 0x8916
SIOCSIFBR = 0x8941
SIOCSIFBRDADDR = 0x891a
SIOCSIFDSTADDR = 0x8918
SIOCSIFENCAP = 0x8926
SIOCSIFFLAGS = 0x8914
SIOCSIFHWADDR = 0x8924
SIOCSIFHWBROADCAST = 0x8937
SIOCSIFLINK = 0x8911
SIOCSIFMAP = 0x8971
SIOCSIFMEM = 0x8920
SIOCSIFMETRIC = 0x891e
SIOCSIFMTU = 0x8922
SIOCSIFNAME = 0x8923
SIOCSIFNETMASK = 0x891c
SIOCSIFPFLAGS = 0x8934
SIOCSIFSLAVE = 0x8930
SIOCSIFTXQLEN = 0x8943
SIOCSIFVLAN = 0x8983
SIOCSMIIREG = 0x8949
SIOCSPGRP = 0x8902
SIOCSRARP = 0x8962
SIOCWANDEV = 0x894a
SOCK_CLOEXEC = 0x80000
SOCK_DCCP = 0x6
SOCK_DGRAM = 0x2
SOCK_IOC_TYPE = 0x89
SOCK_NONBLOCK = 0x800
SOCK_PACKET = 0xa
SOCK_RAW = 0x3
SOCK_RDM = 0x4
SOCK_SEQPACKET = 0x5
SOCK_STREAM = 0x1
SOL_AAL = 0x109
SOL_ALG = 0x117
SOL_ATM = 0x108
SOL_CAIF = 0x116
SOL_CAN_BASE = 0x64
SOL_DCCP = 0x10d
SOL_DECNET = 0x105
SOL_ICMPV6 = 0x3a
SOL_IP = 0x0
SOL_IPV6 = 0x29
SOL_IRDA = 0x10a
SOL_IUCV = 0x115
SOL_KCM = 0x119
SOL_LLC = 0x10c
SOL_NETBEUI = 0x10b
SOL_NETLINK = 0x10e
SOL_NFC = 0x118
SOL_PACKET = 0x107
SOL_PNPIPE = 0x113
SOL_PPPOL2TP = 0x111
SOL_RAW = 0xff
SOL_RDS = 0x114
SOL_RXRPC = 0x110
SOL_SOCKET = 0x1
SOL_TCP = 0x6
SOL_TIPC = 0x10f
SOL_X25 = 0x106
SOMAXCONN = 0x80
SO_ACCEPTCONN = 0x1e
SO_ATTACH_BPF = 0x32
SO_ATTACH_FILTER = 0x1a
SO_ATTACH_REUSEPORT_CBPF = 0x33
SO_ATTACH_REUSEPORT_EBPF = 0x34
SO_BINDTODEVICE = 0x19
SO_BPF_EXTENSIONS = 0x30
SO_BROADCAST = 0x6
SO_BSDCOMPAT = 0xe
SO_BUSY_POLL = 0x2e
SO_CNX_ADVICE = 0x35
SO_COOKIE = 0x39
SO_DEBUG = 0x1
SO_DETACH_BPF = 0x1b
SO_DETACH_FILTER = 0x1b
SO_DOMAIN = 0x27
SO_DONTROUTE = 0x5
SO_ERROR = 0x4
SO_GET_FILTER = 0x1a
SO_INCOMING_CPU = 0x31
SO_INCOMING_NAPI_ID = 0x38
SO_KEEPALIVE = 0x9
SO_LINGER = 0xd
SO_LOCK_FILTER = 0x2c
SO_MARK = 0x24
SO_MAX_PACING_RATE = 0x2f
SO_MEMINFO = 0x37
SO_NOFCS = 0x2b
SO_NO_CHECK = 0xb
SO_OOBINLINE = 0xa
SO_PASSCRED = 0x10
SO_PASSSEC = 0x22
SO_PEEK_OFF = 0x2a
SO_PEERCRED = 0x11
SO_PEERGROUPS = 0x3b
SO_PEERNAME = 0x1c
SO_PEERSEC = 0x1f
SO_PRIORITY = 0xc
SO_PROTOCOL = 0x26
SO_RCVBUF = 0x8
SO_RCVBUFFORCE = 0x21
SO_RCVLOWAT = 0x12
SO_RCVTIMEO = 0x14
SO_REUSEADDR = 0x2
SO_REUSEPORT = 0xf
SO_RXQ_OVFL = 0x28
SO_SECURITY_AUTHENTICATION = 0x16
SO_SECURITY_ENCRYPTION_NETWORK = 0x18
SO_SECURITY_ENCRYPTION_TRANSPORT = 0x17
SO_SELECT_ERR_QUEUE = 0x2d
SO_SNDBUF = 0x7
SO_SNDBUFFORCE = 0x20
SO_SNDLOWAT = 0x13
SO_SNDTIMEO = 0x15
SO_TIMESTAMP = 0x1d
SO_TIMESTAMPING = 0x25
SO_TIMESTAMPNS = 0x23
SO_TYPE = 0x3
SO_VM_SOCKETS_BUFFER_MAX_SIZE = 0x2
SO_VM_SOCKETS_BUFFER_MIN_SIZE = 0x1
SO_VM_SOCKETS_BUFFER_SIZE = 0x0
SO_VM_SOCKETS_CONNECT_TIMEOUT = 0x6
SO_VM_SOCKETS_NONBLOCK_TXRX = 0x7
SO_VM_SOCKETS_PEER_HOST_VM_ID = 0x3
SO_VM_SOCKETS_TRUSTED = 0x5
SO_WIFI_STATUS = 0x29
SPLICE_F_GIFT = 0x8
SPLICE_F_MORE = 0x4
SPLICE_F_MOVE = 0x1
SPLICE_F_NONBLOCK = 0x2
S_BLKSIZE = 0x200
S_IEXEC = 0x40
S_IFBLK = 0x6000
S_IFCHR = 0x2000
S_IFDIR = 0x4000
S_IFIFO = 0x1000
S_IFLNK = 0xa000
S_IFMT = 0xf000
S_IFREG = 0x8000
S_IFSOCK = 0xc000
S_IREAD = 0x100
S_IRGRP = 0x20
S_IROTH = 0x4
S_IRUSR = 0x100
S_IRWXG = 0x38
S_IRWXO = 0x7
S_IRWXU = 0x1c0
S_ISGID = 0x400
S_ISUID = 0x800
S_ISVTX = 0x200
S_IWGRP = 0x10
S_IWOTH = 0x2
S_IWRITE = 0x80
S_IWUSR = 0x80
S_IXGRP = 0x8
S_IXOTH = 0x1
S_IXUSR = 0x40
TAB0 = 0x0
TAB1 = 0x800
TAB2 = 0x1000
TAB3 = 0x1800
TABDLY = 0x1800
TASKSTATS_CMD_ATTR_MAX = 0x4
TASKSTATS_CMD_MAX = 0x2
TASKSTATS_GENL_NAME = "TASKSTATS"
TASKSTATS_GENL_VERSION = 0x1
TASKSTATS_TYPE_MAX = 0x6
TASKSTATS_VERSION = 0x8
TCFLSH = 0x540b
TCGETA = 0x5405
TCGETS = 0x5401
TCGETS2 = 0x802c542a
TCGETX = 0x5432
TCIFLUSH = 0x0
TCIOFF = 0x2
TCIOFLUSH = 0x2
TCION = 0x3
TCOFLUSH = 0x1
TCOOFF = 0x0
TCOON = 0x1
TCP_CC_INFO = 0x1a
TCP_CONGESTION = 0xd
TCP_COOKIE_IN_ALWAYS = 0x1
TCP_COOKIE_MAX = 0x10
TCP_COOKIE_MIN = 0x8
TCP_COOKIE_OUT_NEVER = 0x2
TCP_COOKIE_PAIR_SIZE = 0x20
TCP_COOKIE_TRANSACTIONS = 0xf
TCP_CORK = 0x3
TCP_DEFER_ACCEPT = 0x9
TCP_FASTOPEN = 0x17
TCP_FASTOPEN_CONNECT = 0x1e
TCP_INFO = 0xb
TCP_KEEPCNT = 0x6
TCP_KEEPIDLE = 0x4
TCP_KEEPINTVL = 0x5
TCP_LINGER2 = 0x8
TCP_MAXSEG = 0x2
TCP_MAXWIN = 0xffff
TCP_MAX_WINSHIFT = 0xe
TCP_MD5SIG = 0xe
TCP_MD5SIG_MAXKEYLEN = 0x50
TCP_MSS = 0x200
TCP_MSS_DEFAULT = 0x218
TCP_MSS_DESIRED = 0x4c4
TCP_NODELAY = 0x1
TCP_NOTSENT_LOWAT = 0x19
TCP_QUEUE_SEQ = 0x15
TCP_QUICKACK = 0xc
TCP_REPAIR = 0x13
TCP_REPAIR_OPTIONS = 0x16
TCP_REPAIR_QUEUE = 0x14
TCP_REPAIR_WINDOW = 0x1d
TCP_SAVED_SYN = 0x1c
TCP_SAVE_SYN = 0x1b
TCP_SYNCNT = 0x7
TCP_S_DATA_IN = 0x4
TCP_S_DATA_OUT = 0x8
TCP_THIN_DUPACK = 0x11
TCP_THIN_LINEAR_TIMEOUTS = 0x10
TCP_TIMESTAMP = 0x18
TCP_USER_TIMEOUT = 0x12
TCP_WINDOW_CLAMP = 0xa
TCSAFLUSH = 0x2
TCSBRK = 0x5409
TCSBRKP = 0x5425
TCSETA = 0x5406
TCSETAF = 0x5408
TCSETAW = 0x5407
TCSETS = 0x5402
TCSETS2 = 0x402c542b
TCSETSF = 0x5404
TCSETSF2 = 0x402c542d
TCSETSW = 0x5403
TCSETSW2 = 0x402c542c
TCSETX = 0x5433
TCSETXF = 0x5434
TCSETXW = 0x5435
TCXONC = 0x540a
TIOCCBRK = 0x5428
TIOCCONS = 0x541d
TIOCEXCL = 0x540c
TIOCGDEV = 0x80045432
TIOCGETD = 0x5424
TIOCGEXCL = 0x80045440
TIOCGICOUNT = 0x545d
TIOCGLCKTRMIOS = 0x5456
TIOCGPGRP = 0x540f
TIOCGPKT = 0x80045438
TIOCGPTLCK = 0x80045439
TIOCGPTN = 0x80045430
TIOCGPTPEER = 0x5441
TIOCGRS485 = 0x542e
TIOCGSERIAL = 0x541e
TIOCGSID = 0x5429
TIOCGSOFTCAR = 0x5419
TIOCGWINSZ = 0x5413
TIOCINQ = 0x541b
TIOCLINUX = 0x541c
TIOCMBIC = 0x5417
TIOCMBIS = 0x5416
TIOCMGET = 0x5415
TIOCMIWAIT = 0x545c
TIOCMSET = 0x5418
TIOCM_CAR = 0x40
TIOCM_CD = 0x40
TIOCM_CTS = 0x20
TIOCM_DSR = 0x100
TIOCM_DTR = 0x2
TIOCM_LE = 0x1
TIOCM_RI = 0x80
TIOCM_RNG = 0x80
TIOCM_RTS = 0x4
TIOCM_SR = 0x10
TIOCM_ST = 0x8
TIOCNOTTY = 0x5422
TIOCNXCL = 0x540d
TIOCOUTQ = 0x5411
TIOCPKT = 0x5420
TIOCPKT_DATA = 0x0
TIOCPKT_DOSTOP = 0x20
TIOCPKT_FLUSHREAD = 0x1
TIOCPKT_FLUSHWRITE = 0x2
TIOCPKT_IOCTL = 0x40
TIOCPKT_NOSTOP = 0x10
TIOCPKT_START = 0x8
TIOCPKT_STOP = 0x4
TIOCSBRK = 0x5427
TIOCSCTTY = 0x540e
TIOCSERCONFIG = 0x5453
TIOCSERGETLSR = 0x5459
TIOCSERGETMULTI = 0x545a
TIOCSERGSTRUCT = 0x5458
TIOCSERGWILD = 0x5454
TIOCSERSETMULTI = 0x545b
TIOCSERSWILD = 0x5455
TIOCSER_TEMT = 0x1
TIOCSETD = 0x5423
TIOCSIG = 0x40045436
TIOCSLCKTRMIOS = 0x5457
TIOCSPGRP = 0x5410
TIOCSPTLCK = 0x40045431
TIOCSRS485 = 0x542f
TIOCSSERIAL = 0x541f
TIOCSSOFTCAR = 0x541a
TIOCSTI = 0x5412
TIOCSWINSZ = 0x5414
TIOCVHANGUP = 0x5437
TOSTOP = 0x100
TS_COMM_LEN = 0x20
TUNATTACHFILTER = 0x401054d5
TUNDETACHFILTER = 0x401054d6
TUNGETFEATURES = 0x800454cf
TUNGETFILTER = 0x801054db
TUNGETIFF = 0x800454d2
TUNGETSNDBUF = 0x800454d3
TUNGETVNETBE = 0x800454df
TUNGETVNETHDRSZ = 0x800454d7
TUNGETVNETLE = 0x800454dd
TUNSETDEBUG = 0x400454c9
TUNSETGROUP = 0x400454ce
TUNSETIFF = 0x400454ca
TUNSETIFINDEX = 0x400454da
TUNSETLINK = 0x400454cd
TUNSETNOCSUM = 0x400454c8
TUNSETOFFLOAD = 0x400454d0
TUNSETOWNER = 0x400454cc
TUNSETPERSIST = 0x400454cb
TUNSETQUEUE = 0x400454d9
TUNSETSNDBUF = 0x400454d4
TUNSETTXFILTER = 0x400454d1
TUNSETVNETBE = 0x400454de
TUNSETVNETHDRSZ = 0x400454d8
TUNSETVNETLE = 0x400454dc
UMOUNT_NOFOLLOW = 0x8
UTIME_NOW = 0x3fffffff
UTIME_OMIT = 0x3ffffffe
VDISCARD = 0xd
VEOF = 0x4
VEOL = 0xb
VEOL2 = 0x10
VERASE = 0x2
VINTR = 0x0
VKILL = 0x3
VLNEXT = 0xf
VMADDR_CID_ANY = 0xffffffff
VMADDR_CID_HOST = 0x2
VMADDR_CID_HYPERVISOR = 0x0
VMADDR_CID_RESERVED = 0x1
VMADDR_PORT_ANY = 0xffffffff
VMIN = 0x6
VM_SOCKETS_INVALID_VERSION = 0xffffffff
VQUIT = 0x1
VREPRINT = 0xc
VSTART = 0x8
VSTOP = 0x9
VSUSP = 0xa
VSWTC = 0x7
VT0 = 0x0
VT1 = 0x4000
VTDLY = 0x4000
VTIME = 0x5
VWERASE = 0xe
WALL = 0x40000000
WCLONE = 0x80000000
WCONTINUED = 0x8
WDIOC_GETBOOTSTATUS = 0x80045702
WDIOC_GETPRETIMEOUT = 0x80045709
WDIOC_GETSTATUS = 0x80045701
WDIOC_GETSUPPORT = 0x80285700
WDIOC_GETTEMP = 0x80045703
WDIOC_GETTIMELEFT = 0x8004570a
WDIOC_GETTIMEOUT = 0x80045707
WDIOC_KEEPALIVE = 0x80045705
WDIOC_SETOPTIONS = 0x80045704
WDIOC_SETPRETIMEOUT = 0xc0045708
WDIOC_SETTIMEOUT = 0xc0045706
WEXITED = 0x4
WNOHANG = 0x1
WNOTHREAD = 0x20000000
WNOWAIT = 0x1000000
WORDSIZE = 0x40
WSTOPPED = 0x2
WUNTRACED = 0x2
XATTR_CREATE = 0x1
XATTR_REPLACE = 0x2
XCASE = 0x4
XTABS = 0x1800
)
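// Illustrative sketch, not part of the generated constants above: socket
// option constants such as SOL_SOCKET and SO_REUSEADDR are intended to be
// passed straight to the raw setsockopt syscall. The helper assumes it
// lives in this package so those constants are in scope; "enableReuseAddr"
// is a hypothetical name used only for this example.
func enableReuseAddr(fd int) error {
	// SOL_SOCKET selects the socket-level option namespace; SO_REUSEADDR
	// permits rebinding a local address that is still in TIME_WAIT.
	return syscall.SetsockoptInt(fd, SOL_SOCKET, SO_REUSEADDR, 1)
}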
// Errors
const (
E2BIG = syscall.Errno(0x7)
EACCES = syscall.Errno(0xd)
EADDRINUSE = syscall.Errno(0x62)
EADDRNOTAVAIL = syscall.Errno(0x63)
EADV = syscall.Errno(0x44)
EAFNOSUPPORT = syscall.Errno(0x61)
EAGAIN = syscall.Errno(0xb)
EALREADY = syscall.Errno(0x72)
EBADE = syscall.Errno(0x34)
EBADF = syscall.Errno(0x9)
EBADFD = syscall.Errno(0x4d)
EBADMSG = syscall.Errno(0x4a)
EBADR = syscall.Errno(0x35)
EBADRQC = syscall.Errno(0x38)
EBADSLT = syscall.Errno(0x39)
EBFONT = syscall.Errno(0x3b)
EBUSY = syscall.Errno(0x10)
ECANCELED = syscall.Errno(0x7d)
ECHILD = syscall.Errno(0xa)
ECHRNG = syscall.Errno(0x2c)
ECOMM = syscall.Errno(0x46)
ECONNABORTED = syscall.Errno(0x67)
ECONNREFUSED = syscall.Errno(0x6f)
ECONNRESET = syscall.Errno(0x68)
EDEADLK = syscall.Errno(0x23)
EDEADLOCK = syscall.Errno(0x23)
EDESTADDRREQ = syscall.Errno(0x59)
EDOM = syscall.Errno(0x21)
EDOTDOT = syscall.Errno(0x49)
EDQUOT = syscall.Errno(0x7a)
EEXIST = syscall.Errno(0x11)
EFAULT = syscall.Errno(0xe)
EFBIG = syscall.Errno(0x1b)
EHOSTDOWN = syscall.Errno(0x70)
EHOSTUNREACH = syscall.Errno(0x71)
EHWPOISON = syscall.Errno(0x85)
EIDRM = syscall.Errno(0x2b)
EILSEQ = syscall.Errno(0x54)
EINPROGRESS = syscall.Errno(0x73)
EINTR = syscall.Errno(0x4)
EINVAL = syscall.Errno(0x16)
EIO = syscall.Errno(0x5)
EISCONN = syscall.Errno(0x6a)
EISDIR = syscall.Errno(0x15)
EISNAM = syscall.Errno(0x78)
EKEYEXPIRED = syscall.Errno(0x7f)
EKEYREJECTED = syscall.Errno(0x81)
EKEYREVOKED = syscall.Errno(0x80)
EL2HLT = syscall.Errno(0x33)
EL2NSYNC = syscall.Errno(0x2d)
EL3HLT = syscall.Errno(0x2e)
EL3RST = syscall.Errno(0x2f)
ELIBACC = syscall.Errno(0x4f)
ELIBBAD = syscall.Errno(0x50)
ELIBEXEC = syscall.Errno(0x53)
ELIBMAX = syscall.Errno(0x52)
ELIBSCN = syscall.Errno(0x51)
ELNRNG = syscall.Errno(0x30)
ELOOP = syscall.Errno(0x28)
EMEDIUMTYPE = syscall.Errno(0x7c)
EMFILE = syscall.Errno(0x18)
EMLINK = syscall.Errno(0x1f)
EMSGSIZE = syscall.Errno(0x5a)
EMULTIHOP = syscall.Errno(0x48)
ENAMETOOLONG = syscall.Errno(0x24)
ENAVAIL = syscall.Errno(0x77)
ENETDOWN = syscall.Errno(0x64)
ENETRESET = syscall.Errno(0x66)
ENETUNREACH = syscall.Errno(0x65)
ENFILE = syscall.Errno(0x17)
ENOANO = syscall.Errno(0x37)
ENOBUFS = syscall.Errno(0x69)
ENOCSI = syscall.Errno(0x32)
ENODATA = syscall.Errno(0x3d)
ENODEV = syscall.Errno(0x13)
ENOENT = syscall.Errno(0x2)
ENOEXEC = syscall.Errno(0x8)
ENOKEY = syscall.Errno(0x7e)
ENOLCK = syscall.Errno(0x25)
ENOLINK = syscall.Errno(0x43)
ENOMEDIUM = syscall.Errno(0x7b)
ENOMEM = syscall.Errno(0xc)
ENOMSG = syscall.Errno(0x2a)
ENONET = syscall.Errno(0x40)
ENOPKG = syscall.Errno(0x41)
ENOPROTOOPT = syscall.Errno(0x5c)
ENOSPC = syscall.Errno(0x1c)
ENOSR = syscall.Errno(0x3f)
ENOSTR = syscall.Errno(0x3c)
ENOSYS = syscall.Errno(0x26)
ENOTBLK = syscall.Errno(0xf)
ENOTCONN = syscall.Errno(0x6b)
ENOTDIR = syscall.Errno(0x14)
ENOTEMPTY = syscall.Errno(0x27)
ENOTNAM = syscall.Errno(0x76)
ENOTRECOVERABLE = syscall.Errno(0x83)
ENOTSOCK = syscall.Errno(0x58)
ENOTSUP = syscall.Errno(0x5f)
ENOTTY = syscall.Errno(0x19)
ENOTUNIQ = syscall.Errno(0x4c)
ENXIO = syscall.Errno(0x6)
EOPNOTSUPP = syscall.Errno(0x5f)
EOVERFLOW = syscall.Errno(0x4b)
EOWNERDEAD = syscall.Errno(0x82)
EPERM = syscall.Errno(0x1)
EPFNOSUPPORT = syscall.Errno(0x60)
EPIPE = syscall.Errno(0x20)
EPROTO = syscall.Errno(0x47)
EPROTONOSUPPORT = syscall.Errno(0x5d)
EPROTOTYPE = syscall.Errno(0x5b)
ERANGE = syscall.Errno(0x22)
EREMCHG = syscall.Errno(0x4e)
EREMOTE = syscall.Errno(0x42)
EREMOTEIO = syscall.Errno(0x79)
ERESTART = syscall.Errno(0x55)
ERFKILL = syscall.Errno(0x84)
EROFS = syscall.Errno(0x1e)
ESHUTDOWN = syscall.Errno(0x6c)
ESOCKTNOSUPPORT = syscall.Errno(0x5e)
ESPIPE = syscall.Errno(0x1d)
ESRCH = syscall.Errno(0x3)
ESRMNT = syscall.Errno(0x45)
ESTALE = syscall.Errno(0x74)
ESTRPIPE = syscall.Errno(0x56)
ETIME = syscall.Errno(0x3e)
ETIMEDOUT = syscall.Errno(0x6e)
ETOOMANYREFS = syscall.Errno(0x6d)
ETXTBSY = syscall.Errno(0x1a)
EUCLEAN = syscall.Errno(0x75)
EUNATCH = syscall.Errno(0x31)
EUSERS = syscall.Errno(0x57)
EWOULDBLOCK = syscall.Errno(0xb)
EXDEV = syscall.Errno(0x12)
EXFULL = syscall.Errno(0x36)
)
// Signals
const (
SIGABRT = syscall.Signal(0x6)
SIGALRM = syscall.Signal(0xe)
SIGBUS = syscall.Signal(0x7)
SIGCHLD = syscall.Signal(0x11)
SIGCLD = syscall.Signal(0x11)
SIGCONT = syscall.Signal(0x12)
SIGFPE = syscall.Signal(0x8)
SIGHUP = syscall.Signal(0x1)
SIGILL = syscall.Signal(0x4)
SIGINT = syscall.Signal(0x2)
SIGIO = syscall.Signal(0x1d)
SIGIOT = syscall.Signal(0x6)
SIGKILL = syscall.Signal(0x9)
SIGPIPE = syscall.Signal(0xd)
SIGPOLL = syscall.Signal(0x1d)
SIGPROF = syscall.Signal(0x1b)
SIGPWR = syscall.Signal(0x1e)
SIGQUIT = syscall.Signal(0x3)
SIGSEGV = syscall.Signal(0xb)
SIGSTKFLT = syscall.Signal(0x10)
SIGSTOP = syscall.Signal(0x13)
SIGSYS = syscall.Signal(0x1f)
SIGTERM = syscall.Signal(0xf)
SIGTRAP = syscall.Signal(0x5)
SIGTSTP = syscall.Signal(0x14)
SIGTTIN = syscall.Signal(0x15)
SIGTTOU = syscall.Signal(0x16)
SIGURG = syscall.Signal(0x17)
SIGUSR1 = syscall.Signal(0xa)
SIGUSR2 = syscall.Signal(0xc)
SIGVTALRM = syscall.Signal(0x1a)
SIGWINCH = syscall.Signal(0x1c)
SIGXCPU = syscall.Signal(0x18)
SIGXFSZ = syscall.Signal(0x19)
)
// Error table
var errors = [...]string{
1: "operation not permitted",
2: "no such file or directory",
3: "no such process",
4: "interrupted system call",
5: "input/output error",
6: "no such device or address",
7: "argument list too long",
8: "exec format error",
9: "bad file descriptor",
10: "no child processes",
11: "resource temporarily unavailable",
12: "cannot allocate memory",
13: "permission denied",
14: "bad address",
15: "block device required",
16: "device or resource busy",
17: "file exists",
18: "invalid cross-device link",
19: "no such device",
20: "not a directory",
21: "is a directory",
22: "invalid argument",
23: "too many open files in system",
24: "too many open files",
25: "inappropriate ioctl for device",
26: "text file busy",
27: "file too large",
28: "no space left on device",
29: "illegal seek",
30: "read-only file system",
31: "too many links",
32: "broken pipe",
33: "numerical argument out of domain",
34: "numerical result out of range",
35: "resource deadlock avoided",
36: "file name too long",
37: "no locks available",
38: "function not implemented",
39: "directory not empty",
40: "too many levels of symbolic links",
42: "no message of desired type",
43: "identifier removed",
44: "channel number out of range",
45: "level 2 not synchronized",
46: "level 3 halted",
47: "level 3 reset",
48: "link number out of range",
49: "protocol driver not attached",
50: "no CSI structure available",
51: "level 2 halted",
52: "invalid exchange",
53: "invalid request descriptor",
54: "exchange full",
55: "no anode",
56: "invalid request code",
57: "invalid slot",
59: "bad font file format",
60: "device not a stream",
61: "no data available",
62: "timer expired",
63: "out of streams resources",
64: "machine is not on the network",
65: "package not installed",
66: "object is remote",
67: "link has been severed",
68: "advertise error",
69: "srmount error",
70: "communication error on send",
71: "protocol error",
72: "multihop attempted",
73: "RFS specific error",
74: "bad message",
75: "value too large for defined data type",
76: "name not unique on network",
77: "file descriptor in bad state",
78: "remote address changed",
79: "can not access a needed shared library",
80: "accessing a corrupted shared library",
81: ".lib section in a.out corrupted",
82: "attempting to link in too many shared libraries",
83: "cannot exec a shared library directly",
84: "invalid or incomplete multibyte or wide character",
85: "interrupted system call should be restarted",
86: "streams pipe error",
87: "too many users",
88: "socket operation on non-socket",
89: "destination address required",
90: "message too long",
91: "protocol wrong type for socket",
92: "protocol not available",
93: "protocol not supported",
94: "socket type not supported",
95: "operation not supported",
96: "protocol family not supported",
97: "address family not supported by protocol",
98: "address already in use",
99: "cannot assign requested address",
100: "network is down",
101: "network is unreachable",
102: "network dropped connection on reset",
103: "software caused connection abort",
104: "connection reset by peer",
105: "no buffer space available",
106: "transport endpoint is already connected",
107: "transport endpoint is not connected",
108: "cannot send after transport endpoint shutdown",
109: "too many references: cannot splice",
110: "connection timed out",
111: "connection refused",
112: "host is down",
113: "no route to host",
114: "operation already in progress",
115: "operation now in progress",
116: "stale file handle",
117: "structure needs cleaning",
118: "not a XENIX named type file",
119: "no XENIX semaphores available",
120: "is a named type file",
121: "remote I/O error",
122: "disk quota exceeded",
123: "no medium found",
124: "wrong medium type",
125: "operation canceled",
126: "required key not available",
127: "key has expired",
128: "key has been revoked",
129: "key was rejected by service",
130: "owner died",
131: "state not recoverable",
132: "operation not possible due to RF-kill",
133: "memory page has hardware error",
}
// Signal table
var signals = [...]string{
1: "hangup",
2: "interrupt",
3: "quit",
4: "illegal instruction",
5: "trace/breakpoint trap",
6: "aborted",
7: "bus error",
8: "floating point exception",
9: "killed",
10: "user defined signal 1",
11: "segmentation fault",
12: "user defined signal 2",
13: "broken pipe",
14: "alarm clock",
15: "terminated",
16: "stack fault",
17: "child exited",
18: "continued",
19: "stopped (signal)",
20: "stopped",
21: "stopped (tty input)",
22: "stopped (tty output)",
23: "urgent I/O condition",
24: "CPU time limit exceeded",
25: "file size limit exceeded",
26: "virtual timer expired",
27: "profiling timer expired",
28: "window changed",
29: "I/O possible",
30: "power failure",
31: "bad system call",
}
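// Illustrative sketch, not part of the generated tables above: Errno values
// returned by raw syscalls compare directly against the constants in the
// Errors block, and the errors table above supplies their string form. The
// helper assumes it lives in this package so O_CREAT, O_EXCL, O_WRONLY and
// EEXIST are in scope; "createExclusive" is a hypothetical name used only
// for this example.
func createExclusive(path string) (int, error) {
	// O_CREAT|O_EXCL|O_WRONLY fails with EEXIST (syscall.Errno(0x11),
	// rendered as "file exists" via the error table) if the path exists.
	fd, err := syscall.Open(path, O_CREAT|O_EXCL|O_WRONLY, 0644)
	if err == EEXIST {
		return -1, err
	}
	return fd, err
}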
##===- unittests/Bitcode/Makefile --------------------------*- Makefile -*-===##
#
# The LLVM Compiler Infrastructure
#
# This file is distributed under the University of Illinois Open Source
# License. See LICENSE.TXT for details.
#
##===----------------------------------------------------------------------===##
LEVEL = ../..
TESTNAME = Bitcode
LINK_COMPONENTS := core support bitreader bitwriter
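# LINK_COMPONENTS names the LLVM components this test links against;
# Makefile.config (below) supplies the build settings and Makefile.unittest
# the shared unit-test (gtest) build rules.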
include $(LEVEL)/Makefile.config
include $(LLVM_SRC_ROOT)/unittests/Makefile.unittest
# Changelog
Only the breaking and most significant changes are described here. The full changelog and documentation for all released versions can be found in the nicely formatted [commit history](https://github.com/frictionlessdata/frictionless-py/commits/master).
## v3.11
- Added experimental BigQuery support (#424)
## v3.10
- Added experimental SPSS support (#421)
## v3.9
- Rebased versioning to continue from `goodtables` (as its successor)
## v3.8
- Added support for SQL/Pandas import/export (#31)
## v3.7
- Added support for custom JSONEncoder classes (#24)
## v3.6
- Normalized header terminology
## v3.5
- Initial public version