text
stringlengths 2
1.04M
| meta
dict |
---|---|
package com.amazonaws.services.simplesystemsmanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/DescribeMaintenanceWindowTargets"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeMaintenanceWindowTargetsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable,
Cloneable {
/**
* <p>
* Information about the targets in the maintenance window.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<MaintenanceWindowTarget> targets;
/**
* <p>
* The token to use when requesting the next set of items. If there are no additional items to return, the string is
* empty.
* </p>
*/
private String nextToken;
/**
* <p>
* Information about the targets in the maintenance window.
* </p>
*
* @return Information about the targets in the maintenance window.
*/
public java.util.List<MaintenanceWindowTarget> getTargets() {
if (targets == null) {
targets = new com.amazonaws.internal.SdkInternalList<MaintenanceWindowTarget>();
}
return targets;
}
/**
* <p>
* Information about the targets in the maintenance window.
* </p>
*
* @param targets
* Information about the targets in the maintenance window.
*/
public void setTargets(java.util.Collection<MaintenanceWindowTarget> targets) {
if (targets == null) {
this.targets = null;
return;
}
this.targets = new com.amazonaws.internal.SdkInternalList<MaintenanceWindowTarget>(targets);
}
/**
* <p>
* Information about the targets in the maintenance window.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setTargets(java.util.Collection)} or {@link #withTargets(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param targets
* Information about the targets in the maintenance window.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeMaintenanceWindowTargetsResult withTargets(MaintenanceWindowTarget... targets) {
if (this.targets == null) {
setTargets(new com.amazonaws.internal.SdkInternalList<MaintenanceWindowTarget>(targets.length));
}
for (MaintenanceWindowTarget ele : targets) {
this.targets.add(ele);
}
return this;
}
/**
* <p>
* Information about the targets in the maintenance window.
* </p>
*
* @param targets
* Information about the targets in the maintenance window.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeMaintenanceWindowTargetsResult withTargets(java.util.Collection<MaintenanceWindowTarget> targets) {
setTargets(targets);
return this;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no additional items to return, the string is
* empty.
* </p>
*
* @param nextToken
* The token to use when requesting the next set of items. If there are no additional items to return, the
* string is empty.
*/
public void setNextToken(String nextToken) {
this.nextToken = nextToken;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no additional items to return, the string is
* empty.
* </p>
*
* @return The token to use when requesting the next set of items. If there are no additional items to return, the
* string is empty.
*/
public String getNextToken() {
return this.nextToken;
}
/**
* <p>
* The token to use when requesting the next set of items. If there are no additional items to return, the string is
* empty.
* </p>
*
* @param nextToken
* The token to use when requesting the next set of items. If there are no additional items to return, the
* string is empty.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DescribeMaintenanceWindowTargetsResult withNextToken(String nextToken) {
setNextToken(nextToken);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getTargets() != null)
sb.append("Targets: ").append(getTargets()).append(",");
if (getNextToken() != null)
sb.append("NextToken: ").append(getNextToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DescribeMaintenanceWindowTargetsResult == false)
return false;
DescribeMaintenanceWindowTargetsResult other = (DescribeMaintenanceWindowTargetsResult) obj;
if (other.getTargets() == null ^ this.getTargets() == null)
return false;
if (other.getTargets() != null && other.getTargets().equals(this.getTargets()) == false)
return false;
if (other.getNextToken() == null ^ this.getNextToken() == null)
return false;
if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getTargets() == null) ? 0 : getTargets().hashCode());
hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
return hashCode;
}
@Override
public DescribeMaintenanceWindowTargetsResult clone() {
try {
return (DescribeMaintenanceWindowTargetsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| {
"content_hash": "1ffc2617243adadd9eefe788a6b10341",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 153,
"avg_line_length": 33.00956937799043,
"alnum_prop": 0.6228438904189013,
"repo_name": "jentfoo/aws-sdk-java",
"id": "55c1c08cc61f5a4ff3d6aa73ae9cf37c4aedbbd6",
"size": "7479",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-java-sdk-ssm/src/main/java/com/amazonaws/services/simplesystemsmanagement/model/DescribeMaintenanceWindowTargetsResult.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "270"
},
{
"name": "FreeMarker",
"bytes": "173637"
},
{
"name": "Gherkin",
"bytes": "25063"
},
{
"name": "Java",
"bytes": "356214839"
},
{
"name": "Scilab",
"bytes": "3924"
},
{
"name": "Shell",
"bytes": "295"
}
],
"symlink_target": ""
} |
#include <memory>
#include "modules/audio_coding/acm2/rent_a_codec.h"
#include "rtc_base/arraysize.h"
#include "test/gtest.h"
#include "test/mock_audio_encoder.h"
namespace webrtc {
namespace acm2 {
using ::testing::Return;
namespace {
const int kDataLengthSamples = 80;
const int kPacketSizeSamples = 2 * kDataLengthSamples;
const int16_t kZeroData[kDataLengthSamples] = {0};
const CodecInst kDefaultCodecInst = {0, "pcmu", 8000, kPacketSizeSamples,
1, 64000};
const int kCngPt = 13;
class Marker final {
public:
MOCK_METHOD1(Mark, void(std::string desc));
};
} // namespace
class RentACodecTestF : public ::testing::Test {
protected:
void CreateCodec() {
auto speech_encoder = rent_a_codec_.RentEncoder(kDefaultCodecInst);
ASSERT_TRUE(speech_encoder);
RentACodec::StackParameters param;
param.use_cng = true;
param.speech_encoder = std::move(speech_encoder);
encoder_ = rent_a_codec_.RentEncoderStack(¶m);
}
void EncodeAndVerify(size_t expected_out_length,
uint32_t expected_timestamp,
int expected_payload_type,
int expected_send_even_if_empty) {
rtc::Buffer out;
AudioEncoder::EncodedInfo encoded_info;
encoded_info =
encoder_->Encode(timestamp_, kZeroData, &out);
timestamp_ += kDataLengthSamples;
EXPECT_TRUE(encoded_info.redundant.empty());
EXPECT_EQ(expected_out_length, encoded_info.encoded_bytes);
EXPECT_EQ(expected_timestamp, encoded_info.encoded_timestamp);
if (expected_payload_type >= 0)
EXPECT_EQ(expected_payload_type, encoded_info.payload_type);
if (expected_send_even_if_empty >= 0)
EXPECT_EQ(static_cast<bool>(expected_send_even_if_empty),
encoded_info.send_even_if_empty);
}
RentACodec rent_a_codec_;
std::unique_ptr<AudioEncoder> encoder_;
uint32_t timestamp_ = 0;
};
// This test verifies that CNG frames are delivered as expected. Since the frame
// size is set to 20 ms, we expect the first encode call to produce no output
// (which is signaled as 0 bytes output of type kNoEncoding). The next encode
// call should produce one SID frame of 9 bytes. The third call should not
// result in any output (just like the first one). The fourth and final encode
// call should produce an "empty frame", which is like no output, but with
// AudioEncoder::EncodedInfo::send_even_if_empty set to true. (The reason to
// produce an empty frame is to drive sending of DTMF packets in the RTP/RTCP
// module.)
TEST_F(RentACodecTestF, VerifyCngFrames) {
CreateCodec();
uint32_t expected_timestamp = timestamp_;
// Verify no frame.
{
SCOPED_TRACE("First encoding");
EncodeAndVerify(0, expected_timestamp, -1, -1);
}
// Verify SID frame delivered.
{
SCOPED_TRACE("Second encoding");
EncodeAndVerify(9, expected_timestamp, kCngPt, 1);
}
// Verify no frame.
{
SCOPED_TRACE("Third encoding");
EncodeAndVerify(0, expected_timestamp, -1, -1);
}
// Verify NoEncoding.
expected_timestamp += 2 * kDataLengthSamples;
{
SCOPED_TRACE("Fourth encoding");
EncodeAndVerify(0, expected_timestamp, kCngPt, 1);
}
}
TEST(RentACodecTest, ExternalEncoder) {
const int kSampleRateHz = 8000;
auto* external_encoder = new MockAudioEncoder;
EXPECT_CALL(*external_encoder, SampleRateHz())
.WillRepeatedly(Return(kSampleRateHz));
EXPECT_CALL(*external_encoder, NumChannels()).WillRepeatedly(Return(1));
EXPECT_CALL(*external_encoder, SetFec(false)).WillRepeatedly(Return(true));
RentACodec rac;
RentACodec::StackParameters param;
param.speech_encoder = std::unique_ptr<AudioEncoder>(external_encoder);
std::unique_ptr<AudioEncoder> encoder_stack = rac.RentEncoderStack(¶m);
EXPECT_EQ(external_encoder, encoder_stack.get());
const int kPacketSizeSamples = kSampleRateHz / 100;
int16_t audio[kPacketSizeSamples] = {0};
rtc::Buffer encoded;
AudioEncoder::EncodedInfo info;
Marker marker;
{
::testing::InSequence s;
info.encoded_timestamp = 0;
EXPECT_CALL(
*external_encoder,
EncodeImpl(0, rtc::ArrayView<const int16_t>(audio), &encoded))
.WillOnce(Return(info));
EXPECT_CALL(marker, Mark("A"));
EXPECT_CALL(marker, Mark("B"));
EXPECT_CALL(*external_encoder, Die());
EXPECT_CALL(marker, Mark("C"));
}
info = encoder_stack->Encode(0, audio, &encoded);
EXPECT_EQ(0u, info.encoded_timestamp);
marker.Mark("A");
// Change to internal encoder.
CodecInst codec_inst = kDefaultCodecInst;
codec_inst.pacsize = kPacketSizeSamples;
param.speech_encoder = rac.RentEncoder(codec_inst);
ASSERT_TRUE(param.speech_encoder);
AudioEncoder* enc = param.speech_encoder.get();
std::unique_ptr<AudioEncoder> stack = rac.RentEncoderStack(¶m);
EXPECT_EQ(enc, stack.get());
// Don't expect any more calls to the external encoder.
info = stack->Encode(1, audio, &encoded);
marker.Mark("B");
encoder_stack.reset();
marker.Mark("C");
}
// Verify that the speech encoder's Reset method is called when CNG or RED
// (or both) are switched on, but not when they're switched off.
void TestCngAndRedResetSpeechEncoder(bool use_cng, bool use_red) {
auto make_enc = [] {
auto speech_encoder =
std::unique_ptr<MockAudioEncoder>(new MockAudioEncoder);
EXPECT_CALL(*speech_encoder, NumChannels()).WillRepeatedly(Return(1));
EXPECT_CALL(*speech_encoder, Max10MsFramesInAPacket())
.WillRepeatedly(Return(2));
EXPECT_CALL(*speech_encoder, SampleRateHz()).WillRepeatedly(Return(8000));
EXPECT_CALL(*speech_encoder, SetFec(false)).WillRepeatedly(Return(true));
return speech_encoder;
};
auto speech_encoder1 = make_enc();
auto speech_encoder2 = make_enc();
Marker marker;
{
::testing::InSequence s;
EXPECT_CALL(marker, Mark("disabled"));
EXPECT_CALL(*speech_encoder1, Die());
EXPECT_CALL(marker, Mark("enabled"));
if (use_cng || use_red)
EXPECT_CALL(*speech_encoder2, Reset());
EXPECT_CALL(*speech_encoder2, Die());
}
RentACodec::StackParameters param1, param2;
param1.speech_encoder = std::move(speech_encoder1);
param2.speech_encoder = std::move(speech_encoder2);
param2.use_cng = use_cng;
param2.use_red = use_red;
marker.Mark("disabled");
RentACodec rac;
rac.RentEncoderStack(¶m1);
marker.Mark("enabled");
rac.RentEncoderStack(¶m2);
}
TEST(RentACodecTest, CngResetsSpeechEncoder) {
TestCngAndRedResetSpeechEncoder(true, false);
}
TEST(RentACodecTest, RedResetsSpeechEncoder) {
TestCngAndRedResetSpeechEncoder(false, true);
}
TEST(RentACodecTest, CngAndRedResetsSpeechEncoder) {
TestCngAndRedResetSpeechEncoder(true, true);
}
TEST(RentACodecTest, NoCngAndRedNoSpeechEncoderReset) {
TestCngAndRedResetSpeechEncoder(false, false);
}
TEST(RentACodecTest, RentEncoderError) {
const CodecInst codec_inst = {
0, "Robert'); DROP TABLE Students;", 8000, 160, 1, 64000};
RentACodec rent_a_codec;
EXPECT_FALSE(rent_a_codec.RentEncoder(codec_inst));
}
TEST(RentACodecTest, RentEncoderStackWithoutSpeechEncoder) {
RentACodec::StackParameters sp;
EXPECT_EQ(nullptr, sp.speech_encoder);
EXPECT_EQ(nullptr, RentACodec().RentEncoderStack(&sp));
}
} // namespace acm2
} // namespace webrtc
| {
"content_hash": "7c453cf75875fd47d2c14cee5032b757",
"timestamp": "",
"source": "github",
"line_count": 224,
"max_line_length": 80,
"avg_line_length": 32.607142857142854,
"alnum_prop": 0.7016703176341731,
"repo_name": "wangcy6/storm_app",
"id": "9eded20d23e9c7d8f23b13518e984d1088f61336",
"size": "7716",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "frame/c++/webrtc-master/modules/audio_coding/acm2/rent_a_codec_unittest.cc",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ActionScript",
"bytes": "86225"
},
{
"name": "Assembly",
"bytes": "4834"
},
{
"name": "Batchfile",
"bytes": "50141"
},
{
"name": "C",
"bytes": "9700081"
},
{
"name": "C#",
"bytes": "1587148"
},
{
"name": "C++",
"bytes": "14378340"
},
{
"name": "CMake",
"bytes": "756439"
},
{
"name": "CSS",
"bytes": "59712"
},
{
"name": "Clojure",
"bytes": "535480"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "Fancy",
"bytes": "6234"
},
{
"name": "FreeMarker",
"bytes": "3512"
},
{
"name": "Go",
"bytes": "27069"
},
{
"name": "Groovy",
"bytes": "1755"
},
{
"name": "HTML",
"bytes": "1235479"
},
{
"name": "Java",
"bytes": "41653938"
},
{
"name": "JavaScript",
"bytes": "260093"
},
{
"name": "Lua",
"bytes": "11887"
},
{
"name": "M4",
"bytes": "96283"
},
{
"name": "Makefile",
"bytes": "977879"
},
{
"name": "NSIS",
"bytes": "6522"
},
{
"name": "Objective-C",
"bytes": "324010"
},
{
"name": "PHP",
"bytes": "348909"
},
{
"name": "Perl",
"bytes": "182487"
},
{
"name": "PowerShell",
"bytes": "19465"
},
{
"name": "Prolog",
"bytes": "243"
},
{
"name": "Python",
"bytes": "3649738"
},
{
"name": "QML",
"bytes": "9975"
},
{
"name": "QMake",
"bytes": "63106"
},
{
"name": "Roff",
"bytes": "12319"
},
{
"name": "Ruby",
"bytes": "858066"
},
{
"name": "Scala",
"bytes": "5203874"
},
{
"name": "Shell",
"bytes": "714435"
},
{
"name": "Smarty",
"bytes": "1047"
},
{
"name": "Swift",
"bytes": "3486"
},
{
"name": "Tcl",
"bytes": "492616"
},
{
"name": "Thrift",
"bytes": "31449"
},
{
"name": "XS",
"bytes": "20183"
},
{
"name": "XSLT",
"bytes": "8784"
}
],
"symlink_target": ""
} |
import { ExtendedDevice, DeviceSecurityVariation } from "./types";
type DeviceFilterCondition = (device: ExtendedDevice) => boolean;
const MS_DAY = 24 * 60 * 60 * 1000;
export const INACTIVE_DEVICE_AGE_MS = 7.776e+9; // 90 days
export const INACTIVE_DEVICE_AGE_DAYS = INACTIVE_DEVICE_AGE_MS / MS_DAY;
export const isDeviceInactive: DeviceFilterCondition = device =>
!!device.last_seen_ts && device.last_seen_ts < Date.now() - INACTIVE_DEVICE_AGE_MS;
const filters: Record<DeviceSecurityVariation, DeviceFilterCondition> = {
[DeviceSecurityVariation.Verified]: device => !!device.isVerified,
[DeviceSecurityVariation.Unverified]: device => !device.isVerified,
[DeviceSecurityVariation.Inactive]: isDeviceInactive,
};
export const filterDevicesBySecurityRecommendation = (
devices: ExtendedDevice[],
securityVariations: DeviceSecurityVariation[],
) => {
const activeFilters = securityVariations.map(variation => filters[variation]);
if (!activeFilters.length) {
return devices;
}
return devices.filter(device => activeFilters.every(filter => filter(device)));
};
| {
"content_hash": "97b90c30744d791f9e303c785c840c15",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 87,
"avg_line_length": 38.48275862068966,
"alnum_prop": 0.7347670250896058,
"repo_name": "matrix-org/matrix-react-sdk",
"id": "05ceb9c69726c576e8001dfbe9702cf41abfada3",
"size": "1694",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "src/components/views/settings/devices/filter.ts",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "984691"
},
{
"name": "Dockerfile",
"bytes": "1550"
},
{
"name": "HTML",
"bytes": "1043"
},
{
"name": "JavaScript",
"bytes": "33429"
},
{
"name": "Perl",
"bytes": "10945"
},
{
"name": "Python",
"bytes": "5019"
},
{
"name": "Shell",
"bytes": "5451"
},
{
"name": "TypeScript",
"bytes": "9543345"
}
],
"symlink_target": ""
} |
declare module 'tcomb-validation' {
declare type Path = Array<string | number>;
declare interface ValidationError {
actual: any;
expected: Tcomb$Type<*>;
path: Path;
message: string;
}
declare interface ValidationResult<T> {
errors: Array<ValidationError>;
value: T;
}
declare type ValidationOptions = {
path?: Path,
context?: any
};
declare var exports: {
validate<T>(x: any, type: Tcomb$Type<T>, options?: ValidationOptions): ValidationResult<T>
};
}
| {
"content_hash": "235a451ef873832ec71eda1778956246",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 94,
"avg_line_length": 19.73076923076923,
"alnum_prop": 0.6510721247563352,
"repo_name": "gcanti/pantarei",
"id": "961654df3a439092b69fcab332b2b7d00b7e10e1",
"size": "513",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tcomb-validation/3.x.x-0.33.x/tcomb-validation.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "25827"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
Index Fungorum
#### Published in
Persoonia 18(3): 384 (2004)
#### Original name
Octospora nemoralis Benkert & Brouwer
### Remarks
null | {
"content_hash": "b6a94af59a575566565b2daf4aba74ea",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 37,
"avg_line_length": 12.692307692307692,
"alnum_prop": 0.7090909090909091,
"repo_name": "mdoering/backbone",
"id": "4a438847a0d26d39dded813f2ce8ac43adf00475",
"size": "226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Fungi/Ascomycota/Pezizomycetes/Pezizales/Pyronemataceae/Octospora/Octospora nemoralis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
//
// TKDataFormGroupTitleStyle.h
// TelerikUI
//
// Copyright © 2015 Telerik. All rights reserved.
//
#import "TKStyleNode.h"
@class TKStroke;
@class TKFill;
/**
@discussion Represents style fot group's title.
*/
@interface TKDataFormGroupTitleStyle : TKStyleNode
/**
Stroke for the group's title.
*/
@property (nonatomic, strong) TKStroke *stroke;
/**
Fill for the group's title.
*/
@property (nonatomic, strong) TKFill *fill;
/**
Group's title separator color.
*/
@property (nonatomic, strong) TKFill *separatorColor;
/**
Group's title separator leading space.
*/
@property (nonatomic) CGFloat separatorLeadingSpace;
/**
Group's title separator trailing space.
*/
@property (nonatomic) CGFloat separatorTrailingSpace;
/**
Insets for the group's title.
*/
@property (nonatomic) UIEdgeInsets insets;
@end
| {
"content_hash": "c490f9af94a80d974b9b8c23f53808be",
"timestamp": "",
"source": "github",
"line_count": 48,
"max_line_length": 53,
"avg_line_length": 17.354166666666668,
"alnum_prop": 0.7166866746698679,
"repo_name": "zdzdz/Signal-iOS",
"id": "fdf469980c71bef425dacace463bc4cf5145d56b",
"size": "834",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "Signal/Signal/TelerikUI.framework/Versions/A/Headers/TKDataFormGroupTitleStyle.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Objective-C",
"bytes": "2745295"
},
{
"name": "Ruby",
"bytes": "238"
},
{
"name": "Shell",
"bytes": "8263"
},
{
"name": "Swift",
"bytes": "4267"
}
],
"symlink_target": ""
} |
FROM balenalib/smarc-px30-fedora:31-build
ENV NODE_VERSION 14.16.0
ENV YARN_VERSION 1.22.4
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-arm64.tar.gz" \
&& echo "2b78771550f8a3e6e990d8e60e9ade82c7a9e2738b6222e92198bcd5ea857ea6 node-v$NODE_VERSION-linux-arm64.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-arm64.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-arm64.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@node" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo $'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v8 \nOS: Fedora 31 \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v14.16.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo $'#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | {
"content_hash": "c7c3b026376d9efc40eb45286aa6ac46",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 691,
"avg_line_length": 66.63414634146342,
"alnum_prop": 0.708272327964861,
"repo_name": "nghiant2710/base-images",
"id": "4837dffbdbf475b08e3bd8e56ef55a7d523dd1a1",
"size": "2753",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/smarc-px30/fedora/31/14.16.0/build/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
const chalk = require('chalk');
const fs = require('fs');
const path = require('path');
module.exports.out = {
print: text => console.log(chalk.blue(text)),
success: text => console.log(chalk.green(text)),
error: text => console.log(chalk.bold.red(text)),
};
module.exports.writeJSON = (filePath, data, reject) => {
if (!fs.existsSync(path.dirname(filePath))) {
fs.mkdirSync(path.dirname(filePath));
}
fs.writeFile(filePath, JSON.stringify(data), { mode: '0600' }, (err) => {
if (err) {
// who knows what happened, just send it back
reject(err);
}
});
};
| {
"content_hash": "f00a8e30f8bfcedb9a97c40a5a499176",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 75,
"avg_line_length": 27.09090909090909,
"alnum_prop": 0.62751677852349,
"repo_name": "ferjgar/youtube-backup",
"id": "e8bcc5574c082a327c3960107a8ef49aec54b1bf",
"size": "596",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/util.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "65"
},
{
"name": "JavaScript",
"bytes": "8526"
}
],
"symlink_target": ""
} |
Rails.application.config.assets.version = '1.0'
# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
Rails.application.config.assets.precompile += %w(ckeditor/config.js)
| {
"content_hash": "c5de85065b97f2a18ba2af5c149f10be",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 93,
"avg_line_length": 44.125,
"alnum_prop": 0.7818696883852692,
"repo_name": "leschenko/ab_admin",
"id": "86a4b1c4717350ebe505be45772ebf7a4c1bb5ff",
"size": "491",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/dummy/config/initializers/assets.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "546"
},
{
"name": "CoffeeScript",
"bytes": "44028"
},
{
"name": "Gherkin",
"bytes": "30415"
},
{
"name": "HTML",
"bytes": "11533"
},
{
"name": "JavaScript",
"bytes": "6104"
},
{
"name": "Ruby",
"bytes": "361380"
},
{
"name": "SCSS",
"bytes": "18715"
},
{
"name": "Slim",
"bytes": "37857"
}
],
"symlink_target": ""
} |
define([
'hbs!templates/alert'
], function (alertTemplate) {
'use strict';
return Backbone.Marionette.ItemView.extend({
title: "alert",
message: "alert text",
type: "success",
serializeData: function () {
return {
title: Marionette.getOption(this, "title"),
message: Marionette.getOption(this, "message"),
type: Marionette.getOption(this, "type")
}
},
onRender: function () {
var el = this.$el.find('.alert-message');
el.fadeIn();
},
onDomRefresh: function () {
var el = this.$el.find('.alert-message');
window.setTimeout(function () {
el.fadeOut('slow');
}, 5000);
},
template: alertTemplate
});
});
| {
"content_hash": "475e9ddf9b7e985142e8b5f9c8f6d2a2",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 55,
"avg_line_length": 18.82051282051282,
"alnum_prop": 0.5667574931880109,
"repo_name": "1ukash/devops-service",
"id": "40c35fbba78ca143c7c692c2d54f9aed13b479ff",
"size": "734",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "devops-service-web-core/public/js/app/views/item/AlertView.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "914"
},
{
"name": "JavaScript",
"bytes": "234925"
},
{
"name": "Ruby",
"bytes": "343522"
},
{
"name": "Shell",
"bytes": "3973"
}
],
"symlink_target": ""
} |
namespace hpe
{
TemplateCreatorProcessor::TemplateCreatorProcessor(void)
: m_cloudKey("FilteredCloud"), m_templateKey("Template")
{
}
TemplateCreatorProcessor::~TemplateCreatorProcessor(void)
{
}
void TemplateCreatorProcessor::Init()
{
}
void TemplateCreatorProcessor::Process(IDataStorage::Ptr dataStorage)
{
CloudXYZRGBA::Ptr cloudObject = dataStorage->GetAndCast<CloudXYZRGBA>(m_cloudKey);
if (cloudObject.get() != nullptr)
{
CloudXYZRGBA::Ptr templateObject(new CloudXYZRGBA);
templateObject->cloud = m_templateCreator.AddCloudToTemplate(cloudObject->cloud);
dataStorage->Set(m_templateKey, templateObject);
}
}
IncrementalHeadTemplateCreator::CloudType::Ptr TemplateCreatorProcessor::GetTemplate()
{
return m_templateCreator.GetTemplate();
}
} | {
"content_hash": "5882d9b1fbb4352d961bd86490378436",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 93,
"avg_line_length": 25.714285714285715,
"alnum_prop": 0.6722222222222223,
"repo_name": "sergeytulyakov/FaceCept3D",
"id": "0ee36ddebd7f5bd1689bca6267328f7fbe1719d8",
"size": "977",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "FaceCept3D/HeadPoseEstimationFramework/Processor/TemplateCreatorProcessor.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1651"
},
{
"name": "C++",
"bytes": "281282"
},
{
"name": "CMake",
"bytes": "1426"
}
],
"symlink_target": ""
} |
require File.dirname(__FILE__) + '/../spec_helper'
require 'cucumber'
require 'cucumber/rb_support/rb_language'
module Cucumber
describe StepMother do
before do
@dsl = Object.new
@dsl.extend(RbSupport::RbDsl)
@step_mother = StepMother.new
@step_mother.load_natural_language('en')
@rb = @step_mother.load_programming_language('rb')
@visitor = mock('Visitor')
end
it "should format step names" do
@dsl.Given(/it (.*) in (.*)/) do |what, month|
end
@dsl.Given(/nope something else/) do |what, month|
end
format = @step_mother.step_match("it snows in april").format_args("[%s]")
format.should == "it [snows] in [april]"
end
it "should raise Ambiguous error with guess hint when multiple step definitions match" do
@dsl.Given(/Three (.*) mice/) {|disability|}
@dsl.Given(/Three blind (.*)/) {|animal|}
lambda do
@step_mother.step_match("Three blind mice")
end.should raise_error(Ambiguous, %{Ambiguous match of "Three blind mice":
spec/cucumber/step_mother_spec.rb:30:in `/Three (.*) mice/'
spec/cucumber/step_mother_spec.rb:31:in `/Three blind (.*)/'
You can run again with --guess to make Cucumber be more smart about it
})
end
it "should not show --guess hint when --guess is used" do
@step_mother.options = {:guess => true}
@dsl.Given(/Three (.*) mice/) {|disability|}
@dsl.Given(/Three cute (.*)/) {|animal|}
lambda do
@step_mother.step_match("Three cute mice")
end.should raise_error(Ambiguous, %{Ambiguous match of "Three cute mice":
spec/cucumber/step_mother_spec.rb:47:in `/Three (.*) mice/'
spec/cucumber/step_mother_spec.rb:48:in `/Three cute (.*)/'
})
end
it "should not raise Ambiguous error when multiple step definitions match, but --guess is enabled" do
@step_mother.options = {:guess => true}
@dsl.Given(/Three (.*) mice/) {|disability|}
@dsl.Given(/Three (.*)/) {|animal|}
lambda do
@step_mother.step_match("Three blind mice")
end.should_not raise_error
end
it "should pick right step definition when --guess is enabled and equal number of capture groups" do
@step_mother.options = {:guess => true}
right = @dsl.Given(/Three (.*) mice/) {|disability|}
wrong = @dsl.Given(/Three (.*)/) {|animal|}
@step_mother.step_match("Three blind mice").step_definition.should == right
end
it "should pick right step definition when --guess is enabled and unequal number of capture groups" do
@step_mother.options = {:guess => true}
right = @dsl.Given(/Three (.*) mice ran (.*)/) {|disability|}
wrong = @dsl.Given(/Three (.*)/) {|animal|}
@step_mother.step_match("Three blind mice ran far").step_definition.should == right
end
it "should pick most specific step definition when --guess is enabled and unequal number of capture groups" do
@step_mother.options = {:guess => true}
general = @dsl.Given(/Three (.*) mice ran (.*)/) {|disability|}
specific = @dsl.Given(/Three blind mice ran far/) {}
more_specific = @dsl.Given(/^Three blind mice ran far$/) {}
@step_mother.step_match("Three blind mice ran far").step_definition.should == more_specific
end
it "should raise Undefined error when no step definitions match" do
lambda do
@step_mother.step_match("Three blind mice")
end.should raise_error(Undefined)
end
# http://railsforum.com/viewtopic.php?pid=93881
it "should not raise Redundant unless it's really redundant" do
@dsl.Given(/^(.*) (.*) user named '(.*)'$/) {|a,b,c|}
@dsl.Given(/^there is no (.*) user named '(.*)'$/) {|a,b|}
end
it "should raise an error if the world is nil" do
@dsl.World do
end
begin
@step_mother.before_and_after(nil) {}
raise "Should fail"
rescue RbSupport::NilWorld => e
e.message.should == "World procs should never return nil"
e.backtrace.should == ["spec/cucumber/step_mother_spec.rb:108:in `World'"]
end
end
module ModuleOne
end
module ModuleTwo
end
class ClassOne
end
it "should implicitly extend world with modules" do
@dsl.World(ModuleOne, ModuleTwo)
@step_mother.before(mock('scenario', :null_object => true))
class << @rb.current_world
included_modules.index(ModuleOne).should_not == nil
included_modules.index(ModuleTwo).should_not == nil
end
@rb.current_world.class.should == Object
end
it "should raise error when we try to register more than one World proc" do
@dsl.World { Hash.new }
lambda do
@dsl.World { Array.new }
end.should raise_error(RbSupport::MultipleWorld, %{You can only pass a proc to #World once, but it's happening
in 2 places:
spec/cucumber/step_mother_spec.rb:140:in `World'
spec/cucumber/step_mother_spec.rb:142:in `World'
Use Ruby modules instead to extend your worlds. See the Cucumber::RbSupport::RbDsl#World RDoc
or http://wiki.github.com/aslakhellesoy/cucumber/a-whole-new-world.
})
end
it "should find before hooks" do
fish = @dsl.Before('@fish'){}
meat = @dsl.Before('@meat'){}
scenario = mock('Scenario')
scenario.should_receive(:accept_hook?).with(fish).and_return(true)
scenario.should_receive(:accept_hook?).with(meat).and_return(false)
@rb.hooks_for(:before, scenario).should == [fish]
end
end
describe StepMother, "step argument transformations" do
before do
@dsl = Object.new
@dsl.extend(RbSupport::RbDsl)
@step_mother = StepMother.new
@step_mother.load_natural_language('en')
@rb = @step_mother.load_programming_language('rb')
end
describe "without capture groups" do
it "complains when registering with a with no transform block" do
lambda do
@dsl.Transform('^abc$')
end.should raise_error(Cucumber::RbSupport::RbTransform::MissingProc)
end
it "complains when registering with a zero-arg transform block" do
lambda do
@dsl.Transform('^abc$') {42}
end.should raise_error(Cucumber::RbSupport::RbTransform::MissingProc)
end
it "complains when registering with a splat-arg transform block" do
lambda do
@dsl.Transform('^abc$') {|*splat| 42 }
end.should raise_error(Cucumber::RbSupport::RbTransform::MissingProc)
end
it "complains when transforming with an arity mismatch" do
lambda do
@dsl.Transform('^abc$') {|one, two| 42 }
@rb.execute_transforms(['abc'])
end.should raise_error(Cucumber::ArityMismatchError)
end
it "allows registering a regexp pattern that yields the step_arg matched" do
@dsl.Transform(/^ab*c$/) {|arg| 42}
@rb.execute_transforms(['ab']).should == ['ab']
@rb.execute_transforms(['ac']).should == [42]
@rb.execute_transforms(['abc']).should == [42]
@rb.execute_transforms(['abbc']).should == [42]
end
end
describe "with capture groups" do
it "complains when registering with a with no transform block" do
lambda do
@dsl.Transform('^a(.)c$')
end.should raise_error(Cucumber::RbSupport::RbTransform::MissingProc)
end
it "complains when registering with a zero-arg transform block" do
lambda do
@dsl.Transform('^a(.)c$') { 42 }
end.should raise_error(Cucumber::RbSupport::RbTransform::MissingProc)
end
it "complains when registering with a splat-arg transform block" do
lambda do
@dsl.Transform('^a(.)c$') {|*splat| 42 }
end.should raise_error(Cucumber::RbSupport::RbTransform::MissingProc)
end
it "complains when transforming with an arity mismatch" do
lambda do
@dsl.Transform('^a(.)c$') {|one, two| 42 }
@rb.execute_transforms(['abc'])
end.should raise_error(Cucumber::ArityMismatchError)
end
it "allows registering a regexp pattern that yields capture groups" do
@dsl.Transform(/^shape: (.+), color: (.+)$/) do |shape, color|
{shape.to_sym => color.to_sym}
end
@rb.execute_transforms(['shape: circle, color: blue']).should == [{:circle => :blue}]
@rb.execute_transforms(['shape: square, color: red']).should == [{:square => :red}]
@rb.execute_transforms(['not shape: square, not color: red']).should == ['not shape: square, not color: red']
end
end
it "allows registering a string pattern" do
@dsl.Transform('^ab*c$') {|arg| 42}
@rb.execute_transforms(['ab']).should == ['ab']
@rb.execute_transforms(['ac']).should == [42]
@rb.execute_transforms(['abc']).should == [42]
@rb.execute_transforms(['abbc']).should == [42]
end
it "gives match priority to transforms defined last" do
@dsl.Transform(/^transform_me$/) {|arg| :foo }
@dsl.Transform(/^transform_me$/) {|arg| :bar }
@dsl.Transform(/^transform_me$/) {|arg| :baz }
@rb.execute_transforms(['transform_me']).should == [:baz]
end
it "allows registering a transform which returns nil" do
@dsl.Transform('^ac$') {|arg| nil}
@rb.execute_transforms(['ab']).should == ['ab']
@rb.execute_transforms(['ac']).should == [nil]
end
end
end
| {
"content_hash": "d06ce5511db74f0e2c17021ebc852d3c",
"timestamp": "",
"source": "github",
"line_count": 270,
"max_line_length": 117,
"avg_line_length": 35.2962962962963,
"alnum_prop": 0.6164742917103883,
"repo_name": "cykod/Webiva",
"id": "f4c4bc9a809b7da55ca61db51cf34c4e116ffc42",
"size": "9530",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "vendor/gems/cucumber-0.6.2/spec/cucumber/step_mother_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1603531"
},
{
"name": "PHP",
"bytes": "19204"
},
{
"name": "Perl",
"bytes": "4042"
},
{
"name": "Ruby",
"bytes": "3033832"
},
{
"name": "Shell",
"bytes": "17397"
}
],
"symlink_target": ""
} |
package connect.ui.activity.chat.bean;
import java.io.Serializable;
/**
* Created by pujin on 2017/2/20.
*/
public class WebsiteExt1Bean implements Serializable{
private String linkTitle;
private String linkSubtitle;
private String linkImg;
public WebsiteExt1Bean() {
}
public WebsiteExt1Bean(String linkTitle, String linkSubtitle, String linkImg) {
this.linkTitle = linkTitle;
this.linkSubtitle = linkSubtitle;
this.linkImg = linkImg;
}
public String getLinkTitle() {
return linkTitle;
}
public void setLinkTitle(String linkTitle) {
this.linkTitle = linkTitle;
}
public String getLinkSubtitle() {
return linkSubtitle;
}
public void setLinkSubtitle(String linkSubtitle) {
this.linkSubtitle = linkSubtitle;
}
public String getLinkImg() {
return linkImg;
}
public void setLinkImg(String linkImg) {
this.linkImg = linkImg;
}
public void setExt1(String title, String sub) {
this.linkTitle = title;
this.linkSubtitle = sub;
}
}
| {
"content_hash": "13da4b9881d4a831f95a8c9a87ae8de1",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 83,
"avg_line_length": 22.16,
"alnum_prop": 0.6534296028880866,
"repo_name": "connectim/Android",
"id": "6ce383ce2fc6621f6283c3596fb81bbba936479e",
"size": "1108",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/src/main/java/connect/ui/activity/chat/bean/WebsiteExt1Bean.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "FreeMarker",
"bytes": "51905"
},
{
"name": "HTML",
"bytes": "5531"
},
{
"name": "Java",
"bytes": "2716967"
},
{
"name": "Protocol Buffer",
"bytes": "18166"
}
],
"symlink_target": ""
} |
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Store locator with Panel</title>
<script
src="https://maps.googleapis.com/maps/api/js?libraries=places"></script>
<script
src="https://ajax.googleapis.com/ajax/libs/jquery/1.6/jquery.min.js">
</script>
<script src="js/store-locator.min.js"></script>
<script src="js/cloudsql-datasource.js"></script>
<script src="js/cloudsql.js"></script>
<link rel="stylesheet" href="css/storelocator.css">
<style>
body { font-family: sans-serif; }
#map-canvas, #panel { height: 500px; }
#panel { width: 300px; float: left; margin-right: 10px; }
p.attribution, p.attribution a { color: #666; }
</style>
<script>
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-12846745-20']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' === document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
</script>
</head>
<body>
<h1>Medicare offices</h1>
<div id="panel"></div>
<div id="map-canvas"></div>
<p class="attribution">Medicare location data from <a href="http://data.gov.au/66">data.gov.au</a>, licensed under <a href="http://creativecommons.org/licenses/by/2.5/au/">CC-BY 2.5</a></p>
</body>
</html>
| {
"content_hash": "84b84464e4220ccdb25c2300b6ed76d5",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 193,
"avg_line_length": 39.51282051282051,
"alnum_prop": 0.6048020765736535,
"repo_name": "google/maps-for-work-samples",
"id": "4e6eb7acfa069dfa3e46a4471c82f3354f321add",
"size": "1541",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demos/CloudSQL/cloudsql-store-locator/cloudsql.html",
"mode": "33261",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
<?php
/**
* Created by PhpStorm.
* User: hrustbb
* Date: 15.10.16
* Time: 11:08
*/
namespace common\models;
use yii\db\ActiveRecord;
class Journal extends ActiveRecord {
public static function tableName()
{
return 'journals';
}
} | {
"content_hash": "1c5a39a67b9f7d1192f2a11b32b0d92f",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 38,
"avg_line_length": 12.95,
"alnum_prop": 0.6332046332046332,
"repo_name": "hrustbb2/yiimanager",
"id": "95d1a448623808fe2ca435c1ec2bbc116e0d873e",
"size": "259",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "common/models/Journal.php",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "152"
},
{
"name": "Batchfile",
"bytes": "1541"
},
{
"name": "CSS",
"bytes": "11884"
},
{
"name": "HTML",
"bytes": "10433"
},
{
"name": "JavaScript",
"bytes": "500724"
},
{
"name": "PHP",
"bytes": "401495"
},
{
"name": "Shell",
"bytes": "3257"
}
],
"symlink_target": ""
} |
layout: default
---
<div class="project_page">
{% for project in site.data.projects %}
{% if page.project == project.id %}
<div class="well">
<h4 class="home-subtitle">{{ project.name }}</h4>
<div class="row image">
<div class="col-xs-12">
<img class="img-responsive" src="{{ site.url }}/static/projects/{{ project.image }}">
</div>
</div>
<div>
{{ content }}
</div>
{% if project.url %}
<a href="{{ project.url }}" target="_blank" class="btn btn-default btn-raised btn-sm project-direct-link">Website <span class="fa fa-external-link"></span></a>
{% endif %}
</div>
{% endif %}
{% endfor %}
</div> | {
"content_hash": "d69051dc28348bc195eb832ae029b060",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 179,
"avg_line_length": 34.64,
"alnum_prop": 0.4376443418013857,
"repo_name": "DHainzl/dhainzl.github.io",
"id": "6054ccbba2c6e53b8bc2709df9b305773ce30f16",
"size": "870",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_layouts/project_page.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "24964"
},
{
"name": "HTML",
"bytes": "21600"
},
{
"name": "JavaScript",
"bytes": "734"
},
{
"name": "Ruby",
"bytes": "3700"
}
],
"symlink_target": ""
} |
SYNONYM
#### According to
The Catalogue of Life, 3rd January 2011
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "bf7e93e725d4df82ee5cb70d6818a1c5",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 39,
"avg_line_length": 10.23076923076923,
"alnum_prop": 0.6917293233082706,
"repo_name": "mdoering/backbone",
"id": "0ecaa25ed851e66c316c3a1b14352ac3ec694f35",
"size": "182",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Fabales/Fabaceae/Dialium/Dialium angolense/ Syn. Dialium evrardii/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
package com.github.cwilper.fcrepo.cloudsync.service.rest;
import java.util.List;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import org.apache.cxf.jaxrs.model.wadl.Description;
import org.apache.cxf.jaxrs.model.wadl.Descriptions;
import org.apache.cxf.jaxrs.model.wadl.DocTarget;
import com.github.cwilper.fcrepo.cloudsync.api.CloudSyncService;
import com.github.cwilper.fcrepo.cloudsync.api.NameConflictException;
import com.github.cwilper.fcrepo.cloudsync.api.ResourceInUseException;
import com.github.cwilper.fcrepo.cloudsync.api.ResourceNotFoundException;
import com.github.cwilper.fcrepo.cloudsync.api.Task;
import com.github.cwilper.fcrepo.cloudsync.service.util.PATCH;
@Path("tasks")
public class TaskResource extends AbstractResource {
public static final String TASK_JSON =
"application/vnd.fcrepo-cloudsync.task+json";
public static final String TASK_XML =
"application/vnd.fcrepo-cloudsync.task+xml";
public static final String TASKS_JSON =
"application/vnd.fcrepo-cloudsync.tasks+json";
public static final String TASKS_XML =
"application/vnd.fcrepo-cloudsync.tasks+xml";
public TaskResource(CloudSyncService service) {
super(service);
}
@POST
@Path("/")
@Consumes({TASK_JSON, TASK_XML})
@Produces({JSON, XML, TASK_JSON, TASK_XML})
@Descriptions({
@Description(value = "Creates a task", target = DocTarget.METHOD),
@Description(value = STATUS_201_CREATED, target = DocTarget.RESPONSE)
})
public Response createTask(@Context UriInfo uriInfo,
@Context HttpServletRequest req,
Task task) {
try {
Task newTask = service.createTask(task);
setUris(uriInfo, req, newTask);
return Response.created(newTask.getUri()).entity(newTask).build();
} catch (NameConflictException e) {
throw new WebApplicationException(e, Response.Status.CONFLICT);
}
}
@GET
@Path("/")
@Produces({JSON, XML, TASKS_JSON, TASKS_XML})
@Descriptions({
@Description(value = "Lists all tasks", target = DocTarget.METHOD),
@Description(value = STATUS_200_OK, target = DocTarget.RESPONSE)
})
public List<Task> listTasks(@Context UriInfo uriInfo,
@Context HttpServletRequest req) {
List<Task> tasks = service.listTasks();
for (Task task: tasks) {
setUris(uriInfo, req, task);
}
return tasks;
}
@GET
@Path("{id}")
@Produces({JSON, XML, TASK_JSON, TASK_XML})
@Descriptions({
@Description(value = "Gets a task", target = DocTarget.METHOD),
@Description(value = STATUS_200_OK, target = DocTarget.RESPONSE)
})
public Task getTask(@Context UriInfo uriInfo,
@Context HttpServletRequest req,
@PathParam("id") String id) {
try {
Task task = service.getTask(id);
setUris(uriInfo, req, task);
return task;
} catch (ResourceNotFoundException e) {
throw new WebApplicationException(e, Response.Status.NOT_FOUND);
}
}
@PATCH
@Path("{id}")
@Consumes({TASK_JSON, TASK_XML})
@Produces({JSON, XML, TASK_JSON, TASK_XML})
@Descriptions({
@Description(value = "Updates a task", target = DocTarget.METHOD),
@Description(value = STATUS_200_OK, target = DocTarget.RESPONSE)
})
public Task updateTask(@Context UriInfo uriInfo,
@Context HttpServletRequest req,
@PathParam("id") String id,
Task task) {
try {
Task updatedTask = service.updateTask(id, task);
setUris(uriInfo, req, updatedTask);
return updatedTask;
} catch (ResourceNotFoundException e) {
throw new WebApplicationException(e, Response.Status.NOT_FOUND);
} catch (NameConflictException e) {
throw new WebApplicationException(e, Response.Status.CONFLICT);
}
}
@DELETE
@Path("{id}")
@Descriptions({
@Description(value = "Deletes a task", target = DocTarget.METHOD),
@Description(value = STATUS_204_NO_CONTENT, target = DocTarget.RESPONSE)
})
public void deleteTask(@PathParam("id") String id) {
try {
service.deleteTask(id);
} catch (ResourceInUseException e) {
throw new WebApplicationException(e, Response.Status.CONFLICT);
}
}
private void setUris(UriInfo uriInfo, HttpServletRequest req, Task task) {
task.setUri(URIMapper.getUri(uriInfo, req, "tasks/" + task.getId()));
if (task.getActiveLogId() != null && !task.getActiveLogId().equals("0")) {
task.setActiveLogUri(URIMapper.getUri(uriInfo, req, "taskLogs/" + task.getActiveLogId()));
}
task.setActiveLogId(null);
task.setId(null);
}
}
| {
"content_hash": "1bf56b2fdf45cc5c8885a0b70e699d71",
"timestamp": "",
"source": "github",
"line_count": 148,
"max_line_length": 102,
"avg_line_length": 36.263513513513516,
"alnum_prop": 0.6344326439351593,
"repo_name": "cdchapman/fcrepo-cloudsync",
"id": "b176672c0e229debde44d6eff7ede1500c48191f",
"size": "5367",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fcrepo-cloudsync-service/src/main/java/com/github/cwilper/fcrepo/cloudsync/service/rest/TaskResource.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
if Gem::Requirement.new('< 12.14.77').satisfied_by?(Gem::Version.new(Chef::VERSION))
#--
# Author:: Adam Jacob (<[email protected]>)
# Author:: Christopher Walters (<[email protected]>)
# Author:: Tim Hinderliter (<[email protected]>)
# Copyright:: Copyright 2008-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/exceptions"
require "chef/mixin/params_validate"
require "chef/node"
require "chef/resource_collection"
class Chef
# == Chef::Runner
# This class is responsible for executing the steps in a Chef run.
class Runner
attr_reader :run_context
include Chef::Mixin::ParamsValidate
def initialize(run_context)
@run_context = run_context
end
def delayed_actions
@run_context.delayed_actions
end
def events
@run_context.events
end
# Determine the appropriate provider for the given resource, then
# execute it.
def run_action(resource, action, notification_type = nil, notifying_resource = nil)
# If there are any before notifications, why-run the resource
# and notify anyone who needs notifying
before_notifications = run_context.before_notifications(resource) || []
unless before_notifications.empty?
forced_why_run do
Chef::Log.info("#{resource} running why-run #{action} action to support before action")
resource.run_action(action, notification_type, notifying_resource)
end
if resource.updated_by_last_action?
before_notifications.each do |notification|
Chef::Log.info("#{resource} sending #{notification.action} action to #{notification.resource} (before)")
run_action(notification.resource, notification.action, :before, resource)
end
resource.updated_by_last_action(false)
end
end
# Actually run the action for realsies
resource.run_action(action, notification_type, notifying_resource)
# Execute any immediate and queue up any delayed notifications
# associated with the resource, but only if it was updated *this time*
# we ran an action on it.
if resource.updated_by_last_action?
run_context.immediate_notifications(resource).each do |notification|
Chef::Log.info("#{resource} sending #{notification.action} action to #{notification.resource} (immediate)")
run_action(notification.resource, notification.action, :immediate, resource)
end
run_context.delayed_notifications(resource).each do |notification|
# send the notification to the run_context of the receiving resource
notification.resource.run_context.add_delayed_action(notification)
end
end
end
# Iterates over the +resource_collection+ in the +run_context+ calling
# +run_action+ for each resource in turn.
def converge
# Resolve all lazy/forward references in notifications
run_context.resource_collection.each do |resource|
resource.resolve_notification_references
end
# Execute each resource.
run_context.resource_collection.execute_each_resource do |resource|
Array(resource.action).each { |action| run_action(resource, action) }
end
rescue Exception => e
Chef::Log.info "Running queued delayed notifications before re-raising exception"
run_delayed_notifications(e)
else
run_delayed_notifications(nil)
true
end
private
# Run all our :delayed actions
def run_delayed_notifications(error = nil)
collected_failures = Exceptions::MultipleFailures.new
collected_failures.client_run_failure(error) unless error.nil?
delayed_actions.each do |notification|
result = run_delayed_notification(notification)
if result.kind_of?(Exception)
collected_failures.notification_failure(result)
end
end
collected_failures.raise!
end
def run_delayed_notification(notification)
Chef::Log.info( "#{notification.notifying_resource} sending #{notification.action}"\
" action to #{notification.resource} (delayed)")
# Struct of resource/action to call
run_action(notification.resource, notification.action, :delayed)
true
rescue Exception => e
e
end
# helper to run a block of code with why_run forced to true and then restore it correctly
def forced_why_run
saved = Chef::Config[:why_run]
Chef::Config[:why_run] = true
yield
ensure
Chef::Config[:why_run] = saved
end
end
end
end
| {
"content_hash": "becdeb942e33f157099389fad8cb5f01",
"timestamp": "",
"source": "github",
"line_count": 145,
"max_line_length": 117,
"avg_line_length": 35.324137931034485,
"alnum_prop": 0.6878172588832487,
"repo_name": "akzero53/basic-development-machine",
"id": "1aab5b1fa97edf99a1d72d7741442b24b1caa792",
"size": "5325",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cookbooks/compat_resource/files/lib/chef_compat/monkeypatches/chef/runner.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "446"
},
{
"name": "C",
"bytes": "47932"
},
{
"name": "HTML",
"bytes": "29750"
},
{
"name": "Ruby",
"bytes": "564602"
},
{
"name": "Shell",
"bytes": "2328"
}
],
"symlink_target": ""
} |
<!--
* FCKeditor - The text editor for internet
* Copyright (C) 2003-2005 Frederico Caldeira Knabben
*
* Licensed under the terms of the GNU Lesser General Public License:
* http://www.opensource.org/licenses/lgpl-license.php
*
* For further information visit:
* http://www.fckeditor.net/
*
* File Name: default.html
* Samples Frameset page.
*
* File Authors:
* Frederico Caldeira Knabben ([email protected])
-->
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN" >
<html>
<head>
<title>FCKeditor - Samples</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta name="robots" content="noindex, nofollow">
</head>
<frameset rows="60,*">
<frame src="sampleslist.html" noresize scrolling="no">
<frame name="Sample" src="html/sample01.html" noresize>
</frameset>
</html>
| {
"content_hash": "aa3350044fcb87737ecdb447e1013e7c",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 69,
"avg_line_length": 30.25,
"alnum_prop": 0.6906729634002361,
"repo_name": "nmunir/TransworldBase",
"id": "0c2777289eebb714cdcea11613ddf190d387a60d",
"size": "847",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fckeditor/_samples/default.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "10891527"
},
{
"name": "C#",
"bytes": "108746"
},
{
"name": "CSS",
"bytes": "873275"
},
{
"name": "ColdFusion",
"bytes": "32068"
},
{
"name": "JavaScript",
"bytes": "1129606"
},
{
"name": "PHP",
"bytes": "94604"
},
{
"name": "Perl",
"bytes": "71256"
},
{
"name": "Shell",
"bytes": "597"
},
{
"name": "Visual Basic",
"bytes": "210855"
}
],
"symlink_target": ""
} |
<div class="auth-main">
<div class="content">
<input type="text">
<button>put</button>
</div>
</div>
| {
"content_hash": "0e92d2bd7a56c99e7c9093be4a64bee9",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 24,
"avg_line_length": 18.833333333333332,
"alnum_prop": 0.5752212389380531,
"repo_name": "Gss615/swie2",
"id": "3b3a3bffb4362924f8a363d4ca1005ac0b19b5ca",
"size": "113",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/app/pages/admin/admin-login/trouble-logging/trouble-logging.component.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "149145"
},
{
"name": "HTML",
"bytes": "206945"
},
{
"name": "JavaScript",
"bytes": "44892"
},
{
"name": "Shell",
"bytes": "153"
},
{
"name": "TypeScript",
"bytes": "593216"
}
],
"symlink_target": ""
} |
using System;
using osu.Framework.Graphics.Sprites;
using osu.Game.Overlays.Dialog;
using osu.Game.Localisation;
namespace osu.Game.Screens.Select.Carousel
{
public class UpdateLocalConfirmationDialog : DeleteConfirmationDialog
{
public UpdateLocalConfirmationDialog(Action onConfirm)
{
HeaderText = PopupDialogStrings.UpdateLocallyModifiedText;
BodyText = PopupDialogStrings.UpdateLocallyModifiedDescription;
Icon = FontAwesome.Solid.ExclamationTriangle;
DeleteAction = onConfirm;
}
}
}
| {
"content_hash": "ed9818bb51bf0f1a7ece7ffc898551a4",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 75,
"avg_line_length": 31.833333333333332,
"alnum_prop": 0.7242582897033158,
"repo_name": "peppy/osu",
"id": "f5267e905e524a164287a8af5bdf041a65c50ac1",
"size": "723",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "osu.Game/Screens/Select/Carousel/UpdateLocalConfirmationDialog.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "14007692"
},
{
"name": "GLSL",
"bytes": "230"
},
{
"name": "PowerShell",
"bytes": "1988"
},
{
"name": "Ruby",
"bytes": "4185"
},
{
"name": "Shell",
"bytes": "1548"
}
],
"symlink_target": ""
} |
from .. import core, symbol
class Sequence(core.ast.Expr):
__slots__ = 'actions'
keyword = 'do'
def __init__(self, actions):
self.actions = tuple(actions)
if not self.actions or type(self.actions[-1]) is Sequence.Binding:
self.actions = self.actions + (Sequence.ret(core.ast.Lit(None)), )
class Binding(core.ast.Expr):
__slots__ = 'name', 'value'
keyword = 'var'
def __init__(self, name, value):
self.name = symbol(name)
self.value = value
assert isinstance(value, core.ast.Expr)
def __repr__(self):
return '({0} {1} {2})'.format(Binding.keyword, self.name, self.value)
class Return(core.ast.Expr):
keyword = 'return'
class Bind(core.ast.Expr):
def __repr__(self):
return '>>'
@staticmethod
def bind(value, name, body):
func = core.ast.Abs( (name, ), body)
return core.ast.App( Sequence.Bind(), (value, func) )
@staticmethod
def then(value, body):
return Sequence.bind(value, core.ast.Var.wildcard, body)
@staticmethod
def ret(value):
return core.ast.App( Sequence.Return(), (value, ))
def monad(self):
def do(gen):
first = next(gen)
try:
rest = do(gen)
except StopIteration:
return first
if type(first) is Sequence.Binding:
return Sequence.bind(first.value, first.name, rest)
else:
return Sequence.then(first, rest)
return do( x for x in self.actions )
class New(core.ast.Expr):
keyword = 'new'
class Use(core.ast.Expr):
keyword = 'use'
class Drop(core.ast.Expr):
'''removes from current scope and returns'''
keyword = 'drop'
__slots__ = 'value'
def __init__(self, value):
self.value = value
assert isinstance(value, core.ast.Expr)
def __repr__(self):
return '({0} {1})'.format(Drop.keyword, self.value)
class Free(core.ast.Expr):
keyword = 'free'
__slots__ = 'value'
def __init__(self, value):
# free moves
self.value = Drop(value)
def __repr__(self):
return '({0} {1})'.format(Free.keyword, self.value)
class Move(core.ast.Expr):
keyword = 'move'
__slots__ = 'value'
def __init__(self, value):
# free moves
self.value = Drop(value)
def __repr__(self):
return '({0} {1})'.format(Move.keyword, self.value)
| {
"content_hash": "766e4a8c89a74a44da3ab990c094c356",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 81,
"avg_line_length": 21.37704918032787,
"alnum_prop": 0.5302914110429447,
"repo_name": "maxime-tournier/hm.py",
"id": "4a1a384f8d6f7acc26a5c3f4621ae11b8cd64f65",
"size": "2609",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "slip/io/ast.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Common Lisp",
"bytes": "1133"
},
{
"name": "Python",
"bytes": "84570"
}
],
"symlink_target": ""
} |
/**
* A Virtual Joystick
* @class Phaser.Plugin.VirtualJoystick
*/
Phaser.Plugin.VirtualJoystick = function (game, parent) {
Phaser.Plugin.call(this, game, parent);
this.x = 0;
this.y = 0;
this.limit = 10;
this.baseCircle;
this.baseBMD;
this.nubBMD;
this.base;
this.nub;
this.buttonA;
this.buttonB;
this.buttonC;
this.baseCenter;
this.nubCenter;
this.isDragging = false;
this.angle = 0;
this.distance = 0;
this.force = 0;
this.deltaX = 0;
this.deltaY = 0;
this.speed = 0;
this.pointer = null;
this.callbackID = -1;
};
Phaser.Plugin.VirtualJoystick.prototype = Object.create(Phaser.Plugin.prototype);
Phaser.Plugin.VirtualJoystick.prototype.constructor = Phaser.Plugin.VirtualJoystick;
Phaser.Plugin.VirtualJoystick.prototype.init = function (x, y, baseDiameter, stickDiameter, limit, baseColor, stickColor) {
if (typeof x === 'undefined') { x=this.game.stage.width/6; }
if (typeof y === 'undefined') { y=this.game.stage.height-this.game.stage.height/5; }
if (typeof baseDiameter === 'undefined') { baseDiameter = 140; }
if (typeof stickDiameter === 'undefined') { stickDiameter = 100; }
if (typeof limit === 'undefined') { limit = Math.floor(baseDiameter / 2); }
if (typeof baseColor === 'undefined') { baseColor = 'rgba(255, 0, 0, 0.5)'; }
if (typeof stickColor === 'undefined') { stickColor = 'rgba(0, 255, 0, 0.7)'; }
this.x = x;
this.y = y;
this.isDragging = false;
this.limit = limit;
this.limitPoint = new Phaser.Point(x, y);
this.location = new Phaser.Point(x, y);
var radius = Math.floor(baseDiameter / 2);
var nr = Math.floor(stickDiameter / 2);
this.baseCircle = new Phaser.Circle(x, y, baseDiameter);
this.baseBMD = this.game.make.bitmapData(baseDiameter, baseDiameter);
this.nubBMD = this.game.make.bitmapData(stickDiameter, stickDiameter);
this.baseBMD.circle(radius, radius, radius, baseColor);
this.nubBMD.circle(nr, nr, nr, stickColor);
// Base
this.base = this.game.make.sprite(x, y, this.baseBMD);
this.base.anchor.set(0.5);
// Nub (stick)
this.nub = this.game.make.sprite(x, y, this.nubBMD);
this.nub.anchor.set(0.5);
this.nub.inputEnabled = true;
this.nub.events.onInputDown.add(this.startDrag, this);
this.nub.events.onInputUp.add(this.stopDrag, this);
};
Phaser.Plugin.VirtualJoystick.prototype.start = function () {
this.game.stage.addChild(this.base);
this.game.stage.addChild(this.nub);
if (this.callbackID > -1)
{
this.game.input.deleteMoveCallback(this.callbackID);
}
this.callbackID = this.game.input.addMoveCallback(this.move, this);
this.isDragging = false;
this.distance = 0;
this.speed = 0;
this.force = 0;
this.deltaX = 0;
this.deltaY = 0;
this.nub.x = this.base.x;
this.nub.y = this.base.y;
this.base.visible = true;
this.nub.visible = true;
this.limitPoint.set(this.base.x, this.base.y);
this.location.set(this.base.x, this.base.y);
};
Phaser.Plugin.VirtualJoystick.prototype.stop = function () {
// if (this.nub.parent === null || this.base.parent === null)
// {
// return;
// }
this.base.visible = false;
this.nub.visible = false;
this.nub.x = this.base.x;
this.nub.y = this.base.y;
this.nub.input.enabled = false;
this.game.stage.removeChild(this.base);
this.game.stage.removeChild(this.nub);
if(this.buttonA){
this.game.stage.removeChild(this.buttonA);
}
if(this.buttonB){
this.game.stage.removeChild(this.buttonB);
}
if(this.buttonC){
this.game.stage.removeChild(this.buttonC);
}
this.game.input.deleteMoveCallback(this.callbackID);
};
//Phaser.Plugin.VirtualJoystick.prototype.resize = function (x, y, baseDiameter, stickDiameter, limit, baseColor, stickColor) {
// this.stop();
//
// this.init(arguments)
//
// this.start();
//};
Phaser.Plugin.VirtualJoystick.prototype.startDrag = function (nub, pointer) {
this.isDragging = true;
this.pointer = pointer;
this.location.set(pointer.x, pointer.y);
this.distance = Phaser.Point.distance(this.base, this.location, true);
this.angle = this.game.math.wrapAngle(this.location.angle(this.base, true) + 180);
this.force = this.game.math.percent(this.distance, this.limit);
this.deltaX = Math.cos(this.game.math.degToRad(this.angle));
this.deltaY = Math.sin(this.game.math.degToRad(this.angle));
};
Phaser.Plugin.VirtualJoystick.prototype.stopDrag = function (nub, pointer) {
console.log('stopDrag');
this.isDragging = false;
this.distance = 0;
this.angle = 0;
this.force = 0;
this.nub.x = this.base.x;
this.nub.y = this.base.y;
this.deltaX = 0;
this.deltaY = 0;
this.limitPoint.set(this.base.x, this.base.y);
};
Phaser.Plugin.VirtualJoystick.prototype.move = function (pointer, x, y) {
if (!this.isDragging)
{
return;
}
var _location = new Phaser.Point(x,y);
var _distance = Phaser.Point.distance(this.limitPoint,_location, true);
if (_distance > this.limit)
{
//超出了返回的点击事件,不处理 多点触摸的时候很有用
//console.log("超出了返回的点击事件,不处理");
return;
}
this.location.set(x, y);
this.distance = Phaser.Point.distance(this.base, this.location, true);
this.angle = this.game.math.wrapAngle(this.location.angle(this.base, true) + 180);
this.force = this.game.math.percent(this.distance, this.limit);
if (this.distance < this.limit)
{
this.limitPoint.copyFrom(this.location);
}
else
{
this.baseCircle.circumferencePoint(this.angle, true, this.limitPoint);
}
this.nub.position.set(this.limitPoint.x, this.limitPoint.y);
this.deltaX = Math.cos(this.game.math.degToRad(this.angle));
this.deltaY = Math.sin(this.game.math.degToRad(this.angle));
};
/**
* Given the speed calculate the velocity and return it as a Point object, or set it to the given point object.
* One way to use this is: velocityFromAngle(angle, 200, sprite.velocity) which will set the values directly to the sprites velocity and not create a new Point object.
*
* @method Phaser.Plugin.VirtualJoystick#setVelocity
* @param {Phaser.Sprite} sprite - The Sprite to set the velocity on. The Sprite must have a physics body already set. The value will be set into Sprite.body.velocity.
* @param {number} [minSpeed=0] - The minimum speed the Sprite will move if the joystick is at its default (non-moved) position.
* @param {number} [maxSpeed=100] - The maximum speed the Sprite will move if the joystick is at its full extent.
* @return {Phaser.Sprite} The Sprite object.
*/
Phaser.Plugin.VirtualJoystick.prototype.setVelocity = function (sprite, minSpeed, maxSpeed) {
if (typeof minSpeed === 'undefined') { minSpeed = 0; }
if (typeof maxSpeed === 'undefined') { maxSpeed = 200; }
if (this.force === 0 && minSpeed === 0)
{
sprite.body.velocity.set(0, 0);
}
else
{
var speed = (maxSpeed - minSpeed) * this.force;
sprite.body.velocity.set(this.deltaX * speed, this.deltaY * speed);
}
return sprite;
};
Phaser.Plugin.VirtualJoystick.prototype.update = function () {
if (this.isDragging && (!this.pointer.isDown || !this.pointer.withinGame))
{
this.stopDrag();
}
};
Phaser.Plugin.VirtualJoystick.prototype.addButton = function (x,y,key,callback, callbackContext, upFrame, downFrame) {
var button = this.game.add.button(x, y,
key, callback, callbackContext, upFrame,upFrame, downFrame,upFrame);
button.anchor.setTo(0.5, 0.5);
//button.scale.setTo(0.8, 0.8);
//button.fixedToCamera = true; //our buttons should stay on the same place 跟下面这种方式可以2选一
this.game.stage.addChild(button);
//this.buttons.push(button);
return button;
};
Phaser.Plugin.VirtualJoystick.prototype.addButtonByKey = function (key,callback,callbackContext) {
if(key=="buttonA"){
if (this.buttonA){
return this.buttonA;
}
var x=this.game.stage.width-this.game.stage.width/3;
var y=this.game.stage.height-this.game.stage.height/5;
var upFrame='button1-up';
var downFrame='button1-down';
this.buttonA = this.game.add.button(x, y,
"generic", callback, callbackContext, upFrame,upFrame, downFrame,upFrame);
this.buttonA.onInputDown.add(callback, callbackContext);
this.buttonA.anchor.setTo(0.5, 0.5);
this.game.stage.addChild(this.buttonA);
return this.buttonA;
}else if(key=="buttonB"){
if (this.buttonB){
return this.buttonB;
}
var x=this.game.stage.width-this.game.stage.width/4;
var y=this.game.stage.height-this.game.stage.height/3;
var upFrame='button2-up';
var downFrame='button2-down';
this.buttonB = this.game.add.button(x, y,
"generic", callback, callbackContext, upFrame,upFrame, downFrame,upFrame);
this.buttonB.anchor.setTo(0.5, 0.5);
this.game.stage.addChild(this.buttonB);
return this.buttonB;
}else if(key=="buttonC"){
if (this.buttonC){
return this.buttonC;
}
var x=this.game.stage.width-this.game.stage.width/6;
var y=this.game.stage.height-this.game.stage.height/5;
var upFrame='button3-up';
var downFrame='button3-down';
this.buttonC = this.game.add.button(x, y,
"generic", callback, callbackContext, upFrame,upFrame, downFrame,upFrame);
this.buttonC.anchor.setTo(0.5, 0.5);
this.game.stage.addChild(this.buttonC);
return this.buttonC;
}
};
| {
"content_hash": "6de8bf3427c1e7b1fd6ae7838bcefc75",
"timestamp": "",
"source": "github",
"line_count": 326,
"max_line_length": 166,
"avg_line_length": 30.033742331288344,
"alnum_prop": 0.6501889490348279,
"repo_name": "liangdas/mqantserver",
"id": "330cdd0445aac1d23f9c6bb092bdb70764d8a3e5",
"size": "9885",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "static/hitball/js/lib/phaser_plugins/VirtualJoystick/VirtualJoystick.js",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2765"
},
{
"name": "Go",
"bytes": "86975"
},
{
"name": "HTML",
"bytes": "8388"
},
{
"name": "JavaScript",
"bytes": "7480585"
},
{
"name": "PLSQL",
"bytes": "9373"
},
{
"name": "Python",
"bytes": "2481"
}
],
"symlink_target": ""
} |
<?php
namespace Checkdomain\Holiday\Provider;
/**
* Class AbstractTest
*/
abstract class AbstractTest extends \PHPUnit\Framework\TestCase
{
/**
* @var \Checkdomain\Holiday\ProviderInterface
*/
protected $provider;
/**
* @param string $date
* @param string $state
* @param array $expectation
*
* @dataProvider dateProvider
*/
public function testHolidays($date, $state = null, array $expectation = null)
{
$date = new \DateTime($date);
$holiday = $this->provider->getHolidayByDate($date, $state);
if ($expectation === null) {
$this->assertNull($holiday);
} else {
$this->assertNotNull($holiday, 'No Holiday found but assumed to find one on ' . $date->format('Y-m-d'));
$this->assertEquals($date->format('Y-m-d'), $holiday->getDate()->format('Y-m-d'));
foreach ($expectation as $property => $expectedValue) {
$method = 'get' . ucfirst($property);
$value = $holiday->$method();
$this->assertEquals($expectedValue, $value);
}
}
}
}
| {
"content_hash": "dc0a468580a947021ae8b269cdf11509",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 116,
"avg_line_length": 28.097560975609756,
"alnum_prop": 0.5616319444444444,
"repo_name": "checkdomain/Holiday",
"id": "88df612bf69984815ce33b249f391a8f18b6255c",
"size": "1152",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/Provider/AbstractTest.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "84555"
}
],
"symlink_target": ""
} |
var exec = require('cordova/exec');
var STATE_ACTIVE = 'active';
var STATE_IDLE = 'idle';
var STATE_LOCKED = 'locked';
// This keeps track of the current state.
var currentState = STATE_ACTIVE;
// These listeners are called when the device's state changes.
var stateListeners = [ ];
// This interval represents the amount of idle time required for the above state listeners to fire with status "idle".
var detectionIntervalInSeconds = 60;
// This is a timeout used to trigger the idle state.
var idleTimer = null;
// This tracks the last time a touch event occurred.
var lastInputDate = new Date();
exports.queryState = function(_detectionIntervalInSeconds, callback) {
var currentDate = new Date();
msSinceLastInput = currentDate.getTime() - lastInputDate.getTime();
if (msSinceLastInput >= _detectionIntervalInSeconds * 1000) {
callback(STATE_IDLE);
} else {
callback(STATE_ACTIVE);
}
};
exports.setDetectionInterval = function(_detectionIntervalInSeconds) {
detectionIntervalInSeconds = _detectionIntervalInSeconds;
resetIdleTimer();
};
exports.onStateChanged = { };
exports.onStateChanged.addListener = function(listener) {
if (typeof(listener) === 'function') {
stateListeners.push(listener);
} else {
console.log('Attempted to add a non-function listener.');
}
}
// This function fires the state listeners with the given state.
var fireListeners = function(state) {
for (var i = 0; i < stateListeners.length; i++) {
stateListeners[i](state);
}
};
// This function resets the idle timer.
var resetIdleTimer = function() {
clearTimeout(idleTimer);
idleTimer = setTimeout(changeState, detectionIntervalInSeconds * 1000, STATE_IDLE);
};
// This function handles a change in state.
var changeState = function(state) {
// If we have a new state, set it appropriately and fire the state listeners.
if (currentState !== state) {
currentState = state;
fireListeners(state);
}
}
// This function handles a touch event by resetting the idle timer and changing the state.
var handleTouchEvent = function() {
lastInputDate = new Date();
resetIdleTimer();
changeState(STATE_ACTIVE);
}
// Start the idle timer.
resetIdleTimer();
// Add a touch listener.
document.addEventListener('touchstart', handleTouchEvent);
| {
"content_hash": "7882a2c0261b85a9e82a3893d76dcc3e",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 118,
"avg_line_length": 29.810126582278482,
"alnum_prop": 0.7065817409766454,
"repo_name": "0359xiaodong/mobile-chrome-apps",
"id": "401c8b2301e9d05b7ea7ffa1669b6d47adadca84",
"size": "2525",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "chrome-cordova/plugins/chrome.idle/idle.js",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
package thosakwe.fray.pipeline;
import java.io.IOException;
/**
* Created on 2/7/2017.
*/
public class CoreImporterTransformer extends FrayTransformer {
@Override
public boolean claim(FrayAsset asset) {
return asset.getExtension().equals("fray");
}
@Override
public String getName() {
return "<core> Importer";
}
@Override
public FrayAsset transform(FrayAsset asset) throws IOException {
final String src = asset.readAsString();
return asset.changeText("import <core>;\n" + src);
}
}
| {
"content_hash": "8a761b7db73c130e01903e561b06c850",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 68,
"avg_line_length": 23.25,
"alnum_prop": 0.6594982078853047,
"repo_name": "fray-lang/fray",
"id": "d6b38e6581bfd0ad8c7375eb2b0cada88b04bf52",
"size": "558",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/thosakwe/fray/pipeline/CoreImporterTransformer.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "6124"
},
{
"name": "Batchfile",
"bytes": "90"
},
{
"name": "Java",
"bytes": "195129"
},
{
"name": "JavaScript",
"bytes": "6730"
},
{
"name": "Shell",
"bytes": "131"
},
{
"name": "TypeScript",
"bytes": "129"
}
],
"symlink_target": ""
} |
<?php
/**
* ZipCode Plugin
*
* UserApplePie
* @author David (DaVaR) Sargent <[email protected]>
* @version 4.3.0
*/
namespace Libs;
use Libs\Database,
Libs\Cookie,
Libs\BBCode;
class ZipCode
{
private static $db;
// Get City, State based on Zip Code
public static function getCityState($zip){
self::$db = Database::get();
$data = self::$db->select("
SELECT
*
FROM
".PREFIX."cities_extended
WHERE
zip = :zip
",
array(':zip' => $zip));
return $data[0]->city.", ".$data[0]->state_code;
}
}
| {
"content_hash": "2b4e4878ccf976d58d74eac8923761e0",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 56,
"avg_line_length": 15.771428571428572,
"alnum_prop": 0.5996376811594203,
"repo_name": "UserApplePie/UserApplePie-v3",
"id": "8a24bde95ddbe3f89a1b0537df30c56e7d9b38b5",
"size": "552",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/System/Libraries/ZipCode.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "6043"
},
{
"name": "CSS",
"bytes": "68732"
},
{
"name": "HTML",
"bytes": "13"
},
{
"name": "JavaScript",
"bytes": "11166"
},
{
"name": "PHP",
"bytes": "730821"
}
],
"symlink_target": ""
} |
/* $OpenBSD: auth1.c,v 1.79 2013/05/19 02:42:42 djm Exp $ */
/*
* Copyright (c) 1995 Tatu Ylonen <[email protected]>, Espoo, Finland
* All rights reserved
*
* As far as I am concerned, the code I have written for this software
* can be used freely for any purpose. Any derived versions of this
* software must be clearly marked as such, and if the derived work is
* incompatible with the protocol description in the RFC file, it must be
* called by a name other than "ssh" or "Secure Shell".
*/
#include "includes.h"
#include <sys/types.h>
#include <stdarg.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <pwd.h>
#include "openbsd-compat/sys-queue.h"
#include "xmalloc.h"
#include "rsa.h"
#include "ssh1.h"
#include "packet.h"
#include "buffer.h"
#include "log.h"
#include "servconf.h"
#include "compat.h"
#include "key.h"
#include "hostfile.h"
#include "auth.h"
#include "channels.h"
#include "session.h"
#include "uidswap.h"
#ifdef GSSAPI
#include "ssh-gss.h"
#endif
#include "monitor_wrap.h"
#include "buffer.h"
/* import */
extern ServerOptions options;
extern Buffer loginmsg;
static int auth1_process_password(Authctxt *);
static int auth1_process_rsa(Authctxt *);
static int auth1_process_rhosts_rsa(Authctxt *);
static int auth1_process_tis_challenge(Authctxt *);
static int auth1_process_tis_response(Authctxt *);
static char *client_user = NULL; /* Used to fill in remote user for PAM */
struct AuthMethod1 {
int type;
char *name;
int *enabled;
int (*method)(Authctxt *);
};
const struct AuthMethod1 auth1_methods[] = {
{
SSH_CMSG_AUTH_PASSWORD, "password",
&options.password_authentication, auth1_process_password
},
{
SSH_CMSG_AUTH_RSA, "rsa",
&options.rsa_authentication, auth1_process_rsa
},
{
SSH_CMSG_AUTH_RHOSTS_RSA, "rhosts-rsa",
&options.rhosts_rsa_authentication, auth1_process_rhosts_rsa
},
{
SSH_CMSG_AUTH_TIS, "challenge-response",
&options.challenge_response_authentication,
auth1_process_tis_challenge
},
{
SSH_CMSG_AUTH_TIS_RESPONSE, "challenge-response",
&options.challenge_response_authentication,
auth1_process_tis_response
},
{ -1, NULL, NULL, NULL}
};
static const struct AuthMethod1
*lookup_authmethod1(int type)
{
int i;
for (i = 0; auth1_methods[i].name != NULL; i++)
if (auth1_methods[i].type == type)
return (&(auth1_methods[i]));
return (NULL);
}
static char *
get_authname(int type)
{
const struct AuthMethod1 *a;
static char buf[64];
if ((a = lookup_authmethod1(type)) != NULL)
return (a->name);
snprintf(buf, sizeof(buf), "bad-auth-msg-%d", type);
return (buf);
}
/*ARGSUSED*/
static int
auth1_process_password(Authctxt *authctxt)
{
int authenticated = 0;
char *password;
u_int dlen;
/*
* Read user password. It is in plain text, but was
* transmitted over the encrypted channel so it is
* not visible to an outside observer.
*/
password = packet_get_string(&dlen);
packet_check_eom();
/* Try authentication with the password. */
authenticated = PRIVSEP(auth_password(authctxt, password));
memset(password, 0, dlen);
free(password);
return (authenticated);
}
/*ARGSUSED*/
static int
auth1_process_rsa(Authctxt *authctxt)
{
int authenticated = 0;
BIGNUM *n;
/* RSA authentication requested. */
if ((n = BN_new()) == NULL)
fatal("do_authloop: BN_new failed");
packet_get_bignum(n);
packet_check_eom();
authenticated = auth_rsa(authctxt, n);
BN_clear_free(n);
return (authenticated);
}
/*ARGSUSED*/
static int
auth1_process_rhosts_rsa(Authctxt *authctxt)
{
int keybits, authenticated = 0;
u_int bits;
Key *client_host_key;
u_int ulen;
/*
* Get client user name. Note that we just have to
* trust the client; root on the client machine can
* claim to be any user.
*/
client_user = packet_get_cstring(&ulen);
/* Get the client host key. */
client_host_key = key_new(KEY_RSA1);
bits = packet_get_int();
packet_get_bignum(client_host_key->rsa->e);
packet_get_bignum(client_host_key->rsa->n);
keybits = BN_num_bits(client_host_key->rsa->n);
if (keybits < 0 || bits != (u_int)keybits) {
verbose("Warning: keysize mismatch for client_host_key: "
"actual %d, announced %d",
BN_num_bits(client_host_key->rsa->n), bits);
}
packet_check_eom();
authenticated = auth_rhosts_rsa(authctxt, client_user,
client_host_key);
key_free(client_host_key);
auth_info(authctxt, "ruser %.100s", client_user);
return (authenticated);
}
/*ARGSUSED*/
static int
auth1_process_tis_challenge(Authctxt *authctxt)
{
char *challenge;
if ((challenge = get_challenge(authctxt)) == NULL)
return (0);
debug("sending challenge '%s'", challenge);
packet_start(SSH_SMSG_AUTH_TIS_CHALLENGE);
packet_put_cstring(challenge);
free(challenge);
packet_send();
packet_write_wait();
return (-1);
}
/*ARGSUSED*/
static int
auth1_process_tis_response(Authctxt *authctxt)
{
int authenticated = 0;
char *response;
u_int dlen;
response = packet_get_string(&dlen);
packet_check_eom();
authenticated = verify_response(authctxt, response);
memset(response, 'r', dlen);
free(response);
return (authenticated);
}
/*
* read packets, try to authenticate the user and
* return only if authentication is successful
*/
static void
do_authloop(Authctxt *authctxt)
{
int authenticated = 0;
int prev = 0, type = 0;
const struct AuthMethod1 *meth;
debug("Attempting authentication for %s%.100s.",
authctxt->valid ? "" : "invalid user ", authctxt->user);
/* If the user has no password, accept authentication immediately. */
if (options.permit_empty_passwd && options.password_authentication &&
#ifdef KRB5
(!options.kerberos_authentication || options.kerberos_or_local_passwd) &&
#endif
PRIVSEP(auth_password(authctxt, ""))) {
#ifdef USE_PAM
if (options.use_pam && (PRIVSEP(do_pam_account())))
#endif
{
auth_log(authctxt, 1, 0, "without authentication",
NULL);
return;
}
}
/* Indicate that authentication is needed. */
packet_start(SSH_SMSG_FAILURE);
packet_send();
packet_write_wait();
for (;;) {
/* default to fail */
authenticated = 0;
/* Get a packet from the client. */
prev = type;
type = packet_read();
/*
* If we started challenge-response authentication but the
* next packet is not a response to our challenge, release
* the resources allocated by get_challenge() (which would
* normally have been released by verify_response() had we
* received such a response)
*/
if (prev == SSH_CMSG_AUTH_TIS &&
type != SSH_CMSG_AUTH_TIS_RESPONSE)
abandon_challenge_response(authctxt);
if (authctxt->failures >= options.max_authtries)
goto skip;
if ((meth = lookup_authmethod1(type)) == NULL) {
logit("Unknown message during authentication: "
"type %d", type);
goto skip;
}
if (!*(meth->enabled)) {
verbose("%s authentication disabled.", meth->name);
goto skip;
}
authenticated = meth->method(authctxt);
if (authenticated == -1)
continue; /* "postponed" */
#ifdef BSD_AUTH
if (authctxt->as) {
auth_close(authctxt->as);
authctxt->as = NULL;
}
#endif
if (!authctxt->valid && authenticated)
fatal("INTERNAL ERROR: authenticated invalid user %s",
authctxt->user);
#ifdef _UNICOS
if (authenticated && cray_access_denied(authctxt->user)) {
authenticated = 0;
fatal("Access denied for user %s.",authctxt->user);
}
#endif /* _UNICOS */
#ifndef HAVE_CYGWIN
/* Special handling for root */
if (authenticated && authctxt->pw->pw_uid == 0 &&
!auth_root_allowed(meth->name)) {
authenticated = 0;
# ifdef SSH_AUDIT_EVENTS
PRIVSEP(audit_event(SSH_LOGIN_ROOT_DENIED));
# endif
}
#endif
#ifdef USE_PAM
if (options.use_pam && authenticated &&
!PRIVSEP(do_pam_account())) {
char *msg;
size_t len;
error("Access denied for user %s by PAM account "
"configuration", authctxt->user);
len = buffer_len(&loginmsg);
buffer_append(&loginmsg, "\0", 1);
msg = buffer_ptr(&loginmsg);
/* strip trailing newlines */
if (len > 0)
while (len > 0 && msg[--len] == '\n')
msg[len] = '\0';
else
msg = "Access denied.";
packet_disconnect("%s", msg);
}
#endif
skip:
/* Log before sending the reply */
auth_log(authctxt, authenticated, 0, get_authname(type), NULL);
free(client_user);
client_user = NULL;
if (authenticated)
return;
if (++authctxt->failures >= options.max_authtries) {
#ifdef SSH_AUDIT_EVENTS
PRIVSEP(audit_event(SSH_LOGIN_EXCEED_MAXTRIES));
#endif
packet_disconnect(AUTH_FAIL_MSG, authctxt->user);
}
packet_start(SSH_SMSG_FAILURE);
packet_send();
packet_write_wait();
}
}
/*
* Performs authentication of an incoming connection. Session key has already
* been exchanged and encryption is enabled.
*/
void
do_authentication(Authctxt *authctxt)
{
u_int ulen;
char *user, *style = NULL;
/* Get the name of the user that we wish to log in as. */
packet_read_expect(SSH_CMSG_USER);
/* Get the user name. */
user = packet_get_cstring(&ulen);
packet_check_eom();
if ((style = strchr(user, ':')) != NULL)
*style++ = '\0';
authctxt->user = user;
authctxt->style = style;
/* Verify that the user is a valid user. */
if ((authctxt->pw = PRIVSEP(getpwnamallow(user))) != NULL)
authctxt->valid = 1;
else {
debug("do_authentication: invalid user %s", user);
authctxt->pw = fakepw();
}
/* Configuration may have changed as a result of Match */
if (options.num_auth_methods != 0)
fatal("AuthenticationMethods is not supported with SSH "
"protocol 1");
setproctitle("%s%s", authctxt->valid ? user : "unknown",
use_privsep ? " [net]" : "");
#ifdef USE_PAM
if (options.use_pam)
PRIVSEP(start_pam(authctxt));
#endif
/*
* If we are not running as root, the user must have the same uid as
* the server.
*/
#ifndef HAVE_CYGWIN
if (!use_privsep && getuid() != 0 && authctxt->pw &&
authctxt->pw->pw_uid != getuid())
packet_disconnect("Cannot change user when server not running as root.");
#endif
/*
* Loop until the user has been authenticated or the connection is
* closed, do_authloop() returns only if authentication is successful
*/
do_authloop(authctxt);
/* The user has been authenticated and accepted. */
packet_start(SSH_SMSG_SUCCESS);
packet_send();
packet_write_wait();
}
| {
"content_hash": "d97c8aab3a0c521eb4517bb252534b99",
"timestamp": "",
"source": "github",
"line_count": 439,
"max_line_length": 78,
"avg_line_length": 23.496583143507973,
"alnum_prop": 0.6716432380029084,
"repo_name": "jhbsz/OSI-OS",
"id": "f1ac598147c5cae70f3e366fe5a25251731ba4d4",
"size": "10315",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "crypto/openssh/auth1.c",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import { Injectable } from "@angular/core";
import { Observable } from "rxjs/Observable";
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/finally';
import { Http, Response, RequestOptions } from '@angular/http';
import { URLSearchParams } from "@angular/http";
import { Headers } from '@angular/http';
import { Mission, Tracking, MissionResponse, Alarm, SARUser, Expence } from '../models/models';
import { ExceptionService } from './exception.service';
import { SpinnerService } from '../blocks/spinner/spinner';
import { ConfigService } from "./config.service";
@Injectable()
export class SARService {
token: string;
available: string;
id: any;
missions: Observable<Mission[]>;
mission: Mission;
alarm: Alarm;
alarms: Observable<Alarm[]>;
user: SARUser;
tracking: Tracking;
longitude: number;
latitude: number;
baseUrl: string;
constructor(
private http: Http,
public ExceptionService: ExceptionService,
private spinnerService: SpinnerService,
private configService: ConfigService
) {
this.baseUrl = configService.get("sar.status.url");
}
private getApiUrl(uri: string) {
return this.baseUrl + uri;
}
savePushtokenOnUser(token: string, userId: number) {
const url = this.getApiUrl('sarusers/' + userId);
const options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
const body = {
"deviceToken": token
};
console.log("SARSERVICE savepushtokneonsuser _--");
console.log(url);
console.log(body);
return this.http.patch(url, JSON.stringify(body), options)
.map(res => {
console.log(res.json());
return res.json();
})
}
/**
* Configures options with token and header for http-operations on server.
*/
private _configureOptions(options: RequestOptions) {
let headers = new Headers();
headers.append('Authorization', 'Bearer ' + JSON.parse(localStorage.getItem("currentUser")).access_token);
headers.append('Content-Type', 'application/json');
options.headers = headers;
}
/**
* Filter out ID from JSON-object.
* @param key
* @param value
*/
private _replacer(key, value) {
if (key == "id") return undefined;
else return value;
}
/**
* Returns SARUser-object with active user from localStorage.
* @return Object from JSON-string
*/
getUser() {
return JSON.parse(localStorage.getItem('currentUser'));
}
/**
* Fetches user from database
* @param id of wanted user
* @return SARUser object
*/
getUserFromDAO(id: number) {
let url = this.getApiUrl("/sarusers/" + id);
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
return this.http.get(url, options)
.map((res) => {
this.user = res.json();
return this.user;
})
.catch(this.ExceptionService.catchBadResponse);
}
/**
* Logs user in to the app. Stores currentUser in localStorage.
*/
public login(username: string, password: string) {
let data = new URLSearchParams();
data.append('username', username);
data.append('password', password);
let options = new RequestOptions();
this.spinnerService.show();
let url = this.getApiUrl('/sarusers/login');
return this.http
.post(url, data, options)
.map((response: Response) => {
// login successful if there's a token in the response
let res = response.json();
if (res.user && res.user.access_token) {
// store user details and token in local storage to keep user logged in between page refreshes
localStorage.setItem('currentUser', JSON.stringify(res.user));
} else {
return Observable.throw(new Error("Login error"));
}
})
.catch(this.ExceptionService.catchBadResponse)
.finally(() => this.spinnerService.hide())
}
/**
* Logs the user out of the app. Also unregisters from push notifications.
*/
public logout() {
localStorage.removeItem('currentUser');
}
/**
* Method to persist user availability to the server.
* @param isAvailable new status of user.
*/
public setAvailability(isAvailable: boolean) {
let postBody = {
"isAvailable": isAvailable
};
let url = this.getApiUrl("/sarusers/" + this.getUser().id);
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
this.spinnerService.show();
return this.http.patch(url, JSON.stringify(postBody), options)
.map(res => {
console.log("Set available to " + isAvailable + " in db");
return res.json();
})
.catch(this.ExceptionService.catchBadResponse)
.finally(() => this.spinnerService.hide())
}
/**
* Method to persist SARUser-variable isTrackable to database.
* @param isTrackable boolean value to persist.
*/
public setTrackable(isTrackable: boolean) {
let postBody = {
"isTrackable": isTrackable
};
let url = this.getApiUrl("/sarusers/" + this.getUser().id);
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
return this.http.patch(url, JSON.stringify(postBody), options)
.map(res => {
return res.json();
})
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Returns a spesific Mission from database.
* @param missionId Id of wanted Mission.
* @return Mission-object
*/
public getMission(missionId?: number) {
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/missions/' + missionId + '?filter[include][alarms]');
this.spinnerService.show();
return this.http
.get(url, options)
.map(response => {
this.mission = response.json();
return this.mission;
})
.catch(this.ExceptionService.catchBadResponse)
.finally(() => this.spinnerService.hide())
}
/**
* Gets a list of all/latests missions
* @param limit - the maximum number of missions the method should fetch.
* @return Observable with array of latest Missions
*/
public getMissions(limit?: number) {
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/missions');
this.spinnerService.show();
return this.http.get(url, options)
.map((response) => {
this.missions = response.json();
return this.missions;
})
.catch(this.ExceptionService.catchBadResponse)
.finally(() => this.spinnerService.hide())
}
/**
* Method fetches all Missions with an alarm connected to a user id from DAO.
* @param userId SARUser id
*/
public getUserAlarms(userId: number) {
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/attendants?filter[include][mission]&filter[where][sarUserId]=' + userId);
this.spinnerService.show();
return this.http.get(url, options)
.map(response => { return response.json(); })
.catch(this.ExceptionService.catchBadResponse)
.finally(() => this.spinnerService.hide())
}
public getUserResponses(userId: number, missionId: number) {
console.log("SARservice check if user has answered mission");
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/missions?filter[include]=missionresponses&' +
'filter[where][sarUserId]='+userId+'&filter[where][missionId]=' + missionId);
return this.http.get(url, options)
.map(response => {
let responses = [];
let res = response.json();
res.forEach((mission) => {
responses = responses.concat(mission.missionresponses);
});
console.log("User has number of sent responses " + responses.length);
console.log("User is " + (responses.slice(-1).pop().response ? 'coming' : ' not coming'));
return responses;
})
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Send a persist MissionResponse to the database.
* @param missionResponse MissionResponse-object with user input.
*/
public postMissionResponse(missionResponse: MissionResponse) {
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/missionResponses');
let postBody = JSON.stringify(missionResponse, this._replacer);
console.log(postBody);
return this.http.post(url, missionResponse, options)
.map(res => { return res.json(); })
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Fetch alarm from database by id.
* @param alarmId id of wanted alarm
*/
public getAlarm(alarmId: number) {
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/alarms/' + alarmId);
return this.http.get(url, options)
.map((response) => {
this.alarm = response.json();
return this.alarm;
})
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Fetch all alarms of one mission
* @param missionId
*/
public getAlarms(missionId: number) {
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let url = this.getApiUrl('/missions/' + missionId + '/alarms');
return this.http.get(url, options)
.map((res) => { return res.json(); })
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Method to persist user expense
* @param amount of the expense
* @param description of the expense
*/
public addExpense(expense: Expence) {
let user = this.getUser();
let url = this.getApiUrl("/Expences");
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
let postBody = {
"title": "Brukerutgift for " + user.name,
"description": expense.description,
"amount": expense.amount,
"mission": expense.missionId,
"person": expense.sARUserId
};
return this.http.post(url, JSON.stringify(postBody), options)
.map(res => {
console.log(res.json());
return res.json()
})
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Method to persist a new Tracking-object to the database.
* @param Tracking object to be persisted
*/
public setTracking(lat: number, lng: number, missionResponseId: number) {
console.log("-----set tracking---------");
let tracking = {
"date": new Date(),
"geopoint": {
"lat": lat,
"lng": lng
},
"missionResponseId": missionResponseId
};
console.log("posting initial " + JSON.stringify(tracking));
let url = this.getApiUrl('/missionresponses/' + missionResponseId + '/tracking');
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
return this.http.post(url, JSON.stringify(tracking), options)
.map(res => { return res.json(); })
.catch(this.ExceptionService.catchBadResponse)
}
/**
* Method to update an excisting Tracking-object
* @param Tracking object to be persisted. Id, geopoint and date required
*/
public updateTracking(latitude: number, longitude: number, id: number, missionResponseId: number) {
let postBody = {
"date": new Date(),
"geopoint": {
"lat": latitude,
"lng": longitude
},
"id": id,
"missionResponseId": missionResponseId
};
console.log("posting updated tracking" + JSON.stringify(postBody));
let url = this.getApiUrl("/Trackings/" + id);
let options = new RequestOptions({ withCredentials: true });
this._configureOptions(options);
return this.http.patch(url, JSON.stringify(postBody), options)
.map((res) => { console.log(res.json()); return res.json(); })
.do(() => console.log("sendte object"))
}
}
| {
"content_hash": "b3c01e2fc7f5f12240e236566adf0bb1",
"timestamp": "",
"source": "github",
"line_count": 419,
"max_line_length": 114,
"avg_line_length": 32.35560859188544,
"alnum_prop": 0.5846426200486834,
"repo_name": "DISCOOS/sar-status",
"id": "89b1a7621a42a929aad9a40374882bd1c929603a",
"size": "13557",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/services/sar.service.ts",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "9122"
},
{
"name": "HTML",
"bytes": "10780"
},
{
"name": "JavaScript",
"bytes": "2158"
},
{
"name": "TypeScript",
"bytes": "45506"
}
],
"symlink_target": ""
} |
package happy
import (
"github.com/kevinconway/wrapgen/v2/internal/test/happy"
)
type DemoType struct{}
type Demo interface {
Make(param happy.ExportedStruct, second DemoType) happy.NonInterfaceAlias
}
| {
"content_hash": "8a196c1f4f0b28d67e28ec0d30e8317b",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 74,
"avg_line_length": 18.818181818181817,
"alnum_prop": 0.7874396135265701,
"repo_name": "kevinconway/wrapgen",
"id": "55cb709ca4452daf722b4d815a086dd5059303a4",
"size": "207",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "internal/test/sub/happy/happy.go",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Go",
"bytes": "39610"
}
],
"symlink_target": ""
} |
package com.gmarciani.gmparser.automaton.base;
import org.junit.Test;
import com.gmarciani.gmparser.models.automaton.state.State;
import com.gmarciani.gmparser.models.automaton.state.States;
import com.gmarciani.gmparser.models.automaton.transition.DeterministicTransitionFunction;
import com.gmarciani.gmparser.models.automaton.transition.TransitionFunction;
import com.gmarciani.gmparser.models.grammar.alphabet.Alphabet;
public class TestDeterministicTransitionFunction {
@Test public void createCompleteFunction() {
System.out.println("#createCompleteFunction");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
for (State<String> state : states) {
for (Character symbol : alphabet) {
function.addTransition(state, state, symbol);
function.addTransition(state, stateThree, symbol);
}
}
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createIncompleteFunction() {
System.out.println("#createIncompleteFunction");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
function.addTransition(stateOne, stateTwo, 'a');
function.addTransition(stateTwo, stateThree, 'b');
function.addTransition(stateThree, stateThree, 'c');
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createCompleteAndRemoveAllStateSymbol() {
System.out.println("#createCompleteAndRemoveAllStateSymbol");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
for (State<String> state : states) {
for (Character symbol : alphabet) {
function.addTransition(state, state, symbol);
function.addTransition(state, stateThree, symbol);
}
}
function.removeAllTransitionsFromStateBySymbol(stateTwo, 'b');
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createCompleteAndRemoveAllState() {
System.out.println("#createCompleteAndRemoveAllState");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
for (State<String> state : states) {
for (Character symbol : alphabet) {
function.addTransition(state, state, symbol);
function.addTransition(state, stateThree, symbol);
}
}
function.removeAllTransitionsFromState(stateTwo);
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createCompleteAndRemoveAllSymbol() {
System.out.println("#createCompleteAndRemoveAllSymbol");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
for (State<String> state : states) {
for (Character symbol : alphabet) {
function.addTransition(state, state, symbol);
function.addTransition(state, stateThree, symbol);
}
}
function.removeAllTransitionsBySymbol('b');
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createEmptyFunction() {
System.out.println("#createEmptyFunction");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createEmptyStates() {
System.out.println("#createEmptyStates");
States<String> states = new States<String>();
Alphabet alphabet = new Alphabet('a', 'b', 'c');
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createEmptyAlphabet() {
System.out.println("#createEmptyAlphabet");
State<String> stateOne = new State<String>(1, "one");
State<String> stateTwo = new State<String>(2, "two");
State<String> stateThree = new State<String>(3, "three");
States<String> states = new States<String>();
states.add(stateOne);
states.add(stateTwo);
states.add(stateThree);
Alphabet alphabet = new Alphabet();
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
@Test public void createEmptyStatesAlphabet() {
System.out.println("#createEmptyStatesAlphabet");
States<String> states = new States<String>();
Alphabet alphabet = new Alphabet();
TransitionFunction<String> function = new DeterministicTransitionFunction<String>(states, alphabet, states);
System.out.println(function);
System.out.println(function.toExtendedFormattedTransitionFunction());
}
}
| {
"content_hash": "ab4ad1d106f22a04f15f5b16dd948b4f",
"timestamp": "",
"source": "github",
"line_count": 196,
"max_line_length": 110,
"avg_line_length": 36.56122448979592,
"alnum_prop": 0.7313703600334915,
"repo_name": "gmarciani/gmparser",
"id": "dab5fea6ad8500382fdcc1c7b226c127c6c5cd5b",
"size": "8313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/com/gmarciani/gmparser/automaton/base/TestDeterministicTransitionFunction.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "427020"
}
],
"symlink_target": ""
} |
using System;
namespace _01_SumArreyElements
{
class Program
{
static void Main(string[] args)
{
int n = int.Parse(Console.ReadLine());
int[] numbers = new int[n];
int sum = 0;
for (int i = 0; i < numbers.Length; i++)
{
int currentNum = int.Parse(Console.ReadLine());
sum += currentNum;
}
Console.WriteLine(sum);
}
}
}
| {
"content_hash": "f49229ab76106a6c9fa22b7343aa761b",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 63,
"avg_line_length": 20.695652173913043,
"alnum_prop": 0.453781512605042,
"repo_name": "IPetrov007/Softuni-Programing-Fundamentals",
"id": "f39a038760bbafdb3331886a9b0162bf840b4e7e",
"size": "478",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Arreys/01_SumArreyElements/01_SumArreyElements/Program.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "528607"
},
{
"name": "Smalltalk",
"bytes": "4"
}
],
"symlink_target": ""
} |
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <X11/Xatom.h>
#include <X11/cursorfont.h>
#include <compiz-core.h>
static CompMetadata resizeMetadata;
#define ResizeUpMask (1L << 0)
#define ResizeDownMask (1L << 1)
#define ResizeLeftMask (1L << 2)
#define ResizeRightMask (1L << 3)
#define RESIZE_MODE_NORMAL 0
#define RESIZE_MODE_OUTLINE 1
#define RESIZE_MODE_RECTANGLE 2
#define RESIZE_MODE_STRETCH 3
#define RESIZE_MODE_LAST RESIZE_MODE_STRETCH
struct _ResizeKeys
{
char *name;
int dx;
int dy;
unsigned int warpMask;
unsigned int resizeMask;
} rKeys[] = {
{ "Left", -1, 0, ResizeLeftMask | ResizeRightMask, ResizeLeftMask },
{ "Right", 1, 0, ResizeLeftMask | ResizeRightMask, ResizeRightMask },
{ "Up", 0, -1, ResizeUpMask | ResizeDownMask, ResizeUpMask },
{ "Down", 0, 1, ResizeUpMask | ResizeDownMask, ResizeDownMask }
};
#define NUM_KEYS (sizeof (rKeys) / sizeof (rKeys[0]))
#define MIN_KEY_WIDTH_INC 24
#define MIN_KEY_HEIGHT_INC 24
#define RESIZE_DISPLAY_OPTION_INITIATE_NORMAL_KEY 0
#define RESIZE_DISPLAY_OPTION_INITIATE_OUTLINE_KEY 1
#define RESIZE_DISPLAY_OPTION_INITIATE_RECTANGLE_KEY 2
#define RESIZE_DISPLAY_OPTION_INITIATE_STRETCH_KEY 3
#define RESIZE_DISPLAY_OPTION_INITIATE_BUTTON 4
#define RESIZE_DISPLAY_OPTION_INITIATE_KEY 5
#define RESIZE_DISPLAY_OPTION_MODE 6
#define RESIZE_DISPLAY_OPTION_BORDER_COLOR 7
#define RESIZE_DISPLAY_OPTION_FILL_COLOR 8
#define RESIZE_DISPLAY_OPTION_NORMAL_MATCH 9
#define RESIZE_DISPLAY_OPTION_OUTLINE_MATCH 10
#define RESIZE_DISPLAY_OPTION_RECTANGLE_MATCH 11
#define RESIZE_DISPLAY_OPTION_STRETCH_MATCH 12
#define RESIZE_DISPLAY_OPTION_NUM 13
static int displayPrivateIndex;
typedef struct _ResizeDisplay
{
CompOption opt[RESIZE_DISPLAY_OPTION_NUM];
int screenPrivateIndex;
HandleEventProc handleEvent;
Atom resizeNotifyAtom;
Atom resizeInformationAtom;
CompWindow *w;
int mode;
XRectangle savedGeometry;
XRectangle geometry;
int releaseButton;
unsigned int mask;
int pointerDx;
int pointerDy;
KeyCode key[NUM_KEYS];
Region constraintRegion;
int inRegionStatus;
int lastGoodHotSpotY;
int lastGoodWidth;
int lastGoodHeight;
} ResizeDisplay;
typedef struct _ResizeScreen
{
int grabIndex;
WindowResizeNotifyProc windowResizeNotify;
PaintOutputProc paintOutput;
PaintWindowProc paintWindow;
DamageWindowRectProc damageWindowRect;
Cursor leftCursor;
Cursor rightCursor;
Cursor upCursor;
Cursor upLeftCursor;
Cursor upRightCursor;
Cursor downCursor;
Cursor downLeftCursor;
Cursor downRightCursor;
Cursor middleCursor;
Cursor cursor[NUM_KEYS];
} ResizeScreen;
#define GET_RESIZE_DISPLAY(d) \
((ResizeDisplay *)(d)->base.privates[displayPrivateIndex].ptr)
#define RESIZE_DISPLAY(d) \
ResizeDisplay * rd = GET_RESIZE_DISPLAY(d)
#define GET_RESIZE_SCREEN(s, rd) \
((ResizeScreen *)(s)->base.privates[(rd)->screenPrivateIndex].ptr)
#define RESIZE_SCREEN(s) \
ResizeScreen * rs = GET_RESIZE_SCREEN(s, GET_RESIZE_DISPLAY(s->display))
#define NUM_OPTIONS(d) (sizeof ((d)->opt) / sizeof (CompOption))
static void
resizeGetPaintRectangle(CompDisplay *d,
BoxPtr pBox)
{
RESIZE_DISPLAY(d);
pBox->x1 = rd->geometry.x - rd->w->input.left;
pBox->y1 = rd->geometry.y - rd->w->input.top;
pBox->x2 = rd->geometry.x +
rd->geometry.width + rd->w->serverBorderWidth * 2 +
rd->w->input.right;
if (rd->w->shaded)
{
pBox->y2 = rd->geometry.y + rd->w->height + rd->w->input.bottom;
}
else
{
pBox->y2 = rd->geometry.y +
rd->geometry.height + rd->w->serverBorderWidth * 2 +
rd->w->input.bottom;
}
}
static void
resizeGetStretchScale(CompWindow *w,
BoxPtr pBox,
float *xScale,
float *yScale)
{
int width, height;
width = w->width + w->input.left + w->input.right;
height = w->height + w->input.top + w->input.bottom;
*xScale = (width) ? (pBox->x2 - pBox->x1) / (float)width : 1.0f;
*yScale = (height) ? (pBox->y2 - pBox->y1) / (float)height : 1.0f;
}
static void
resizeGetStretchRectangle(CompDisplay *d,
BoxPtr pBox)
{
BoxRec box;
float xScale, yScale;
RESIZE_DISPLAY(d);
resizeGetPaintRectangle(d, &box);
resizeGetStretchScale(rd->w, &box, &xScale, &yScale);
pBox->x1 = box.x1 - (rd->w->output.left - rd->w->input.left) * xScale;
pBox->y1 = box.y1 - (rd->w->output.top - rd->w->input.top) * yScale;
pBox->x2 = box.x2 + rd->w->output.right * xScale;
pBox->y2 = box.y2 + rd->w->output.bottom * yScale;
}
static void
resizeDamageRectangle(CompScreen *s,
BoxPtr pBox)
{
REGION reg;
reg.rects = ®.extents;
reg.numRects = 1;
reg.extents = *pBox;
reg.extents.x1 -= 1;
reg.extents.y1 -= 1;
reg.extents.x2 += 1;
reg.extents.y2 += 1;
damageScreenRegion(s, ®);
}
static Cursor
resizeCursorFromResizeMask(CompScreen *s,
unsigned int mask)
{
Cursor cursor;
RESIZE_SCREEN(s);
if (mask & ResizeLeftMask)
{
if (mask & ResizeDownMask)
cursor = rs->downLeftCursor;
else if (mask & ResizeUpMask)
cursor = rs->upLeftCursor;
else
cursor = rs->leftCursor;
}
else if (mask & ResizeRightMask)
{
if (mask & ResizeDownMask)
cursor = rs->downRightCursor;
else if (mask & ResizeUpMask)
cursor = rs->upRightCursor;
else
cursor = rs->rightCursor;
}
else if (mask & ResizeUpMask)
{
cursor = rs->upCursor;
}
else
{
cursor = rs->downCursor;
}
return cursor;
}
static void
resizeSendResizeNotify(CompDisplay *d)
{
XEvent xev;
RESIZE_DISPLAY(d);
xev.xclient.type = ClientMessage;
xev.xclient.display = d->display;
xev.xclient.format = 32;
xev.xclient.message_type = rd->resizeNotifyAtom;
xev.xclient.window = rd->w->id;
xev.xclient.data.l[0] = rd->geometry.x;
xev.xclient.data.l[1] = rd->geometry.y;
xev.xclient.data.l[2] = rd->geometry.width;
xev.xclient.data.l[3] = rd->geometry.height;
xev.xclient.data.l[4] = 0;
XSendEvent(d->display,
rd->w->screen->root,
FALSE,
SubstructureRedirectMask | SubstructureNotifyMask,
&xev);
}
static void
resizeUpdateWindowProperty(CompDisplay *d)
{
unsigned long data[4];
RESIZE_DISPLAY(d);
data[0] = rd->geometry.x;
data[1] = rd->geometry.y;
data[2] = rd->geometry.width;
data[3] = rd->geometry.height;
XChangeProperty(d->display, rd->w->id,
rd->resizeInformationAtom,
XA_CARDINAL, 32, PropModeReplace,
(unsigned char *)data, 4);
}
static void
resizeFinishResizing(CompDisplay *d)
{
RESIZE_DISPLAY(d);
(*rd->w->screen->windowUngrabNotify)(rd->w);
XDeleteProperty(d->display,
rd->w->id,
rd->resizeInformationAtom);
rd->w = NULL;
}
static Region
resizeGetConstraintRegion(CompScreen *s)
{
Region region;
int i;
region = XCreateRegion();
if (!region)
return NULL;
for (i = 0; i < s->nOutputDev; i++)
XUnionRectWithRegion(&s->outputDev[i].workArea, region, region);
return region;
}
static Bool
resizeInitiate(CompDisplay *d,
CompAction *action,
CompActionState state,
CompOption *option,
int nOption)
{
CompWindow *w;
Window xid;
RESIZE_DISPLAY(d);
xid = getIntOptionNamed(option, nOption, "window", 0);
w = findWindowAtDisplay(d, xid);
if (w && (w->actions & CompWindowActionResizeMask))
{
unsigned int mask;
int x, y;
int button;
int i;
RESIZE_SCREEN(w->screen);
x = getIntOptionNamed(option, nOption, "x", pointerX);
y = getIntOptionNamed(option, nOption, "y", pointerY);
button = getIntOptionNamed(option, nOption, "button", -1);
mask = getIntOptionNamed(option, nOption, "direction", 0);
/* Initiate the resize in the direction suggested by the
* sector of the window the mouse is in, eg drag in top left
* will resize up and to the left. Keyboard resize starts out
* with the cursor in the middle of the window and then starts
* resizing the edge corresponding to the next key press. */
if (state & CompActionStateInitKey)
{
mask = 0;
}
else if (!mask)
{
int sectorSizeX = w->serverWidth / 3;
int sectorSizeY = w->serverHeight / 3;
int posX = x - w->serverX;
int posY = y - w->serverY;
if (posX < sectorSizeX)
mask |= ResizeLeftMask;
else if (posX > (2 * sectorSizeX))
mask |= ResizeRightMask;
if (posY < sectorSizeY)
mask |= ResizeUpMask;
else if (posY > (2 * sectorSizeY))
mask |= ResizeDownMask;
/* if the pointer was in the middle of the window,
just prevent input to the window */
if (!mask)
return TRUE;
}
if (otherScreenGrabExist(w->screen, "resize", NULL))
return FALSE;
if (rd->w)
return FALSE;
if (w->type & (CompWindowTypeDesktopMask |
CompWindowTypeDockMask |
CompWindowTypeFullscreenMask))
return FALSE;
if (w->attrib.override_redirect)
return FALSE;
if (state & CompActionStateInitButton)
action->state |= CompActionStateTermButton;
if (w->shaded)
mask &= ~(ResizeUpMask | ResizeDownMask);
rd->w = w;
rd->mask = mask;
rd->savedGeometry.x = w->serverX;
rd->savedGeometry.y = w->serverY;
rd->savedGeometry.width = w->serverWidth;
rd->savedGeometry.height = w->serverHeight;
rd->geometry = rd->savedGeometry;
rd->pointerDx = x - pointerX;
rd->pointerDy = y - pointerY;
if ((w->state & MAXIMIZE_STATE) == MAXIMIZE_STATE)
{
/* if the window is fully maximized, showing the outline or
rectangle would be visually distracting as the window can't
be resized anyway; so we better don't use them in this case */
rd->mode = RESIZE_MODE_NORMAL;
}
else
{
rd->mode = rd->opt[RESIZE_DISPLAY_OPTION_MODE].value.i;
for (i = 0; i <= RESIZE_MODE_LAST; i++)
{
if (action == &rd->opt[i].value.action)
{
rd->mode = i;
break;
}
}
if (i > RESIZE_MODE_LAST)
{
int index;
for (i = 0; i <= RESIZE_MODE_LAST; i++)
{
index = RESIZE_DISPLAY_OPTION_NORMAL_MATCH + i;
if (matchEval(&rd->opt[index].value.match, w))
{
rd->mode = i;
break;
}
}
}
}
if (!rs->grabIndex)
{
Cursor cursor;
if (state & CompActionStateInitKey)
{
cursor = rs->middleCursor;
}
else
{
cursor = resizeCursorFromResizeMask(w->screen, mask);
}
rs->grabIndex = pushScreenGrab(w->screen, cursor, "resize");
}
if (rs->grabIndex)
{
unsigned int grabMask = CompWindowGrabResizeMask |
CompWindowGrabButtonMask;
Bool sourceExternalApp = getBoolOptionNamed(option, nOption,
"external", FALSE);
if (sourceExternalApp)
grabMask |= CompWindowGrabExternalAppMask;
BoxRec box;
rd->releaseButton = button;
(w->screen->windowGrabNotify)(w, x, y, state, grabMask);
if (d->opt[COMP_DISPLAY_OPTION_RAISE_ON_CLICK].value.b)
updateWindowAttributes(w,
CompStackingUpdateModeAboveFullscreen);
/* using the paint rectangle is enough here
as we don't have any stretch yet */
resizeGetPaintRectangle(d, &box);
resizeDamageRectangle(w->screen, &box);
if (state & CompActionStateInitKey)
{
int xRoot, yRoot;
xRoot = w->serverX + (w->serverWidth / 2);
yRoot = w->serverY + (w->serverHeight / 2);
warpPointer(w->screen, xRoot - pointerX, yRoot - pointerY);
}
if (rd->constraintRegion)
XDestroyRegion(rd->constraintRegion);
if (sourceExternalApp)
{
/* Prevent resizing beyond work area edges when resize is
initiated externally (e.g. with window frame or menu)
and not with a key (e.g. alt+button) */
rd->inRegionStatus = RectangleOut;
rd->lastGoodHotSpotY = -1;
rd->lastGoodWidth = w->serverWidth;
rd->lastGoodHeight = w->serverHeight;
rd->constraintRegion = resizeGetConstraintRegion(w->screen);
}
else
{
rd->constraintRegion = NULL;
}
}
}
return FALSE;
}
static Bool
resizeTerminate(CompDisplay *d,
CompAction *action,
CompActionState state,
CompOption *option,
int nOption)
{
RESIZE_DISPLAY(d);
if (rd->w)
{
CompWindow *w = rd->w;
XWindowChanges xwc;
unsigned int mask = 0;
RESIZE_SCREEN(w->screen);
if (rd->mode == RESIZE_MODE_NORMAL)
{
if (state & CompActionStateCancel)
{
xwc.x = rd->savedGeometry.x;
xwc.y = rd->savedGeometry.y;
xwc.width = rd->savedGeometry.width;
xwc.height = rd->savedGeometry.height;
mask = CWX | CWY | CWWidth | CWHeight;
}
}
else
{
XRectangle geometry;
if (state & CompActionStateCancel)
geometry = rd->savedGeometry;
else
geometry = rd->geometry;
if (memcmp(&geometry, &rd->savedGeometry, sizeof (geometry)) == 0)
{
BoxRec box;
if (rd->mode == RESIZE_MODE_STRETCH)
resizeGetStretchRectangle(d, &box);
else
resizeGetPaintRectangle(d, &box);
resizeDamageRectangle(w->screen, &box);
}
else
{
xwc.x = geometry.x;
xwc.y = geometry.y;
xwc.width = geometry.width;
xwc.height = geometry.height;
mask = CWX | CWY | CWWidth | CWHeight;
}
}
if ((mask & CWWidth) && xwc.width == w->serverWidth)
mask &= ~CWWidth;
if ((mask & CWHeight) && xwc.height == w->serverHeight)
mask &= ~CWHeight;
if (mask)
{
if (mask & (CWWidth | CWHeight))
sendSyncRequest(w);
configureXWindow(w, mask, &xwc);
}
if (!(mask & (CWWidth | CWHeight)))
resizeFinishResizing(d);
if (rs->grabIndex)
{
removeScreenGrab(w->screen, rs->grabIndex, NULL);
rs->grabIndex = 0;
}
rd->releaseButton = 0;
}
action->state &= ~(CompActionStateTermKey | CompActionStateTermButton);
return FALSE;
}
static void
resizeUpdateWindowSize(CompDisplay *d)
{
RESIZE_DISPLAY(d);
if (rd->w->syncWait)
return;
if (rd->w->serverWidth != rd->geometry.width ||
rd->w->serverHeight != rd->geometry.height)
{
XWindowChanges xwc;
xwc.x = rd->geometry.x;
xwc.y = rd->geometry.y;
xwc.width = rd->geometry.width;
xwc.height = rd->geometry.height;
sendSyncRequest(rd->w);
configureXWindow(rd->w,
CWX | CWY | CWWidth | CWHeight,
&xwc);
}
}
static void
resizeHandleKeyEvent(CompScreen *s,
KeyCode keycode)
{
RESIZE_SCREEN(s);
RESIZE_DISPLAY(s->display);
if (rs->grabIndex && rd->w)
{
CompWindow *w = rd->w;
int widthInc, heightInc, i;
widthInc = w->sizeHints.width_inc;
heightInc = w->sizeHints.height_inc;
if (widthInc < MIN_KEY_WIDTH_INC)
widthInc = MIN_KEY_WIDTH_INC;
if (heightInc < MIN_KEY_HEIGHT_INC)
heightInc = MIN_KEY_HEIGHT_INC;
for (i = 0; i < NUM_KEYS; i++)
{
if (keycode != rd->key[i])
continue;
if (rd->mask & rKeys[i].warpMask)
{
XWarpPointer(s->display->display, None, None, 0, 0, 0, 0,
rKeys[i].dx * widthInc,
rKeys[i].dy * heightInc);
}
else
{
int x, y, left, top, width, height;
left = w->serverX - w->input.left;
top = w->serverY - w->input.top;
width = w->input.left + w->serverWidth + w->input.right;
height = w->input.top + w->serverHeight + w->input.bottom;
x = left + width * (rKeys[i].dx + 1) / 2;
y = top + height * (rKeys[i].dy + 1) / 2;
warpPointer(s, x - pointerX, y - pointerY);
rd->mask = rKeys[i].resizeMask;
updateScreenGrab(s, rs->grabIndex, rs->cursor[i]);
}
break;
}
}
}
static void
resizeHandleMotionEvent(CompScreen *s,
int xRoot,
int yRoot)
{
RESIZE_SCREEN(s);
if (rs->grabIndex)
{
BoxRec box;
int w, h; /* size of window contents */
int wX, wY, wWidth, wHeight; /* rect. for window contents+borders */
int i;
int workAreaSnapDistance = 15;
RESIZE_DISPLAY(s->display);
w = rd->savedGeometry.width;
h = rd->savedGeometry.height;
if (!rd->mask)
{
CompWindow *w = rd->w;
int xDist, yDist;
int minPointerOffsetX, minPointerOffsetY;
xDist = xRoot - (w->serverX + (w->serverWidth / 2));
yDist = yRoot - (w->serverY + (w->serverHeight / 2));
/* decision threshold is 10% of window size */
minPointerOffsetX = MIN(20, w->serverWidth / 10);
minPointerOffsetY = MIN(20, w->serverHeight / 10);
/* if we reached the threshold in one direction,
make the threshold in the other direction smaller
so there is a chance that this threshold also can
be reached (by diagonal movement) */
if (abs(xDist) > minPointerOffsetX)
minPointerOffsetY /= 2;
else if (abs(yDist) > minPointerOffsetY)
minPointerOffsetX /= 2;
if (abs(xDist) > minPointerOffsetX)
{
if (xDist > 0)
rd->mask |= ResizeRightMask;
else
rd->mask |= ResizeLeftMask;
}
if (abs(yDist) > minPointerOffsetY)
{
if (yDist > 0)
rd->mask |= ResizeDownMask;
else
rd->mask |= ResizeUpMask;
}
/* if the pointer movement was enough to determine a
direction, warp the pointer to the appropriate edge
and set the right cursor */
if (rd->mask)
{
Cursor cursor;
CompScreen *s = rd->w->screen;
CompAction *action;
int pointerAdjustX = 0;
int pointerAdjustY = 0;
int option = RESIZE_DISPLAY_OPTION_INITIATE_KEY;
RESIZE_SCREEN(s);
action = &rd->opt[option].value.action;
action->state |= CompActionStateTermButton;
if (rd->mask & ResizeRightMask)
pointerAdjustX = w->serverX + w->serverWidth +
w->input.right - xRoot;
else if (rd->mask & ResizeLeftMask)
pointerAdjustX = w->serverX - w->input.left - xRoot;
if (rd->mask & ResizeDownMask)
pointerAdjustY = w->serverY + w->serverHeight +
w->input.bottom - yRoot;
else if (rd->mask & ResizeUpMask)
pointerAdjustY = w->serverY - w->input.top - yRoot;
warpPointer(s, pointerAdjustX, pointerAdjustY);
cursor = resizeCursorFromResizeMask(s, rd->mask);
updateScreenGrab(s, rs->grabIndex, cursor);
}
}
else
{
/* only accumulate pointer movement if a mask is
already set as we don't have a use for the
difference information otherwise */
rd->pointerDx += xRoot - lastPointerX;
rd->pointerDy += yRoot - lastPointerY;
}
if (rd->mask & ResizeLeftMask)
w -= rd->pointerDx;
else if (rd->mask & ResizeRightMask)
w += rd->pointerDx;
if (rd->mask & ResizeUpMask)
h -= rd->pointerDy;
else if (rd->mask & ResizeDownMask)
h += rd->pointerDy;
if (rd->w->state & CompWindowStateMaximizedVertMask)
h = rd->w->serverHeight;
if (rd->w->state & CompWindowStateMaximizedHorzMask)
w = rd->w->serverWidth;
constrainNewWindowSize(rd->w, w, h, &w, &h);
/* compute rect. for window + borders */
wWidth = w + rd->w->input.left + rd->w->input.right;
wHeight = h + rd->w->input.top + rd->w->input.bottom;
if (rd->mask & ResizeLeftMask)
wX = rd->savedGeometry.x + rd->savedGeometry.width -
(w + rd->w->input.left);
else
wX = rd->savedGeometry.x - rd->w->input.left;
if (rd->mask & ResizeUpMask)
wY = rd->savedGeometry.y + rd->savedGeometry.height -
(h + rd->w->input.top);
else
wY = rd->savedGeometry.y - rd->w->input.top;
/* Check if resized edge(s) are near a work-area boundary */
for (i = 0; i < s->nOutputDev; i++)
{
const XRectangle *workArea = &s->outputDev[i].workArea;
/* if window and work-area intersect in x axis */
if (wX + wWidth > workArea->x &&
wX < workArea->x + workArea->width)
{
if (rd->mask & ResizeLeftMask)
{
int dw = workArea->x - wX;
if (0 < dw && dw < workAreaSnapDistance)
{
w -= dw;
wWidth -= dw;
wX += dw;
}
}
else if (rd->mask & ResizeRightMask)
{
int dw = wX + wWidth - (workArea->x + workArea->width);
if (0 < dw && dw < workAreaSnapDistance)
{
w -= dw;
wWidth -= dw;
}
}
}
/* if window and work-area intersect in y axis */
if (wY + wHeight > workArea->y &&
wY < workArea->y + workArea->height)
{
if (rd->mask & ResizeUpMask)
{
int dh = workArea->y - wY;
if (0 < dh && dh < workAreaSnapDistance)
{
h -= dh;
wHeight -= dh;
wY += dh;
}
}
else if (rd->mask & ResizeDownMask)
{
int dh = wY + wHeight - (workArea->y + workArea->height);
if (0 < dh && dh < workAreaSnapDistance)
{
h -= dh;
wHeight -= dh;
}
}
}
}
if (rd->constraintRegion)
{
int minWidth = 50;
int minHeight = 50;
/* rect. for a minimal height window + borders
(used for the constraining in X axis) */
int minimalInputHeight = minHeight +
rd->w->input.top + rd->w->input.bottom;
/* small hot-spot square (on window's corner or edge) that is to be
constrained to the combined output work-area region */
int x, y;
int width = rd->w->input.top; /* square size = title bar height */
int height = width;
int status;
/* compute x & y for constrained hot-spot rect */
if (rd->mask & ResizeLeftMask)
x = wX;
else if (rd->mask & ResizeRightMask)
x = wX + wWidth - width;
else
x = MIN(MAX(xRoot, wX), wX + wWidth - width);
if (rd->mask & ResizeUpMask)
y = wY;
else if (rd->mask & ResizeDownMask)
y = wY + wHeight - height;
else
y = MIN(MAX(yRoot, wY), wY + wHeight - height);
status = XRectInRegion(rd->constraintRegion,
x, y, width, height);
/* only constrain movement if previous position was valid */
if (rd->inRegionStatus == RectangleIn)
{
int xStatus, yForXResize;
int nx = x;
int nw = w;
int nh = h;
if (rd->mask & (ResizeLeftMask | ResizeRightMask))
{
xStatus = status;
if (rd->mask & ResizeUpMask)
yForXResize = wY + wHeight - minimalInputHeight;
else if (rd->mask & ResizeDownMask)
yForXResize = wY + minimalInputHeight - height;
else
yForXResize = y;
if (XRectInRegion(rd->constraintRegion,
x, yForXResize,
width, height) != RectangleIn)
{
if (rd->lastGoodHotSpotY >= 0)
yForXResize = rd->lastGoodHotSpotY;
else
yForXResize = y;
}
}
if (rd->mask & ResizeLeftMask)
{
while ((nw > minWidth) && xStatus != RectangleIn)
{
xStatus = XRectInRegion(rd->constraintRegion,
nx, yForXResize, width, height);
if (xStatus != RectangleIn)
{
nw--;
nx++;
}
}
if (nw > minWidth)
{
x = nx;
w = nw;
}
}
else if (rd->mask & ResizeRightMask)
{
while ((nw > minWidth) && xStatus != RectangleIn)
{
xStatus = XRectInRegion(rd->constraintRegion,
nx, yForXResize,
width, height);
if (xStatus != RectangleIn)
{
nw--;
nx--;
}
}
if (nw > minWidth)
{
x = nx;
w = nw;
}
}
if (rd->mask & ResizeUpMask)
{
while ((nh > minHeight) && status != RectangleIn)
{
status = XRectInRegion(rd->constraintRegion,
x, y, width, height);
if (status != RectangleIn)
{
nh--;
y++;
}
}
if (nh > minHeight)
h = nh;
}
else if (rd->mask & ResizeDownMask)
{
while ((nh > minHeight) && status != RectangleIn)
{
status = XRectInRegion(rd->constraintRegion,
x, y, width, height);
if (status != RectangleIn)
{
nh--;
y--;
}
}
if (nh > minHeight)
h = nh;
}
if (((rd->mask & (ResizeLeftMask | ResizeRightMask)) &&
xStatus == RectangleIn) ||
((rd->mask & (ResizeUpMask | ResizeDownMask)) &&
status == RectangleIn))
{
/* hot-spot inside work-area region, store good values */
rd->lastGoodHotSpotY = y;
rd->lastGoodWidth = w;
rd->lastGoodHeight = h;
}
else
{
/* failed to find a good hot-spot position, restore size */
w = rd->lastGoodWidth;
h = rd->lastGoodHeight;
}
}
else
{
rd->inRegionStatus = status;
}
}
if (rd->mode != RESIZE_MODE_NORMAL)
{
if (rd->mode == RESIZE_MODE_STRETCH)
resizeGetStretchRectangle(s->display, &box);
else
resizeGetPaintRectangle(s->display, &box);
resizeDamageRectangle(s, &box);
}
if (rd->mask & ResizeLeftMask)
rd->geometry.x -= w - rd->geometry.width;
if (rd->mask & ResizeUpMask)
rd->geometry.y -= h - rd->geometry.height;
rd->geometry.width = w;
rd->geometry.height = h;
if (rd->mode != RESIZE_MODE_NORMAL)
{
if (rd->mode == RESIZE_MODE_STRETCH)
resizeGetStretchRectangle(s->display, &box);
else
resizeGetPaintRectangle(s->display, &box);
resizeDamageRectangle(s, &box);
}
else
{
resizeUpdateWindowSize(s->display);
}
resizeUpdateWindowProperty(s->display);
resizeSendResizeNotify(s->display);
}
}
static void
resizeHandleEvent(CompDisplay *d,
XEvent *event)
{
CompScreen *s;
RESIZE_DISPLAY(d);
switch (event->type)
{
case KeyPress:
s = findScreenAtDisplay(d, event->xkey.root);
if (s)
resizeHandleKeyEvent(s, event->xkey.keycode);
break;
case ButtonRelease:
s = findScreenAtDisplay(d, event->xbutton.root);
if (s)
{
RESIZE_SCREEN(s);
if (rs->grabIndex)
{
if (rd->releaseButton == -1 ||
event->xbutton.button == rd->releaseButton)
{
int opt = RESIZE_DISPLAY_OPTION_INITIATE_BUTTON;
CompAction *action = &rd->opt[opt].value.action;
resizeTerminate(d, action, CompActionStateTermButton,
NULL, 0);
}
}
}
break;
case MotionNotify:
s = findScreenAtDisplay(d, event->xmotion.root);
if (s)
resizeHandleMotionEvent(s, pointerX, pointerY);
break;
case EnterNotify:
case LeaveNotify:
s = findScreenAtDisplay(d, event->xcrossing.root);
if (s)
resizeHandleMotionEvent(s, pointerX, pointerY);
break;
case ClientMessage:
if (event->xclient.message_type == d->wmMoveResizeAtom)
{
CompWindow *w;
if (event->xclient.data.l[2] <= WmMoveResizeSizeLeft ||
event->xclient.data.l[2] == WmMoveResizeSizeKeyboard)
{
w = findWindowAtDisplay(d, event->xclient.window);
if (w)
{
CompOption o[7];
int option;
o[0].type = CompOptionTypeInt;
o[0].name = "window";
o[0].value.i = event->xclient.window;
o[1].type = CompOptionTypeBool;
o[1].name = "external";
o[1].value.b = TRUE;
if (event->xclient.data.l[2] == WmMoveResizeSizeKeyboard)
{
option = RESIZE_DISPLAY_OPTION_INITIATE_KEY;
resizeInitiate(d, &rd->opt[option].value.action,
CompActionStateInitKey,
o, 2);
}
else
{
static unsigned int mask[] = {
ResizeUpMask | ResizeLeftMask,
ResizeUpMask,
ResizeUpMask | ResizeRightMask,
ResizeRightMask,
ResizeDownMask | ResizeRightMask,
ResizeDownMask,
ResizeDownMask | ResizeLeftMask,
ResizeLeftMask,
};
unsigned int mods;
Window root, child;
int xRoot, yRoot, i;
option = RESIZE_DISPLAY_OPTION_INITIATE_BUTTON;
XQueryPointer(d->display, w->screen->root,
&root, &child, &xRoot, &yRoot,
&i, &i, &mods);
/* TODO: not only button 1 */
if (mods & Button1Mask)
{
o[2].type = CompOptionTypeInt;
o[2].name = "modifiers";
o[2].value.i = mods;
o[3].type = CompOptionTypeInt;
o[3].name = "x";
o[3].value.i = event->xclient.data.l[0];
o[4].type = CompOptionTypeInt;
o[4].name = "y";
o[4].value.i = event->xclient.data.l[1];
o[5].type = CompOptionTypeInt;
o[5].name = "direction";
o[5].value.i = mask[event->xclient.data.l[2]];
o[6].type = CompOptionTypeInt;
o[6].name = "button";
o[6].value.i = event->xclient.data.l[3] ?
event->xclient.data.l[3] : -1;
resizeInitiate(d,
&rd->opt[option].value.action,
CompActionStateInitButton,
o, 7);
resizeHandleMotionEvent(w->screen, xRoot, yRoot);
}
}
}
}
else if (rd->w && event->xclient.data.l[2] == WmMoveResizeCancel)
{
if (rd->w->id == event->xclient.window)
{
int option;
option = RESIZE_DISPLAY_OPTION_INITIATE_BUTTON;
resizeTerminate(d, &rd->opt[option].value.action,
CompActionStateCancel, NULL, 0);
option = RESIZE_DISPLAY_OPTION_INITIATE_KEY;
resizeTerminate(d, &rd->opt[option].value.action,
CompActionStateCancel, NULL, 0);
}
}
}
break;
case DestroyNotify:
if (rd->w && rd->w->id == event->xdestroywindow.window)
{
int option;
option = RESIZE_DISPLAY_OPTION_INITIATE_BUTTON;
resizeTerminate(d, &rd->opt[option].value.action, 0, NULL, 0);
option = RESIZE_DISPLAY_OPTION_INITIATE_KEY;
resizeTerminate(d, &rd->opt[option].value.action, 0, NULL, 0);
}
break;
case UnmapNotify:
if (rd->w && rd->w->id == event->xunmap.window)
{
int option;
option = RESIZE_DISPLAY_OPTION_INITIATE_BUTTON;
resizeTerminate(d, &rd->opt[option].value.action, 0, NULL, 0);
option = RESIZE_DISPLAY_OPTION_INITIATE_KEY;
resizeTerminate(d, &rd->opt[option].value.action, 0, NULL, 0);
}
default:
break;
}
UNWRAP(rd, d, handleEvent);
(*d->handleEvent)(d, event);
WRAP(rd, d, handleEvent, resizeHandleEvent);
if (event->type == d->syncEvent + XSyncAlarmNotify)
{
if (rd->w)
{
XSyncAlarmNotifyEvent *sa;
sa = (XSyncAlarmNotifyEvent *)event;
if (rd->w->syncAlarm == sa->alarm)
resizeUpdateWindowSize(d);
}
}
}
static void
resizeWindowResizeNotify(CompWindow *w,
int dx,
int dy,
int dwidth,
int dheight)
{
RESIZE_DISPLAY(w->screen->display);
RESIZE_SCREEN(w->screen);
UNWRAP(rs, w->screen, windowResizeNotify);
(*w->screen->windowResizeNotify)(w, dx, dy, dwidth, dheight);
WRAP(rs, w->screen, windowResizeNotify, resizeWindowResizeNotify);
if (rd->w == w && !rs->grabIndex)
resizeFinishResizing(w->screen->display);
}
static void
resizePaintRectangle(CompScreen *s,
const ScreenPaintAttrib *sa,
const CompTransform *transform,
CompOutput *output,
unsigned short *borderColor,
unsigned short *fillColor)
{
BoxRec box;
CompTransform sTransform = *transform;
resizeGetPaintRectangle(s->display, &box);
glPushMatrix();
transformToScreenSpace(s, output, -DEFAULT_Z_CAMERA, &sTransform);
glLoadMatrixf(sTransform.m);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_BLEND);
/* fill rectangle */
if (fillColor)
{
glColor4usv(fillColor);
glRecti(box.x1, box.y2, box.x2, box.y1);
}
/* draw outline */
glColor4usv(borderColor);
glLineWidth(2.0);
glBegin(GL_LINE_LOOP);
glVertex2i(box.x1, box.y1);
glVertex2i(box.x2, box.y1);
glVertex2i(box.x2, box.y2);
glVertex2i(box.x1, box.y2);
glEnd();
/* clean up */
glColor4usv(defaultColor);
glDisable(GL_BLEND);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glPopMatrix();
}
static Bool
resizePaintOutput(CompScreen *s,
const ScreenPaintAttrib *sAttrib,
const CompTransform *transform,
Region region,
CompOutput *output,
unsigned int mask)
{
Bool status;
RESIZE_SCREEN(s);
RESIZE_DISPLAY(s->display);
if (rd->w && (s == rd->w->screen))
{
if (rd->mode == RESIZE_MODE_STRETCH)
mask |= PAINT_SCREEN_WITH_TRANSFORMED_WINDOWS_MASK;
}
UNWRAP(rs, s, paintOutput);
status = (*s->paintOutput)(s, sAttrib, transform, region, output, mask);
WRAP(rs, s, paintOutput, resizePaintOutput);
if (status && rd->w && (s == rd->w->screen))
{
unsigned short *border, *fill;
border = rd->opt[RESIZE_DISPLAY_OPTION_BORDER_COLOR].value.c;
fill = rd->opt[RESIZE_DISPLAY_OPTION_FILL_COLOR].value.c;
switch (rd->mode)
{
case RESIZE_MODE_OUTLINE:
resizePaintRectangle(s, sAttrib, transform, output, border, NULL);
break;
case RESIZE_MODE_RECTANGLE:
resizePaintRectangle(s, sAttrib, transform, output, border, fill);
default:
break;
}
}
return status;
}
static Bool
resizePaintWindow(CompWindow *w,
const WindowPaintAttrib *attrib,
const CompTransform *transform,
Region region,
unsigned int mask)
{
CompScreen *s = w->screen;
Bool status;
RESIZE_SCREEN(s);
RESIZE_DISPLAY(s->display);
if (w == rd->w && rd->mode == RESIZE_MODE_STRETCH)
{
FragmentAttrib fragment;
CompTransform wTransform = *transform;
BoxRec box;
float xOrigin, yOrigin;
float xScale, yScale;
if (mask & PAINT_WINDOW_OCCLUSION_DETECTION_MASK)
return FALSE;
UNWRAP(rs, s, paintWindow);
status = (*s->paintWindow)(w, attrib, transform, region,
mask | PAINT_WINDOW_NO_CORE_INSTANCE_MASK);
WRAP(rs, s, paintWindow, resizePaintWindow);
initFragmentAttrib(&fragment, &w->lastPaint);
if (w->alpha || fragment.opacity != OPAQUE)
mask |= PAINT_WINDOW_TRANSLUCENT_MASK;
resizeGetPaintRectangle(s->display, &box);
resizeGetStretchScale(w, &box, &xScale, &yScale);
xOrigin = w->attrib.x - w->input.left;
yOrigin = w->attrib.y - w->input.top;
matrixTranslate(&wTransform, xOrigin, yOrigin, 0.0f);
matrixScale(&wTransform, xScale, yScale, 1.0f);
matrixTranslate(&wTransform,
(rd->geometry.x - w->attrib.x) / xScale - xOrigin,
(rd->geometry.y - w->attrib.y) / yScale - yOrigin,
0.0f);
glPushMatrix();
glLoadMatrixf(wTransform.m);
(*s->drawWindow)(w, &wTransform, &fragment, region,
mask | PAINT_WINDOW_TRANSFORMED_MASK);
glPopMatrix();
}
else
{
UNWRAP(rs, s, paintWindow);
status = (*s->paintWindow)(w, attrib, transform, region, mask);
WRAP(rs, s, paintWindow, resizePaintWindow);
}
return status;
}
static Bool
resizeDamageWindowRect(CompWindow *w,
Bool initial,
BoxPtr rect)
{
Bool status = FALSE;
RESIZE_SCREEN(w->screen);
RESIZE_DISPLAY(w->screen->display);
if (w == rd->w && rd->mode == RESIZE_MODE_STRETCH)
{
BoxRec box;
resizeGetStretchRectangle(w->screen->display, &box);
resizeDamageRectangle(w->screen, &box);
status = TRUE;
}
UNWRAP(rs, w->screen, damageWindowRect);
status |= (*w->screen->damageWindowRect)(w, initial, rect);
WRAP(rs, w->screen, damageWindowRect, resizeDamageWindowRect);
return status;
}
static CompOption *
resizeGetDisplayOptions(CompPlugin *plugin,
CompDisplay *display,
int *count)
{
RESIZE_DISPLAY(display);
*count = NUM_OPTIONS(rd);
return rd->opt;
}
static Bool
resizeSetDisplayOption(CompPlugin *plugin,
CompDisplay *display,
const char *name,
CompOptionValue *value)
{
CompOption *o;
RESIZE_DISPLAY(display);
o = compFindOption(rd->opt, NUM_OPTIONS(rd), name, NULL);
if (!o)
return FALSE;
return compSetDisplayOption(display, o, value);
}
static const CompMetadataOptionInfo resizeDisplayOptionInfo[] = {
{ "initiate_normal_key", "key", 0, resizeInitiate, resizeTerminate },
{ "initiate_outline_key", "key", 0, resizeInitiate, resizeTerminate },
{ "initiate_rectangle_key", "key", 0, resizeInitiate, resizeTerminate },
{ "initiate_stretch_key", "key", 0, resizeInitiate, resizeTerminate },
{ "initiate_button", "button", 0, resizeInitiate, resizeTerminate },
{ "initiate_key", "key", 0, resizeInitiate, resizeTerminate },
{ "mode", "int", RESTOSTRING(0, RESIZE_MODE_LAST), 0, 0 },
{ "border_color", "color", 0, 0, 0 },
{ "fill_color", "color", 0, 0, 0 },
{ "normal_match", "match", 0, 0, 0 },
{ "outline_match", "match", 0, 0, 0 },
{ "rectangle_match", "match", 0, 0, 0 },
{ "stretch_match", "match", 0, 0, 0 }
};
static Bool
resizeInitDisplay(CompPlugin *p,
CompDisplay *d)
{
ResizeDisplay *rd;
int i;
if (!checkPluginABI("core", CORE_ABIVERSION))
return FALSE;
rd = malloc(sizeof (ResizeDisplay));
if (!rd)
return FALSE;
if (!compInitDisplayOptionsFromMetadata(d,
&resizeMetadata,
resizeDisplayOptionInfo,
rd->opt,
RESIZE_DISPLAY_OPTION_NUM))
{
free(rd);
return FALSE;
}
rd->screenPrivateIndex = allocateScreenPrivateIndex(d);
if (rd->screenPrivateIndex < 0)
{
compFiniDisplayOptions(d, rd->opt, RESIZE_DISPLAY_OPTION_NUM);
free(rd);
return FALSE;
}
rd->w = 0;
rd->releaseButton = 0;
rd->resizeNotifyAtom = XInternAtom(d->display,
"_COMPIZ_RESIZE_NOTIFY", 0);
rd->resizeInformationAtom = XInternAtom(d->display,
"_COMPIZ_RESIZE_INFORMATION", 0);
for (i = 0; i < NUM_KEYS; i++)
rd->key[i] = XKeysymToKeycode(d->display,
XStringToKeysym(rKeys[i].name));
rd->constraintRegion = NULL;
WRAP(rd, d, handleEvent, resizeHandleEvent);
d->base.privates[displayPrivateIndex].ptr = rd;
return TRUE;
}
static void
resizeFiniDisplay(CompPlugin *p,
CompDisplay *d)
{
RESIZE_DISPLAY(d);
freeScreenPrivateIndex(d, rd->screenPrivateIndex);
UNWRAP(rd, d, handleEvent);
compFiniDisplayOptions(d, rd->opt, RESIZE_DISPLAY_OPTION_NUM);
if (rd->constraintRegion)
XDestroyRegion(rd->constraintRegion);
free(rd);
}
static Bool
resizeInitScreen(CompPlugin *p,
CompScreen *s)
{
ResizeScreen *rs;
RESIZE_DISPLAY(s->display);
rs = malloc(sizeof (ResizeScreen));
if (!rs)
return FALSE;
rs->grabIndex = 0;
rs->leftCursor = XCreateFontCursor(s->display->display, XC_left_side);
rs->rightCursor = XCreateFontCursor(s->display->display, XC_right_side);
rs->upCursor = XCreateFontCursor(s->display->display,
XC_top_side);
rs->upLeftCursor = XCreateFontCursor(s->display->display,
XC_top_left_corner);
rs->upRightCursor = XCreateFontCursor(s->display->display,
XC_top_right_corner);
rs->downCursor = XCreateFontCursor(s->display->display,
XC_bottom_side);
rs->downLeftCursor = XCreateFontCursor(s->display->display,
XC_bottom_left_corner);
rs->downRightCursor = XCreateFontCursor(s->display->display,
XC_bottom_right_corner);
rs->middleCursor = XCreateFontCursor(s->display->display, XC_fleur);
rs->cursor[0] = rs->leftCursor;
rs->cursor[1] = rs->rightCursor;
rs->cursor[2] = rs->upCursor;
rs->cursor[3] = rs->downCursor;
WRAP(rs, s, windowResizeNotify, resizeWindowResizeNotify);
WRAP(rs, s, paintOutput, resizePaintOutput);
WRAP(rs, s, paintWindow, resizePaintWindow);
WRAP(rs, s, damageWindowRect, resizeDamageWindowRect);
s->base.privates[rd->screenPrivateIndex].ptr = rs;
return TRUE;
}
static void
resizeFiniScreen(CompPlugin *p,
CompScreen *s)
{
RESIZE_SCREEN(s);
if (rs->leftCursor)
XFreeCursor(s->display->display, rs->leftCursor);
if (rs->rightCursor)
XFreeCursor(s->display->display, rs->rightCursor);
if (rs->upCursor)
XFreeCursor(s->display->display, rs->upCursor);
if (rs->downCursor)
XFreeCursor(s->display->display, rs->downCursor);
if (rs->middleCursor)
XFreeCursor(s->display->display, rs->middleCursor);
if (rs->upLeftCursor)
XFreeCursor(s->display->display, rs->upLeftCursor);
if (rs->upRightCursor)
XFreeCursor(s->display->display, rs->upRightCursor);
if (rs->downLeftCursor)
XFreeCursor(s->display->display, rs->downLeftCursor);
if (rs->downRightCursor)
XFreeCursor(s->display->display, rs->downRightCursor);
UNWRAP(rs, s, windowResizeNotify);
UNWRAP(rs, s, paintOutput);
UNWRAP(rs, s, paintWindow);
UNWRAP(rs, s, damageWindowRect);
free(rs);
}
static CompBool
resizeInitObject(CompPlugin *p,
CompObject *o)
{
static InitPluginObjectProc dispTab[] = {
(InitPluginObjectProc)0, /* InitCore */
(InitPluginObjectProc)resizeInitDisplay,
(InitPluginObjectProc)resizeInitScreen
};
RETURN_DISPATCH(o, dispTab, ARRAY_SIZE(dispTab), TRUE, (p, o));
}
static void
resizeFiniObject(CompPlugin *p,
CompObject *o)
{
static FiniPluginObjectProc dispTab[] = {
(FiniPluginObjectProc)0, /* FiniCore */
(FiniPluginObjectProc)resizeFiniDisplay,
(FiniPluginObjectProc)resizeFiniScreen
};
DISPATCH(o, dispTab, ARRAY_SIZE(dispTab), (p, o));
}
static CompOption *
resizeGetObjectOptions(CompPlugin *plugin,
CompObject *object,
int *count)
{
static GetPluginObjectOptionsProc dispTab[] = {
(GetPluginObjectOptionsProc)0, /* GetCoreOptions */
(GetPluginObjectOptionsProc)resizeGetDisplayOptions
};
*count = 0;
RETURN_DISPATCH(object, dispTab, ARRAY_SIZE(dispTab),
(void *)count, (plugin, object, count));
}
static CompBool
resizeSetObjectOption(CompPlugin *plugin,
CompObject *object,
const char *name,
CompOptionValue *value)
{
static SetPluginObjectOptionProc dispTab[] = {
(SetPluginObjectOptionProc)0, /* SetCoreOption */
(SetPluginObjectOptionProc)resizeSetDisplayOption
};
RETURN_DISPATCH(object, dispTab, ARRAY_SIZE(dispTab), FALSE,
(plugin, object, name, value));
}
static Bool
resizeInit(CompPlugin *p)
{
if (!compInitPluginMetadataFromInfo(&resizeMetadata,
p->vTable->name,
resizeDisplayOptionInfo,
RESIZE_DISPLAY_OPTION_NUM,
0, 0))
return FALSE;
displayPrivateIndex = allocateDisplayPrivateIndex();
if (displayPrivateIndex < 0)
{
compFiniMetadata(&resizeMetadata);
return FALSE;
}
compAddMetadataFromFile(&resizeMetadata, p->vTable->name);
return TRUE;
}
static void
resizeFini(CompPlugin *p)
{
freeDisplayPrivateIndex(displayPrivateIndex);
compFiniMetadata(&resizeMetadata);
}
static CompMetadata *
resizeGetMetadata(CompPlugin *plugin)
{
return &resizeMetadata;
}
CompPluginVTable resizeVTable = {
"resize",
resizeGetMetadata,
resizeInit,
resizeFini,
resizeInitObject,
resizeFiniObject,
resizeGetObjectOptions,
resizeSetObjectOption
};
CompPluginVTable *
getCompPluginInfo20070830(void)
{
return &resizeVTable;
}
| {
"content_hash": "dd05631230d0700580bf705254c27561",
"timestamp": "",
"source": "github",
"line_count": 1783,
"max_line_length": 89,
"avg_line_length": 30.486259113853055,
"alnum_prop": 0.4891918244200379,
"repo_name": "zmike/compiz",
"id": "c70847b21b86440e1c12f4aa8ee6c48596cf6145",
"size": "55560",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/exports/plugins/resize.c",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "1977611"
},
{
"name": "Makefile",
"bytes": "1424"
},
{
"name": "Shell",
"bytes": "516"
}
],
"symlink_target": ""
} |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="IActionDependentFeatureExample.cs">
// Copyright (c) by respective owners including Yahoo!, Microsoft, and
// individual contributors. All rights reserved. Released under a BSD
// license as described in the file LICENSE.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
using System.Collections.Generic;
namespace VW.Interfaces
{
/// <summary>
/// Types supporting action dependent features must implement this interface.
/// </summary>
/// <typeparam name="T">Type of each action dependent feature.</typeparam>
public interface IActionDependentFeatureExample<out T>
{
IReadOnlyList<T> ActionDependentFeatures { get; }
}
}
| {
"content_hash": "17720855262d46c16ac2588b53560b65",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 120,
"avg_line_length": 42.714285714285715,
"alnum_prop": 0.5239687848383501,
"repo_name": "emjotde/vowpal_wabbit",
"id": "4e91ff1cd0bfeab18e0d816e828b10d5d9066a93",
"size": "899",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cs/Interfaces/IActionDependentFeatures.cs",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ANTLR",
"bytes": "755"
},
{
"name": "Batchfile",
"bytes": "3201"
},
{
"name": "C",
"bytes": "10934"
},
{
"name": "C#",
"bytes": "227278"
},
{
"name": "C++",
"bytes": "1125493"
},
{
"name": "HTML",
"bytes": "13166"
},
{
"name": "Java",
"bytes": "21271"
},
{
"name": "KiCad",
"bytes": "99"
},
{
"name": "Makefile",
"bytes": "131204"
},
{
"name": "Perl",
"bytes": "135969"
},
{
"name": "Python",
"bytes": "63979"
},
{
"name": "R",
"bytes": "14865"
},
{
"name": "Ruby",
"bytes": "5219"
},
{
"name": "Shell",
"bytes": "46601"
},
{
"name": "Tcl",
"bytes": "182"
}
],
"symlink_target": ""
} |
//
// TTShowGift.m
// TTShow
//
// Created by twb on 13-6-18.
// Copyright (c) 2013年 twb. All rights reserved.
//
#import "TTShowGift.h"
#pragma mark - Gift Category
@implementation TTShowGiftCategory
- (id)initWithAttributes:(NSDictionary *)attributes
{
self = [super init];
if (!self) {
return nil;
}
/*
@property (nonatomic, assign) NSUInteger _id;
@property (nonatomic, assign) NSUInteger lucky;
@property (nonatomic, strong) NSString *name;
@property (nonatomic, assign) NSUInteger order;
@property (nonatomic, assign) CGFloat ratio;
@property (nonatomic, assign) NSUInteger status;
@property (nonatomic, assign) NSUInteger vip;
*/
self._id = [[attributes valueForKey:@"_id"] unsignedIntegerValue];
self.lucky = [[attributes valueForKey:@"lucky"] unsignedIntegerValue];
self.name = [attributes valueForKey:@"name"];
self.order = [[attributes valueForKey:@"order"] unsignedIntegerValue];
self.ratio = [[attributes valueForKey:@"ratio"] floatValue];
self.status = [[attributes valueForKey:@"status"] unsignedIntegerValue];
self.vip = [[attributes valueForKey:@"vip"] unsignedIntegerValue];
return self;
}
@end
#pragma mark - Gift Class.
@implementation TTShowGift
- (id)initWithAttributes:(NSDictionary *)attributes {
self = [super init];
if (!self) {
return nil;
}
/*
@property (nonatomic, assign) NSUInteger _id;
@property (nonatomic, strong) NSString *name;
@property (nonatomic, strong) NSString *pic_url;
@property (nonatomic, strong) NSString *swf_url;
@property (nonatomic, strong) NSString *pic_pre_url;
@property (nonatomic, assign) NSUInteger coin_price;
@property (nonatomic, assign) NSUInteger category_id;
@property (nonatomic, assign) NSUInteger count;
@property (nonatomic, assign) NSUInteger star;
@property (nonatomic, assign) NSUInteger star_limit;
@property (nonatomic, assign) NSUInteger status;
@property (nonatomic, assign) NSUInteger order;
*/
self._id = [[attributes valueForKey:@"_id"] unsignedIntegerValue];
self.name = [attributes valueForKey:@"name"];
self.pic_url = [attributes valueForKey:@"pic_url"];
self.swf_url = [attributes valueForKey:@"swf_url"];
self.pic_pre_url = [attributes valueForKey:@"pic_pre_url"];
self.coin_price = [[attributes valueForKey:@"coin_price"] unsignedIntegerValue];
self.category_id = [[attributes valueForKey:@"category_id"] unsignedIntegerValue];
self.count = [[attributes valueForKey:@"count"] unsignedIntegerValue];
self.sale = [[attributes valueForKey:@"sale"] boolValue];
// self.sale = [[attributes valueForKey:@"isHot"] boolValue];
// self.sale = [[attributes valueForKey:@"isNew"] boolValue];
self.star = [[attributes valueForKey:@"star"] unsignedIntegerValue];
self.star_limit = [[attributes valueForKey:@"star_limit"] unsignedIntegerValue];
self.status = [[attributes valueForKey:@"status"] unsignedIntegerValue];
self.order = [[attributes valueForKey:@"order"] unsignedIntegerValue];
return self;
}
#pragma mark - NSCoding Protocal
- (void)encodeWithCoder:(NSCoder *)aCoder
{
[aCoder encodeObject:[NSNumber numberWithUnsignedInteger:self._id] forKey:@"_id"];
[aCoder encodeObject:self.name forKey:@"name"];
[aCoder encodeObject:self.pic_url forKey:@"pic_url"];
[aCoder encodeObject:self.swf_url forKey:@"swf_url"];
[aCoder encodeObject:self.pic_pre_url forKey:@"pic_pre_url"];
[aCoder encodeObject:[NSNumber numberWithUnsignedInteger:self.coin_price] forKey:@"coin_price"];
[aCoder encodeObject:[NSNumber numberWithUnsignedInteger:self.category_id] forKey:@"category_id"];
[aCoder encodeObject:[NSNumber numberWithUnsignedInteger:self.count] forKey:@"count"];
[aCoder encodeObject:[NSNumber numberWithBool:self.sale] forKey:@"sale"];
[aCoder encodeObject:@(self.star) forKey:@"star"];
[aCoder encodeObject:@(self.star_limit) forKey:@"star_limit"];
[aCoder encodeObject:@(self.status) forKey:@"status"];
[aCoder encodeObject:@(self.order) forKey:@"order"];
}
- (id)initWithCoder:(NSCoder *)aDecoder
{
if (self = [super init])
{
self._id = [[aDecoder decodeObjectForKey:@"_id"] unsignedIntegerValue];
self.name = [aDecoder decodeObjectForKey:@"name"];
self.pic_url = [aDecoder decodeObjectForKey:@"pic_url"];
self.swf_url = [aDecoder decodeObjectForKey:@"swf_url"];
self.pic_pre_url = [aDecoder decodeObjectForKey:@"pic_pre_url"];
self.coin_price = [[aDecoder decodeObjectForKey:@"coin_price"] unsignedIntegerValue];
self.category_id = [[aDecoder decodeObjectForKey:@"category_id"] unsignedIntegerValue];
self.count = [[aDecoder decodeObjectForKey:@"count"] unsignedIntegerValue];
self.sale = [[aDecoder decodeObjectForKey:@"sale"] boolValue];
self.star = [[aDecoder decodeObjectForKey:@"star"] unsignedIntegerValue];
self.star_limit = [[aDecoder decodeObjectForKey:@"star_limit"] unsignedIntegerValue];
self.status = [[aDecoder decodeObjectForKey:@"status"] unsignedIntegerValue];
self.order = [[aDecoder decodeObjectForKey:@"order"] unsignedIntegerValue];
}
return self;
}
@end
#pragma mark - Xiaowo Class.
@implementation XiaowoGift
- (id)initWithAttributes:(NSDictionary *)attributes {
self = [super init];
if (!self) {
return nil;
}
/*
@property (nonatomic, assign) NSUInteger _id;
@property (nonatomic, strong) NSString *swf_url;
@property (nonatomic, assign) float boxer_ratio;
@property (nonatomic, assign) NSUInteger order;
@property (nonatomic, assign) float ratio;
@property (nonatomic, assign) NSUInteger status;
@property (nonatomic, strong) NSString *name;
@property (nonatomic, assign) NSUInteger coin_price;
@property (nonatomic, strong) NSString *pic_url;
@property (nonatomic, strong) NSString *pic_pre_url;
*/
self._id = [[attributes valueForKey:@"_id"] unsignedIntegerValue];
self.name = [attributes valueForKey:@"name"];
self.pic_url = [attributes valueForKey:@"pic_url"];
self.swf_url = [attributes valueForKey:@"swf_url"];
self.pic_pre_url = [attributes valueForKey:@"pic_pre_url"];
self.coin_price = [[attributes valueForKey:@"coin_price"] unsignedIntegerValue];
self.status = [[attributes valueForKey:@"status"] unsignedIntegerValue];
self.order = [[attributes valueForKey:@"order"] unsignedIntegerValue];
self.boxer_ratio = [[attributes valueForKey:@"boxer_ratio"] floatValue];
self.ratio = [[attributes valueForKey:@"order"] floatValue];
return self;
}
#pragma mark - NSCoding Protocal
- (void)encodeWithCoder:(NSCoder *)aCoder
{
[aCoder encodeObject:[NSNumber numberWithUnsignedInteger:self._id] forKey:@"_id"];
[aCoder encodeObject:self.name forKey:@"name"];
[aCoder encodeObject:self.pic_url forKey:@"pic_url"];
[aCoder encodeObject:self.swf_url forKey:@"swf_url"];
[aCoder encodeObject:self.pic_pre_url forKey:@"pic_pre_url"];
[aCoder encodeObject:[NSNumber numberWithUnsignedInteger:self.coin_price] forKey:@"coin_price"];
// boxer_ratio ratio
[aCoder encodeObject:@(self.status) forKey:@"status"];
[aCoder encodeObject:@(self.order) forKey:@"order"];
}
- (id)initWithCoder:(NSCoder *)aDecoder
{
if (self = [super init])
{
self._id = [[aDecoder decodeObjectForKey:@"_id"] unsignedIntegerValue];
self.name = [aDecoder decodeObjectForKey:@"name"];
self.pic_url = [aDecoder decodeObjectForKey:@"pic_url"];
self.swf_url = [aDecoder decodeObjectForKey:@"swf_url"];
self.pic_pre_url = [aDecoder decodeObjectForKey:@"pic_pre_url"];
self.coin_price = [[aDecoder decodeObjectForKey:@"coin_price"] unsignedIntegerValue];
// boxer_ratio ratio
self.status = [[aDecoder decodeObjectForKey:@"status"] unsignedIntegerValue];
self.order = [[aDecoder decodeObjectForKey:@"order"] unsignedIntegerValue];
}
return self;
}
@end | {
"content_hash": "69d9822bbe40931535b9e266fb3046ab",
"timestamp": "",
"source": "github",
"line_count": 206,
"max_line_length": 102,
"avg_line_length": 39.771844660194176,
"alnum_prop": 0.68058098376663,
"repo_name": "wangzhi1027/MeMeZhiBoSDK",
"id": "e4fd39c81c0cf77c46676e15cad239a6e1a37a83",
"size": "8195",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "MeMeSDK/MeMeZhiBo/Model/TTShowGift.m",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1340744"
},
{
"name": "C++",
"bytes": "97868"
},
{
"name": "Objective-C",
"bytes": "3322094"
},
{
"name": "Ruby",
"bytes": "1224"
}
],
"symlink_target": ""
} |
<HTML><HEAD>
<TITLE>Review for Lost in Space (1998)</TITLE>
<LINK REL="STYLESHEET" TYPE="text/css" HREF="/ramr.css">
</HEAD>
<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
<H1 ALIGN="CENTER" CLASS="title"><A HREF="/Title?0120738">Lost in Space (1998)</A></H1><H3 ALIGN=CENTER>reviewed by<BR><A HREF="/ReviewsBy?Nathaniel+R.+Atcheson">Nathaniel R. Atcheson</A></H3><HR WIDTH="40%" SIZE="4">
<PRE>Lost in Space (1998)</PRE>
<P>Director: Stephen Hopkins
Cast: William Hurt, Gary Oldman, Heather Graham, Mimi Rogers, Matt
LeBlanc, Lacey Chabert, Jack Johnson
Screenplay: Akiva Goldsman
Producers: Akiva Goldsman, Stephen Hopkins, Mark W. Koch
Runtime:
US Distribution: New Line Cinema
Rated PG-13: sci-fi violence</P>
<P>By Nathaniel R. Atcheson (<A HREF="mailto:[email protected]">[email protected]</A>)</P>
<P> If you're going to make a film that has absolutely nothing of interest
aside from its special effects, you might consider making the special
effects good. Lost in Space, the new spin-off of the old TV series,
does not abide by this seemingly-obvious bit of logic, and the result is
a film that has a silly story complimented by unimpressive visual
effects and sets. I can watch and enjoy a film that is mostly style and
little substance (Starship Troopers, for example), but the style has to
be stylish, not dull and rehashed like the "style" in Lost in Space. </P>
<P> Lost in Space isn't blatantly awful, but it comes dangerously close to
being so. There are too many characters (none of whom have any
noticeable personality), the story is too thin (and disintegrates in to
incoherence by the final scene), the interesting elements are all lifted
from other films, and, like I said, the special effects aren't special.
It's like trying to make a cake out of nothing but water and flour.
You'll end up with something, but it won't taste good. </P>
<P> We are introduced to one of the several main characters in an
introductory space-battle that is disturbingly close to the battle
scenes in the Star Wars trilogy. The hero is Major Don West (Matt
LeBlanc), and he's your typical hard-edged pilot. He's recruited to
escort the Robinsons (a really smart family-of-five) into the depths of
space so that they can find a new place for humans to live, because the
Earth can only support human life for twenty more years. </P>
<P> The Robinsons are led by John (William Hurt) and Maureen (Mimi
Rogers). The children are Judy (Heather Graham), Penny (Lacey Chabert),
and Will (Jack Johnson). Also onboard is Dr. Smith (Gary Oldman), who
tries to sabotage the mission, but fails, and gets caught. Well, he
only fails partially: he triggers a series of light shows that result
in the Robinson's ship careening into the unknown corners of uncharted
space. Finally, about forty-five minutes into the film, they are
actually lost, as the title rightfully implies. </P>
<P> After this, the story of Lost in Space is muddled, at best. They end
up at some lost ship (which reminded me of Event Horizon, though I know
that film is too recent for Lost in Space to have ripped it off), and
they have to fight off these metal spiders that have nothing to do with
anything. Later, they crash on this planet in a stunt that reminded me
a lot of Star Trek: Generations, and they uncover a time-travel plot by
a character whose identity must be hidden for the sake of shock. Anyone
seen any of this before? </P>
<P> What a tiresome journey it is getting to the end of this film. I knew
I was in trouble when the opening battle looked significantly worse than
anything in any of the three Star Wars films (those films are twenty
years younger than this one!). After that, the best special effects are
cartoony: they are pleasing to the eye, but they look fake. Unlike the
digital bugs in Starship Troopers or the dinosaurs in Spielberg's Park
movies, nothing here inspires us with awe and wonder. It all looks
distractingly digital, and it just doesn't mesh with the live-action
performers.</P>
<P> Speaking of live action, I've never seen a group of such talented
individuals go to waste like this. Gary Oldman is good (I hear he
recreates the character from the television series perfectly), and Matt
LeBlanc is sometimes amusing, but the rest of these actors are bland.
The biggest disappointment is Heather Graham, who was wonderful in
Boogie Nights; she isn't really given anything to work with, and, in
turn, doesn't end up doing anything. William Hurt and Mimi Rogers spew
their lines like actors getting paid to spew lines. The other two kids,
Chabert and Johnson, do their best, but are not very memorable. I don't
blame any of these performers: the script simply gives them nothing to
work with.</P>
<P> I really expected to enjoy this film. I don't understand how or why
Lost in Space came out so dull and familiar. There are a few pleasures,
though: the last-minute escape in the final scene is fun, and some of
the sequences are exciting despite their ultimate uselessness in
relation to the story. And the actors, despite their lack of fire, are
pleasant to watch. But Lost in Space is little more than a bundle of
missed opportunities replaced by scenes we've all seen hundreds of
times. But hope is not lost: if this becomes a series, just remember
what Star Trek II managed to do.</P>
<PRE>** out of ****
(5/10, C-)</PRE>
<PRE><HR> Visit FILM PSYCHOSIS at
<A HREF="http://www.pyramid.net/natesmovies">http://www.pyramid.net/natesmovies</A></PRE>
<PRE> Nathaniel R. Atcheson<HR></PRE>
<HR><P CLASS=flush><SMALL>The review above was posted to the
<A HREF="news:rec.arts.movies.reviews">rec.arts.movies.reviews</A> newsgroup (<A HREF="news:de.rec.film.kritiken">de.rec.film.kritiken</A> for German reviews).<BR>
The Internet Movie Database accepts no responsibility for the contents of the
review and has no editorial control. Unless stated otherwise, the copyright
belongs to the author.<BR>
Please direct comments/criticisms of the review to relevant newsgroups.<BR>
Broken URLs inthe reviews are the responsibility of the author.<BR>
The formatting of the review is likely to differ from the original due
to ASCII to HTML conversion.
</SMALL></P>
<P ALIGN=CENTER>Related links: <A HREF="/Reviews/">index of all rec.arts.movies.reviews reviews</A></P>
</P></BODY></HTML>
| {
"content_hash": "7172cb55af46ce3b63ac51b2ed99e7c5",
"timestamp": "",
"source": "github",
"line_count": 103,
"max_line_length": 217,
"avg_line_length": 63.077669902912625,
"alnum_prop": 0.7411112821302139,
"repo_name": "xianjunzhengbackup/code",
"id": "40b6141ec00d64d54b87515f35712b137972578d",
"size": "6497",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data science/machine_learning_for_the_web/chapter_4/movie/11830.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "BitBake",
"bytes": "113"
},
{
"name": "BlitzBasic",
"bytes": "256"
},
{
"name": "CSS",
"bytes": "49827"
},
{
"name": "HTML",
"bytes": "157006325"
},
{
"name": "JavaScript",
"bytes": "14029"
},
{
"name": "Jupyter Notebook",
"bytes": "4875399"
},
{
"name": "Mako",
"bytes": "2060"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "874414"
},
{
"name": "R",
"bytes": "454"
},
{
"name": "Shell",
"bytes": "3984"
}
],
"symlink_target": ""
} |
from __future__ import print_function
import io
import os
import platform
import sys
from distutils.command.build_ext import build_ext
from shutil import copytree, copy, rmtree
from setuptools import setup, Extension
if sys.version_info < (3, 6):
print("Python versions prior to 3.6 are not supported for PyFlink.",
file=sys.stderr)
sys.exit(-1)
def remove_if_exists(file_path):
if os.path.exists(file_path):
if os.path.islink(file_path) or os.path.isfile(file_path):
os.remove(file_path)
else:
assert os.path.isdir(file_path)
rmtree(file_path)
def copy_files(src_paths, output_directory):
for src_path, file_mode in src_paths:
if os.path.isdir(src_path):
child_files = os.listdir(src_path)
for child_file in child_files:
dst_path = copy(os.path.join(src_path, child_file), output_directory)
os.chmod(dst_path, file_mode)
else:
dst_path = copy(src_path, os.path.join(output_directory, os.path.basename(src_path)))
os.chmod(dst_path, file_mode)
def has_unsupported_tag(file_element):
unsupported_tags = ['includes', 'exclueds']
for unsupported_tag in unsupported_tags:
if file_element.getElementsByTagName(unsupported_tag):
print('Unsupported <{0}></{1}> tag'.format(unsupported_tag, unsupported_tag))
return True
return False
def extracted_output_files(base_dir, file_path, output_directory):
extracted_file_paths = []
from xml.dom.minidom import parse
dom = parse(file_path)
root_data = dom.documentElement
file_elements = (root_data.getElementsByTagName("files")[0]).getElementsByTagName("file")
# extracted <files><file></file></files>
for file_element in file_elements:
source = ((file_element.getElementsByTagName('source')[0]).childNodes[0]).data
file_mode = int(((file_element.getElementsByTagName('fileMode')[0]).childNodes[0]).data, 8)
try:
dst = ((file_element.getElementsByTagName('outputDirectory')[0]).childNodes[0]).data
if dst == output_directory:
if has_unsupported_tag(file_element):
sys.exit(-1)
extracted_file_paths.append((os.path.join(base_dir, source), file_mode))
except IndexError:
pass
# extracted <fileSets><fileSet></fileSet></fileSets>
file_elements = (root_data.getElementsByTagName("fileSets")[0]).getElementsByTagName("fileSet")
for file_element in file_elements:
source = ((file_element.getElementsByTagName('directory')[0]).childNodes[0]).data
file_mode = int(((file_element.getElementsByTagName('fileMode')[0]).childNodes[0]).data, 8)
try:
dst = ((file_element.getElementsByTagName('outputDirectory')[0]).childNodes[0]).data
if dst == output_directory:
if has_unsupported_tag(file_element):
sys.exit(-1)
extracted_file_paths.append((os.path.join(base_dir, source), file_mode))
except IndexError:
pass
return extracted_file_paths
# Currently Cython optimizing doesn't support Windows.
if platform.system() == 'Windows':
extensions = ([])
else:
try:
from Cython.Build import cythonize
extensions = cythonize([
Extension(
name="pyflink.fn_execution.coder_impl_fast",
sources=["pyflink/fn_execution/coder_impl_fast.pyx"],
include_dirs=["pyflink/fn_execution/"]),
Extension(
name="pyflink.fn_execution.table.aggregate_fast",
sources=["pyflink/fn_execution/table/aggregate_fast.pyx"],
include_dirs=["pyflink/fn_execution/table/"]),
Extension(
name="pyflink.fn_execution.table.window_aggregate_fast",
sources=["pyflink/fn_execution/table/window_aggregate_fast.pyx"],
include_dirs=["pyflink/fn_execution/table/"]),
Extension(
name="pyflink.fn_execution.stream_fast",
sources=["pyflink/fn_execution/stream_fast.pyx"],
include_dirs=["pyflink/fn_execution/"]),
Extension(
name="pyflink.fn_execution.beam.beam_stream_fast",
sources=["pyflink/fn_execution/beam/beam_stream_fast.pyx"],
include_dirs=["pyflink/fn_execution/beam"]),
Extension(
name="pyflink.fn_execution.beam.beam_coder_impl_fast",
sources=["pyflink/fn_execution/beam/beam_coder_impl_fast.pyx"],
include_dirs=["pyflink/fn_execution/beam"]),
Extension(
name="pyflink.fn_execution.beam.beam_operations_fast",
sources=["pyflink/fn_execution/beam/beam_operations_fast.pyx"],
include_dirs=["pyflink/fn_execution/beam"]),
])
except ImportError:
if os.path.exists("pyflink/fn_execution/coder_impl_fast.c"):
extensions = ([
Extension(
name="pyflink.fn_execution.coder_impl_fast",
sources=["pyflink/fn_execution/coder_impl_fast.c"],
include_dirs=["pyflink/fn_execution/"]),
Extension(
name="pyflink.fn_execution.table.aggregate_fast",
sources=["pyflink/fn_execution/table/aggregate_fast.c"],
include_dirs=["pyflink/fn_execution/table/"]),
Extension(
name="pyflink.fn_execution.table.window_aggregate_fast",
sources=["pyflink/fn_execution/table/window_aggregate_fast.c"],
include_dirs=["pyflink/fn_execution/table/"]),
Extension(
name="pyflink.fn_execution.stream_fast",
sources=["pyflink/fn_execution/stream_fast.c"],
include_dirs=["pyflink/fn_execution/"]),
Extension(
name="pyflink.fn_execution.beam.beam_stream_fast",
sources=["pyflink/fn_execution/beam/beam_stream_fast.c"],
include_dirs=["pyflink/fn_execution/beam"]),
Extension(
name="pyflink.fn_execution.beam.beam_coder_impl_fast",
sources=["pyflink/fn_execution/beam/beam_coder_impl_fast.c"],
include_dirs=["pyflink/fn_execution/beam"]),
Extension(
name="pyflink.fn_execution.beam.beam_operations_fast",
sources=["pyflink/fn_execution/beam/beam_operations_fast.c"],
include_dirs=["pyflink/fn_execution/beam"]),
])
else:
extensions = ([])
this_directory = os.path.abspath(os.path.dirname(__file__))
version_file = os.path.join(this_directory, 'pyflink/version.py')
try:
exec(open(version_file).read())
except IOError:
print("Failed to load PyFlink version file for packaging. " +
"'%s' not found!" % version_file,
file=sys.stderr)
sys.exit(-1)
VERSION = __version__ # noqa
with io.open(os.path.join(this_directory, 'README.md'), 'r', encoding='utf-8') as f:
long_description = f.read()
TEMP_PATH = "deps"
CONF_TEMP_PATH = os.path.join(TEMP_PATH, "conf")
LOG_TEMP_PATH = os.path.join(TEMP_PATH, "log")
EXAMPLES_TEMP_PATH = os.path.join(TEMP_PATH, "examples")
SCRIPTS_TEMP_PATH = os.path.join(TEMP_PATH, "bin")
LICENSE_FILE_TEMP_PATH = os.path.join(this_directory, "LICENSE")
README_FILE_TEMP_PATH = os.path.join("pyflink", "README.txt")
PYFLINK_UDF_RUNNER_SH = "pyflink-udf-runner.sh"
PYFLINK_UDF_RUNNER_BAT = "pyflink-udf-runner.bat"
in_flink_source = os.path.isfile("../flink-java/src/main/java/org/apache/flink/api/java/"
"ExecutionEnvironment.java")
try:
if in_flink_source:
try:
os.mkdir(TEMP_PATH)
except:
print("Temp path for symlink to parent already exists {0}".format(TEMP_PATH),
file=sys.stderr)
sys.exit(-1)
flink_version = VERSION.replace(".dev0", "-SNAPSHOT")
FLINK_HOME = os.path.abspath(
"../flink-dist/target/flink-%s-bin/flink-%s" % (flink_version, flink_version))
FLINK_ROOT = os.path.abspath("..")
FLINK_DIST = os.path.join(FLINK_ROOT, "flink-dist")
FLINK_BIN = os.path.join(FLINK_DIST, "src/main/flink-bin")
EXAMPLES_PATH = os.path.join(this_directory, "pyflink/examples")
LICENSE_FILE_PATH = os.path.join(FLINK_ROOT, "LICENSE")
README_FILE_PATH = os.path.join(FLINK_BIN, "README.txt")
FLINK_BIN_XML_FILE = os.path.join(FLINK_BIN, '../assemblies/bin.xml')
# copy conf files
os.mkdir(CONF_TEMP_PATH)
conf_paths = extracted_output_files(FLINK_DIST, FLINK_BIN_XML_FILE, 'conf')
copy_files(conf_paths, CONF_TEMP_PATH)
# copy bin files
os.mkdir(SCRIPTS_TEMP_PATH)
script_paths = extracted_output_files(FLINK_DIST, FLINK_BIN_XML_FILE, 'bin')
copy_files(script_paths, SCRIPTS_TEMP_PATH)
copy(os.path.join(this_directory, "pyflink", "bin", PYFLINK_UDF_RUNNER_SH),
os.path.join(SCRIPTS_TEMP_PATH, PYFLINK_UDF_RUNNER_SH))
copy(os.path.join(this_directory, "pyflink", "bin", PYFLINK_UDF_RUNNER_BAT),
os.path.join(SCRIPTS_TEMP_PATH, PYFLINK_UDF_RUNNER_BAT))
try:
os.symlink(EXAMPLES_PATH, EXAMPLES_TEMP_PATH)
os.symlink(LICENSE_FILE_PATH, LICENSE_FILE_TEMP_PATH)
os.symlink(README_FILE_PATH, README_FILE_TEMP_PATH)
except BaseException: # pylint: disable=broad-except
copytree(EXAMPLES_PATH, EXAMPLES_TEMP_PATH)
copy(LICENSE_FILE_PATH, LICENSE_FILE_TEMP_PATH)
copy(README_FILE_PATH, README_FILE_TEMP_PATH)
os.mkdir(LOG_TEMP_PATH)
with open(os.path.join(LOG_TEMP_PATH, "empty.txt"), 'w') as f:
f.write("This file is used to force setuptools to include the log directory. "
"You can delete it at any time after installation.")
else:
if not os.path.isdir(SCRIPTS_TEMP_PATH):
print("The flink core files are not found. Please make sure your installation package "
"is complete, or do this in the flink-python directory of the flink source "
"directory.")
sys.exit(-1)
if VERSION.find('dev0') != -1:
apache_flink_libraries_dependency = 'apache-flink-libraries==%s' % VERSION
else:
split_versions = VERSION.split('.')
split_versions[-1] = str(int(split_versions[-1]) + 1)
NEXT_VERSION = '.'.join(split_versions)
apache_flink_libraries_dependency = 'apache-flink-libraries>=%s,<%s' % \
(VERSION, NEXT_VERSION)
script_names = ["pyflink-shell.sh", "find-flink-home.sh"]
scripts = [os.path.join(SCRIPTS_TEMP_PATH, script) for script in script_names]
scripts.append("pyflink/find_flink_home.py")
PACKAGES = ['pyflink',
'pyflink.table',
'pyflink.util',
'pyflink.datastream',
'pyflink.common',
'pyflink.fn_execution',
'pyflink.fn_execution.beam',
'pyflink.fn_execution.datastream',
'pyflink.fn_execution.datastream.window',
'pyflink.fn_execution.table',
'pyflink.fn_execution.utils',
'pyflink.metrics',
'pyflink.conf',
'pyflink.log',
'pyflink.examples',
'pyflink.bin']
PACKAGE_DIR = {
'pyflink.conf': TEMP_PATH + '/conf',
'pyflink.log': TEMP_PATH + '/log',
'pyflink.examples': TEMP_PATH + '/examples',
'pyflink.bin': TEMP_PATH + '/bin'}
PACKAGE_DATA = {
'pyflink': ['README.txt'],
'pyflink.conf': ['*'],
'pyflink.log': ['*'],
'pyflink.examples': ['*.py', '*/*.py'],
'pyflink.bin': ['*']}
setup(
name='apache-flink',
version=VERSION,
packages=PACKAGES,
include_package_data=True,
package_dir=PACKAGE_DIR,
package_data=PACKAGE_DATA,
scripts=scripts,
url='https://flink.apache.org',
license='https://www.apache.org/licenses/LICENSE-2.0',
author='Apache Software Foundation',
author_email='[email protected]',
python_requires='>=3.6',
install_requires=['py4j==0.10.8.1', 'python-dateutil==2.8.0', 'apache-beam==2.27.0',
'cloudpickle==1.2.2', 'avro-python3>=1.8.1,!=1.9.2,<1.10.0',
'pandas>=1.0,<1.2.0', 'pyarrow>=0.15.1,<3.0.0',
'pytz>=2018.3', 'numpy>=1.14.3,<1.20', 'fastavro>=0.21.4,<0.24',
'requests>=2.26.0', 'protobuf<3.18',
apache_flink_libraries_dependency],
cmdclass={'build_ext': build_ext},
tests_require=['pytest==4.4.1'],
description='Apache Flink Python API',
long_description=long_description,
long_description_content_type='text/markdown',
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8'],
ext_modules=extensions
)
finally:
if in_flink_source:
remove_if_exists(TEMP_PATH)
remove_if_exists(LICENSE_FILE_TEMP_PATH)
remove_if_exists(README_FILE_TEMP_PATH)
| {
"content_hash": "016682309c7cf23c797797b1205787a1",
"timestamp": "",
"source": "github",
"line_count": 318,
"max_line_length": 99,
"avg_line_length": 43.528301886792455,
"alnum_prop": 0.5831527235948563,
"repo_name": "StephanEwen/incubator-flink",
"id": "6905da210f171b4ee18c7f521ef98a6642484e18",
"size": "14800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flink-python/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4588"
},
{
"name": "CSS",
"bytes": "57936"
},
{
"name": "Clojure",
"bytes": "90539"
},
{
"name": "Dockerfile",
"bytes": "10807"
},
{
"name": "FreeMarker",
"bytes": "11924"
},
{
"name": "HTML",
"bytes": "224454"
},
{
"name": "Java",
"bytes": "48116532"
},
{
"name": "JavaScript",
"bytes": "1829"
},
{
"name": "Makefile",
"bytes": "5134"
},
{
"name": "Python",
"bytes": "747014"
},
{
"name": "Scala",
"bytes": "13208937"
},
{
"name": "Shell",
"bytes": "461052"
},
{
"name": "TypeScript",
"bytes": "243702"
}
],
"symlink_target": ""
} |
import { createSelector } from 'reselect';
export default recordsByIdSelector => createSelector(
recordsByIdSelector,
recordsById => Object.keys(recordsById)
);
| {
"content_hash": "05367a4c7d2bf08427a74cfd490b8a91",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 53,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.7771084337349398,
"repo_name": "slightlytyler/arrowsmith-web-starter",
"id": "56242560a4246496e4da02e30393ad59d2597229",
"size": "166",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/helpers/selectors/createGetAllRecordIdsSelector.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1039"
},
{
"name": "HTML",
"bytes": "214"
},
{
"name": "JavaScript",
"bytes": "50725"
}
],
"symlink_target": ""
} |
\hypertarget{class_t_i_m_e_1_1_time_exception}{}\section{T\+I\+M\+E\+:\+:Time\+Exception Class Reference}
\label{class_t_i_m_e_1_1_time_exception}\index{T\+I\+M\+E\+::\+Time\+Exception@{T\+I\+M\+E\+::\+Time\+Exception}}
Classe permettant de g�rer les exceptions des classes du namespace \hyperlink{namespace_t_i_m_e}{T\+I\+M\+E}.
{\ttfamily \#include $<$timing.\+h$>$}
\subsection*{Public Member Functions}
\begin{DoxyCompactItemize}
\item
\hyperlink{class_t_i_m_e_1_1_time_exception_a08502d82065dd79b27cd954b45f4d5c7}{Time\+Exception} (const std\+::string \&m)
\begin{DoxyCompactList}\small\item\em Constructeur � partir d\textquotesingle{}une string. \end{DoxyCompactList}\item
const std\+::string \& \hyperlink{class_t_i_m_e_1_1_time_exception_ad86c212253ea1b8654f4cae34611d634}{Get\+Info} () const
\begin{DoxyCompactList}\small\item\em Retourne l\textquotesingle{}information stock�e dans la classe. \end{DoxyCompactList}\end{DoxyCompactItemize}
\subsection{Detailed Description}
Classe permettant de g�rer les exceptions des classes du namespace \hyperlink{namespace_t_i_m_e}{T\+I\+M\+E}.
\subsection{Constructor \& Destructor Documentation}
\hypertarget{class_t_i_m_e_1_1_time_exception_a08502d82065dd79b27cd954b45f4d5c7}{}\index{T\+I\+M\+E\+::\+Time\+Exception@{T\+I\+M\+E\+::\+Time\+Exception}!Time\+Exception@{Time\+Exception}}
\index{Time\+Exception@{Time\+Exception}!T\+I\+M\+E\+::\+Time\+Exception@{T\+I\+M\+E\+::\+Time\+Exception}}
\subsubsection[{Time\+Exception}]{\setlength{\rightskip}{0pt plus 5cm}T\+I\+M\+E\+::\+Time\+Exception\+::\+Time\+Exception (
\begin{DoxyParamCaption}
\item[{const std\+::string \&}]{m}
\end{DoxyParamCaption}
)\hspace{0.3cm}{\ttfamily [inline]}}\label{class_t_i_m_e_1_1_time_exception_a08502d82065dd79b27cd954b45f4d5c7}
Constructeur � partir d\textquotesingle{}une string.
\subsection{Member Function Documentation}
\hypertarget{class_t_i_m_e_1_1_time_exception_ad86c212253ea1b8654f4cae34611d634}{}\index{T\+I\+M\+E\+::\+Time\+Exception@{T\+I\+M\+E\+::\+Time\+Exception}!Get\+Info@{Get\+Info}}
\index{Get\+Info@{Get\+Info}!T\+I\+M\+E\+::\+Time\+Exception@{T\+I\+M\+E\+::\+Time\+Exception}}
\subsubsection[{Get\+Info}]{\setlength{\rightskip}{0pt plus 5cm}const std\+::string\& T\+I\+M\+E\+::\+Time\+Exception\+::\+Get\+Info (
\begin{DoxyParamCaption}
{}
\end{DoxyParamCaption}
) const\hspace{0.3cm}{\ttfamily [inline]}}\label{class_t_i_m_e_1_1_time_exception_ad86c212253ea1b8654f4cae34611d634}
Retourne l\textquotesingle{}information stock�e dans la classe.
The documentation for this class was generated from the following file\+:\begin{DoxyCompactItemize}
\item
L\+O21app/\hyperlink{timing_8h}{timing.\+h}\end{DoxyCompactItemize}
| {
"content_hash": "06ebde1d2aa63097408bf9d8f527abf9",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 189,
"avg_line_length": 50.074074074074076,
"alnum_prop": 0.7222633136094675,
"repo_name": "mmewen/LO21",
"id": "eec44b395b870926a97acf13f3644f2a5e7ada13",
"size": "2716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/latex/class_t_i_m_e_1_1_time_exception.tex",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "131252"
},
{
"name": "HTML",
"bytes": "4571"
},
{
"name": "QMake",
"bytes": "342"
}
],
"symlink_target": ""
} |
@interface EyeTextView : UITextView
@property(nonatomic, copy)NSString *placeholder;
@property(nonatomic, strong)UIColor *placeholderColor;
@end
| {
"content_hash": "77647df00f8630702828d57a43f8eda0",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 54,
"avg_line_length": 24.5,
"alnum_prop": 0.8095238095238095,
"repo_name": "chenzhenyong1/Kaka",
"id": "5a157a6c1bc2b238043cd2658edc07cedc38b107",
"size": "349",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "KaKa/KaKa/ViewController/FindClasses/Other/View/EyeTextView.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "212098"
},
{
"name": "Objective-C",
"bytes": "2511724"
},
{
"name": "Objective-C++",
"bytes": "70214"
},
{
"name": "Ruby",
"bytes": "2149"
}
],
"symlink_target": ""
} |
For now just sorting a given GraphQL IDL schema in alphabetical order.

| {
"content_hash": "d29c303c9f3fb1cc34075084896c819b",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 127,
"avg_line_length": 66.66666666666667,
"alnum_prop": 0.82,
"repo_name": "skorfmann/atom-graphql-toolkit",
"id": "b8bf4d17240d71adf34ebfd72a8da26ae554d9ed",
"size": "232",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "243"
},
{
"name": "JavaScript",
"bytes": "3651"
}
],
"symlink_target": ""
} |
#ifndef _CHATHANDLERS_H_
#define _CHATHANDLERS_H_
/*************
** INCLUDES **
*************/
#include "chat.h"
#include "chatSocket.h"
/**********
** TYPES **
**********/
typedef struct ciServerMessageType
{
char * command;
void (* handler)(CHAT chat, const ciServerMessage * message);
} ciServerMessageType;
typedef char ** ciCommands;
typedef struct ciServerMessageFilter
{
int type;
gsi_time timeout;
char * name;
char * name2;
void * callback;
void * callback2;
void * param;
void * data;
int ID;
struct ciServerMessageFilter * pnext;
} ciServerMessageFilter;
/************
** GLOBALS **
************/
extern ciServerMessageType serverMessageTypes[];
extern int numServerMessageTypes;
/**************
** FUNCTIONS **
**************/
void ciFilterThink(CHAT chat);
void ciCleanupFilters(CHAT chat);
int ciAddLISTFilter(CHAT chat, chatEnumChannelsCallbackEach callbackEach, chatEnumChannelsCallbackAll callbackAll, void * param);
int ciAddJOINFilter(CHAT chat, const char * channel, chatEnterChannelCallback callback, void * param, chatChannelCallbacks * callbacks, const char * password);
int ciAddTOPICFilter(CHAT chat, const char * channel, chatGetChannelTopicCallback callback, void * param);
int ciAddNAMESFilter(CHAT chat, const char * channel, chatEnumUsersCallback callback, void * param);
int ciAddWHOISFilter(CHAT chat, const char * user, chatGetUserInfoCallback callback, void * param);
int ciAddWHOFilter(CHAT chat, const char * user, chatGetBasicUserInfoCallback callback, void * param);
int ciAddCWHOFilter(CHAT chat, const char * channel, chatGetChannelBasicUserInfoCallback callback, void * param);
int ciAddCMODEFilter(CHAT chat, const char * channel, chatGetChannelModeCallback callback, void * param);
int ciAddUMODEFilter(CHAT chat, const char * user, const char * channel, chatGetUserModeCallback callback, void * param);
int ciAddBANFilter(CHAT chat, const char * user, const char * channel);
int ciAddGETBANFilter(CHAT chat, const char * channel, chatEnumChannelBansCallback callback, void * param);
int ciAddNICKFilter(CHAT chat, const char * oldNick, const char * newNick, chatChangeNickCallback callback, void * param);
int ciAddUNQUIETFilter(CHAT chat, const char * channel);
int ciAddGETKEYFilter(CHAT chat, const char * cookie, int num, const char ** keys, const char * channel, chatGetGlobalKeysCallback callback, void * param);
int ciAddGETCKEYFilter(CHAT chat, const char * cookie, int num, const char ** keys, CHATBool channel, CHATBool getBroadcastKeys, chatGetChannelKeysCallback callback, void * param);
int ciAddGETCHANKEYFilter(CHAT chat, const char * cookie, int num, const char ** keys, CHATBool getBroadcastKeys, chatGetChannelKeysCallback callback, void * param);
int ciAddCDKEYFilter(CHAT chat, chatAuthenticateCDKeyCallback callback, void * param);
int ciAddGETUDPRELAYFilter(CHAT chat, const char * channel, chatGetUdpRelayCallback callback, void * param);
int ciGetNextID(CHAT chat);
CHATBool ciCheckFiltersForID(CHAT chat, int ID);
#endif
| {
"content_hash": "69ea718a1e6f75b8fa1039428960333f",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 180,
"avg_line_length": 40.75675675675676,
"alnum_prop": 0.7572944297082228,
"repo_name": "art567/GameSpy",
"id": "af37c718cf087e72dad1967b79c4cd75c6a3dd32",
"size": "3151",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Chat/chatHandlers.h",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "3829369"
},
{
"name": "C++",
"bytes": "760043"
},
{
"name": "Makefile",
"bytes": "80237"
},
{
"name": "Objective-C",
"bytes": "704296"
}
],
"symlink_target": ""
} |
// "Add missing nested patterns" "true-preview"
class Main {
void foo(Object obj) {
switch (obj) {
case Point(double x/*blah blah blah*/, double y, double z) -> {}
default -> {}
}
}
record Point(double x, double y, double z) {}
}
| {
"content_hash": "f3a6c23c478480f70ad0df197d437ed6",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 70,
"avg_line_length": 23.363636363636363,
"alnum_prop": 0.5836575875486382,
"repo_name": "JetBrains/intellij-community",
"id": "99662b7a192c8360006348a90e1e1bc30e3009f8",
"size": "257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/addMissingDeconstructionComponents/afterWithComments2.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
::RSpec::Matchers.define :have_public_method_defined do |value|
match do |klass|
klass.public_method_defined?(value.to_sym)
end
description do
"should define public instance method ##{value.to_s}"
end
failure_message_for_should do |klass|
"expected #{klass.inspect} to define public instance method " \
"#{value.inspect}, but it didn't"
end
failure_message_for_should_not do |klass|
"expected #{klass.inspect} to not define public instance method " \
"#{value.inspect}, but it did"
end
end
| {
"content_hash": "98a20492b45a3834ad46c8f57a68041a",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 71,
"avg_line_length": 28.05263157894737,
"alnum_prop": 0.6885553470919324,
"repo_name": "antw/montage",
"id": "3065cafb843981466a383423530e4d56837134f4",
"size": "533",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spec/lib/have_public_method_defined.rb",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Ruby",
"bytes": "91531"
}
],
"symlink_target": ""
} |
package question_001;
public class Q014_Longest_Common_Prefix {
public String longestCommonPrefix(String[] strs) {
if(strs.length==0) return "";
String prefix = strs[0];
for(String s: strs){
while(s.indexOf(prefix)!=0){
prefix = prefix.substring(0, prefix.length()-1);
}
}
return prefix;
}
}
| {
"content_hash": "9b89a4b83663edde5dafee1d25ca90ee",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 64,
"avg_line_length": 25.466666666666665,
"alnum_prop": 0.5497382198952879,
"repo_name": "stingchang/CS_Java",
"id": "dffd89508ce6210524c75ef84cab1eb96912fad7",
"size": "382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/question_001/Q014_Longest_Common_Prefix.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "229960"
},
{
"name": "JavaScript",
"bytes": "5151"
}
],
"symlink_target": ""
} |
module Ari
class ConfigInfo < Model
attr_reader :name, :default_language, :max_channels, :max_open_files, :max_load, :setid
def max_load=(val)
@max_load ||= double.new(val)
end
def setid=(val)
@setid ||= SetId.new(val)
end
end
end
| {
"content_hash": "c1c695f8f805ce53b69af71a49a648a4",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 91,
"avg_line_length": 17,
"alnum_prop": 0.6029411764705882,
"repo_name": "jgluhov/asterisk-ari",
"id": "c36abd28f9d456cb4d748f2a819110c0341edc09",
"size": "638",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/ari/models/config_info.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4386"
},
{
"name": "Ruby",
"bytes": "109522"
}
],
"symlink_target": ""
} |
<?php
/**
* Name: Event Create Action
* Description:
* This page is a system page used as the action page for event_create form.
* Arguments:
* $_POST['eventName'] - Name of the new event
* $_POST['startDate'] - Start date of the event
* $_POST['startTime'] - Start time of the event
* $_POST['endDate'] - End date of the event
* $_POST['endTime'] - End time of the event
* $_POST['description'] - Description of the event
* $_POST['location'] - Location of the event
* Modifications:
* 11/09/2014 - Created file.
* 12/12/2014 - Created Comments.
*/
///Include necessary files
include("function/event.php");
date_default_timezone_set('UTC');
/**---- Variables ----*/
$result = array();
$eventName = $_POST['eventName'];
$startDate = $_POST['startDate'];
$startTime = $_POST['startTime'];
$endDate = $_POST['endDate'];
$endTime = $_POST['endTime'];
$description = $_POST['description'];
$location = $_POST['location'];
$startDate .= " ".$startTime;
$endDate .= " ".$endTime;
/**--- END: Variables ---*/
/** Validation */
$result = validate_event($eventName,$startDate,$endDate,$description, $location);
/** END: Validation */
///If passed validation
if(empty($result))
{
/** Create event record in database. */
$eventId = createEvent($eventName,$startDate,$endDate,$description, $location);
if(mysql_error()){
array_push($result, array('Message' => mysql_error(), 'type' => 'error'));
} else {
array_push($result, array('SuccessURL' => 'event_detail.php?eventId=' . $eventId . '&message=Successfully+Created+Event', 'type' => 'success'));
}
}
///Return results array in json format
echo json_encode($result);
?> | {
"content_hash": "b5448d532471b749557b956714ffec42",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 158,
"avg_line_length": 37.907407407407405,
"alnum_prop": 0.5236932095749878,
"repo_name": "zwmcfarland/MSEF",
"id": "db664b4ec78786313390fc0f89e4bf20bb24c84d",
"size": "2047",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/event_create_action.php",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "82444"
},
{
"name": "JavaScript",
"bytes": "592914"
},
{
"name": "PHP",
"bytes": "221794"
}
],
"symlink_target": ""
} |
#ifndef PAGMO_ALGORITHM_IHS_H
#define PAGMO_ALGORITHM_IHS_H
#include <cstddef>
#include <iostream>
#include <string>
#include "../config.h"
#include "../population.h"
#include "../serialization.h"
#include "../types.h"
#include "base.h"
namespace pagmo { namespace algorithm {
// TODO: automatic reshuffling when the population becomes too homogeneous.
/// Improved harmony search algorithm.
/**
* Harmony search (HS) is a metaheuristic algorithm mimicking the improvisation process of musicians. In the process, each musician (i.e., each variable)
* plays (i.e., generates) a note (i.e., a value) for finding a best harmony (i.e., the global optimum) all together.
*
* This code implements the so-called improved harmony search algorithm (IHS), in which the probability of picking the variables from
* the decision vector and the amount of mutation to which they are subject vary respectively linearly and exponentially within each call
* of the evolve() method.
*
* In this algorithm the number of objective function evaluations is equal to the number of generations. All the individuals in the input population participate
* in the evolution. A new individual is generated at every iteration, substituting the current worst individual of the population if better. This algorithm
* will use the comparison methods provided by the problem in order to rank individuals.
*
* This algorithm is suitable for continuous, constrained, mixed-integer and multi-objective optimisation.
*
* @see http://en.wikipedia.org/wiki/Harmony_search for an introduction on harmony search.
* @see http://dx.doi.org/10.1016/j.amc.2006.11.033 for the paper that introduces and explains improved harmony search.
*
* @author Francesco Biscani ([email protected])
* @author Dario Izzo ([email protected])
*/
class __PAGMO_VISIBLE ihs: public base
{
public:
ihs(int gen = 1, const double &phmcr = 0.85, const double &ppar_min = 0.35, const double &ppar_max = 0.99,
const double &bw_min = 1E-5, const double &bw_max = 1);
base_ptr clone() const;
void evolve(population &) const;
std::string get_name() const;
protected:
std::string human_readable_extra() const;
private:
friend class boost::serialization::access;
template <class Archive>
void serialize(Archive &ar, const unsigned int)
{
ar & boost::serialization::base_object<base>(*this);
ar & const_cast<std::size_t &>(m_gen);
ar & const_cast<double &>(m_phmcr);
ar & const_cast<double &>(m_ppar_min);
ar & const_cast<double &>(m_ppar_max);
ar & const_cast<double &>(m_bw_min);
ar & const_cast<double &>(m_bw_max);
}
// Number of generations.
const std::size_t m_gen;
// Rate of choosing from memory (i.e., from population).
const double m_phmcr;
// Minimum pitch adjustment rate.
const double m_ppar_min;
// Maximum pitch adjustment rate.
const double m_ppar_max;
// Mininum distance bandwidth.
const double m_bw_min;
// Maximum distance bandwidth.
const double m_bw_max;
};
}} //namespaces
BOOST_CLASS_EXPORT_KEY(pagmo::algorithm::ihs)
#endif
| {
"content_hash": "76efa11e6c383d58d4398ed223e49f5e",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 160,
"avg_line_length": 38.18518518518518,
"alnum_prop": 0.7219527966375687,
"repo_name": "DailyActie/Surrogate-Model",
"id": "6133b4d53a58915160af6f92b03d748c35ec6ec1",
"size": "4910",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "01-codes/MOEA-pagmo-master/src/algorithm/ihs.h",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "345"
},
{
"name": "Batchfile",
"bytes": "18746"
},
{
"name": "C",
"bytes": "13004913"
},
{
"name": "C++",
"bytes": "14692003"
},
{
"name": "CMake",
"bytes": "72831"
},
{
"name": "CSS",
"bytes": "303488"
},
{
"name": "Fortran",
"bytes": "7339415"
},
{
"name": "HTML",
"bytes": "854774"
},
{
"name": "Java",
"bytes": "38854"
},
{
"name": "JavaScript",
"bytes": "2432846"
},
{
"name": "Jupyter Notebook",
"bytes": "829689"
},
{
"name": "M4",
"bytes": "1379"
},
{
"name": "Makefile",
"bytes": "48708"
},
{
"name": "Matlab",
"bytes": "4346"
},
{
"name": "Objective-C",
"bytes": "567"
},
{
"name": "PHP",
"bytes": "93585"
},
{
"name": "Pascal",
"bytes": "1449"
},
{
"name": "Perl",
"bytes": "1152272"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "34668203"
},
{
"name": "Roff",
"bytes": "5925"
},
{
"name": "Ruby",
"bytes": "92498"
},
{
"name": "Shell",
"bytes": "94698"
},
{
"name": "TeX",
"bytes": "156540"
},
{
"name": "TypeScript",
"bytes": "41691"
}
],
"symlink_target": ""
} |
ACCEPTED
#### According to
International Plant Names Index
#### Published in
null
#### Original name
null
### Remarks
null | {
"content_hash": "45f64e8d5e2c67bbd6bfb8302c56142a",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 31,
"avg_line_length": 9.692307692307692,
"alnum_prop": 0.7063492063492064,
"repo_name": "mdoering/backbone",
"id": "e6178802c738bdd03fc1aa7306f503b32981a1ee",
"size": "189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "life/Plantae/Magnoliophyta/Magnoliopsida/Cornales/Cornaceae/Cornus/Cornus kousa/Cornus kousa yaeyamensis/README.md",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
'''
Created on Jun 12, 2014
@author: James
'''
import glob
import os
# a script that converts word file to txt files
# requires word application on Windows machine
# requirement:
# 1. Windows platform
# 2. python 2.7
# 3. pywin32, download from http://sourceforge.net/projects/pywin32/
# 4. word application installed on running machine
from win32com.client import constants, Dispatch
import pythoncom
from zipfile import ZipFile
def pdf_to_text(file_path):
import pdf2txt
(outpath,ext) = os.path.splitext(file_path)
outfile = outpath + '.txt'
print(os.path.abspath(outfile))
if os.path.exists(outfile):
return text_from_txt_file(outfile)
outfile = os.path.abspath(outfile)
file_path = os.path.abspath(file_path)
pdf2txt.main(argv=['pdf2txt', '-o', outfile, file_path ])
return text_from_txt_file(outfile)
# convert the word file to a text file.
# @arg wordapp: The word IDispatch object
# @arg wordfile: The word file name
# @returns: The txt file name
def doc_to_text(wordfile):
wordapp = Dispatch("Word.Application")
name, ext = os.path.splitext(wordfile)
if ext != '.doc' and ext != '.docx':
return None
txtfile = name + '.txt'
if os.path.exists(txtfile):
return text_from_txt_file(txtfile)
print txtfile
try:
wordapp.Documents.Open(os.path.abspath(wordfile))
wdFormatTextLineBreaks = 3
wordapp.ActiveDocument.SaveAs(os.path.abspath(txtfile),
FileFormat=wdFormatTextLineBreaks)
wordapp.ActiveDocument.Close()
except Exception as e:
print("***Could not process " + wordfile + " ****")
return text_from_txt_file(txtfile)
ConvertFunctions = {'.pdf' : pdf_to_text,
'.doc' : doc_to_text,
'.docx' : doc_to_text,
'.docx' : doc_to_text}
def FilesToText(folder_path, ext):
global ConvertFunctions
glob_path = os.path.join(folder_path, "*." + ext)
files = glob.glob(glob_path)
for file_path in files:
(b, f_ext) = os.path.splitext(file_path)
basename = os.path.basename(file_path)
if basename[0] == '~':
continue
if f_ext.lower() in ConvertFunctions:
convert_function = ConvertFunctions[f_ext]
text = convert_function(file_path)
base_name = os.path.basename(file_path)
if base_name[0] == '~':
continue
(base_name, ext) = os.path.splitext(base_name)
yield (base_name, text)
def text_from_txt_file(text_path):
f = file(text_path, 'r')
text = f.read()
f.close()
return text | {
"content_hash": "6d7762dd67b2a02047dba4d2a8d229f4",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 72,
"avg_line_length": 29.11578947368421,
"alnum_prop": 0.6012292118582792,
"repo_name": "jamesra/copycatcher",
"id": "e698f137f715f092e01e7fe03e66eddcd37cff82",
"size": "2766",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "copy_catcher/filehandler.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "29904"
}
],
"symlink_target": ""
} |
package com.xivvic.args.util;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
public class FileUtil
{
public static String readFromResourceFile(String dir, String file)
{
Path path = Paths.get(dir, file);
if (! Files.exists(path))
{
fail("Missing test resource file: " + path);
}
byte[] bytes = {};
try
{
bytes = Files.readAllBytes(path);
}
catch (IOException e)
{
fail("Error reading test resource file: " + path + ". Error: " + e.getLocalizedMessage());
}
String content = new String(bytes);
return content;
}
public static String inputStreamToString(InputStream stream)
{
Objects.requireNonNull(stream);
ByteArrayOutputStream result = new ByteArrayOutputStream();
byte[] buffer = new byte[4096];
int length;
try
{
while ((length = stream.read(buffer)) != -1) {
result.write(buffer, 0, length);
}
}
catch (IOException e)
{
return null;
}
try
{
return result.toString(StandardCharsets.UTF_8.name());
}
catch (UnsupportedEncodingException cannotHappen)
{
return null;
}
}
}
| {
"content_hash": "49cae187ab009505090bc6ed10e464a0",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 93,
"avg_line_length": 19.652173913043477,
"alnum_prop": 0.696165191740413,
"repo_name": "crttcr/Args",
"id": "6c205f1e6b5ca8623dd680b198f4b98910166f08",
"size": "1356",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/main/java/com/xivvic/args/util/FileUtil.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ANTLR",
"bytes": "517"
},
{
"name": "Java",
"bytes": "171675"
}
],
"symlink_target": ""
} |
* Promise
* Async | {
"content_hash": "06b997cb6f27effbbf0a7207039a03e4",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 9,
"avg_line_length": 8.5,
"alnum_prop": 0.7058823529411765,
"repo_name": "alanwalter45/example-angular2",
"id": "600cf577d3498c161a37013670e6acb090454ab6",
"size": "26",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "promise-async/README.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "TypeScript",
"bytes": "4010"
}
],
"symlink_target": ""
} |
import { Pipe, PipeTransform } from '@angular/core';
import { BabelService } from '../services';
@Pipe({
name: 'babel',
pure: false
})
export class BabelPipe implements PipeTransform {
constructor(private babel: BabelService) {}
transform(value, args) {
return this.babel.translate(value);
}
}
| {
"content_hash": "58f314021528bab4deb67da25a904b87",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 52,
"avg_line_length": 20.375,
"alnum_prop": 0.6595092024539877,
"repo_name": "zarautz/munoa",
"id": "1ba951ce97852d6f5739832d4bb602d351dbf83b",
"size": "326",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/pipes/babel.pipe.ts",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2348"
},
{
"name": "HTML",
"bytes": "10903"
},
{
"name": "JavaScript",
"bytes": "8442"
},
{
"name": "TypeScript",
"bytes": "23234"
}
],
"symlink_target": ""
} |
define("dojo/parser", [
"require", "./_base/kernel", "./_base/lang", "./_base/array", "./_base/config", "./dom", "./_base/window",
"./_base/url", "./aspect", "./promise/all", "./date/stamp", "./Deferred", "./has", "./query", "./on", "./ready"
], function(require, dojo, dlang, darray, config, dom, dwindow, _Url, aspect, all, dates, Deferred, has, query, don, ready){
// module:
// dojo/parser
new Date("X"); // workaround for #11279, new Date("") == NaN
// data-dojo-props etc. is not restricted to JSON, it can be any javascript
function myEval(text){
return eval("(" + text + ")");
}
// Widgets like BorderContainer add properties to _Widget via dojo.extend().
// If BorderContainer is loaded after _Widget's parameter list has been cached,
// we need to refresh that parameter list (for _Widget and all widgets that extend _Widget).
var extendCnt = 0;
aspect.after(dlang, "extend", function(){
extendCnt++;
}, true);
function getNameMap(ctor){
// summary:
// Returns map from lowercase name to attribute name in class, ex: {onclick: "onClick"}
var map = ctor._nameCaseMap, proto = ctor.prototype;
// Create the map if it's undefined.
// Refresh the map if a superclass was possibly extended with new methods since the map was created.
if(!map || map._extendCnt < extendCnt){
map = ctor._nameCaseMap = {};
for(var name in proto){
if(name.charAt(0) === "_"){
continue;
} // skip internal properties
map[name.toLowerCase()] = name;
}
map._extendCnt = extendCnt;
}
return map;
}
function getCtor(/*String[]*/ types, /*Function?*/ contextRequire){
// summary:
// Retrieves a constructor. If the types array contains more than one class/MID then the
// subsequent classes will be mixed into the first class and a unique constructor will be
// returned for that array.
if(!contextRequire){
contextRequire = require;
}
// Map from widget name or list of widget names(ex: "dijit/form/Button,acme/MyMixin") to a constructor.
// Keep separate map for each requireContext to avoid false matches (ex: "./Foo" can mean different things
// depending on context.)
var ctorMap = contextRequire._dojoParserCtorMap || (contextRequire._dojoParserCtorMap = {});
var ts = types.join();
if(!ctorMap[ts]){
var mixins = [];
for(var i = 0, l = types.length; i < l; i++){
var t = types[i];
// TODO: Consider swapping getObject and require in the future
mixins[mixins.length] = (ctorMap[t] = ctorMap[t] || (dlang.getObject(t) || (~t.indexOf('/') &&
contextRequire(t))));
}
var ctor = mixins.shift();
ctorMap[ts] = mixins.length ? (ctor.createSubclass ? ctor.createSubclass(mixins) : ctor.extend.apply(ctor, mixins)) : ctor;
}
return ctorMap[ts];
}
var parser = {
// summary:
// The Dom/Widget parsing package
_clearCache: function(){
// summary:
// Clear cached data. Used mainly for benchmarking.
extendCnt++;
_ctorMap = {};
},
_functionFromScript: function(script, attrData){
// summary:
// Convert a `<script type="dojo/method" args="a, b, c"> ... </script>`
// into a function
// script: DOMNode
// The `<script>` DOMNode
// attrData: String
// For HTML5 compliance, searches for attrData + "args" (typically
// "data-dojo-args") instead of "args"
var preamble = "",
suffix = "",
argsStr = (script.getAttribute(attrData + "args") || script.getAttribute("args")),
withStr = script.getAttribute("with");
// Convert any arguments supplied in script tag into an array to be passed to the
var fnArgs = (argsStr || "").split(/\s*,\s*/);
if(withStr && withStr.length){
darray.forEach(withStr.split(/\s*,\s*/), function(part){
preamble += "with(" + part + "){";
suffix += "}";
});
}
return new Function(fnArgs, preamble + script.innerHTML + suffix);
},
instantiate: function(nodes, mixin, options){
// summary:
// Takes array of nodes, and turns them into class instances and
// potentially calls a startup method to allow them to connect with
// any children.
// nodes: Array
// Array of DOM nodes
// mixin: Object?
// An object that will be mixed in with each node in the array.
// Values in the mixin will override values in the node, if they
// exist.
// options: Object?
// An object used to hold kwArgs for instantiation.
// See parse.options argument for details.
// returns:
// Array of instances.
mixin = mixin || {};
options = options || {};
var dojoType = (options.scope || dojo._scopeName) + "Type", // typically "dojoType"
attrData = "data-" + (options.scope || dojo._scopeName) + "-", // typically "data-dojo-"
dataDojoType = attrData + "type", // typically "data-dojo-type"
dataDojoMixins = attrData + "mixins"; // typically "data-dojo-mixins"
var list = [];
darray.forEach(nodes, function(node){
var type = dojoType in mixin ? mixin[dojoType] : node.getAttribute(dataDojoType) || node.getAttribute(dojoType);
if(type){
var mixinsValue = node.getAttribute(dataDojoMixins),
types = mixinsValue ? [type].concat(mixinsValue.split(/\s*,\s*/)) : [type];
list.push({
node: node,
types: types
});
}
});
// Instantiate the nodes and return the list of instances.
return this._instantiate(list, mixin, options);
},
_instantiate: function(nodes, mixin, options, returnPromise){
// summary:
// Takes array of objects representing nodes, and turns them into class instances and
// potentially calls a startup method to allow them to connect with
// any children.
// nodes: Array
// Array of objects like
// | {
// | ctor: Function (may be null)
// | types: ["dijit/form/Button", "acme/MyMixin"] (used if ctor not specified)
// | node: DOMNode,
// | scripts: [ ... ], // array of <script type="dojo/..."> children of node
// | inherited: { ... } // settings inherited from ancestors like dir, theme, etc.
// | }
// mixin: Object
// An object that will be mixed in with each node in the array.
// Values in the mixin will override values in the node, if they
// exist.
// options: Object
// An options object used to hold kwArgs for instantiation.
// See parse.options argument for details.
// returnPromise: Boolean
// Return a Promise rather than the instance; supports asynchronous widget creation.
// returns:
// Array of instances, or if returnPromise is true, a promise for array of instances
// that resolves when instances have finished initializing.
// Call widget constructors. Some may be asynchronous and return promises.
var thelist = darray.map(nodes, function(obj){
var ctor = obj.ctor || getCtor(obj.types, options.contextRequire);
// If we still haven't resolved a ctor, it is fatal now
if(!ctor){
throw new Error("Unable to resolve constructor for: '" + obj.types.join() + "'");
}
return this.construct(ctor, obj.node, mixin, options, obj.scripts, obj.inherited);
}, this);
// After all widget construction finishes, call startup on each top level instance if it makes sense (as for
// widgets). Parent widgets will recursively call startup on their (non-top level) children
function onConstruct(thelist){
if(!mixin._started && !options.noStart){
darray.forEach(thelist, function(instance){
if(typeof instance.startup === "function" && !instance._started){
instance.startup();
}
});
}
return thelist;
}
if(returnPromise){
return all(thelist).then(onConstruct);
}else{
// Back-compat path, remove for 2.0
return onConstruct(thelist);
}
},
construct: function(ctor, node, mixin, options, scripts, inherited){
// summary:
// Calls new ctor(params, node), where params is the hash of parameters specified on the node,
// excluding data-dojo-type and data-dojo-mixins. Does not call startup().
// ctor: Function
// Widget constructor.
// node: DOMNode
// This node will be replaced/attached to by the widget. It also specifies the arguments to pass to ctor.
// mixin: Object?
// Attributes in this object will be passed as parameters to ctor,
// overriding attributes specified on the node.
// options: Object?
// An options object used to hold kwArgs for instantiation. See parse.options argument for details.
// scripts: DomNode[]?
// Array of `<script type="dojo/*">` DOMNodes. If not specified, will search for `<script>` tags inside node.
// inherited: Object?
// Settings from dir=rtl or lang=... on a node above this node. Overrides options.inherited.
// returns:
// Instance or Promise for the instance, if markupFactory() itself returned a promise
var proto = ctor && ctor.prototype;
options = options || {};
// Setup hash to hold parameter settings for this widget. Start with the parameter
// settings inherited from ancestors ("dir" and "lang").
// Inherited setting may later be overridden by explicit settings on node itself.
var params = {};
if(options.defaults){
// settings for the document itself (or whatever subtree is being parsed)
dlang.mixin(params, options.defaults);
}
if(inherited){
// settings from dir=rtl or lang=... on a node above this node
dlang.mixin(params, inherited);
}
// Get list of attributes explicitly listed in the markup
var attributes;
if(has("dom-attributes-explicit")){
// Standard path to get list of user specified attributes
attributes = node.attributes;
}else if(has("dom-attributes-specified-flag")){
// Special processing needed for IE8, to skip a few faux values in attributes[]
attributes = darray.filter(node.attributes, function(a){
return a.specified;
});
}else{
// Special path for IE6-7, avoid (sometimes >100) bogus entries in node.attributes
var clone = /^input$|^img$/i.test(node.nodeName) ? node : node.cloneNode(false),
attrs = clone.outerHTML.replace(/=[^\s"']+|="[^"]*"|='[^']*'/g, "").replace(/^\s*<[a-zA-Z0-9]*\s*/, "").replace(/\s*>.*$/, "");
attributes = darray.map(attrs.split(/\s+/), function(name){
var lcName = name.toLowerCase();
return {
name: name,
// getAttribute() doesn't work for button.value, returns innerHTML of button.
// but getAttributeNode().value doesn't work for the form.encType or li.value
value: (node.nodeName == "LI" && name == "value") || lcName == "enctype" ?
node.getAttribute(lcName) : node.getAttributeNode(lcName).value
};
});
}
// Hash to convert scoped attribute name (ex: data-dojo17-params) to something friendly (ex: data-dojo-params)
// TODO: remove scope for 2.0
var scope = options.scope || dojo._scopeName,
attrData = "data-" + scope + "-", // typically "data-dojo-"
hash = {};
if(scope !== "dojo"){
hash[attrData + "props"] = "data-dojo-props";
hash[attrData + "type"] = "data-dojo-type";
hash[attrData + "mixins"] = "data-dojo-mixins";
hash[scope + "type"] = "dojotype";
hash[attrData + "id"] = "data-dojo-id";
}
// Read in attributes and process them, including data-dojo-props, data-dojo-type,
// dojoAttachPoint, etc., as well as normal foo=bar attributes.
var i = 0, item, funcAttrs = [], jsname, extra;
while(item = attributes[i++]){
var name = item.name,
lcName = name.toLowerCase(),
value = item.value;
switch(hash[lcName] || lcName){
// Already processed, just ignore
case "data-dojo-type":
case "dojotype":
case "data-dojo-mixins":
break;
// Data-dojo-props. Save for later to make sure it overrides direct foo=bar settings
case "data-dojo-props":
extra = value;
break;
// data-dojo-id or jsId. TODO: drop jsId in 2.0
case "data-dojo-id":
case "jsid":
jsname = value;
break;
// For the benefit of _Templated
case "data-dojo-attach-point":
case "dojoattachpoint":
params.dojoAttachPoint = value;
break;
case "data-dojo-attach-event":
case "dojoattachevent":
params.dojoAttachEvent = value;
break;
// Special parameter handling needed for IE
case "class":
params["class"] = node.className;
break;
case "style":
params["style"] = node.style && node.style.cssText;
break;
default:
// Normal attribute, ex: value="123"
// Find attribute in widget corresponding to specified name.
// May involve case conversion, ex: onclick --> onClick
if(!(name in proto)){
var map = getNameMap(ctor);
name = map[lcName] || name;
}
// Set params[name] to value, doing type conversion
if(name in proto){
switch(typeof proto[name]){
case "string":
params[name] = value;
break;
case "number":
params[name] = value.length ? Number(value) : NaN;
break;
case "boolean":
// for checked/disabled value might be "" or "checked". interpret as true.
params[name] = value.toLowerCase() != "false";
break;
case "function":
if(value === "" || value.search(/[^\w\.]+/i) != -1){
// The user has specified some text for a function like "return x+5"
params[name] = new Function(value);
}else{
// The user has specified the name of a global function like "myOnClick"
// or a single word function "return"
params[name] = dlang.getObject(value, false) || new Function(value);
}
funcAttrs.push(name); // prevent "double connect", see #15026
break;
default:
var pVal = proto[name];
params[name] =
(pVal && "length" in pVal) ? (value ? value.split(/\s*,\s*/) : []) : // array
(pVal instanceof Date) ?
(value == "" ? new Date("") : // the NaN of dates
value == "now" ? new Date() : // current date
dates.fromISOString(value)) :
(pVal instanceof _Url) ? (dojo.baseUrl + value) :
myEval(value);
}
}else{
params[name] = value;
}
}
}
// Remove function attributes from DOMNode to prevent "double connect" problem, see #15026.
// Do this as a separate loop since attributes[] is often a live collection (depends on the browser though).
for(var j = 0; j < funcAttrs.length; j++){
var lcfname = funcAttrs[j].toLowerCase();
node.removeAttribute(lcfname);
node[lcfname] = null;
}
// Mix things found in data-dojo-props into the params, overriding any direct settings
if(extra){
try{
extra = myEval.call(options.propsThis, "{" + extra + "}");
dlang.mixin(params, extra);
}catch(e){
// give the user a pointer to their invalid parameters. FIXME: can we kill this in production?
throw new Error(e.toString() + " in data-dojo-props='" + extra + "'");
}
}
// Any parameters specified in "mixin" override everything else.
dlang.mixin(params, mixin);
// Get <script> nodes associated with this widget, if they weren't specified explicitly
if(!scripts){
scripts = (ctor && (ctor._noScript || proto._noScript) ? [] : query("> script[type^='dojo/']", node));
}
// Process <script type="dojo/*"> script tags
// <script type="dojo/method" data-dojo-event="foo"> tags are added to params, and passed to
// the widget on instantiation.
// <script type="dojo/method"> tags (with no event) are executed after instantiation
// <script type="dojo/connect" data-dojo-event="foo"> tags are dojo.connected after instantiation,
// and likewise with <script type="dojo/aspect" data-dojo-method="foo">
// <script type="dojo/watch" data-dojo-prop="foo"> tags are dojo.watch after instantiation
// <script type="dojo/on" data-dojo-event="foo"> tags are dojo.on after instantiation
// note: dojo/* script tags cannot exist in self closing widgets, like <input />
var aspects = [], // aspects to connect after instantiation
calls = [], // functions to call after instantiation
watches = [], // functions to watch after instantiation
ons = []; // functions to on after instantiation
if(scripts){
for(i = 0; i < scripts.length; i++){
var script = scripts[i];
node.removeChild(script);
// FIXME: drop event="" support in 2.0. use data-dojo-event="" instead
var event = (script.getAttribute(attrData + "event") || script.getAttribute("event")),
prop = script.getAttribute(attrData + "prop"),
method = script.getAttribute(attrData + "method"),
advice = script.getAttribute(attrData + "advice"),
scriptType = script.getAttribute("type"),
nf = this._functionFromScript(script, attrData);
if(event){
if(scriptType == "dojo/connect"){
aspects.push({ method: event, func: nf });
}else if(scriptType == "dojo/on"){
ons.push({ event: event, func: nf });
}else{
// <script type="dojo/method" data-dojo-event="foo">
// TODO for 2.0: use data-dojo-method="foo" instead (also affects dijit/Declaration)
params[event] = nf;
}
}else if(scriptType == "dojo/aspect"){
aspects.push({ method: method, advice: advice, func: nf });
}else if(scriptType == "dojo/watch"){
watches.push({ prop: prop, func: nf });
}else{
calls.push(nf);
}
}
}
// create the instance
var markupFactory = ctor.markupFactory || proto.markupFactory;
var instance = markupFactory ? markupFactory(params, node, ctor) : new ctor(params, node);
function onInstantiate(instance){
// map it to the JS namespace if that makes sense
if(jsname){
dlang.setObject(jsname, instance);
}
// process connections and startup functions
for(i = 0; i < aspects.length; i++){
aspect[aspects[i].advice || "after"](instance, aspects[i].method, dlang.hitch(instance, aspects[i].func), true);
}
for(i = 0; i < calls.length; i++){
calls[i].call(instance);
}
for(i = 0; i < watches.length; i++){
instance.watch(watches[i].prop, watches[i].func);
}
for(i = 0; i < ons.length; i++){
don(instance, ons[i].event, ons[i].func);
}
return instance;
}
if(instance.then){
return instance.then(onInstantiate);
}else{
return onInstantiate(instance);
}
},
scan: function(root, options){
// summary:
// Scan a DOM tree and return an array of objects representing the DOMNodes
// that need to be turned into widgets.
// description:
// Search specified node (or document root node) recursively for class instances
// and return an array of objects that represent potential widgets to be
// instantiated. Searches for either data-dojo-type="MID" or dojoType="MID" where
// "MID" is a module ID like "dijit/form/Button" or a fully qualified Class name
// like "dijit/form/Button". If the MID is not currently available, scan will
// attempt to require() in the module.
//
// See parser.parse() for details of markup.
// root: DomNode?
// A default starting root node from which to start the parsing. Can be
// omitted, defaulting to the entire document. If omitted, the `options`
// object can be passed in this place. If the `options` object has a
// `rootNode` member, that is used.
// options: Object
// a kwArgs options object, see parse() for details
//
// returns: Promise
// A promise that is resolved with the nodes that have been parsed.
var list = [], // Output List
mids = [], // An array of modules that are not yet loaded
midsHash = {}; // Used to keep the mids array unique
var dojoType = (options.scope || dojo._scopeName) + "Type", // typically "dojoType"
attrData = "data-" + (options.scope || dojo._scopeName) + "-", // typically "data-dojo-"
dataDojoType = attrData + "type", // typically "data-dojo-type"
dataDojoTextDir = attrData + "textdir", // typically "data-dojo-textdir"
dataDojoMixins = attrData + "mixins"; // typically "data-dojo-mixins"
// Info on DOMNode currently being processed
var node = root.firstChild;
// Info on parent of DOMNode currently being processed
// - inherited: dir, lang, and textDir setting of parent, or inherited by parent
// - parent: pointer to identical structure for my parent (or null if no parent)
// - scripts: if specified, collects <script type="dojo/..."> type nodes from children
var inherited = options.inherited;
if(!inherited){
function findAncestorAttr(node, attr){
return (node.getAttribute && node.getAttribute(attr)) ||
(node.parentNode && findAncestorAttr(node.parentNode, attr));
}
inherited = {
dir: findAncestorAttr(root, "dir"),
lang: findAncestorAttr(root, "lang"),
textDir: findAncestorAttr(root, dataDojoTextDir)
};
for(var key in inherited){
if(!inherited[key]){
delete inherited[key];
}
}
}
// Metadata about parent node
var parent = {
inherited: inherited
};
// For collecting <script type="dojo/..."> type nodes (when null, we don't need to collect)
var scripts;
// when true, only look for <script type="dojo/..."> tags, and don't recurse to children
var scriptsOnly;
function getEffective(parent){
// summary:
// Get effective dir, lang, textDir settings for specified obj
// (matching "parent" object structure above), and do caching.
// Take care not to return null entries.
if(!parent.inherited){
parent.inherited = {};
var node = parent.node,
grandparent = getEffective(parent.parent);
var inherited = {
dir: node.getAttribute("dir") || grandparent.dir,
lang: node.getAttribute("lang") || grandparent.lang,
textDir: node.getAttribute(dataDojoTextDir) || grandparent.textDir
};
for(var key in inherited){
if(inherited[key]){
parent.inherited[key] = inherited[key];
}
}
}
return parent.inherited;
}
// DFS on DOM tree, collecting nodes with data-dojo-type specified.
while(true){
if(!node){
// Finished this level, continue to parent's next sibling
if(!parent || !parent.node){
break;
}
node = parent.node.nextSibling;
scriptsOnly = false;
parent = parent.parent;
scripts = parent.scripts;
continue;
}
if(node.nodeType != 1){
// Text or comment node, skip to next sibling
node = node.nextSibling;
continue;
}
if(scripts && node.nodeName.toLowerCase() == "script"){
// Save <script type="dojo/..."> for parent, then continue to next sibling
type = node.getAttribute("type");
if(type && /^dojo\/\w/i.test(type)){
scripts.push(node);
}
node = node.nextSibling;
continue;
}
if(scriptsOnly){
// scriptsOnly flag is set, we have already collected scripts if the parent wants them, so now we shouldn't
// continue further analysis of the node and will continue to the next sibling
node = node.nextSibling;
continue;
}
// Check for data-dojo-type attribute, fallback to backward compatible dojoType
// TODO: Remove dojoType in 2.0
var type = node.getAttribute(dataDojoType) || node.getAttribute(dojoType);
// Short circuit for leaf nodes containing nothing [but text]
var firstChild = node.firstChild;
if(!type && (!firstChild || (firstChild.nodeType == 3 && !firstChild.nextSibling))){
node = node.nextSibling;
continue;
}
// Meta data about current node
var current;
var ctor = null;
if(type){
// If dojoType/data-dojo-type specified, add to output array of nodes to instantiate.
var mixinsValue = node.getAttribute(dataDojoMixins),
types = mixinsValue ? [type].concat(mixinsValue.split(/\s*,\s*/)) : [type];
// Note: won't find classes declared via dojo/Declaration or any modules that haven't been
// loaded yet so use try/catch to avoid throw from require()
try{
ctor = getCtor(types, options.contextRequire);
}catch(e){}
// If the constructor was not found, check to see if it has modules that can be loaded
if(!ctor){
darray.forEach(types, function(t){
if(~t.indexOf('/') && !midsHash[t]){
// If the type looks like a MID and it currently isn't in the array of MIDs to load, add it.
midsHash[t] = true;
mids[mids.length] = t;
}
});
}
var childScripts = ctor && !ctor.prototype._noScript ? [] : null; // <script> nodes that are parent's children
// Setup meta data about this widget node, and save it to list of nodes to instantiate
current = {
types: types,
ctor: ctor,
parent: parent,
node: node,
scripts: childScripts
};
current.inherited = getEffective(current); // dir & lang settings for current node, explicit or inherited
list.push(current);
}else{
// Meta data about this non-widget node
current = {
node: node,
scripts: scripts,
parent: parent
};
}
// Recurse, collecting <script type="dojo/..."> children, and also looking for
// descendant nodes with dojoType specified (unless the widget has the stopParser flag).
// When finished with children, go to my next sibling.
scripts = childScripts;
scriptsOnly = node.stopParser || (ctor && ctor.prototype.stopParser && !(options.template));
parent = current;
node = firstChild;
}
var d = new Deferred();
// If there are modules to load then require them in
if(mids.length){
// Warn that there are modules being auto-required
if( true ){
console.warn("WARNING: Modules being Auto-Required: " + mids.join(", "));
}
var r = options.contextRequire || require;
r(mids, function(){
// Go through list of widget nodes, filling in missing constructors, and filtering out nodes that shouldn't
// be instantiated due to a stopParser flag on an ancestor that we belatedly learned about due to
// auto-require of a module like ContentPane. Assumes list is in DFS order.
d.resolve(darray.filter(list, function(widget){
if(!widget.ctor){
// Attempt to find the constructor again. Still won't find classes defined via
// dijit/Declaration so need to try/catch.
try{
widget.ctor = getCtor(widget.types, options.contextRequire);
}catch(e){}
}
// Get the parent widget
var parent = widget.parent;
while(parent && !parent.types){
parent = parent.parent;
}
// Return false if this node should be skipped due to stopParser on an ancestor.
// Since list[] is in DFS order, this loop will always set parent.instantiateChildren before
// trying to compute widget.instantiate.
var proto = widget.ctor && widget.ctor.prototype;
widget.instantiateChildren = !(proto && proto.stopParser && !(options.template));
widget.instantiate = !parent || (parent.instantiate && parent.instantiateChildren);
return widget.instantiate;
}));
});
}else{
// There were no modules to load, so just resolve with the parsed nodes. This separate code path is for
// efficiency, to avoid running the require() and the callback code above.
d.resolve(list);
}
// Return the promise
return d.promise;
},
_require: function(/*DOMNode*/ script, /*Object?*/ options){
// summary:
// Helper for _scanAMD(). Takes a `<script type=dojo/require>bar: "acme/bar", ...</script>` node,
// calls require() to load the specified modules and (asynchronously) assign them to the specified global
// variables, and returns a Promise for when that operation completes.
//
// In the example above, it is effectively doing a require(["acme/bar", ...], function(a){ bar = a; }).
var hash = myEval("{" + script.innerHTML + "}"), // can't use dojo/json::parse() because maybe no quotes
vars = [],
mids = [],
d = new Deferred();
var contextRequire = (options && options.contextRequire) || require;
for(var name in hash){
vars.push(name);
mids.push(hash[name]);
}
contextRequire(mids, function(){
for(var i = 0; i < vars.length; i++){
dlang.setObject(vars[i], arguments[i]);
}
d.resolve(arguments);
});
return d.promise;
},
_scanAmd: function(root, options){
// summary:
// Scans the DOM for any declarative requires and returns their values.
// description:
// Looks for `<script type=dojo/require>bar: "acme/bar", ...</script>` node, calls require() to load the
// specified modules and (asynchronously) assign them to the specified global variables,
// and returns a Promise for when those operations complete.
// root: DomNode
// The node to base the scan from.
// options: Object?
// a kwArgs options object, see parse() for details
// Promise that resolves when all the <script type=dojo/require> nodes have finished loading.
var deferred = new Deferred(),
promise = deferred.promise;
deferred.resolve(true);
var self = this;
query("script[type='dojo/require']", root).forEach(function(node){
// Fire off require() call for specified modules. Chain this require to fire after
// any previous requires complete, so that layers can be loaded before individual module require()'s fire.
promise = promise.then(function(){
return self._require(node, options);
});
// Remove from DOM so it isn't seen again
node.parentNode.removeChild(node);
});
return promise;
},
parse: function(rootNode, options){
// summary:
// Scan the DOM for class instances, and instantiate them.
// description:
// Search specified node (or root node) recursively for class instances,
// and instantiate them. Searches for either data-dojo-type="Class" or
// dojoType="Class" where "Class" is a a fully qualified class name,
// like `dijit/form/Button`
//
// Using `data-dojo-type`:
// Attributes using can be mixed into the parameters used to instantiate the
// Class by using a `data-dojo-props` attribute on the node being converted.
// `data-dojo-props` should be a string attribute to be converted from JSON.
//
// Using `dojoType`:
// Attributes are read from the original domNode and converted to appropriate
// types by looking up the Class prototype values. This is the default behavior
// from Dojo 1.0 to Dojo 1.5. `dojoType` support is deprecated, and will
// go away in Dojo 2.0.
// rootNode: DomNode?
// A default starting root node from which to start the parsing. Can be
// omitted, defaulting to the entire document. If omitted, the `options`
// object can be passed in this place. If the `options` object has a
// `rootNode` member, that is used.
// options: Object?
// A hash of options.
//
// - noStart: Boolean?:
// when set will prevent the parser from calling .startup()
// when locating the nodes.
// - rootNode: DomNode?:
// identical to the function's `rootNode` argument, though
// allowed to be passed in via this `options object.
// - template: Boolean:
// If true, ignores ContentPane's stopParser flag and parses contents inside of
// a ContentPane inside of a template. This allows dojoAttachPoint on widgets/nodes
// nested inside the ContentPane to work.
// - inherited: Object:
// Hash possibly containing dir and lang settings to be applied to
// parsed widgets, unless there's another setting on a sub-node that overrides
// - scope: String:
// Root for attribute names to search for. If scopeName is dojo,
// will search for data-dojo-type (or dojoType). For backwards compatibility
// reasons defaults to dojo._scopeName (which is "dojo" except when
// multi-version support is used, when it will be something like dojo16, dojo20, etc.)
// - propsThis: Object:
// If specified, "this" referenced from data-dojo-props will refer to propsThis.
// Intended for use from the widgets-in-template feature of `dijit._WidgetsInTemplateMixin`
// - contextRequire: Function:
// If specified, this require is utilised for looking resolving modules instead of the
// `dojo/parser` context `require()`. Intended for use from the widgets-in-template feature of
// `dijit._WidgetsInTemplateMixin`.
// returns: Mixed
// Returns a blended object that is an array of the instantiated objects, but also can include
// a promise that is resolved with the instantiated objects. This is done for backwards
// compatibility. If the parser auto-requires modules, it will always behave in a promise
// fashion and `parser.parse().then(function(instances){...})` should be used.
// example:
// Parse all widgets on a page:
// | parser.parse();
// example:
// Parse all classes within the node with id="foo"
// | parser.parse(dojo.byId('foo'));
// example:
// Parse all classes in a page, but do not call .startup() on any
// child
// | parser.parse({ noStart: true })
// example:
// Parse all classes in a node, but do not call .startup()
// | parser.parse(someNode, { noStart:true });
// | // or
// | parser.parse({ noStart:true, rootNode: someNode });
// determine the root node and options based on the passed arguments.
if(rootNode && typeof rootNode != "string" && !("nodeType" in rootNode)){
// If called as parse(options) rather than parse(), parse(rootNode), or parse(rootNode, options)...
options = rootNode;
rootNode = options.rootNode;
}
var root = rootNode ? dom.byId(rootNode) : dwindow.body();
options = options || {};
var mixin = options.template ? { template: true } : {},
instances = [],
self = this;
// First scan for any <script type=dojo/require> nodes, and execute.
// Then scan for all nodes with data-dojo-type, and load any unloaded modules.
// Then build the object instances. Add instances to already existing (but empty) instances[] array,
// which may already have been returned to caller. Also, use otherwise to collect and throw any errors
// that occur during the parse().
var p =
this._scanAmd(root, options).then(function(){
return self.scan(root, options);
}).then(function(parsedNodes){
return self._instantiate(parsedNodes, mixin, options, true);
}).then(function(_instances){
// Copy the instances into the instances[] array we declared above, and are accessing as
// our return value.
return instances = instances.concat(_instances);
}).otherwise(function(e){
// TODO Modify to follow better pattern for promise error management when available
console.error("dojo/parser::parse() error", e);
throw e;
});
// Blend the array with the promise
dlang.mixin(instances, p);
return instances;
}
};
if( 1 ){
dojo.parser = parser;
}
// Register the parser callback. It should be the first callback
// after the a11y test.
if(config.parseOnLoad){
ready(100, parser, "parse");
}
return parser;
});
| {
"content_hash": "bfe5dd30f9f62d425f576537a0f63f8a",
"timestamp": "",
"source": "github",
"line_count": 921,
"max_line_length": 132,
"avg_line_length": 38.33767643865364,
"alnum_prop": 0.6448214336288198,
"repo_name": "PATRIC3/p3_user",
"id": "2c6a4cfb0e488621939aee36a96ee5b7d1ede07a",
"size": "35309",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "public/js/release/dojo/parser.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ActionScript",
"bytes": "20037"
},
{
"name": "Batchfile",
"bytes": "866"
},
{
"name": "CSS",
"bytes": "4319723"
},
{
"name": "EJS",
"bytes": "34080"
},
{
"name": "HTML",
"bytes": "146473"
},
{
"name": "JavaScript",
"bytes": "15162707"
},
{
"name": "Less",
"bytes": "507733"
},
{
"name": "PHP",
"bytes": "38104"
},
{
"name": "Shell",
"bytes": "4423"
},
{
"name": "XSLT",
"bytes": "47383"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>NodeJS - Testing express application with supertest and mocha – Sagar R Kothari</title>
<link rel="dns-prefetch" href="//maxcdn.bootstrapcdn.com">
<link rel="dns-prefetch" href="//cdnjs.cloudflare.com">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="Passionate and enthusiastic iOS application developer having 8 years of experience.">
<meta name="robots" content="all">
<meta name="author" content="">
<meta name="keywords" content="nodejs">
<link rel="canonical" href="http://localhost:4000/2017/12/12/NodeJS-Testing-express-with-supertest-mocha/">
<link rel="alternate" type="application/rss+xml" title="RSS Feed for Sagar R Kothari" href="/feed.xml" />
<!-- Custom CSS -->
<link rel="stylesheet" href="/css/pixyll.css?201810231918" type="text/css">
<!-- Fonts -->
<link href='//fonts.googleapis.com/css?family=Merriweather:900,900italic,300,300italic' rel='stylesheet' type='text/css'>
<link href='//fonts.googleapis.com/css?family=Lato:900,300' rel='stylesheet' type='text/css'>
<!-- MathJax -->
<!-- Verifications -->
<!-- Open Graph -->
<!-- From: https://github.com/mmistakes/hpstr-jekyll-theme/blob/master/_includes/head.html -->
<meta property="og:locale" content="">
<meta property="og:type" content="article">
<meta property="og:title" content="NodeJS - Testing express application with supertest and mocha">
<meta property="og:description" content="Passionate and enthusiastic iOS application developer having 8 years of experience.">
<meta property="og:url" content="http://localhost:4000/2017/12/12/NodeJS-Testing-express-with-supertest-mocha/">
<meta property="og:site_name" content="Sagar R Kothari">
<!-- Twitter Card -->
<meta name="twitter:card" content="summary" />
<meta name="twitter:title" content="NodeJS - Testing express application with supertest and mocha" />
<meta name="twitter:description" content="Passionate and enthusiastic iOS application developer having 8 years of experience." />
<meta name="twitter:url" content="http://localhost:4000/2017/12/12/NodeJS-Testing-express-with-supertest-mocha/" />
<!-- Icons -->
<link rel="apple-touch-icon" sizes="57x57" href="/apple-touch-icon-57x57.png">
<link rel="apple-touch-icon" sizes="114x114" href="/apple-touch-icon-114x114.png">
<link rel="apple-touch-icon" sizes="72x72" href="/apple-touch-icon-72x72.png">
<link rel="apple-touch-icon" sizes="144x144" href="/apple-touch-icon-144x144.png">
<link rel="apple-touch-icon" sizes="60x60" href="/apple-touch-icon-60x60.png">
<link rel="apple-touch-icon" sizes="120x120" href="/apple-touch-icon-120x120.png">
<link rel="apple-touch-icon" sizes="76x76" href="/apple-touch-icon-76x76.png">
<link rel="apple-touch-icon" sizes="152x152" href="/apple-touch-icon-152x152.png">
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon-180x180.png">
<link rel="icon" type="image/png" href="/favicon-192x192.png" sizes="192x192">
<link rel="icon" type="image/png" href="/favicon-160x160.png" sizes="160x160">
<link rel="icon" type="image/png" href="/favicon-96x96.png" sizes="96x96">
<link rel="icon" type="image/png" href="/favicon-16x16.png" sizes="16x16">
<link rel="icon" type="image/png" href="/favicon-32x32.png" sizes="32x32">
</head>
<body class="site">
<div class="site-wrap">
<header class="site-header px2 px-responsive">
<div class="mt2 wrap">
<div class="measure">
<a href="http://localhost:4000" class="site-title">Sagar R Kothari</a>
<nav class="site-nav">
<a href="/Archive/">Archive</a>
<a href="/Categories/">Categories</a>
<a href="/Tags/">Tags</a>
<a href="/about/">About</a>
</nav>
<div class="clearfix"></div>
</div>
</div>
</header>
<div class="post p2 p-responsive wrap" role="main">
<div class="measure">
<div class="post-header mb2">
<h1>NodeJS - Testing express application with supertest and mocha</h1>
<span class="post-meta">December 12, 2017</span><br>
<span class="post-meta small">
1
</span>
</div>
<article class="post-content">
<p>Install dependencies.</p>
<div class="highlighter-rouge"><pre class="highlight"><code>npm init
npm install express --save
npm install mocha --save-dev # this will install mocha as developer dependencies.
npm install supertest --save-dev # this will supertest mocha as developer dependencies.
</code></pre>
</div>
<p>Here is the sample file to be tested.</p>
<div class="language-javascript highlighter-rouge"><pre class="highlight"><code><span class="c1">// server.js</span>
<span class="kr">const</span> <span class="nx">express</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'express'</span><span class="p">);</span>
<span class="kd">var</span> <span class="nx">app</span> <span class="o">=</span> <span class="nx">express</span><span class="p">();</span>
<span class="nx">app</span><span class="p">.</span><span class="nx">get</span><span class="p">(</span><span class="s1">'/'</span><span class="p">,</span> <span class="p">(</span><span class="nx">req</span><span class="p">,</span> <span class="nx">res</span><span class="p">)</span> <span class="o">=></span> <span class="p">{</span>
<span class="nx">res</span><span class="p">.</span><span class="nx">send</span><span class="p">(</span><span class="s1">'Hello World!'</span><span class="p">);</span>
<span class="p">});</span>
<span class="nx">app</span><span class="p">.</span><span class="nx">listen</span><span class="p">(</span><span class="mi">3000</span><span class="p">);</span>
<span class="nx">module</span><span class="p">.</span><span class="nx">exports</span><span class="p">.</span><span class="nx">app</span> <span class="o">=</span> <span class="nx">app</span><span class="p">;</span> <span class="c1">// exporting app so that it can be easily accessible from test classes.</span>
</code></pre>
</div>
<p>Here is the file for sample code having test case.</p>
<div class="language-javascript highlighter-rouge"><pre class="highlight"><code><span class="c1">// server.test.js</span>
<span class="kr">const</span> <span class="nx">request</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'supertest'</span><span class="p">);</span>
<span class="kd">var</span> <span class="nx">app</span> <span class="o">=</span> <span class="nx">require</span><span class="p">(</span><span class="s1">'./server'</span><span class="p">).</span><span class="nx">app</span><span class="p">;</span>
<span class="nx">it</span><span class="p">(</span><span class="s1">'should return hello world response'</span><span class="p">,</span> <span class="p">(</span><span class="nx">done</span><span class="p">)</span> <span class="o">=></span> <span class="p">{</span>
<span class="nx">request</span><span class="p">(</span><span class="nx">app</span><span class="p">)</span>
<span class="p">.</span><span class="nx">get</span><span class="p">(</span><span class="s1">'/'</span><span class="p">)</span>
<span class="p">.</span><span class="nx">expect</span><span class="p">(</span><span class="mi">200</span><span class="p">)</span>
<span class="p">.</span><span class="nx">expect</span><span class="p">(</span><span class="s1">'Hello World!'</span><span class="p">)</span>
<span class="p">.</span><span class="nx">end</span><span class="p">(</span><span class="nx">done</span><span class="p">);</span>
<span class="p">});</span>
</code></pre>
</div>
</article>
<div id="disqus_thread"></div>
<script type="text/javascript">
var disqus_shortname = 'sagarrkothari-com';
var disqus_identifier = '/2017/12/12/NodeJS-Testing-express-with-supertest-mocha';
var disqus_title = "NodeJS - Testing express application with supertest and mocha";
(function() {
var dsq = document.createElement('script'); dsq.type = 'text/javascript'; dsq.async = true;
dsq.src = '//' + disqus_shortname + '.disqus.com/embed.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(dsq);
})();
</script>
<noscript>Please enable JavaScript to view the <a href="http://disqus.com/?ref_noscript">comments powered by Disqus.</a></noscript>
</div>
</div>
</div>
<footer class="center">
<!--
<div class="measure">
<small>
Theme crafted with <3 by <a href="http://johnotander.com">John Otander</a> (<a href="https://twitter.com/4lpine">@4lpine</a>).<br>
</> available on <a href="https://github.com/johnotander/pixyll">GitHub</a>.
</small>
</div>
-->
</footer>
</body>
</html>
| {
"content_hash": "005a0020a5c7097d16f0083804eb6e61",
"timestamp": "",
"source": "github",
"line_count": 419,
"max_line_length": 335,
"avg_line_length": 23.694510739856803,
"alnum_prop": 0.5910556003223207,
"repo_name": "SagarRKothari/SagarRKothari.github.io",
"id": "4a2c3002f2bec3e6e4987876a390d907eb2731a1",
"size": "9928",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_site/2017/12/12/NodeJS-Testing-express-with-supertest-mocha/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "62674"
},
{
"name": "HTML",
"bytes": "2085242"
},
{
"name": "Ruby",
"bytes": "17455"
},
{
"name": "Shell",
"bytes": "161750"
},
{
"name": "Swift",
"bytes": "16506"
}
],
"symlink_target": ""
} |
<?php
namespace Garden\Cli;
/**
* Used to write a formatted table to the console.
*/
class Table {
/// Properties ///
/**
* @var array An array of column widths.
*/
protected $columnWidths;
/**
* @var bool Whether or not to format the console commands.
*/
protected $formatOutput = true;
/**
* @var array An array of the row data.
*/
protected $rows;
/**
* @var array|null A pointer to the current row.
*/
protected $currentRow;
/**
* @var int The maximum width of the table.
*/
public $maxWidth = 80;
/**
* @var int The left padding on each cell.
*/
public $padding = 3;
/**
* @var int The left indent on the table.
*/
public $indent = 2;
/// Methods ///
/**
* Initialize an instance of the {@link Table} class.
*/
public function __construct() {
$this->formatOutput = Cli::guessFormatOutput();
$this->reset();
}
/**
* Backwards compatibility for the **format** property.
*
* @param string $name Must be **format**.
* @return bool|null Returns {@link getFormatOutput()} or null if {@link $name} isn't **format**.
*/
public function __get($name) {
if ($name === 'format') {
trigger_error("Cli->format is deprecated. Use Cli->getFormatOutput() instead.", E_USER_DEPRECATED);
return $this->getFormatOutput();
}
return null;
}
/**
* Backwards compatibility for the **format** property.
*
* @param string $name Must be **format**.
* @param bool $value One of **true** or **false**.
*/
public function __set($name, $value) {
if ($name === 'format') {
trigger_error("Cli->format is deprecated. Use Cli->setFormatOutput() instead.", E_USER_DEPRECATED);
$this->setFormatOutput($value);
}
}
/**
* Get whether or not output should be formatted.
*
* @return boolean Returns **true** if output should be formatted or **false** otherwise.
*/
public function getFormatOutput() {
return $this->formatOutput;
}
/**
* Set whether or not output should be formatted.
*
* @param boolean $formatOutput Whether or not to format output.
*
* @return self
*/
public function setFormatOutput($formatOutput): self {
$this->formatOutput = $formatOutput;
return $this;
}
/**
* Add a cell to the table.
*
* @param string $text The text of the cell.
* @param array $wrap A two element array used to wrap the text in the cell.
* @return $this
*/
protected function addCell($text, $wrap = ['', '']) {
if ($this->currentRow === null) {
$this->row();
}
$i = count($this->currentRow);
$this->columnWidths[$i] = max(strlen($text), Cli::val($i, $this->columnWidths, 0)); // max column width
$this->currentRow[$i] = [$text, $wrap];
return $this;
}
/**
* Adds a cell.
*
* @param string $text The text of the cell.
* @return $this
*/
public function cell($text) {
return $this->addCell($text);
}
/**
* Adds a bold cell.
*
* @param string $text The text of the cell.
* @return $this
*/
public function bold($text) {
return $this->addCell($text, ["\033[1m", "\033[0m"]);
}
/**
* Adds a red cell.
*
* @param string $text The text of the cell.
* @return $this
*/
public function red($text) {
return $this->addCell($text, ["\033[1;31m", "\033[0m"]);
}
/**
* Adds a green cell.
*
* @param string $text The text of the cell.
* @return $this
*/
public function green($text) {
return $this->addCell($text, ["\033[1;32m", "\033[0m"]);
}
/**
* Adds a blue cell.
*
* @param string $text The text of the cell.
* @return $this
*/
public function blue($text) {
return $this->addCell($text, ["\033[1;34m", "\033[0m"]);
}
/**
* Adds a purple cell.
*
* @param string $text The text of the cell.
* @return $this
*/
public function purple($text) {
return $this->addCell($text, ["\033[0;35m", "\033[0m"]);
}
/**
* Reset the table so another one can be written with the same object.
*
* @return void
*/
public function reset(): void {
$this->columnWidths = [];
$this->rows = [];
$this->currentRow = null;
}
/**
* Start a new row.
*
* @return $this
*/
public function row() {
$this->rows[] = [];
$this->currentRow =& $this->rows[count($this->rows) - 1];
return $this;
}
/**
* Writes the final table.
*
* @return void
*/
public function write(): void {
// Determine the width of the last column.
$columnWidths = array_sum($this->columnWidths);
$totalWidth = $this->indent + $columnWidths + $this->padding * (count($this->columnWidths) - 1);
$lastWidth = end($this->columnWidths) + $this->maxWidth - $totalWidth;
$lastWidth = max($lastWidth, 10); // min width of 10
$this->columnWidths[count($this->columnWidths) - 1] = $lastWidth;
// Loop through each row and write it.
foreach ($this->rows as $row) {
$rowLines = [];
$lineCount = 0;
// Split the cells into lines.
foreach ($row as $i => $cell) {
list($text,) = $cell;
$width = $this->columnWidths[$i];
$lines = Cli::breakLines($text, $width, $i < count($this->columnWidths) - 1);
$rowLines[] = $lines;
$lineCount = max($lineCount, count($lines));
}
// Write all of the lines.
for ($i = 0; $i < $lineCount; $i++) {
foreach ($rowLines as $j => $lines) {
$padding = $j === 0 ? $this->indent : $this->padding;
if (isset($lines[$i])) {
if ($this->formatOutput) {
if (isset($row[$j])) {
$wrap = $row[$j][1];
} else {
// if we're out of array, use the latest wraps
$wrap = $row[count($row)-1][1];
}
echo str_repeat(' ', $padding).$wrap[0].$lines[$i].$wrap[1];
} else {
echo str_repeat(' ', $padding).$lines[$i];
}
} elseif ($j < count($this->columnWidths) - 1) {
// This is an empty line. Write the spaces.
echo str_repeat(' ', $padding + $this->columnWidths[$j]);
}
}
echo PHP_EOL;
}
}
}
}
| {
"content_hash": "545a05109d5221ab5dcd8d6b2f4c1f70",
"timestamp": "",
"source": "github",
"line_count": 263,
"max_line_length": 111,
"avg_line_length": 26.950570342205324,
"alnum_prop": 0.4885722347629797,
"repo_name": "vanilla/garden-cli",
"id": "33d7a1edff9e831bc4eb74b5ea6b1562a2a17b3f",
"size": "7202",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Table.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "152390"
}
],
"symlink_target": ""
} |
package com.azure.resourcemanager.elasticsan.generated;
import com.azure.core.util.BinaryData;
import com.azure.resourcemanager.elasticsan.models.SourceCreationData;
import com.azure.resourcemanager.elasticsan.models.VolumeCreateOption;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
public final class SourceCreationDataTests {
@Test
public void testDeserialize() {
SourceCreationData model =
BinaryData
.fromString("{\"createSource\":\"None\",\"sourceUri\":\"lokeyy\"}")
.toObject(SourceCreationData.class);
Assertions.assertEquals(VolumeCreateOption.NONE, model.createSource());
Assertions.assertEquals("lokeyy", model.sourceUri());
}
@Test
public void testSerialize() {
SourceCreationData model =
new SourceCreationData().withCreateSource(VolumeCreateOption.NONE).withSourceUri("lokeyy");
model = BinaryData.fromObject(model).toObject(SourceCreationData.class);
Assertions.assertEquals(VolumeCreateOption.NONE, model.createSource());
Assertions.assertEquals("lokeyy", model.sourceUri());
}
}
| {
"content_hash": "ca5a9a77f7ae5dc36f0e263a7a2e171f",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 103,
"avg_line_length": 41.57142857142857,
"alnum_prop": 0.7190721649484536,
"repo_name": "Azure/azure-sdk-for-java",
"id": "c8fd2a9228ac82f2f8c20f158270c56c5167ef25",
"size": "1321",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/elasticsan/azure-resourcemanager-elasticsan/src/test/java/com/azure/resourcemanager/elasticsan/generated/SourceCreationDataTests.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "8762"
},
{
"name": "Bicep",
"bytes": "15055"
},
{
"name": "CSS",
"bytes": "7676"
},
{
"name": "Dockerfile",
"bytes": "2028"
},
{
"name": "Groovy",
"bytes": "3237482"
},
{
"name": "HTML",
"bytes": "42090"
},
{
"name": "Java",
"bytes": "432409546"
},
{
"name": "JavaScript",
"bytes": "36557"
},
{
"name": "Jupyter Notebook",
"bytes": "95868"
},
{
"name": "PowerShell",
"bytes": "737517"
},
{
"name": "Python",
"bytes": "240542"
},
{
"name": "Scala",
"bytes": "1143898"
},
{
"name": "Shell",
"bytes": "18488"
},
{
"name": "XSLT",
"bytes": "755"
}
],
"symlink_target": ""
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<title>statsmodels.tsa.statespace.structural.UnobservedComponents.set_inversion_method — statsmodels 0.8.0 documentation</title>
<link rel="stylesheet" href="../_static/nature.css" type="text/css" />
<link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
<script type="text/javascript">
var DOCUMENTATION_OPTIONS = {
URL_ROOT: '../',
VERSION: '0.8.0',
COLLAPSE_INDEX: false,
FILE_SUFFIX: '.html',
HAS_SOURCE: true,
SOURCELINK_SUFFIX: '.txt'
};
</script>
<script type="text/javascript" src="../_static/jquery.js"></script>
<script type="text/javascript" src="../_static/underscore.js"></script>
<script type="text/javascript" src="../_static/doctools.js"></script>
<link rel="shortcut icon" href="../_static/statsmodels_hybi_favico.ico"/>
<link rel="author" title="About these documents" href="../about.html" />
<link rel="index" title="Index" href="../genindex.html" />
<link rel="search" title="Search" href="../search.html" />
<link rel="next" title="statsmodels.tsa.statespace.structural.UnobservedComponents.set_smoother_output" href="statsmodels.tsa.statespace.structural.UnobservedComponents.set_smoother_output.html" />
<link rel="prev" title="statsmodels.tsa.statespace.structural.UnobservedComponents.set_filter_method" href="statsmodels.tsa.statespace.structural.UnobservedComponents.set_filter_method.html" />
<link rel="stylesheet" href="../_static/examples.css" type="text/css" />
<link rel="stylesheet" href="../_static/facebox.css" type="text/css" />
<script type="text/javascript" src="../_static/scripts.js">
</script>
<script type="text/javascript" src="../_static/facebox.js">
</script>
</head>
<body role="document">
<div class="headerwrap">
<div class = "header">
<a href = "../index.html">
<img src="../_static/statsmodels_hybi_banner.png" alt="Logo"
style="padding-left: 15px"/></a>
</div>
</div>
<div class="related" role="navigation" aria-label="related navigation">
<h3>Navigation</h3>
<ul>
<li class="right" style="margin-right: 10px">
<a href="../genindex.html" title="General Index"
accesskey="I">index</a></li>
<li class="right" >
<a href="../py-modindex.html" title="Python Module Index"
>modules</a> |</li>
<li class="right" >
<a href="statsmodels.tsa.statespace.structural.UnobservedComponents.set_smoother_output.html" title="statsmodels.tsa.statespace.structural.UnobservedComponents.set_smoother_output"
accesskey="N">next</a> |</li>
<li class="right" >
<a href="statsmodels.tsa.statespace.structural.UnobservedComponents.set_filter_method.html" title="statsmodels.tsa.statespace.structural.UnobservedComponents.set_filter_method"
accesskey="P">previous</a> |</li>
<li><a href ="../install.html">Install</a></li> |
<li><a href="https://groups.google.com/group/pystatsmodels?hl=en">Support</a></li> |
<li><a href="https://github.com/statsmodels/statsmodels/issues">Bugs</a></li> |
<li><a href="../dev/index.html">Develop</a></li> |
<li><a href="../examples/index.html">Examples</a></li> |
<li><a href="../faq.html">FAQ</a></li> |
<li class="nav-item nav-item-1"><a href="../statespace.html" >Time Series Analysis by State Space Methods <code class="docutils literal"><span class="pre">statespace</span></code></a> |</li>
<li class="nav-item nav-item-2"><a href="statsmodels.tsa.statespace.structural.UnobservedComponents.html" accesskey="U">statsmodels.tsa.statespace.structural.UnobservedComponents</a> |</li>
</ul>
</div>
<div class="document">
<div class="documentwrapper">
<div class="bodywrapper">
<div class="body" role="main">
<div class="section" id="statsmodels-tsa-statespace-structural-unobservedcomponents-set-inversion-method">
<h1>statsmodels.tsa.statespace.structural.UnobservedComponents.set_inversion_method<a class="headerlink" href="#statsmodels-tsa-statespace-structural-unobservedcomponents-set-inversion-method" title="Permalink to this headline">¶</a></h1>
<dl class="method">
<dt id="statsmodels.tsa.statespace.structural.UnobservedComponents.set_inversion_method">
<code class="descclassname">UnobservedComponents.</code><code class="descname">set_inversion_method</code><span class="sig-paren">(</span><em>inversion_method=None</em>, <em>**kwargs</em><span class="sig-paren">)</span><a class="headerlink" href="#statsmodels.tsa.statespace.structural.UnobservedComponents.set_inversion_method" title="Permalink to this definition">¶</a></dt>
<dd><p>Set the inversion method</p>
<p>The Kalman filter may contain one matrix inversion: that of the
forecast error covariance matrix. The inversion method controls how and
if that inverse is performed.</p>
<table class="docutils field-list" frame="void" rules="none">
<col class="field-name" />
<col class="field-body" />
<tbody valign="top">
<tr class="field-odd field"><th class="field-name">Parameters:</th><td class="field-body"><p class="first"><strong>inversion_method</strong> : integer, optional</p>
<blockquote>
<div><p>Bitmask value to set the inversion method to. See notes for
details.</p>
</div></blockquote>
<p><strong>**kwargs</strong></p>
<blockquote class="last">
<div><p>Keyword arguments may be used to influence the inversion method by
setting individual boolean flags. See notes for details.</p>
</div></blockquote>
</td>
</tr>
</tbody>
</table>
<p class="rubric">Notes</p>
<p>This method is rarely used. See the corresponding function in the
<cite>KalmanFilter</cite> class for details.</p>
</dd></dl>
</div>
</div>
</div>
</div>
<div class="sphinxsidebar" role="navigation" aria-label="main navigation">
<div class="sphinxsidebarwrapper">
<h4>Previous topic</h4>
<p class="topless"><a href="statsmodels.tsa.statespace.structural.UnobservedComponents.set_filter_method.html"
title="previous chapter">statsmodels.tsa.statespace.structural.UnobservedComponents.set_filter_method</a></p>
<h4>Next topic</h4>
<p class="topless"><a href="statsmodels.tsa.statespace.structural.UnobservedComponents.set_smoother_output.html"
title="next chapter">statsmodels.tsa.statespace.structural.UnobservedComponents.set_smoother_output</a></p>
<div role="note" aria-label="source link">
<h3>This Page</h3>
<ul class="this-page-menu">
<li><a href="../_sources/generated/statsmodels.tsa.statespace.structural.UnobservedComponents.set_inversion_method.rst.txt"
rel="nofollow">Show Source</a></li>
</ul>
</div>
<div id="searchbox" style="display: none" role="search">
<h3>Quick search</h3>
<form class="search" action="../search.html" method="get">
<div><input type="text" name="q" /></div>
<div><input type="submit" value="Go" /></div>
<input type="hidden" name="check_keywords" value="yes" />
<input type="hidden" name="area" value="default" />
</form>
</div>
<script type="text/javascript">$('#searchbox').show(0);</script>
</div>
</div>
<div class="clearer"></div>
</div>
<div class="footer" role="contentinfo">
© Copyright 2009-2017, Josef Perktold, Skipper Seabold, Jonathan Taylor, statsmodels-developers.
Created using <a href="http://sphinx-doc.org/">Sphinx</a> 1.5.3.
</div>
</body>
</html> | {
"content_hash": "db951f7653511e39ef3588dab52f11ed",
"timestamp": "",
"source": "github",
"line_count": 163,
"max_line_length": 376,
"avg_line_length": 48.34969325153374,
"alnum_prop": 0.6704732901916001,
"repo_name": "statsmodels/statsmodels.github.io",
"id": "898efcd99966612860630c24927b683efb9f999a",
"size": "7884",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "0.8.0/generated/statsmodels.tsa.statespace.structural.UnobservedComponents.set_inversion_method.html",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
import React, { useMemo, useCallback } from 'react'
import { Link } from '../../../lib/router'
import styled from '../../../lib/styled/styled'
import {
borderBottom,
uiTextColor,
secondaryBackgroundColor,
inputStyle
} from '../../../lib/styled/styleFunctions'
import cc from 'classcat'
import { setTransferrableNoteData } from '../../../lib/dnd'
import HighlightText from '../../atoms/HighlightText'
import { formatDistanceToNow } from 'date-fns'
import { scaleAndTransformFromLeft } from '../../../lib/styled'
import { PopulatedNoteDoc } from '../../../lib/db/types'
import { useContextMenu, MenuTypes } from '../../../lib/contextMenu'
import { useDb } from '../../../lib/db'
import { useTranslation } from 'react-i18next'
export const StyledNoteListItem = styled.div`
margin: 0;
border-left: 2px solid transparent;
${uiTextColor}
&.active,
&:active,
&:focus,
&:hover {
${secondaryBackgroundColor}
}
&.active {
border-left: 2px solid ${({ theme }) => theme.primaryColor};
}
${borderBottom}
transition: 200ms background-color;
&.new {
position: relative;
left: -200px;
transform: scaleY(0.3);
transform-origin: top left;
animation: ${scaleAndTransformFromLeft} 0.2s linear forwards;
}
a {
text-decoration: none;
}
.container {
padding: 10px 12px;
}
.title {
font-size: 18px;
margin-bottom: 6px;
font-weight: 500;
}
.date {
font-size: 10px;
font-style: italic;
margin-bottom: 6px;
}
.preview {
font-size: 13px;
margin-bottom: 8px;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 2;
overflow: hidden;
}
.tag-area,
.title {
width: 100%;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
}
.tag {
font-size: 12px;
${inputStyle}
margin-right: 5px;
padding: 2px 8px;
border-radius: 13px;
display: inline-block;
}
`
type NoteItemProps = {
note: PopulatedNoteDoc
active: boolean
recentlyCreated?: boolean
storageId?: string
search: string
basePathname: string
focusList: () => void
}
export default ({
storageId,
note,
active,
basePathname,
search,
recentlyCreated
}: NoteItemProps) => {
const href = `${basePathname}/${note._id}`
const { popup } = useContextMenu()
const { createNote, trashNote, updateNote } = useDb()
const { t } = useTranslation()
const contextMenuCallback = useCallback(
(event: React.MouseEvent) => {
event.stopPropagation()
event.preventDefault()
popup(event, [
{
type: MenuTypes.Normal,
label: t('note.duplicate'),
onClick: async () => {
createNote(note.storageId, {
title: note.title,
content: note.content,
folderPathname: note.folderPathname,
tags: note.tags,
bookmarked: false,
data: note.data
})
}
},
{
type: MenuTypes.Normal,
label: t('note.delete'),
onClick: async () => {
trashNote(note.storageId, note._id)
}
},
{
type: MenuTypes.Normal,
label: note.bookmarked ? t('bookmark.remove') : t('bookmark.add'),
onClick: async () => {
note.bookmarked = !note.bookmarked
updateNote(note.storageId, note._id, note)
}
}
])
},
[popup, createNote, note, updateNote, trashNote]
)
const contentPreview = useMemo(() => {
const trimmedContent = note.content.trim()
const searchFirstIndex = trimmedContent
.toLowerCase()
.indexOf(search.toLowerCase())
if (search !== '' && searchFirstIndex !== -1) {
const contentToHighlight = trimmedContent
.substring(searchFirstIndex)
.split('\n')
.shift()
return contentToHighlight == null ? (
t('note.empty')
) : (
<HighlightText text={contentToHighlight} search={search} />
)
}
return trimmedContent.split('\n').shift() || t('note.empty')
}, [note.content, search])
const handleDragStart = useCallback(
(event: React.DragEvent) => {
setTransferrableNoteData(event, note.storageId, note)
},
[note, storageId]
)
return (
<StyledNoteListItem
onContextMenu={contextMenuCallback}
className={cc([active && 'active', recentlyCreated && 'new'])}
onDragStart={handleDragStart}
draggable={true}
>
<Link href={href}>
<div className='container'>
<div className='title'>
<HighlightText text={note.title} search={search} />
</div>
{note.title.length === 0 && (
<div className='title'>{t('note.noTitle')}</div>
)}
<div className='date'>
{formatDistanceToNow(new Date(note.updatedAt))} {t('note.date')}
</div>
<div className='preview'>{contentPreview}</div>
{note.tags.length > 0 && (
<div className='tag-area'>
{note.tags.map(tag => (
<span className='tag' key={tag}>
<HighlightText text={tag} search={search} />
</span>
))}
</div>
)}
</div>
</Link>
</StyledNoteListItem>
)
}
| {
"content_hash": "b39657c907928e6677418e5d096d9068",
"timestamp": "",
"source": "github",
"line_count": 215,
"max_line_length": 76,
"avg_line_length": 24.925581395348836,
"alnum_prop": 0.5734278783355103,
"repo_name": "Sarah-Seo/Inpad",
"id": "7748820a3cd8981b211d849e3e86ebb346988938",
"size": "5359",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/components/NotePage/NoteList/NoteItem.tsx",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "7335"
},
{
"name": "JavaScript",
"bytes": "167448"
}
],
"symlink_target": ""
} |
import React from 'react';
import { Route, IndexRoute } from 'react-router';
import Layout from './components/Layout';
// Main Pages
import IndexPage from './components/pages/IndexPage';
import LoginPage from './components/pages/LoginPage';
import RegistrationPage from './components/pages/RegistrationPage';
import NotFoundPage from './components/pages/NotFoundPage';
import AboutPage from './components/pages/AboutPage';
import ArchivePage from './components/pages/ArchivePage';
import CinematographyListPage from './components/pages/CinematographyListPage';
import CountryPage from './components/pages/CountryPage';
import DiscographyListPage from './components/pages/DiscographyListPage';
import DiscographyPage from './components/pages/DiscographyPage';
import DownloadListPage from './components/pages/DownloadListPage';
import CountryListPage from './components/pages/CountryListPage';
import PhotographyListPage from './components/pages/PhotographyListPage';
import PlaylistPage from './components/pages/PlaylistPage';
import SearchPage from './components/pages/SearchPage';
import SiteMapPage from './components/pages/SiteMapPage';
// Admin Pages
import EditAlbumReleasePage from './components/pages/admin/EditAlbumReleasePage';
import EditBioSectionPage from './components/pages/admin/EditBioSectionPage';
import EditContactListPage from './components/pages/admin/EditContactListPage';
import EditDigitalDownloadPage from './components/pages/admin/EditDigitalDownloadPage';
import EditEmbeddableMediaPage from './components/pages/admin/EditEmbeddableMediaPage';
import EditMediaMentionPage from './components/pages/admin/EditMediaMentionPage';
import EditMerchItemPage from './components/pages/admin/EditMerchItemPage';
import EditOriginPage from './components/pages/admin/EditOriginPage';
import EditSocialLinkListPage from './components/pages/admin/EditSocialLinkListPage';
import EditSongPage from './components/pages/admin/EditSongPage';
// Artist Pages
import ArtistCinematographyPage from './components/pages/artist/ArtistCinematographyPage';
import ArtistDigitalDownloadsPage from './components/pages/artist/ArtistDigitalDownloadsPage';
import ArtistListPage from './components/pages/artist/ArtistListPage';
import ArtistPage from './components/pages/artist/ArtistPage';
import ArtistPhotographyPage from './components/pages/artist/ArtistPhotographyPage';
// Profile Pages
import ProfilePage from './components/pages/profile/ProfilePage';
const routes = (
<Route path="/" component={Layout}>
<IndexRoute component={IndexPage}/>
<Route path="about" component={AboutPage}/>
<Route path="admin">
<IndexRoute component={ProfilePage}/>
<Route path="bio-section/create" component={EditBioSectionPage}/>
<Route path="bio-section/edit/:bioSectionId" component={EditBioSectionPage}/>
<Route path="contact-list/create" component={EditContactListPage}/>
<Route path="contact-list/edit/:contactListId" component={EditContactListPage}/>
<Route path="digital-download/create" component={EditDigitalDownloadPage}/>
<Route path="digital-download/edit/:digitalDownloadId" component={EditDigitalDownloadPage}/>
<Route path="discography/create" component={EditAlbumReleasePage}/>
<Route path="discography/edit/:discographyParam" component={EditAlbumReleasePage}/>
<Route path="embeddable-media/create" component={EditEmbeddableMediaPage}/>
<Route path="embeddable-media/edit/:embeddableMediaId" component={EditEmbeddableMediaPage}/>
<Route path="media-mention/create" component={EditMediaMentionPage}/>
<Route path="media-mention/edit/:mediaMentionId" component={EditMediaMentionPage}/>
<Route path="merch/create" component={EditMerchItemPage}/>
<Route path="merch/edit/:merchId" component={EditMerchItemPage}/>
<Route path="origin/create" component={EditOriginPage}/>
<Route path="origin/edit/:originId" component={EditOriginPage}/>
<Route path="social-link-list/create" component={EditSocialLinkListPage}/>
<Route path="social-link-list/edit/:socialLinkListId" component={EditSocialLinkListPage}/>
<Route path="song/create" component={EditSongPage}/>
<Route path="song/edit/:songId" component={EditSongPage}/>
</Route>
<Route path="archive" component={ArchivePage}/>
<Route path="artists">
<IndexRoute component={ArtistListPage}/>
<Route path=":artistParam" component={ArtistPage}/>
<Route path=":artistParam/cinematography" component={ArtistCinematographyPage}/>
<Route path=":artistParam/discography/:discographyParam" component={DiscographyPage}/>
<Route path=":artistParam/digital-downloads" component={ArtistDigitalDownloadsPage}/>
<Route path=":artistParam/photography" component={ArtistPhotographyPage}/>
</Route>
<Route path="cinematography" component={CinematographyListPage}/>
<Route path="discography" component={DiscographyListPage}/>
<Route path="digital-downloads" component={DownloadListPage}/>
<Route path="login" component={LoginPage}/>
<Route path="countries">
<IndexRoute component={CountryListPage}/>
<Route path=":countryCode" component={CountryPage}/>
</Route>
<Route path="photography" component={PhotographyListPage}/>
<Route path="playlist" component={PlaylistPage}/>
<Route path="profile">
<IndexRoute component={ProfilePage}/>
</Route>
<Route path="register" component={RegistrationPage}/>
<Route path="search" component={SearchPage}/>
<Route path="site-map" component={SiteMapPage}/>
<Route path="*" component={NotFoundPage}/>
</Route>
);
export default routes;
| {
"content_hash": "1f0694ae6010bf1ff78bf3152bd7709e",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 95,
"avg_line_length": 54.75247524752475,
"alnum_prop": 0.7833634719710669,
"repo_name": "zdizzle6717/tree-machine-records",
"id": "4532f846c3d9e9ed7b93d3cf261c12596582f335",
"size": "5530",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/routes.js",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "132967"
},
{
"name": "HTML",
"bytes": "47581"
},
{
"name": "JavaScript",
"bytes": "431097"
},
{
"name": "Python",
"bytes": "1890"
},
{
"name": "SQLPL",
"bytes": "124610"
}
],
"symlink_target": ""
} |
require 'email_spec' # add this line if you use spork
require 'email_spec/cucumber'
require 'capybara-screenshot/cucumber'
require "cgi"
Capybara.javascript_driver = :webkit
Capybara.save_and_open_page_path = 'tmp/screenshots'
begin
require 'database_cleaner'
require 'database_cleaner/cucumber'
DatabaseCleaner.strategy = :truncation
rescue NameError
raise "You need to add database_cleaner to your Gemfile (in the :test group) if you wish to use it."
end
module Helper
include ApplicationHelper
include FactoryGirl::Syntax::Methods
include ActionView::Helpers::SanitizeHelper
def html_unescape(text)
CGI.unescapeHTML(strip_tags(text))
end
def t(*args)
I18n.t(*args)
end
# Fill in a tinyMCE textarea with content
#
# @param id [String]
# @param opts [Hash]
# @reference: https://gist.github.com/eoinkelly/69be6c27beb0106aa555
#
def fill_in_tinymce(id, opts)
return unless opts[:with].present?
# wait until the TinyMCE editor instance is ready
sleep 0.5 until page.evaluate_script("tinyMCE.get('#{id}') !== null")
page.execute_script <<-SCRIPT
tinyMCE.get('#{id}').setContent('#{opts[:with]}');
SCRIPT
end
# Fill in a CKEditor textarea with content
#
# @param locator [String]
# @param opts [Hash]
# @reference: http://stackoverflow.com/a/10957870/6615480
#
def fill_in_ckeditor(locator, opts)
content = opts.fetch(:with).to_json # convert to a safe javascript string
using_wait_time 6 do
page.execute_script <<-SCRIPT
CKEDITOR.instances['#{locator}'].setData(#{content});
$('textarea##{locator}').text(#{content});
SCRIPT
end
end
end
World(Helper)
Transform(/^(-?\d+)$/) do |num|
num.to_i
end
Before do
DatabaseCleaner.start
end
# Before('@omniauth_test') do
# OmniAuth.config.test_mode = true
# Capybara.default_host = 'http://example.com'
# OmniAuth.config.add_mock(:twitter, {
# :uid => '12345',
# :info => {
# :name => 'twitteruser',
# }
# })
# OmniAuth.config.add_mock(:facebook, {
# :uid => '12345',
# :info => {
# :name => 'facebookuser'
# }
# })
# end
# After('@omniauth_test') do
# OmniAuth.config.test_mode = false
# end
After do |scenario|
DatabaseCleaner.clean
end
| {
"content_hash": "ae98130d9d70fca6c3e1ee380fc894c4",
"timestamp": "",
"source": "github",
"line_count": 99,
"max_line_length": 102,
"avg_line_length": 23.050505050505052,
"alnum_prop": 0.6603856266432954,
"repo_name": "greganswer/qwiznotes",
"id": "7830babe35dfd309eb82af113b84e9a124aa31e8",
"size": "2282",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "features/support/cucumber_world.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "13215"
},
{
"name": "CoffeeScript",
"bytes": "4074"
},
{
"name": "Gherkin",
"bytes": "8414"
},
{
"name": "HTML",
"bytes": "205719"
},
{
"name": "JavaScript",
"bytes": "2148"
},
{
"name": "Ruby",
"bytes": "222818"
}
],
"symlink_target": ""
} |
package org.springframework.boot.logging.logback;
import java.io.File;
import java.util.EnumSet;
import java.util.List;
import java.util.logging.Handler;
import java.util.logging.LogManager;
import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.Logger;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.spi.LoggerContextListener;
import ch.qos.logback.core.ConsoleAppender;
import ch.qos.logback.core.CoreConstants;
import ch.qos.logback.core.rolling.RollingFileAppender;
import ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.ILoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;
import org.slf4j.impl.StaticLoggerBinder;
import org.springframework.boot.logging.AbstractLoggingSystemTests;
import org.springframework.boot.logging.LogFile;
import org.springframework.boot.logging.LogLevel;
import org.springframework.boot.logging.LoggerConfiguration;
import org.springframework.boot.logging.LoggingInitializationContext;
import org.springframework.boot.logging.LoggingSystem;
import org.springframework.boot.logging.LoggingSystemProperties;
import org.springframework.boot.testsupport.assertj.Matched;
import org.springframework.boot.testsupport.rule.OutputCapture;
import org.springframework.boot.testsupport.runner.classpath.ClassPathExclusions;
import org.springframework.boot.testsupport.runner.classpath.ModifiedClassPathRunner;
import org.springframework.mock.env.MockEnvironment;
import org.springframework.test.util.ReflectionTestUtils;
import org.springframework.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatIllegalStateException;
import static org.assertj.core.api.Assertions.contentOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.not;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Tests for {@link LogbackLoggingSystem}.
*
* @author Dave Syer
* @author Phillip Webb
* @author Andy Wilkinson
* @author Ben Hale
* @author Madhura Bhave
* @author Vedran Pavic
* @author Robert Thornton
*/
@RunWith(ModifiedClassPathRunner.class)
@ClassPathExclusions("log4j-*.jar")
public class LogbackLoggingSystemTests extends AbstractLoggingSystemTests {
@Rule
public OutputCapture output = new OutputCapture();
private final LogbackLoggingSystem loggingSystem = new LogbackLoggingSystem(
getClass().getClassLoader());
private Logger logger;
private LoggingInitializationContext initializationContext;
@Before
public void setup() {
this.loggingSystem.cleanUp();
this.logger = ((LoggerContext) StaticLoggerBinder.getSingleton()
.getLoggerFactory()).getLogger(getClass());
MockEnvironment environment = new MockEnvironment();
this.initializationContext = new LoggingInitializationContext(environment);
}
@Override
@After
public void clear() {
super.clear();
this.loggingSystem.cleanUp();
((LoggerContext) StaticLoggerBinder.getSingleton().getLoggerFactory()).stop();
}
@Test
public void noFile() {
this.loggingSystem.beforeInitialize();
this.logger.info("Hidden");
this.loggingSystem.initialize(this.initializationContext, null, null);
this.logger.info("Hello world");
String output = this.output.toString().trim();
assertThat(output).contains("Hello world").doesNotContain("Hidden");
assertThat(getLineWithText(output, "Hello world")).contains("INFO");
assertThat(new File(tmpDir() + "/spring.log").exists()).isFalse();
}
@Test
public void withFile() {
this.loggingSystem.beforeInitialize();
this.logger.info("Hidden");
this.loggingSystem.initialize(this.initializationContext, null,
getLogFile(null, tmpDir()));
this.logger.info("Hello world");
String output = this.output.toString().trim();
File file = new File(tmpDir() + "/spring.log");
assertThat(output).contains("Hello world").doesNotContain("Hidden");
assertThat(getLineWithText(output, "Hello world")).contains("INFO");
assertThat(file.exists()).isTrue();
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(ReflectionTestUtils.getField(getRollingPolicy(), "maxFileSize")
.toString()).isEqualTo("10 MB");
assertThat(getRollingPolicy().getMaxHistory())
.isEqualTo(CoreConstants.UNBOUND_HISTORY);
}
@Test
public void defaultConfigConfiguresAConsoleAppender() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
assertThat(getConsoleAppender()).isNotNull();
}
@Test
public void testNonDefaultConfigLocation() {
int existingOutputLength = this.output.toString().length();
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext,
"classpath:logback-nondefault.xml",
getLogFile(tmpDir() + "/tmp.log", null));
this.logger.info("Hello world");
String output = this.output.toString().trim();
assertThat(output.substring(existingOutputLength)).doesNotContain("DEBUG");
assertThat(output).contains("Hello world").contains(tmpDir() + "/tmp.log");
assertThat(output).endsWith("BOOTBOOT");
assertThat(new File(tmpDir() + "/tmp.log").exists()).isFalse();
}
@Test
public void testLogbackSpecificSystemProperty() {
System.setProperty("logback.configurationFile", "/foo/my-file.xml");
try {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
String output = this.output.toString().trim();
assertThat(output).contains("Ignoring 'logback.configurationFile' "
+ "system property. Please use 'logging.config' instead.");
}
finally {
System.clearProperty("logback.configurationFile");
}
}
@Test
public void testNonexistentConfigLocation() {
this.loggingSystem.beforeInitialize();
assertThatIllegalStateException().isThrownBy(() -> this.loggingSystem.initialize(
this.initializationContext, "classpath:logback-nonexistent.xml", null));
}
@Test
public void getSupportedLevels() {
assertThat(this.loggingSystem.getSupportedLogLevels())
.isEqualTo(EnumSet.of(LogLevel.TRACE, LogLevel.DEBUG, LogLevel.INFO,
LogLevel.WARN, LogLevel.ERROR, LogLevel.OFF));
}
@Test
public void setLevel() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.logger.debug("Hello");
this.loggingSystem.setLogLevel("org.springframework.boot", LogLevel.DEBUG);
this.logger.debug("Hello");
assertThat(StringUtils.countOccurrencesOf(this.output.toString(), "Hello"))
.isEqualTo(1);
}
@Test
public void setLevelToNull() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.logger.debug("Hello");
this.loggingSystem.setLogLevel("org.springframework.boot", LogLevel.DEBUG);
this.logger.debug("Hello");
this.loggingSystem.setLogLevel("org.springframework.boot", null);
this.logger.debug("Hello");
assertThat(StringUtils.countOccurrencesOf(this.output.toString(), "Hello"))
.isEqualTo(1);
}
@Test
public void getLoggingConfigurations() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.loggingSystem.setLogLevel(getClass().getName(), LogLevel.DEBUG);
List<LoggerConfiguration> configurations = this.loggingSystem
.getLoggerConfigurations();
assertThat(configurations).isNotEmpty();
assertThat(configurations.get(0).getName())
.isEqualTo(LoggingSystem.ROOT_LOGGER_NAME);
}
@Test
public void getLoggingConfiguration() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.loggingSystem.setLogLevel(getClass().getName(), LogLevel.DEBUG);
LoggerConfiguration configuration = this.loggingSystem
.getLoggerConfiguration(getClass().getName());
assertThat(configuration).isEqualTo(new LoggerConfiguration(getClass().getName(),
LogLevel.DEBUG, LogLevel.DEBUG));
}
@Test
public void getLoggingConfigurationForALL() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
Logger logger = (Logger) StaticLoggerBinder.getSingleton().getLoggerFactory()
.getLogger(getClass().getName());
logger.setLevel(Level.ALL);
LoggerConfiguration configuration = this.loggingSystem
.getLoggerConfiguration(getClass().getName());
assertThat(configuration).isEqualTo(new LoggerConfiguration(getClass().getName(),
LogLevel.TRACE, LogLevel.TRACE));
}
@Test
public void systemLevelTraceShouldReturnNativeLevelTraceNotAll() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.loggingSystem.setLogLevel(getClass().getName(), LogLevel.TRACE);
Logger logger = (Logger) StaticLoggerBinder.getSingleton().getLoggerFactory()
.getLogger(getClass().getName());
assertThat(logger.getLevel()).isEqualTo(Level.TRACE);
}
@Test
public void loggingThatUsesJulIsCaptured() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
java.util.logging.Logger julLogger = java.util.logging.Logger
.getLogger(getClass().getName());
julLogger.info("Hello world");
String output = this.output.toString().trim();
assertThat(output).contains("Hello world");
}
@Test
public void loggingLevelIsPropagatedToJul() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.loggingSystem.setLogLevel(getClass().getName(), LogLevel.DEBUG);
java.util.logging.Logger julLogger = java.util.logging.Logger
.getLogger(getClass().getName());
julLogger.fine("Hello debug world");
String output = this.output.toString().trim();
assertThat(output).contains("Hello debug world");
}
@Test
public void bridgeHandlerLifecycle() {
assertThat(bridgeHandlerInstalled()).isFalse();
this.loggingSystem.beforeInitialize();
assertThat(bridgeHandlerInstalled()).isTrue();
this.loggingSystem.cleanUp();
assertThat(bridgeHandlerInstalled()).isFalse();
}
@Test
public void standardConfigLocations() {
String[] locations = this.loggingSystem.getStandardConfigLocations();
assertThat(locations).containsExactly("logback-test.groovy", "logback-test.xml",
"logback.groovy", "logback.xml");
}
@Test
public void springConfigLocations() {
String[] locations = getSpringConfigLocations(this.loggingSystem);
assertThat(locations).containsExactly("logback-test-spring.groovy",
"logback-test-spring.xml", "logback-spring.groovy", "logback-spring.xml");
}
private boolean bridgeHandlerInstalled() {
java.util.logging.Logger rootLogger = LogManager.getLogManager().getLogger("");
Handler[] handlers = rootLogger.getHandlers();
for (Handler handler : handlers) {
if (handler instanceof SLF4JBridgeHandler) {
return true;
}
}
return false;
}
@Test
public void testConsolePatternProperty() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.pattern.console", "%logger %msg");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
this.loggingSystem.initialize(loggingInitializationContext, null, null);
this.logger.info("Hello world");
String output = this.output.toString().trim();
assertThat(getLineWithText(output, "Hello world")).doesNotContain("INFO");
}
@Test
public void testLevelPatternProperty() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.pattern.level", "X%clr(%p)X");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
this.loggingSystem.initialize(loggingInitializationContext, null, null);
this.logger.info("Hello world");
String output = this.output.toString().trim();
assertThat(getLineWithText(output, "Hello world")).contains("XINFOX");
}
@Test
public void testFilePatternProperty() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.pattern.file", "%logger %msg");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext, null, logFile);
this.logger.info("Hello world");
String output = this.output.toString().trim();
assertThat(getLineWithText(output, "Hello world")).contains("INFO");
assertThat(getLineWithText(file, "Hello world")).doesNotContain("INFO");
}
@Test
public void testCleanHistoryOnStartProperty() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.clean-history-on-start", "true");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext, null, logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(getRollingPolicy().isCleanHistoryOnStart()).isTrue();
}
@Test
public void testCleanHistoryOnStartPropertyWithXmlConfiguration() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.clean-history-on-start", "true");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext,
"classpath:logback-include-base.xml", logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(getRollingPolicy().isCleanHistoryOnStart()).isTrue();
}
@Test
public void testMaxFileSizePropertyWithLogbackFileSize() {
testMaxFileSizeProperty("100 MB", "100 MB");
}
@Test
public void testMaxFileSizePropertyWithDataSize() {
testMaxFileSizeProperty("15MB", "15 MB");
}
@Test
public void testMaxFileSizePropertyWithBytesValue() {
testMaxFileSizeProperty(String.valueOf(10 * 1024 * 1024), "10 MB");
}
private void testMaxFileSizeProperty(String sizeValue, String expectedFileSize) {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.max-size", sizeValue);
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext, null, logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(ReflectionTestUtils.getField(getRollingPolicy(), "maxFileSize")
.toString()).isEqualTo(expectedFileSize);
}
@Test
public void testMaxFileSizePropertyWithXmlConfiguration() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.max-size", "100MB");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext,
"classpath:logback-include-base.xml", logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(ReflectionTestUtils.getField(getRollingPolicy(), "maxFileSize")
.toString()).isEqualTo("100 MB");
}
@Test
public void testMaxHistoryProperty() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.max-history", "30");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext, null, logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(getRollingPolicy().getMaxHistory()).isEqualTo(30);
}
@Test
public void testMaxHistoryPropertyWithXmlConfiguration() throws Exception {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.max-history", "30");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext,
"classpath:logback-include-base.xml", logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(getRollingPolicy().getMaxHistory()).isEqualTo(30);
}
@Test
public void testTotalSizeCapPropertyWithLogbackFileSize() {
testTotalSizeCapProperty("101 MB", "101 MB");
}
@Test
public void testTotalSizeCapPropertyWithDataSize() {
testTotalSizeCapProperty("10MB", "10 MB");
}
@Test
public void testTotalSizeCapPropertyWithBytesValue() {
testTotalSizeCapProperty(String.valueOf(10 * 1024 * 1024), "10 MB");
}
private void testTotalSizeCapProperty(String sizeValue, String expectedFileSize) {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.total-size-cap", sizeValue);
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext, null, logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(ReflectionTestUtils.getField(getRollingPolicy(), "totalSizeCap")
.toString()).isEqualTo(expectedFileSize);
}
@Test
public void testTotalSizeCapPropertyWithXmlConfiguration() {
String expectedSize = "101 MB";
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.file.total-size-cap", expectedSize);
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
File file = new File(tmpDir(), "logback-test.log");
LogFile logFile = getLogFile(file.getPath(), null);
this.loggingSystem.initialize(loggingInitializationContext,
"classpath:logback-include-base.xml", logFile);
this.logger.info("Hello world");
assertThat(getLineWithText(file, "Hello world")).contains("INFO");
assertThat(ReflectionTestUtils.getField(getRollingPolicy(), "totalSizeCap")
.toString()).isEqualTo(expectedSize);
}
@Test
public void exceptionsIncludeClassPackaging() {
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null,
getLogFile(null, tmpDir()));
Matcher<String> expectedOutput = containsString("[junit-");
this.output.expect(expectedOutput);
this.logger.warn("Expected exception", new RuntimeException("Expected"));
String fileContents = contentOf(new File(tmpDir() + "/spring.log"));
assertThat(fileContents).is(Matched.by(expectedOutput));
}
@Test
public void customExceptionConversionWord() {
System.setProperty(LoggingSystemProperties.EXCEPTION_CONVERSION_WORD, "%ex");
try {
this.loggingSystem.beforeInitialize();
this.logger.info("Hidden");
this.loggingSystem.initialize(this.initializationContext, null,
getLogFile(null, tmpDir()));
Matcher<String> expectedOutput = Matchers.allOf(
containsString("java.lang.RuntimeException: Expected"),
not(containsString("Wrapped by:")));
this.output.expect(expectedOutput);
this.logger.warn("Expected exception",
new RuntimeException("Expected", new RuntimeException("Cause")));
String fileContents = contentOf(new File(tmpDir() + "/spring.log"));
assertThat(fileContents).is(Matched.by(expectedOutput));
}
finally {
System.clearProperty(LoggingSystemProperties.EXCEPTION_CONVERSION_WORD);
}
}
@Test
public void initializeShouldSetSystemProperty() {
// gh-5491
this.loggingSystem.beforeInitialize();
this.logger.info("Hidden");
LogFile logFile = getLogFile(tmpDir() + "/example.log", null, false);
this.loggingSystem.initialize(this.initializationContext,
"classpath:logback-nondefault.xml", logFile);
assertThat(System.getProperty(LoggingSystemProperties.LOG_FILE))
.endsWith("example.log");
}
@Test
public void initializationIsOnlyPerformedOnceUntilCleanedUp() {
LoggerContext loggerContext = (LoggerContext) StaticLoggerBinder.getSingleton()
.getLoggerFactory();
LoggerContextListener listener = mock(LoggerContextListener.class);
loggerContext.addListener(listener);
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
verify(listener, times(1)).onReset(loggerContext);
this.loggingSystem.cleanUp();
loggerContext.addListener(listener);
this.loggingSystem.beforeInitialize();
this.loggingSystem.initialize(this.initializationContext, null, null);
verify(listener, times(2)).onReset(loggerContext);
}
@Test
public void testDateformatPatternProperty() {
MockEnvironment environment = new MockEnvironment();
environment.setProperty("logging.pattern.dateformat",
"yyyy-MM-dd'T'hh:mm:ss.SSSZ");
LoggingInitializationContext loggingInitializationContext = new LoggingInitializationContext(
environment);
this.loggingSystem.initialize(loggingInitializationContext, null, null);
this.logger.info("Hello world");
String output = this.output.toString().trim();
assertThat(getLineWithText(output, "Hello world"))
.containsPattern("\\d{4}-\\d{2}\\-\\d{2}T\\d{2}:\\d{2}:\\d{2}");
}
private static Logger getRootLogger() {
ILoggerFactory factory = StaticLoggerBinder.getSingleton().getLoggerFactory();
LoggerContext context = (LoggerContext) factory;
return context.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME);
}
private static ConsoleAppender<?> getConsoleAppender() {
return (ConsoleAppender<?>) getRootLogger().getAppender("CONSOLE");
}
private static RollingFileAppender<?> getFileAppender() {
return (RollingFileAppender<?>) getRootLogger().getAppender("FILE");
}
private static SizeAndTimeBasedRollingPolicy<?> getRollingPolicy() {
return (SizeAndTimeBasedRollingPolicy<?>) getFileAppender().getRollingPolicy();
}
private String getLineWithText(File file, String outputSearch) {
return getLineWithText(contentOf(file), outputSearch);
}
private String getLineWithText(String output, String outputSearch) {
String[] lines = output.split("\\r?\\n");
for (String line : lines) {
if (line.contains(outputSearch)) {
return line;
}
}
return null;
}
}
| {
"content_hash": "f68b08d2b58632f5024f81f1afb276d8",
"timestamp": "",
"source": "github",
"line_count": 603,
"max_line_length": 95,
"avg_line_length": 39.24875621890547,
"alnum_prop": 0.7700595766256814,
"repo_name": "kdvolder/spring-boot",
"id": "d5de6d834ca32b4b3a4c72a416eb2ce55d8af2d4",
"size": "24288",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "spring-boot-project/spring-boot/src/test/java/org/springframework/boot/logging/logback/LogbackLoggingSystemTests.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1948"
},
{
"name": "CSS",
"bytes": "448"
},
{
"name": "Dockerfile",
"bytes": "2036"
},
{
"name": "FreeMarker",
"bytes": "3631"
},
{
"name": "Groovy",
"bytes": "58992"
},
{
"name": "HTML",
"bytes": "70279"
},
{
"name": "Java",
"bytes": "15205862"
},
{
"name": "JavaScript",
"bytes": "37789"
},
{
"name": "Kotlin",
"bytes": "46556"
},
{
"name": "Ruby",
"bytes": "4016"
},
{
"name": "Shell",
"bytes": "38021"
},
{
"name": "Smarty",
"bytes": "2879"
},
{
"name": "XSLT",
"bytes": "3545"
}
],
"symlink_target": ""
} |
<?php
namespace C45\Calculator;
/**
*
*/
class GainRatioCalculator extends AbstractCalculator
{
public function calculateGainRatio(array $gain, array $splitInfo)
{
$gainRatio = [];
foreach ($gain as $key => $value) {
if ($splitInfo[$key] == 0) {
$gainRatio[$key] = 0;
} else {
$gainRatio[$key] = $value / $splitInfo[$key];
}
}
return $gainRatio;
}
}
| {
"content_hash": "f6110ddd16ae5cb82de777e9011e9ffe",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 69,
"avg_line_length": 19.5,
"alnum_prop": 0.5021367521367521,
"repo_name": "juliardi/C45",
"id": "6a6a87f1d653ff7aa7884c648831030e4caa8c62",
"size": "468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Calculator/GainRatioCalculator.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "PHP",
"bytes": "23914"
}
],
"symlink_target": ""
} |
'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors.server.controller'),
FabricTransferRegister = mongoose.model('FabricTransferRegister'),
_ = require('lodash');
/**
* Create a Fabric transfer register
*/
exports.create = function(req, res) {
var fabricTransferRegister = new FabricTransferRegister(req.body);
fabricTransferRegister.user = req.user;
fabricTransferRegister.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(fabricTransferRegister);
}
});
};
/**
* Show the current Fabric transfer register
*/
exports.read = function(req, res) {
res.jsonp(req.fabricTransferRegister);
};
/**
* Update a Fabric transfer register
*/
exports.update = function(req, res) {
var fabricTransferRegister = req.fabricTransferRegister ;
fabricTransferRegister = _.extend(fabricTransferRegister , req.body);
fabricTransferRegister.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(fabricTransferRegister);
}
});
};
/**
* Delete an Fabric transfer register
*/
exports.delete = function(req, res) {
var fabricTransferRegister = req.fabricTransferRegister ;
fabricTransferRegister.remove(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(fabricTransferRegister);
}
});
};
/**
* List of Fabric transfer registers
*/
exports.list = function(req, res) {
FabricTransferRegister.find().sort('-created').populate('user', 'displayName').exec(function(err, fabricTransferRegisters) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(fabricTransferRegisters);
}
});
};
/**
* Fabric transfer register middleware
*/
exports.fabricTransferRegisterByID = function(req, res, next, id) {
FabricTransferRegister.findById(id).populate('user', 'displayName').exec(function(err, fabricTransferRegister) {
if (err) return next(err);
if (! fabricTransferRegister) return next(new Error('Failed to load Fabric transfer register ' + id));
req.fabricTransferRegister = fabricTransferRegister ;
next();
});
};
/**
* Fabric transfer register authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.fabricTransferRegister.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
};
| {
"content_hash": "c0a2d0be30405d3193d97bf0747c9368",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 125,
"avg_line_length": 24.252336448598133,
"alnum_prop": 0.7048169556840077,
"repo_name": "niranjan21/skt",
"id": "876639138dbef041e7fcc93febc0dd893514ec76",
"size": "2595",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/controllers/fabric-transfer-registers.server.controller.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "165624"
},
{
"name": "HTML",
"bytes": "1130994"
},
{
"name": "JavaScript",
"bytes": "1791769"
},
{
"name": "Shell",
"bytes": "414"
}
],
"symlink_target": ""
} |
#pragma once
#include <aws/glacier/Glacier_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Json
{
class JsonValue;
class JsonView;
} // namespace Json
} // namespace Utils
namespace Glacier
{
namespace Model
{
/**
* <p>Describes the options for a range inventory retrieval job.</p><p><h3>See
* Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/glacier-2012-06-01/InventoryRetrievalJobDescription">AWS
* API Reference</a></p>
*/
class AWS_GLACIER_API InventoryRetrievalJobDescription
{
public:
InventoryRetrievalJobDescription();
InventoryRetrievalJobDescription(Aws::Utils::Json::JsonView jsonValue);
InventoryRetrievalJobDescription& operator=(Aws::Utils::Json::JsonView jsonValue);
Aws::Utils::Json::JsonValue Jsonize() const;
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline const Aws::String& GetFormat() const{ return m_format; }
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline void SetFormat(const Aws::String& value) { m_formatHasBeenSet = true; m_format = value; }
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline void SetFormat(Aws::String&& value) { m_formatHasBeenSet = true; m_format = std::move(value); }
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline void SetFormat(const char* value) { m_formatHasBeenSet = true; m_format.assign(value); }
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithFormat(const Aws::String& value) { SetFormat(value); return *this;}
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithFormat(Aws::String&& value) { SetFormat(std::move(value)); return *this;}
/**
* <p>The output format for the vault inventory list, which is set by the
* <b>InitiateJob</b> request when initiating a job to retrieve a vault inventory.
* Valid values are <code>CSV</code> and <code>JSON</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithFormat(const char* value) { SetFormat(value); return *this;}
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline const Aws::String& GetStartDate() const{ return m_startDate; }
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline void SetStartDate(const Aws::String& value) { m_startDateHasBeenSet = true; m_startDate = value; }
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline void SetStartDate(Aws::String&& value) { m_startDateHasBeenSet = true; m_startDate = std::move(value); }
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline void SetStartDate(const char* value) { m_startDateHasBeenSet = true; m_startDate.assign(value); }
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithStartDate(const Aws::String& value) { SetStartDate(value); return *this;}
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithStartDate(Aws::String&& value) { SetStartDate(std::move(value)); return *this;}
/**
* <p>The start of the date range in Universal Coordinated Time (UTC) for vault
* inventory retrieval that includes archives created on or after this date. This
* value should be a string in the ISO 8601 date format, for example
* <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithStartDate(const char* value) { SetStartDate(value); return *this;}
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline const Aws::String& GetEndDate() const{ return m_endDate; }
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline void SetEndDate(const Aws::String& value) { m_endDateHasBeenSet = true; m_endDate = value; }
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline void SetEndDate(Aws::String&& value) { m_endDateHasBeenSet = true; m_endDate = std::move(value); }
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline void SetEndDate(const char* value) { m_endDateHasBeenSet = true; m_endDate.assign(value); }
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithEndDate(const Aws::String& value) { SetEndDate(value); return *this;}
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithEndDate(Aws::String&& value) { SetEndDate(std::move(value)); return *this;}
/**
* <p>The end of the date range in UTC for vault inventory retrieval that includes
* archives created before this date. This value should be a string in the ISO 8601
* date format, for example <code>2013-03-20T17:03:43Z</code>.</p>
*/
inline InventoryRetrievalJobDescription& WithEndDate(const char* value) { SetEndDate(value); return *this;}
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline const Aws::String& GetLimit() const{ return m_limit; }
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline void SetLimit(const Aws::String& value) { m_limitHasBeenSet = true; m_limit = value; }
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline void SetLimit(Aws::String&& value) { m_limitHasBeenSet = true; m_limit = std::move(value); }
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline void SetLimit(const char* value) { m_limitHasBeenSet = true; m_limit.assign(value); }
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline InventoryRetrievalJobDescription& WithLimit(const Aws::String& value) { SetLimit(value); return *this;}
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline InventoryRetrievalJobDescription& WithLimit(Aws::String&& value) { SetLimit(std::move(value)); return *this;}
/**
* <p>The maximum number of inventory items returned per vault inventory retrieval
* request. This limit is set when initiating the job with the a <b>InitiateJob</b>
* request. </p>
*/
inline InventoryRetrievalJobDescription& WithLimit(const char* value) { SetLimit(value); return *this;}
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline const Aws::String& GetMarker() const{ return m_marker; }
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline void SetMarker(const Aws::String& value) { m_markerHasBeenSet = true; m_marker = value; }
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline void SetMarker(Aws::String&& value) { m_markerHasBeenSet = true; m_marker = std::move(value); }
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline void SetMarker(const char* value) { m_markerHasBeenSet = true; m_marker.assign(value); }
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline InventoryRetrievalJobDescription& WithMarker(const Aws::String& value) { SetMarker(value); return *this;}
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline InventoryRetrievalJobDescription& WithMarker(Aws::String&& value) { SetMarker(std::move(value)); return *this;}
/**
* <p>An opaque string that represents where to continue pagination of the vault
* inventory retrieval results. You use the marker in a new <b>InitiateJob</b>
* request to obtain additional inventory items. If there are no more inventory
* items, this value is <code>null</code>. For more information, see <a
* href="http://docs.aws.amazon.com/amazonglacier/latest/dev/api-initiate-job-post.html#api-initiate-job-post-vault-inventory-list-filtering">
* Range Inventory Retrieval</a>.</p>
*/
inline InventoryRetrievalJobDescription& WithMarker(const char* value) { SetMarker(value); return *this;}
private:
Aws::String m_format;
bool m_formatHasBeenSet;
Aws::String m_startDate;
bool m_startDateHasBeenSet;
Aws::String m_endDate;
bool m_endDateHasBeenSet;
Aws::String m_limit;
bool m_limitHasBeenSet;
Aws::String m_marker;
bool m_markerHasBeenSet;
};
} // namespace Model
} // namespace Glacier
} // namespace Aws
| {
"content_hash": "6239aa944d436503511395e9ee450da2",
"timestamp": "",
"source": "github",
"line_count": 335,
"max_line_length": 146,
"avg_line_length": 47.40298507462686,
"alnum_prop": 0.6877833753148614,
"repo_name": "JoyIfBam5/aws-sdk-cpp",
"id": "ee971572fd7c2e0a56ca992e0b71616b0a8cddbd",
"size": "16453",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "aws-cpp-sdk-glacier/include/aws/glacier/model/InventoryRetrievalJobDescription.h",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "11868"
},
{
"name": "C++",
"bytes": "167818064"
},
{
"name": "CMake",
"bytes": "591577"
},
{
"name": "HTML",
"bytes": "4471"
},
{
"name": "Java",
"bytes": "271801"
},
{
"name": "Python",
"bytes": "85650"
},
{
"name": "Shell",
"bytes": "5277"
}
],
"symlink_target": ""
} |
import React, { PropTypes } from 'react';
import { Link } from 'react-router';
import { connect } from 'react-redux';
class App extends React.Component {
constructor(props){
super(props);
this.state = {
loggedIn: false
};
this.BarLink = this.BarLink.bind(this);
this.LoginSlashProfile = this.LoginSlashProfile.bind(this);
}
BarLink(props){
const classes = "nav-icon glyphicon glyphicon-" + props.glyph;
return (
<td className="nav-button">
<Link className="nav-link" to={props.uri}>
{props.text}
<span className={classes} />
</Link>
</td>
);
}
LoginSlashProfile(props){
if(props.token){
return(
<this.BarLink uri="/profile" text="Profile" glyph="user" />
);
}else{
return(
<this.BarLink uri="/login" text="Login" glyph="user" />
);
}
}
render(){
return (
<div>
<div className="top-bar">
<table className="top-bar-table">
<tbody>
<tr>
<this.BarLink uri="/" text="Home" glyph="home" />
<this.BarLink uri="/view" text="View Polls" glyph="stats" />
<this.BarLink uri="/create" text="Create" glyph="plus" />
<this.LoginSlashProfile token={this.props.token}/>
</tr>
</tbody>
</table>
</div>
<p> </p>
<div className="container standard-page">
{React.cloneElement(this.props.children, {
loggedIn: this.state.loggedIn,
})}
</div>
</div>
);
}
}
App.propTypes = {
children: PropTypes.element,
token: PropTypes.string
};
const mapStateToProps = () => {
return {
token: localStorage.token
};
};
export default connect(mapStateToProps)(App);
| {
"content_hash": "6aa75c4cf74a2fb0d35676a023e0228b",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 76,
"avg_line_length": 24.756756756756758,
"alnum_prop": 0.5420305676855895,
"repo_name": "MouseZero/voting-app",
"id": "de9361f9bcd8f945005692532333587c9637190f",
"size": "1832",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/components/App.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2550"
},
{
"name": "HTML",
"bytes": "2264"
},
{
"name": "JavaScript",
"bytes": "42734"
}
],
"symlink_target": ""
} |
title: "New Website!"
date: 2014-09-11
update_date: 2014-09-13 07:16
excerpt: "What changed, and what I learned while making the switch."
categories:
- site
- update
---
First off, welcome to my new site. Where my old site was a hand-made page and slightly crummy, this one is built on top of [Jekyll](http://jekyllrb.com/) and the [Pixyll](https://github.com/johnotander/pixyll) theme, and looks pretty good (at least to me).
For my new site, I wanted somewhere I could gather together all my work and put it on display (as the old site did), and also provide somewhere I could host any articles I felt like writing. I suppose the latter is blogging, but I will try to stay away from slice-of-life blogging, and concentrate on informative, probably technical posts: the kind of thing that's useful for when I'm having a problem and I do a search for it, and someone else has already written up their encounter with the problem, and what they did to solve it.
### Choosing a Starting Point
My "problem" for this article was that my existing site:
1. Didn't look very good. I'm not a designer, and it showed.
2. Had a poor layout. Again, the design thing.
3. Wasn't terribly easy to maintain. Sure, writing HTML isn't hard, but it does distract from the content a little.
I wanted something that could solve those issues, and be portable across hosting providers, be something I could host locally without to much trouble (to aid development), and which would work on static hosts like GitHub.
I'd previously read about [Jekyll](http://jekyllrb.com/) in GitHub's blog, but it never really clicked with me. Still, I was hosting on GitHub, so I thought I'd check it out first.
The idea is that you write a simple proto-site, build the actual site from it using Jekyll, then deploy the site to your web host. If you're hosting on GitHub, you can deploy your proto-site, and they'll build it for you. I wanted to test my site out locally first though, which meant installing Jekyll.
### Installing Jekyll & Theming
First of all, I created a new Git repository in my site folder. Version control makes life so much easier when trying new things.
I followed [this guide](http://jekyll-windows.juthilo.com/) for getting Jekyll set up on Windows, choosing Rouge instead of Pygments because the latter looked like a lot of bother to set up locally. Once Jekyll was installed, the only interaction I had with it was to run `jekyll serve --watch` from inside my site folder, which builds the site and starts a local web server for it at `http://localhost:4000`.
The basic theme is serviceable but doesn't look great, so I Googled "Jekyll themes", and decided on [Pixyll](http://pixyll.com/). Installing it was just a matter of copying it into my site folder, letting it overwrite the existing files.
### Customising The Site
#### Existing Content
I then had to customise my Jekyll + Pixyll site:
1. I customised my `/_config.yml` to fill in the relevant information
2. I [deleted](https://github.com/Ortham/ortham.github.io/commit/c4cba1bd161910786fe1308af68aea79246aa4a3) the example posts, the `README.md` and the custom domain file. I also deleted the contact form because I wanted to just put my email up for people to use directly.
3. I customised the `/about.md` page to be about me instead of Pixyll.
4. I tweaked the footer include at `/_includes/footer.html` to hold my copyright info.
#### New Content
I created a new branch in my repository for content being added to new files. The idea is that it should help keep what I create separate from what I got from Jekyll and Pixyll, and I can just merge the content branch in every so often. Time will tell if that strategy works out.
Anyway, posts go in the `_posts` folder. I also created an archive page to list all my posts, since that was missing from the Pixyll theme, and a "My Projects" subfolder for pages listing all my work. Everything is written in Markdown, except from the icon + text description for [my applications](/projects/), and the image and caption on my [About](/about/) page.
Because I didn't want to make too many changes to the existing Pixyll files, I decided to put the CSS for the above HTML in a new `/css/custom.css` file, and I added the following to my `/_includes/head.html`:
```html
{% raw %}
<link rel="stylesheet" href="{{ "/css/custom.css" | prepend: site.baseurl }}" type="text/css">
{% endraw %}
```
The `{% raw %}{{ }}{% endraw %}` brackets are [Liquid](http://liquidmarkup.org/) syntax. Liquid is a preprocessor that Jekyll runs to substitute in variables and perform simple logic operations on content you write.
Anyway, with those pages written, I had to also update `/_includes/navigation.html` to add a link to my new projects page (which I did back in my main branch). A neater implementation would be to create a `navigation` list in my `_config.yml`, and rewrite the navigation page to generate a set of links for everything in the `navigation` list, but it wasn't really necessary in my case. If you have a lot of non-post pages though, it would be of more benefit.
#### Styling Tweaks
*Update: these changes have now been merged into Pixyll, so others can enjoy them without any extra effort!*
##### Abbreviations
While I was converting some of my content to Markdown, I came across a little problem: I use Wrye *ash to refer to the set of Wrye Bash and related utilities, and xEdit to refer to TES4Edit and its related utilities. In HTML, I'd just wrap these in an `<abbr>` tag and give it a title, but I didn't really want to put HTML in my Markdown (which you can do) unless I had to.
It turns out that [Kramdown](http://kramdown.gettalong.org/), the Markdown parser that Jekyll uses, extends the syntax to cover abbreviations, you just do the following:
```markdown
Kramdown also supports GFM.
*[GFM]: GitHub Flavored Markdown
```
which gets turned into:
> Kramdown also supports GFM.
*[GFM]: GitHub Flavored Markdown
This is pretty neat, but it turns out that Pixyll doesn't have any styling for abbreviations by default, so it's impossible to tell there's hover-text unless by accident. To get the above styling, I added the following:
```css
abbr {
border-bottom: thin black dotted;
cursor: help;
}
```
##### Heading Font Sizes
Another thing I noticed while writing the site was that Pixyll's sizes for `<h1>`, `<h2>`, etc. elements was a bit... off. `<h1>`, `<h2>` and `<h3>` looked fine on their own, but the jump in size between them was weird and `<h4>` was identical to `<h3>`. I looked in the Pixyll repository issue tracker and found [this issue](https://github.com/johnotander/pixyll/issues/59), so I re-scaled the headings according to the ratios given in the first link in that issue. I had to increase the heading sizes on small screens slightly, but I think it still looks good.
#### Social Links
I also wanted a simple, clear and unobtrusive way of linking people to my accounts on various social media sites, as well as a few other things such as the site RSS feed, my email and my PayPal donation button. I had a quick look for anything that already existed, but didn't find anything that would fit with my theme, so I wrote my own.
First, I added some entries to my `_config.yml`:
```yaml
twitter_username: OliverHamlet
github_username: Ortham
linkedin_username: oliverhamlet
paypal_url: https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=HDR3YBGGYCLBG
google_plus_id: 104661746805670028939
```
I then decided that I wanted to use the icons from [Font Awesome](http://fontawesome.io) to represent the various sites, so I added the following line to my `/_templates/head.html`:
```html
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
```
I them created a new `_templates/social_links.html` file with the following content:
```html
{% raw %}
{% if site.github_username %}
<a class="fa fa-github" href="https://github.com/{{ site.github_username }}"></a>
{% endif %}
<a class="fa fa-rss" href="{{ "/feed.xml" | prepend: site.baseurl }}"></a>
{% if site.twitter_username %}
<a class="fa fa-twitter" href="https://twitter.com/{{ site.twitter_username }}"></a>
{% endif %}
{% if site.google_plus_id %}
<a class="fa fa-google-plus" href="https://plus.google.com/{{ site.google_plus_id }}/posts"></a>
{% endif %}
{% if site.email %}
<a class="fa fa-envelope" href="mailto:{{ site.email }}"></a>
{% endif %}
{% if site.linkedin_username %}
<a class="fa fa-linkedin" href="https://www.linkedin.com/in/{{ site.linkedin_username }}"></a>
{% endif %}
{% if site.paypal_url %}
<a class="fa fa-paypal" href="{{ site.paypal_url }}"></a>
{% endif %}
{% endraw %}
```
The Liquid expressions are there to make sure icons aren't shown for any accounts you don't specify.
I'll probably split this little feature off into its own repository, and extend it to cover more sites, as it's probably the sort of thing others would find useful.
### Wrapping It All Up
After writing my content and applying my changes, all that was left to do was push my site to GitHub, which was as simple as always. A website is never finished, but I got an elegant, extensible one built in a day, and changes I can hopefully get pulled upstream.
At some point I'd like to add a "reading time" thing to posts, and an unobtrusive table of contents (maybe collapsible?) to longer posts, but there's no hurry for them.
| {
"content_hash": "6f44e83c3e8db955426a3849e4fdd955",
"timestamp": "",
"source": "github",
"line_count": 153,
"max_line_length": 562,
"avg_line_length": 61.64052287581699,
"alnum_prop": 0.7437175273035733,
"repo_name": "WrinklyNinja/wrinklyninja.github.io",
"id": "ea45e9653fd20b9818e8b910d3a36916355f5eee",
"size": "9435",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "_posts/2014-09-11-new-site.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1362"
},
{
"name": "HTML",
"bytes": "573"
},
{
"name": "Ruby",
"bytes": "8367"
}
],
"symlink_target": ""
} |
layout: resource
title: Bulgaria
---
<ul>
<li>No resources added yet.</li>
</ul>
<p>Know any other resources? <a href="mailto:{{site.contact.email}}">Send us an email</a> or add links to this page with a <a href="https://github.com/codeeu/codeeu.github.io">Pull request on GitHub</a>!</p>
| {
"content_hash": "14682668cec5fc909211d81be7eabe16",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 209,
"avg_line_length": 33.44444444444444,
"alnum_prop": 0.6710963455149501,
"repo_name": "jemole/codeeu.github.io",
"id": "f302c4f9d100d6f0e122291a30906b3130b1539b",
"size": "305",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "resources/bulgaria/index.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "8092"
},
{
"name": "HTML",
"bytes": "145390"
}
],
"symlink_target": ""
} |
import React from 'react';
import Radium from 'radium';
import moment from 'moment';
import variables from './styles/variables';
import Div from './base/div';
import Img from './base/img';
import { H4 } from './base/hn';
@Radium
export default class Comment extends React.Component {
static propTypes = {
avatar: React.PropTypes.string.isRequired,
date: React.PropTypes.instanceOf(Date).isRequired,
userName: React.PropTypes.string.isRequired,
primary: React.PropTypes.bool
};
static defaultProps = {
primary: false
};
getStyles() {
return {
header: {
before: {
content: '',
display: 'table'
},
base: {
marginBottom: variables.commentHeaderMarginBottom
},
after: {
content: '',
display: 'table',
clear: 'both'
}
},
avatar: {
marginRight: variables.commentAvatarMarginRight,
float: 'left'
},
title: {
margin: `${variables.commentTitleMarginTop} 0 0 0`,
fontSize: variables.commentTitleFontSize,
lineHeight: variables.commentTitleLineHeight
},
meta: {
margin: `${variables.commentMetaMarginTop} 0 0 0`,
fontSize: variables.commentMetaFontSize,
lineHeight: variables.commentMetaLineHeight,
color: variables.commentMetaColor
}
};
}
render() {
const styles = this.getStyles();
const props = this.props;
return (
<Div style={[props.primary && styles.primary]}>
<Div style={styles.header.base}>
<Div style={styles.header.before} />
<Img
style={styles.avatar}
src={props.avatar}
alt="" />
<H4 style={styles.title}>
{props.userName}
</H4>
<Div style={styles.meta}>
{moment(props.date).fromNow()}
</Div>
<Div style={styles.header.after} />
</Div>
<Div style={styles.body}>
{props.children}
</Div>
</Div>
);
}
}
| {
"content_hash": "e7e7b8080e92813c6ba4ae48ba4785f1",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 59,
"avg_line_length": 24.761904761904763,
"alnum_prop": 0.5615384615384615,
"repo_name": "ButuzGOL/constructor",
"id": "331d007a1431d647ff907f9a271b7b0f275f944a",
"size": "2080",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/scripts/comment.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "64553"
}
],
"symlink_target": ""
} |
var contexts = require('../lib/contexts.js'),
assert = require('assert');
// Create container
var container = contexts.createContainer();
assert.ok(container);
// Get undefined context
var context = container.getContext({ application: 'Accounting' });
assert.equal(context, null);
// Create and Get context
var context = container.createContext({ application: 'Accounting' });
assert.ok(context);
var result = container.getContext({ application: 'Accounting' });
assert.ok(result);
assert.ok(result === context);
// Create and Get context with two values
var context = container.createContext({ application: 'Accounting', country: 'Japan' });
assert.ok(context);
var result = container.getContext({ application: 'Accounting', country: 'Japan' });
assert.ok(result);
assert.ok(result === context);
// Has Values
assert.ok(context.hasValues({ application: 'Accounting' }));
assert.ok(context.hasValues({ country: 'Japan' }));
assert.ok(context.hasValues({ country: 'Japan', application: 'Accounting' }));
assert.equal(context.hasValues({ application: 'Sales' }), false);
assert.equal(context.hasValues({ country: 'Canada' }), false);
// Get Empty Subject Roles
var result = context.getSubjectRoles('adam');
assert.ok(result);
assert.equal(result.length, 0);
// Add Role and Get Roles
context.addSubjectRole('adam', 'accountant');
var result = context.getSubjectRoles('adam');
assert.ok(result);
assert.equal(result.length, 1);
assert.equal(result[0], 'accountant');
| {
"content_hash": "cc081fa34c8afd9299354d7064cfc985",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 87,
"avg_line_length": 27.571428571428573,
"alnum_prop": 0.6955958549222798,
"repo_name": "ajlopez/SimplePermissions",
"id": "aa459d4a550554b04ee461b8d4734ea2c7fff4a1",
"size": "1544",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/contexts.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "1296"
}
],
"symlink_target": ""
} |
package api;
import java.util.Random;
public class AufgabeStringSilben {
private Random rand = new Random();
private String[] arrayMitSilben = { "pro", "gi", "idre", "hier", "die",
"ist", "un", "mit", "ein", "gen",
"but", "neu", "von", "kon" };
public String generiere1(String[] aArrayMitSilben, int amount) {
String result = "";
int max = arrayMitSilben.length-1;
for (int ii= 0; ii<amount; ii++) {
result = result + aArrayMitSilben[rand.nextInt(max + 1)];
}
return result;
}
public StringBuilder generiere2(String[] aArrayMitSilben, int amount) {
StringBuilder result = new StringBuilder();
int max = arrayMitSilben.length-1;
for (int ii= 0; ii<amount; ii++) {
result.append(aArrayMitSilben[rand.nextInt(max + 1)]);
}
return result;
}
public long profile1(int amount) {
AufgabeStringSilben stringSilben = new AufgabeStringSilben();
long currentTime = System.currentTimeMillis();
stringSilben.generiere1(stringSilben.arrayMitSilben, amount);
long duration = System.currentTimeMillis() - currentTime;
return duration;
}
public long profile2(int amount) {
AufgabeStringSilben stringSilben = new AufgabeStringSilben();
long currentTime = System.currentTimeMillis();
stringSilben.generiere1(stringSilben.arrayMitSilben, amount).toString();
long duration = System.currentTimeMillis() - currentTime;
return duration;
}
public static void main(String[] args) {
int amount = 1000;
AufgabeStringSilben stringSilben = new AufgabeStringSilben();
System.out.println(stringSilben.generiere1(stringSilben.arrayMitSilben, amount) +
" , #Zeitdauer: " + stringSilben.profile1(amount));
System.out.println(stringSilben.generiere1(stringSilben.arrayMitSilben, amount).toString() +
" , #Zeitdauer: " + stringSilben.profile2(amount));
// TODO Auto-generated method stub
}
}
| {
"content_hash": "511f247e0b3077a49d9afb31babdc85d",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 95,
"avg_line_length": 35.517857142857146,
"alnum_prop": 0.6666666666666666,
"repo_name": "hemmerling/java-114014",
"id": "0356827646bc4ab2e203bc15e3fba1163e67c844",
"size": "1989",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/api/AufgabeStringSilben.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "399"
},
{
"name": "Java",
"bytes": "216203"
}
],
"symlink_target": ""
} |
<?php
require_once '../connDB.php';
$data = file_get_contents('php://input');
$comData = json_decode($data);
$i=0;
$queryS = 'UPDATE concepto SET empleo_sector = "'.$comData->empleo_sector.'", part_empleo_prov ="'
.$comData->part_empleo_prov.'", dinamica_empleo ="'.$comData->dinamica_empleo.'",exportaciones="'.$comData->exportaciones.'", part_exportaciones_pvciales= "'.$comData->part_exportaciones_pvciales.'", dinamica_exportaciones_pvciales ="'.$comData->dinamica_exportaciones_pvciales.'", empleo_sector_nacion ="'
.$comData->empleo_sector_nacion.'",empleo_total_nacion="'.$comData->empleo_total_nacion.'",dinamica_empleo_nacion="'.$comData->dinamica_empleo_nacion.'",participacion_sector_nacion="'
.$comData->participacion_sector_nacion.'",coefesp="'.$comData->coefesp.'",remuneracion="'.$comData->remuneracion.'",remuneracion_promedio_nacion="'.$comData->remuneracion_promedio_nacion.
'",dif_salario_sector ="'.$comData->dif_salario_sector.'" WHERE id = '.$comData->id;
$conn->query('SET CHARACTER SET utf8');
$resultQuery = $conn->query($queryS);
if ($resultQuery) {
echo "campo actualizado";
}else{
die($conn->error);
}
?> | {
"content_hash": "b2a2a902ea9c57cab56ffb631240b257",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 307,
"avg_line_length": 48.916666666666664,
"alnum_prop": 0.6890971039182283,
"repo_name": "oixxio/gis-mProduccion",
"id": "a623de937008e622f0aff8249246adf98d453c39",
"size": "1174",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "demo/app/api/admin/guardarDatos.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "324717"
},
{
"name": "HTML",
"bytes": "4472603"
},
{
"name": "JavaScript",
"bytes": "12163457"
},
{
"name": "PHP",
"bytes": "21331"
},
{
"name": "Ruby",
"bytes": "875"
}
],
"symlink_target": ""
} |
package org.apache.sysml.runtime.matrix.data;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import org.apache.sysml.lops.PartialAggregate.CorrectionLocationType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.caching.MatrixObject.UpdateType;
import org.apache.sysml.runtime.functionobjects.Builtin;
import org.apache.sysml.runtime.functionobjects.Builtin.BuiltinCode;
import org.apache.sysml.runtime.functionobjects.CM;
import org.apache.sysml.runtime.functionobjects.IndexFunction;
import org.apache.sysml.runtime.functionobjects.KahanFunction;
import org.apache.sysml.runtime.functionobjects.KahanPlus;
import org.apache.sysml.runtime.functionobjects.KahanPlusSq;
import org.apache.sysml.runtime.functionobjects.Mean;
import org.apache.sysml.runtime.functionobjects.Multiply;
import org.apache.sysml.runtime.functionobjects.ReduceAll;
import org.apache.sysml.runtime.functionobjects.ReduceCol;
import org.apache.sysml.runtime.functionobjects.ReduceDiag;
import org.apache.sysml.runtime.functionobjects.ReduceRow;
import org.apache.sysml.runtime.functionobjects.ValueFunction;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.cp.CM_COV_Object;
import org.apache.sysml.runtime.instructions.cp.KahanObject;
import org.apache.sysml.runtime.matrix.operators.AggregateOperator;
import org.apache.sysml.runtime.matrix.operators.AggregateTernaryOperator;
import org.apache.sysml.runtime.matrix.operators.AggregateUnaryOperator;
import org.apache.sysml.runtime.matrix.operators.CMOperator;
import org.apache.sysml.runtime.matrix.operators.CMOperator.AggregateOperationTypes;
import org.apache.sysml.runtime.matrix.operators.Operator;
import org.apache.sysml.runtime.matrix.operators.UnaryOperator;
import org.apache.sysml.runtime.util.DataConverter;
import org.apache.sysml.runtime.util.UtilFunctions;
/**
* MB:
* Library for matrix aggregations including ak+, uak+ for all
* combinations of dense and sparse representations, and corrections.
* Those are performance-critical operations because they are used
* on combiners/reducers of important operations like tsmm, mvmult,
* indexing, but also basic sum/min/max/mean, row*, col*, etc. Specific
* handling is especially required for all non sparse-safe operations
* in order to prevent unnecessary worse asymptotic behavior.
*
* This library currently covers the following opcodes:
* ak+, uak+, uark+, uack+, uasqk+, uarsqk+, uacsqk+,
* uamin, uarmin, uacmin, uamax, uarmax, uacmax,
* ua*, uamean, uarmean, uacmean, uavar, uarvar, uacvar,
* uarimax, uaktrace, cumk+, cummin, cummax, cum*, tak+.
*
* TODO next opcode extensions: a+, colindexmax
*/
public class LibMatrixAgg
{
//internal configuration parameters
private static final boolean NAN_AWARENESS = false;
private static final long PAR_NUMCELL_THRESHOLD = 1024*1024; //Min 1M elements
private static final long PAR_INTERMEDIATE_SIZE_THRESHOLD = 2*1024*1024; //Max 2MB
////////////////////////////////
// public matrix agg interface
////////////////////////////////
private enum AggType {
KAHAN_SUM,
KAHAN_SUM_SQ,
CUM_KAHAN_SUM,
CUM_MIN,
CUM_MAX,
CUM_PROD,
MIN,
MAX,
MEAN,
VAR,
MAX_INDEX,
MIN_INDEX,
PROD,
INVALID,
}
private LibMatrixAgg() {
//prevent instantiation via private constructor
}
/**
* Core incremental matrix aggregate (ak+) as used in mapmult, tsmm,
* cpmm, etc. Note that we try to keep the current
* aggVal and aggCorr in dense format in order to allow efficient
* access according to the dense/sparse input.
*
* @param in input matrix
* @param aggVal current aggregate values (in/out)
* @param aggCorr current aggregate correction (in/out)
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
public static void aggregateBinaryMatrix(MatrixBlock in, MatrixBlock aggVal, MatrixBlock aggCorr)
throws DMLRuntimeException
{
//Timing time = new Timing(true);
//boolean saggVal = aggVal.isInSparseFormat(), saggCorr = aggCorr.isInSparseFormat();
//long naggVal = aggVal.getNonZeros(), naggCorr = aggCorr.getNonZeros();
//core aggregation
if(!in.sparse && !aggVal.sparse && !aggCorr.sparse)
aggregateBinaryMatrixAllDense(in, aggVal, aggCorr);
else if(in.sparse && !aggVal.sparse && !aggCorr.sparse)
aggregateBinaryMatrixSparseDense(in, aggVal, aggCorr);
else if(in.sparse ) //any aggVal, aggCorr
aggregateBinaryMatrixSparseGeneric(in, aggVal, aggCorr);
else //if( !in.sparse ) //any aggVal, aggCorr
aggregateBinaryMatrixDenseGeneric(in, aggVal, aggCorr);
//System.out.println("agg ("+in.rlen+","+in.clen+","+in.getNonZeros()+","+in.sparse+"), " +
// "("+naggVal+","+saggVal+"), ("+naggCorr+","+saggCorr+") -> " +
// "("+aggVal.getNonZeros()+","+aggVal.isInSparseFormat()+"), ("+aggCorr.getNonZeros()+","+aggCorr.isInSparseFormat()+") " +
// "in "+time.stop()+"ms.");
}
/**
* Core incremental matrix aggregate (ak+) as used for uack+ and acrk+.
* Embedded correction values.
*
* @param in matrix block
* @param aggVal aggregate operator
* @param aop aggregate operator
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
public static void aggregateBinaryMatrix(MatrixBlock in, MatrixBlock aggVal, AggregateOperator aop)
throws DMLRuntimeException
{
//sanity check matching dimensions
if( in.getNumRows()!=aggVal.getNumRows() || in.getNumColumns()!=aggVal.getNumColumns() )
throw new DMLRuntimeException("Dimension mismatch on aggregate: "+in.getNumRows()+"x"+in.getNumColumns()+
" vs "+aggVal.getNumRows()+"x"+aggVal.getNumColumns());
//Timing time = new Timing(true);
//core aggregation
boolean lastRowCorr = (aop.correctionLocation == CorrectionLocationType.LASTROW);
boolean lastColCorr = (aop.correctionLocation == CorrectionLocationType.LASTCOLUMN);
if( !in.sparse && lastRowCorr )
aggregateBinaryMatrixLastRowDenseGeneric(in, aggVal);
else if( in.sparse && lastRowCorr )
aggregateBinaryMatrixLastRowSparseGeneric(in, aggVal);
else if( !in.sparse && lastColCorr )
aggregateBinaryMatrixLastColDenseGeneric(in, aggVal);
else //if( in.sparse && lastColCorr )
aggregateBinaryMatrixLastColSparseGeneric(in, aggVal);
//System.out.println("agg ("+in.rlen+","+in.clen+","+in.getNonZeros()+","+in.sparse+"), ("+naggVal+","+saggVal+") -> " +
// "("+aggVal.getNonZeros()+","+aggVal.isInSparseFormat()+") in "+time.stop()+"ms.");
}
public static void aggregateUnaryMatrix(MatrixBlock in, MatrixBlock out, AggregateUnaryOperator uaop)
throws DMLRuntimeException
{
//prepare meta data
AggType aggtype = getAggType(uaop);
final int m = in.rlen;
final int m2 = out.rlen;
final int n2 = out.clen;
//filter empty input blocks (incl special handling for sparse-unsafe operations)
if( in.isEmptyBlock(false) ){
aggregateUnaryMatrixEmpty(in, out, aggtype, uaop.indexFn);
return;
}
//Timing time = new Timing(true);
//allocate output arrays (if required)
out.reset(m2, n2, false); //always dense
out.allocateDenseBlock();
if( !in.sparse )
aggregateUnaryMatrixDense(in, out, aggtype, uaop.aggOp.increOp.fn, uaop.indexFn, 0, m);
else
aggregateUnaryMatrixSparse(in, out, aggtype, uaop.aggOp.increOp.fn, uaop.indexFn, 0, m);
//cleanup output and change representation (if necessary)
out.recomputeNonZeros();
out.examSparsity();
//System.out.println("uagg ("+in.rlen+","+in.clen+","+in.sparse+") in "+time.stop()+"ms.");
}
public static void aggregateUnaryMatrix(MatrixBlock in, MatrixBlock out, AggregateUnaryOperator uaop, int k)
throws DMLRuntimeException
{
//fall back to sequential version if necessary
if( k <= 1 || (long)in.nonZeros < PAR_NUMCELL_THRESHOLD || in.rlen <= k/2
|| (!(uaop.indexFn instanceof ReduceCol) && out.clen*8*k > PAR_INTERMEDIATE_SIZE_THRESHOLD ) ||
!out.isThreadSafe()) {
aggregateUnaryMatrix(in, out, uaop);
return;
}
//prepare meta data
AggType aggtype = getAggType(uaop);
final int m = in.rlen;
final int m2 = out.rlen;
final int n2 = out.clen;
//filter empty input blocks (incl special handling for sparse-unsafe operations)
if( in.isEmptyBlock(false) ){
aggregateUnaryMatrixEmpty(in, out, aggtype, uaop.indexFn);
return;
}
//Timing time = new Timing(true);
//allocate output arrays (if required)
if( uaop.indexFn instanceof ReduceCol ) {
out.reset(m2, n2, false); //always dense
out.allocateDenseBlock();
}
//core multi-threaded unary aggregate computation
//(currently: always parallelization over number of rows)
try {
ExecutorService pool = Executors.newFixedThreadPool( k );
ArrayList<AggTask> tasks = new ArrayList<AggTask>();
int blklen = (int)(Math.ceil((double)m/k));
for( int i=0; i<k & i*blklen<m; i++ ) {
tasks.add( (uaop.indexFn instanceof ReduceCol) ?
new RowAggTask(in, out, aggtype, uaop, i*blklen, Math.min((i+1)*blklen, m)) :
new PartialAggTask(in, out, aggtype, uaop, i*blklen, Math.min((i+1)*blklen, m)) );
}
pool.invokeAll(tasks);
pool.shutdown();
//aggregate partial results
if( !(uaop.indexFn instanceof ReduceCol) ) {
out.copy(((PartialAggTask)tasks.get(0)).getResult()); //for init
for( int i=1; i<tasks.size(); i++ )
aggregateFinalResult(uaop.aggOp, out, ((PartialAggTask)tasks.get(i)).getResult());
}
}
catch(Exception ex) {
throw new DMLRuntimeException(ex);
}
//cleanup output and change representation (if necessary)
out.recomputeNonZeros();
out.examSparsity();
//System.out.println("uagg k="+k+" ("+in.rlen+","+in.clen+","+in.sparse+") in "+time.stop()+"ms.");
}
public static MatrixBlock cumaggregateUnaryMatrix(MatrixBlock in, MatrixBlock out, UnaryOperator uop)
throws DMLRuntimeException
{
//prepare meta data
AggType aggtype = getAggType(uop);
final int m = in.rlen;
final int m2 = out.rlen;
final int n2 = out.clen;
//filter empty input blocks (incl special handling for sparse-unsafe operations)
if( in.isEmptyBlock(false) ){
return aggregateUnaryMatrixEmpty(in, out, aggtype, null);
}
//allocate output arrays (if required)
out.reset(m2, n2, false); //always dense
out.allocateDenseBlock();
//Timing time = new Timing(true);
if( !in.sparse )
cumaggregateUnaryMatrixDense(in, out, aggtype, uop.fn, null, 0, m);
else
cumaggregateUnaryMatrixSparse(in, out, aggtype, uop.fn, null, 0, m);
//cleanup output and change representation (if necessary)
out.recomputeNonZeros();
out.examSparsity();
//System.out.println("uop ("+in.rlen+","+in.clen+","+in.sparse+") in "+time.stop()+"ms.");
return out;
}
public static MatrixBlock cumaggregateUnaryMatrix(MatrixBlock in, MatrixBlock out, UnaryOperator uop, int k)
throws DMLRuntimeException
{
AggregateUnaryOperator uaop = InstructionUtils.parseBasicCumulativeAggregateUnaryOperator(uop);
//fall back to sequential if necessary or agg not supported
if( k <= 1 || (long)in.rlen*in.clen < PAR_NUMCELL_THRESHOLD || in.rlen <= k
|| out.clen*8*k > PAR_INTERMEDIATE_SIZE_THRESHOLD || uaop == null || !out.isThreadSafe()) {
return cumaggregateUnaryMatrix(in, out, uop);
}
//prepare meta data
AggType aggtype = getAggType(uop);
final int m = in.rlen;
final int m2 = out.rlen;
final int n2 = out.clen;
final int mk = aggtype==AggType.CUM_KAHAN_SUM?2:1;
//filter empty input blocks (incl special handling for sparse-unsafe operations)
if( in.isEmptyBlock(false) ){
return aggregateUnaryMatrixEmpty(in, out, aggtype, null);
}
//Timing time = new Timing(true);
//allocate output arrays (if required)
out.reset(m2, n2, false); //always dense
out.allocateDenseBlock();
//core multi-threaded unary aggregate computation
//(currently: always parallelization over number of rows)
try {
ExecutorService pool = Executors.newFixedThreadPool( k );
int blklen = (int)(Math.ceil((double)m/k));
//step 1: compute aggregates per row partition
AggType uaoptype = getAggType(uaop);
ArrayList<PartialAggTask> tasks = new ArrayList<PartialAggTask>();
for( int i=0; i<k & i*blklen<m; i++ )
tasks.add( new PartialAggTask(in, new MatrixBlock(mk,n2,false), uaoptype, uaop, i*blklen, Math.min((i+1)*blklen, m)) );
List<Future<Object>> taskret = pool.invokeAll(tasks);
for( Future<Object> task : taskret )
task.get(); //check for errors
//step 2: cumulative aggregate of partition aggregates
MatrixBlock tmp = new MatrixBlock(tasks.size(), n2, false);
for( int i=0; i<tasks.size(); i++ ) {
MatrixBlock row = tasks.get(i).getResult();
if( uaop.aggOp.correctionExists )
row.dropLastRowsOrColums(uaop.aggOp.correctionLocation);
tmp.leftIndexingOperations(row, i, i, 0, n2-1, tmp, UpdateType.INPLACE_PINNED);
}
MatrixBlock tmp2 = cumaggregateUnaryMatrix(tmp, new MatrixBlock(tasks.size(), n2, false), uop);
//step 3: compute final cumulative aggregate
ArrayList<CumAggTask> tasks2 = new ArrayList<CumAggTask>();
for( int i=0; i<k & i*blklen<m; i++ ) {
double[] agg = (i==0)? null :
DataConverter.convertToDoubleVector(tmp2.sliceOperations(i-1, i-1, 0, n2-1, new MatrixBlock()));
tasks2.add( new CumAggTask(in, agg, out, aggtype, uop, i*blklen, Math.min((i+1)*blklen, m)) );
}
List<Future<Long>> taskret2 = pool.invokeAll(tasks2);
pool.shutdown();
//step 4: aggregate nnz
out.nonZeros = 0;
for( Future<Long> task : taskret2 )
out.nonZeros += task.get();
}
catch(Exception ex) {
throw new DMLRuntimeException(ex);
}
//cleanup output and change representation (if necessary)
out.examSparsity();
//System.out.println("uop k="+k+" ("+in.rlen+","+in.clen+","+in.sparse+") in "+time.stop()+"ms.");
return out;
}
public static MatrixBlock aggregateTernary(MatrixBlock in1, MatrixBlock in2, MatrixBlock in3, MatrixBlock ret, AggregateTernaryOperator op)
throws DMLRuntimeException
{
//early abort if any block is empty
if( in1.isEmptyBlock(false) || in2.isEmptyBlock(false) || in3!=null&&in3.isEmptyBlock(false) ) {
return ret;
}
//Timing time = new Timing(true);
//allocate output arrays (if required)
ret.reset(ret.rlen, ret.clen, false); //always dense
ret.allocateDenseBlock();
IndexFunction ixFn = op.indexFn;
if( !in1.sparse && !in2.sparse && (in3==null||!in3.sparse) ) //DENSE
aggregateTernaryDense(in1, in2, in3, ret, ixFn, 0, in1.rlen);
else //GENERAL CASE
aggregateTernaryGeneric(in1, in2, in3, ret, ixFn, 0, in1.rlen);
//cleanup output and change representation (if necessary)
ret.recomputeNonZeros();
ret.examSparsity();
//System.out.println("tak+ ("+in1.rlen+","+in1.sparse+","+in2.sparse+","+in3.sparse+") in "+time.stop()+"ms.");
return ret;
}
public static MatrixBlock aggregateTernary(MatrixBlock in1, MatrixBlock in2, MatrixBlock in3, MatrixBlock ret, AggregateTernaryOperator op, int k)
throws DMLRuntimeException
{
//fall back to sequential version if necessary
if( k <= 1 || in1.nonZeros+in2.nonZeros < PAR_NUMCELL_THRESHOLD || in1.rlen <= k/2
|| (!(op.indexFn instanceof ReduceCol) && ret.clen*8*k > PAR_INTERMEDIATE_SIZE_THRESHOLD) ) {
return aggregateTernary(in1, in2, in3, ret, op);
}
//early abort if any block is empty
if( in1.isEmptyBlock(false) || in2.isEmptyBlock(false) || in3!=null&&in3.isEmptyBlock(false) ) {
return ret;
}
//Timing time = new Timing(true);
try {
ExecutorService pool = Executors.newFixedThreadPool( k );
ArrayList<AggTernaryTask> tasks = new ArrayList<AggTernaryTask>();
int blklen = (int)(Math.ceil((double)in1.rlen/k));
IndexFunction ixFn = op.indexFn;
for( int i=0; i<k & i*blklen<in1.rlen; i++ )
tasks.add( new AggTernaryTask(in1, in2, in3, ret, ixFn, i*blklen, Math.min((i+1)*blklen, in1.rlen)));
List<Future<MatrixBlock>> rtasks = pool.invokeAll(tasks);
pool.shutdown();
//aggregate partial results and error handling
ret.copy(rtasks.get(0).get()); //for init
for( int i=1; i<rtasks.size(); i++ )
aggregateFinalResult(op.aggOp, ret, rtasks.get(i).get());
}
catch(Exception ex) {
throw new DMLRuntimeException(ex);
}
//cleanup output and change representation (if necessary)
ret.recomputeNonZeros();
ret.examSparsity();
//System.out.println("tak+ k="+k+" ("+in1.rlen+","+in1.sparse+","+in2.sparse+","+in3.sparse+") in "+time.stop()+"ms.");
return ret;
}
public static void groupedAggregate(MatrixBlock groups, MatrixBlock target, MatrixBlock weights, MatrixBlock result, int numGroups, Operator op)
throws DMLRuntimeException
{
if( !(op instanceof CMOperator || op instanceof AggregateOperator) ) {
throw new DMLRuntimeException("Invalid operator (" + op + ") encountered while processing groupedAggregate.");
}
//CM operator for count, mean, variance
//note: current support only for column vectors
if(op instanceof CMOperator) {
CMOperator cmOp = (CMOperator) op;
if( cmOp.getAggOpType()==AggregateOperationTypes.COUNT && weights==null && target.clen==1 ) {
//special case for vector counts
groupedAggregateVecCount(groups, result, numGroups);
}
else { //general case
groupedAggregateCM(groups, target, weights, result, numGroups, cmOp, 0, target.clen);
}
}
//Aggregate operator for sum (via kahan sum)
//note: support for row/column vectors and dense/sparse
else if( op instanceof AggregateOperator ) {
AggregateOperator aggop = (AggregateOperator) op;
groupedAggregateKahanPlus(groups, target, weights, result, numGroups, aggop, 0, target.clen);
}
}
public static void groupedAggregate(MatrixBlock groups, MatrixBlock target, MatrixBlock weights, MatrixBlock result, int numGroups, Operator op, int k)
throws DMLRuntimeException
{
//fall back to sequential version if necessary
boolean rowVector = (target.getNumRows()==1 && target.getNumColumns()>1);
if( k <= 1 || (long)target.rlen*target.clen < PAR_NUMCELL_THRESHOLD || rowVector || target.clen==1) {
groupedAggregate(groups, target, weights, result, numGroups, op);
return;
}
if( !(op instanceof CMOperator || op instanceof AggregateOperator) ) {
throw new DMLRuntimeException("Invalid operator (" + op + ") encountered while processing groupedAggregate.");
}
//preprocessing (no need to check isThreadSafe)
result.sparse = false;
result.allocateDenseBlock();
//core multi-threaded grouped aggregate computation
//(currently: parallelization over columns to avoid additional memory requirements)
try {
ExecutorService pool = Executors.newFixedThreadPool( k );
ArrayList<GrpAggTask> tasks = new ArrayList<GrpAggTask>();
int blklen = (int)(Math.ceil((double)target.clen/k));
for( int i=0; i<k & i*blklen<target.clen; i++ )
tasks.add( new GrpAggTask(groups, target, weights, result, numGroups, op, i*blklen, Math.min((i+1)*blklen, target.clen)) );
pool.invokeAll(tasks);
pool.shutdown();
}
catch(Exception ex) {
throw new DMLRuntimeException(ex);
}
//postprocessing
result.recomputeNonZeros();
result.examSparsity();
}
public static boolean isSupportedUnaryAggregateOperator( AggregateUnaryOperator op )
{
AggType type = getAggType( op );
return (type != AggType.INVALID);
}
public static boolean isSupportedUnaryOperator( UnaryOperator op )
{
AggType type = getAggType( op );
return (type != AggType.INVALID);
}
/**
* Recompute outputs (e.g., maxindex or minindex) according to block indexes from MR.
* TODO: this should not be part of block operations but of the MR instruction.
*
* @param out matrix block
* @param op aggregate unary operator
* @param brlen number of rows in a block
* @param bclen number of columns in a block
* @param ix matrix indexes
*/
public static void recomputeIndexes( MatrixBlock out, AggregateUnaryOperator op, int brlen, int bclen, MatrixIndexes ix )
{
AggType type = getAggType(op);
if( (type == AggType.MAX_INDEX || type == AggType.MIN_INDEX) && ix.getColumnIndex()!=1 ) //MAXINDEX or MININDEX
{
int m = out.rlen;
double[] c = out.getDenseBlock();
for( int i=0, cix=0; i<m; i++, cix+=2 )
c[cix] = UtilFunctions.computeCellIndex(ix.getColumnIndex(), bclen, (int)c[cix]-1);
}
}
private static AggType getAggType( AggregateUnaryOperator op )
{
ValueFunction vfn = op.aggOp.increOp.fn;
IndexFunction ifn = op.indexFn;
//(kahan) sum / sum squared / trace (for ReduceDiag)
if( vfn instanceof KahanFunction
&& (op.aggOp.correctionLocation == CorrectionLocationType.LASTCOLUMN || op.aggOp.correctionLocation == CorrectionLocationType.LASTROW)
&& (ifn instanceof ReduceAll || ifn instanceof ReduceCol || ifn instanceof ReduceRow || ifn instanceof ReduceDiag) )
{
if (vfn instanceof KahanPlus)
return AggType.KAHAN_SUM;
else if (vfn instanceof KahanPlusSq)
return AggType.KAHAN_SUM_SQ;
}
//mean
if( vfn instanceof Mean
&& (op.aggOp.correctionLocation == CorrectionLocationType.LASTTWOCOLUMNS || op.aggOp.correctionLocation == CorrectionLocationType.LASTTWOROWS)
&& (ifn instanceof ReduceAll || ifn instanceof ReduceCol || ifn instanceof ReduceRow) )
{
return AggType.MEAN;
}
//variance
if( vfn instanceof CM
&& ((CM) vfn).getAggOpType() == AggregateOperationTypes.VARIANCE
&& (op.aggOp.correctionLocation == CorrectionLocationType.LASTFOURCOLUMNS ||
op.aggOp.correctionLocation == CorrectionLocationType.LASTFOURROWS)
&& (ifn instanceof ReduceAll || ifn instanceof ReduceCol || ifn instanceof ReduceRow) )
{
return AggType.VAR;
}
//prod
if( vfn instanceof Multiply && ifn instanceof ReduceAll )
{
return AggType.PROD;
}
//min / max
if( vfn instanceof Builtin &&
(ifn instanceof ReduceAll || ifn instanceof ReduceCol || ifn instanceof ReduceRow) )
{
BuiltinCode bfcode = ((Builtin)vfn).bFunc;
switch( bfcode ){
case MAX: return AggType.MAX;
case MIN: return AggType.MIN;
case MAXINDEX: return AggType.MAX_INDEX;
case MININDEX: return AggType.MIN_INDEX;
default: //do nothing
}
}
return AggType.INVALID;
}
private static AggType getAggType( UnaryOperator op )
{
ValueFunction vfn = op.fn;
//cumsum/cumprod/cummin/cummax
if( vfn instanceof Builtin ) {
BuiltinCode bfunc = ((Builtin) vfn).bFunc;
switch( bfunc )
{
case CUMSUM: return AggType.CUM_KAHAN_SUM;
case CUMPROD: return AggType.CUM_PROD;
case CUMMIN: return AggType.CUM_MIN;
case CUMMAX: return AggType.CUM_MAX;
default: return AggType.INVALID;
}
}
return AggType.INVALID;
}
private static void aggregateFinalResult( AggregateOperator aop, MatrixBlock out, MatrixBlock partout )
throws DMLRuntimeException
{
AggregateOperator laop = aop;
//special handling for mean where the final aggregate operator (kahan plus)
//is not equals to the partial aggregate operator
if( aop.increOp.fn instanceof Mean ) {
laop = new AggregateOperator(0, KahanPlus.getKahanPlusFnObject(), aop.correctionExists, aop.correctionLocation);
}
//incremental aggregation of final results
if( laop.correctionExists )
out.incrementalAggregate(laop, partout);
else
out.binaryOperationsInPlace(laop.increOp, partout);
}
private static void aggregateTernaryDense(MatrixBlock in1, MatrixBlock in2, MatrixBlock in3, MatrixBlock ret, IndexFunction ixFn, int rl, int ru)
{
//compute block operations
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
double[] a = in1.denseBlock;
double[] b1 = in2.denseBlock;
double[] b2 = (in3!=null) ? in3.denseBlock : null; //if null, literal 1
final int n = in1.clen;
if( ixFn instanceof ReduceAll ) //tak+*
{
for( int i=rl, ix=rl*n; i<ru; i++ )
for( int j=0; j<n; j++, ix++ ) {
double b2val = (b2 != null) ? b2[ix] : 1;
double val = a[ix] * b1[ix] * b2val;
kplus.execute2( kbuff, val );
}
ret.quickSetValue(0, 0, kbuff._sum);
ret.quickSetValue(0, 1, kbuff._correction);
}
else //tack+*
{
double[] c = ret.getDenseBlock();
for( int i=rl, ix=rl*n; i<ru; i++ )
for( int j=0; j<n; j++, ix++ ) {
double b2val = (b2 != null) ? b2[ix] : 1;
double val = a[ix] * b1[ix] * b2val;
kbuff._sum = c[j];
kbuff._correction = c[j+n];
kplus.execute2(kbuff, val);
c[j] = kbuff._sum;
c[j+n] = kbuff._correction;
}
}
}
private static void aggregateTernaryGeneric(MatrixBlock in1, MatrixBlock in2, MatrixBlock in3, MatrixBlock ret, IndexFunction ixFn, int rl, int ru)
{
//compute block operations
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
//guaranteed to have at least one sparse input, sort by nnz, assume num cells if
//(potentially incorrect) in dense representation, keep null at end via stable sort
MatrixBlock[] blocks = new MatrixBlock[]{in1, in2, in3};
Arrays.sort(blocks, new Comparator<MatrixBlock>() {
@Override
public int compare(MatrixBlock o1, MatrixBlock o2) {
long nnz1 = (o1!=null && o1.sparse) ? o1.nonZeros : Long.MAX_VALUE;
long nnz2 = (o2!=null && o2.sparse) ? o2.nonZeros : Long.MAX_VALUE;
return Long.compare(nnz1, nnz2);
}
});
MatrixBlock lin1 = blocks[0];
MatrixBlock lin2 = blocks[1];
MatrixBlock lin3 = blocks[2];
SparseBlock a = lin1.sparseBlock;
final int n = in1.clen;
if( ixFn instanceof ReduceAll ) //tak+*
{
for( int i=rl; i<ru; i++ )
if( !a.isEmpty(i) ) {
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
for( int j=apos; j<apos+alen; j++ ) {
double val1 = avals[j];
double val2 = lin2.quickGetValue(i, aix[j]);
double val = val1 * val2;
if( val != 0 && lin3 != null )
val *= lin3.quickGetValue(i, aix[j]);
kplus.execute2( kbuff, val );
}
}
ret.quickSetValue(0, 0, kbuff._sum);
ret.quickSetValue(0, 1, kbuff._correction);
}
else //tack+*
{
double[] c = ret.getDenseBlock();
for( int i=rl; i<ru; i++ )
if( !a.isEmpty(i) ) {
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
for( int j=apos; j<apos+alen; j++ ) {
int colIx = aix[j];
double val1 = avals[j];
double val2 = lin2.quickGetValue(i, colIx);
double val = val1 * val2;
if( val != 0 && lin3 != null )
val *= lin3.quickGetValue(i, colIx);
kbuff._sum = c[colIx];
kbuff._correction = c[colIx+n];
kplus.execute2( kbuff, val );
c[colIx] = kbuff._sum;
c[colIx+n] = kbuff._correction;
}
}
}
}
/**
* This is a specific implementation for aggregate(fn="sum"), where we use KahanPlus for numerical
* stability. In contrast to other functions of aggregate, this implementation supports row and column
* vectors for target and exploits sparse representations since KahanPlus is sparse-safe.
*
* @param groups matrix block groups
* @param target matrix block target
* @param weights matrix block weights
* @param result matrix block result
* @param numGroups number of groups
* @param aggop aggregate operator
* @param cl column lower index
* @param cu column upper index
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void groupedAggregateKahanPlus( MatrixBlock groups, MatrixBlock target, MatrixBlock weights, MatrixBlock result, int numGroups, AggregateOperator aggop, int cl, int cu )
throws DMLRuntimeException
{
boolean rowVector = (target.getNumRows()==1 && target.getNumColumns()>1);
int numCols = (!rowVector) ? target.getNumColumns() : 1;
double w = 1; //default weight
//skip empty blocks (sparse-safe operation)
if( target.isEmptyBlock(false) )
return;
//init group buffers
int numCols2 = cu-cl;
KahanObject[][] buffer = new KahanObject[numGroups][numCols2];
for( int i=0; i<numGroups; i++ )
for( int j=0; j<numCols2; j++ )
buffer[i][j] = new KahanObject(aggop.initialValue, 0);
if( rowVector ) //target is rowvector
{
//note: always sequential, no need to respect cl/cu
if( target.sparse ) //SPARSE target
{
if( !target.sparseBlock.isEmpty(0) )
{
int pos = target.sparseBlock.pos(0);
int len = target.sparseBlock.size(0);
int[] aix = target.sparseBlock.indexes(0);
double[] avals = target.sparseBlock.values(0);
for( int j=pos; j<pos+len; j++ ) //for each nnz
{
int g = (int) groups.quickGetValue(aix[j], 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(aix[j],0);
aggop.increOp.fn.execute(buffer[g-1][0], avals[j]*w);
}
}
}
else //DENSE target
{
for ( int i=0; i < target.getNumColumns(); i++ ) {
double d = target.denseBlock[ i ];
if( d != 0 ) //sparse-safe
{
int g = (int) groups.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
aggop.increOp.fn.execute(buffer[g-1][0], d*w);
}
}
}
}
else //column vector or matrix
{
if( target.sparse ) //SPARSE target
{
SparseBlock a = target.sparseBlock;
for( int i=0; i < groups.getNumRows(); i++ )
{
int g = (int) groups.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if( !a.isEmpty(i) )
{
int pos = a.pos(i);
int len = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
int j = (cl==0) ? pos : a.posFIndexGTE(i,cl);
j = (j>=0) ? j : len;
for( ; j<pos+len && aix[j]<cu; j++ ) //for each nnz
{
if ( weights != null )
w = weights.quickGetValue(aix[j],0);
aggop.increOp.fn.execute(buffer[g-1][aix[j]-cl], avals[j]*w);
}
}
}
}
else //DENSE target
{
double[] a = target.denseBlock;
for( int i=0, aix=0; i < groups.getNumRows(); i++, aix+=numCols )
{
int g = (int) groups.quickGetValue(i, 0);
if ( g > numGroups )
continue;
for( int j=cl; j < cu; j++ ) {
double d = a[ aix+j ];
if( d != 0 ) { //sparse-safe
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
aggop.increOp.fn.execute(buffer[g-1][j-cl], d*w);
}
}
}
}
}
// extract the results from group buffers
for( int i=0; i < numGroups; i++ )
for( int j=0; j < numCols2; j++ )
result.appendValue(i, j+cl, buffer[i][j]._sum);
}
private static void groupedAggregateCM( MatrixBlock groups, MatrixBlock target, MatrixBlock weights, MatrixBlock result, int numGroups, CMOperator cmOp, int cl, int cu )
throws DMLRuntimeException
{
CM cmFn = CM.getCMFnObject(((CMOperator) cmOp).getAggOpType());
double w = 1; //default weight
//init group buffers
int numCols2 = cu-cl;
CM_COV_Object[][] cmValues = new CM_COV_Object[numGroups][numCols2];
for ( int i=0; i < numGroups; i++ )
for( int j=0; j < numCols2; j++ )
cmValues[i][j] = new CM_COV_Object();
//column vector or matrix
if( target.sparse ) //SPARSE target
{
SparseBlock a = target.sparseBlock;
for( int i=0; i < groups.getNumRows(); i++ )
{
int g = (int) groups.quickGetValue(i, 0);
if ( g > numGroups )
continue;
if( !a.isEmpty(i) )
{
int pos = a.pos(i);
int len = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
int j = (cl==0) ? pos : a.posFIndexGTE(i,cl);
j = (j>=0) ? j : pos+len;
for( ; j<pos+len && aix[j]<cu; j++ ) //for each nnz
{
if ( weights != null )
w = weights.quickGetValue(i, 0);
cmFn.execute(cmValues[g-1][aix[j]-cl], avals[j], w);
}
//TODO sparse unsafe correction
}
}
}
else //DENSE target
{
double[] a = target.denseBlock;
for( int i=0, aix=0; i < groups.getNumRows(); i++, aix+=target.clen )
{
int g = (int) groups.quickGetValue(i, 0);
if ( g > numGroups )
continue;
for( int j=cl; j<cu; j++ ) {
double d = a[ aix+j ]; //sparse unsafe
if ( weights != null )
w = weights.quickGetValue(i,0);
// buffer is 0-indexed, whereas range of values for g = [1,numGroups]
cmFn.execute(cmValues[g-1][j-cl], d, w);
}
}
}
// extract the required value from each CM_COV_Object
for( int i=0; i < numGroups; i++ )
for( int j=0; j < numCols2; j++ ) {
// result is 0-indexed, so is cmValues
result.appendValue(i, j, cmValues[i][j+cl].getRequiredResult(cmOp));
}
}
private static void groupedAggregateVecCount( MatrixBlock groups, MatrixBlock result, int numGroups )
throws DMLRuntimeException
{
//note: groups are always dense because 0 invalid
if( groups.isInSparseFormat() || groups.isEmptyBlock(false) )
throw new DMLRuntimeException("Unsupported sparse input for aggregate-count on group vector.");
double[] a = groups.denseBlock;
int[] tmp = new int[numGroups];
int m = groups.rlen;
//compute counts
for( int i = 0; i < m; i++ ) {
int g = (int) a[i];
if ( g > numGroups )
continue;
tmp[g-1]++;
}
//copy counts into result
for( int i=0; i<numGroups; i++ ) {
result.appendValue(i, 0, tmp[i]);
}
}
private static void aggregateBinaryMatrixAllDense(MatrixBlock in, MatrixBlock aggVal, MatrixBlock aggCorr)
throws DMLRuntimeException
{
if( in.denseBlock==null || in.isEmptyBlock(false) )
return;
//allocate output arrays (if required)
aggVal.allocateDenseBlock(); //should always stay in dense
aggCorr.allocateDenseBlock(); //should always stay in dense
double[] a = in.getDenseBlock();
double[] c = aggVal.getDenseBlock();
double[] cc = aggCorr.getDenseBlock();
KahanObject buffer1 = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
final int len = Math.min(a.length, in.rlen*in.clen);
int nnzC = 0;
int nnzCC = 0;
for( int i=0; i<len; i++ )
{
buffer1._sum = c[i];
buffer1._correction = cc[i];
akplus.execute2(buffer1, a[i]);
c[i] = buffer1._sum;
cc[i] = buffer1._correction;
nnzC += (buffer1._sum!=0)?1:0;
nnzCC += (buffer1._correction!=0)?1:0;
}
aggVal.nonZeros = nnzC;
aggCorr.nonZeros = nnzCC;
}
private static void aggregateBinaryMatrixSparseDense(MatrixBlock in, MatrixBlock aggVal, MatrixBlock aggCorr)
throws DMLRuntimeException
{
if( in.isEmptyBlock(false) )
return;
//allocate output arrays (if required)
aggVal.allocateDenseBlock(); //should always stay in dense
aggCorr.allocateDenseBlock(); //should always stay in dense
SparseBlock a = in.getSparseBlock();
double[] c = aggVal.getDenseBlock();
double[] cc = aggCorr.getDenseBlock();
KahanObject buffer1 = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
final int m = in.rlen;
final int n = in.clen;
final int rlen = Math.min(a.numRows(), m);
for( int i=0, cix=0; i<rlen; i++, cix+=n )
{
if( !a.isEmpty(i) )
{
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
for( int j=apos; j<apos+alen; j++ )
{
int ix = cix+aix[j];
buffer1._sum = c[ix];
buffer1._correction = cc[ix];
akplus.execute2(buffer1, avals[j]);
c[ix] = buffer1._sum;
cc[ix] = buffer1._correction;
}
}
}
aggVal.recomputeNonZeros();
aggCorr.recomputeNonZeros();
}
private static void aggregateBinaryMatrixSparseGeneric(MatrixBlock in, MatrixBlock aggVal, MatrixBlock aggCorr)
throws DMLRuntimeException
{
if( in.isEmptyBlock(false) )
return;
SparseBlock a = in.getSparseBlock();
KahanObject buffer1 = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
final int m = in.rlen;
final int rlen = Math.min(a.numRows(), m);
for( int i=0; i<rlen; i++ )
{
if( !a.isEmpty(i) )
{
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
for( int j=apos; j<apos+alen; j++ )
{
int jix = aix[j];
buffer1._sum = aggVal.quickGetValue(i, jix);
buffer1._correction = aggCorr.quickGetValue(i, jix);
akplus.execute2(buffer1, avals[j]);
aggVal.quickSetValue(i, jix, buffer1._sum);
aggCorr.quickSetValue(i, jix, buffer1._correction);
}
}
}
//note: nnz of aggVal/aggCorr maintained internally
aggVal.examSparsity();
aggCorr.examSparsity();
}
private static void aggregateBinaryMatrixDenseGeneric(MatrixBlock in, MatrixBlock aggVal, MatrixBlock aggCorr)
throws DMLRuntimeException
{
if( in.denseBlock==null || in.isEmptyBlock(false) )
return;
final int m = in.rlen;
final int n = in.clen;
double[] a = in.getDenseBlock();
KahanObject buffer = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
//incl implicit nnz maintenance
for(int i=0, ix=0; i<m; i++)
for(int j=0; j<n; j++, ix++)
{
buffer._sum = aggVal.quickGetValue(i, j);
buffer._correction = aggCorr.quickGetValue(i, j);
akplus.execute(buffer, a[ix]);
aggVal.quickSetValue(i, j, buffer._sum);
aggCorr.quickSetValue(i, j, buffer._correction);
}
//note: nnz of aggVal/aggCorr maintained internally
aggVal.examSparsity();
aggCorr.examSparsity();
}
private static void aggregateBinaryMatrixLastRowDenseGeneric(MatrixBlock in, MatrixBlock aggVal)
throws DMLRuntimeException
{
if( in.denseBlock==null || in.isEmptyBlock(false) )
return;
final int m = in.rlen;
final int n = in.clen;
final int cix = (m-1)*n;
double[] a = in.getDenseBlock();
KahanObject buffer = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
//incl implicit nnz maintenance
for(int i=0, ix=0; i<m-1; i++)
for(int j=0; j<n; j++, ix++)
{
buffer._sum = aggVal.quickGetValue(i, j);
buffer._correction = aggVal.quickGetValue(m-1, j);
akplus.execute(buffer, a[ix], a[cix+j]);
aggVal.quickSetValue(i, j, buffer._sum);
aggVal.quickSetValue(m-1, j, buffer._correction);
}
//note: nnz of aggVal maintained internally
aggVal.examSparsity();
}
private static void aggregateBinaryMatrixLastRowSparseGeneric(MatrixBlock in, MatrixBlock aggVal)
throws DMLRuntimeException
{
//sparse-safe operation
if( in.isEmptyBlock(false) )
return;
SparseBlock a = in.getSparseBlock();
KahanObject buffer1 = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
final int m = in.rlen;
final int rlen = Math.min(a.numRows(), m);
for( int i=0; i<rlen-1; i++ )
{
if( !a.isEmpty(i) )
{
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
for( int j=apos; j<apos+alen; j++ )
{
int jix = aix[j];
double corr = in.quickGetValue(m-1, jix);
buffer1._sum = aggVal.quickGetValue(i, jix);
buffer1._correction = aggVal.quickGetValue(m-1, jix);
akplus.execute(buffer1, avals[j], corr);
aggVal.quickSetValue(i, jix, buffer1._sum);
aggVal.quickSetValue(m-1, jix, buffer1._correction);
}
}
}
//note: nnz of aggVal/aggCorr maintained internally
aggVal.examSparsity();
}
private static void aggregateBinaryMatrixLastColDenseGeneric(MatrixBlock in, MatrixBlock aggVal)
throws DMLRuntimeException
{
if( in.denseBlock==null || in.isEmptyBlock(false) )
return;
final int m = in.rlen;
final int n = in.clen;
double[] a = in.getDenseBlock();
KahanObject buffer = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
//incl implicit nnz maintenance
for(int i=0, ix=0; i<m; i++, ix+=n)
for(int j=0; j<n-1; j++)
{
buffer._sum = aggVal.quickGetValue(i, j);
buffer._correction = aggVal.quickGetValue(i, n-1);
akplus.execute(buffer, a[ix+j], a[ix+j+1]);
aggVal.quickSetValue(i, j, buffer._sum);
aggVal.quickSetValue(i, n-1, buffer._correction);
}
//note: nnz of aggVal maintained internally
aggVal.examSparsity();
}
private static void aggregateBinaryMatrixLastColSparseGeneric(MatrixBlock in, MatrixBlock aggVal)
throws DMLRuntimeException
{
//sparse-safe operation
if( in.isEmptyBlock(false) )
return;
SparseBlock a = in.getSparseBlock();
KahanObject buffer1 = new KahanObject(0, 0);
KahanPlus akplus = KahanPlus.getKahanPlusFnObject();
final int m = in.rlen;
final int n = in.clen;
final int rlen = Math.min(a.numRows(), m);
for( int i=0; i<rlen; i++ )
{
if( !a.isEmpty(i) )
{
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
for( int j=apos; j<apos+alen && aix[j]<n-1; j++ )
{
int jix = aix[j];
double corr = in.quickGetValue(i, n-1);
buffer1._sum = aggVal.quickGetValue(i, jix);
buffer1._correction = aggVal.quickGetValue(i, n-1);
akplus.execute(buffer1, avals[j], corr);
aggVal.quickSetValue(i, jix, buffer1._sum);
aggVal.quickSetValue(i, n-1, buffer1._correction);
}
}
}
//note: nnz of aggVal/aggCorr maintained internally
aggVal.examSparsity();
}
private static void aggregateUnaryMatrixDense(MatrixBlock in, MatrixBlock out, AggType optype, ValueFunction vFn, IndexFunction ixFn, int rl, int ru)
throws DMLRuntimeException
{
final int m = in.rlen;
final int n = in.clen;
double[] a = in.getDenseBlock();
double[] c = out.getDenseBlock();
switch( optype )
{
case KAHAN_SUM: //SUM/TRACE via k+,
{
KahanObject kbuff = new KahanObject(0, 0);
if( ixFn instanceof ReduceAll ) // SUM
d_uakp(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWSUM
d_uarkp(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLSUM
d_uackp(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
else if( ixFn instanceof ReduceDiag ) //TRACE
d_uakptrace(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
break;
}
case KAHAN_SUM_SQ: //SUM_SQ via k+,
{
KahanObject kbuff = new KahanObject(0, 0);
if( ixFn instanceof ReduceAll ) //SUM_SQ
d_uasqkp(a, c, m, n, kbuff, (KahanPlusSq)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWSUM_SQ
d_uarsqkp(a, c, m, n, kbuff, (KahanPlusSq)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLSUM_SQ
d_uacsqkp(a, c, m, n, kbuff, (KahanPlusSq)vFn, rl, ru);
break;
}
case CUM_KAHAN_SUM: //CUMSUM
{
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
d_ucumkp(a, null, c, m, n, kbuff, kplus, rl, ru);
break;
}
case CUM_PROD: //CUMPROD
{
d_ucumm(a, null, c, m, n, rl, ru);
break;
}
case CUM_MIN:
case CUM_MAX:
{
double init = Double.MAX_VALUE * ((optype==AggType.CUM_MAX)?-1:1);
d_ucummxx(a, null, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MIN:
case MAX: //MAX/MIN
{
double init = Double.MAX_VALUE * ((optype==AggType.MAX)?-1:1);
if( ixFn instanceof ReduceAll ) // MIN/MAX
d_uamxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWMIN/ROWMAX
d_uarmxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLMIN/COLMAX
d_uacmxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MAX_INDEX:
{
double init = -Double.MAX_VALUE;
if( ixFn instanceof ReduceCol ) //ROWINDEXMAX
d_uarimxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MIN_INDEX:
{
double init = Double.MAX_VALUE;
if( ixFn instanceof ReduceCol ) //ROWINDEXMIN
d_uarimin(a, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MEAN: //MEAN
{
KahanObject kbuff = new KahanObject(0, 0);
if( ixFn instanceof ReduceAll ) // MEAN
d_uamean(a, c, m, n, kbuff, (Mean)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWMEAN
d_uarmean(a, c, m, n, kbuff, (Mean)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLMEAN
d_uacmean(a, c, m, n, kbuff, (Mean)vFn, rl, ru);
break;
}
case VAR: //VAR
{
CM_COV_Object cbuff = new CM_COV_Object();
if( ixFn instanceof ReduceAll ) //VAR
d_uavar(a, c, m, n, cbuff, (CM)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWVAR
d_uarvar(a, c, m, n, cbuff, (CM)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLVAR
d_uacvar(a, c, m, n, cbuff, (CM)vFn, rl, ru);
break;
}
case PROD: //PROD
{
if( ixFn instanceof ReduceAll ) // PROD
d_uam(a, c, m, n, rl, ru );
break;
}
default:
throw new DMLRuntimeException("Unsupported aggregation type: "+optype);
}
}
private static void aggregateUnaryMatrixSparse(MatrixBlock in, MatrixBlock out, AggType optype, ValueFunction vFn, IndexFunction ixFn, int rl, int ru)
throws DMLRuntimeException
{
final int m = in.rlen;
final int n = in.clen;
SparseBlock a = in.getSparseBlock();
double[] c = out.getDenseBlock();
switch( optype )
{
case KAHAN_SUM: //SUM via k+
{
KahanObject kbuff = new KahanObject(0, 0);
if( ixFn instanceof ReduceAll ) // SUM
s_uakp(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWSUM
s_uarkp(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLSUM
s_uackp(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
else if( ixFn instanceof ReduceDiag ) //TRACE
s_uakptrace(a, c, m, n, kbuff, (KahanPlus)vFn, rl, ru);
break;
}
case KAHAN_SUM_SQ: //SUM_SQ via k+
{
KahanObject kbuff = new KahanObject(0, 0);
if( ixFn instanceof ReduceAll ) //SUM_SQ
s_uasqkp(a, c, m, n, kbuff, (KahanPlusSq)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWSUM_SQ
s_uarsqkp(a, c, m, n, kbuff, (KahanPlusSq)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLSUM_SQ
s_uacsqkp(a, c, m, n, kbuff, (KahanPlusSq)vFn, rl, ru);
break;
}
case CUM_KAHAN_SUM: //CUMSUM
{
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
s_ucumkp(a, null, c, m, n, kbuff, kplus, rl, ru);
break;
}
case CUM_PROD: //CUMPROD
{
s_ucumm(a, null, c, m, n, rl, ru);
break;
}
case CUM_MIN:
case CUM_MAX:
{
double init = Double.MAX_VALUE * ((optype==AggType.CUM_MAX)?-1:1);
s_ucummxx(a, null, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MIN:
case MAX: //MAX/MIN
{
double init = Double.MAX_VALUE * ((optype==AggType.MAX)?-1:1);
if( ixFn instanceof ReduceAll ) // MIN/MAX
s_uamxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWMIN/ROWMAX
s_uarmxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLMIN/COLMAX
s_uacmxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MAX_INDEX:
{
double init = -Double.MAX_VALUE;
if( ixFn instanceof ReduceCol ) //ROWINDEXMAX
s_uarimxx(a, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MIN_INDEX:
{
double init = Double.MAX_VALUE;
if( ixFn instanceof ReduceCol ) //ROWINDEXMAX
s_uarimin(a, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
case MEAN:
{
KahanObject kbuff = new KahanObject(0, 0);
if( ixFn instanceof ReduceAll ) // MEAN
s_uamean(a, c, m, n, kbuff, (Mean)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWMEAN
s_uarmean(a, c, m, n, kbuff, (Mean)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLMEAN
s_uacmean(a, c, m, n, kbuff, (Mean)vFn, rl, ru);
break;
}
case VAR: //VAR
{
CM_COV_Object cbuff = new CM_COV_Object();
if( ixFn instanceof ReduceAll ) //VAR
s_uavar(a, c, m, n, cbuff, (CM)vFn, rl, ru);
else if( ixFn instanceof ReduceCol ) //ROWVAR
s_uarvar(a, c, m, n, cbuff, (CM)vFn, rl, ru);
else if( ixFn instanceof ReduceRow ) //COLVAR
s_uacvar(a, c, m, n, cbuff, (CM)vFn, rl, ru);
break;
}
case PROD: //PROD
{
if( ixFn instanceof ReduceAll ) // PROD
s_uam(a, c, m, n, rl, ru );
break;
}
default:
throw new DMLRuntimeException("Unsupported aggregation type: "+optype);
}
}
private static void cumaggregateUnaryMatrixDense(MatrixBlock in, MatrixBlock out, AggType optype, ValueFunction vFn, double[] agg, int rl, int ru)
throws DMLRuntimeException
{
final int m = in.rlen;
final int n = in.clen;
double[] a = in.getDenseBlock();
double[] c = out.getDenseBlock();
switch( optype )
{
case CUM_KAHAN_SUM: //CUMSUM
{
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
d_ucumkp(a, agg, c, m, n, kbuff, kplus, rl, ru);
break;
}
case CUM_PROD: //CUMPROD
{
d_ucumm(a, agg, c, m, n, rl, ru);
break;
}
case CUM_MIN:
case CUM_MAX:
{
double init = Double.MAX_VALUE * ((optype==AggType.CUM_MAX)?-1:1);
d_ucummxx(a, agg, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
default:
throw new DMLRuntimeException("Unsupported cumulative aggregation type: "+optype);
}
}
private static void cumaggregateUnaryMatrixSparse(MatrixBlock in, MatrixBlock out, AggType optype, ValueFunction vFn, double[] agg, int rl, int ru)
throws DMLRuntimeException
{
final int m = in.rlen;
final int n = in.clen;
SparseBlock a = in.getSparseBlock();
double[] c = out.getDenseBlock();
switch( optype )
{
case CUM_KAHAN_SUM: //CUMSUM
{
KahanObject kbuff = new KahanObject(0, 0);
KahanPlus kplus = KahanPlus.getKahanPlusFnObject();
s_ucumkp(a, agg, c, m, n, kbuff, kplus, rl, ru);
break;
}
case CUM_PROD: //CUMPROD
{
s_ucumm(a, agg, c, m, n, rl, ru);
break;
}
case CUM_MIN:
case CUM_MAX:
{
double init = Double.MAX_VALUE * ((optype==AggType.CUM_MAX)?-1:1);
s_ucummxx(a, agg, c, m, n, init, (Builtin)vFn, rl, ru);
break;
}
default:
throw new DMLRuntimeException("Unsupported cumulative aggregation type: "+optype);
}
}
private static MatrixBlock aggregateUnaryMatrixEmpty(MatrixBlock in, MatrixBlock out, AggType optype, IndexFunction ixFn)
throws DMLRuntimeException
{
//do nothing for pseudo sparse-safe operations
if(optype==AggType.KAHAN_SUM || optype==AggType.KAHAN_SUM_SQ
|| optype==AggType.MIN || optype==AggType.MAX || optype==AggType.PROD
|| optype == AggType.CUM_KAHAN_SUM || optype == AggType.CUM_PROD
|| optype == AggType.CUM_MIN || optype == AggType.CUM_MAX)
{
return out;
}
//compute result based on meta data only
switch( optype )
{
case MAX_INDEX:
{
if( ixFn instanceof ReduceCol ) { //ROWINDEXMAX
for(int i=0; i<out.rlen; i++) {
out.quickSetValue(i, 0, in.clen); //maxindex
}
}
break;
}
case MIN_INDEX:
{
if( ixFn instanceof ReduceCol ) //ROWINDEXMIN
for(int i=0; i<out.rlen; i++) {
out.quickSetValue(i, 0, in.clen); //minindex
}
break;
}
case MEAN:
{
if( ixFn instanceof ReduceAll ) // MEAN
out.quickSetValue(0, 1, in.rlen*in.clen); //count
else if( ixFn instanceof ReduceCol ) //ROWMEAN
for( int i=0; i<in.rlen; i++ ) //0-sum and 0-correction
out.quickSetValue(i, 1, in.clen); //count
else if( ixFn instanceof ReduceRow ) //COLMEAN
for( int j=0; j<in.clen; j++ ) //0-sum and 0-correction
out.quickSetValue(1, j, in.rlen); //count
break;
}
case VAR:
{
// results: { var | mean, count, m2 correction, mean correction }
if( ixFn instanceof ReduceAll ) //VAR
out.quickSetValue(0, 2, in.rlen*in.clen); //count
else if( ixFn instanceof ReduceCol ) //ROWVAR
for( int i=0; i<in.rlen; i++ )
out.quickSetValue(i, 2, in.clen); //count
else if( ixFn instanceof ReduceRow ) //COLVAR
for( int j=0; j<in.clen; j++ )
out.quickSetValue(2, j, in.rlen); //count
break;
}
default:
throw new DMLRuntimeException("Unsupported aggregation type: "+optype);
}
return out;
}
////////////////////////////////////////////
// core aggregation functions //
////////////////////////////////////////////
/**
* SUM, opcode: uak+, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uakp( double[] a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
int len = Math.min((ru-rl)*n, a.length);
sum( a, rl*n, len, kbuff, kplus );
c[0] = kbuff._sum;
c[1] = kbuff._correction;
}
/**
* ROWSUM, opcode: uark+, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uarkp( double[] a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
for( int i=rl, aix=rl*n, cix=rl*2; i<ru; i++, aix+=n, cix+=2 )
{
kbuff.set(0, 0); //reset buffer
sum( a, aix, n, kbuff, kplus );
c[cix+0] = kbuff._sum;
c[cix+1] = kbuff._correction;
}
}
/**
* COLSUM, opcode: uack+, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uackp( double[] a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n )
sumAgg( a, c, aix, 0, n, kbuff, kplus );
}
/**
* SUM_SQ, opcode: uasqk+, dense input.
*
* @param a Array of values to square & sum.
* @param c Output array to store sum and correction factor.
* @param m Number of rows.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kplusSq A KahanPlusSq object to perform summation of
* squared values.
* @param rl Lower row limit.
* @param ru Upper row limit.
*/
private static void d_uasqkp(double[] a, double[] c, int m, int n, KahanObject kbuff,
KahanPlusSq kplusSq, int rl, int ru)
{
int len = Math.min((ru-rl)*n, a.length);
sumSq(a, rl*n, len, kbuff, kplusSq);
c[0] = kbuff._sum;
c[1] = kbuff._correction;
}
/**
* ROWSUM_SQ, opcode: uarsqk+, dense input.
*
* @param a Array of values to square & sum row-wise.
* @param c Output array to store sum and correction factor
* for each row.
* @param m Number of rows.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kplusSq A KahanPlusSq object to perform summation of
* squared values.
* @param rl Lower row limit.
* @param ru Upper row limit.
*/
private static void d_uarsqkp(double[] a, double[] c, int m, int n, KahanObject kbuff,
KahanPlusSq kplusSq, int rl, int ru)
{
for (int i=rl, aix=rl*n, cix=rl*2; i<ru; i++, aix+=n, cix+=2) {
kbuff.set(0, 0); //reset buffer
sumSq(a, aix, n, kbuff, kplusSq);
c[cix+0] = kbuff._sum;
c[cix+1] = kbuff._correction;
}
}
/**
* COLSUM_SQ, opcode: uacsqk+, dense input.
*
* @param a Array of values to square & sum column-wise.
* @param c Output array to store sum and correction factor
* for each column.
* @param m Number of rows.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kplusSq A KahanPlusSq object to perform summation of
* squared values.
* @param rl Lower row limit.
* @param ru Upper row limit.
*/
private static void d_uacsqkp(double[] a, double[] c, int m, int n, KahanObject kbuff,
KahanPlusSq kplusSq, int rl, int ru)
{
for (int i=rl, aix=rl*n; i<ru; i++, aix+=n)
sumSqAgg(a, c, aix, 0, n, kbuff, kplusSq);
}
/**
* CUMSUM, opcode: ucumk+, dense input.
*
* @param a ?
* @param agg ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_ucumkp( double[] a, double[] agg, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
//init current row sum/correction arrays w/ neutral 0
double[] csums = new double[ 2*n ];
if( agg != null )
System.arraycopy(agg, 0, csums, 0, n);
//scan once and compute prefix sums
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n ) {
sumAgg( a, csums, aix, 0, n, kbuff, kplus );
System.arraycopy(csums, 0, c, aix, n);
}
}
/**
* CUMPROD, opcode: ucum*, dense input.
*
* @param a ?
* @param agg ?
* @param c ?
* @param m ?
* @param n ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_ucumm( double[] a, double[] agg, double[] c, int m, int n, int rl, int ru )
{
//init current row product array w/ neutral 1
double[] cprods = (agg!=null) ? agg : new double[ n ];
if( agg == null )
Arrays.fill(cprods, 1);
//scan once and compute prefix products
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n ) {
productAgg( a, cprods, aix, 0, n );
System.arraycopy(cprods, 0, c, aix, n);
}
}
/**
* CUMMIN/CUMMAX, opcode: ucummin/ucummax, dense input.
*
* @param a ?
* @param agg ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_ucummxx( double[] a, double[] agg, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
//init current row min/max array w/ extreme value
double[] cmxx = (agg!=null) ? agg : new double[ n ];
if( agg == null )
Arrays.fill(cmxx, init);
//scan once and compute prefix min/max
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n ) {
builtinAgg( a, cmxx, aix, n, builtin );
System.arraycopy(cmxx, 0, c, aix, n);
}
}
/**
* TRACE, opcode: uaktrace
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl ?
* @param ru ?
*/
private static void d_uakptrace( double[] a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
//aggregate diag (via ix=n+1)
for( int i=rl, aix=rl*n+rl; i<ru; i++, aix+=(n+1) )
kplus.execute2(kbuff, a[ aix ]);
c[0] = kbuff._sum;
c[1] = kbuff._correction;
}
/**
* MIN/MAX, opcode: uamin/uamax, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uamxx( double[] a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
int len = Math.min((ru-rl)*n, a.length);
c[0] = builtin(a, rl*n, init, len, builtin);
}
/**
* ROWMIN/ROWMAX, opcode: uarmin/uarmax, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uarmxx( double[] a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n )
c[i] = builtin(a, aix, init, n, builtin);
}
/**
* COLMIN/COLMAX, opcode: uacmin/uacmax, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uacmxx( double[] a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
//init output (base for incremental agg)
Arrays.fill(c, init);
//execute builtin aggregate
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n )
builtinAgg( a, c, aix, n, builtin );
}
/**
* ROWINDEXMAX, opcode: uarimax, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uarimxx( double[] a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
for( int i=rl, aix=rl*n, cix=rl*2; i<ru; i++, aix+=n, cix+=2 )
{
int maxindex = indexmax(a, aix, init, n, builtin);
c[cix+0] = (double)maxindex + 1;
c[cix+1] = a[aix+maxindex]; //max value
}
}
/**
* ROWINDEXMIN, opcode: uarimin, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uarimin( double[] a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
for( int i=rl, aix=rl*n, cix=rl*2; i<ru; i++, aix+=n, cix+=2 )
{
int minindex = indexmin(a, aix, init, n, builtin);
c[cix+0] = (double)minindex + 1;
c[cix+1] = a[aix+minindex]; //min value
}
}
/**
* MEAN, opcode: uamean, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kmean ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uamean( double[] a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
int len = Math.min((ru-rl)*n, a.length);
mean(a, rl*n, len, 0, kbuff, kmean);
c[0] = kbuff._sum;
c[1] = len;
c[2] = kbuff._correction;
}
/**
* ROWMEAN, opcode: uarmean, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kmean ?
* @param rl ?
* @param ru ?
*/
private static void d_uarmean( double[] a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
for( int i=rl, aix=rl*n, cix=rl*3; i<ru; i++, aix+=n, cix+=3 )
{
kbuff.set(0, 0); //reset buffer
mean(a, aix, n, 0, kbuff, kmean);
c[cix+0] = kbuff._sum;
c[cix+1] = n;
c[cix+2] = kbuff._correction;
}
}
/**
* COLMEAN, opcode: uacmean, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kmean ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uacmean( double[] a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
//execute builtin aggregate
for( int i=rl, aix=rl*n; i<ru; i++, aix+=n )
meanAgg( a, c, aix, 0, n, kbuff, kmean );
}
/**
* VAR, opcode: uavar, dense input.
*
* @param a Array of values.
* @param c Output array to store variance, mean, count,
* m2 correction factor, and mean correction factor.
* @param m Number of rows.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @param rl Lower row limit.
* @param ru Upper row limit.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void d_uavar(double[] a, double[] c, int m, int n, CM_COV_Object cbuff, CM cm,
int rl, int ru) throws DMLRuntimeException
{
int len = Math.min((ru-rl)*n, a.length);
var(a, rl*n, len, cbuff, cm);
// store results: { var | mean, count, m2 correction, mean correction }
c[0] = cbuff.getRequiredResult(AggregateOperationTypes.VARIANCE);
c[1] = cbuff.mean._sum;
c[2] = cbuff.w;
c[3] = cbuff.m2._correction;
c[4] = cbuff.mean._correction;
}
/**
* ROWVAR, opcode: uarvar, dense input.
*
* @param a Array of values.
* @param c Output array to store variance, mean, count,
* m2 correction factor, and mean correction factor
* for each row.
* @param m Number of rows.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @param rl Lower row limit.
* @param ru Upper row limit.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void d_uarvar(double[] a, double[] c, int m, int n, CM_COV_Object cbuff, CM cm,
int rl, int ru) throws DMLRuntimeException
{
// calculate variance for each row
for (int i=rl, aix=rl*n, cix=rl*5; i<ru; i++, aix+=n, cix+=5) {
cbuff.reset(); // reset buffer for each row
var(a, aix, n, cbuff, cm);
// store row results: { var | mean, count, m2 correction, mean correction }
c[cix] = cbuff.getRequiredResult(AggregateOperationTypes.VARIANCE);
c[cix+1] = cbuff.mean._sum;
c[cix+2] = cbuff.w;
c[cix+3] = cbuff.m2._correction;
c[cix+4] = cbuff.mean._correction;
}
}
/**
* COLVAR, opcode: uacvar, dense input.
*
* @param a Array of values.
* @param c Output array to store variance, mean, count,
* m2 correction factor, and mean correction factor
* for each column.
* @param m Number of rows.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @param rl Lower row limit.
* @param ru Upper row limit.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void d_uacvar(double[] a, double[] c, int m, int n, CM_COV_Object cbuff, CM cm,
int rl, int ru) throws DMLRuntimeException
{
// calculate variance for each column incrementally
for (int i=rl, aix=rl*n; i<ru; i++, aix+=n)
varAgg(a, c, aix, 0, n, cbuff, cm);
}
/**
* PROD, opcode: ua*, dense input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param rl row lower index
* @param ru row upper index
*/
private static void d_uam( double[] a, double[] c, int m, int n, int rl, int ru )
{
int len = Math.min((ru-rl)*n, a.length);
c[0] = product( a, rl*n, len );
}
/**
* SUM, opcode: uak+, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uakp( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
if( a.isContiguous() ) {
sum(a.values(rl), a.pos(rl), (int)a.size(rl, ru), kbuff, kplus);
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) )
sum(a.values(i), a.pos(i), a.size(i), kbuff, kplus);
}
}
c[0] = kbuff._sum;
c[1] = kbuff._correction;
}
/**
* ROWSUM, opcode: uark+, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uarkp( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
//compute row aggregates
for( int i=rl, cix=rl*2; i<ru; i++, cix+=2 )
if( !a.isEmpty(i) ) {
kbuff.set(0, 0); //reset buffer
sum( a.values(i), a.pos(i), a.size(i), kbuff, kplus );
c[cix+0] = kbuff._sum;
c[cix+1] = kbuff._correction;
}
}
/**
* COLSUM, opcode: uack+, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uackp( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
//compute column aggregates
if( a.isContiguous() ) {
sumAgg( a.values(rl), c, a.indexes(rl), a.pos(rl), (int)a.size(rl, ru), n, kbuff, kplus );
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) )
sumAgg( a.values(i), c, a.indexes(i), a.pos(i), a.size(i), n, kbuff, kplus );
}
}
}
/**
* SUM_SQ, opcode: uasqk+, sparse input.
*
* @param a Sparse array of values to square & sum.
* @param c Output array to store sum and correction factor.
* @param m Number of rows.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kplusSq A KahanPlusSq object to perform summation of
* squared values.
* @param rl Lower row limit.
* @param ru Upper row limit.
*/
private static void s_uasqkp(SparseBlock a, double[] c, int m, int n, KahanObject kbuff,
KahanPlusSq kplusSq, int rl, int ru )
{
if( a.isContiguous() ) {
sumSq(a.values(rl), a.pos(rl), (int)a.size(rl, ru), kbuff, kplusSq);
}
else {
for (int i=rl; i<ru; i++) {
if (!a.isEmpty(i))
sumSq(a.values(i), a.pos(i), a.size(i), kbuff, kplusSq);
}
}
c[0] = kbuff._sum;
c[1] = kbuff._correction;
}
/**
* ROWSUM_SQ, opcode: uarsqk+, sparse input.
*
* @param a Sparse array of values to square & sum row-wise.
* @param c Output array to store sum and correction factor
* for each row.
* @param m Number of rows.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kplusSq A KahanPlusSq object to perform summation of
* squared values.
* @param rl Lower row limit.
* @param ru Upper row limit.
*/
private static void s_uarsqkp(SparseBlock a, double[] c, int m, int n, KahanObject kbuff,
KahanPlusSq kplusSq, int rl, int ru )
{
//compute row aggregates
for (int i=rl, cix=rl*2; i<ru; i++, cix+=2) {
if (!a.isEmpty(i)) {
kbuff.set(0, 0); //reset buffer
sumSq(a.values(i), a.pos(i), a.size(i), kbuff, kplusSq);
c[cix+0] = kbuff._sum;
c[cix+1] = kbuff._correction;
}
}
}
/**
* COLSUM_SQ, opcode: uacsqk+, sparse input.
*
* @param a Sparse array of values to square & sum column-wise.
* @param c Output array to store sum and correction factor
* for each column.
* @param m Number of rows.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kplusSq A KahanPlusSq object to perform summation of
* squared values.
* @param rl Lower row limit.
* @param ru Upper row limit.
*/
private static void s_uacsqkp(SparseBlock a, double[] c, int m, int n, KahanObject kbuff,
KahanPlusSq kplusSq, int rl, int ru )
{
//compute column aggregates
if( a.isContiguous() ) {
sumSqAgg(a.values(rl), c, a.indexes(rl), a.pos(rl), (int)a.size(rl, ru), n, kbuff, kplusSq);
}
else {
for (int i=rl; i<ru; i++) {
if (!a.isEmpty(i))
sumSqAgg(a.values(i), c, a.indexes(i), a.pos(i), a.size(i), n, kbuff, kplusSq);
}
}
}
/**
* CUMSUM, opcode: ucumk+, sparse input.
*
* @param a ?
* @param agg ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_ucumkp( SparseBlock a, double[] agg, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
//init current row sum/correction arrays w/ neutral 0
double[] csums = new double[ 2*n ];
if( agg != null )
System.arraycopy(agg, 0, csums, 0, n);
//scan once and compute prefix sums
for( int i=rl, ix=rl*n; i<ru; i++, ix+=n ) {
if( !a.isEmpty(i) )
sumAgg( a.values(i), csums, a.indexes(i), a.pos(i), a.size(i), n, kbuff, kplus );
//always copy current sum (not sparse-safe)
System.arraycopy(csums, 0, c, ix, n);
}
}
/**
* CUMPROD, opcode: ucum*, sparse input.
*
* @param a ?
* @param agg ?
* @param c ?
* @param m ?
* @param n ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_ucumm( SparseBlock a, double[] agg, double[] c, int m, int n, int rl, int ru )
{
//init current row prod arrays w/ neutral 1
double[] cprod = (agg!=null) ? agg : new double[ n ];
if( agg == null )
Arrays.fill(cprod, 1);
//init count arrays (helper, see correction)
int[] cnt = new int[ n ];
//scan once and compute prefix products
for( int i=rl, ix=rl*n; i<ru; i++, ix+=n )
{
//multiply row of non-zero elements
if( !a.isEmpty(i) ) {
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
productAgg( avals, cprod, aix, apos, 0, alen );
countAgg( avals, cnt, aix, apos, alen );
}
//correction (not sparse-safe and cumulative)
//note: we need to determine if there are only nnz in a column
for( int j=0; j<n; j++ )
if( cnt[j] < i+1 ) //no dense column
cprod[j] *= 0;
//always copy current sum (not sparse-safe)
System.arraycopy(cprod, 0, c, ix, n);
}
}
/**
* CUMMIN/CUMMAX, opcode: ucummin/ucummax, sparse input.
*
* @param a ?
* @param agg ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_ucummxx( SparseBlock a, double[] agg, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
//init current row min/max array w/ extreme value
double[] cmxx = (agg!=null) ? agg : new double[ n ];
if( agg == null )
Arrays.fill(cmxx, init);
//init count arrays (helper, see correction)
int[] cnt = new int[ n ];
//compute column aggregates min/max
for( int i=rl, ix=rl*n; i<ru; i++, ix+=n )
{
if( !a.isEmpty(i) ) {
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
builtinAgg( avals, cmxx, aix, apos, alen, builtin );
countAgg( avals, cnt, aix, apos, alen );
}
//correction (not sparse-safe and cumulative)
//note: we need to determine if there are only nnz in a column
for( int j=0; j<n; j++ )
if( cnt[j] < i+1 ) //no dense column
cmxx[j] = builtin.execute2(cmxx[j], 0);
//always copy current sum (not sparse-safe)
System.arraycopy(cmxx, 0, c, ix, n);
}
}
/**
* TRACE, opcode: uaktrace, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kplus ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uakptrace( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, KahanPlus kplus, int rl, int ru )
{
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) )
kplus.execute2(kbuff, a.get(i,i));
}
c[0] = kbuff._sum;
c[1] = kbuff._correction;
}
/**
* MIN/MAX, opcode: uamin/uamax, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uamxx( SparseBlock a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
double ret = init; //keep init val
if( a.isContiguous() ) {
int alen = (int) a.size(rl, ru);
double val = builtin(a.values(rl), a.pos(rl), init, alen, builtin);
ret = builtin.execute2(ret, val);
//correction (not sparse-safe)
ret = (alen<(ru-rl)*n) ? builtin.execute2(ret, 0) : ret;
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) ) {
double lval = builtin(a.values(i), a.pos(i), init, a.size(i), builtin);
ret = builtin.execute2(ret, lval);
}
//correction (not sparse-safe)
if( a.size(i) < n )
ret = builtin.execute2(ret, 0);
}
}
c[0] = ret;
}
/**
* ROWMIN/ROWMAX, opcode: uarmin/uarmax, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uarmxx( SparseBlock a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
//init result (for empty rows)
Arrays.fill(c, rl, ru, init); //not sparse-safe
for( int i=rl; i<ru; i++ )
{
if( !a.isEmpty(i) )
c[ i ] = builtin(a.values(i), a.pos(i), init, a.size(i), builtin);
//correction (not sparse-safe)
if( a.size(i) < n )
c[ i ] = builtin.execute2(c[ i ], 0);
}
}
/**
* COLMIN/COLMAX, opcode: uacmin/uacmax, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uacmxx( SparseBlock a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
//init output (base for incremental agg)
Arrays.fill(c, init);
//init count arrays (helper, see correction)
int[] cnt = new int[ n ];
//compute column aggregates min/max
if( a.isContiguous() ) {
int alen = (int) a.size(rl, ru);
builtinAgg( a.values(rl), c, a.indexes(rl), a.pos(rl), alen, builtin );
countAgg( a.values(rl), cnt, a.indexes(rl), a.pos(rl), alen );
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) ) {
int apos = a.pos(i);
int alen = a.size(i);
double[] avals = a.values(i);
int[] aix = a.indexes(i);
builtinAgg( avals, c, aix, apos, alen, builtin );
countAgg( avals, cnt, aix, apos, alen );
}
}
}
//correction (not sparse-safe)
//note: we need to determine if there are only nnz in a column
// in order to know if for example a colMax of -8 is true or need
// to be replaced with a 0 because there was a missing nonzero.
for( int i=0; i<n; i++ )
if( cnt[i] < m ) //no dense column
c[i] = builtin.execute2(c[i], 0);
}
/**
* ROWINDEXMAX, opcode: uarimax, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uarimxx( SparseBlock a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
for( int i=rl, cix=rl*2; i<ru; i++, cix+=2 )
{
if( !a.isEmpty(i) ) {
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
int maxindex = indexmax(a.values(i), apos, init, alen, builtin);
c[cix+0] = (double)aix[apos+maxindex] + 1;
c[cix+1] = avals[apos+maxindex]; //max value
//correction (not sparse-safe)
if(alen < n && (builtin.execute2( 0, c[cix+1] ) == 1))
{
int ix = n-1; //find last 0 value
for( int j=alen-1; j>=0; j--, ix-- )
if( aix[j]!=ix )
break;
c[cix+0] = ix + 1; //max index (last)
c[cix+1] = 0; //max value
}
}
else //if( arow==null )
{
//correction (not sparse-safe)
c[cix+0] = n; //max index (last)
c[cix+1] = 0; //max value
}
}
}
/**
* ROWINDEXMIN, opcode: uarimin, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param init ?
* @param builtin ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uarimin( SparseBlock a, double[] c, int m, int n, double init, Builtin builtin, int rl, int ru )
{
for( int i=rl, cix=rl*2; i<ru; i++, cix+=2 )
{
if( !a.isEmpty(i) )
{
int apos = a.pos(i);
int alen = a.size(i);
int[] aix = a.indexes(i);
double[] avals = a.values(i);
int minindex = indexmin(avals, apos, init, alen, builtin);
c[cix+0] = (double)aix[apos+minindex] + 1;
c[cix+1] = avals[apos+minindex]; //min value among non-zeros
//correction (not sparse-safe)
if(alen < n && (builtin.execute2( 0, c[cix+1] ) == 1))
{
int ix = n-1; //find last 0 value
for( int j=alen-1; j>=0; j--, ix-- )
if( aix[apos+j]!=ix )
break;
c[cix+0] = ix + 1; //min index (last)
c[cix+1] = 0; //min value
}
}
else //if( arow==null )
{
//correction (not sparse-safe)
c[cix+0] = n; //min index (last)
c[cix+1] = 0; //min value
}
}
}
/**
* MEAN, opcode: uamean, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kmean ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uamean( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
int len = (ru-rl) * n;
int count = 0;
//correction remaining tuples (not sparse-safe)
//note: before aggregate computation in order to
//exploit 0 sum (noop) and better numerical stability
count += (ru-rl)*n - a.size(rl, ru);
//compute aggregate mean
if( a.isContiguous() ) {
int alen = (int) a.size(rl, ru);
mean(a.values(rl), a.pos(rl), alen, count, kbuff, kmean);
count += alen;
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) ) {
int alen = a.size(i);
mean(a.values(i), a.pos(i), alen, count, kbuff, kmean);
count += alen;
}
}
}
c[0] = kbuff._sum;
c[1] = len;
c[2] = kbuff._correction;
}
/**
* ROWMEAN, opcode: uarmean, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kmean ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uarmean( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
for( int i=rl, cix=rl*3; i<ru; i++, cix+=3 )
{
//correction remaining tuples (not sparse-safe)
//note: before aggregate computation in order to
//exploit 0 sum (noop) and better numerical stability
int count = (a.isEmpty(i)) ? n : n-a.size(i);
kbuff.set(0, 0); //reset buffer
if( !a.isEmpty(i) ) {
mean(a.values(i), a.pos(i), a.size(i), count, kbuff, kmean);
}
c[cix+0] = kbuff._sum;
c[cix+1] = n;
c[cix+2] = kbuff._correction;
}
}
/**
* COLMEAN, opcode: uacmean, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param kbuff ?
* @param kmean ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uacmean( SparseBlock a, double[] c, int m, int n, KahanObject kbuff, Mean kmean, int rl, int ru )
{
//correction remaining tuples (not sparse-safe)
//note: before aggregate computation in order to
//exploit 0 sum (noop) and better numerical stability
Arrays.fill(c, n, n*2, ru-rl);
if( a.isContiguous() ) {
countDisAgg( a.values(rl), c, a.indexes(rl), a.pos(rl), n, (int)a.size(rl, ru) );
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) )
countDisAgg( a.values(i), c, a.indexes(i), a.pos(i), n, a.size(i) );
}
}
//compute column aggregate means
if( a.isContiguous() ) {
meanAgg( a.values(rl), c, a.indexes(rl), a.pos(rl), (int)a.size(rl, ru), n, kbuff, kmean );
}
else {
for( int i=rl; i<ru; i++ ) {
if( !a.isEmpty(i) )
meanAgg( a.values(i), c, a.indexes(i), a.pos(i), a.size(i), n, kbuff, kmean );
}
}
}
/**
* VAR, opcode: uavar, sparse input.
*
* @param a Sparse array of values.
* @param c Output array to store variance, mean, count,
* m2 correction factor, and mean correction factor.
* @param m Number of rows.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @param rl Lower row limit.
* @param ru Upper row limit.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void s_uavar(SparseBlock a, double[] c, int m, int n, CM_COV_Object cbuff, CM cm,
int rl, int ru) throws DMLRuntimeException
{
// compute and store count of empty cells before aggregation
int count = (ru-rl)*n - (int)a.size(rl, ru);
cbuff.w = count;
// calculate aggregated variance (only using non-empty cells)
if( a.isContiguous() ) {
var(a.values(rl), a.pos(rl), (int)a.size(rl, ru), cbuff, cm);
}
else {
for (int i=rl; i<ru; i++) {
if (!a.isEmpty(i))
var(a.values(i), a.pos(i), a.size(i), cbuff, cm);
}
}
// store results: { var | mean, count, m2 correction, mean correction }
c[0] = cbuff.getRequiredResult(AggregateOperationTypes.VARIANCE);
c[1] = cbuff.mean._sum;
c[2] = cbuff.w;
c[3] = cbuff.m2._correction;
c[4] = cbuff.mean._correction;
}
/**
* ROWVAR, opcode: uarvar, sparse input.
*
* @param a Sparse array of values.
* @param c Output array to store variance, mean, count,
* m2 correction factor, and mean correction factor.
* @param m Number of rows.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @param rl Lower row limit.
* @param ru Upper row limit.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void s_uarvar(SparseBlock a, double[] c, int m, int n, CM_COV_Object cbuff, CM cm,
int rl, int ru) throws DMLRuntimeException
{
// calculate aggregated variance for each row
for (int i=rl, cix=rl*5; i<ru; i++, cix+=5) {
cbuff.reset(); // reset buffer for each row
// compute and store count of empty cells in this row
// before aggregation
int count = (a.isEmpty(i)) ? n : n-a.size(i);
cbuff.w = count;
if (!a.isEmpty(i)) {
var(a.values(i), a.pos(i), a.size(i), cbuff, cm);
}
// store results: { var | mean, count, m2 correction, mean correction }
c[cix] = cbuff.getRequiredResult(AggregateOperationTypes.VARIANCE);
c[cix+1] = cbuff.mean._sum;
c[cix+2] = cbuff.w;
c[cix+3] = cbuff.m2._correction;
c[cix+4] = cbuff.mean._correction;
}
}
/**
* COLVAR, opcode: uacvar, sparse input.
*
* @param a Sparse array of values.
* @param c Output array to store variance, mean, count,
* m2 correction factor, and mean correction factor.
* @param m Number of rows.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @param rl Lower row limit.
* @param ru Upper row limit.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void s_uacvar(SparseBlock a, double[] c, int m, int n, CM_COV_Object cbuff, CM cm,
int rl, int ru) throws DMLRuntimeException
{
// compute and store counts of empty cells per column before aggregation
// note: column results are { var | mean, count, m2 correction, mean correction }
// - first, store total possible column counts in 3rd row of output
Arrays.fill(c, n*2, n*3, ru-rl); // counts stored in 3rd row
// - then subtract one from the column count for each dense value in the column
if( a.isContiguous() ) {
countDisAgg(a.values(rl), c, a.indexes(rl), a.pos(rl), n*2, (int)a.size(rl, ru));
}
else {
for (int i=rl; i<ru; i++) {
if (!a.isEmpty(i)) // counts stored in 3rd row
countDisAgg(a.values(i), c, a.indexes(i), a.pos(i), n*2, a.size(i));
}
}
// calculate aggregated variance for each column
if( a.isContiguous() ) {
varAgg(a.values(rl), c, a.indexes(rl), a.pos(rl), (int)a.size(rl, ru), n, cbuff, cm);
}
else {
for (int i=rl; i<ru; i++) {
if (!a.isEmpty(i))
varAgg(a.values(i), c, a.indexes(i), a.pos(i), a.size(i), n, cbuff, cm);
}
}
}
/**
* PROD, opcode: ua*, sparse input.
*
* @param a ?
* @param c ?
* @param m ?
* @param n ?
* @param rl row lower index
* @param ru row upper index
*/
private static void s_uam( SparseBlock a, double[] c, int m, int n, int rl, int ru )
{
double ret = 1;
for( int i=rl; i<ru; i++ )
{
if( !a.isEmpty(i) ) {
int alen = a.size(i);
ret *= product(a.values(i), 0, alen);
ret *= (alen<n) ? 0 : 1;
}
//early abort (note: in case of NaNs this is an invalid optimization)
if( !NAN_AWARENESS && ret==0 ) break;
}
c[0] = ret;
}
////////////////////////////////////////////
// performance-relevant utility functions //
////////////////////////////////////////////
/**
* Summation using the Kahan summation algorithm with the
* KahanPlus function.
*
* @param a ?
* @param ai ?
* @param len ?
* @param kbuff ?
* @param kplus ?
*/
private static void sum(double[] a, int ai, final int len, KahanObject kbuff, KahanPlus kplus)
{
sumWithFn(a, ai, len, kbuff, kplus);
}
/**
* Aggregated summation using the Kahan summation algorithm with
* the KahanPlus function.
*
* @param a ?
* @param c ?
* @param ai ?
* @param ci ?
* @param len ?
* @param kbuff ?
* @param kplus ?
*/
private static void sumAgg(double[] a, double[] c, int ai, int ci, final int len,
KahanObject kbuff, KahanPlus kplus)
{
sumAggWithFn(a, c, ai, ci, len, kbuff, kplus);
}
/**
* Aggregated summation using the Kahan summation algorithm with
* the KahanPlus function.
*
* @param a ?
* @param c ?
* @param aix ?
* @param ai ?
* @param len ?
* @param n ?
* @param kbuff ?
* @param kplus ?
*/
private static void sumAgg(double[] a, double[] c, int[] aix, int ai, final int len, final int n,
KahanObject kbuff, KahanPlus kplus)
{
sumAggWithFn(a, c, aix, ai, len, n, kbuff, kplus);
}
/**
* Summation of squared values using the Kahan summation algorithm
* with the KahanPlusSq function.
*
* @param a ?
* @param ai ?
* @param len ?
* @param kbuff ?
* @param kplusSq ?
*/
private static void sumSq(double[] a, int ai, final int len,
KahanObject kbuff, KahanPlusSq kplusSq)
{
sumWithFn(a, ai, len, kbuff, kplusSq);
}
/**
* Aggregated summation of squared values using the Kahan
* summation algorithm with the KahanPlusSq function.
*
* @param a ?
* @param c ?
* @param ai ?
* @param ci ?
* @param len ?
* @param kbuff ?
* @param kplusSq ?
*/
private static void sumSqAgg(double[] a, double[] c, int ai, int ci, final int len,
KahanObject kbuff, KahanPlusSq kplusSq)
{
sumAggWithFn(a, c, ai, ci, len, kbuff, kplusSq);
}
/**
* Aggregated summation of squared values using the Kahan
* summation algorithm with the KahanPlusSq function.
*
* @param a ?
* @param c ?
* @param aix ?
* @param ai ?
* @param len ?
* @param n ?
* @param kbuff ?
* @param kplusSq ?
*/
private static void sumSqAgg(double[] a, double[] c, int[] aix, int ai, final int len, final int n,
KahanObject kbuff, KahanPlusSq kplusSq)
{
sumAggWithFn(a, c, aix, ai, len, n, kbuff, kplusSq);
}
/**
* Summation using the Kahan summation algorithm with one of the
* Kahan functions.
*
* @param a Array of values to sum.
* @param ai Index at which to start processing.
* @param len Number of values to process, starting at index ai.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kfunc A KahanFunction object to perform the summation.
*/
private static void sumWithFn(double[] a, int ai, final int len,
KahanObject kbuff, KahanFunction kfunc)
{
for (int i=0; i<len; i++, ai++)
kfunc.execute2(kbuff, a[ai]);
}
/**
* Aggregated summation using the Kahan summation algorithm
* with one of the Kahan functions.
*
* @param a Array of values to sum.
* @param c Output array to store aggregated sum and correction
* factors.
* @param ai Index at which to start processing array `a`.
* @param ci Index at which to start storing aggregated results
* into array `c`.
* @param len Number of values to process, starting at index ai.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kfunc A KahanFunction object to perform the summation.
*/
private static void sumAggWithFn(double[] a, double[] c, int ai, int ci, final int len,
KahanObject kbuff, KahanFunction kfunc)
{
for (int i=0; i<len; i++, ai++, ci++) {
kbuff._sum = c[ci];
kbuff._correction = c[ci+len];
kfunc.execute2(kbuff, a[ai]);
c[ci] = kbuff._sum;
c[ci+len] = kbuff._correction;
}
}
/**
* Aggregated summation using the Kahan summation algorithm
* with one of the Kahan functions.
*
* @param a Array of values to sum.
* @param c Output array to store aggregated sum and correction
* factors.
* @param aix ?
* @param ai Array of indices to process for array `a`.
* @param len Number of indices in `ai` to process.
* @param n Number of values per row.
* @param kbuff A KahanObject to hold the current sum and
* correction factor for the Kahan summation
* algorithm.
* @param kfunc A KahanFunction object to perform the summation.
*/
private static void sumAggWithFn(double[] a, double[] c, int[] aix, int ai, final int len, final int n,
KahanObject kbuff, KahanFunction kfunc)
{
for (int i=ai; i<ai+len; i++) {
kbuff._sum = c[aix[i]];
kbuff._correction = c[aix[i]+n];
kfunc.execute2(kbuff, a[i]);
c[aix[i]] = kbuff._sum;
c[aix[i]+n] = kbuff._correction;
}
}
private static double product( double[] a, int ai, final int len )
{
double val = 1;
if( NAN_AWARENESS )
{
//product without early abort
//even if val is 0, it might turn into NaN.
for( int i=0; i<len; i++, ai++ )
val *= a[ ai ];
}
else
{
//product with early abort (if 0)
//note: this will not work with NaNs (invalid optimization)
for( int i=0; i<len && val!=0; i++, ai++ )
val *= a[ ai ];
}
return val;
}
private static void productAgg( double[] a, double[] c, int ai, int ci, final int len )
{
//always w/ NAN_AWARENESS: product without early abort;
//even if val is 0, it might turn into NaN.
//(early abort would require column-flags and branches)
for( int i=0; i<len; i++, ai++, ci++ )
c[ ci ] *= a[ ai ];
}
private static void productAgg( double[] a, double[] c, int[] aix, int ai, int ci, final int len )
{
//always w/ NAN_AWARENESS: product without early abort;
//even if val is 0, it might turn into NaN.
//(early abort would require column-flags and branches)
for( int i=ai; i<ai+len; i++ )
c[ ci + aix[i] ] *= a[ i ];
}
private static void mean( double[] a, int ai, final int len, int count, KahanObject kbuff, Mean mean )
{
for( int i=0; i<len; i++, ai++, count++ )
{
//delta: (newvalue-buffer._sum)/count
mean.execute2(kbuff, a[ai], count+1);
}
}
private static void meanAgg( double[] a, double[] c, int ai, int ci, final int len, KahanObject kbuff, Mean mean )
{
for( int i=0; i<len; i++, ai++, ci++ )
{
kbuff._sum = c[ci];
double count = c[ci+len] + 1;
kbuff._correction = c[ci+2*len];
mean.execute2(kbuff, a[ai], count);
c[ci] = kbuff._sum;
c[ci+len] = count;
c[ci+2*len] = kbuff._correction;
}
}
private static void meanAgg( double[] a, double[] c, int[] aix, int ai, final int len, final int n, KahanObject kbuff, Mean mean )
{
for( int i=ai; i<ai+len; i++ )
{
kbuff._sum = c[aix[i]];
double count = c[aix[i]+n] + 1;
kbuff._correction = c[aix[i]+2*n];
mean.execute2(kbuff, a[ i ], count);
c[aix[i]] = kbuff._sum;
c[aix[i]+n] = count;
c[aix[i]+2*n] = kbuff._correction;
}
}
/**
* Variance
*
* @param a Array of values to sum.
* @param ai Index at which to start processing.
* @param len Number of values to process, starting at index ai.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void var(double[] a, int ai, final int len, CM_COV_Object cbuff, CM cm)
throws DMLRuntimeException
{
for(int i=0; i<len; i++, ai++)
cbuff = (CM_COV_Object) cm.execute(cbuff, a[ai]);
}
/**
* Aggregated variance
*
* @param a Array of values to sum.
* @param c Output array to store aggregated sum and correction
* factors.
* @param ai Index at which to start processing array `a`.
* @param ci Index at which to start storing aggregated results
* into array `c`.
* @param len Number of values to process, starting at index ai.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void varAgg(double[] a, double[] c, int ai, int ci, final int len,
CM_COV_Object cbuff, CM cm) throws DMLRuntimeException
{
for (int i=0; i<len; i++, ai++, ci++) {
// extract current values: { var | mean, count, m2 correction, mean correction }
cbuff.w = c[ci+2*len]; // count
cbuff.m2._sum = c[ci] * (cbuff.w - 1); // m2 = var * (n - 1)
cbuff.mean._sum = c[ci+len]; // mean
cbuff.m2._correction = c[ci+3*len];
cbuff.mean._correction = c[ci+4*len];
// calculate incremental aggregated variance
cbuff = (CM_COV_Object) cm.execute(cbuff, a[ai]);
// store updated values: { var | mean, count, m2 correction, mean correction }
c[ci] = cbuff.getRequiredResult(AggregateOperationTypes.VARIANCE);
c[ci+len] = cbuff.mean._sum;
c[ci+2*len] = cbuff.w;
c[ci+3*len] = cbuff.m2._correction;
c[ci+4*len] = cbuff.mean._correction;
}
}
/**
* Aggregated variance
*
* @param a Array of values to sum.
* @param c Output array to store aggregated sum and correction
* factors.
* @param aix ?
* @param ai Array of indices to process for array `a`.
* @param len Number of indices in `ai` to process.
* @param n Number of values per row.
* @param cbuff A CM_COV_Object to hold various intermediate
* values for the variance calculation.
* @param cm A CM object of type Variance to perform the variance
* calculation.
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static void varAgg(double[] a, double[] c, int[] aix, int ai, final int len, final int n,
CM_COV_Object cbuff, CM cm) throws DMLRuntimeException
{
for (int i=ai; i<ai+len; i++) {
// extract current values: { var | mean, count, m2 correction, mean correction }
cbuff.w = c[aix[i]+2*n]; // count
cbuff.m2._sum = c[aix[i]] * (cbuff.w - 1); // m2 = var * (n - 1)
cbuff.mean._sum = c[aix[i]+n]; // mean
cbuff.m2._correction = c[aix[i]+3*n];
cbuff.mean._correction = c[aix[i]+4*n];
// calculate incremental aggregated variance
cbuff = (CM_COV_Object) cm.execute(cbuff, a[i]);
// store updated values: { var | mean, count, m2 correction, mean correction }
c[aix[i]] = cbuff.getRequiredResult(AggregateOperationTypes.VARIANCE);
c[aix[i]+n] = cbuff.mean._sum;
c[aix[i]+2*n] = cbuff.w;
c[aix[i]+3*n] = cbuff.m2._correction;
c[aix[i]+4*n] = cbuff.mean._correction;
}
}
/**
* Meant for builtin function ops (min, max)
*
* @param a ?
* @param ai?
* @param init ?
* @param len ?
* @param aggop ?
* @return result value
* @throws DMLRuntimeException if DMLRuntimeException occurs
*/
private static double builtin( double[] a, int ai, final double init, final int len, Builtin aggop )
{
double val = init;
for( int i=0; i<len; i++, ai++ )
val = aggop.execute2( val, a[ ai ] );
return val;
}
private static void builtinAgg( double[] a, double[] c, int ai, final int len, Builtin aggop )
{
for( int i=0; i<len; i++, ai++ )
c[ i ] = aggop.execute2( c[ i ], a[ ai ] );
}
private static void builtinAgg( double[] a, double[] c, int[] aix, int ai, final int len, Builtin aggop )
{
for( int i=ai; i<ai+len; i++ )
c[ aix[i] ] = aggop.execute2( c[ aix[i] ], a[ i ] );
}
private static int indexmax( double[] a, int ai, final double init, final int len, Builtin aggop )
{
double maxval = init;
int maxindex = -1;
for( int i=ai; i<ai+len; i++ ) {
maxindex = (a[i]>=maxval) ? i-ai : maxindex;
maxval = (a[i]>=maxval) ? a[i] : maxval;
}
return maxindex;
}
private static int indexmin( double[] a, int ai, final double init, final int len, Builtin aggop )
{
double minval = init;
int minindex = -1;
for( int i=ai; i<ai+len; i++ ) {
minindex = (a[i]<=minval) ? i-ai : minindex;
minval = (a[i]<=minval) ? a[i] : minval;
}
return minindex;
}
private static void countAgg( double[] a, int[] c, int[] aix, int ai, final int len )
{
final int bn = len%8;
//compute rest, not aligned to 8-block
for( int i=ai; i<ai+bn; i++ )
c[ aix[i] ]++;
//unrolled 8-block (for better instruction level parallelism)
for( int i=ai+bn; i<ai+len; i+=8 )
{
c[ aix[ i+0 ] ] ++;
c[ aix[ i+1 ] ] ++;
c[ aix[ i+2 ] ] ++;
c[ aix[ i+3 ] ] ++;
c[ aix[ i+4 ] ] ++;
c[ aix[ i+5 ] ] ++;
c[ aix[ i+6 ] ] ++;
c[ aix[ i+7 ] ] ++;
}
}
private static void countDisAgg( double[] a, double[] c, int[] aix, int ai, final int ci, final int len )
{
final int bn = len%8;
//compute rest, not aligned to 8-block
for( int i=ai; i<ai+bn; i++ )
c[ ci+aix[i] ]--;
//unrolled 8-block (for better instruction level parallelism)
for( int i=ai+bn; i<ai+len; i+=8 )
{
c[ ci+aix[ i+0 ] ] --;
c[ ci+aix[ i+1 ] ] --;
c[ ci+aix[ i+2 ] ] --;
c[ ci+aix[ i+3 ] ] --;
c[ ci+aix[ i+4 ] ] --;
c[ ci+aix[ i+5 ] ] --;
c[ ci+aix[ i+6 ] ] --;
c[ ci+aix[ i+7 ] ] --;
}
}
/////////////////////////////////////////////////////////
// Task Implementations for Multi-Threaded Operations //
/////////////////////////////////////////////////////////
private static abstract class AggTask implements Callable<Object> {}
private static class RowAggTask extends AggTask
{
private MatrixBlock _in = null;
private MatrixBlock _ret = null;
private AggType _aggtype = null;
private AggregateUnaryOperator _uaop = null;
private int _rl = -1;
private int _ru = -1;
protected RowAggTask( MatrixBlock in, MatrixBlock ret, AggType aggtype, AggregateUnaryOperator uaop, int rl, int ru )
{
_in = in;
_ret = ret;
_aggtype = aggtype;
_uaop = uaop;
_rl = rl;
_ru = ru;
}
@Override
public Object call() throws DMLRuntimeException
{
if( !_in.sparse )
aggregateUnaryMatrixDense(_in, _ret, _aggtype, _uaop.aggOp.increOp.fn, _uaop.indexFn, _rl, _ru);
else
aggregateUnaryMatrixSparse(_in, _ret, _aggtype, _uaop.aggOp.increOp.fn, _uaop.indexFn, _rl, _ru);
return null;
}
}
private static class PartialAggTask extends AggTask
{
private MatrixBlock _in = null;
private MatrixBlock _ret = null;
private AggType _aggtype = null;
private AggregateUnaryOperator _uaop = null;
private int _rl = -1;
private int _ru = -1;
protected PartialAggTask( MatrixBlock in, MatrixBlock ret, AggType aggtype, AggregateUnaryOperator uaop, int rl, int ru )
throws DMLRuntimeException
{
_in = in;
_ret = ret;
_aggtype = aggtype;
_uaop = uaop;
_rl = rl;
_ru = ru;
}
@Override
public Object call() throws DMLRuntimeException
{
//thead-local allocation for partial aggregation
_ret = new MatrixBlock(_ret.rlen, _ret.clen, false);
_ret.allocateDenseBlock();
if( !_in.sparse )
aggregateUnaryMatrixDense(_in, _ret, _aggtype, _uaop.aggOp.increOp.fn, _uaop.indexFn, _rl, _ru);
else
aggregateUnaryMatrixSparse(_in, _ret, _aggtype, _uaop.aggOp.increOp.fn, _uaop.indexFn, _rl, _ru);
//recompute non-zeros of partial result
_ret.recomputeNonZeros();
return null;
}
public MatrixBlock getResult() {
return _ret;
}
}
private static class CumAggTask implements Callable<Long>
{
private MatrixBlock _in = null;
private double[] _agg = null;
private MatrixBlock _ret = null;
private AggType _aggtype = null;
private UnaryOperator _uop = null;
private int _rl = -1;
private int _ru = -1;
protected CumAggTask( MatrixBlock in, double[] agg, MatrixBlock ret, AggType aggtype, UnaryOperator uop, int rl, int ru )
throws DMLRuntimeException
{
_in = in;
_agg = agg;
_ret = ret;
_aggtype = aggtype;
_uop = uop;
_rl = rl;
_ru = ru;
}
@Override
public Long call() throws DMLRuntimeException
{
//compute partial cumulative aggregate
if( !_in.sparse )
cumaggregateUnaryMatrixDense(_in, _ret, _aggtype, _uop.fn, _agg, _rl, _ru);
else
cumaggregateUnaryMatrixSparse(_in, _ret, _aggtype, _uop.fn, _agg, _rl, _ru);
//recompute partial non-zeros (ru exlusive)
return _ret.recomputeNonZeros(_rl, _ru-1, 0, _ret.getNumColumns()-1);
}
}
private static class AggTernaryTask implements Callable<MatrixBlock>
{
private final MatrixBlock _in1;
private final MatrixBlock _in2;
private final MatrixBlock _in3;
private MatrixBlock _ret = null;
private final IndexFunction _ixFn;
private final int _rl;
private final int _ru;
protected AggTernaryTask( MatrixBlock in1, MatrixBlock in2, MatrixBlock in3, MatrixBlock ret, IndexFunction ixFn, int rl, int ru )
throws DMLRuntimeException
{
_in1 = in1;
_in2 = in2;
_in3 = in3;
_ret = ret;
_ixFn = ixFn;
_rl = rl;
_ru = ru;
}
@Override
public MatrixBlock call() throws DMLRuntimeException
{
//thead-local allocation for partial aggregation
_ret = new MatrixBlock(_ret.rlen, _ret.clen, false);
_ret.allocateDenseBlock();
if( !_in1.sparse && !_in2.sparse && (_in3==null||!_in3.sparse) ) //DENSE
aggregateTernaryDense(_in1, _in2, _in3, _ret, _ixFn, _rl, _ru);
else //GENERAL CASE
aggregateTernaryGeneric(_in1, _in2, _in3, _ret, _ixFn, _rl, _ru);
//recompute non-zeros of partial result
_ret.recomputeNonZeros();
return _ret;
}
}
private static class GrpAggTask extends AggTask
{
private MatrixBlock _groups = null;
private MatrixBlock _target = null;
private MatrixBlock _weights = null;
private MatrixBlock _ret = null;
private int _numGroups = -1;
private Operator _op = null;
private int _cl = -1;
private int _cu = -1;
protected GrpAggTask( MatrixBlock groups, MatrixBlock target, MatrixBlock weights, MatrixBlock ret, int numGroups, Operator op, int cl, int cu )
throws DMLRuntimeException
{
_groups = groups;
_target = target;
_weights = weights;
_ret = ret;
_numGroups = numGroups;
_op = op;
_cl = cl;
_cu = cu;
}
@Override
public Object call() throws DMLRuntimeException
{
//CM operator for count, mean, variance
//note: current support only for column vectors
if( _op instanceof CMOperator ) {
CMOperator cmOp = (CMOperator) _op;
groupedAggregateCM(_groups, _target, _weights, _ret, _numGroups, cmOp, _cl, _cu);
}
//Aggregate operator for sum (via kahan sum)
//note: support for row/column vectors and dense/sparse
else if( _op instanceof AggregateOperator ) {
AggregateOperator aggop = (AggregateOperator) _op;
groupedAggregateKahanPlus(_groups, _target, _weights, _ret, _numGroups, aggop, _cl, _cu);
}
return null;
}
}
}
| {
"content_hash": "106d173c008bc1794f6a3d8da63c0f21",
"timestamp": "",
"source": "github",
"line_count": 3609,
"max_line_length": 186,
"avg_line_length": 30.34912718204489,
"alnum_prop": 0.6226239386469461,
"repo_name": "iyounus/incubator-systemml",
"id": "404f440af0d1a281dd633743e35db5094b647459",
"size": "110339",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/main/java/org/apache/sysml/runtime/matrix/data/LibMatrixAgg.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "31285"
},
{
"name": "Batchfile",
"bytes": "22265"
},
{
"name": "C",
"bytes": "8676"
},
{
"name": "C++",
"bytes": "30804"
},
{
"name": "CMake",
"bytes": "10312"
},
{
"name": "Cuda",
"bytes": "30575"
},
{
"name": "Java",
"bytes": "12988931"
},
{
"name": "Jupyter Notebook",
"bytes": "36387"
},
{
"name": "Makefile",
"bytes": "936"
},
{
"name": "Protocol Buffer",
"bytes": "66399"
},
{
"name": "Python",
"bytes": "195992"
},
{
"name": "R",
"bytes": "671170"
},
{
"name": "Scala",
"bytes": "186014"
},
{
"name": "Shell",
"bytes": "152940"
}
],
"symlink_target": ""
} |
<?php
declare(strict_types=1);
namespace Sonata\OrderBundle;
use Sonata\CoreBundle\Form\FormHelper;
use Sonata\OrderBundle\Form\Type\OrderStatusType;
use Symfony\Component\DependencyInjection\ContainerBuilder;
use Symfony\Component\HttpKernel\Bundle\Bundle;
class SonataOrderBundle extends Bundle
{
public function build(ContainerBuilder $container): void
{
$this->registerFormMapping();
}
public function boot(): void
{
$this->registerFormMapping();
}
/**
* Register form mapping information.
*
* NEXT_MAJOR: remove this method
*/
public function registerFormMapping(): void
{
if (class_exists(FormHelper::class)) {
FormHelper::registerFormTypeMapping([
'sonata_order_status' => OrderStatusType::class,
]);
}
}
}
| {
"content_hash": "04cc43751362e756531c6cf68e332439",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 64,
"avg_line_length": 21.897435897435898,
"alnum_prop": 0.6557377049180327,
"repo_name": "sonata-project/ecommerce",
"id": "2ac13a23cefd040952fd55d50c3e90af05b0311b",
"size": "1101",
"binary": false,
"copies": "1",
"ref": "refs/heads/3.x",
"path": "src/OrderBundle/SonataOrderBundle.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "921"
},
{
"name": "HTML",
"bytes": "123098"
},
{
"name": "JavaScript",
"bytes": "4012"
},
{
"name": "Makefile",
"bytes": "1475"
},
{
"name": "PHP",
"bytes": "1303179"
},
{
"name": "Shell",
"bytes": "1097"
}
],
"symlink_target": ""
} |
<HTML><HEAD>
<TITLE>Review for Guinevere (1999)</TITLE>
<LINK REL="STYLESHEET" TYPE="text/css" HREF="/ramr.css">
</HEAD>
<BODY BGCOLOR="#FFFFFF" TEXT="#000000">
<H1 ALIGN="CENTER" CLASS="title"><A HREF="/Title?0160338">Guinevere (1999)</A></H1><H3 ALIGN=CENTER>reviewed by<BR><A HREF="/ReviewsBy?Harvey+S.+Karten">Harvey S. Karten</A></H3><HR WIDTH="40%" SIZE="4">
<PRE>GUINEVERE</PRE>
<PRE> Reviewed by Harvey Karten
Miramax Films
Director: Audrey Wells
Writer: Audrey Wells
Cast: Sarah Polley, Stephen Rea, Jean Smart, Gina
Gershon, Paul Dooley</PRE>
<P> Some time back Diogenes searched high and low in his
part of Greece, looking fruitlessly for an honest man. If he
were alive today, he'd probably search another lifetime for a
middle-aged man who has never entertained a fantasy about
a relationship with a 20-year-old woman (or man, depending).
For guys who live strictly in a fantasy world, there's nothing
like a solid movie for an awesome vicarious experience of
that specifically human fancy. "Guinevere" realizes the vision
better than any other film has done this year to date.
"Guinevere," which is both written and debut-directed by
Audrey Wells ("The Truth About Cats and Dogs") is a
remarkable accomplishment, ministering to our need for a
mature film filled with humor and pathos. The cast is a
crackerjack ensemble of actors with Stephen Rea as 40-
something bohemian photographer Connie Fitzpatrick and
Sarah Polley as Harper Sloane, his 20-year old lover and
devotee. "Guinevere," a slickly-made yet incisively-drawn
portrait of a May-September romance, was a hit at last
January's Sundance Festival, featuring much higher
production values than the typical indie programmed in that
Utah showplace each year.</P>
<P> Particularly appealing are the shades and nuances of
feeling experienced by the two principals, each of whom has
ambiguous feelings about the affiliation while both are
unquestionably gaining impressively from their connection.
Essentially this is a story of a young woman just out of
college who is the oddball in her haute bourgeois family, has
no idea what she's good at or even whether she has
potential, and who is culled right out of her husk by an older
man who showers attention and enlightenment on her. As
she transforms from a shy, antisocial individual who despite
her beauty may never have had a boy friend to a romantically
fulfilled and professionally competent human being, she is in
turning contributing a great deal of gratification to the roue
who has taken her in and given her a life.</P>
<P> The affair begins as ruggedly good-looking wedding
photographer Connie Fitzpatrick (Stephen Rea) plies his
trade at the plush wedding of Harper's sister. After spotting a
candid and particularly penetrating picture of herself, Harper
visits the photographer at his bohemian digs in a run-down
San Francisco neighborhood and is seduced by his
seemingly passive but obviously confident invitation to spend
the night. Moving in with the man after experiencing the joys
of both the flesh and the mind, Harper is prodded to study
the field that interests her the most and is plied with books,
attention, and the company of Connie's artistic friends.
Awkward at first (at one point she spills wine on a stack of
Connie photos but is told "That's OK, I've got another bottle"),
she soon develops all the grace of a assured, fulfilled woman.
By the end of the affair, both parties have obviously grown
emotionally, both delighting in each other's company and
riding out the inevitable periods of friction and rage.</P>
<P> Audrey Wells excels in both direction and scripting,
especially in getting Harper to convey nuances of emotion by
simple frowns, pouts, and subtle twists of her necklace.
Wells also jackets the film in a variety of moods, most striking
in the concluding moments when she converts Connie's
studio into a surreal vision of heavenly light. The director
charges the movie with witty dialogue throughout, in one
showstopping scene presenting a knockout demonstration of
putdowns from the lips of Harper's mom, Deborah (Jean
Smart), who sharply and amusingly challenges the artist's
ability to relate to a mature, experienced woman. (One
writer who was present at Sundance exclaimed that this
scene "brought the house down.")</P>
<P> Wells peppers the story with the little gestures that mean
so much in turning what could have been yet another
romance into a compelling, believable story. There's the
entirely hilarious rich mom played with pizazz by Jean Smart
who instructs the family not to bother asking for Harper's
opinions at the dinner table: "Don't bother her...she's digging
a hole in her bread." There's the envious middle-aged guy
sitting at a table a few yards from Connie and Harper who
quickly gives a thumbs-up sign to the would-be Humbert
Humbert. There's the bevy of women who have "graduated"
from Connie course, each spending two or three years with
the seductive photographer, each nicknamed Queen
Guinevere by the charming fellow. </P>
<P> The flaws are minor, virtually unnoticeable, none distracting
from the momentum of the story. Harper, for example,
considers herself unfit for anything. She is not sociable, she
has no interest in any craft and no intellectual pretensions.
Yet we are told that she has been admitted to Harvard Law
School. But who's worried? We've been made privy to the
pain that lies beneath the surface of Connie's bonhomie and
the potential possessed by Harper that needs only a catalyst
to secure its release. All insights are put over with a good
deal of humor and love making Guinevere a winning choice
for September movie mavens.</P>
<PRE>Rated R. Running Time: 108 minutes. (C) 1999
Harvey Karten</PRE>
<HR><P CLASS=flush><SMALL>The review above was posted to the
<A HREF="news:rec.arts.movies.reviews">rec.arts.movies.reviews</A> newsgroup (<A HREF="news:de.rec.film.kritiken">de.rec.film.kritiken</A> for German reviews).<BR>
The Internet Movie Database accepts no responsibility for the contents of the
review and has no editorial control. Unless stated otherwise, the copyright
belongs to the author.<BR>
Please direct comments/criticisms of the review to relevant newsgroups.<BR>
Broken URLs inthe reviews are the responsibility of the author.<BR>
The formatting of the review is likely to differ from the original due
to ASCII to HTML conversion.
</SMALL></P>
<P ALIGN=CENTER>Related links: <A HREF="/Reviews/">index of all rec.arts.movies.reviews reviews</A></P>
</P></BODY></HTML>
| {
"content_hash": "1da5b2796c1197d1fe48e9fe967ba66e",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 203,
"avg_line_length": 56.05042016806723,
"alnum_prop": 0.7715142428785607,
"repo_name": "xianjunzhengbackup/code",
"id": "520387af61a5c376d70cd55b945e6383b6690b96",
"size": "6670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data science/machine_learning_for_the_web/chapter_4/movie/19971.html",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "BitBake",
"bytes": "113"
},
{
"name": "BlitzBasic",
"bytes": "256"
},
{
"name": "CSS",
"bytes": "49827"
},
{
"name": "HTML",
"bytes": "157006325"
},
{
"name": "JavaScript",
"bytes": "14029"
},
{
"name": "Jupyter Notebook",
"bytes": "4875399"
},
{
"name": "Mako",
"bytes": "2060"
},
{
"name": "Perl",
"bytes": "716"
},
{
"name": "Python",
"bytes": "874414"
},
{
"name": "R",
"bytes": "454"
},
{
"name": "Shell",
"bytes": "3984"
}
],
"symlink_target": ""
} |
NS_HGE_BEGIN
bool HGEDish::digestJSON(JSONValue& json) {
return 0;
}
bool HGEDish::broadcastShake() {
JSONValue json(vapidjson::kObjectType);
json.AddMember(HGE_KEYTEXT_TASK, "shake");
json.AddMember(HGE_KEYTEXT_ARGS, true);
this->produceJSON(json);
return !0;
}
NS_HGE_END
| {
"content_hash": "80b271eac62b689898060ccdcbfd20db",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 43,
"avg_line_length": 15.31578947368421,
"alnum_prop": 0.7079037800687286,
"repo_name": "jcmoore/ggframework",
"id": "a498ac6bed13ebbe6b5965d995c92a9f7113526e",
"size": "404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hybridge/service/HGEDish.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "82002"
},
{
"name": "C++",
"bytes": "260419"
},
{
"name": "Objective-C",
"bytes": "7880"
}
],
"symlink_target": ""
} |
<?php declare(strict_types=1);
namespace ApiClients\Client\Travis\Resource\Async;
use ApiClients\Client\Travis\Resource\EmptyJob as BaseEmptyJob;
class EmptyJob extends BaseEmptyJob
{
}
| {
"content_hash": "86b12b6aa1dce36e485fada49f8e2bb5",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 63,
"avg_line_length": 21,
"alnum_prop": 0.8095238095238095,
"repo_name": "php-api-clients/travis",
"id": "7c18498fb822e66e3bbf5a8902e624d1a500a27d",
"size": "189",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Resource/Async/EmptyJob.php",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "582"
},
{
"name": "PHP",
"bytes": "206824"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>coalgebras: Not compatible</title>
<link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" />
<link href="../../../../../bootstrap.min.css" rel="stylesheet">
<link href="../../../../../bootstrap-custom.css" rel="stylesheet">
<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet">
<script src="../../../../../moment.min.js"></script>
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="navbar navbar-default" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a>
</div>
<div id="navbar" class="collapse navbar-collapse">
<ul class="nav navbar-nav">
<li><a href="../..">clean / extra-dev</a></li>
<li class="active"><a href="">8.10.dev / coalgebras - 8.7.0</a></li>
</ul>
</div>
</div>
</div>
<div class="article">
<div class="row">
<div class="col-md-12">
<a href="../..">« Up</a>
<h1>
coalgebras
<small>
8.7.0
<span class="label label-info">Not compatible</span>
</small>
</h1>
<p><em><script>document.write(moment("2020-03-17 10:27:16 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2020-03-17 10:27:16 UTC)</em><p>
<h2>Context</h2>
<pre># Packages matching: installed
# Name # Installed # Synopsis
base-bigarray base
base-threads base
base-unix base
conf-findutils 1 Virtual package relying on findutils
conf-m4 1 Virtual package relying on m4
coq 8.10.dev Formal proof management system
num 1.3 The legacy Num library for arbitrary-precision integer and rational arithmetic
ocaml 4.09.0 The OCaml compiler (virtual package)
ocaml-base-compiler 4.09.0 Official release 4.09.0
ocaml-config 1 OCaml Switch Configuration
ocamlfind 1.8.1 A library manager for OCaml
# opam file:
opam-version: "2.0"
maintainer: "[email protected]"
homepage: "https://github.com/coq-contribs/coalgebras"
license: "LGPL"
build: [make "-j%{jobs}%"]
install: [make "install"]
remove: ["rm" "-R" "%{lib}%/coq/user-contrib/Coalgebras"]
depends: [
"ocaml"
"coq" {>= "8.7" & < "8.8~"}
]
tags: [ "keyword: coalgebra" "keyword: bisimulation" "keyword: weakly final" "keyword: coiteration" "keyword: coinductive" "category: Mathematics/Category Theory" "date: 2008-10" ]
authors: [ "Milad Niqui <[email protected]> [http://www.cwi.nl/~milad]" ]
bug-reports: "https://github.com/coq-contribs/coalgebras/issues"
dev-repo: "git+https://github.com/coq-contribs/coalgebras.git"
synopsis: "Coalgebras, bisimulation and lambda-coiteration"
description:
"This contribution contains a formalisation of coalgebras, bisimulation on coalgebras, weakly final coalgebras, lambda-coiteration definition scheme (including primitive corecursion) and a version of lambda-bisimulation. The formalisation is modular. The implementation of the module types for streams and potentially infinite Peano numbers are provided using the coinductive types."
flags: light-uninstall
url {
src: "https://github.com/coq-contribs/coalgebras/archive/v8.7.0.tar.gz"
checksum: "md5=73818481b949ccbb1d4579b4da27c951"
}
</pre>
<h2>Lint</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Dry install</h2>
<p>Dry install with the current Coq version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam install -y --show-action coq-coalgebras.8.7.0 coq.8.10.dev</code></dd>
<dt>Return code</dt>
<dd>5120</dd>
<dt>Output</dt>
<dd><pre>[NOTE] Package coq is already installed (current version is 8.10.dev).
The following dependencies couldn't be met:
- coq-coalgebras -> coq < 8.8~ -> ocaml < 4.06.0
base of this switch (use `--unlock-base' to force)
Your request can't be satisfied:
- No available version of coq satisfies the constraints
No solution found, exiting
</pre></dd>
</dl>
<p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-coalgebras.8.7.0</code></dd>
<dt>Return code</dt>
<dd>0</dd>
</dl>
<h2>Install dependencies</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Install</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Duration</dt>
<dd>0 s</dd>
</dl>
<h2>Installation size</h2>
<p>No files were installed.</p>
<h2>Uninstall</h2>
<dl class="dl-horizontal">
<dt>Command</dt>
<dd><code>true</code></dd>
<dt>Return code</dt>
<dd>0</dd>
<dt>Missing removes</dt>
<dd>
none
</dd>
<dt>Wrong removes</dt>
<dd>
none
</dd>
</dl>
</div>
</div>
</div>
<hr/>
<div class="footer">
<p class="text-center">
<small>Sources are on <a href="https://github.com/coq-bench">GitHub</a>. © Guillaume Claret.</small>
</p>
</div>
</div>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
<script src="../../../../../bootstrap.min.js"></script>
</body>
</html>
| {
"content_hash": "ce57b2f1a9cab4e6a142986a78165bf5",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 395,
"avg_line_length": 44.396341463414636,
"alnum_prop": 0.5598132124708145,
"repo_name": "coq-bench/coq-bench.github.io",
"id": "08b5fd9182d0136d865778f3c7ce761c793e6b8b",
"size": "7283",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "clean/Linux-x86_64-4.09.0-2.0.5/extra-dev/8.10.dev/coalgebras/8.7.0.html",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
} |
(function () {
'use strict';
// Feats controller
angular
.module('feats')
.controller('FeatsController', FeatsController);
FeatsController.$inject = ['$scope', '$state', '$window', 'Authentication', 'featResolve', 'FeatsService', 'RulebooksService', 'FeatcategoriesService' ,'GameversionsService',];
function FeatsController ($scope, $state, $window, Authentication, feat, FeatsService, RulebooksService, FeatcategoriesService, GameversionsService) {
var vm = this;
vm.options = FeatsService.query();
vm.rulebooks = RulebooksService.query();
vm.gameversions = GameversionsService.query();
vm.featcategories = FeatcategoriesService.query();
vm.authentication = Authentication;
vm.feat = feat;
vm.error = null;
vm.form = {};
vm.remove = remove;
vm.save = save;
console.log(vm.options);
// Remove existing Feat
function remove() {
if ($window.confirm('Are you sure you want to delete?')) {
vm.feat.$remove($state.go('feats.list'));
}
}
// Save Feat
function save(isValid) {
if (!isValid) {
$scope.$broadcast('show-errors-check-validity', 'vm.form.featForm');
return false;
}
// TODO: move create/update logic to service
if (vm.feat._id) {
vm.feat.$update(successCallback, errorCallback);
} else {
vm.feat.$save(successCallback, errorCallback);
}
function successCallback(res) {
$state.go('feats.view', {
featId: res._id
});
}
function errorCallback(res) {
vm.error = res.data.message;
}
}
$scope.togglePrereq = function(option){
var i = 0;
var prereq = {
id: option._id,
name: option.name
}
var num = 0;
if(option.checked === true){
for(i; i < vm.feat.featprereq.length; i++){
console.log(vm.feat.featprereq[i].id);
console.log(prereq.id)
if(vm.feat.featprereq[i].id === prereq.id){
num++
console.log(num)
}
}
console.log(num)
if(num === 0){
vm.feat.featprereq.push(prereq)
}
} else if(option.checked === false){
for(i; i < vm.feat.featprereq.length; i++){
if(vm.feat.featprereq[i].id === prereq.id){
vm.feat.featprereq.splice(prereq)
}
}
}
}
$scope.removePrereq = function(option){
console.log(option)
var i = 0;
for(i; i < vm.feat.featprereq.length; i++){
if(vm.feat.featprereq[i].id === option){
vm.feat.featprereq.splice(vm.feat.featprereq[i])
}
}
}
$scope.updateRulebookInfo = function(){
var book = vm.feat.book;
var rules = vm.rulebooks;
for(var i = 0; i< rules.length; i++){
if(rules[i].name === book){
vm.feat.gameversion = rules[i].gameversions;
vm.feat.bookid = rules[i]._id;
vm.feat.gameversionID = rules[i].gameversionID;
}
}
};
}
}());
| {
"content_hash": "86f6ee1116dac6a5d4533ef25e96546c",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 178,
"avg_line_length": 27.07017543859649,
"alnum_prop": 0.559300064808814,
"repo_name": "atadsp/d20cp",
"id": "69c7f53cb4973e144706fec387bc7961356a77b6",
"size": "3086",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "modules/feats/client/controllers/feats.client.controller.js",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1974"
},
{
"name": "HTML",
"bytes": "119420"
},
{
"name": "JavaScript",
"bytes": "954644"
},
{
"name": "Shell",
"bytes": "685"
}
],
"symlink_target": ""
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.datalabeling.v1beta1.model;
/**
* Response used for ExportDataset longrunning operation.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Data Labeling API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudDatalabelingV1beta1ExportDataOperationResponse extends com.google.api.client.json.GenericJson {
/**
* Output only. The name of annotated dataset in format "projects/datasets/annotatedDatasets".
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String annotatedDataset;
/**
* Ouptut only. The name of dataset. "projects/datasets"
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String dataset;
/**
* Output only. Number of examples exported successfully.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer exportCount;
/**
* Output only. Statistic infos of labels in the exported dataset.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDatalabelingV1beta1LabelStats labelStats;
/**
* Output only. output_config in the ExportData request.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private GoogleCloudDatalabelingV1beta1OutputConfig outputConfig;
/**
* Output only. Total number of examples requested to export
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Integer totalCount;
/**
* Output only. The name of annotated dataset in format "projects/datasets/annotatedDatasets".
* @return value or {@code null} for none
*/
public java.lang.String getAnnotatedDataset() {
return annotatedDataset;
}
/**
* Output only. The name of annotated dataset in format "projects/datasets/annotatedDatasets".
* @param annotatedDataset annotatedDataset or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse setAnnotatedDataset(java.lang.String annotatedDataset) {
this.annotatedDataset = annotatedDataset;
return this;
}
/**
* Ouptut only. The name of dataset. "projects/datasets"
* @return value or {@code null} for none
*/
public java.lang.String getDataset() {
return dataset;
}
/**
* Ouptut only. The name of dataset. "projects/datasets"
* @param dataset dataset or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse setDataset(java.lang.String dataset) {
this.dataset = dataset;
return this;
}
/**
* Output only. Number of examples exported successfully.
* @return value or {@code null} for none
*/
public java.lang.Integer getExportCount() {
return exportCount;
}
/**
* Output only. Number of examples exported successfully.
* @param exportCount exportCount or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse setExportCount(java.lang.Integer exportCount) {
this.exportCount = exportCount;
return this;
}
/**
* Output only. Statistic infos of labels in the exported dataset.
* @return value or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1LabelStats getLabelStats() {
return labelStats;
}
/**
* Output only. Statistic infos of labels in the exported dataset.
* @param labelStats labelStats or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse setLabelStats(GoogleCloudDatalabelingV1beta1LabelStats labelStats) {
this.labelStats = labelStats;
return this;
}
/**
* Output only. output_config in the ExportData request.
* @return value or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1OutputConfig getOutputConfig() {
return outputConfig;
}
/**
* Output only. output_config in the ExportData request.
* @param outputConfig outputConfig or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse setOutputConfig(GoogleCloudDatalabelingV1beta1OutputConfig outputConfig) {
this.outputConfig = outputConfig;
return this;
}
/**
* Output only. Total number of examples requested to export
* @return value or {@code null} for none
*/
public java.lang.Integer getTotalCount() {
return totalCount;
}
/**
* Output only. Total number of examples requested to export
* @param totalCount totalCount or {@code null} for none
*/
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse setTotalCount(java.lang.Integer totalCount) {
this.totalCount = totalCount;
return this;
}
@Override
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse set(String fieldName, Object value) {
return (GoogleCloudDatalabelingV1beta1ExportDataOperationResponse) super.set(fieldName, value);
}
@Override
public GoogleCloudDatalabelingV1beta1ExportDataOperationResponse clone() {
return (GoogleCloudDatalabelingV1beta1ExportDataOperationResponse) super.clone();
}
}
| {
"content_hash": "0f8735b46062ab3d922c2e9034a874fd",
"timestamp": "",
"source": "github",
"line_count": 186,
"max_line_length": 182,
"avg_line_length": 33.08064516129032,
"alnum_prop": 0.7339509182512596,
"repo_name": "googleapis/google-api-java-client-services",
"id": "16f3da41d71138ad999a7998ba253e44e7c1bee0",
"size": "6153",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "clients/google-api-services-datalabeling/v1beta1/1.30.1/com/google/api/services/datalabeling/v1beta1/model/GoogleCloudDatalabelingV1beta1ExportDataOperationResponse.java",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
} |
layout: page
title: "Michelle Renee Walker"
comments: true
description: "blanks"
keywords: "Michelle Renee Walker,CU,Boulder"
---
<head>
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js"></script>
<script src="https://dl.dropboxusercontent.com/s/pc42nxpaw1ea4o9/highcharts.js?dl=0"></script>
<!-- <script src="../assets/js/highcharts.js"></script> -->
<style type="text/css">@font-face {
font-family: "Bebas Neue";
src: url(https://www.filehosting.org/file/details/544349/BebasNeue Regular.otf) format("opentype");
}
h1.Bebas {
font-family: "Bebas Neue", Verdana, Tahoma;
}
</style>
</head>
#### TEACHING INFORMATION
**College**: College of Arts and Sciences
**Classes taught**: SOCY 2031
#### SOCY 2031: Social Problems
**Terms taught**: Spring 2010, Fall 2010, Spring 2011, Fall 2011, Spring 2012, Fall 2012, Spring 2013
**Instructor rating**: 4.54
**Standard deviation in instructor rating**: 0.28
**Average grade** (4.0 scale): 3.11
**Standard deviation in grades** (4.0 scale): 0.18
**Average workload** (raw): 2.04
**Standard deviation in workload** (raw): 0.18
| {
"content_hash": "1206e6b00d675efe07a1b641ad7dbca9",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 101,
"avg_line_length": 26.714285714285715,
"alnum_prop": 0.696969696969697,
"repo_name": "nikhilrajaram/nikhilrajaram.github.io",
"id": "f6fb1bcefdc3c05f0b441b4a4a1e499ed5ff7c89",
"size": "1126",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "instructors/Michelle_Renee_Walker.md",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "15727"
},
{
"name": "HTML",
"bytes": "48339721"
},
{
"name": "Python",
"bytes": "9692"
},
{
"name": "Ruby",
"bytes": "5940"
}
],
"symlink_target": ""
} |
/**
* Autogenerated by Thrift Compiler (0.9.1)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.airavata.model.workspace.experiment;
import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;
@SuppressWarnings("all") public enum DataType implements org.apache.thrift.TEnum {
STRING(0),
INTEGER(1),
URI(2),
STDOUT(3),
STDERR(4);
private final int value;
private DataType(int value) {
this.value = value;
}
/**
* Get the integer value of this enum value, as defined in the Thrift IDL.
*/
public int getValue() {
return value;
}
/**
* Find a the enum type by its integer value, as defined in the Thrift IDL.
* @return null if the value is not found.
*/
public static DataType findByValue(int value) {
switch (value) {
case 0:
return STRING;
case 1:
return INTEGER;
case 2:
return URI;
case 3:
return STDOUT;
case 4:
return STDERR;
default:
return null;
}
}
}
| {
"content_hash": "dabe185406e3c5f3df4621e619543daa",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 82,
"avg_line_length": 19.482142857142858,
"alnum_prop": 0.6241979835013749,
"repo_name": "glahiru/airavata",
"id": "8cd6a02ac4b2aca521483e881361de65fc08c5d3",
"size": "1893",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "airavata-api/airavata-data-models/src/main/java/org/apache/airavata/model/workspace/experiment/DataType.java",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "67381"
},
{
"name": "C++",
"bytes": "2828557"
},
{
"name": "Java",
"bytes": "15148513"
},
{
"name": "PHP",
"bytes": "1418267"
},
{
"name": "Shell",
"bytes": "54877"
},
{
"name": "XSLT",
"bytes": "45917"
}
],
"symlink_target": ""
} |
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android" >
<solid android:color="@color/pj_white" />
<stroke
android:width="1dp"
android:color="@android:color/black" />
</shape> | {
"content_hash": "55c4387e2d2a608b9cd3109a602db58a",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 67,
"avg_line_length": 25.2,
"alnum_prop": 0.6309523809523809,
"repo_name": "polljoy/polljoy-android",
"id": "48f8d18c8bc01a26fad96580ec8458d779171f20",
"size": "252",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PolljoySDK/res/drawable/response_edit_text_background.xml",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Java",
"bytes": "180317"
}
],
"symlink_target": ""
} |
namespace blink {
// We inject a fake task runner in multiple tests, to avoid actually posting
// tasks cross-thread whilst still being able to know if they have been posted.
class FakeTaskRunner : public base::SingleThreadTaskRunner {
public:
FakeTaskRunner() : task_posted_(false) {}
bool PostNonNestableDelayedTask(const base::Location& from_here,
base::OnceClosure task,
base::TimeDelta delay) override {
task_posted_ = true;
return true;
}
bool PostDelayedTask(const base::Location& from_here,
base::OnceClosure task,
base::TimeDelta delay) override {
task_posted_ = true;
return true;
}
bool RunsTasksInCurrentSequence() const override { return true; }
bool task_posted_;
protected:
~FakeTaskRunner() override {}
};
class PaintWorkletProxyClientTest : public RenderingTest {
public:
PaintWorkletProxyClientTest() = default;
void SetUp() override {
RenderingTest::SetUp();
paint_worklet_ =
MakeGarbageCollected<PaintWorklet>(*GetFrame().DomWindow());
dispatcher_ = std::make_unique<PaintWorkletPaintDispatcher>();
fake_compositor_thread_runner_ = base::MakeRefCounted<FakeTaskRunner>();
proxy_client_ = MakeGarbageCollected<PaintWorkletProxyClient>(
1, paint_worklet_, dispatcher_->GetWeakPtr(),
fake_compositor_thread_runner_);
reporting_proxy_ = std::make_unique<WorkerReportingProxy>();
}
void AddGlobalScopeOnWorkletThread(WorkerThread* worker_thread,
PaintWorkletProxyClient* proxy_client,
base::WaitableEvent* waitable_event) {
// The natural flow for PaintWorkletGlobalScope is to be registered with the
// proxy client during its first registerPaint call. Rather than circumvent
// this with a specialised AddGlobalScopeForTesting method, we just use the
// standard flow.
ClassicScript::CreateUnspecifiedScript(
"registerPaint('add_global_scope', class { paint() { } });")
->RunScriptOnWorkerOrWorklet(*worker_thread->GlobalScope());
waitable_event->Signal();
}
using TestCallback = void (*)(WorkerThread*,
PaintWorkletProxyClient*,
base::WaitableEvent*);
void RunMultipleGlobalScopeTestsOnWorklet(TestCallback callback) {
// PaintWorklet is stateless, and this is enforced via having multiple
// global scopes (which are switched between). To mimic the real world,
// create multiple WorkerThread for this. Note that the underlying thread
// may be shared even though they are unique WorkerThread instances!
Vector<std::unique_ptr<WorkerThread>> worklet_threads;
for (wtf_size_t i = 0; i < PaintWorklet::kNumGlobalScopesPerThread; i++) {
worklet_threads.push_back(CreateThreadAndProvidePaintWorkletProxyClient(
&GetDocument(), reporting_proxy_.get(), proxy_client_));
}
// Add the global scopes. This must happen on the worklet thread.
for (wtf_size_t i = 0; i < PaintWorklet::kNumGlobalScopesPerThread; i++) {
base::WaitableEvent waitable_event;
PostCrossThreadTask(
*worklet_threads[i]->GetTaskRunner(TaskType::kInternalTest),
FROM_HERE,
CrossThreadBindOnce(
&PaintWorkletProxyClientTest::AddGlobalScopeOnWorkletThread,
CrossThreadUnretained(this),
CrossThreadUnretained(worklet_threads[i].get()),
CrossThreadPersistent<PaintWorkletProxyClient>(proxy_client_),
CrossThreadUnretained(&waitable_event)));
waitable_event.Wait();
}
// Now let the test actually run. We only run the test on the first worklet
// thread currently; this suffices since they share the proxy.
base::WaitableEvent waitable_event;
PostCrossThreadTask(
*worklet_threads[0]->GetTaskRunner(TaskType::kInternalTest), FROM_HERE,
CrossThreadBindOnce(
callback, CrossThreadUnretained(worklet_threads[0].get()),
CrossThreadPersistent<PaintWorkletProxyClient>(proxy_client_),
CrossThreadUnretained(&waitable_event)));
waitable_event.Wait();
// And finally clean up.
for (wtf_size_t i = 0; i < PaintWorklet::kNumGlobalScopesPerThread; i++) {
worklet_threads[i]->Terminate();
worklet_threads[i]->WaitForShutdownForTesting();
}
}
std::unique_ptr<PaintWorkletPaintDispatcher> dispatcher_;
Persistent<PaintWorklet> paint_worklet_;
scoped_refptr<FakeTaskRunner> fake_compositor_thread_runner_;
Persistent<PaintWorkletProxyClient> proxy_client_;
std::unique_ptr<WorkerReportingProxy> reporting_proxy_;
};
TEST_F(PaintWorkletProxyClientTest, PaintWorkletProxyClientConstruction) {
PaintWorkletProxyClient* proxy_client =
MakeGarbageCollected<PaintWorkletProxyClient>(1, nullptr, nullptr,
nullptr);
EXPECT_EQ(proxy_client->worklet_id_, 1);
EXPECT_EQ(proxy_client->paint_dispatcher_, nullptr);
auto dispatcher = std::make_unique<PaintWorkletPaintDispatcher>();
proxy_client = MakeGarbageCollected<PaintWorkletProxyClient>(
1, nullptr, dispatcher->GetWeakPtr(), nullptr);
EXPECT_EQ(proxy_client->worklet_id_, 1);
EXPECT_NE(proxy_client->paint_dispatcher_, nullptr);
}
void RunAddGlobalScopesTestOnWorklet(
WorkerThread* thread,
PaintWorkletProxyClient* proxy_client,
scoped_refptr<FakeTaskRunner> compositor_task_runner,
base::WaitableEvent* waitable_event) {
// For this test, we cheat and reuse the same global scope object from a
// single WorkerThread. In real code these would be different global scopes.
// First, add all but one of the global scopes. The proxy client should not
// yet register itself.
for (size_t i = 0; i < PaintWorklet::kNumGlobalScopesPerThread - 1; i++) {
proxy_client->AddGlobalScope(To<WorkletGlobalScope>(thread->GlobalScope()));
}
EXPECT_EQ(proxy_client->GetGlobalScopesForTesting().size(),
PaintWorklet::kNumGlobalScopesPerThread - 1);
EXPECT_FALSE(compositor_task_runner->task_posted_);
// Now add the final global scope. This should trigger the registration.
proxy_client->AddGlobalScope(To<WorkletGlobalScope>(thread->GlobalScope()));
EXPECT_EQ(proxy_client->GetGlobalScopesForTesting().size(),
PaintWorklet::kNumGlobalScopesPerThread);
EXPECT_TRUE(compositor_task_runner->task_posted_);
waitable_event->Signal();
}
TEST_F(PaintWorkletProxyClientTest, AddGlobalScopes) {
ScopedOffMainThreadCSSPaintForTest off_main_thread_css_paint(true);
// Global scopes must be created on worker threads.
std::unique_ptr<WorkerThread> worklet_thread =
CreateThreadAndProvidePaintWorkletProxyClient(
&GetDocument(), reporting_proxy_.get(), proxy_client_);
EXPECT_TRUE(proxy_client_->GetGlobalScopesForTesting().IsEmpty());
base::WaitableEvent waitable_event;
PostCrossThreadTask(
*worklet_thread->GetTaskRunner(TaskType::kInternalTest), FROM_HERE,
CrossThreadBindOnce(
&RunAddGlobalScopesTestOnWorklet,
CrossThreadUnretained(worklet_thread.get()),
CrossThreadPersistent<PaintWorkletProxyClient>(proxy_client_),
fake_compositor_thread_runner_,
CrossThreadUnretained(&waitable_event)));
waitable_event.Wait();
worklet_thread->Terminate();
worklet_thread->WaitForShutdownForTesting();
}
void RunPaintTestOnWorklet(WorkerThread* thread,
PaintWorkletProxyClient* proxy_client,
base::WaitableEvent* waitable_event) {
// Assert that all global scopes have been registered. Note that we don't
// use ASSERT_EQ here as that would crash the worklet thread and the test
// would timeout rather than fail.
EXPECT_EQ(proxy_client->GetGlobalScopesForTesting().size(),
PaintWorklet::kNumGlobalScopesPerThread);
// Register the painter on all global scopes.
for (const auto& global_scope : proxy_client->GetGlobalScopesForTesting()) {
ClassicScript::CreateUnspecifiedScript(
"registerPaint('foo', class { paint() { } });")
->RunScriptOnWorkerOrWorklet(*global_scope);
}
PaintWorkletStylePropertyMap::CrossThreadData data;
Vector<std::unique_ptr<CrossThreadStyleValue>> input_arguments;
std::vector<cc::PaintWorkletInput::PropertyKey> property_keys;
scoped_refptr<CSSPaintWorkletInput> input =
base::MakeRefCounted<CSSPaintWorkletInput>(
"foo", gfx::SizeF(100, 100), 1.0f, 1.0f, 1, std::move(data),
std::move(input_arguments), std::move(property_keys));
sk_sp<PaintRecord> record = proxy_client->Paint(input.get(), {});
EXPECT_NE(record, nullptr);
waitable_event->Signal();
}
TEST_F(PaintWorkletProxyClientTest, Paint) {
ScopedOffMainThreadCSSPaintForTest off_main_thread_css_paint(true);
RunMultipleGlobalScopeTestsOnWorklet(&RunPaintTestOnWorklet);
}
void RunDefinitionsMustBeCompatibleTestOnWorklet(
WorkerThread* thread,
PaintWorkletProxyClient* proxy_client,
base::WaitableEvent* waitable_event) {
// Assert that all global scopes have been registered. Note that we don't
// use ASSERT_EQ here as that would crash the worklet thread and the test
// would timeout rather than fail.
EXPECT_EQ(proxy_client->GetGlobalScopesForTesting().size(),
PaintWorklet::kNumGlobalScopesPerThread);
// This test doesn't make sense if there's only one global scope!
EXPECT_GT(PaintWorklet::kNumGlobalScopesPerThread, 1u);
const Vector<CrossThreadPersistent<PaintWorkletGlobalScope>>& global_scopes =
proxy_client->GetGlobalScopesForTesting();
// Things that can be different: alpha different, native properties
// different, custom properties different, input type args different.
const HashMap<String, std::unique_ptr<DocumentPaintDefinition>>&
document_definition_map = proxy_client->DocumentDefinitionMapForTesting();
// Differing native properties.
ClassicScript::CreateUnspecifiedScript(R"JS(registerPaint('test1', class {
static get inputProperties() { return ['border-image', 'color']; }
paint() { }
});)JS")
->RunScriptOnWorkerOrWorklet(*global_scopes[0]);
EXPECT_NE(document_definition_map.at("test1"), nullptr);
ClassicScript::CreateUnspecifiedScript(R"JS(registerPaint('test1', class {
static get inputProperties() { return ['left']; }
paint() { }
});)JS")
->RunScriptOnWorkerOrWorklet(*global_scopes[1]);
EXPECT_EQ(document_definition_map.at("test1"), nullptr);
// Differing custom properties.
ClassicScript::CreateUnspecifiedScript(R"JS(registerPaint('test2', class {
static get inputProperties() { return ['--foo', '--bar']; }
paint() { }
});)JS")
->RunScriptOnWorkerOrWorklet(*global_scopes[0]);
EXPECT_NE(document_definition_map.at("test2"), nullptr);
ClassicScript::CreateUnspecifiedScript(R"JS(registerPaint('test2', class {
static get inputProperties() { return ['--zoinks']; }
paint() { }
});)JS")
->RunScriptOnWorkerOrWorklet(*global_scopes[1]);
EXPECT_EQ(document_definition_map.at("test2"), nullptr);
// Differing alpha values. The default is 'true'.
ClassicScript::CreateUnspecifiedScript(
"registerPaint('test3', class { paint() { } });")
->RunScriptOnWorkerOrWorklet(*global_scopes[0]);
EXPECT_NE(document_definition_map.at("test3"), nullptr);
ClassicScript::CreateUnspecifiedScript(R"JS(registerPaint('test3', class {
static get contextOptions() { return {alpha: false}; }
paint() { }
});)JS")
->RunScriptOnWorkerOrWorklet(*global_scopes[1]);
EXPECT_EQ(document_definition_map.at("test3"), nullptr);
waitable_event->Signal();
}
TEST_F(PaintWorkletProxyClientTest, DefinitionsMustBeCompatible) {
ScopedOffMainThreadCSSPaintForTest off_main_thread_css_paint(true);
RunMultipleGlobalScopeTestsOnWorklet(
&RunDefinitionsMustBeCompatibleTestOnWorklet);
}
namespace {
// Calling registerPaint can cause the PaintWorkletProxyClient to post back from
// the worklet thread to the main thread. This is safe in the general case,
// since the task will just queue up to run after the test has finished, but
// the following tests want to know whether or not the task has posted; this
// class provides that information.
class ScopedFakeMainThreadTaskRunner {
public:
ScopedFakeMainThreadTaskRunner(PaintWorkletProxyClient* proxy_client)
: proxy_client_(proxy_client), fake_task_runner_(new FakeTaskRunner) {
original_task_runner_ = proxy_client->MainThreadTaskRunnerForTesting();
proxy_client_->SetMainThreadTaskRunnerForTesting(fake_task_runner_);
}
~ScopedFakeMainThreadTaskRunner() {
proxy_client_->SetMainThreadTaskRunnerForTesting(original_task_runner_);
}
void ResetTaskHasBeenPosted() { fake_task_runner_->task_posted_ = false; }
bool TaskHasBeenPosted() const { return fake_task_runner_->task_posted_; }
private:
// The PaintWorkletProxyClient is held on the main test thread, but we are
// constructed on the worklet thread so we have to hold the client reference
// in a CrossThreadPersistent.
CrossThreadPersistent<PaintWorkletProxyClient> proxy_client_;
scoped_refptr<FakeTaskRunner> fake_task_runner_;
scoped_refptr<base::SingleThreadTaskRunner> original_task_runner_;
};
} // namespace
void RunAllDefinitionsMustBeRegisteredBeforePostingTestOnWorklet(
WorkerThread* thread,
PaintWorkletProxyClient* proxy_client,
base::WaitableEvent* waitable_event) {
ScopedFakeMainThreadTaskRunner fake_runner(proxy_client);
// Assert that all global scopes have been registered. Note that we don't
// use ASSERT_EQ here as that would crash the worklet thread and the test
// would timeout rather than fail.
EXPECT_EQ(proxy_client->GetGlobalScopesForTesting().size(),
PaintWorklet::kNumGlobalScopesPerThread);
// Register a new paint function on all but one global scope. They should not
// end up posting a task to the PaintWorklet.
const Vector<CrossThreadPersistent<PaintWorkletGlobalScope>>& global_scopes =
proxy_client->GetGlobalScopesForTesting();
for (wtf_size_t i = 0; i < global_scopes.size() - 1; i++) {
ClassicScript::CreateUnspecifiedScript(
"registerPaint('foo', class { paint() { } });")
->RunScriptOnWorkerOrWorklet(*global_scopes[i]);
EXPECT_FALSE(fake_runner.TaskHasBeenPosted());
}
// Now register the final one; the task should then be posted.
ClassicScript::CreateUnspecifiedScript(
"registerPaint('foo', class { paint() { } });")
->RunScriptOnWorkerOrWorklet(*global_scopes.back());
EXPECT_TRUE(fake_runner.TaskHasBeenPosted());
waitable_event->Signal();
}
TEST_F(PaintWorkletProxyClientTest,
AllDefinitionsMustBeRegisteredBeforePosting) {
ScopedOffMainThreadCSSPaintForTest off_main_thread_css_paint(true);
RunMultipleGlobalScopeTestsOnWorklet(
&RunAllDefinitionsMustBeRegisteredBeforePostingTestOnWorklet);
}
} // namespace blink
| {
"content_hash": "a06a44dd5884f92f088d3df3d10010b0",
"timestamp": "",
"source": "github",
"line_count": 353,
"max_line_length": 80,
"avg_line_length": 42.90934844192635,
"alnum_prop": 0.7116920842411039,
"repo_name": "scheib/chromium",
"id": "4cacbdda3ce9121496d65609087aeecd283b5e5a",
"size": "16284",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "third_party/blink/renderer/modules/csspaint/paint_worklet_proxy_client_test.cc",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
namespace Esfa.Vacancy.Application.Commands.CreateApprenticeship
{
public interface IWageTextFormatter
{
string GetWageText(CreateApprenticeshipRequest request);
}
} | {
"content_hash": "9e044995e490f96aecafa0448700319f",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 65,
"avg_line_length": 26.571428571428573,
"alnum_prop": 0.7688172043010753,
"repo_name": "SkillsFundingAgency/vacancy-register-api",
"id": "f0e7c40c981de42f8b26d18658d4dcc9d06343c7",
"size": "188",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Esfa.Vacancy.Application/Commands/CreateApprenticeship/IWageTextFormatter.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ASP",
"bytes": "225"
},
{
"name": "C#",
"bytes": "876864"
},
{
"name": "CSS",
"bytes": "426075"
},
{
"name": "HTML",
"bytes": "43696"
},
{
"name": "JavaScript",
"bytes": "14582"
},
{
"name": "PowerShell",
"bytes": "11001"
}
],
"symlink_target": ""
} |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/platform.h"
#include "postFx/postEffect.h"
#include "console/engineAPI.h"
#include "core/stream/fileStream.h"
#include "core/strings/stringUnit.h"
#include "console/consoleTypes.h"
#include "console/engineAPI.h"
#include "math/util/frustum.h"
#include "math/mathUtils.h"
#include "gfx/gfxTransformSaver.h"
#include "gfx/gfxStringEnumTranslate.h"
#include "gfx/gfxTextureManager.h"
#include "gfx/gfxDebugEvent.h"
#include "gfx/util/screenspace.h"
#include "gfx/sim/gfxStateBlockData.h"
#include "scene/sceneRenderState.h"
#include "shaderGen/shaderGenVars.h"
#include "lighting/lightInfo.h"
#include "lighting/lightManager.h"
#include "materials/materialManager.h"
#include "materials/shaderData.h"
#include "postFx/postEffectManager.h"
#include "postFx/postEffectVis.h"
using namespace Torque;
ConsoleDocClass( PostEffect,
"@brief A fullscreen shader effect.\n\n"
"@section PFXTextureIdentifiers\n\n"
"@ingroup Rendering\n"
);
IMPLEMENT_CALLBACK( PostEffect, onAdd, void, (), (),
"Called when this object is first created and registered."
);
IMPLEMENT_CALLBACK( PostEffect, preProcess, void, (), (),
"Called when an effect is processed but before textures are bound. This "
"allows the user to change texture related paramaters or macros at runtime.\n"
"@tsexample\n"
"function SSAOPostFx::preProcess( %this )\n"
"{\n"
" if ( $SSAOPostFx::quality !$= %this.quality )\n"
" {\n"
" %this.quality = mClamp( mRound( $SSAOPostFx::quality ), 0, 2 );\n"
" \n"
" %this.setShaderMacro( \"QUALITY\", %this.quality );\n"
" }\n"
" %this.targetScale = $SSAOPostFx::targetScale;\n"
"}\n"
"@endtsexample\n"
"@see setShaderConst\n"
"@see setShaderMacro"
);
IMPLEMENT_CALLBACK( PostEffect, setShaderConsts, void, (), (),
"Called immediate before processing this effect. This is the user's chance "
"to set the value of shader uniforms (constants).\n"
"@see setShaderConst"
);
IMPLEMENT_CALLBACK( PostEffect, onEnabled, bool, (), (),
"Called when this effect becomes enabled. If the user returns false from "
"this callback the effect will not be enabled.\n"
"@return True to allow this effect to be enabled."
);
IMPLEMENT_CALLBACK( PostEffect, onDisabled, void, (), (),
"Called when this effect becomes disabled."
);
ImplementEnumType( PFXRenderTime,
"When to process this effect during the frame.\n"
"@ingroup Rendering\n\n")
{ PFXBeforeBin, "PFXBeforeBin", "Before a RenderInstManager bin.\n" },
{ PFXAfterBin, "PFXAfterBin", "After a RenderInstManager bin.\n" },
{ PFXAfterDiffuse, "PFXAfterDiffuse", "After the diffuse rendering pass.\n" },
{ PFXEndOfFrame, "PFXEndOfFrame", "When the end of the frame is reached.\n" },
{ PFXTexGenOnDemand, "PFXTexGenOnDemand", "This PostEffect is not processed by the manager. It will generate its texture when it is requested.\n" }
EndImplementEnumType;
ImplementEnumType( PFXTargetClear,
"Describes when the target texture should be cleared\n"
"@ingroup Rendering\n\n")
{ PFXTargetClear_None, "PFXTargetClear_None", "Never clear the PostEffect target.\n" },
{ PFXTargetClear_OnCreate, "PFXTargetClear_OnCreate", "Clear once on create.\n" },
{ PFXTargetClear_OnDraw, "PFXTargetClear_OnDraw", "Clear before every draw.\n" },
EndImplementEnumType;
ImplementEnumType( PFXTargetViewport,
"Specifies how the viewport should be set up for a PostEffect's target.\n"
"@note Applies to both the diffuse target and the depth target (if defined).\n"
"@ingroup Rendering\n\n")
{ PFXTargetViewport_TargetSize, "PFXTargetViewport_TargetSize", "Set viewport to match target size (default).\n" },
{ PFXTargetViewport_GFXViewport, "PFXTargetViewport_GFXViewport", "Use the current GFX viewport (scaled to match target size).\n" },
{ PFXTargetViewport_NamedInTexture0, "PFXTargetViewport_NamedInTexture0", "Use the input texture 0 if it is named (scaled to match target size), otherwise revert to PFXTargetViewport_TargetSize if there is none.\n" },
EndImplementEnumType;
GFXImplementVertexFormat( PFXVertex )
{
addElement( "POSITION", GFXDeclType_Float3 );
addElement( "TEXCOORD", GFXDeclType_Float2, 0 );
addElement( "TEXCOORD", GFXDeclType_Float3, 1 );
};
GFX_ImplementTextureProfile( PostFxTargetProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::PreserveSize |
GFXTextureProfile::RenderTarget |
GFXTextureProfile::Pooled,
GFXTextureProfile::NONE );
IMPLEMENT_CONOBJECT(PostEffect);
GFX_ImplementTextureProfile( PostFxTextureProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::Static | GFXTextureProfile::PreserveSize | GFXTextureProfile::NoMipmap,
GFXTextureProfile::NONE );
GFX_ImplementTextureProfile( PostFxTextureSRGBProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::Static | GFXTextureProfile::PreserveSize | GFXTextureProfile::NoMipmap | GFXTextureProfile::SRGB,
GFXTextureProfile::NONE);
GFX_ImplementTextureProfile( VRTextureProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::PreserveSize |
GFXTextureProfile::RenderTarget |
GFXTextureProfile::NoMipmap,
GFXTextureProfile::NONE );
GFX_ImplementTextureProfile( VRDepthProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::PreserveSize |
GFXTextureProfile::NoMipmap |
GFXTextureProfile::ZTarget,
GFXTextureProfile::NONE );
void PostEffect::EffectConst::set( const String &newVal )
{
if ( mStringVal == newVal )
return;
mStringVal = newVal;
mDirty = true;
}
void PostEffect::EffectConst::setToBuffer( GFXShaderConstBufferRef buff )
{
// Nothing to do if the value hasn't changed.
if ( !mDirty )
return;
mDirty = false;
// If we don't have a handle... get it now.
if ( !mHandle )
mHandle = buff->getShader()->getShaderConstHandle( mName );
// If the handle isn't valid then we're done.
if ( !mHandle->isValid() )
return;
const GFXShaderConstType type = mHandle->getType();
// For now, we're only going
// to support float4 arrays.
// Expand to other types as necessary.
U32 arraySize = mHandle->getArraySize();
const char *strVal = mStringVal.c_str();
if ( type == GFXSCT_Int )
{
S32 val;
Con::setData( TypeS32, &val, 0, 1, &strVal );
buff->set( mHandle, val );
}
else if ( type == GFXSCT_Float )
{
F32 val;
Con::setData( TypeF32, &val, 0, 1, &strVal );
buff->set( mHandle, val );
}
else if ( type == GFXSCT_Float2 )
{
Point2F val;
Con::setData( TypePoint2F, &val, 0, 1, &strVal );
buff->set( mHandle, val );
}
else if ( type == GFXSCT_Float3 )
{
Point3F val;
Con::setData( TypePoint3F, &val, 0, 1, &strVal );
buff->set( mHandle, val );
}
else if ( type == GFXSCT_Float4 )
{
Point4F val;
if ( arraySize > 1 )
{
// Do array setup!
//U32 unitCount = StringUnit::getUnitCount( strVal, "\t" );
//AssertFatal( unitCount == arraySize, "" );
String tmpString;
Vector<Point4F> valArray;
for ( U32 i = 0; i < arraySize; i++ )
{
tmpString = StringUnit::getUnit( strVal, i, "\t" );
valArray.increment();
const char *tmpCStr = tmpString.c_str();
Con::setData( TypePoint4F, &valArray.last(), 0, 1, &tmpCStr );
}
AlignedArray<Point4F> rectData( valArray.size(), sizeof( Point4F ), (U8*)valArray.address(), false );
buff->set( mHandle, rectData );
}
else
{
// Do regular setup.
Con::setData( TypePoint4F, &val, 0, 1, &strVal );
buff->set( mHandle, val );
}
}
else
{
#if TORQUE_DEBUG
const char* err = avar("PostEffect::EffectConst::setToBuffer $s type is not implemented", mName.c_str());
Con::errorf(err);
GFXAssertFatal(0,err);
#endif
}
}
//-------------------------------------------------------------------------
// PostEffect
//-------------------------------------------------------------------------
PostEffect::PostEffect()
: mRenderTime( PFXAfterDiffuse ),
mRenderPriority( 1.0 ),
mEnabled( false ),
mStateBlockData( NULL ),
mUpdateShader( true ),
mSkip( false ),
mAllowReflectPass( false ),
mTargetClear( PFXTargetClear_None ),
mTargetScale( Point2F::One ),
mTargetViewport( PFXTargetViewport_TargetSize ),
mTargetSize( Point2I::Zero ),
mTargetFormat( GFXFormatR8G8B8A8 ),
mTargetClearColor( LinearColorF::BLACK ),
mOneFrameOnly( false ),
mOnThisFrame( true ),
mRTSizeSC( NULL ),
mIsValid( false ),
mShaderReloadKey( 0 ),
mOneOverRTSizeSC( NULL ),
mViewportOffsetSC( NULL ),
mTargetViewportSC( NULL ),
mFogDataSC( NULL ),
mFogColorSC( NULL ),
mEyePosSC( NULL ),
mMatWorldToScreenSC( NULL ),
mMatScreenToWorldSC( NULL ),
mMatPrevScreenToWorldSC( NULL ),
mNearFarSC( NULL ),
mInvNearFarSC( NULL ),
mWorldToScreenScaleSC( NULL ),
mProjectionOffsetSC( NULL ),
mWaterColorSC( NULL ),
mWaterFogDataSC( NULL ),
mAmbientColorSC( NULL ),
mWaterFogPlaneSC( NULL ),
mWaterDepthGradMaxSC( NULL ),
mScreenSunPosSC( NULL ),
mLightDirectionSC( NULL ),
mCameraForwardSC( NULL ),
mAccumTimeSC( NULL ),
mDeltaTimeSC( NULL ),
mInvCameraMatSC( NULL ),
mMatCameraToWorldSC( NULL)
{
dMemset( mTexSRGB, 0, sizeof(bool) * NumTextures);
dMemset( mActiveTextures, 0, sizeof( GFXTextureObject* ) * NumTextures );
dMemset( mActiveNamedTarget, 0, sizeof( NamedTexTarget* ) * NumTextures );
dMemset( mActiveTextureViewport, 0, sizeof( RectI ) * NumTextures );
dMemset( mTexSizeSC, 0, sizeof( GFXShaderConstHandle* ) * NumTextures );
dMemset( mRenderTargetParamsSC, 0, sizeof( GFXShaderConstHandle* ) * NumTextures );
}
PostEffect::~PostEffect()
{
EffectConstTable::Iterator iter = mEffectConsts.begin();
for ( ; iter != mEffectConsts.end(); iter++ )
delete iter->value;
}
void PostEffect::initPersistFields()
{
addField( "shader", TypeRealString, Offset( mShaderName, PostEffect ),
"Name of a GFXShaderData for this effect." );
addField( "stateBlock", TYPEID<GFXStateBlockData>(), Offset( mStateBlockData, PostEffect ),
"Name of a GFXStateBlockData for this effect." );
addField( "target", TypeRealString, Offset( mTargetName, PostEffect ),
"String identifier of this effect's target texture.\n"
"@see PFXTextureIdentifiers" );
addField( "targetDepthStencil", TypeRealString, Offset( mTargetDepthStencilName, PostEffect ),
"Optional string identifier for this effect's target depth/stencil texture.\n"
"@see PFXTextureIdentifiers" );
addField( "targetScale", TypePoint2F, Offset( mTargetScale, PostEffect ),
"If targetSize is zero this is used to set a relative size from the current target." );
addField( "targetSize", TypePoint2I, Offset( mTargetSize, PostEffect ),
"If non-zero this is used as the absolute target size." );
addField( "targetFormat", TypeGFXFormat, Offset( mTargetFormat, PostEffect ),
"Format of the target texture, not applicable if writing to the backbuffer." );
addField( "targetClearColor", TypeColorF, Offset( mTargetClearColor, PostEffect ),
"Color to which the target texture is cleared before rendering." );
addField( "targetClear", TYPEID< PFXTargetClear >(), Offset( mTargetClear, PostEffect ),
"Describes when the target texture should be cleared." );
addField( "targetViewport", TYPEID< PFXTargetViewport >(), Offset( mTargetViewport, PostEffect ),
"Specifies how the viewport should be set up for a target texture." );
addField( "texture", TypeImageFilename, Offset( mTexFilename, PostEffect ), NumTextures,
"Input textures to this effect ( samplers ).\n"
"@see PFXTextureIdentifiers" );
addField("textureSRGB", TypeBool, Offset(mTexSRGB, PostEffect), NumTextures,
"Set input texture to be sRGB");
addField( "renderTime", TYPEID< PFXRenderTime >(), Offset( mRenderTime, PostEffect ),
"When to process this effect during the frame." );
addField( "renderBin", TypeRealString, Offset( mRenderBin, PostEffect ),
"Name of a renderBin, used if renderTime is PFXBeforeBin or PFXAfterBin." );
addField( "renderPriority", TypeF32, Offset( mRenderPriority, PostEffect ),
"PostEffects are processed in DESCENDING order of renderPriority if more than one has the same renderBin/Time." );
addField( "allowReflectPass", TypeBool, Offset( mAllowReflectPass, PostEffect ),
"Is this effect processed during reflection render passes." );
addProtectedField( "isEnabled", TypeBool, Offset( mEnabled, PostEffect),
&PostEffect::_setIsEnabled, &defaultProtectedGetFn,
"Is the effect on." );
addField( "onThisFrame", TypeBool, Offset( mOnThisFrame, PostEffect ),
"Allows you to turn on a PostEffect for only a single frame." );
addField( "oneFrameOnly", TypeBool, Offset( mOneFrameOnly, PostEffect ),
"Allows you to turn on a PostEffect for only a single frame." );
addField( "skip", TypeBool, Offset( mSkip, PostEffect ),
"Skip processing of this PostEffect and its children even if its parent "
"is enabled. Parent and sibling PostEffects in the chain are still processed." );
Parent::initPersistFields();
}
bool PostEffect::onAdd()
{
if( !Parent::onAdd() )
return false;
LightManager::smActivateSignal.notify( this, &PostEffect::_onLMActivate );
mUpdateShader = true;
// Grab the script path.
Torque::Path scriptPath( Con::getVariable( "$Con::File" ) );
scriptPath.setFileName( String::EmptyString );
scriptPath.setExtension( String::EmptyString );
// Find additional textures
for( S32 i = 0; i < NumTextures; i++ )
{
String texFilename = mTexFilename[i];
// Skip empty stages or ones with variable or target names.
if ( texFilename.isEmpty() ||
texFilename[0] == '$' ||
texFilename[0] == '#' )
continue;
GFXTextureProfile *profile = &PostFxTextureProfile;
if (mTexSRGB[i])
profile = &PostFxTextureSRGBProfile;
// Try to load the texture.
bool success = mTextures[i].set( texFilename, &PostFxTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
if (!success)
Con::errorf("Invalid Texture for PostEffect (%s), The Texture '%s' does not exist!", this->getName(), texFilename.c_str());
}
// Is the target a named target?
if ( mTargetName.isNotEmpty() && mTargetName[0] == '#' )
{
mNamedTarget.registerWithName( mTargetName.substr( 1 ) );
mNamedTarget.getTextureDelegate().bind( this, &PostEffect::_getTargetTexture );
}
if ( mTargetDepthStencilName.isNotEmpty() && mTargetDepthStencilName[0] == '#' )
mNamedTargetDepthStencil.registerWithName( mTargetDepthStencilName.substr( 1 ) );
if (mNamedTarget.isRegistered() || mNamedTargetDepthStencil.isRegistered())
GFXTextureManager::addEventDelegate( this, &PostEffect::_onTextureEvent );
// Call onAdd in script
onAdd_callback();
// Should we start enabled?
if ( mEnabled )
{
mEnabled = false;
enable();
}
getSet()->addObject( this );
return true;
}
void PostEffect::onRemove()
{
Parent::onRemove();
PFXMGR->_removeEffect( this );
LightManager::smActivateSignal.remove( this, &PostEffect::_onLMActivate );
mShader = NULL;
_cleanTargets();
if ( mNamedTarget.isRegistered() || mNamedTargetDepthStencil.isRegistered() )
GFXTextureManager::removeEventDelegate( this, &PostEffect::_onTextureEvent );
if ( mNamedTarget.isRegistered() )
{
mNamedTarget.unregister();
mNamedTarget.getTextureDelegate().clear();
}
if ( mNamedTargetDepthStencil.isRegistered() )
mNamedTargetDepthStencil.unregister();
}
void PostEffect::_updateScreenGeometry( const Frustum &frustum,
GFXVertexBufferHandle<PFXVertex> *outVB )
{
outVB->set( GFX, 4, GFXBufferTypeVolatile );
const Point3F *frustumPoints = frustum.getPoints();
const Point3F& cameraPos = frustum.getPosition();
// Perform a camera offset. We need to manually perform this offset on the postFx's
// polygon, which is at the far plane.
const Point2F& projOffset = frustum.getProjectionOffset();
Point3F cameraOffsetPos = cameraPos;
if(!projOffset.isZero())
{
// First we need to calculate the offset at the near plane. The projOffset
// given above can be thought of a percent as it ranges from 0..1 (or 0..-1).
F32 nearOffset = frustum.getNearRight() * projOffset.x;
// Now given the near plane distance from the camera we can solve the right
// triangle and calcuate the SIN theta for the offset at the near plane.
// SIN theta = x/y
F32 sinTheta = nearOffset / frustum.getNearDist();
// Finally, we can calcuate the offset at the far plane, which is where our sun (or vector)
// light's polygon is drawn.
F32 farOffset = frustum.getFarDist() * sinTheta;
// We can now apply this far plane offset to the far plane itself, which then compensates
// for the project offset.
MatrixF camTrans = frustum.getTransform();
VectorF offset = camTrans.getRightVector();
offset *= farOffset;
cameraOffsetPos += offset;
}
PFXVertex *vert = outVB->lock();
vert->point.set(-1.0, 1.0, 0.0);
vert->texCoord.set(0.0f, 0.0f);
vert->wsEyeRay = frustumPoints[Frustum::FarTopLeft] - cameraOffsetPos;
vert++;
vert->point.set(1.0, 1.0, 0.0);
vert->texCoord.set(1.0f, 0.0f);
vert->wsEyeRay = frustumPoints[Frustum::FarTopRight] - cameraOffsetPos;
vert++;
vert->point.set(-1.0, -1.0, 0.0);
vert->texCoord.set(0.0f, 1.0f);
vert->wsEyeRay = frustumPoints[Frustum::FarBottomLeft] - cameraOffsetPos;
vert++;
vert->point.set(1.0, -1.0, 0.0);
vert->texCoord.set(1.0f, 1.0f);
vert->wsEyeRay = frustumPoints[Frustum::FarBottomRight] - cameraOffsetPos;
vert++;
outVB->unlock();
}
void PostEffect::_setupStateBlock( const SceneRenderState *state )
{
if ( mStateBlock.isNull() )
{
GFXStateBlockDesc desc;
if ( mStateBlockData )
desc = mStateBlockData->getState();
mStateBlock = GFX->createStateBlock( desc );
}
GFX->setStateBlock( mStateBlock );
}
void PostEffect::_setupConstants( const SceneRenderState *state )
{
// Alloc the const buffer.
if ( mShaderConsts.isNull() )
{
mShaderConsts = mShader->allocConstBuffer();
mRTSizeSC = mShader->getShaderConstHandle( "$targetSize" );
mOneOverRTSizeSC = mShader->getShaderConstHandle( "$oneOverTargetSize" );
mTexSizeSC[0] = mShader->getShaderConstHandle( "$texSize0" );
mTexSizeSC[1] = mShader->getShaderConstHandle( "$texSize1" );
mTexSizeSC[2] = mShader->getShaderConstHandle( "$texSize2" );
mTexSizeSC[3] = mShader->getShaderConstHandle( "$texSize3" );
mTexSizeSC[4] = mShader->getShaderConstHandle( "$texSize4" );
mTexSizeSC[5] = mShader->getShaderConstHandle( "$texSize5" );
mTexSizeSC[6] = mShader->getShaderConstHandle( "$texSize6" );
mTexSizeSC[7] = mShader->getShaderConstHandle( "$texSize7" );
mRenderTargetParamsSC[0] = mShader->getShaderConstHandle( "$rtParams0" );
mRenderTargetParamsSC[1] = mShader->getShaderConstHandle( "$rtParams1" );
mRenderTargetParamsSC[2] = mShader->getShaderConstHandle( "$rtParams2" );
mRenderTargetParamsSC[3] = mShader->getShaderConstHandle( "$rtParams3" );
mRenderTargetParamsSC[4] = mShader->getShaderConstHandle( "$rtParams4" );
mRenderTargetParamsSC[5] = mShader->getShaderConstHandle( "$rtParams5" );
mRenderTargetParamsSC[6] = mShader->getShaderConstHandle( "$rtParams6" );
mRenderTargetParamsSC[7] = mShader->getShaderConstHandle( "$rtParams7" );
//mViewportSC = shader->getShaderConstHandle( "$viewport" );
mTargetViewportSC = mShader->getShaderConstHandle( "$targetViewport" );
mFogDataSC = mShader->getShaderConstHandle( ShaderGenVars::fogData );
mFogColorSC = mShader->getShaderConstHandle( ShaderGenVars::fogColor );
mEyePosSC = mShader->getShaderConstHandle( ShaderGenVars::eyePosWorld );
mNearFarSC = mShader->getShaderConstHandle( "$nearFar" );
mInvNearFarSC = mShader->getShaderConstHandle( "$invNearFar" );
mWorldToScreenScaleSC = mShader->getShaderConstHandle( "$worldToScreenScale" );
mMatWorldToScreenSC = mShader->getShaderConstHandle( "$matWorldToScreen" );
mMatScreenToWorldSC = mShader->getShaderConstHandle( "$matScreenToWorld" );
mMatPrevScreenToWorldSC = mShader->getShaderConstHandle( "$matPrevScreenToWorld" );
mProjectionOffsetSC = mShader->getShaderConstHandle( "$projectionOffset" );
mWaterColorSC = mShader->getShaderConstHandle( "$waterColor" );
mAmbientColorSC = mShader->getShaderConstHandle( "$ambientColor" );
mWaterFogDataSC = mShader->getShaderConstHandle( "$waterFogData" );
mWaterFogPlaneSC = mShader->getShaderConstHandle( "$waterFogPlane" );
mWaterDepthGradMaxSC = mShader->getShaderConstHandle( "$waterDepthGradMax" );
mScreenSunPosSC = mShader->getShaderConstHandle( "$screenSunPos" );
mLightDirectionSC = mShader->getShaderConstHandle( "$lightDirection" );
mCameraForwardSC = mShader->getShaderConstHandle( "$camForward" );
mAccumTimeSC = mShader->getShaderConstHandle( "$accumTime" );
mDeltaTimeSC = mShader->getShaderConstHandle( "$deltaTime" );
mInvCameraMatSC = mShader->getShaderConstHandle( "$invCameraMat" );
mMatCameraToWorldSC = mShader->getShaderConstHandle("$cameraToWorld");
}
// Set up shader constants for source image size
if ( mRTSizeSC->isValid() )
{
const Point2I &resolution = GFX->getActiveRenderTarget()->getSize();
Point2F pixelShaderConstantData;
pixelShaderConstantData.x = resolution.x;
pixelShaderConstantData.y = resolution.y;
mShaderConsts->set( mRTSizeSC, pixelShaderConstantData );
}
if ( mOneOverRTSizeSC->isValid() )
{
const Point2I &resolution = GFX->getActiveRenderTarget()->getSize();
Point2F oneOverTargetSize( 1.0f / (F32)resolution.x, 1.0f / (F32)resolution.y );
mShaderConsts->set( mOneOverRTSizeSC, oneOverTargetSize );
}
// Set up additional textures
Point2F texSizeConst;
for( U32 i = 0; i < NumTextures; i++ )
{
if( !mActiveTextures[i] )
continue;
if ( mTexSizeSC[i]->isValid() )
{
texSizeConst.x = (F32)mActiveTextures[i]->getWidth();
texSizeConst.y = (F32)mActiveTextures[i]->getHeight();
mShaderConsts->set( mTexSizeSC[i], texSizeConst );
}
}
for ( U32 i = 0; i < NumTextures; i++ )
{
if ( !mRenderTargetParamsSC[i]->isValid() )
continue;
Point4F rtParams( Point4F::One );
if ( mActiveTextures[i] )
{
const Point3I &targetSz = mActiveTextures[i]->getSize();
RectI targetVp = mActiveTextureViewport[i];
ScreenSpace::RenderTargetParameters(targetSz, targetVp, rtParams);
}
mShaderConsts->set( mRenderTargetParamsSC[i], rtParams );
}
// Target viewport (in target space)
if ( mTargetViewportSC->isValid() )
{
const Point2I& targetSize = GFX->getActiveRenderTarget()->getSize();
Point3I size(targetSize.x, targetSize.y, 0);
const RectI& viewport = GFX->getViewport();
Point2F offset((F32)viewport.point.x / (F32)targetSize.x, (F32)viewport.point.y / (F32)targetSize.y );
Point2F scale((F32)viewport.extent.x / (F32)targetSize.x, (F32)viewport.extent.y / (F32)targetSize.y );
Point4F targetParams;
targetParams.x = offset.x;
targetParams.y = offset.y;
targetParams.z = offset.x + scale.x;
targetParams.w = offset.y + scale.y;
mShaderConsts->set( mTargetViewportSC, targetParams );
}
// Set the fog data.
if ( mFogDataSC->isValid() )
{
const FogData &data = state->getSceneManager()->getFogData();
Point3F params;
params.x = data.density;
params.y = data.densityOffset;
if ( !mIsZero( data.atmosphereHeight ) )
params.z = 1.0f / data.atmosphereHeight;
else
params.z = 0.0f;
mShaderConsts->set( mFogDataSC, params );
}
const PFXFrameState &thisFrame = PFXMGR->getFrameState();
if ( mMatWorldToScreenSC->isValid() )
{
// Screen space->world space
MatrixF tempMat = thisFrame.cameraToScreen;
tempMat.mul( thisFrame.worldToCamera );
tempMat.fullInverse();
tempMat.transpose();
// Support using these matrices as float3x3 or float4x4...
mShaderConsts->set( mMatWorldToScreenSC, tempMat, mMatWorldToScreenSC->getType() );
}
if ( mMatScreenToWorldSC->isValid() )
{
// World space->screen space
MatrixF tempMat = thisFrame.cameraToScreen;
tempMat.mul( thisFrame.worldToCamera );
tempMat.transpose();
// Support using these matrices as float3x3 or float4x4...
mShaderConsts->set( mMatScreenToWorldSC, tempMat, mMatScreenToWorldSC->getType() );
}
if ( mMatPrevScreenToWorldSC->isValid() )
{
const PFXFrameState &lastFrame = PFXMGR->getLastFrameState();
// Previous frame world space->screen space
MatrixF tempMat = lastFrame.cameraToScreen;
tempMat.mul( lastFrame.worldToCamera );
tempMat.transpose();
mShaderConsts->set( mMatPrevScreenToWorldSC, tempMat );
}
if (mAmbientColorSC->isValid() && state)
{
const LinearColorF &sunlight = state->getAmbientLightColor();
Point3F ambientColor( sunlight.red, sunlight.green, sunlight.blue );
mShaderConsts->set( mAmbientColorSC, ambientColor );
}
if (mMatCameraToWorldSC->isValid())
{
MatrixF tempMat = thisFrame.worldToCamera;
tempMat.inverse();
mShaderConsts->set(mMatCameraToWorldSC, tempMat);
}
mShaderConsts->setSafe( mAccumTimeSC, MATMGR->getTotalTime() );
mShaderConsts->setSafe( mDeltaTimeSC, MATMGR->getDeltaTime() );
// Now set all the constants that are dependent on the scene state.
if ( state )
{
mShaderConsts->setSafe( mEyePosSC, state->getDiffuseCameraPosition() );
mShaderConsts->setSafe( mNearFarSC, Point2F( state->getNearPlane(), state->getFarPlane() ) );
mShaderConsts->setSafe( mInvNearFarSC, Point2F( 1.0f / state->getNearPlane(), 1.0f / state->getFarPlane() ) );
mShaderConsts->setSafe( mWorldToScreenScaleSC, state->getWorldToScreenScale() );
mShaderConsts->setSafe( mProjectionOffsetSC, state->getCameraFrustum().getProjectionOffset() );
mShaderConsts->setSafe( mFogColorSC, state->getSceneManager()->getFogData().color );
if ( mWaterColorSC->isValid() )
{
LinearColorF color( state->getSceneManager()->getWaterFogData().color );
mShaderConsts->set( mWaterColorSC, color );
}
if ( mWaterFogDataSC->isValid() )
{
const WaterFogData &data = state->getSceneManager()->getWaterFogData();
Point4F params( data.density, data.densityOffset, data.wetDepth, data.wetDarkening );
mShaderConsts->set( mWaterFogDataSC, params );
}
if ( mWaterFogPlaneSC->isValid() )
{
const PlaneF &plane = state->getSceneManager()->getWaterFogData().plane;
mShaderConsts->set( mWaterFogPlaneSC, plane );
}
if ( mWaterDepthGradMaxSC->isValid() )
{
mShaderConsts->set( mWaterDepthGradMaxSC, state->getSceneManager()->getWaterFogData().depthGradMax );
}
if ( mScreenSunPosSC->isValid() )
{
// Grab our projection matrix
// from the frustum.
Frustum frust = state->getCameraFrustum();
MatrixF proj( true );
frust.getProjectionMatrix( &proj );
// Grab the ScatterSky world matrix.
MatrixF camMat = state->getCameraTransform();
camMat.inverse();
MatrixF tmp( true );
tmp = camMat;
tmp.setPosition( Point3F( 0, 0, 0 ) );
Point3F sunPos( 0, 0, 0 );
// Get the light manager and sun light object.
LightInfo *sunLight = LIGHTMGR->getSpecialLight( LightManager::slSunLightType );
// Grab the light direction and scale
// by the ScatterSky radius to get the world
// space sun position.
const VectorF &lightDir = sunLight->getDirection();
Point3F lightPos( lightDir.x * (6378.0f * 1000.0f),
lightDir.y * (6378.0f * 1000.0f),
lightDir.z * (6378.0f * 1000.0f) );
RectI viewPort = GFX->getViewport();
// Get the screen space sun position.
MathUtils::mProjectWorldToScreen(lightPos, &sunPos, viewPort, tmp, proj);
// And normalize it to the 0 to 1 range.
sunPos.x -= (F32)viewPort.point.x;
sunPos.y -= (F32)viewPort.point.y;
sunPos.x /= (F32)viewPort.extent.x;
sunPos.y /= (F32)viewPort.extent.y;
mShaderConsts->set( mScreenSunPosSC, Point2F( sunPos.x, sunPos.y ) );
}
if ( mLightDirectionSC->isValid() )
{
LightInfo *sunLight = LIGHTMGR->getSpecialLight( LightManager::slSunLightType );
const VectorF &lightDir = sunLight->getDirection();
mShaderConsts->set( mLightDirectionSC, lightDir );
}
if ( mCameraForwardSC->isValid() )
{
const MatrixF &camMat = state->getCameraTransform();
VectorF camFwd( 0, 0, 0 );
camMat.getColumn( 1, &camFwd );
mShaderConsts->set( mCameraForwardSC, camFwd );
}
if ( mInvCameraMatSC->isValid() )
{
MatrixF mat = state->getCameraTransform();
mat.inverse();
mShaderConsts->set( mInvCameraMatSC, mat, mInvCameraMatSC->getType() );
}
} // if ( state )
// Set EffectConsts - specified from script
// If our shader has reloaded since last frame we must mark all
// EffectConsts dirty so they will be reset.
if ( mShader->getReloadKey() != mShaderReloadKey )
{
mShaderReloadKey = mShader->getReloadKey();
EffectConstTable::Iterator iter = mEffectConsts.begin();
for ( ; iter != mEffectConsts.end(); iter++ )
{
iter->value->mDirty = true;
iter->value->mHandle = NULL;
}
}
// Doesn't look like anyone is using this anymore.
// But if we do want to pass this info to script,
// we should do so in the same way as I am doing below.
/*
Point2F texSizeScriptConst( 0, 0 );
String buffer;
if ( mActiveTextures[0] )
{
texSizeScriptConst.x = (F32)mActiveTextures[0]->getWidth();
texSizeScriptConst.y = (F32)mActiveTextures[0]->getHeight();
dSscanf( buffer.c_str(), "%g %g", texSizeScriptConst.x, texSizeScriptConst.y );
}
*/
{
PROFILE_SCOPE( PostEffect_SetShaderConsts );
// Pass some data about the current render state to script.
//
// TODO: This is pretty messy... it should go away. This info
// should be available from some other script accessible method
// or field which isn't PostEffect specific.
//
if ( state )
{
Con::setFloatVariable( "$Param::NearDist", state->getNearPlane() );
Con::setFloatVariable( "$Param::FarDist", state->getFarPlane() );
}
setShaderConsts_callback();
}
EffectConstTable::Iterator iter = mEffectConsts.begin();
for ( ; iter != mEffectConsts.end(); iter++ )
iter->value->setToBuffer( mShaderConsts );
}
void PostEffect::_setupTexture( U32 stage, GFXTexHandle &inputTex, const RectI *inTexViewport )
{
const String &texFilename = mTexFilename[ stage ];
GFXTexHandle theTex;
NamedTexTarget *namedTarget = NULL;
RectI viewport = GFX->getViewport();
if ( texFilename.compare( "$inTex", 0, String::NoCase ) == 0 )
{
theTex = inputTex;
if ( inTexViewport )
{
viewport = *inTexViewport;
}
else if ( theTex )
{
viewport.set( 0, 0, theTex->getWidth(), theTex->getHeight() );
}
}
else if ( texFilename.compare( "$backBuffer", 0, String::NoCase ) == 0 )
{
theTex = PFXMGR->getBackBufferTex();
// Always use the GFX viewport when reading from the backbuffer
}
else if ( texFilename.isNotEmpty() && texFilename[0] == '#' )
{
namedTarget = NamedTexTarget::find( texFilename.c_str() + 1 );
if ( namedTarget )
{
theTex = namedTarget->getTexture( 0 );
viewport = namedTarget->getViewport();
}
}
else
{
theTex = mTextures[ stage ];
if ( theTex )
viewport.set( 0, 0, theTex->getWidth(), theTex->getHeight() );
}
mActiveTextures[ stage ] = theTex;
mActiveNamedTarget[ stage ] = namedTarget;
mActiveTextureViewport[ stage ] = viewport;
if ( theTex.isValid() )
GFX->setTexture( stage, theTex );
}
void PostEffect::_setupTransforms()
{
// Set everything to identity.
GFX->setWorldMatrix( MatrixF::Identity );
GFX->setProjectionMatrix( MatrixF::Identity );
}
void PostEffect::_setupTarget( const SceneRenderState *state, bool *outClearTarget )
{
if ( mNamedTarget.isRegistered() ||
mTargetName.compare( "$outTex", 0, String::NoCase ) == 0 )
{
// Size it relative to the texture of the first stage or
// if NULL then use the current target.
Point2I targetSize;
// If we have an absolute target size then use that.
if ( !mTargetSize.isZero() )
targetSize = mTargetSize;
// Else generate a relative size using the target scale.
else if ( mActiveTextures[ 0 ] )
{
const Point3I &texSize = mActiveTextures[ 0 ]->getSize();
targetSize.set( texSize.x * mTargetScale.x,
texSize.y * mTargetScale.y );
}
else
{
GFXTarget *oldTarget = GFX->getActiveRenderTarget();
const Point2I &oldTargetSize = oldTarget->getSize();
targetSize.set( oldTargetSize.x * mTargetScale.x,
oldTargetSize.y * mTargetScale.y );
}
// Make sure its at least 1x1.
targetSize.setMax( Point2I::One );
if ( mNamedTarget.isRegistered() ||
!mTargetTex ||
mTargetTex.getWidthHeight() != targetSize )
{
mTargetTex.set( targetSize.x, targetSize.y, mTargetFormat,
&PostFxTargetProfile, "PostEffect::_setupTarget" );
if ( mTargetClear == PFXTargetClear_OnCreate )
*outClearTarget = true;
if(mTargetViewport == PFXTargetViewport_GFXViewport)
{
// We may need to scale the GFX viewport to fit within
// our target texture size
GFXTarget *oldTarget = GFX->getActiveRenderTarget();
const Point2I &oldTargetSize = oldTarget->getSize();
Point2F scale(targetSize.x / F32(oldTargetSize.x), targetSize.y / F32(oldTargetSize.y));
const RectI &viewport = GFX->getViewport();
mNamedTarget.setViewport( RectI( viewport.point.x*scale.x, viewport.point.y*scale.y, viewport.extent.x*scale.x, viewport.extent.y*scale.y ) );
}
else if(mTargetViewport == PFXTargetViewport_NamedInTexture0 && mActiveNamedTarget[0] && mActiveNamedTarget[0]->getTexture())
{
// Scale the named input texture's viewport to match our target
const Point3I &namedTargetSize = mActiveNamedTarget[0]->getTexture()->getSize();
Point2F scale(targetSize.x / F32(namedTargetSize.x), targetSize.y / F32(namedTargetSize.y));
const RectI &viewport = mActiveNamedTarget[0]->getViewport();
mNamedTarget.setViewport( RectI( viewport.point.x*scale.x, viewport.point.y*scale.y, viewport.extent.x*scale.x, viewport.extent.y*scale.y ) );
}
else
{
// PFXTargetViewport_TargetSize
mNamedTarget.setViewport( RectI( 0, 0, targetSize.x, targetSize.y ) );
}
}
}
else
mTargetTex = NULL;
// Do we have a named depthStencil target?
if ( mNamedTargetDepthStencil.isRegistered() )
{
// Size it relative to the texture of the first stage or
// if NULL then use the current target.
Point2I targetSize;
// If we have an absolute target size then use that.
if ( !mTargetSize.isZero() )
targetSize = mTargetSize;
// Else generate a relative size using the target scale.
else if ( mActiveTextures[ 0 ] )
{
const Point3I &texSize = mActiveTextures[ 0 ]->getSize();
targetSize.set( texSize.x * mTargetScale.x,
texSize.y * mTargetScale.y );
}
else
{
GFXTarget *oldTarget = GFX->getActiveRenderTarget();
const Point2I &oldTargetSize = oldTarget->getSize();
targetSize.set( oldTargetSize.x * mTargetScale.x,
oldTargetSize.y * mTargetScale.y );
}
// Make sure its at least 1x1.
targetSize.setMax( Point2I::One );
if ( mNamedTargetDepthStencil.isRegistered() &&
mTargetDepthStencil.getWidthHeight() != targetSize )
{
mTargetDepthStencil.set( targetSize.x, targetSize.y, GFXFormatD24S8,
&GFXZTargetProfile, "PostEffect::_setupTarget" );
if ( mTargetClear == PFXTargetClear_OnCreate )
*outClearTarget = true;
if(mTargetViewport == PFXTargetViewport_GFXViewport)
{
// We may need to scale the GFX viewport to fit within
// our target texture size
GFXTarget *oldTarget = GFX->getActiveRenderTarget();
const Point2I &oldTargetSize = oldTarget->getSize();
Point2F scale(targetSize.x / F32(oldTargetSize.x), targetSize.y / F32(oldTargetSize.y));
const RectI &viewport = GFX->getViewport();
mNamedTargetDepthStencil.setViewport( RectI( viewport.point.x*scale.x, viewport.point.y*scale.y, viewport.extent.x*scale.x, viewport.extent.y*scale.y ) );
}
else if(mTargetViewport == PFXTargetViewport_NamedInTexture0 && mActiveNamedTarget[0] && mActiveNamedTarget[0]->getTexture())
{
// Scale the named input texture's viewport to match our target
const Point3I &namedTargetSize = mActiveNamedTarget[0]->getTexture()->getSize();
Point2F scale(targetSize.x / F32(namedTargetSize.x), targetSize.y / F32(namedTargetSize.y));
const RectI &viewport = mActiveNamedTarget[0]->getViewport();
mNamedTargetDepthStencil.setViewport( RectI( viewport.point.x*scale.x, viewport.point.y*scale.y, viewport.extent.x*scale.x, viewport.extent.y*scale.y ) );
}
else
{
// PFXTargetViewport_TargetSize
mNamedTargetDepthStencil.setViewport( RectI( 0, 0, targetSize.x, targetSize.y ) );
}
}
}
else
mTargetDepthStencil = NULL;
if ( mTargetClear == PFXTargetClear_OnDraw )
*outClearTarget = true;
if ( !mTarget && (mTargetTex || mTargetDepthStencil) )
mTarget = GFX->allocRenderToTextureTarget();
}
void PostEffect::_cleanTargets( bool recurse )
{
mTargetTex = NULL;
mTargetDepthStencil = NULL;
mTarget = NULL;
if ( !recurse )
return;
// Clear the children too!
for ( U32 i = 0; i < size(); i++ )
{
PostEffect *effect = (PostEffect*)(*this)[i];
effect->_cleanTargets( true );
}
}
void PostEffect::process( const SceneRenderState *state,
GFXTexHandle &inOutTex,
const RectI *inTexViewport )
{
// If the shader is forced to be skipped... then skip.
if ( mSkip )
return;
// Skip out if we don't support reflection passes.
if ( state && state->isReflectPass() && !mAllowReflectPass )
return;
if ( mOneFrameOnly && !mOnThisFrame )
return;
// Check requirements if the shader needs updating.
if ( mUpdateShader )
{
_checkRequirements();
// Clear the targets if we failed passing
// the requirements at this time.
if ( !mIsValid )
_cleanTargets( true );
}
// If we're not valid then we cannot render.
if ( !mIsValid )
return;
GFXDEBUGEVENT_SCOPE_EX( PostEffect_Process, ColorI::GREEN, avar("PostEffect: %s", getName()) );
preProcess_callback();
GFXTransformSaver saver;
// Set the textures.
for ( U32 i = 0; i < NumTextures; i++ )
_setupTexture( i, inOutTex, inTexViewport );
_setupStateBlock( state ) ;
_setupTransforms();
bool clearTarget = false;
_setupTarget( state, &clearTarget );
if ( mTargetTex || mTargetDepthStencil )
{
const RectI &oldViewport = GFX->getViewport();
GFXTarget *oldTarget = GFX->getActiveRenderTarget();
GFX->pushActiveRenderTarget();
mTarget->attachTexture( GFXTextureTarget::Color0, mTargetTex );
// Set the right depth stencil target.
if ( !mTargetDepthStencil && mTargetTex.getWidthHeight() == GFX->getActiveRenderTarget()->getSize() )
mTarget->attachTexture( GFXTextureTarget::DepthStencil, GFXTextureTarget::sDefaultDepthStencil );
else
mTarget->attachTexture( GFXTextureTarget::DepthStencil, mTargetDepthStencil );
// Set the render target but not its viewport. We'll do that below.
GFX->setActiveRenderTarget( mTarget, false );
if(mNamedTarget.isRegistered())
{
// Always use the name target's viewport, if available. It was set up in _setupTarget().
GFX->setViewport(mNamedTarget.getViewport());
}
else if(mTargetViewport == PFXTargetViewport_GFXViewport)
{
// Go with the current viewport as scaled against our render target.
const Point2I &oldTargetSize = oldTarget->getSize();
const Point2I &targetSize = mTarget->getSize();
Point2F scale(targetSize.x / F32(oldTargetSize.x), targetSize.y / F32(oldTargetSize.y));
GFX->setViewport( RectI( oldViewport.point.x*scale.x, oldViewport.point.y*scale.y, oldViewport.extent.x*scale.x, oldViewport.extent.y*scale.y ) );
}
else if(mTargetViewport == PFXTargetViewport_NamedInTexture0 && mActiveNamedTarget[0] && mActiveNamedTarget[0]->getTexture())
{
// Go with the first input texture, if it is named. Scale the named input texture's viewport to match our target
const Point3I &namedTargetSize = mActiveNamedTarget[0]->getTexture()->getSize();
const Point2I &targetSize = mTarget->getSize();
Point2F scale(targetSize.x / F32(namedTargetSize.x), targetSize.y / F32(namedTargetSize.y));
const RectI &viewport = mActiveNamedTarget[0]->getViewport();
GFX->setViewport( RectI( viewport.point.x*scale.x, viewport.point.y*scale.y, viewport.extent.x*scale.x, viewport.extent.y*scale.y ) );
}
else
{
// Default to using the whole target as the viewport
GFX->setViewport( RectI( Point2I::Zero, mTarget->getSize() ) );
}
}
if ( clearTarget )
GFX->clear( GFXClearTarget, mTargetClearColor, 1.f, 0 );
// Setup the shader and constants.
if ( mShader )
{
GFX->setShader( mShader );
_setupConstants( state );
GFX->setShaderConstBuffer( mShaderConsts );
}
else
GFX->setupGenericShaders();
Frustum frustum;
if ( state )
frustum = state->getCameraFrustum();
else
{
// If we don't have a scene state then setup
// a dummy frustum... you better not be depending
// on this being related to the camera in any way.
frustum = Frustum();
}
GFXVertexBufferHandle<PFXVertex> vb;
_updateScreenGeometry( frustum, &vb );
// Draw it.
GFX->setVertexBuffer( vb );
GFX->drawPrimitive( GFXTriangleStrip, 0, 2 );
// Allow PostEffecVis to hook in.
PFXVIS->onPFXProcessed( this );
if ( mTargetTex || mTargetDepthStencil )
{
mTarget->resolve();
GFX->popActiveRenderTarget();
}
else
{
// We wrote to the active back buffer, so release
// the current texture copy held by the manager.
//
// This ensures a new copy is made.
PFXMGR->releaseBackBufferTex();
}
// Return and release our target texture.
inOutTex = mTargetTex;
if ( !mNamedTarget.isRegistered() )
mTargetTex = NULL;
// Restore the transforms before the children
// are processed as it screws up the viewport.
saver.restore();
// Now process my children.
iterator i = begin();
for ( ; i != end(); i++ )
{
PostEffect *effect = static_cast<PostEffect*>(*i);
effect->process( state, inOutTex );
}
if ( mOneFrameOnly )
mOnThisFrame = false;
}
bool PostEffect::_setIsEnabled( void *object, const char *index, const char *data )
{
bool enabled = dAtob( data );
if ( enabled )
static_cast<PostEffect*>( object )->enable();
else
static_cast<PostEffect*>( object )->disable();
// Always return false from a protected field.
return false;
}
void PostEffect::enable()
{
// Don't add TexGen PostEffects to the PostEffectManager!
if ( mRenderTime == PFXTexGenOnDemand )
return;
// Ignore it if its already enabled.
if ( mEnabled )
return;
mEnabled = true;
// We cannot really enable the effect
// until its been registed.
if ( !isProperlyAdded() )
return;
// If the enable callback returns 'false' then
// leave the effect disabled.
if ( !onEnabled_callback() )
{
mEnabled = false;
return;
}
PFXMGR->_addEffect( this );
}
void PostEffect::disable()
{
if ( !mEnabled )
return;
mEnabled = false;
_cleanTargets( true );
if ( isProperlyAdded() )
{
PFXMGR->_removeEffect( this );
onDisabled_callback();
}
}
void PostEffect::reload()
{
// Reload the shader if we have one or mark it
// for updating when its processed next.
if ( mShader )
mShader->reload();
else
mUpdateShader = true;
// Null stateblock so it is reloaded.
mStateBlock = NULL;
// Call reload on any children
// this PostEffect may have.
for ( U32 i = 0; i < size(); i++ )
{
PostEffect *effect = (PostEffect*)(*this)[i];
effect->reload();
}
}
void PostEffect::setTexture( U32 index, const String &texFilePath )
{
// Set the new texture name.
mTexFilename[index] = texFilePath;
mTextures[index].free();
// Skip empty stages or ones with variable or target names.
if ( texFilePath.isEmpty() ||
texFilePath[0] == '$' ||
texFilePath[0] == '#' )
return;
// Try to load the texture.
mTextures[index].set( texFilePath, &PostFxTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
}
void PostEffect::setShaderConst( const String &name, const String &val )
{
PROFILE_SCOPE( PostEffect_SetShaderConst );
EffectConstTable::Iterator iter = mEffectConsts.find( name );
if ( iter == mEffectConsts.end() )
{
EffectConst *newConst = new EffectConst( name, val );
iter = mEffectConsts.insertUnique( name, newConst );
}
iter->value->set( val );
}
F32 PostEffect::getAspectRatio() const
{
const Point2I &rtSize = GFX->getActiveRenderTarget()->getSize();
return (F32)rtSize.x / (F32)rtSize.y;
}
void PostEffect::_checkRequirements()
{
// This meets requirements if its shader loads
// properly, we can find all the input textures,
// and its formats are supported.
mIsValid = false;
mUpdateShader = false;
mShader = NULL;
mShaderConsts = NULL;
EffectConstTable::Iterator iter = mEffectConsts.begin();
for ( ; iter != mEffectConsts.end(); iter++ )
{
iter->value->mDirty = true;
iter->value->mHandle = NULL;
}
// First make sure the target format is supported.
if ( mNamedTarget.isRegistered() )
{
Vector<GFXFormat> formats;
formats.push_back( mTargetFormat );
GFXFormat format = GFX->selectSupportedFormat( &PostFxTargetProfile,
formats,
true,
false,
false );
// If we didn't get our format out then its unsupported!
if ( format != mTargetFormat )
return;
}
// Gather macros specified on this PostEffect.
Vector<GFXShaderMacro> macros( mShaderMacros );
// Now check the input named targets and make sure
// they exist... else we're invalid.
for ( U32 i=0; i < NumTextures; i++ )
{
const String &texFilename = mTexFilename[i];
if ( texFilename.isNotEmpty() && texFilename[0] == '#' )
{
NamedTexTarget *namedTarget = NamedTexTarget::find( texFilename.c_str() + 1 );
if ( !namedTarget )
return;
// Grab the macros for shader initialization.
namedTarget->getShaderMacros( ¯os );
}
}
// Finally find and load the shader.
ShaderData *shaderData;
if ( Sim::findObject( mShaderName, shaderData ) )
if ( shaderData->getPixVersion() <= GFX->getPixelShaderVersion() )
mShader = shaderData->getShader( macros );
// If we didn't get a shader... we're done.
if ( !mShader )
return;
// If we got here then we're valid.
mIsValid = true;
}
bool PostEffect::dumpShaderDisassembly( String &outFilename ) const
{
String data;
if ( !mShader || !mShader->getDisassembly( data ) )
return false;
outFilename = FS::MakeUniquePath( "", "ShaderDisassembly", "txt" );
FileStream *fstream = FileStream::createAndOpen( outFilename, Torque::FS::File::Write );
if ( !fstream )
return false;
fstream->write( data );
fstream->close();
delete fstream;
return true;
}
SimSet* PostEffect::getSet() const
{
SimSet *set;
if ( !Sim::findObject( "PFXSet", set ) )
{
set = new SimSet();
set->registerObject( "PFXSet" );
Sim::getRootGroup()->addObject( set );
}
return set;
}
void PostEffect::setShaderMacro( const String &name, const String &value )
{
// Check to see if we already have this macro.
Vector<GFXShaderMacro>::iterator iter = mShaderMacros.begin();
for ( ; iter != mShaderMacros.end(); iter++ )
{
if ( iter->name == name )
{
if ( iter->value != value )
{
iter->value = value;
mUpdateShader = true;
}
return;
}
}
// Add a new macro.
mShaderMacros.increment();
mShaderMacros.last().name = name;
mShaderMacros.last().value = value;
mUpdateShader = true;
}
bool PostEffect::removeShaderMacro( const String &name )
{
Vector<GFXShaderMacro>::iterator iter = mShaderMacros.begin();
for ( ; iter != mShaderMacros.end(); iter++ )
{
if ( iter->name == name )
{
mShaderMacros.erase( iter );
mUpdateShader = true;
return true;
}
}
return false;
}
void PostEffect::clearShaderMacros()
{
if ( mShaderMacros.empty() )
return;
mShaderMacros.clear();
mUpdateShader = true;
}
GFXTextureObject* PostEffect::_getTargetTexture( U32 )
{
// A TexGen PostEffect will generate its texture now if it
// has not already.
if ( mRenderTime == PFXTexGenOnDemand &&
( !mTargetTex || mUpdateShader ) )
{
GFXTexHandle chainTex;
process( NULL, chainTex );
// TODO: We should add a conditional copy
// to a non-RT texture here to reduce the
// amount of non-swappable RTs in use.
}
return mTargetTex.getPointer();
}
DefineEngineMethod( PostEffect, reload, void, (),,
"Reloads the effect shader and textures." )
{
return object->reload();
}
DefineEngineMethod( PostEffect, enable, void, (),,
"Enables the effect." )
{
object->enable();
}
DefineEngineMethod( PostEffect, disable, void, (),,
"Disables the effect." )
{
object->disable();
}
DefineEngineMethod( PostEffect, toggle, bool, (),,
"Toggles the effect between enabled / disabled.\n"
"@return True if effect is enabled." )
{
if ( object->isEnabled() )
object->disable();
else
object->enable();
return object->isEnabled();
}
DefineEngineMethod( PostEffect, isEnabled, bool, (),,
"@return True if the effect is enabled." )
{
return object->isEnabled();
}
DefineEngineMethod( PostEffect, setTexture, void, ( S32 index, const char *filePath ),,
"This is used to set the texture file and load the texture on a running effect. "
"If the texture file is not different from the current file nothing is changed. If "
"the texture cannot be found a null texture is assigned.\n"
"@param index The texture stage index.\n"
"@param filePath The file name of the texture to set.\n" )
{
if ( index > -1 && index < PostEffect::NumTextures )
object->setTexture( index, filePath );
}
DefineEngineMethod( PostEffect, setShaderConst, void, ( const char* name, const char* value ),,
"Sets the value of a uniform defined in the shader. This will usually "
"be called within the setShaderConsts callback. Array type constants are "
"not supported.\n"
"@param name Name of the constanst, prefixed with '$'.\n"
"@param value Value to set, space seperate values with more than one element.\n"
"@tsexample\n"
"function MyPfx::setShaderConsts( %this )\n"
"{\n"
" // example float4 uniform\n"
" %this.setShaderConst( \"$colorMod\", \"1.0 0.9 1.0 1.0\" );\n"
" // example float1 uniform\n"
" %this.setShaderConst( \"$strength\", \"3.0\" );\n"
" // example integer uniform\n"
" %this.setShaderConst( \"$loops\", \"5\" );"
"}\n"
"@endtsexample" )
{
object->setShaderConst( name, value );
}
DefineEngineMethod( PostEffect, getAspectRatio, F32, (),,
"@return Width over height of the backbuffer." )
{
return object->getAspectRatio();
}
DefineEngineMethod( PostEffect, dumpShaderDisassembly, String, (),,
"Dumps this PostEffect shader's disassembly to a temporary text file.\n"
"@return Full path to the dumped file or an empty string if failed." )
{
String fileName;
object->dumpShaderDisassembly( fileName );
return fileName;
}
DefineEngineMethod( PostEffect, setShaderMacro, void, ( const char* key, const char* value ), ( "" ),
"Adds a macro to the effect's shader or sets an existing one's value. "
"This will usually be called within the onAdd or preProcess callback.\n"
"@param key lval of the macro."
"@param value rval of the macro, or may be empty."
"@tsexample\n"
"function MyPfx::onAdd( %this )\n"
"{\n"
" %this.setShaderMacro( \"NUM_SAMPLES\", \"10\" );\n"
" %this.setShaderMacro( \"HIGH_QUALITY_MODE\" );\n"
" \n"
" // In the shader looks like... \n"
" // #define NUM_SAMPLES 10\n"
" // #define HIGH_QUALITY_MODE\n"
"}\n"
"@endtsexample" )
{
object->setShaderMacro( key, value );
}
DefineEngineMethod( PostEffect, removeShaderMacro, void, ( const char* key ),,
"Remove a shader macro. This will usually be called within the preProcess callback.\n"
"@param key Macro to remove." )
{
object->removeShaderMacro( key );
}
DefineEngineMethod( PostEffect, clearShaderMacros, void, (),,
"Remove all shader macros." )
{
object->clearShaderMacros();
}
DefineEngineFunction( dumpRandomNormalMap, void, (),,
"Creates a 64x64 normal map texture filled with noise. The texture is saved "
"to randNormTex.png in the location of the game executable.\n\n"
"@ingroup GFX")
{
GFXTexHandle tex;
tex.set( 64, 64, GFXFormatR8G8B8A8, &GFXTexturePersistentProfile, "" );
GFXLockedRect *rect = tex.lock();
U8 *f = rect->bits;
for ( U32 i = 0; i < 64*64; i++, f += 4 )
{
VectorF vec;
vec.x = mRandF( -1.0f, 1.0f );
vec.y = mRandF( -1.0f, 1.0f );
vec.z = mRandF( -1.0f, 1.0f );
vec.normalizeSafe();
f[0] = U8_MAX * ( ( 1.0f + vec.x ) * 0.5f );
f[1] = U8_MAX * ( ( 1.0f + vec.y ) * 0.5f );
f[2] = U8_MAX * ( ( 1.0f + vec.z ) * 0.5f );
f[3] = U8_MAX;
}
tex.unlock();
String path = Torque::FS::MakeUniquePath( "", "randNormTex", "png" );
tex->dumpToDisk( "png", path );
} | {
"content_hash": "b2dc34c37af8b8671b9e360333169f55",
"timestamp": "",
"source": "github",
"line_count": 1747,
"max_line_length": 220,
"avg_line_length": 33.593016599885516,
"alnum_prop": 0.6374665598854943,
"repo_name": "Azaezel/Torque3D",
"id": "4054554f02fe2e1af5a84b314e11991a38e6aca0",
"size": "58687",
"binary": false,
"copies": "1",
"ref": "refs/heads/PBR_PR",
"path": "Engine/source/postFx/postEffect.cpp",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "32222"
},
{
"name": "Batchfile",
"bytes": "8398"
},
{
"name": "C",
"bytes": "28794200"
},
{
"name": "C#",
"bytes": "5751764"
},
{
"name": "C++",
"bytes": "37203176"
},
{
"name": "CMake",
"bytes": "450410"
},
{
"name": "CSS",
"bytes": "29109"
},
{
"name": "GLSL",
"bytes": "832003"
},
{
"name": "HLSL",
"bytes": "855614"
},
{
"name": "HTML",
"bytes": "1231193"
},
{
"name": "JavaScript",
"bytes": "18010"
},
{
"name": "Lex",
"bytes": "18783"
},
{
"name": "Lua",
"bytes": "1288"
},
{
"name": "M4",
"bytes": "43658"
},
{
"name": "Makefile",
"bytes": "80861"
},
{
"name": "Metal",
"bytes": "3849"
},
{
"name": "Module Management System",
"bytes": "13253"
},
{
"name": "NSIS",
"bytes": "1194010"
},
{
"name": "Objective-C",
"bytes": "994296"
},
{
"name": "Objective-C++",
"bytes": "126485"
},
{
"name": "PHP",
"bytes": "615704"
},
{
"name": "Pascal",
"bytes": "6505"
},
{
"name": "Perl",
"bytes": "24056"
},
{
"name": "PowerShell",
"bytes": "12517"
},
{
"name": "Python",
"bytes": "4703"
},
{
"name": "Roff",
"bytes": "310763"
},
{
"name": "Ruby",
"bytes": "983"
},
{
"name": "SAS",
"bytes": "13756"
},
{
"name": "Shell",
"bytes": "454592"
},
{
"name": "Smalltalk",
"bytes": "1308"
},
{
"name": "Smarty",
"bytes": "333060"
},
{
"name": "WebAssembly",
"bytes": "13560"
},
{
"name": "Yacc",
"bytes": "19714"
}
],
"symlink_target": ""
} |
using ICSharpCode.NRefactory.TypeSystem;
using ICSharpCode.NRefactory.TypeSystem.Implementation;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bridge.Contract
{
public class TypeComparer
{
sealed class NormalizeTypeVisitor : TypeVisitor
{
public override IType VisitTypeParameter(ITypeParameter type)
{
if (type.OwnerType == SymbolKind.Method)
{
return DummyTypeParameter.GetMethodTypeParameter(type.Index);
}
else
{
return base.VisitTypeParameter(type);
}
}
public override IType VisitTypeDefinition(ITypeDefinition type)
{
if (type.KnownTypeCode == KnownTypeCode.Object)
return SpecialType.Dynamic;
return base.VisitTypeDefinition(type);
}
}
static readonly NormalizeTypeVisitor normalizationVisitor = new NormalizeTypeVisitor();
public static bool Equals(IType a, IType b)
{
IType aType = a.AcceptVisitor(normalizationVisitor);
IType bType = b.AcceptVisitor(normalizationVisitor);
return aType.Equals(bType);
}
}
}
| {
"content_hash": "e65ef945ba166b54cf5d5a5d301a804c",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 95,
"avg_line_length": 30.4,
"alnum_prop": 0.5986842105263158,
"repo_name": "AndreyZM/Bridge",
"id": "0a21f50aa277bad2fd214e114ec29be5ae66f138",
"size": "1370",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Compiler/Contract/TypeComparer.cs",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "4769"
},
{
"name": "C#",
"bytes": "23533024"
},
{
"name": "CSS",
"bytes": "507"
},
{
"name": "HTML",
"bytes": "190"
},
{
"name": "JavaScript",
"bytes": "3937"
},
{
"name": "PowerShell",
"bytes": "5284"
},
{
"name": "Shell",
"bytes": "7698"
}
],
"symlink_target": ""
} |
namespace GitVersion
{
using System;
public abstract class BuildServerBase : IBuildServer
{
public abstract bool CanApplyToCurrentContext();
public abstract string GenerateSetVersionMessage(VersionVariables variables);
public abstract string[] GenerateSetParameterMessage(string name, string value);
public virtual string GetCurrentBranch(bool usingDynamicRepos)
{
return null;
}
public virtual bool PreventFetch()
{
return false;
}
public virtual void WriteIntegration(Action<string> writer, VersionVariables variables)
{
if (writer == null)
{
return;
}
writer(string.Format("Executing GenerateSetVersionMessage for '{0}'.", GetType().Name));
writer(GenerateSetVersionMessage(variables));
writer(string.Format("Executing GenerateBuildLogOutput for '{0}'.", GetType().Name));
foreach (var buildParameter in BuildOutputFormatter.GenerateBuildLogOutput(this, variables))
{
writer(buildParameter);
}
}
public virtual bool ShouldCleanUpRemotes()
{
return false;
}
}
} | {
"content_hash": "b2d5aa1a57d569fe348510b1c5394cdc",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 104,
"avg_line_length": 30.523809523809526,
"alnum_prop": 0.6053042121684867,
"repo_name": "dpurge/GitVersion",
"id": "7efe632eb1954d32f6d97a1f02580d8916aa4903",
"size": "1284",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/GitVersionCore/BuildServers/BuildServerBase.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "735625"
},
{
"name": "F#",
"bytes": "508"
},
{
"name": "PowerShell",
"bytes": "10810"
},
{
"name": "Roff",
"bytes": "812"
},
{
"name": "Ruby",
"bytes": "7612"
},
{
"name": "Shell",
"bytes": "2310"
},
{
"name": "TypeScript",
"bytes": "2383"
},
{
"name": "Visual Basic",
"bytes": "502"
}
],
"symlink_target": ""
} |
#ifndef MACRO_H
#define MACRO_H
#define DEFINED_STR "defined"
/*#define EVAL_DEBUG*/
typedef struct
{
where where;
char *nam, *val;
enum { MACRO, FUNC, VARIADIC } type;
char **args;
int blue; /* being evaluated? */
int use_cnt; /* track usage for double-eval */
int use_dump; /* for -dS */
int include_depth;
} macro;
macro *macro_add(const char *nam, const char *val, int inc_depth);
macro *macro_add_func(const char *nam, const char *val,
char **args, int variadic, int inc_depth);
macro *macro_add_sprintf(
const char *nam,
const char *fmt, ...)
ucc_printflike(2, 3);
macro *macro_find(const char *sp);
int macro_remove(const char *nam);
void macros_dump(int show_where);
void macros_stats(void);
void macros_warn_unused(void);
extern macro **macros;
void macro_use(macro *m, int adj);
#endif
| {
"content_hash": "a437b7590b3f8b292a11eddbd7833509",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 66,
"avg_line_length": 21.736842105263158,
"alnum_prop": 0.6779661016949152,
"repo_name": "8l/ucc-c-compiler",
"id": "d2a2fd08c71b7dca11294fc7e35726abca9539c7",
"size": "826",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/cpp2/macro.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "4631"
},
{
"name": "C",
"bytes": "1513476"
},
{
"name": "C++",
"bytes": "5852"
},
{
"name": "Makefile",
"bytes": "14364"
},
{
"name": "Objective-C",
"bytes": "1326"
},
{
"name": "Perl",
"bytes": "26851"
},
{
"name": "Shell",
"bytes": "5479"
}
],
"symlink_target": ""
} |
div.olMap {
z-index: 0;
padding: 0px!important;
margin: 0px!important;
cursor: default;
}
div.olMapViewport {
text-align: left;
}
div.olLayerDiv {
-moz-user-select: none;
}
.olLayerGoogleCopyright {
left: 2px;
bottom: 2px;
}
.olLayerGooglePoweredBy {
left: 2px;
bottom: 15px;
}
.olControlAttribution {
font-size: smaller;
right: 3px;
bottom: 4.5em;
position: absolute;
display: block;
}
.olControlScale {
right: 3px;
bottom: 3em;
display: block;
position: absolute;
font-size: smaller;
}
.olControlScaleLine {
left: 10px;
bottom: 15px;
font-size: xx-small;
}
.olControlScaleLineBottom {
border: solid 2px black;
border-bottom: none;
margin-top:-2px;
text-align: center;
}
.olControlScaleLineTop {
border: solid 2px black;
border-top: none;
text-align: center;
}
.olControlPermalink {
right: 3px;
bottom: 1.5em;
display: block;
position: absolute;
font-size: smaller;
}
div.olControlMousePosition {
bottom: 0em;
right: 3px;
display: block;
position: absolute;
font-family: Arial;
font-size: smaller;
}
.olControlOverviewMapContainer {
position: absolute;
bottom: 0px;
right: 0px;
}
.olControlOverviewMapElement {
padding: 10px 18px 10px 10px;
background-color: #00008B;
-moz-border-radius: 1em 0 0 0;
}
.olControlOverviewMapMinimizeButton {
right: 0px;
bottom: 80px;
}
.olControlOverviewMapMaximizeButton {
right: 0px;
bottom: 80px;
}
.olControlOverviewMapExtentRectangle {
overflow: hidden;
background-image: url("img/blank.gif");
cursor: move;
border: 2px dotted red;
}
.olControlOverviewMapRectReplacement {
overflow: hidden;
cursor: move;
background-image: url("img/overview_replacement.gif");
background-repeat: no-repeat;
background-position: center;
}
.olLayerGeoRSSDescription {
float:left;
width:100%;
overflow:auto;
font-size:1.0em;
}
.olLayerGeoRSSClose {
float:right;
color:gray;
font-size:1.2em;
margin-right:6px;
font-family:sans-serif;
}
.olLayerGeoRSSTitle {
float:left;font-size:1.2em;
}
.olPopupContent {
padding:5px;
overflow: auto;
}
.olControlNavToolbar {
width:0px;
height:0px;
}
.olControlNavToolbar div {
display:block;
width: 28px;
height: 28px;
top: 300px;
left: 6px;
position: relative;
}
.olControlNavigationHistory {
background-image: url("img/navigation_history.png");
background-repeat: no-repeat;
width: 24px;
height: 24px;
}
.olControlNavigationHistoryPreviousItemActive {
background-position: 0px 0px;
}
.olControlNavigationHistoryPreviousItemInactive {
background-position: 0px -24px;
}
.olControlNavigationHistoryNextItemActive {
background-position: -24px 0px;
}
.olControlNavigationHistoryNextItemInactive {
background-position: -24px -24px;
}
.olControlNavToolbar .olControlNavigationItemActive {
background-image: url("img/panning-hand-on.png");
background-repeat: no-repeat;
}
.olControlNavToolbar .olControlNavigationItemInactive {
background-image: url("img/panning-hand-off.png");
background-repeat: no-repeat;
}
.olControlNavToolbar .olControlZoomBoxItemActive {
background-image: url("img/drag-rectangle-on.png");
background-color: orange;
background-repeat: no-repeat;
}
.olControlNavToolbar .olControlZoomBoxItemInactive {
background-image: url("img/drag-rectangle-off.png");
background-repeat: no-repeat;
}
.olControlEditingToolbar {
float:right;
right: 0px;
height: 30px;
width: 200px;
}
.olControlEditingToolbar div {
background-image: url("img/editing_tool_bar.png");
background-repeat: no-repeat;
float:right;
width: 24px;
height: 24px;
margin: 5px;
}
.olControlEditingToolbar .olControlNavigationItemActive {
background-position: -103px -23px;
}
.olControlEditingToolbar .olControlNavigationItemInactive {
background-position: -103px -0px;
}
.olControlEditingToolbar .olControlDrawFeaturePointItemActive {
background-position: -77px -23px;
}
.olControlEditingToolbar .olControlDrawFeaturePointItemInactive {
background-position: -77px -0px;
}
.olControlEditingToolbar .olControlDrawFeaturePathItemInactive {
background-position: -51px 0px;
}
.olControlEditingToolbar .olControlDrawFeaturePathItemActive {
background-position: -51px -23px;
}
.olControlEditingToolbar .olControlDrawFeaturePolygonItemInactive {
background-position: -26px 0px;
}
.olControlEditingToolbar .olControlDrawFeaturePolygonItemActive {
background-position: -26px -23px ;
}
.olControlSaveFeaturesItemActive {
background-image: url(img/save_features_on.png);
background-repeat: no-repeat;
background-position: 0px 1px;
}
.olControlSaveFeaturesItemInactive {
background-image: url(img/save_features_off.png);
background-repeat: no-repeat;
background-position: 0px 1px;
}
.olHandlerBoxZoomBox {
border: 2px solid red;
position: absolute;
background-color: white;
opacity: 0.50;
font-size: 1px;
filter: alpha(opacity=50);
}
.olHandlerBoxSelectFeature {
border: 2px solid blue;
position: absolute;
background-color: white;
opacity: 0.50;
font-size: 1px;
filter: alpha(opacity=50);
}
.olControlPanPanel {
top: 10px;
left: 5px;
}
.olControlPanPanel div {
background-image: url(img/pan-panel.png);
height: 18px;
width: 18px;
cursor: pointer;
position: absolute;
}
.olControlPanPanel .olControlPanNorthItemInactive {
top: 0px;
left: 9px;
background-position: 0px 0px;
}
.olControlPanPanel .olControlPanSouthItemInactive {
top: 36px;
left: 9px;
background-position: 18px 0px;
}
.olControlPanPanel .olControlPanWestItemInactive {
position: absolute;
top: 18px;
left: 0px;
background-position: 0px 18px;
}
.olControlPanPanel .olControlPanEastItemInactive {
top: 18px;
left: 18px;
background-position: 18px 18px;
}
.olControlZoomPanel {
top: 71px;
left: 14px;
}
.olControlZoomPanel div {
background-image: url(img/zoom-panel.png);
position: absolute;
height: 18px;
width: 18px;
cursor: pointer;
}
.olControlZoomPanel .olControlZoomInItemInactive {
top: 0px;
left: 0px;
background-position: 0px 0px;
}
.olControlZoomPanel .olControlZoomToMaxExtentItemInactive {
top: 18px;
left: 0px;
background-position: 0px -18px;
}
.olControlZoomPanel .olControlZoomOutItemInactive {
top: 36px;
left: 0px;
background-position: 0px 18px;
}
.olPopupCloseBox {
background: url("img/close.gif") no-repeat;
cursor: pointer;
}
.olFramedCloudPopupContent {
padding: 5px;
overflow: auto;
}
.olControlNoSelect {
-moz-user-select: none;
}
/**
* Cursor styles
*/
.olCursorWait {
cursor: wait;
}
.olDragDown {
cursor: move;
}
.olDrawBox {
cursor: crosshair;
}
.olControlDragFeatureOver {
cursor: move;
}
.olControlDragFeatureActive.olControlDragFeatureOver.olDragDown {
cursor: -moz-grabbing;
}
| {
"content_hash": "08baa4d938e989f94b157b526977c4f6",
"timestamp": "",
"source": "github",
"line_count": 343,
"max_line_length": 67,
"avg_line_length": 20.79008746355685,
"alnum_prop": 0.6975178796802692,
"repo_name": "Piskvor/staticMapLiteExt",
"id": "c9cf5b89338c7ba74b9d67e7e7bcd9f321b83a59",
"size": "7131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wizzard/js/openlayers/theme/default/style.css",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "10636"
},
{
"name": "HTML",
"bytes": "19683"
},
{
"name": "JavaScript",
"bytes": "3466"
},
{
"name": "PHP",
"bytes": "38575"
},
{
"name": "Shell",
"bytes": "799"
}
],
"symlink_target": ""
} |
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
describe PorpoiseExternalActions::Model do
it 'should normalize the base path' do
subject.stub(:movement_id).and_return('controlshift')
subject.normalized_base_path('foo').should == "api/movements/controlshift/foo/"
end
end | {
"content_hash": "e2ee63cf86d74d4c0d18956515fb16b2",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 83,
"avg_line_length": 27.727272727272727,
"alnum_prop": 0.7311475409836066,
"repo_name": "controlshift/porpoise_external_actions",
"id": "076f74e89e64efbfcfeaf7ad5186bef6994bee84",
"size": "305",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "spec/model_spec.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "13020"
}
],
"symlink_target": ""
} |
package ua.com.fielden.platform.example.dynamiccriteria;
import static ua.com.fielden.platform.entity.query.fluent.EntityQueryUtils.fetch;
import ua.com.fielden.platform.security.provider.IUserController;
import ua.com.fielden.platform.security.user.IUserDao;
import ua.com.fielden.platform.security.user.User;
import com.google.inject.Inject;
public class BaseUserProvider extends EntitycentreUserProvider {
@Inject
public BaseUserProvider(final IUserDao userDao) {
super(userDao);
}
@Override
protected User initUser() {
return userDao.findByKeyAndFetch(fetch(User.class).with("basedOnUser"), User.system_users.SU.name());
}
@Override
public void setUsername(final String username, final IUserController controller) {
}
}
| {
"content_hash": "42e8fa1f31785b6a32b60f0227b66fb9",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 109,
"avg_line_length": 30.115384615384617,
"alnum_prop": 0.7611749680715197,
"repo_name": "fieldenms/tg",
"id": "37a79a1f99bd636ae80ad3ae7f64f30b9cf23b7e",
"size": "783",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "platform-launcher/src/main/java/ua/com/fielden/platform/example/dynamiccriteria/BaseUserProvider.java",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "4729"
},
{
"name": "CSS",
"bytes": "177044"
},
{
"name": "CoffeeScript",
"bytes": "2455"
},
{
"name": "HTML",
"bytes": "2236957"
},
{
"name": "Java",
"bytes": "12685270"
},
{
"name": "JavaScript",
"bytes": "34404107"
},
{
"name": "Makefile",
"bytes": "28094"
},
{
"name": "Python",
"bytes": "3798"
},
{
"name": "Roff",
"bytes": "3102"
},
{
"name": "Shell",
"bytes": "13899"
},
{
"name": "TSQL",
"bytes": "1058"
},
{
"name": "TeX",
"bytes": "1296798"
},
{
"name": "XSLT",
"bytes": "6158"
}
],
"symlink_target": ""
} |
import sys, csv, time
from datetime import datetime
from cassandra.cluster import Cluster, Session, ResultSet
from cassandra.concurrent import execute_concurrent
def load_tag(tag_file: str) -> []:
"""
从标签文件载入标签ID
:param tag_file: 标签文件
:return: 标签ID列表
"""
tag_list = []
with open(tag_file, "r") as csv_input:
reader = csv.reader(csv_input, delimiter=',')
for row in reader:
if row is not None and len(row) > 0:
for tag_id in row:
tag_list.append(tag_id)
return tag_list
def load_to_file(tag_file, out_file):
"""
根据给定的标签,从cassandra数据库中读取标签用户数据到文件
:param tag_file: 标签文件
:param out_file: 标签用户文件
:return:
"""
row_count = 0
data_list = []
tag_list = load_tag(tag_file)
sql = "SELECT source_id, tag_id, user_id FROM tag_users"
sql = sql + " WHERE source_id in (1, 2) and tag_id=%s"
cluster = Cluster(['192.168.11.52'])
session = cluster.connect('dmp')
futures = []
with open(out_file, "w") as csv_output:
writer = csv.writer(csv_output, delimiter=',', lineterminator='\n')
for tag_id in tag_list:
futures.append(session.execute_async(sql, (int(tag_id),)))
# wait for them to complete and use the results
for future in futures:
rows = future.result()
for row in rows:
row_count = row_count + 1
data_list.append([str(row.source_id), str(row.tag_id), row.user_id])
if (len(data_list) >= 1000000):
print("标签用户文件写入" + str(row_count) + "行数据。")
writer.writerows(data_list)
data_list.clear()
if len(data_list) > 0:
writer.writerows(data_list)
session.shutdown()
cluster.shutdown()
if __name__ == '__main__':
"""
使用: python get_users.py tag_file out_file
在tag_file中给出要查找的标签,out_file里给出所查标签的标签用户数据
tag_file: 标签文件,一个标签ID一行,或多个标签ID一行,标签间用逗号分隔
out_file: 标签用户数据文件
"""
if (len(sys.argv) > 2):
tag_file = sys.argv[1]
out_file = sys.argv[2]
load_to_file(tag_file, out_file)
else:
print("usage: python get_users.py tag_file out_file")
| {
"content_hash": "1e765074a4957d84ba0b3cac0e950a7f",
"timestamp": "",
"source": "github",
"line_count": 84,
"max_line_length": 84,
"avg_line_length": 26.833333333333332,
"alnum_prop": 0.5687666370896185,
"repo_name": "hnlaomie/python-tools",
"id": "c8c33c446ef11e49ca7cc318c853f93a4ddc9f51",
"size": "2549",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "util/db/cassandra/get_users.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Protocol Buffer",
"bytes": "474"
},
{
"name": "Python",
"bytes": "206805"
}
],
"symlink_target": ""
} |
<!DOCTYPE html>
<html xmlns:msxsl="urn:schemas-microsoft-com:xslt">
<head>
<meta content="en-us" http-equiv="Content-Language" />
<meta content="text/html; charset=utf-16" http-equiv="Content-Type" />
<title _locid="PortabilityAnalysis0">.NET Portability Report</title>
<style>
/* Body style, for the entire document */
body {
background: #F3F3F4;
color: #1E1E1F;
font-family: "Segoe UI", Tahoma, Geneva, Verdana, sans-serif;
padding: 0;
margin: 0;
}
/* Header1 style, used for the main title */
h1 {
padding: 10px 0px 10px 10px;
font-size: 21pt;
background-color: #E2E2E2;
border-bottom: 1px #C1C1C2 solid;
color: #201F20;
margin: 0;
font-weight: normal;
}
/* Header2 style, used for "Overview" and other sections */
h2 {
font-size: 18pt;
font-weight: normal;
padding: 15px 0 5px 0;
margin: 0;
}
/* Header3 style, used for sub-sections, such as project name */
h3 {
font-weight: normal;
font-size: 15pt;
margin: 0;
padding: 15px 0 5px 0;
background-color: transparent;
}
h4 {
font-weight: normal;
font-size: 12pt;
margin: 0;
padding: 0 0 0 0;
background-color: transparent;
}
/* Color all hyperlinks one color */
a {
color: #1382CE;
}
/* Paragraph text (for longer informational messages) */
p {
font-size: 10pt;
}
/* Table styles */
table {
border-spacing: 0 0;
border-collapse: collapse;
font-size: 10pt;
}
table th {
background: #E7E7E8;
text-align: left;
text-decoration: none;
font-weight: normal;
padding: 3px 6px 3px 6px;
}
table td {
vertical-align: top;
padding: 3px 6px 5px 5px;
margin: 0px;
border: 1px solid #E7E7E8;
background: #F7F7F8;
}
.NoBreakingChanges {
color: darkgreen;
font-weight:bold;
}
.FewBreakingChanges {
color: orange;
font-weight:bold;
}
.ManyBreakingChanges {
color: red;
font-weight:bold;
}
.BreakDetails {
margin-left: 30px;
}
.CompatMessage {
font-style: italic;
font-size: 10pt;
}
.GoodMessage {
color: darkgreen;
}
/* Local link is a style for hyperlinks that link to file:/// content, there are lots so color them as 'normal' text until the user mouse overs */
.localLink {
color: #1E1E1F;
background: #EEEEED;
text-decoration: none;
}
.localLink:hover {
color: #1382CE;
background: #FFFF99;
text-decoration: none;
}
/* Center text, used in the over views cells that contain message level counts */
.textCentered {
text-align: center;
}
/* The message cells in message tables should take up all avaliable space */
.messageCell {
width: 100%;
}
/* Padding around the content after the h1 */
#content {
padding: 0px 12px 12px 12px;
}
/* The overview table expands to width, with a max width of 97% */
#overview table {
width: auto;
max-width: 75%;
}
/* The messages tables are always 97% width */
#messages table {
width: 97%;
}
/* All Icons */
.IconSuccessEncoded, .IconInfoEncoded, .IconWarningEncoded, .IconErrorEncoded {
min-width: 18px;
min-height: 18px;
background-repeat: no-repeat;
background-position: center;
}
/* Success icon encoded */
.IconSuccessEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconSuccess#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABPElEQVR4Xp1Tv0vDUBi8FqeA4NpBcBLcWnQSApncOnTo4FSnjP0DsnXpH5CxiwbHDg4Zuj4oOEXiJgiC4FDcCkLWmIMc1Pfw+eMgQ77v3Xf3Pe51YKGqqisAEwCR1TIAsiAIblSo6xrdHeJR85Xle3mdmCQKb0PsfqyxxzM8K15HZADl/H5+sHpZwYfxyRjTs+kWwKBx8yoHd2mRiuzF8mkJniWH/13u3Fjrs/EdhsdDFHGB/DLXEJBDLh1MWPAhPo1BLB4WX5yQywHR+m3tVe/t97D52CB/ziG0nIgD/qDuYg8WuCcVZ2YGwlJ3YDugkpR/VNcAEx6GEKhERSr71FuO4YCM4XBdwKvecjIlkSnsO0Hyp/GxSeJAdzBKzpOtnPwyyiPdAZhpZptT04tU+zk7s8czeges//s5C5+CwqrR4/gw+AAAAABJRU5ErkJggg==);
}
/* Information icon encoded */
.IconInfoEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconInformation#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABHElEQVR4Xs2TsUoDQRRF7wwoziokjZUKadInhdhukR9YP8DMX1hYW+QvdsXa/QHBbcXC7W0CamWTQnclFutceIQJwwaWNLlwm5k5d94M76mmaeCrrmsLYOocY12FcxZFUeozCqKqqgYA8uevv1H6VuPxcwlfk5N92KHBxfFeCSAxxswlYAW/Xr989x/mv9gkhtyMDhcAxgzRsp7flj8B/HF1RsMXq+NZMkopaHe7lbKxQUEIGbKsYNoGn969060hZBkQex/W8oRQwsQaW2o3Ago2SVcJUzAgY3N0lTCZZm+zPS8HB51gMmS1DEYyOz9acKO1D8JWTlafKIMxdhvlfdyT94Vv5h7P8Ky7nQzACmhvKq3zk3PjW9asz9D/1oigecsioooAAAAASUVORK5CYII=);
}
/* Warning icon encoded */
.IconWarningEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconWarning#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAACXBIWXMAAA7EAAAOxAGVKw4bAAAAx0lEQVR4XpWSMQ7CMAxFf4xAyBMLCxMrO8dhaBcuwdCJS3RJBw7SA/QGTCxdWJgiQYWKXJWKIXHIlyw5lqr34tQgEOdcBsCOx5yZK3hCCKdYXneQkh4pEfqzLfu+wVDSyyzFoJjfz9NB+pAF+eizx2Vruts0k15mPgvS6GYvpVtQhB61IB/dk6AF6fS4Ben0uIX5odtFe8Q/eW1KvFeH4e8khT6+gm5B+t3juyDt7n0jpe+CANTd+oTUjN/U3yVaABnSUjFz/gFq44JaVSCXeQAAAABJRU5ErkJggg==);
}
/* Error icon encoded */
.IconErrorEncoded {
/* Note: Do not delete the comment below. It is used to verify the correctness of the encoded image resource below before the product is released */
/* [---XsltValidateInternal-Base64EncodedImage:IconError#Begin#background-image: url(data:image/png;base64,#Separator#);#End#] */
background-image: url(data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAABQElEQVR4XqWTvUoEQRCE6wYPZUA80AfwAQz23uCMjA7MDRQEIzPBVEyNTQUFIw00vcQTTMzuAh/AxEQQT8HF/3G/oGGnEUGuoNnd6qoZuqltyKEsyzVJq5I6rnUp6SjGeGhESikzzlc1eL7opfuVbrqbU1Zw9NCgtQMaZpY0eNnaaL2fHusvTK5vKu7sjSS1Y4y3QUA6K3e3Mau5UFDyMP7tYF9o8cAHZv68vipoIJg971PZIZ5HiwdvYGGvFVFHmGmZ2MxwmQYPXubPl9Up0tfoMQGetXd6mRbvhBw+boZ6WF7Mbv1+GsHRk0fQmPAH1GfmZirbCfDJ61tw3Px8/8pZsPAG4jlVhcPgZ7adwNWBB68lkRQWFiTgFlbnLY3DGGM7izIJIyT/jjIvEJw6fdJTc6krDzh6aMwMP9bvDH4ADSsa9uSWVJkAAAAASUVORK5CYII=);
}
</style>
</head>
<body>
<h1 _locid="PortabilityReport">.NET Portability Report</h1>
<div id="content">
<div id="submissionId" style="font-size:8pt;">
<p>
<i>
Submission Id
223613a4-4c09-4dcd-98b5-bf272069f74f
</i>
</p>
</div>
<h2 _locid="SummaryTitle">
<a name="Portability Summary"></a>Portability Summary
</h2>
<div id="summary">
<table>
<tbody>
<tr>
<th>Assembly</th>
<th>ASP.NET 5,Version=v1.0</th>
<th>Windows,Version=v8.1</th>
<th>.NET Framework,Version=v4.6</th>
<th>Windows Phone,Version=v8.1</th>
</tr>
<tr>
<td><strong><a href="#SharpDX.DXGI">SharpDX.DXGI</a></strong></td>
<td class="text-center">100.00 %</td>
<td class="text-center">100.00 %</td>
<td class="text-center">100.00 %</td>
<td class="text-center">100.00 %</td>
</tr>
</tbody>
</table>
</div>
<div id="details">
</div>
</div>
</body>
</html> | {
"content_hash": "eaf989188d0851e802507cb419373a04",
"timestamp": "",
"source": "github",
"line_count": 240,
"max_line_length": 562,
"avg_line_length": 40.1625,
"alnum_prop": 0.5731922398589065,
"repo_name": "kuhlenh/port-to-core",
"id": "dcafa2e28470b851df2bb0eabedab902489ca810",
"size": "9639",
"binary": false,
"copies": "1",
"ref": "refs/heads/gh-pages",
"path": "Reports/co/cocossharp.windowsphone81.1.4.0/SharpDX.DXGI-wpa81.html",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2323514650"
}
],
"symlink_target": ""
} |
using System;
namespace StringlyTyped
{
internal class StringlyTypeConverter<T> : InternalInterfacesTypeConverter<T>
{
private readonly Type _targetType = typeof(T);
private T _instance;
private Stringly _stringly;
protected override void OnInstantiation(T instance)
{
_stringly = instance as Stringly;
if (_stringly == null)
throw new InvalidOperationException($"Type '{_targetType.Name}' doesn't inherit from '{typeof(Stringly).Name}'.");
_instance = instance;
}
protected override bool TryParseInternal(string value, out T result)
{
result = default(T);
string parsed;
var success = _stringly.TryParse(value, out parsed);
if (!success) return false;
_stringly.Value = parsed;
result = _instance;
return true;
}
}
} | {
"content_hash": "b08286bf72b6143560f456da1f54566c",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 130,
"avg_line_length": 27,
"alnum_prop": 0.580952380952381,
"repo_name": "mission202/Stringly.Typed",
"id": "a35925e2b1cb40ff7410b3347f9bd41fb2483a02",
"size": "945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Stringly.Typed/StringlyTypeConverter.cs",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "262"
},
{
"name": "C#",
"bytes": "19995"
}
],
"symlink_target": ""
} |
class PositiveNumberFormatValidator < ActiveModel::EachValidator
def validate_each(record, attribute, value)
return if value.blank?
if options[:string]
unless value =~ /^([0-9]+|\-[0-9]+)$/
record.errors.add(attribute, (options[:message] || :not_a_number))
return
end
end
value = value.to_i
if value < 0
record.errors.add(attribute, (options[:message] || :greater_than), count: 0)
end
if value > 200_000_000
record.errors.add(attribute, (options[:message] || :less_than), count: '2億')
end
end
end
| {
"content_hash": "f0df06113f7b4cb6f842c06c83782ccc",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 82,
"avg_line_length": 31.833333333333332,
"alnum_prop": 0.62478184991274,
"repo_name": "machikoe/palette_rails_validators",
"id": "6863d99e66446007bf09d3b26382e07765c0f8be",
"size": "575",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/validators/positive_number_format_validator.rb",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "30298"
},
{
"name": "Shell",
"bytes": "131"
}
],
"symlink_target": ""
} |
from nca47.db import api as db_api
from nca47.db.sqlalchemy.models import GslbZoneInfo
from nca47.objects import base
from nca47.objects import fields as object_fields
from nca47.common.exception import HaveSameObject
from nca47.common.exception import IsNotExistError
class GslbZone(base.Nca47Object):
VERSION = '1.0'
fields = {
'name': object_fields.StringField(),
'devices': object_fields.ListOfStringsField(),
'syn_server': object_fields.StringField(),
'enable': object_fields.StringField(),
'gslb_zone_id': object_fields.StringField(),
'tenant_id': object_fields.StringField()
}
def __init__(self, context=None, **kwarg):
self.db_api = db_api.get_instance()
super(GslbZone, self).__init__(context=None, **kwarg)
@staticmethod
def _from_db_object(dns_gslb_zone, db_dns_gslb_zone):
"""Converts a database entity to a formal :class:`GslbZone` object.
:param dns_zone: An object of :class:`GslbZone`.
:param db_dns_zone: A DB model of a GslbZone.
:return: a :class:`GslbZone` object.
"""
for field in dns_gslb_zone.fields:
dns_gslb_zone[field] = db_dns_gslb_zone[field]
dns_gslb_zone.obj_reset_changes()
return dns_gslb_zone
def create(self, context, values):
value = {}
value["name"] = values["name"]
value["tenant_id"] = values["tenant_id"]
obj_old = self.get_objects(context, **value)
if len(obj_old) != 0:
raise HaveSameObject(param_name=value["name"])
zone = self.db_api.create(GslbZoneInfo, values)
return zone
def update(self, context, zone_id, values):
value = {}
value["id"] = zone_id
obj_old = self.get_objects(context, **value)
if len(obj_old) == 0:
raise IsNotExistError(param_name=zone_id)
record = self.db_api.update_object(GslbZoneInfo, zone_id, values)
return record
def delete(self, context, zone_id):
value = {}
value["id"] = zone_id
obj_old = self.get_objects(context, **value)
if len(obj_old) == 0:
raise IsNotExistError(param_name=value["id"])
record = self.db_api.delete_object(GslbZoneInfo, zone_id)
return record
def get_objects(self, context, **values):
values["deleted"] = False
record = self.db_api.get_objects(GslbZoneInfo, **values)
return record
def get_object(self, context, **values):
values["deleted"] = False
try:
record = self.db_api.get_object(GslbZoneInfo, **values)
except Exception:
raise IsNotExistError(param_name=values["id"])
return record
def get_object_one(self, context, **values):
# get one information of gslb_zone
record = self.db_api.get_object(GslbZoneInfo, **values)
return record
| {
"content_hash": "6b18610c3a02733b0fed8c1b8944b6ce",
"timestamp": "",
"source": "github",
"line_count": 82,
"max_line_length": 75,
"avg_line_length": 35.68292682926829,
"alnum_prop": 0.6151742993848257,
"repo_name": "WosunOO/nca_xianshu",
"id": "3afc7bb5653af4f5b2d2d0a0049006612e90137a",
"size": "2926",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nca47/objects/dns/gslb_zone.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "723807"
}
],
"symlink_target": ""
} |
@protocol BasicClientDelegate <NSObject>
- (void)didReceiveStatusUpdateMessage:(NSString *)message;
@end
@interface BasicClient : NSObject
@property (nonatomic, strong) NSObject<BasicClientDelegate> *delegate;
+ (BasicClient *)sharedInstance;
- (void)sendHelloMessage;
@end
| {
"content_hash": "c087cd1cb4d19bfec0a172466bb4244e",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 70,
"avg_line_length": 18.733333333333334,
"alnum_prop": 0.7829181494661922,
"repo_name": "Vovkasquid/compassApp",
"id": "89d9e4699a01e1f087cfc108ef2d1743ff4efe6e",
"size": "1293",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alljoyn/alljoyn_objc/samples/OSX/basic_client/basic_client/BasicClient.h",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Arduino",
"bytes": "37177"
},
{
"name": "Batchfile",
"bytes": "5209"
},
{
"name": "C",
"bytes": "2487701"
},
{
"name": "C#",
"bytes": "98407"
},
{
"name": "C++",
"bytes": "11424962"
},
{
"name": "CSS",
"bytes": "19287"
},
{
"name": "Groff",
"bytes": "3146"
},
{
"name": "HTML",
"bytes": "36175"
},
{
"name": "Java",
"bytes": "2602308"
},
{
"name": "JavaScript",
"bytes": "646500"
},
{
"name": "Makefile",
"bytes": "43413"
},
{
"name": "Objective-C",
"bytes": "1395199"
},
{
"name": "Objective-C++",
"bytes": "679757"
},
{
"name": "Python",
"bytes": "439743"
},
{
"name": "Shell",
"bytes": "47261"
},
{
"name": "TeX",
"bytes": "789"
},
{
"name": "Visual Basic",
"bytes": "1285"
},
{
"name": "XSLT",
"bytes": "103689"
}
],
"symlink_target": ""
} |
FROM balenalib/apalis-imx6-debian:sid-build
ENV NODE_VERSION 15.6.0
ENV YARN_VERSION 1.22.4
RUN for key in \
6A010C5166006599AA17F08146C2130DFD2497F5 \
; do \
gpg --batch --keyserver pgp.mit.edu --recv-keys "$key" || \
gpg --batch --keyserver keyserver.pgp.com --recv-keys "$key" || \
gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key" ; \
done \
&& curl -SLO "http://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& echo "234871415c54174f91764f332a72631519a6af7b1a87797ad7c729855182f9cd node-v$NODE_VERSION-linux-armv7l.tar.gz" | sha256sum -c - \
&& tar -xzf "node-v$NODE_VERSION-linux-armv7l.tar.gz" -C /usr/local --strip-components=1 \
&& rm "node-v$NODE_VERSION-linux-armv7l.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz" \
&& curl -fSLO --compressed "https://yarnpkg.com/downloads/$YARN_VERSION/yarn-v$YARN_VERSION.tar.gz.asc" \
&& gpg --batch --verify yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& mkdir -p /opt/yarn \
&& tar -xzf yarn-v$YARN_VERSION.tar.gz -C /opt/yarn --strip-components=1 \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarn \
&& ln -s /opt/yarn/bin/yarn /usr/local/bin/yarnpkg \
&& rm yarn-v$YARN_VERSION.tar.gz.asc yarn-v$YARN_VERSION.tar.gz \
&& npm config set unsafe-perm true -g --unsafe-perm \
&& rm -rf /tmp/*
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
RUN curl -SLO "https://raw.githubusercontent.com/balena-io-library/base-images/8accad6af708fca7271c5c65f18a86782e19f877/scripts/assets/tests/[email protected]" \
&& echo "Running test-stack@node" \
&& chmod +x [email protected] \
&& bash [email protected] \
&& rm -rf [email protected]
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Sid \nVariant: build variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nNode.js v15.6.0, Yarn v1.22.4 \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | {
"content_hash": "b631bad49b56b1cf7b971e65795596c8",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 690,
"avg_line_length": 67.29268292682927,
"alnum_prop": 0.708590068865531,
"repo_name": "nghiant2710/base-images",
"id": "4b854d0ed3a2282c25792d87537d77dd43d76b86",
"size": "2780",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "balena-base-images/node/apalis-imx6/debian/sid/15.6.0/build/Dockerfile",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "144558581"
},
{
"name": "JavaScript",
"bytes": "16316"
},
{
"name": "Shell",
"bytes": "368690"
}
],
"symlink_target": ""
} |
Subsets and Splits