import sys
import time

enableInfo = True
enableError = True
enableWarning = True
enableDebug = False
silent = False

def info(s, pleaseNoPrint=None):
    if pleaseNoPrint is None:
        sys.stdout.write(s + "\n")
    else:
        # Wait until no other writer holds the shared counter, then hold it
        # ourselves while writing and flushing, so output is not interleaved.
        while pleaseNoPrint.value() > 0:
            time.sleep(0.01)
        pleaseNoPrint.increment()
        sys.stdout.write(s + "\n")
        sys.stdout.flush()
        pleaseNoPrint.decrement()

def infoNoNewline(s):
    sys.stdout.write(s)

def error(s):
    sys.stdout.write(s + "\n")

def warning(s):
    sys.stdout.write(s + "\n")

def debug(s):
    sys.stdout.write(s + "\n")
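
# A minimal usage sketch (not part of the module above): `pleaseNoPrint` is
# assumed to be a shared counter exposing value(), increment() and decrement()
# so concurrent writers can hold back interleaved output. The SharedCounter
# class below is hypothetical and only illustrates that interface.
if __name__ == "__main__":
    class SharedCounter:
        def __init__(self):
            self._n = 0
        def value(self):
            return self._n
        def increment(self):
            self._n += 1
        def decrement(self):
            self._n -= 1

    gate = SharedCounter()
    info("plain message")          # printed immediately
    info("guarded message", gate)  # waits until no other writer holds the gate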
import base64
import io
import json
import zlib

import msgpack
from requests.structures import CaseInsensitiveDict

from .compat import HTTPResponse, pickle, text_type


def _b64_decode_bytes(b):
    return base64.b64decode(b.encode("ascii"))


def _b64_decode_str(s):
    return _b64_decode_bytes(s).decode("utf8")


class Serializer(object):

    def dumps(self, request, response, body=None):
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            body = response.read(decode_content=False)

            # NOTE: 99% sure this is dead code. I'm only leaving it
            #       here b/c I don't have a test yet to prove
            #       it. Basically, before using
            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
            #       this made an effort to reset the file handle. The
            #       `CallbackFileWrapper` short circuits this code by
            #       setting the body as the content is consumed, the
            #       result being a `body` argument is *always* passed
            #       into cache_response, and in turn,
            #       `Serializer.dump`.
            response._fp = io.BytesIO(body)

        # NOTE: This is all a bit weird, but it's really important that on
        #       Python 2.x these objects are unicode and not str, even when
        #       they contain only ascii. The problem here is that msgpack
        #       understands the difference between unicode and bytes and we
        #       have it set to differentiate between them, however Python 2
        #       doesn't know the difference. Forcing these to unicode will be
        #       enough to have msgpack know the difference.
        data = {
            u"response": {
                u"body": body,
                u"headers": dict(
                    (text_type(k), text_type(v)) for k, v in response.headers.items()
                ),
                u"status": response.status,
                u"version": response.version,
                u"reason": text_type(response.reason),
                u"strict": response.strict,
                u"decode_content": response.decode_content,
            }
        }

        # Construct our vary headers
        data[u"vary"] = {}
        if u"vary" in response_headers:
            varied_headers = response_headers[u"vary"].split(",")
            for header in varied_headers:
                header = text_type(header).strip()
                header_value = request.headers.get(header, None)
                if header_value is not None:
                    header_value = text_type(header_value)
                data[u"vary"][header] = header_value

        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])

    def loads(self, request, data):
        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{}".format(ver))(request, data)
        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        # This case is also handled in the controller code when creating
        # a cache entry, but is left here for backwards compatibility.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
        if headers.get("transfer-encoding", "") == "chunked":
            headers.pop("transfer-encoding")

        cached["response"]["headers"] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode("utf8"))

        return HTTPResponse(body=body, preload_content=False, **cached["response"])

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except (ValueError, zlib.error):
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)

    def _loads_v3(self, request, data):
        # Due to Python 2 encoding issues, it's impossible to know for sure
        # exactly how to load v3 entries, thus we'll treat these as a miss so
        # that they get rewritten out as v4 entries.
        return

    def _loads_v4(self, request, data):
        try:
            cached = msgpack.loads(data, raw=False)
        except ValueError:
            return

        return self.prepare_response(request, cached)
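
# Illustrative sketch (not part of the module above): a v4 cache blob is the
# literal b"cc=4," version prefix followed by a msgpack map, and loads()
# splits on the first comma to dispatch to the matching _loads_vN method.
# The blob below is hand-built only to show that framing; real blobs come
# from dumps() with live request/response objects.
if __name__ == "__main__":
    blob = b",".join([b"cc=4", msgpack.dumps({u"response": {}}, use_bin_type=True)])
    ver, payload = blob.split(b",", 1)
    assert ver == b"cc=4"
    assert msgpack.loads(payload, raw=False) == {u"response": {}}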
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto
package urlfetch
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
type URLFetchServiceError_ErrorCode int32
const (
URLFetchServiceError_OK URLFetchServiceError_ErrorCode = 0
URLFetchServiceError_INVALID_URL URLFetchServiceError_ErrorCode = 1
URLFetchServiceError_FETCH_ERROR URLFetchServiceError_ErrorCode = 2
URLFetchServiceError_UNSPECIFIED_ERROR URLFetchServiceError_ErrorCode = 3
URLFetchServiceError_RESPONSE_TOO_LARGE URLFetchServiceError_ErrorCode = 4
URLFetchServiceError_DEADLINE_EXCEEDED URLFetchServiceError_ErrorCode = 5
URLFetchServiceError_SSL_CERTIFICATE_ERROR URLFetchServiceError_ErrorCode = 6
URLFetchServiceError_DNS_ERROR URLFetchServiceError_ErrorCode = 7
URLFetchServiceError_CLOSED URLFetchServiceError_ErrorCode = 8
URLFetchServiceError_INTERNAL_TRANSIENT_ERROR URLFetchServiceError_ErrorCode = 9
URLFetchServiceError_TOO_MANY_REDIRECTS URLFetchServiceError_ErrorCode = 10
URLFetchServiceError_MALFORMED_REPLY URLFetchServiceError_ErrorCode = 11
URLFetchServiceError_CONNECTION_ERROR URLFetchServiceError_ErrorCode = 12
)
var URLFetchServiceError_ErrorCode_name = map[int32]string{
0: "OK",
1: "INVALID_URL",
2: "FETCH_ERROR",
3: "UNSPECIFIED_ERROR",
4: "RESPONSE_TOO_LARGE",
5: "DEADLINE_EXCEEDED",
6: "SSL_CERTIFICATE_ERROR",
7: "DNS_ERROR",
8: "CLOSED",
9: "INTERNAL_TRANSIENT_ERROR",
10: "TOO_MANY_REDIRECTS",
11: "MALFORMED_REPLY",
12: "CONNECTION_ERROR",
}
var URLFetchServiceError_ErrorCode_value = map[string]int32{
"OK": 0,
"INVALID_URL": 1,
"FETCH_ERROR": 2,
"UNSPECIFIED_ERROR": 3,
"RESPONSE_TOO_LARGE": 4,
"DEADLINE_EXCEEDED": 5,
"SSL_CERTIFICATE_ERROR": 6,
"DNS_ERROR": 7,
"CLOSED": 8,
"INTERNAL_TRANSIENT_ERROR": 9,
"TOO_MANY_REDIRECTS": 10,
"MALFORMED_REPLY": 11,
"CONNECTION_ERROR": 12,
}
func (x URLFetchServiceError_ErrorCode) Enum() *URLFetchServiceError_ErrorCode {
p := new(URLFetchServiceError_ErrorCode)
*p = x
return p
}
func (x URLFetchServiceError_ErrorCode) String() string {
return proto.EnumName(URLFetchServiceError_ErrorCode_name, int32(x))
}
func (x *URLFetchServiceError_ErrorCode) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(URLFetchServiceError_ErrorCode_value, data, "URLFetchServiceError_ErrorCode")
if err != nil {
return err
}
*x = URLFetchServiceError_ErrorCode(value)
return nil
}
func (URLFetchServiceError_ErrorCode) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0, 0}
}
type URLFetchRequest_RequestMethod int32
const (
URLFetchRequest_GET URLFetchRequest_RequestMethod = 1
URLFetchRequest_POST URLFetchRequest_RequestMethod = 2
URLFetchRequest_HEAD URLFetchRequest_RequestMethod = 3
URLFetchRequest_PUT URLFetchRequest_RequestMethod = 4
URLFetchRequest_DELETE URLFetchRequest_RequestMethod = 5
URLFetchRequest_PATCH URLFetchRequest_RequestMethod = 6
)
var URLFetchRequest_RequestMethod_name = map[int32]string{
1: "GET",
2: "POST",
3: "HEAD",
4: "PUT",
5: "DELETE",
6: "PATCH",
}
var URLFetchRequest_RequestMethod_value = map[string]int32{
"GET": 1,
"POST": 2,
"HEAD": 3,
"PUT": 4,
"DELETE": 5,
"PATCH": 6,
}
func (x URLFetchRequest_RequestMethod) Enum() *URLFetchRequest_RequestMethod {
p := new(URLFetchRequest_RequestMethod)
*p = x
return p
}
func (x URLFetchRequest_RequestMethod) String() string {
return proto.EnumName(URLFetchRequest_RequestMethod_name, int32(x))
}
func (x *URLFetchRequest_RequestMethod) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(URLFetchRequest_RequestMethod_value, data, "URLFetchRequest_RequestMethod")
if err != nil {
return err
}
*x = URLFetchRequest_RequestMethod(value)
return nil
}
func (URLFetchRequest_RequestMethod) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
}
type URLFetchServiceError struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *URLFetchServiceError) Reset() { *m = URLFetchServiceError{} }
func (m *URLFetchServiceError) String() string { return proto.CompactTextString(m) }
func (*URLFetchServiceError) ProtoMessage() {}
func (*URLFetchServiceError) Descriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{0}
}
func (m *URLFetchServiceError) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_URLFetchServiceError.Unmarshal(m, b)
}
func (m *URLFetchServiceError) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_URLFetchServiceError.Marshal(b, m, deterministic)
}
func (dst *URLFetchServiceError) XXX_Merge(src proto.Message) {
xxx_messageInfo_URLFetchServiceError.Merge(dst, src)
}
func (m *URLFetchServiceError) XXX_Size() int {
return xxx_messageInfo_URLFetchServiceError.Size(m)
}
func (m *URLFetchServiceError) XXX_DiscardUnknown() {
xxx_messageInfo_URLFetchServiceError.DiscardUnknown(m)
}
var xxx_messageInfo_URLFetchServiceError proto.InternalMessageInfo
type URLFetchRequest struct {
Method *URLFetchRequest_RequestMethod `protobuf:"varint,1,req,name=Method,enum=appengine.URLFetchRequest_RequestMethod" json:"Method,omitempty"`
Url *string `protobuf:"bytes,2,req,name=Url" json:"Url,omitempty"`
Header []*URLFetchRequest_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
Payload []byte `protobuf:"bytes,6,opt,name=Payload" json:"Payload,omitempty"`
FollowRedirects *bool `protobuf:"varint,7,opt,name=FollowRedirects,def=1" json:"FollowRedirects,omitempty"`
Deadline *float64 `protobuf:"fixed64,8,opt,name=Deadline" json:"Deadline,omitempty"`
MustValidateServerCertificate *bool `protobuf:"varint,9,opt,name=MustValidateServerCertificate,def=1" json:"MustValidateServerCertificate,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *URLFetchRequest) Reset() { *m = URLFetchRequest{} }
func (m *URLFetchRequest) String() string { return proto.CompactTextString(m) }
func (*URLFetchRequest) ProtoMessage() {}
func (*URLFetchRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1}
}
func (m *URLFetchRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_URLFetchRequest.Unmarshal(m, b)
}
func (m *URLFetchRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_URLFetchRequest.Marshal(b, m, deterministic)
}
func (dst *URLFetchRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_URLFetchRequest.Merge(dst, src)
}
func (m *URLFetchRequest) XXX_Size() int {
return xxx_messageInfo_URLFetchRequest.Size(m)
}
func (m *URLFetchRequest) XXX_DiscardUnknown() {
xxx_messageInfo_URLFetchRequest.DiscardUnknown(m)
}
var xxx_messageInfo_URLFetchRequest proto.InternalMessageInfo
const Default_URLFetchRequest_FollowRedirects bool = true
const Default_URLFetchRequest_MustValidateServerCertificate bool = true
func (m *URLFetchRequest) GetMethod() URLFetchRequest_RequestMethod {
if m != nil && m.Method != nil {
return *m.Method
}
return URLFetchRequest_GET
}
func (m *URLFetchRequest) GetUrl() string {
if m != nil && m.Url != nil {
return *m.Url
}
return ""
}
func (m *URLFetchRequest) GetHeader() []*URLFetchRequest_Header {
if m != nil {
return m.Header
}
return nil
}
func (m *URLFetchRequest) GetPayload() []byte {
if m != nil {
return m.Payload
}
return nil
}
func (m *URLFetchRequest) GetFollowRedirects() bool {
if m != nil && m.FollowRedirects != nil {
return *m.FollowRedirects
}
return Default_URLFetchRequest_FollowRedirects
}
func (m *URLFetchRequest) GetDeadline() float64 {
if m != nil && m.Deadline != nil {
return *m.Deadline
}
return 0
}
func (m *URLFetchRequest) GetMustValidateServerCertificate() bool {
if m != nil && m.MustValidateServerCertificate != nil {
return *m.MustValidateServerCertificate
}
return Default_URLFetchRequest_MustValidateServerCertificate
}
type URLFetchRequest_Header struct {
Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *URLFetchRequest_Header) Reset() { *m = URLFetchRequest_Header{} }
func (m *URLFetchRequest_Header) String() string { return proto.CompactTextString(m) }
func (*URLFetchRequest_Header) ProtoMessage() {}
func (*URLFetchRequest_Header) Descriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{1, 0}
}
func (m *URLFetchRequest_Header) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_URLFetchRequest_Header.Unmarshal(m, b)
}
func (m *URLFetchRequest_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_URLFetchRequest_Header.Marshal(b, m, deterministic)
}
func (dst *URLFetchRequest_Header) XXX_Merge(src proto.Message) {
xxx_messageInfo_URLFetchRequest_Header.Merge(dst, src)
}
func (m *URLFetchRequest_Header) XXX_Size() int {
return xxx_messageInfo_URLFetchRequest_Header.Size(m)
}
func (m *URLFetchRequest_Header) XXX_DiscardUnknown() {
xxx_messageInfo_URLFetchRequest_Header.DiscardUnknown(m)
}
var xxx_messageInfo_URLFetchRequest_Header proto.InternalMessageInfo
func (m *URLFetchRequest_Header) GetKey() string {
if m != nil && m.Key != nil {
return *m.Key
}
return ""
}
func (m *URLFetchRequest_Header) GetValue() string {
if m != nil && m.Value != nil {
return *m.Value
}
return ""
}
type URLFetchResponse struct {
Content []byte `protobuf:"bytes,1,opt,name=Content" json:"Content,omitempty"`
StatusCode *int32 `protobuf:"varint,2,req,name=StatusCode" json:"StatusCode,omitempty"`
Header []*URLFetchResponse_Header `protobuf:"group,3,rep,name=Header,json=header" json:"header,omitempty"`
ContentWasTruncated *bool `protobuf:"varint,6,opt,name=ContentWasTruncated,def=0" json:"ContentWasTruncated,omitempty"`
ExternalBytesSent *int64 `protobuf:"varint,7,opt,name=ExternalBytesSent" json:"ExternalBytesSent,omitempty"`
ExternalBytesReceived *int64 `protobuf:"varint,8,opt,name=ExternalBytesReceived" json:"ExternalBytesReceived,omitempty"`
FinalUrl *string `protobuf:"bytes,9,opt,name=FinalUrl" json:"FinalUrl,omitempty"`
ApiCpuMilliseconds *int64 `protobuf:"varint,10,opt,name=ApiCpuMilliseconds,def=0" json:"ApiCpuMilliseconds,omitempty"`
ApiBytesSent *int64 `protobuf:"varint,11,opt,name=ApiBytesSent,def=0" json:"ApiBytesSent,omitempty"`
ApiBytesReceived *int64 `protobuf:"varint,12,opt,name=ApiBytesReceived,def=0" json:"ApiBytesReceived,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *URLFetchResponse) Reset() { *m = URLFetchResponse{} }
func (m *URLFetchResponse) String() string { return proto.CompactTextString(m) }
func (*URLFetchResponse) ProtoMessage() {}
func (*URLFetchResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2}
}
func (m *URLFetchResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_URLFetchResponse.Unmarshal(m, b)
}
func (m *URLFetchResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_URLFetchResponse.Marshal(b, m, deterministic)
}
func (dst *URLFetchResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_URLFetchResponse.Merge(dst, src)
}
func (m *URLFetchResponse) XXX_Size() int {
return xxx_messageInfo_URLFetchResponse.Size(m)
}
func (m *URLFetchResponse) XXX_DiscardUnknown() {
xxx_messageInfo_URLFetchResponse.DiscardUnknown(m)
}
var xxx_messageInfo_URLFetchResponse proto.InternalMessageInfo
const Default_URLFetchResponse_ContentWasTruncated bool = false
const Default_URLFetchResponse_ApiCpuMilliseconds int64 = 0
const Default_URLFetchResponse_ApiBytesSent int64 = 0
const Default_URLFetchResponse_ApiBytesReceived int64 = 0
func (m *URLFetchResponse) GetContent() []byte {
if m != nil {
return m.Content
}
return nil
}
func (m *URLFetchResponse) GetStatusCode() int32 {
if m != nil && m.StatusCode != nil {
return *m.StatusCode
}
return 0
}
func (m *URLFetchResponse) GetHeader() []*URLFetchResponse_Header {
if m != nil {
return m.Header
}
return nil
}
func (m *URLFetchResponse) GetContentWasTruncated() bool {
if m != nil && m.ContentWasTruncated != nil {
return *m.ContentWasTruncated
}
return Default_URLFetchResponse_ContentWasTruncated
}
func (m *URLFetchResponse) GetExternalBytesSent() int64 {
if m != nil && m.ExternalBytesSent != nil {
return *m.ExternalBytesSent
}
return 0
}
func (m *URLFetchResponse) GetExternalBytesReceived() int64 {
if m != nil && m.ExternalBytesReceived != nil {
return *m.ExternalBytesReceived
}
return 0
}
func (m *URLFetchResponse) GetFinalUrl() string {
if m != nil && m.FinalUrl != nil {
return *m.FinalUrl
}
return ""
}
func (m *URLFetchResponse) GetApiCpuMilliseconds() int64 {
if m != nil && m.ApiCpuMilliseconds != nil {
return *m.ApiCpuMilliseconds
}
return Default_URLFetchResponse_ApiCpuMilliseconds
}
func (m *URLFetchResponse) GetApiBytesSent() int64 {
if m != nil && m.ApiBytesSent != nil {
return *m.ApiBytesSent
}
return Default_URLFetchResponse_ApiBytesSent
}
func (m *URLFetchResponse) GetApiBytesReceived() int64 {
if m != nil && m.ApiBytesReceived != nil {
return *m.ApiBytesReceived
}
return Default_URLFetchResponse_ApiBytesReceived
}
type URLFetchResponse_Header struct {
Key *string `protobuf:"bytes,4,req,name=Key" json:"Key,omitempty"`
Value *string `protobuf:"bytes,5,req,name=Value" json:"Value,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *URLFetchResponse_Header) Reset() { *m = URLFetchResponse_Header{} }
func (m *URLFetchResponse_Header) String() string { return proto.CompactTextString(m) }
func (*URLFetchResponse_Header) ProtoMessage() {}
func (*URLFetchResponse_Header) Descriptor() ([]byte, []int) {
return fileDescriptor_urlfetch_service_b245a7065f33bced, []int{2, 0}
}
func (m *URLFetchResponse_Header) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_URLFetchResponse_Header.Unmarshal(m, b)
}
func (m *URLFetchResponse_Header) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_URLFetchResponse_Header.Marshal(b, m, deterministic)
}
func (dst *URLFetchResponse_Header) XXX_Merge(src proto.Message) {
xxx_messageInfo_URLFetchResponse_Header.Merge(dst, src)
}
func (m *URLFetchResponse_Header) XXX_Size() int {
return xxx_messageInfo_URLFetchResponse_Header.Size(m)
}
func (m *URLFetchResponse_Header) XXX_DiscardUnknown() {
xxx_messageInfo_URLFetchResponse_Header.DiscardUnknown(m)
}
var xxx_messageInfo_URLFetchResponse_Header proto.InternalMessageInfo
func (m *URLFetchResponse_Header) GetKey() string {
if m != nil && m.Key != nil {
return *m.Key
}
return ""
}
func (m *URLFetchResponse_Header) GetValue() string {
if m != nil && m.Value != nil {
return *m.Value
}
return ""
}
func init() {
proto.RegisterType((*URLFetchServiceError)(nil), "appengine.URLFetchServiceError")
proto.RegisterType((*URLFetchRequest)(nil), "appengine.URLFetchRequest")
proto.RegisterType((*URLFetchRequest_Header)(nil), "appengine.URLFetchRequest.Header")
proto.RegisterType((*URLFetchResponse)(nil), "appengine.URLFetchResponse")
proto.RegisterType((*URLFetchResponse_Header)(nil), "appengine.URLFetchResponse.Header")
}
func init() {
proto.RegisterFile("google.golang.org/appengine/internal/urlfetch/urlfetch_service.proto", fileDescriptor_urlfetch_service_b245a7065f33bced)
}
var fileDescriptor_urlfetch_service_b245a7065f33bced = []byte{
// 770 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x94, 0x54, 0xdd, 0x6e, 0xe3, 0x54,
0x10, 0xc6, 0x76, 0x7e, 0xa7, 0x5d, 0x7a, 0x76, 0xb6, 0x45, 0x66, 0xb5, 0xa0, 0x10, 0x09, 0x29,
0x17, 0x90, 0x2e, 0x2b, 0x24, 0x44, 0xaf, 0x70, 0xed, 0x93, 0xad, 0xa9, 0x63, 0x47, 0xc7, 0x4e,
0x61, 0xb9, 0xb1, 0xac, 0x78, 0x9a, 0x5a, 0xb2, 0xec, 0x60, 0x9f, 0x2c, 0xf4, 0x35, 0x78, 0x0d,
0xde, 0x87, 0xa7, 0xe1, 0x02, 0x9d, 0xc4, 0xc9, 0x6e, 0xbb, 0xd1, 0x4a, 0x5c, 0x65, 0xe6, 0x9b,
0xef, 0xcc, 0x99, 0x7c, 0xdf, 0xf8, 0x80, 0xb3, 0x2c, 0xcb, 0x65, 0x4e, 0xe3, 0x65, 0x99, 0x27,
0xc5, 0x72, 0x5c, 0x56, 0xcb, 0xf3, 0x64, 0xb5, 0xa2, 0x62, 0x99, 0x15, 0x74, 0x9e, 0x15, 0x92,
0xaa, 0x22, 0xc9, 0xcf, 0xd7, 0x55, 0x7e, 0x4b, 0x72, 0x71, 0xb7, 0x0f, 0xe2, 0x9a, 0xaa, 0xb7,
0xd9, 0x82, 0xc6, 0xab, 0xaa, 0x94, 0x25, 0xf6, 0xf7, 0x67, 0x86, 0x7f, 0xeb, 0x70, 0x3a, 0x17,
0xde, 0x44, 0xb1, 0xc2, 0x2d, 0x89, 0x57, 0x55, 0x59, 0x0d, 0xff, 0xd2, 0xa1, 0xbf, 0x89, 0xec,
0x32, 0x25, 0xec, 0x80, 0x1e, 0x5c, 0xb3, 0x4f, 0xf0, 0x04, 0x8e, 0x5c, 0xff, 0xc6, 0xf2, 0x5c,
0x27, 0x9e, 0x0b, 0x8f, 0x69, 0x0a, 0x98, 0xf0, 0xc8, 0xbe, 0x8a, 0xb9, 0x10, 0x81, 0x60, 0x3a,
0x9e, 0xc1, 0xd3, 0xb9, 0x1f, 0xce, 0xb8, 0xed, 0x4e, 0x5c, 0xee, 0x34, 0xb0, 0x81, 0x9f, 0x01,
0x0a, 0x1e, 0xce, 0x02, 0x3f, 0xe4, 0x71, 0x14, 0x04, 0xb1, 0x67, 0x89, 0xd7, 0x9c, 0xb5, 0x14,
0xdd, 0xe1, 0x96, 0xe3, 0xb9, 0x3e, 0x8f, 0xf9, 0xaf, 0x36, 0xe7, 0x0e, 0x77, 0x58, 0x1b, 0x3f,
0x87, 0xb3, 0x30, 0xf4, 0x62, 0x9b, 0x8b, 0xc8, 0x9d, 0xb8, 0xb6, 0x15, 0xf1, 0xa6, 0x53, 0x07,
0x9f, 0x40, 0xdf, 0xf1, 0xc3, 0x26, 0xed, 0x22, 0x40, 0xc7, 0xf6, 0x82, 0x90, 0x3b, 0xac, 0x87,
0x2f, 0xc0, 0x74, 0xfd, 0x88, 0x0b, 0xdf, 0xf2, 0xe2, 0x48, 0x58, 0x7e, 0xe8, 0x72, 0x3f, 0x6a,
0x98, 0x7d, 0x35, 0x82, 0xba, 0x79, 0x6a, 0xf9, 0x6f, 0x62, 0xc1, 0x1d, 0x57, 0x70, 0x3b, 0x0a,
0x19, 0xe0, 0x33, 0x38, 0x99, 0x5a, 0xde, 0x24, 0x10, 0x53, 0xee, 0xc4, 0x82, 0xcf, 0xbc, 0x37,
0xec, 0x08, 0x4f, 0x81, 0xd9, 0x81, 0xef, 0x73, 0x3b, 0x72, 0x03, 0xbf, 0x69, 0x71, 0x3c, 0xfc,
0xc7, 0x80, 0x93, 0x9d, 0x5a, 0x82, 0x7e, 0x5f, 0x53, 0x2d, 0xf1, 0x27, 0xe8, 0x4c, 0x49, 0xde,
0x95, 0xa9, 0xa9, 0x0d, 0xf4, 0xd1, 0xa7, 0xaf, 0x46, 0xe3, 0xbd, 0xba, 0xe3, 0x47, 0xdc, 0x71,
0xf3, 0xbb, 0xe5, 0x8b, 0xe6, 0x1c, 0x32, 0x30, 0xe6, 0x55, 0x6e, 0xea, 0x03, 0x7d, 0xd4, 0x17,
0x2a, 0xc4, 0x1f, 0xa1, 0x73, 0x47, 0x49, 0x4a, 0x95, 0x69, 0x0c, 0x8c, 0x11, 0xbc, 0xfa, 0xea,
0x23, 0x3d, 0xaf, 0x36, 0x44, 0xd1, 0x1c, 0xc0, 0x17, 0xd0, 0x9d, 0x25, 0xf7, 0x79, 0x99, 0xa4,
0x66, 0x67, 0xa0, 0x8d, 0x8e, 0x2f, 0xf5, 0x9e, 0x26, 0x76, 0x10, 0x8e, 0xe1, 0x64, 0x52, 0xe6,
0x79, 0xf9, 0x87, 0xa0, 0x34, 0xab, 0x68, 0x21, 0x6b, 0xb3, 0x3b, 0xd0, 0x46, 0xbd, 0x8b, 0x96,
0xac, 0xd6, 0x24, 0x1e, 0x17, 0xf1, 0x39, 0xf4, 0x1c, 0x4a, 0xd2, 0x3c, 0x2b, 0xc8, 0xec, 0x0d,
0xb4, 0x91, 0x26, 0xf6, 0x39, 0xfe, 0x0c, 0x5f, 0x4c, 0xd7, 0xb5, 0xbc, 0x49, 0xf2, 0x2c, 0x4d,
0x24, 0xa9, 0xed, 0xa1, 0xca, 0xa6, 0x4a, 0x66, 0xb7, 0xd9, 0x22, 0x91, 0x64, 0xf6, 0xdf, 0xeb,
0xfc, 0x71, 0xea, 0xf3, 0x97, 0xd0, 0xd9, 0xfe, 0x0f, 0x25, 0xc6, 0x35, 0xdd, 0x9b, 0xad, 0xad,
0x18, 0xd7, 0x74, 0x8f, 0xa7, 0xd0, 0xbe, 0x49, 0xf2, 0x35, 0x99, 0xed, 0x0d, 0xb6, 0x4d, 0x86,
0x1e, 0x3c, 0x79, 0xa0, 0x26, 0x76, 0xc1, 0x78, 0xcd, 0x23, 0xa6, 0x61, 0x0f, 0x5a, 0xb3, 0x20,
0x8c, 0x98, 0xae, 0xa2, 0x2b, 0x6e, 0x39, 0xcc, 0x50, 0xc5, 0xd9, 0x3c, 0x62, 0x2d, 0xb5, 0x2e,
0x0e, 0xf7, 0x78, 0xc4, 0x59, 0x1b, 0xfb, 0xd0, 0x9e, 0x59, 0x91, 0x7d, 0xc5, 0x3a, 0xc3, 0x7f,
0x0d, 0x60, 0xef, 0x84, 0xad, 0x57, 0x65, 0x51, 0x13, 0x9a, 0xd0, 0xb5, 0xcb, 0x42, 0x52, 0x21,
0x4d, 0x4d, 0x49, 0x29, 0x76, 0x29, 0x7e, 0x09, 0x10, 0xca, 0x44, 0xae, 0x6b, 0xf5, 0x71, 0x6c,
0x8c, 0x6b, 0x8b, 0xf7, 0x10, 0xbc, 0x78, 0xe4, 0xdf, 0xf0, 0xa0, 0x7f, 0xdb, 0x6b, 0x1e, 0x1b,
0xf8, 0x03, 0x3c, 0x6b, 0xae, 0xf9, 0x25, 0xa9, 0xa3, 0x6a, 0x5d, 0x28, 0x81, 0xb6, 0x66, 0xf6,
0x2e, 0xda, 0xb7, 0x49, 0x5e, 0x93, 0x38, 0xc4, 0xc0, 0x6f, 0xe0, 0x29, 0xff, 0x73, 0xfb, 0x02,
0x5c, 0xde, 0x4b, 0xaa, 0x43, 0x35, 0xb8, 0x72, 0xd7, 0x10, 0x1f, 0x16, 0xf0, 0x7b, 0x38, 0x7b,
0x00, 0x0a, 0x5a, 0x50, 0xf6, 0x96, 0xd2, 0x8d, 0xcd, 0x86, 0x38, 0x5c, 0x54, 0xfb, 0x30, 0xc9,
0x8a, 0x24, 0x57, 0xfb, 0xaa, 0xec, 0xed, 0x8b, 0x7d, 0x8e, 0xdf, 0x01, 0x5a, 0xab, 0xcc, 0x5e,
0xad, 0xa7, 0x59, 0x9e, 0x67, 0x35, 0x2d, 0xca, 0x22, 0xad, 0x4d, 0x50, 0xed, 0x2e, 0xb4, 0x97,
0xe2, 0x40, 0x11, 0xbf, 0x86, 0x63, 0x6b, 0x95, 0xbd, 0x9b, 0xf6, 0x68, 0x47, 0x7e, 0x00, 0xe3,
0xb7, 0xc0, 0x76, 0xf9, 0x7e, 0xcc, 0xe3, 0x1d, 0xf5, 0x83, 0xd2, 0xff, 0x5f, 0xa6, 0x4b, 0xf8,
0xad, 0xb7, 0x7b, 0x2a, 0xff, 0x0b, 0x00, 0x00, 0xff, 0xff, 0x1d, 0x9f, 0x6d, 0x24, 0x63, 0x05,
0x00, 0x00,
}
/*/////////////////////////////////////////////////////////////////////////////
AUTHOR: Sajjad Taheri sajjadt[at]uci[at]edu
LICENSE AGREEMENT
Copyright (c) 2015, University of california, Irvine
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. All advertising materials mentioning features or use of this software
must display the following acknowledgement:
This product includes software developed by the UC Irvine.
4. Neither the name of the UC Irvine nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY UC IRVINE ''AS IS'' AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL UC IRVINE OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
/////////////////////////////////////////////////////////////////////////////*/
QUnit.module ("Computational Photography", {});
QUnit.test("Test Inpinting", function(assert) {
// Inpaint
{
let mat = new cv.Mat([4, 4], cv.CV_8UC3),
mask = cv.Mat.eye([4, 4], cv.CV_8UC1),
dest = new cv.Mat();
cv.inpaint(mat, mask, dest, 2, cv.INPAINT_TELEA);
let size = dest.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dest.channels(), 3);
mat.delete();
mask.delete();
dest.delete();
}
});
QUnit.test("Test Denoising", function(assert) {
// void fastNlMeansDenoising( InputArray src, OutputArray dst, float h = 3,
// int templateWindowSize = 7, int searchWindowSize = 21);
{
let mat = new cv.Mat([4, 4], cv.CV_8UC3),
dest = new cv.Mat();
cv.fastNlMeansDenoising(mat, dest, 3, 7, 21);
let size = dest.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dest.channels(), 3);
size.delete();
mat.delete();
dest.delete();
}
// void fastNlMeansDenoisingColoredMulti( InputArrayOfArrays srcImgs, OutputArray dst,
// int imgToDenoiseIndex, int temporalWindowSize,
// float h = 3, float hColor = 3,
// int templateWindowSize = 7, int searchWindowSize = 21);
{
let mat1 = new cv.Mat([4, 4], cv.CV_8UC3),
mat2 = new cv.Mat([4, 4], cv.CV_8UC3),
mat3 = new cv.Mat([4, 4], cv.CV_8UC3);
let inputArray = new cv.MatVector();
inputArray.push_back(mat1);
inputArray.push_back(mat2);
inputArray.push_back(mat3);
let dest = new cv.Mat();
cv.fastNlMeansDenoisingColoredMulti(inputArray, dest, 1, 1, 3, 3, 7, 21);
let size = dest.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dest.channels(), 3);
size.delete();
mat1.delete();
mat2.delete();
mat3.delete();
dest.delete();
inputArray.delete();
}
// void denoise_TVL1(const std::vector<Mat>& observations,Mat& result, double lambda=1.0, int niters=30);
{
let mat1 = new cv.Mat([4, 4], cv.CV_8UC1),
mat2 = new cv.Mat([4, 4], cv.CV_8UC1),
mat3 = new cv.Mat([4, 4], cv.CV_8UC1),
dest = new cv.Mat(),
inputArray = new cv.MatVector();
inputArray.push_back(mat1);
inputArray.push_back(mat2);
inputArray.push_back(mat3);
cv.denoise_TVL1(inputArray, dest, 1.0, 30);
let size = dest.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dest.channels(), 1);
size.delete();
mat1.delete();
mat2.delete();
mat3.delete();
dest.delete();
inputArray.delete();
}
});
QUnit.test("Tone Mapping", function(assert) {
// Linear Mapper
{
let gamma = 1.0,
mapper = new cv.Tonemap(gamma),
mat = new cv.Mat([4, 4], cv.CV_32SC3),
dst = new cv.Mat();
assert.equal(mapper.getGamma(), 1);
mapper.process(mat, dst);
let size = dst.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dst.channels(), 3);
size.delete();
mapper.delete();
mat.delete();
dst.delete();
}
// Durand Mapper
{
let gamma = 1.0,
contrast = 4.0,
saturation = 1.0,
sigma_space = 2.0,
sigma_color = 2.0,
mapper = new cv.TonemapDurand(gamma, contrast, saturation, sigma_space, sigma_color);
assert.equal(mapper.getGamma(), gamma);
assert.equal(mapper.getContrast(), contrast);
assert.equal(mapper.getSaturation(), saturation);
assert.equal(mapper.getSigmaSpace(), sigma_space);
assert.equal(mapper.getSigmaColor(), sigma_color);
let mat = new cv.Mat([4, 4], cv.CV_32SC3),
dst = new cv.Mat();
mapper.process(mat, dst);
let size = dst.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dst.channels(), 3);
size.delete();
mapper.delete();
mat.delete();
dst.delete();
}
});
QUnit.test("Image Allignment", function(assert) {
// AlignMTB
{
let maxBits = 6,
excludeRange = 4,
cut = true,
mtb = new cv.AlignMTB(maxBits, excludeRange, cut);
assert.equal(mtb.getMaxBits(), maxBits);
assert.equal(mtb.getExcludeRange(), excludeRange);
assert.equal(mtb.getCut(), cut);
let mat = new cv.Mat([4, 4], cv.CV_8UC1),
mat2 = cv.Mat.eye([4, 4], cv.CV_8UC1),
point = mtb.calculateShift(mat, mat2),
dst = new cv.Mat(),
dst2 = new cv.Mat();
mtb.computeBitmaps(mat, dst, dst2);
let size = dst.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dst.channels(), 1);
size = dst2.size();
assert.equal(size.get(0), 4);
assert.equal(size.get(1), 4);
assert.equal(dst2.channels(), 1);
mtb.delete();
mat.delete();
mat2.delete();
dst.delete();
dst2.delete();
}
// Robertson Calibrate
{
let maxIter = 30,
threshold = 0.01;
let mat = cv.Mat.eye([4,4], cv.CV_8UC3),
mat2 = cv.Mat.ones([4,4], cv.CV_8UC3),
times = cv.Mat.ones([1,2], cv.CV_8UC3),
dst = new cv.Mat(),
inputVector = new cv.MatVector();
inputVector.push_back(mat);
inputVector.push_back(mat2);
let calib = new cv.CalibrateRobertson(maxIter, threshold);
assert.equal(calib.getMaxIter(), maxIter);
assert.equal(Math.abs(calib.getThreshold()-threshold) < 0.0001, true);
calib.process(inputVector, dst, times);
let size = dst.size();
assert.equal(size.get(0), 256);
assert.equal(size.get(1), 1);
assert.equal(dst.channels(), 3);
mat.delete();
mat2.delete();
dst.delete();
times.delete();
inputVector.delete();
}
//
{
}
});
/** @file
Private Header file for Usb Host Controller PEIM
Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
This program and the accompanying materials
are licensed and made available under the terms and conditions
of the BSD License which accompanies this distribution. The
full text of the license may be found at
http://opensource.org/licenses/bsd-license.php
THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
**/
#ifndef _EFI_EHCI_MEM_H_
#define _EFI_EHCI_MEM_H_
#include <Uefi.h>
#include <IndustryStandard/Pci22.h>
#define USB_HC_BIT(a) ((UINTN)(1 << (a)))
#define USB_HC_BIT_IS_SET(Data, Bit) \
((BOOLEAN)(((Data) & USB_HC_BIT(Bit)) == USB_HC_BIT(Bit)))
#define USB_HC_HIGH_32BIT(Addr64) \
((UINT32)(RShiftU64((UINTN)(Addr64), 32) & 0XFFFFFFFF))
typedef struct _USBHC_MEM_BLOCK USBHC_MEM_BLOCK;
struct _USBHC_MEM_BLOCK {
UINT8 *Bits; // Bit array to record which unit is allocated
UINTN BitsLen;
UINT8 *Buf;
UINT8 *BufHost;
UINTN BufLen; // Memory size in bytes
VOID *Mapping;
USBHC_MEM_BLOCK *Next;
};
//
// USBHC_MEM_POOL is used to manage the memory used by USB
// host controller. EHCI requires the control memory and transfer
// data to be on the same 4G memory.
//
typedef struct _USBHC_MEM_POOL {
BOOLEAN Check4G;
UINT32 Which4G;
USBHC_MEM_BLOCK *Head;
} USBHC_MEM_POOL;
//
// Memory allocation unit, must be 2^n, n>4
//
#define USBHC_MEM_UNIT 64
#define USBHC_MEM_UNIT_MASK (USBHC_MEM_UNIT - 1)
#define USBHC_MEM_DEFAULT_PAGES 16
#define USBHC_MEM_ROUND(Len) (((Len) + USBHC_MEM_UNIT_MASK) & (~USBHC_MEM_UNIT_MASK))
//
// Advance the byte and bit to the next bit, adjust byte accordingly.
//
#define NEXT_BIT(Byte, Bit) \
do { \
(Bit)++; \
if ((Bit) > 7) { \
(Byte)++; \
(Bit) = 0; \
} \
} while (0)
#endif
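
/*
 * Illustrative sketch (not part of this header): how USBHC_MEM_ROUND and
 * NEXT_BIT are typically used when carving USBHC_MEM_UNIT-sized units out of
 * a block and walking its allocation bit array. Plain C types stand in for
 * the UEFI UINTN/UINT8 typedefs, and UsbHcMarkAllocated() is a hypothetical
 * helper, not a routine from the PEIM itself.
 */
static void
UsbHcMarkAllocated (unsigned char *Bits, unsigned int StartUnit, unsigned int Len)
{
  /* Round the request up to whole 64-byte units, as the real allocator does:
     USBHC_MEM_ROUND (100) == 128, i.e. two USBHC_MEM_UNIT units. */
  unsigned int Units = USBHC_MEM_ROUND (Len) / USBHC_MEM_UNIT;
  unsigned int Byte  = StartUnit / 8;
  unsigned int Bit   = StartUnit % 8;
  unsigned int Index;

  for (Index = 0; Index < Units; Index++) {
    Bits[Byte] |= (unsigned char) (1 << Bit);  /* mark this unit as in use */
    NEXT_BIT (Byte, Bit);                      /* step to the next unit's bit */
  }
}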
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package htmlindex
import (
"testing"
"golang.org/x/text/encoding"
"golang.org/x/text/encoding/charmap"
"golang.org/x/text/encoding/internal/identifier"
"golang.org/x/text/encoding/unicode"
"golang.org/x/text/language"
)
func TestGet(t *testing.T) {
for i, tc := range []struct {
name string
canonical string
err error
}{
{"utf-8", "utf-8", nil},
{" utf-8 ", "utf-8", nil},
{" l5 ", "windows-1254", nil},
{"latin5 ", "windows-1254", nil},
{"latin 5", "", errInvalidName},
{"latin-5", "", errInvalidName},
} {
enc, err := Get(tc.name)
if err != tc.err {
t.Errorf("%d: error was %v; want %v", i, err, tc.err)
}
if err != nil {
continue
}
if got, err := Name(enc); got != tc.canonical {
t.Errorf("%d: Name(Get(%q)) = %q; want %q (%v)", i, tc.name, got, tc.canonical, err)
}
}
}
func TestTables(t *testing.T) {
for name, index := range nameMap {
got, err := Get(name)
if err != nil {
t.Errorf("%s:err: expected non-nil error", name)
}
if want := encodings[index]; got != want {
t.Errorf("%s:encoding: got %v; want %v", name, got, want)
}
mib, _ := got.(identifier.Interface).ID()
if mibMap[mib] != index {
t.Errorf("%s:mibMab: got %d; want %d", name, mibMap[mib], index)
}
}
}
func TestName(t *testing.T) {
for i, tc := range []struct {
desc string
enc encoding.Encoding
name string
err error
}{{
"defined encoding",
charmap.ISO8859_2,
"iso-8859-2",
nil,
}, {
"defined Unicode encoding",
unicode.UTF16(unicode.BigEndian, unicode.IgnoreBOM),
"utf-16be",
nil,
}, {
"undefined Unicode encoding in HTML standard",
unicode.UTF16(unicode.BigEndian, unicode.UseBOM),
"",
errUnsupported,
}, {
"undefined other encoding in HTML standard",
charmap.CodePage437,
"",
errUnsupported,
}, {
"unknown encoding",
encoding.Nop,
"",
errUnknown,
}} {
name, err := Name(tc.enc)
if name != tc.name || err != tc.err {
t.Errorf("%d:%s: got %q, %v; want %q, %v", i, tc.desc, name, err, tc.name, tc.err)
}
}
}
func TestLanguageDefault(t *testing.T) {
for _, tc := range []struct{ tag, want string }{
{"und", "windows-1252"}, // The default value.
{"ar", "windows-1256"},
{"ba", "windows-1251"},
{"be", "windows-1251"},
{"bg", "windows-1251"},
{"cs", "windows-1250"},
{"el", "iso-8859-7"},
{"et", "windows-1257"},
{"fa", "windows-1256"},
{"he", "windows-1255"},
{"hr", "windows-1250"},
{"hu", "iso-8859-2"},
{"ja", "shift_jis"},
{"kk", "windows-1251"},
{"ko", "euc-kr"},
{"ku", "windows-1254"},
{"ky", "windows-1251"},
{"lt", "windows-1257"},
{"lv", "windows-1257"},
{"mk", "windows-1251"},
{"pl", "iso-8859-2"},
{"ru", "windows-1251"},
{"sah", "windows-1251"},
{"sk", "windows-1250"},
{"sl", "iso-8859-2"},
{"sr", "windows-1251"},
{"tg", "windows-1251"},
{"th", "windows-874"},
{"tr", "windows-1254"},
{"tt", "windows-1251"},
{"uk", "windows-1251"},
{"vi", "windows-1258"},
{"zh-hans", "gb18030"},
{"zh-hant", "big5"},
// Variants and close approximates of the above.
{"ar_EG", "windows-1256"},
{"bs", "windows-1250"}, // Bosnian Latin maps to Croatian.
// Use default fallback in case of miss.
{"nl", "windows-1252"},
} {
if got := LanguageDefault(language.MustParse(tc.tag)); got != tc.want {
t.Errorf("LanguageDefault(%s) = %s; want %s", tc.tag, got, tc.want)
}
}
}
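
// Illustrative sketch (not part of the upstream file): a small extra test
// showing the package's typical call pattern end to end -- resolve a charset
// label with Get, recover its canonical name with Name, and ask for a
// language's legacy default encoding with LanguageDefault. Expected values
// are taken from the tables above.
func TestUsageSketch(t *testing.T) {
	enc, err := Get("latin5 ") // labels are matched after trimming whitespace
	if err != nil {
		t.Fatalf("Get: unexpected error: %v", err)
	}
	if name, _ := Name(enc); name != "windows-1254" {
		t.Errorf("Name = %q; want %q", name, "windows-1254")
	}
	if got := LanguageDefault(language.MustParse("ru")); got != "windows-1251" {
		t.Errorf("LanguageDefault(ru) = %q; want %q", got, "windows-1251")
	}
}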
/*
* Copyright (c) 1998, 2000, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.awt.print;
/**
* The <code>Pageable</code> implementation represents a set of
* pages to be printed. The <code>Pageable</code> object returns
* the total number of pages in the set as well as the
* {@link PageFormat} and {@link Printable} for a specified page.
* @see java.awt.print.PageFormat
* @see java.awt.print.Printable
*/
public interface Pageable {
/**
* This constant is returned from the
* {@link #getNumberOfPages() getNumberOfPages}
* method if a <code>Pageable</code> implementation does not know
* the number of pages in its set.
*/
int UNKNOWN_NUMBER_OF_PAGES = -1;
/**
* Returns the number of pages in the set.
* To enable advanced printing features,
* it is recommended that <code>Pageable</code>
* implementations return the true number of pages
* rather than the
* UNKNOWN_NUMBER_OF_PAGES constant.
* @return the number of pages in this <code>Pageable</code>.
*/
int getNumberOfPages();
/**
* Returns the <code>PageFormat</code> of the page specified by
* <code>pageIndex</code>.
* @param pageIndex the zero based index of the page whose
* <code>PageFormat</code> is being requested
* @return the <code>PageFormat</code> describing the size and
* orientation.
* @throws IndexOutOfBoundsException if
* the <code>Pageable</code> does not contain the requested
* page.
*/
PageFormat getPageFormat(int pageIndex)
throws IndexOutOfBoundsException;
/**
* Returns the <code>Printable</code> instance responsible for
* rendering the page specified by <code>pageIndex</code>.
* @param pageIndex the zero based index of the page whose
* <code>Printable</code> is being requested
* @return the <code>Printable</code> that renders the page.
* @throws IndexOutOfBoundsException if
* the <code>Pageable</code> does not contain the requested
* page.
*/
Printable getPrintable(int pageIndex)
throws IndexOutOfBoundsException;
}
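
/*
 * Illustrative sketch (not part of the JDK source): a minimal Pageable that
 * prints the same Printable on every page with one shared PageFormat. The
 * class name and fields are hypothetical; it only demonstrates the contract
 * documented above (page count, per-page format, per-page painter, and the
 * IndexOutOfBoundsException required for pages outside the set).
 */
class SinglePrintablePageable implements Pageable {
    private final int numberOfPages;
    private final PageFormat pageFormat;
    private final Printable printable;

    SinglePrintablePageable(int numberOfPages, PageFormat pageFormat, Printable printable) {
        this.numberOfPages = numberOfPages;
        this.pageFormat = pageFormat;
        this.printable = printable;
    }

    public int getNumberOfPages() {
        return numberOfPages;
    }

    public PageFormat getPageFormat(int pageIndex) {
        checkIndex(pageIndex);
        return pageFormat;
    }

    public Printable getPrintable(int pageIndex) {
        checkIndex(pageIndex);
        return printable;
    }

    private void checkIndex(int pageIndex) {
        if (pageIndex < 0 || pageIndex >= numberOfPages) {
            throw new IndexOutOfBoundsException("No such page: " + pageIndex);
        }
    }
}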
<?xml version="1.0" encoding="UTF-8"?>
<editor id="editor.imagick" enabled="false" openable="true" iconClass="icon-picture" previewProvider="true" formId="diaporama_box" text="328" title="329" icon="frame_image.png" className="IMagickPreviewer" mimes="pdf,svg,tif,tiff,psd,xcf,eps,xls,xlsx,ods,doc,docx,odt,ppt,pptx,odp,rtf,cr2" label="CONF_MESSAGE[Image Magick]" description="CONF_MESSAGE[Image Magick viewer supports PDF and various images formats preview. Requires external library to be installed on the server.]" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="file:../core.ajaxplorer/ajxp_registry.xsd">
<class_definition filename="plugins/editor.imagick/class.IMagickPreviewer.php" classname="IMagickPreviewer"/>
<client_settings>
<resources>
<js file="plugins/editor.imagick/class.IMagickPreviewer.js" className="IMagickPreviewer"/>
<i18n namespace="imagick_editor" path="plugins/editor.imagick/i18n"/>
</resources>
</client_settings>
<server_settings>
<global_param name="IMAGE_MAGICK_CONVERT" type="string" label="CONF_MESSAGE[Convert Path]" description="CONF_MESSAGE[Full path on the server to the 'convert' binary]" default="/usr/bin/convert" mandatory="true"/>
<global_param name="IM_THUMB_QUALITY" type="string" label="CONF_MESSAGE[Thumbs Options]" description="CONF_MESSAGE[Imagick Options used for generating the thumbnails]" default="-quality 65 -density 150 -resize 250 -flatten"/>
<global_param name="IM_VIEWER_QUALITY" type="string" label="CONF_MESSAGE[Images Options]" description="CONF_MESSAGE[Imagick Options used for generation the high scale images]" default="-quality 90 -density 250"/>
<global_param name="IM_CUSTOM_OPTIONS" type="string" label="CONF_MESSAGE[Shared Options]" description="CONF_MESSAGE[You can add any Image Magick options you want here, applied to both thumbs and big images]" default=""/>
<global_param name="ADDITIONAL_ENV_PATH" type="string" label="CONF_MESSAGE[Environment Path]" description="CONF_MESSAGE[Additional path to put in the environment PATH variable before calling the command. Can be necessary for Ghostscript on Mac for example.]" default=""/>
<global_param name="ONTHEFLY_THRESHOLD" type="integer" label="CONF_MESSAGE[Pregeneration Threshold]" description="CONF_MESSAGE[Threshold (in MegaByte) under which the pages are all generated when opening the document, and over which pages are generated on-demand only]" default="5"/>
<global_param name="UNOCONV" type="string" label="CONF_MESSAGE[Unoconv Path]" description="CONF_MESSAGE[Full path on the server to the 'unoconv' binary (see docs)]" default="" mandatory="false"/>
</server_settings>
<registry_contributions>
<hooks>
<serverCallback hookName="node.change" methodName="deleteImagickCache"></serverCallback>
</hooks>
<actions>
<action name="imagick_data_proxy" contentTypedProvider="true">
<processing>
<serverCallback methodName="switchAction"></serverCallback>
</processing>
</action>
<action name="delete_imagick_data">
<processing>
<serverCallback methodName="switchAction"></serverCallback>
</processing>
</action>
<action name="get_extracted_page">
<processing>
<serverCallback methodName="switchAction"></serverCallback>
</processing>
</action>
</actions>
</registry_contributions>
<dependencies>
<pluginResources pluginName="editor.diaporama"/>
<!-- Stream Wrapper must be implemented -->
<activePlugin pluginName="access.AJXP_STREAM_PROVIDER"/>
</dependencies>
</editor>
$LOAD_PATH.unshift File.expand_path('../../test', __FILE__)
require 'test_helper'
class LikableTest < Minitest::Test
def setup
@user = Factory(:user)
@friend = Factory(:user)
@movie = Factory(:movie)
end
def test_liked_by_returns_relevant_users
assert_empty @movie.liked_by
@user.like(@movie)
assert_includes @movie.liked_by, @user
refute_includes @movie.liked_by, @friend
@friend.like(@movie)
assert_includes @movie.liked_by, @friend
end
def test_liked_by_count_returns_an_accurate_count
assert_empty @movie.liked_by
@user.like(@movie)
assert_equal @movie.liked_by_count, 1
@friend.like(@movie)
assert_equal @movie.liked_by_count, 2
end
def teardown
Recommendable.redis.flushdb
end
end
/*
** Program-chaining function for Commodore platforms.
**
** This copy of the cc65 system library function makes smaller code by using
** Contiki's Personal File System (instead of POSIX) functions.
**
** 2016-03-16, Greg King
**
** This function exploits the program-chaining feature in CBM BASIC's ROM.
**
** CC65's CBM programs have a BASIC program stub. We start those programs by
** RUNning that stub; it SYSes to the Machine Language code. Normally, after
** the ML code exits, the BASIC ROM continues running the stub. But, it has
** no more statements; so, the program stops.
**
** This function puts the desired program's name and device number into a LOAD
** statement. Then, it points BASIC to that statement, so that the ROM will run
** that statement after this program quits. The ROM will load the next program,
** and will execute it (because the LOAD will be seen in a running program).
*/
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <device.h>
#include "cfs.h"
/* The struct below is a line of BASIC code. It sits in the LOWCODE segment
** to make sure that it won't be hidden by a ROM when BASIC is re-enabled.
** The line is:
** 0 CLR:LOAD""+"" ,01
** After this function has written into the line, it might look like this:
** 0 CLR:LOAD""+"program name" ,08
**
** When BASIC's LOAD command asks the Kernal to load a file, it gives the
** Kernal a pointer to a file-name string. CC65's CBM programs use that
** pointer to give a copy of the program's name to main()'s argv[0] parameter.
** But, when BASIC uses a string literal that is in a program, it points
** directly to that literal -- in the models that don't use banked RAM
** (Pet/CBM, VIC-20, and 64). The literal is overwritten by the next program
** that is loaded. So, argv[0] would point to machine code. String operations
** create a new result string -- even when that operation changes nothing. The
** result is put in the string space at the top of BASIC's memory. So, the ""+
** in this BASIC line guarantees that argv[0] will get a name from a safe place.
*/
#pragma data-name(push, "LOWCODE")
static struct line {
const char end_of_line; /* fake previous line */
const struct line* const next;
const unsigned line_num;
const char CLR_token, colon, LOAD_token, quotes[2], add_token, quote;
char name[21];
const char comma;
char unit[3];
} basic = {
'\0', &basic + 1, /* high byte of link must be non-zero */
0, 0x9C, ':', 0x93, "\"\"", 0xAA, '\"',
"\" ", /* format: "123:1234567890123456\"" */
',', "01"
};
#pragma data-name(pop)
/* These values are platform-specific. */
extern const void* vartab; /* points to BASIC program variables */
#pragma zpsym("vartab")
extern const void* memsize; /* points to top of BASIC RAM */
#pragma zpsym("memsize")
extern const struct line* txtptr; /* points to BASIC code */
#pragma zpsym("txtptr")
extern char basbuf[]; /* BASIC's input buffer */
extern void basbuf_len[];
#pragma zpsym("basbuf_len")
int __fastcall__
exec(const char *progname, const char *cmdline)
{
static int fd;
static unsigned char dv, n;
/* Exclude devices that can't load files. */
/* (Use hand optimization, to make smaller code.) */
dv = getcurrentdevice();
if(dv < 8 && __AX__ != 1 || __AX__ > 30) {
return _mappederrno(9); /* illegal device number */
}
utoa(dv, basic.unit, 10);
/* Tape files can be opened only once; skip this test for the Datasette. */
if(dv != 1) {
/* Don't try to run a program that can't be found. */
fd = cfs_open(progname, CFS_READ);
if(fd < 0) {
return -1;
}
cfs_close(fd);
}
n = 0;
do {
if((basic.name[n] = progname[n]) == '\0') {
break;
}
} while(++n < 20); /* truncate long names */
basic.name[n] = '\"';
/* This next part isn't needed by machines that put
** BASIC source and variables in different RAM banks.
*/
#if !defined(__C128__)
/* cc65 program loads might extend beyond the end of the RAM that is allowed
** for BASIC. Then, the LOAD statement would complain that it is "out of
** memory". Some pointers that say where to put BASIC program variables
** must be changed, so that we do not get that error. One pointer is
** changed here; a BASIC CLR statement changes the others. Some space is
** needed for the file-name string. Subtracting an entire RAM page allows
** better optimization of this expression.
*/
vartab = (char*)memsize - 0x0100;
#endif
/* Build the next program's argument list. */
basbuf[0] = 0x8F; /* REM token */
basbuf[1] = '\0';
if(cmdline != NULL) {
strncat(basbuf, cmdline, (size_t)basbuf_len - 2);
}
/* Tell the ROM where to find that BASIC program. */
txtptr = &basic;
/* (The return code, in ST [status], will be destroyed by LOAD.
** So, don't bother to set it here.)
*/
exit(__AX__);
}
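
/*
 * Illustrative sketch (not part of the library source): a hypothetical caller
 * that chains to another program on the current drive. On success exec()
 * never returns -- the BASIC ROM loads and runs the next program -- so any
 * return at all means the chain could not be started (for example, the file
 * was not found on a disk device). The program name and argument string
 * below are made up.
 */
static int
chain_to_next_part(void)
{
    exec("part2", "score=1200");

    /* Only reached if exec() failed to set up the chain. */
    return -1;
}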
/* ShipEvent.h
Copyright (c) 2014 by Michael Zahniser
Endless Sky is free software: you can redistribute it and/or modify it under the
terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later version.
Endless Sky is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
PARTICULAR PURPOSE. See the GNU General Public License for more details.
*/
#ifndef SHIP_EVENT_H_
#define SHIP_EVENT_H_
#include <memory>
class Government;
class Ship;
// A ShipEvent is anything of significance that one ship does to another. These
// events might change one government's attitude toward the player, or may
// result in a certain message or dialog being displayed.
class ShipEvent {
public:
enum {
// This is a "null" event.
NONE = 0,
// This ship did something good for the given ship.
ASSIST = (1 << 0),
// This ship scanned the given ship's cargo. This is not necessarily an
// act of aggression, but it implies mistrust or underhanded intentions.
// Also, a mission may fail if a ship of a certain government scans your
// cargo and discovers you are carrying contraband.
SCAN_CARGO = (1 << 1),
// This ship scanned the given ship's outfits. (If it turns out the
// outfits include something illegal, this may result in a fine or an
// outright attack on the ship that was scanned.)
SCAN_OUTFITS = (1 << 2),
// This ship damaged the given ship while not currently being an enemy
// of that ship's government; this will result in temporary animosities
// between the two governments. If a ship is "forbearing," it can only
// be "provoked" if its shields are below 90%.
PROVOKE = (1 << 3),
// This ship disabled the given ship. This will have a permanent effect
// on your reputation with the given government. This event is generated
// when a ship takes damage that switches it to being disabled.
DISABLE = (1 << 4),
// This ship boarded the given ship. This may either be an attempt to
// render assistance, or an attempt to capture the ship.
BOARD = (1 << 5),
// This ship captured the given ship.
CAPTURE = (1 << 6),
// This ship destroyed the given ship. If your projectiles hit a ship
// that is already exploding, that does not generate a "destroy" event;
// this is only for the one projectile that caused the explosion.
DESTROY = (1 << 7),
// This is a crime that is so bad that it not only has a negative effect
// on your reputation, but entirely wipes out any positive reputation
// you had with the given government, first.
ATROCITY = (1 << 8),
// This ship just jumped into a different system.
JUMP = (1 << 9)
};
public:
ShipEvent(const Government *actor, const std::shared_ptr<Ship> &target, int type);
ShipEvent(const std::shared_ptr<Ship> &actor, const std::shared_ptr<Ship> &target, int type);
const std::shared_ptr<Ship> &Actor() const;
const Government *ActorGovernment() const;
const std::shared_ptr<Ship> &Target() const;
const Government *TargetGovernment() const;
int Type() const;
private:
std::shared_ptr<Ship> actor;
const Government *actorGovernment = nullptr;
std::shared_ptr<Ship> target;
const Government *targetGovernment = nullptr;
int type;
};
#endif
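
/*
 * Illustrative sketch (not part of the game source): because the event types
 * above are single-bit flags, several of them can be OR'd into one mask and
 * tested against ShipEvent::Type() with bitwise operators. The helper below
 * is hypothetical; it only demonstrates that pattern.
 */
inline bool AffectsReputation(const ShipEvent &event)
{
	// Scans, provocations, disables, captures, destructions, and atrocities
	// are the interactions a government might react to.
	const int mask = ShipEvent::SCAN_CARGO | ShipEvent::SCAN_OUTFITS
		| ShipEvent::PROVOKE | ShipEvent::DISABLE | ShipEvent::CAPTURE
		| ShipEvent::DESTROY | ShipEvent::ATROCITY;
	return (event.Type() & mask) != 0;
}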
class Source::KnownPattern < Source
URLS = [
{
template: "https://www.youtube.com/feeds/videos.xml?channel_id=%s".freeze,
regex: Regexp.new(/https:\/\/www\.youtube\.com\/channel\/([^\/#\?]*)/)
},
{
template: "https://www.youtube.com/feeds/videos.xml?user=%s".freeze,
regex: Regexp.new(/https:\/\/www\.youtube\.com\/user\/([^\/#\?]*)/)
},
{
template: "https://www.youtube.com/feeds/videos.xml?playlist_id=%s".freeze,
regex: Regexp.new(/https:\/\/www\.youtube\.com\/playlist\?list=([^&]*)/)
},
{
template: "https://www.reddit.com/r/%s.rss".freeze,
regex: Regexp.new(/https:\/\/www\.reddit\.com\/r\/([^\/#\?]*)/)
},
{
template: "https://vimeo.com/%s/videos/rss".freeze,
regex: Regexp.new(/https:\/\/vimeo\.com\/([^\/#\?]*)/)
}
]
def find
if (match = URLS.find { |candidate| response.url =~ candidate[:regex] }) && $1
feed_url = match[:template] % $1
feed = create_from_url!(feed_url)
feeds.push(feed) if feed
elsif document? && youtube_domain? && channel_id
feed_url = "https://www.youtube.com/feeds/videos.xml?channel_id=#{channel_id}"
feed = create_from_url!(feed_url)
feeds.push(feed) if feed
end
end
def youtube_domain?
response.url.start_with?("https://www.youtube.com")
end
def channel_id
@channel_id ||= begin
id = document.css("meta[itemprop='channelId']")
if id.present?
id.first["content"]
end
end
end
end
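
# Illustrative sketch (not part of the class above): how the URLS table turns
# a recognised page URL into its feed URL -- each entry's regex captures an
# identifier that find() interpolates into the template. The page URL here is
# made up, and the snippet assumes the app environment is loaded so that
# Source::KnownPattern and its URLS constant are available.
page_url = "https://www.youtube.com/user/somechannel"
entry = Source::KnownPattern::URLS.find { |candidate| page_url =~ candidate[:regex] }
feed_url = entry && entry[:template] % $1
# => "https://www.youtube.com/feeds/videos.xml?user=somechannel"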
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
#include "kudu/util/trace.h"
#include <cstdint>
#include <cstring>
#include <iomanip>
#include <iostream>
#include <map>
#include <mutex>
#include <string>
#include <utility>
#include <vector>
#include <glog/logging.h>
#include "kudu/gutil/strings/substitute.h"
#include "kudu/gutil/walltime.h"
#include "kudu/util/jsonwriter.h"
#include "kudu/util/logging.h"
#include "kudu/util/memory/arena.h"
using std::pair;
using std::string;
using std::vector;
using strings::internal::SubstituteArg;
namespace kudu {
__thread Trace* Trace::threadlocal_trace_;
Trace::Trace()
: arena_(new ThreadSafeArena(1024)),
entries_head_(nullptr),
entries_tail_(nullptr) {
// We expect small allocations from our Arena so no need to have
// a large arena component. Small allocations are more likely to
// come out of thread cache and be fast.
arena_->SetMaxBufferSize(4096);
}
Trace::~Trace() {
}
// Struct which precedes each entry in the trace.
struct TraceEntry {
MicrosecondsInt64 timestamp_micros;
// The source file and line number which generated the trace message.
const char* file_path;
int line_number;
uint32_t message_len;
TraceEntry* next;
// The actual trace message follows the entry header.
char* message() {
return reinterpret_cast<char*>(this) + sizeof(*this);
}
};
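// Layout of a single entry as allocated by Trace::NewEntry() below (one
// contiguous arena allocation of sizeof(TraceEntry) + message_len bytes;
// diagram is illustrative):
//
//   +---------------------+------------------------------+
//   | TraceEntry (header) | message bytes (message_len)  |
//   +---------------------+------------------------------+
//   ^this                 ^this + sizeof(TraceEntry) == message()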
// Get the part of filepath after the last path separator.
// (Doesn't modify filepath, contrary to basename() in libgen.h.)
// Borrowed from glog.
static const char* const_basename(const char* filepath) {
const char* base = strrchr(filepath, '/');
#ifdef OS_WINDOWS // Look for either path separator in Windows
if (!base)
base = strrchr(filepath, '\\');
#endif
return base ? (base+1) : filepath;
}
void Trace::SubstituteAndTrace(const char* file_path,
int line_number,
StringPiece format,
const SubstituteArg& arg0, const SubstituteArg& arg1,
const SubstituteArg& arg2, const SubstituteArg& arg3,
const SubstituteArg& arg4, const SubstituteArg& arg5,
const SubstituteArg& arg6, const SubstituteArg& arg7,
const SubstituteArg& arg8, const SubstituteArg& arg9) {
const SubstituteArg* const args_array[] = {
&arg0, &arg1, &arg2, &arg3, &arg4, &arg5, &arg6, &arg7, &arg8, &arg9, nullptr
};
int msg_len = strings::internal::SubstitutedSize(format, args_array);
TraceEntry* entry = NewEntry(msg_len, file_path, line_number);
SubstituteToBuffer(format, args_array, entry->message());
AddEntry(entry);
}
TraceEntry* Trace::NewEntry(int msg_len, const char* file_path, int line_number) {
int size = sizeof(TraceEntry) + msg_len;
uint8_t* dst = reinterpret_cast<uint8_t*>(arena_->AllocateBytes(size));
TraceEntry* entry = reinterpret_cast<TraceEntry*>(dst);
entry->timestamp_micros = GetCurrentTimeMicros();
entry->message_len = msg_len;
entry->file_path = file_path;
entry->line_number = line_number;
return entry;
}
void Trace::AddEntry(TraceEntry* entry) {
std::lock_guard<simple_spinlock> l(lock_);
entry->next = nullptr;
if (entries_tail_ != nullptr) {
entries_tail_->next = entry;
} else {
DCHECK(entries_head_ == nullptr);
entries_head_ = entry;
}
entries_tail_ = entry;
}
void Trace::Dump(std::ostream* out, int flags) const {
// Gather a copy of the list of entries under the lock. This is fast
// enough that we aren't worried about stalling concurrent tracers
// (whereas doing the logging itself while holding the lock might be
// too slow, if the output stream is a file, for example).
vector<TraceEntry*> entries;
vector<pair<StringPiece, scoped_refptr<Trace>>> child_traces;
{
std::lock_guard<simple_spinlock> l(lock_);
for (TraceEntry* cur = entries_head_;
cur != nullptr;
cur = cur->next) {
entries.push_back(cur);
}
child_traces = child_traces_;
}
// Save original flags.
std::ios::fmtflags save_flags(out->flags());
int64_t prev_usecs = 0;
for (TraceEntry* e : entries) {
// Log format borrowed from glog/logging.cc
int64_t usecs_since_prev = 0;
if (prev_usecs != 0) {
usecs_since_prev = e->timestamp_micros - prev_usecs;
}
prev_usecs = e->timestamp_micros;
using std::setw;
*out << FormatTimestampForLog(e->timestamp_micros);
*out << ' ';
if (flags & INCLUDE_TIME_DELTAS) {
out->fill(' ');
*out << "(+" << setw(6) << usecs_since_prev << "us) ";
}
*out << const_basename(e->file_path) << ':' << e->line_number
<< "] ";
out->write(reinterpret_cast<char*>(e) + sizeof(TraceEntry),
e->message_len);
*out << std::endl;
}
for (const auto& entry : child_traces) {
const auto& t = entry.second;
*out << "Related trace '" << entry.first << "':" << std::endl;
*out << t->DumpToString(flags & (~INCLUDE_METRICS));
}
if (flags & INCLUDE_METRICS) {
*out << "Metrics: " << MetricsAsJSON();
}
// Restore stream flags.
out->flags(save_flags);
}
string Trace::DumpToString(int flags) const {
std::ostringstream s;
Dump(&s, flags);
return s.str();
}
string Trace::MetricsAsJSON() const {
std::ostringstream s;
JsonWriter jw(&s, JsonWriter::COMPACT);
MetricsToJSON(&jw);
return s.str();
}
void Trace::MetricsToJSON(JsonWriter* jw) const {
// Convert into a map with 'std::string' keys instead of 'const char*'
// keys, so that the results are in a consistent (sorted) order.
std::map<string, int64_t> counters;
for (const auto& entry : metrics_.Get()) {
counters[entry.first] = entry.second;
}
jw->StartObject();
for (const auto& e : counters) {
jw->String(e.first);
jw->Int64(e.second);
}
vector<pair<StringPiece, scoped_refptr<Trace>>> child_traces;
{
std::lock_guard<simple_spinlock> l(lock_);
child_traces = child_traces_;
}
if (!child_traces.empty()) {
jw->String("child_traces");
jw->StartArray();
for (const auto& e : child_traces) {
jw->StartArray();
jw->String(e.first.data(), e.first.size());
e.second->MetricsToJSON(jw);
jw->EndArray();
}
jw->EndArray();
}
jw->EndObject();
}
void Trace::DumpCurrentTrace() {
Trace* t = CurrentTrace();
if (t == nullptr) {
LOG(INFO) << "No trace is currently active.";
return;
}
t->Dump(&std::cerr, true);
}
void Trace::AddChildTrace(StringPiece label, Trace* child_trace) {
CHECK(arena_->RelocateStringPiece(label, &label));
std::lock_guard<simple_spinlock> l(lock_);
scoped_refptr<Trace> ptr(child_trace);
child_traces_.emplace_back(label, ptr);
}
std::vector<std::pair<StringPiece, scoped_refptr<Trace>>> Trace::ChildTraces() const {
std::lock_guard<simple_spinlock> l(lock_);
return child_traces_;
}
} // namespace kudu
| {
"pile_set_name": "Github"
} |
{
"info" : {
"version" : 1,
"author" : "xcode"
}
} | {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
namespace UnityEngine.Recorder
{
[Flags]
public enum EImageSource
{
ActiveCameras = 1,
SceneView = 2,
MainCamera = 4,
TaggedCamera = 8,
RenderTexture = 16,
}
public enum FrameRateMode
{
Variable,
Constant,
}
public enum DurationMode
{
Manual,
SingleFrame,
FrameInterval,
TimeInterval
}
public class InputFilter
{
public InputFilter(string title, Type type)
{
this.title = title;
this.type = type;
}
public string title { get; private set; }
public Type type { get; private set; }
}
public class TInputFilter<T> : InputFilter
{
public TInputFilter(string title) : base( title, typeof(T))
{
}
}
public struct InputGroupFilter
{
public string title;
public List<InputFilter> typesFilter;
}
/// <summary>
/// What is this: Base settings class for all Recorders.
/// Motivation : All recorders share a basic common set of settings and some of them are known to the
/// recording framework, so we need a base class to allow the framework to access these settings.
/// Notes:
/// - Some of the fields in this class actually don't apply to ALL recorders but are so common that they are included
/// here for convenience.
/// </summary>
[ExecuteInEditMode]
public abstract class RecorderSettings : ScriptableObject
{
[SerializeField]
string m_AssetID;
public int m_CaptureEveryNthFrame = 1;
public FrameRateMode m_FrameRateMode = FrameRateMode.Constant;
[Range(1, 120)]
public double m_FrameRate = 30.0;
public EFrameRate m_FrameRateExact = EFrameRate.FR_CUSTOM;
public int m_StartFrame;
public int m_EndFrame = 10;
public float m_StartTime = 0.0f;
public float m_EndTime = 1.0f;
public DurationMode m_DurationMode;
public bool m_SynchFrameRate = true;
public FileNameGenerator m_BaseFileName;
public OutputPath m_DestinationPath;
[SerializeField]
private InputSettingsList m_InputsSettings = new InputSettingsList();
public InputSettingsList inputsSettings
{
get { return m_InputsSettings; }
}
[SerializeField]
string m_RecorderTypeName;
public string assetID
{
get { return m_AssetID; }
set
{
m_AssetID = value;
m_InputsSettings.ownerRecorderSettingsAssetId = value;
}
}
public RecorderSettings()
{
m_DestinationPath.root = OutputPath.ERoot.Current;
m_DestinationPath.leaf = "Recordings";
}
public Type recorderType
{
get
{
if (string.IsNullOrEmpty(m_RecorderTypeName))
return null;
return Type.GetType(m_RecorderTypeName);
}
set { m_RecorderTypeName = value == null ? string.Empty : value.AssemblyQualifiedName; }
}
public bool fixedDuration
{
get { return m_DurationMode != DurationMode.Manual; }
}
public virtual bool ValidityCheck( List<string> errors )
{
bool ok = true;
if (m_InputsSettings != null)
{
var inputErrors = new List<string>();
if (!m_InputsSettings.ValidityCheck(inputErrors))
{
errors.Add("Input settings are incorrect.");
ok = false;
}
}
if (Math.Abs(m_FrameRate) <= float.Epsilon)
{
ok = false;
errors.Add("Invalid frame rate.");
}
if (m_CaptureEveryNthFrame <= 0)
{
ok = false;
errors.Add("Invalid frame skip value");
}
if (!isPlatformSupported)
{
errors.Add("Current platform is not supported");
ok = false;
}
return ok;
}
public virtual bool isPlatformSupported
{
get { return true; }
}
public virtual void OnEnable()
{
m_InputsSettings.OnEnable(m_AssetID);
BindSceneInputSettings();
}
public void BindSceneInputSettings()
{
if (!m_InputsSettings.hasBrokenBindings)
return;
m_InputsSettings.Rebuild();
#if UNITY_EDITOR
if (m_InputsSettings.hasBrokenBindings)
{
// The only supported case is when scene-stored input settings are missing (for example: a new scene was loaded that does not contain the scene-stored inputs).
m_InputsSettings.RepareMissingBindings();
}
#endif
if (m_InputsSettings.hasBrokenBindings)
Debug.LogError("Recorder: missing input settings");
}
public virtual void OnDestroy()
{
if (m_InputsSettings != null)
m_InputsSettings.OnDestroy();
}
public abstract List<RecorderInputSetting> GetDefaultInputSettings();
public T NewInputSettingsObj<T>(string title) where T : class
{
return NewInputSettingsObj(typeof(T), title) as T;
}
public virtual RecorderInputSetting NewInputSettingsObj(Type type, string title)
{
var obj = (RecorderInputSetting)ScriptableObject.CreateInstance(type);
obj.m_DisplayName = title;
obj.name = Guid.NewGuid().ToString();
return obj;
}
public abstract List<InputGroupFilter> GetInputGroups();
/// <summary>
/// Allows recorder-specific settings logic to correct/adjust settings that might be missed by its editor.
/// </summary>
/// <returns>true if settings were changed</returns>
public virtual bool SelfAdjustSettings()
{
return false;
}
}
}
| {
"pile_set_name": "Github"
} |
function rcnn_cache_pool5_features(imdb, varargin)
% rcnn_cache_pool5_features(imdb, varargin)
% Computes pool5 features and saves them to disk. We compute
% pool5 features because we can easily compute fc6 and fc7
% features from them on-the-fly and they tend to compress better
% than fc6 or fc7 features due to greater sparsity.
%
% Keys that can be passed in:
%
% start Index of the first image in imdb to process
% end Index of the last image in imdb to process
% crop_mode Crop mode (either 'warp' or 'square')
% crop_padding Amount of padding in crop
% net_file Path to the Caffe CNN to use
% cache_name Path to the precomputed feature cache
% AUTORIGHTS
% ---------------------------------------------------------
% Copyright (c) 2014, Ross Girshick
%
% This file is part of the R-CNN code and is available
% under the terms of the Simplified BSD License provided in
% LICENSE. Please retain this notice and LICENSE if you use
% this file (or any portion of it) in your project.
% ---------------------------------------------------------
ip = inputParser;
ip.addRequired('imdb', @isstruct);
ip.addOptional('start', 1, @isscalar);
ip.addOptional('end', 0, @isscalar);
ip.addOptional('crop_mode', 'warp', @isstr);
ip.addOptional('crop_padding', 16, @isscalar);
ip.addOptional('net_file', ...
'./data/caffe_nets/finetune_voc_2007_trainval_iter_70k', ...
@isstr);
ip.addOptional('cache_name', ...
'v1_finetune_voc_2007_trainval_iter_70000', @isstr);
ip.parse(imdb, varargin{:});
opts = ip.Results;
opts.net_def_file = './model-defs/rcnn_batch_256_output_pool5.prototxt';
image_ids = imdb.image_ids;
if opts.end == 0
opts.end = length(image_ids);
end
% Where to save feature cache
opts.output_dir = ['./feat_cache/' opts.cache_name '/' imdb.name '/'];
mkdir_if_missing(opts.output_dir);
% Log feature extraction
timestamp = datestr(datevec(now()), 'dd.mmm.yyyy:HH.MM.SS');
diary_file = [opts.output_dir 'rcnn_cache_pool5_features_' timestamp '.txt'];
diary(diary_file);
fprintf('Logging output in %s\n', diary_file);
fprintf('\n\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n');
fprintf('Feature caching options:\n');
disp(opts);
fprintf('~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n');
% load the region of interest database
roidb = imdb.roidb_func(imdb);
rcnn_model = rcnn_create_model(opts.net_def_file, opts.net_file);
rcnn_model = rcnn_load_model(rcnn_model);
rcnn_model.detectors.crop_mode = opts.crop_mode;
rcnn_model.detectors.crop_padding = opts.crop_padding;
total_time = 0;
count = 0;
for i = opts.start:opts.end
fprintf('%s: cache features: %d/%d\n', procid(), i, opts.end);
save_file = [opts.output_dir image_ids{i} '.mat'];
if exist(save_file, 'file') ~= 0
fprintf(' [already exists]\n');
continue;
end
count = count + 1;
tot_th = tic;
d = roidb.rois(i);
im = imread(imdb.image_at(i));
th = tic;
d.feat = rcnn_features(im, d.boxes, rcnn_model);
fprintf(' [features: %.3fs]\n', toc(th));
th = tic;
save(save_file, '-struct', 'd');
fprintf(' [saving: %.3fs]\n', toc(th));
total_time = total_time + toc(tot_th);
fprintf(' [avg time: %.3fs (total: %.3fs)]\n', ...
total_time/count, total_time);
end
| {
"pile_set_name": "Github"
} |
let%expect_test _ =
print_string "hello";
[%expect "hello"]
;;
| {
"pile_set_name": "Github"
} |
{ Debug server options form
Copyright (C) 2009 Michael Van Canneyt ([email protected])
This source is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2 of the License, or (at your option)
any later version.
This code is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
details.
A copy of the GNU General Public License is available on the World Wide Web
at <http://www.gnu.org/copyleft/gpl.html>. You can also obtain it by writing
to the Free Software Foundation, Inc., 51 Franklin Street - Fifth Floor,
Boston, MA 02110-1335, USA.
}
unit frmOptions;
{$mode objfpc}{$H+}
interface
uses
Forms, ButtonPanel, StdCtrls;
type
{ TOptionsForm }
TOptionsForm = class(TForm)
ButtonPanel1: TButtonPanel;
CBNewVisible: TCheckBox;
CBCleanLogOnNewProcess: TCheckBox;
CBShowOnStartUp: TCheckBox;
CBShowOnMessage: TCheckBox;
CBNewAtBottom: TCheckBox;
GBWindow: TGroupBox;
GBMessages: TGroupBox;
private
function GetB(AIndex: integer): Boolean;
function GetCB(AIndex: Integer): TCheckBox;
procedure SetB(AIndex: integer; const AValue: Boolean);
public
Property ShowOnStartup : Boolean Index 0 Read GetB Write SetB;
Property ShowOnMessage : Boolean Index 1 Read GetB Write SetB;
Property NewMessageAtBottom : Boolean Index 2 Read GetB Write SetB;
Property NewMessageVisible: Boolean Index 3 Read GetB Write SetB;
Property CleanLogOnNewProcess: Boolean Index 4 Read GetB Write SetB;
end;
var
OptionsForm: TOptionsForm;
implementation
{$R *.lfm}
{ TOptionsForm }
function TOptionsForm.GetCB(AIndex : Integer) : TCheckBox;
begin
Case AIndex of
0 : Result:=CBShowOnStartUp;
1 : Result:=CBShowOnMessage;
2 : Result:=CBNewAtBottom;
3 : Result:=CBNewVisible;
4 : Result:=CBCleanLogOnNewProcess;
end;
end;
function TOptionsForm.GetB(AIndex: integer): Boolean;
begin
Result:=GetCb(AIndex).Checked;
end;
procedure TOptionsForm.SetB(AIndex: integer; const AValue: Boolean);
begin
GetCb(AIndex).Checked:=AValue;
end;
end.
| {
"pile_set_name": "Github"
} |
module.exports = function(it){
if(typeof it != 'function')throw TypeError(it + ' is not a function!');
return it;
}; | {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
/* Copyright 2019 Linaro, Ltd, Rob Herring <[email protected]> */
#include <linux/atomic.h>
#include <linux/bitfield.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/iopoll.h>
#include <linux/io-pgtable.h>
#include <linux/iommu.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/shmem_fs.h>
#include <linux/sizes.h>
#include "panfrost_device.h"
#include "panfrost_mmu.h"
#include "panfrost_gem.h"
#include "panfrost_features.h"
#include "panfrost_regs.h"
#define mmu_write(dev, reg, data) writel(data, dev->iomem + reg)
#define mmu_read(dev, reg) readl(dev->iomem + reg)
static int wait_ready(struct panfrost_device *pfdev, u32 as_nr)
{
int ret;
u32 val;
/* Wait for the MMU status to indicate there is no active command, in
* case one is pending. */
ret = readl_relaxed_poll_timeout_atomic(pfdev->iomem + AS_STATUS(as_nr),
val, !(val & AS_STATUS_AS_ACTIVE), 10, 1000);
if (ret)
dev_err(pfdev->dev, "AS_ACTIVE bit stuck\n");
return ret;
}
static int write_cmd(struct panfrost_device *pfdev, u32 as_nr, u32 cmd)
{
int status;
/* write AS_COMMAND when MMU is ready to accept another command */
status = wait_ready(pfdev, as_nr);
if (!status)
mmu_write(pfdev, AS_COMMAND(as_nr), cmd);
return status;
}
static void lock_region(struct panfrost_device *pfdev, u32 as_nr,
u64 iova, size_t size)
{
u8 region_width;
u64 region = iova & PAGE_MASK;
/*
* fls returns:
* 1 .. 32
*
* 10 + fls(num_pages)
* results in the range (11 .. 42)
*/
size = round_up(size, PAGE_SIZE);
region_width = 10 + fls(size >> PAGE_SHIFT);
if ((size >> PAGE_SHIFT) != (1ul << (region_width - 11))) {
/* not pow2, so must go up to the next pow2 */
region_width += 1;
}
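/*
 * Worked example of the width calculation above (illustrative, assuming
 * 4 KiB pages, i.e. PAGE_SHIFT == 12):
 *   size = 2 MiB -> 512 pages, fls(512) = 10 -> region_width = 20
 *                   (512 == 1 << (20 - 11), already a power of two, no bump)
 *   size = 3 MiB -> 768 pages, fls(768) = 10 -> region_width = 20,
 *                   but 768 != 1 << 9, so it is bumped to 21.
 */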
region |= region_width;
/* Lock the region that needs to be updated */
mmu_write(pfdev, AS_LOCKADDR_LO(as_nr), region & 0xFFFFFFFFUL);
mmu_write(pfdev, AS_LOCKADDR_HI(as_nr), (region >> 32) & 0xFFFFFFFFUL);
write_cmd(pfdev, as_nr, AS_COMMAND_LOCK);
}
static int mmu_hw_do_operation_locked(struct panfrost_device *pfdev, int as_nr,
u64 iova, size_t size, u32 op)
{
if (as_nr < 0)
return 0;
if (op != AS_COMMAND_UNLOCK)
lock_region(pfdev, as_nr, iova, size);
/* Run the MMU operation */
write_cmd(pfdev, as_nr, op);
/* Wait for the flush to complete */
return wait_ready(pfdev, as_nr);
}
static int mmu_hw_do_operation(struct panfrost_device *pfdev,
struct panfrost_mmu *mmu,
u64 iova, size_t size, u32 op)
{
int ret;
spin_lock(&pfdev->as_lock);
ret = mmu_hw_do_operation_locked(pfdev, mmu->as, iova, size, op);
spin_unlock(&pfdev->as_lock);
return ret;
}
static void panfrost_mmu_enable(struct panfrost_device *pfdev, struct panfrost_mmu *mmu)
{
int as_nr = mmu->as;
struct io_pgtable_cfg *cfg = &mmu->pgtbl_cfg;
u64 transtab = cfg->arm_mali_lpae_cfg.transtab;
u64 memattr = cfg->arm_mali_lpae_cfg.memattr;
mmu_hw_do_operation_locked(pfdev, as_nr, 0, ~0UL, AS_COMMAND_FLUSH_MEM);
mmu_write(pfdev, AS_TRANSTAB_LO(as_nr), transtab & 0xffffffffUL);
mmu_write(pfdev, AS_TRANSTAB_HI(as_nr), transtab >> 32);
/* Need to revisit mem attrs.
* NC is the default, Mali driver is inner WT.
*/
mmu_write(pfdev, AS_MEMATTR_LO(as_nr), memattr & 0xffffffffUL);
mmu_write(pfdev, AS_MEMATTR_HI(as_nr), memattr >> 32);
write_cmd(pfdev, as_nr, AS_COMMAND_UPDATE);
}
static void panfrost_mmu_disable(struct panfrost_device *pfdev, u32 as_nr)
{
mmu_hw_do_operation_locked(pfdev, as_nr, 0, ~0UL, AS_COMMAND_FLUSH_MEM);
mmu_write(pfdev, AS_TRANSTAB_LO(as_nr), 0);
mmu_write(pfdev, AS_TRANSTAB_HI(as_nr), 0);
mmu_write(pfdev, AS_MEMATTR_LO(as_nr), 0);
mmu_write(pfdev, AS_MEMATTR_HI(as_nr), 0);
write_cmd(pfdev, as_nr, AS_COMMAND_UPDATE);
}
u32 panfrost_mmu_as_get(struct panfrost_device *pfdev, struct panfrost_mmu *mmu)
{
int as;
spin_lock(&pfdev->as_lock);
as = mmu->as;
if (as >= 0) {
int en = atomic_inc_return(&mmu->as_count);
/*
* AS can be retained by active jobs or a perfcnt context,
* hence the '+ 1' here.
*/
WARN_ON(en >= (NUM_JOB_SLOTS + 1));
list_move(&mmu->list, &pfdev->as_lru_list);
goto out;
}
/* Check for a free AS */
as = ffz(pfdev->as_alloc_mask);
if (!(BIT(as) & pfdev->features.as_present)) {
struct panfrost_mmu *lru_mmu;
list_for_each_entry_reverse(lru_mmu, &pfdev->as_lru_list, list) {
if (!atomic_read(&lru_mmu->as_count))
break;
}
WARN_ON(&lru_mmu->list == &pfdev->as_lru_list);
list_del_init(&lru_mmu->list);
as = lru_mmu->as;
WARN_ON(as < 0);
lru_mmu->as = -1;
}
/* Assign the free or reclaimed AS to the FD */
mmu->as = as;
set_bit(as, &pfdev->as_alloc_mask);
atomic_set(&mmu->as_count, 1);
list_add(&mmu->list, &pfdev->as_lru_list);
dev_dbg(pfdev->dev, "Assigned AS%d to mmu %p, alloc_mask=%lx", as, mmu, pfdev->as_alloc_mask);
panfrost_mmu_enable(pfdev, mmu);
out:
spin_unlock(&pfdev->as_lock);
return as;
}
void panfrost_mmu_as_put(struct panfrost_device *pfdev, struct panfrost_mmu *mmu)
{
atomic_dec(&mmu->as_count);
WARN_ON(atomic_read(&mmu->as_count) < 0);
}
void panfrost_mmu_reset(struct panfrost_device *pfdev)
{
struct panfrost_mmu *mmu, *mmu_tmp;
spin_lock(&pfdev->as_lock);
pfdev->as_alloc_mask = 0;
list_for_each_entry_safe(mmu, mmu_tmp, &pfdev->as_lru_list, list) {
mmu->as = -1;
atomic_set(&mmu->as_count, 0);
list_del_init(&mmu->list);
}
spin_unlock(&pfdev->as_lock);
mmu_write(pfdev, MMU_INT_CLEAR, ~0);
mmu_write(pfdev, MMU_INT_MASK, ~0);
}
static size_t get_pgsize(u64 addr, size_t size)
{
if (addr & (SZ_2M - 1) || size < SZ_2M)
return SZ_4K;
return SZ_2M;
}
static void panfrost_mmu_flush_range(struct panfrost_device *pfdev,
struct panfrost_mmu *mmu,
u64 iova, size_t size)
{
if (mmu->as < 0)
return;
pm_runtime_get_noresume(pfdev->dev);
/* Flush the PTs only if we're already awake */
if (pm_runtime_active(pfdev->dev))
mmu_hw_do_operation(pfdev, mmu, iova, size, AS_COMMAND_FLUSH_PT);
pm_runtime_put_sync_autosuspend(pfdev->dev);
}
static int mmu_map_sg(struct panfrost_device *pfdev, struct panfrost_mmu *mmu,
u64 iova, int prot, struct sg_table *sgt)
{
unsigned int count;
struct scatterlist *sgl;
struct io_pgtable_ops *ops = mmu->pgtbl_ops;
u64 start_iova = iova;
for_each_sg(sgt->sgl, sgl, sgt->nents, count) {
unsigned long paddr = sg_dma_address(sgl);
size_t len = sg_dma_len(sgl);
dev_dbg(pfdev->dev, "map: as=%d, iova=%llx, paddr=%lx, len=%zx", mmu->as, iova, paddr, len);
while (len) {
size_t pgsize = get_pgsize(iova | paddr, len);
ops->map(ops, iova, paddr, pgsize, prot);
iova += pgsize;
paddr += pgsize;
len -= pgsize;
}
}
panfrost_mmu_flush_range(pfdev, mmu, start_iova, iova - start_iova);
return 0;
}
int panfrost_mmu_map(struct panfrost_gem_mapping *mapping)
{
struct panfrost_gem_object *bo = mapping->obj;
struct drm_gem_object *obj = &bo->base.base;
struct panfrost_device *pfdev = to_panfrost_device(obj->dev);
struct sg_table *sgt;
int prot = IOMMU_READ | IOMMU_WRITE;
if (WARN_ON(mapping->active))
return 0;
if (bo->noexec)
prot |= IOMMU_NOEXEC;
sgt = drm_gem_shmem_get_pages_sgt(obj);
if (WARN_ON(IS_ERR(sgt)))
return PTR_ERR(sgt);
mmu_map_sg(pfdev, mapping->mmu, mapping->mmnode.start << PAGE_SHIFT,
prot, sgt);
mapping->active = true;
return 0;
}
void panfrost_mmu_unmap(struct panfrost_gem_mapping *mapping)
{
struct panfrost_gem_object *bo = mapping->obj;
struct drm_gem_object *obj = &bo->base.base;
struct panfrost_device *pfdev = to_panfrost_device(obj->dev);
struct io_pgtable_ops *ops = mapping->mmu->pgtbl_ops;
u64 iova = mapping->mmnode.start << PAGE_SHIFT;
size_t len = mapping->mmnode.size << PAGE_SHIFT;
size_t unmapped_len = 0;
if (WARN_ON(!mapping->active))
return;
dev_dbg(pfdev->dev, "unmap: as=%d, iova=%llx, len=%zx",
mapping->mmu->as, iova, len);
while (unmapped_len < len) {
size_t unmapped_page;
size_t pgsize = get_pgsize(iova, len - unmapped_len);
if (ops->iova_to_phys(ops, iova)) {
unmapped_page = ops->unmap(ops, iova, pgsize, NULL);
WARN_ON(unmapped_page != pgsize);
}
iova += pgsize;
unmapped_len += pgsize;
}
panfrost_mmu_flush_range(pfdev, mapping->mmu,
mapping->mmnode.start << PAGE_SHIFT, len);
mapping->active = false;
}
static void mmu_tlb_inv_context_s1(void *cookie)
{}
static void mmu_tlb_sync_context(void *cookie)
{
//struct panfrost_device *pfdev = cookie;
// TODO: Wait 1000 GPU cycles for HW_ISSUE_6367/T60X
}
static void mmu_tlb_flush_walk(unsigned long iova, size_t size, size_t granule,
void *cookie)
{
mmu_tlb_sync_context(cookie);
}
static void mmu_tlb_flush_leaf(unsigned long iova, size_t size, size_t granule,
void *cookie)
{
mmu_tlb_sync_context(cookie);
}
static const struct iommu_flush_ops mmu_tlb_ops = {
.tlb_flush_all = mmu_tlb_inv_context_s1,
.tlb_flush_walk = mmu_tlb_flush_walk,
.tlb_flush_leaf = mmu_tlb_flush_leaf,
};
int panfrost_mmu_pgtable_alloc(struct panfrost_file_priv *priv)
{
struct panfrost_mmu *mmu = &priv->mmu;
struct panfrost_device *pfdev = priv->pfdev;
INIT_LIST_HEAD(&mmu->list);
mmu->as = -1;
mmu->pgtbl_cfg = (struct io_pgtable_cfg) {
.pgsize_bitmap = SZ_4K | SZ_2M,
.ias = FIELD_GET(0xff, pfdev->features.mmu_features),
.oas = FIELD_GET(0xff00, pfdev->features.mmu_features),
.tlb = &mmu_tlb_ops,
.iommu_dev = pfdev->dev,
};
mmu->pgtbl_ops = alloc_io_pgtable_ops(ARM_MALI_LPAE, &mmu->pgtbl_cfg,
priv);
if (!mmu->pgtbl_ops)
return -EINVAL;
return 0;
}
void panfrost_mmu_pgtable_free(struct panfrost_file_priv *priv)
{
struct panfrost_device *pfdev = priv->pfdev;
struct panfrost_mmu *mmu = &priv->mmu;
spin_lock(&pfdev->as_lock);
if (mmu->as >= 0) {
pm_runtime_get_noresume(pfdev->dev);
if (pm_runtime_active(pfdev->dev))
panfrost_mmu_disable(pfdev, mmu->as);
pm_runtime_put_autosuspend(pfdev->dev);
clear_bit(mmu->as, &pfdev->as_alloc_mask);
clear_bit(mmu->as, &pfdev->as_in_use_mask);
list_del(&mmu->list);
}
spin_unlock(&pfdev->as_lock);
free_io_pgtable_ops(mmu->pgtbl_ops);
}
static struct panfrost_gem_mapping *
addr_to_mapping(struct panfrost_device *pfdev, int as, u64 addr)
{
struct panfrost_gem_mapping *mapping = NULL;
struct panfrost_file_priv *priv;
struct drm_mm_node *node;
u64 offset = addr >> PAGE_SHIFT;
struct panfrost_mmu *mmu;
spin_lock(&pfdev->as_lock);
list_for_each_entry(mmu, &pfdev->as_lru_list, list) {
if (as == mmu->as)
goto found_mmu;
}
goto out;
found_mmu:
priv = container_of(mmu, struct panfrost_file_priv, mmu);
spin_lock(&priv->mm_lock);
drm_mm_for_each_node(node, &priv->mm) {
if (offset >= node->start &&
offset < (node->start + node->size)) {
mapping = drm_mm_node_to_panfrost_mapping(node);
kref_get(&mapping->refcount);
break;
}
}
spin_unlock(&priv->mm_lock);
out:
spin_unlock(&pfdev->as_lock);
return mapping;
}
#define NUM_FAULT_PAGES (SZ_2M / PAGE_SIZE)
static int panfrost_mmu_map_fault_addr(struct panfrost_device *pfdev, int as,
u64 addr)
{
int ret, i;
struct panfrost_gem_mapping *bomapping;
struct panfrost_gem_object *bo;
struct address_space *mapping;
pgoff_t page_offset;
struct sg_table *sgt;
struct page **pages;
bomapping = addr_to_mapping(pfdev, as, addr);
if (!bomapping)
return -ENOENT;
bo = bomapping->obj;
if (!bo->is_heap) {
dev_WARN(pfdev->dev, "matching BO is not heap type (GPU VA = %llx)",
bomapping->mmnode.start << PAGE_SHIFT);
ret = -EINVAL;
goto err_bo;
}
WARN_ON(bomapping->mmu->as != as);
/* Assume 2MB alignment and size multiple */
addr &= ~((u64)SZ_2M - 1);
page_offset = addr >> PAGE_SHIFT;
page_offset -= bomapping->mmnode.start;
mutex_lock(&bo->base.pages_lock);
if (!bo->base.pages) {
bo->sgts = kvmalloc_array(bo->base.base.size / SZ_2M,
sizeof(struct sg_table), GFP_KERNEL | __GFP_ZERO);
if (!bo->sgts) {
mutex_unlock(&bo->base.pages_lock);
ret = -ENOMEM;
goto err_bo;
}
pages = kvmalloc_array(bo->base.base.size >> PAGE_SHIFT,
sizeof(struct page *), GFP_KERNEL | __GFP_ZERO);
if (!pages) {
kvfree(bo->sgts);
bo->sgts = NULL;
mutex_unlock(&bo->base.pages_lock);
ret = -ENOMEM;
goto err_bo;
}
bo->base.pages = pages;
bo->base.pages_use_count = 1;
} else
pages = bo->base.pages;
mapping = bo->base.base.filp->f_mapping;
mapping_set_unevictable(mapping);
for (i = page_offset; i < page_offset + NUM_FAULT_PAGES; i++) {
pages[i] = shmem_read_mapping_page(mapping, i);
if (IS_ERR(pages[i])) {
mutex_unlock(&bo->base.pages_lock);
ret = PTR_ERR(pages[i]);
goto err_pages;
}
}
mutex_unlock(&bo->base.pages_lock);
sgt = &bo->sgts[page_offset / (SZ_2M / PAGE_SIZE)];
ret = sg_alloc_table_from_pages(sgt, pages + page_offset,
NUM_FAULT_PAGES, 0, SZ_2M, GFP_KERNEL);
if (ret)
goto err_pages;
if (!dma_map_sg(pfdev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL)) {
ret = -EINVAL;
goto err_map;
}
mmu_map_sg(pfdev, bomapping->mmu, addr,
IOMMU_WRITE | IOMMU_READ | IOMMU_NOEXEC, sgt);
bomapping->active = true;
dev_dbg(pfdev->dev, "mapped page fault @ AS%d %llx", as, addr);
panfrost_gem_mapping_put(bomapping);
return 0;
err_map:
sg_free_table(sgt);
err_pages:
drm_gem_shmem_put_pages(&bo->base);
err_bo:
drm_gem_object_put_unlocked(&bo->base.base);
return ret;
}
static const char *access_type_name(struct panfrost_device *pfdev,
u32 fault_status)
{
switch (fault_status & AS_FAULTSTATUS_ACCESS_TYPE_MASK) {
case AS_FAULTSTATUS_ACCESS_TYPE_ATOMIC:
if (panfrost_has_hw_feature(pfdev, HW_FEATURE_AARCH64_MMU))
return "ATOMIC";
else
return "UNKNOWN";
case AS_FAULTSTATUS_ACCESS_TYPE_READ:
return "READ";
case AS_FAULTSTATUS_ACCESS_TYPE_WRITE:
return "WRITE";
case AS_FAULTSTATUS_ACCESS_TYPE_EX:
return "EXECUTE";
default:
WARN_ON(1);
return NULL;
}
}
static irqreturn_t panfrost_mmu_irq_handler(int irq, void *data)
{
struct panfrost_device *pfdev = data;
if (!mmu_read(pfdev, MMU_INT_STAT))
return IRQ_NONE;
mmu_write(pfdev, MMU_INT_MASK, 0);
return IRQ_WAKE_THREAD;
}
static irqreturn_t panfrost_mmu_irq_handler_thread(int irq, void *data)
{
struct panfrost_device *pfdev = data;
u32 status = mmu_read(pfdev, MMU_INT_RAWSTAT);
int i, ret;
for (i = 0; status; i++) {
u32 mask = BIT(i) | BIT(i + 16);
u64 addr;
u32 fault_status;
u32 exception_type;
u32 access_type;
u32 source_id;
if (!(status & mask))
continue;
fault_status = mmu_read(pfdev, AS_FAULTSTATUS(i));
addr = mmu_read(pfdev, AS_FAULTADDRESS_LO(i));
addr |= (u64)mmu_read(pfdev, AS_FAULTADDRESS_HI(i)) << 32;
/* decode the fault status */
exception_type = fault_status & 0xFF;
access_type = (fault_status >> 8) & 0x3;
source_id = (fault_status >> 16);
/* Page fault only */
ret = -1;
if ((status & mask) == BIT(i) && (exception_type & 0xF8) == 0xC0)
ret = panfrost_mmu_map_fault_addr(pfdev, i, addr);
if (ret)
/* terminal fault, print info about the fault */
dev_err(pfdev->dev,
"Unhandled Page fault in AS%d at VA 0x%016llX\n"
"Reason: %s\n"
"raw fault status: 0x%X\n"
"decoded fault status: %s\n"
"exception type 0x%X: %s\n"
"access type 0x%X: %s\n"
"source id 0x%X\n",
i, addr,
"TODO",
fault_status,
(fault_status & (1 << 10) ? "DECODER FAULT" : "SLAVE FAULT"),
exception_type, panfrost_exception_name(pfdev, exception_type),
access_type, access_type_name(pfdev, fault_status),
source_id);
mmu_write(pfdev, MMU_INT_CLEAR, mask);
status &= ~mask;
}
mmu_write(pfdev, MMU_INT_MASK, ~0);
return IRQ_HANDLED;
};
int panfrost_mmu_init(struct panfrost_device *pfdev)
{
int err, irq;
irq = platform_get_irq_byname(to_platform_device(pfdev->dev), "mmu");
if (irq <= 0)
return -ENODEV;
err = devm_request_threaded_irq(pfdev->dev, irq, panfrost_mmu_irq_handler,
panfrost_mmu_irq_handler_thread,
IRQF_SHARED, "mmu", pfdev);
if (err) {
dev_err(pfdev->dev, "failed to request mmu irq");
return err;
}
return 0;
}
void panfrost_mmu_fini(struct panfrost_device *pfdev)
{
mmu_write(pfdev, MMU_INT_MASK, 0);
}
| {
"pile_set_name": "Github"
} |
/* eslint-disable */
const test_config = require('../test.config');
const { VERSIONS } = require('../constants');
const request = require('supertest');
const Server = require('./server');
let server;
// Helper function to replace express params with mock values
let fillRoute = (route, key) =>
route.replace(':base_version', VERSIONS['4_0_0']).replace(':id', 1).replace(':resource', key);
describe('Generic Profile Tests', () => {
beforeAll(() => {
// Standup a basic server
let config = Object.assign({}, test_config, { logging: { level: 'emerg' } });
server = new Server(config).setProfileRoutes().setErrorRoutes();
});
test('should be able to hit all routes', async () => {
let keys = Object.keys(server.config.profiles);
let { routes } = require('./route.config');
for (let key of keys) {
for (let route of routes) {
let path = fillRoute(route.path, key);
let method = route.type;
let response = await request(server.app)[method](path);
// Since we are not implementing services, these should all result in errors
expect(response.statusCode).not.toBe(200);
// Check that the resulting error is an OperationOutcome
let err = JSON.parse(response.error.text);
expect(err.resourceType).toBe('OperationOutcome');
expect(err.issue).toHaveLength(1);
// Make sure the severity is error
let issue = err.issue[0];
expect(issue.severity).toBe('error');
}
}
}, 60000);
});
| {
"pile_set_name": "Github"
} |
/**
* The Forgotten Server - a free and open-source MMORPG server emulator
* Copyright (C) 2019 Mark Samman <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#ifndef FS_MOUNTS_H_73716D11906A4C5C9F4A7B68D34C9BA6
#define FS_MOUNTS_H_73716D11906A4C5C9F4A7B68D34C9BA6
struct Mount
{
Mount(uint8_t id, uint16_t clientId, std::string name, int32_t speed, bool premium) :
name(std::move(name)), speed(speed), clientId(clientId), id(id), premium(premium) {}
std::string name;
int32_t speed;
uint16_t clientId;
uint8_t id;
bool premium;
};
class Mounts
{
public:
bool reload();
bool loadFromXml();
Mount* getMountByID(uint8_t id);
Mount* getMountByName(const std::string& name);
Mount* getMountByClientID(uint16_t clientId);
const std::vector<Mount>& getMounts() const {
return mounts;
}
private:
std::vector<Mount> mounts;
};
#endif
| {
"pile_set_name": "Github"
} |
# The MIT License
#
# Copyright (c) 2004-2010, Sun Microsystems, Inc., Reginaldo L. Russinholi, Cleiber Silva
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Used
MemoryUsageMonitor.USED=Usada
# Total
MemoryUsageMonitor.TOTAL=Total
# Old Data
OldDataMonitor.DisplayName=Dados Antigos
| {
"pile_set_name": "Github"
} |
#
# Cookbook Name:: dd-agent-install
# Recipe:: _install_windows
#
# Copyright (C) 2019-2020 Datadog
#
# All rights reserved - Do Not Redistribute
#
include_recipe 'dd-agent-install::_install_windows_base'
agent_config_file = ::File.join(node['dd-agent-install']['config_dir'], 'datadog.conf')
# Decide whether the Agent service should be enabled or disabled
agent_enable = node['dd-agent-install']['agent_enable'] ? :enable : :disable
# Set the correct Agent startup action
agent_start = node['dd-agent-install']['agent_start'] ? :start : :stop
include_recipe 'dd-agent-install::_agent6_windows_config'
# Common configuration
service 'datadog-agent' do
service_name node['dd-agent-install']['agent_name']
action [agent_enable, agent_start]
supports :restart => true, :start => true, :stop => true
subscribes :restart, "template[#{agent_config_file}]", :delayed unless node['dd-agent-install']['agent_start'] == false
restart_command "powershell -Command \"restart-service -Force -Name datadogagent\""
# HACK: the restart can fail when we hit systemd's restart limits (by default, 5 starts every 10 seconds)
# To workaround this, retry once after 5 seconds, and a second time after 10 seconds
retries 2
retry_delay 5
end
| {
"pile_set_name": "Github"
} |
disabled=06target
eclipse.preferences.version=1
| {
"pile_set_name": "Github"
} |
/* Copyright (C) 2009-2019 Free Software Foundation, Inc.
Contributed by Richard Henderson <[email protected]>.
This file is part of the GNU Transactional Memory Library (libitm).
Libitm is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Libitm is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
Under Section 7 of GPL version 3, you are granted additional
permissions described in the GCC Runtime Library Exception, version
3.1, as published by the Free Software Foundation.
You should have received a copy of the GNU General Public License and
a copy of the GCC Runtime Library Exception along with this program;
see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
<http://www.gnu.org/licenses/>. */
namespace GTM HIDDEN {
typedef struct gtm_jmpbuf
{
unsigned long pc;
unsigned long s[7];
void *cfa;
unsigned long f[8];
} gtm_jmpbuf;
/* The size of one line in hardware caches (in bytes). */
#define HW_CACHELINE_SIZE 64
static inline void
cpu_relax (void)
{
__asm volatile ("" : : : "memory");
}
} // namespace GTM
| {
"pile_set_name": "Github"
} |
/*
Tencent is pleased to support the open source community by making Blueking Container Service available.
Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under
the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions and
limitations under the License.
*/
package strategy
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/spf13/viper"
"bk-bscp/cmd/bscp-businessserver/modules/audit"
"bk-bscp/internal/database"
pb "bk-bscp/internal/protocol/businessserver"
pbcommon "bk-bscp/internal/protocol/common"
pbdatamanager "bk-bscp/internal/protocol/datamanager"
"bk-bscp/internal/strategy"
"bk-bscp/pkg/common"
"bk-bscp/pkg/logger"
)
// CreateAction creates a strategy object.
type CreateAction struct {
viper *viper.Viper
dataMgrCli pbdatamanager.DataManagerClient
req *pb.CreateStrategyReq
resp *pb.CreateStrategyResp
strategies *strategy.Strategy
newStrategyid string
}
// NewCreateAction creates new CreateAction.
func NewCreateAction(viper *viper.Viper, dataMgrCli pbdatamanager.DataManagerClient,
req *pb.CreateStrategyReq, resp *pb.CreateStrategyResp) *CreateAction {
action := &CreateAction{viper: viper, dataMgrCli: dataMgrCli, req: req, resp: resp}
action.resp.Seq = req.Seq
action.resp.ErrCode = pbcommon.ErrCode_E_OK
action.resp.ErrMsg = "OK"
return action
}
// Err setup error code message in response and return the error.
func (act *CreateAction) Err(errCode pbcommon.ErrCode, errMsg string) error {
act.resp.ErrCode = errCode
act.resp.ErrMsg = errMsg
return errors.New(errMsg)
}
// Input handles the input messages.
func (act *CreateAction) Input() error {
if err := act.verify(); err != nil {
return act.Err(pbcommon.ErrCode_E_BS_PARAMS_INVALID, err.Error())
}
return nil
}
// Output handles the output messages.
func (act *CreateAction) Output() error {
// do nothing.
return nil
}
func (act *CreateAction) verify() error {
length := len(act.req.Bid)
if length == 0 {
return errors.New("invalid params, bid missing")
}
if length > database.BSCPIDLENLIMIT {
return errors.New("invalid params, bid too long")
}
length = len(act.req.Appid)
if length == 0 {
return errors.New("invalid params, appid missing")
}
if length > database.BSCPIDLENLIMIT {
return errors.New("invalid params, appid too long")
}
length = len(act.req.Name)
if length == 0 {
return errors.New("invalid params, name missing")
}
if length > database.BSCPNAMELENLIMIT {
return errors.New("invalid params, name too long")
}
if act.req.Clusterids == nil {
act.req.Clusterids = []string{}
}
if len(act.req.Clusterids) > database.BSCPBATCHLIMIT {
return errors.New("invalid params, clusterids list too long")
}
if act.req.Zoneids == nil {
act.req.Zoneids = []string{}
}
if len(act.req.Zoneids) > database.BSCPBATCHLIMIT {
return errors.New("invalid params, zoneids list too long")
}
if act.req.Dcs == nil {
act.req.Dcs = []string{}
}
if len(act.req.Dcs) > database.BSCPBATCHLIMIT {
return errors.New("invalid params, dcs list too long")
}
if act.req.IPs == nil {
act.req.IPs = []string{}
}
if len(act.req.IPs) > database.BSCPBATCHLIMIT {
return errors.New("invalid params, ips list too long")
}
if act.req.Labels == nil {
act.req.Labels = make(map[string]string)
}
if len(act.req.Labels) > database.BSCPBATCHLIMIT {
return errors.New("invalid params, labels set too large")
}
length = len(act.req.Creator)
if length == 0 {
return errors.New("invalid params, creator missing")
}
if length > database.BSCPNAMELENLIMIT {
return errors.New("invalid params, creator too long")
}
if len(act.req.Memo) > database.BSCPLONGSTRLENLIMIT {
return errors.New("invalid params, memo too long")
}
return nil
}
func (act *CreateAction) genStrategyID() error {
id, err := common.GenStrategyid()
if err != nil {
return err
}
act.newStrategyid = id
return nil
}
func (act *CreateAction) queryCluster(clusterid string) (*pbcommon.Cluster, error) {
r := &pbdatamanager.QueryClusterReq{
Seq: act.req.Seq,
Bid: act.req.Bid,
Clusterid: clusterid,
}
ctx, cancel := context.WithTimeout(context.Background(), act.viper.GetDuration("datamanager.calltimeout"))
defer cancel()
logger.V(2).Infof("CreateStrategy[%d]| request to datamanager QueryCluster, %+v", act.req.Seq, r)
resp, err := act.dataMgrCli.QueryCluster(ctx, r)
if err != nil {
return nil, err
}
if resp.ErrCode != pbcommon.ErrCode_E_OK {
return nil, errors.New(resp.ErrMsg)
}
return resp.Cluster, nil
}
func (act *CreateAction) queryZone(zoneid string) (*pbcommon.Zone, error) {
r := &pbdatamanager.QueryZoneReq{
Seq: act.req.Seq,
Bid: act.req.Bid,
Zoneid: zoneid,
}
ctx, cancel := context.WithTimeout(context.Background(), act.viper.GetDuration("datamanager.calltimeout"))
defer cancel()
logger.V(2).Infof("CreateStrategy[%d]| request to datamanager QueryZone, %+v", act.req.Seq, r)
resp, err := act.dataMgrCli.QueryZone(ctx, r)
if err != nil {
return nil, err
}
if resp.ErrCode != pbcommon.ErrCode_E_OK {
return nil, errors.New(resp.ErrMsg)
}
return resp.Zone, nil
}
func (act *CreateAction) query() (pbcommon.ErrCode, string) {
for _, clusterid := range act.req.Clusterids {
cluster, err := act.queryCluster(clusterid)
if err != nil {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("can't query cluster[%+v] information to create strategy", clusterid)
}
if cluster.Appid != act.req.Appid {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("cluster[%+v] not under target app[%+v]", cluster, act.req.Appid)
}
}
for _, zoneid := range act.req.Zoneids {
zone, err := act.queryZone(zoneid)
if err != nil {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("can't query zone[%+v] information to create strategy", zoneid)
}
if zone.Appid != act.req.Appid {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("zone[%+v] not under target app[%+v]", zone, act.req.Appid)
}
}
return pbcommon.ErrCode_E_OK, ""
}
func (act *CreateAction) queryStrategy() (pbcommon.ErrCode, string) {
r := &pbdatamanager.QueryStrategyReq{
Seq: act.req.Seq,
Bid: act.req.Bid,
Appid: act.req.Appid,
Name: act.req.Name,
}
ctx, cancel := context.WithTimeout(context.Background(), act.viper.GetDuration("datamanager.calltimeout"))
defer cancel()
logger.V(2).Infof("CreateStrategy[%d]| request to datamanager QueryStrategy, %+v", act.req.Seq, r)
resp, err := act.dataMgrCli.QueryStrategy(ctx, r)
if err != nil {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("request to datamanager QueryStrategy, %+v", err)
}
if resp.ErrCode == pbcommon.ErrCode_E_OK {
act.resp.Strategyid = resp.Strategy.Strategyid
return pbcommon.ErrCode_E_BS_ALREADY_EXISTS, fmt.Sprintf("strategy with name[%+v] already exist", act.req.Name)
}
if resp.ErrCode != pbcommon.ErrCode_E_DM_NOT_FOUND {
return resp.ErrCode, resp.ErrMsg
}
return pbcommon.ErrCode_E_OK, ""
}
func (act *CreateAction) create() (pbcommon.ErrCode, string) {
act.strategies = &strategy.Strategy{
Appid: act.req.Appid,
Clusterids: act.req.Clusterids,
Zoneids: act.req.Zoneids,
Dcs: act.req.Dcs,
IPs: act.req.IPs,
Labels: act.req.Labels,
}
content, err := json.Marshal(act.strategies)
if err != nil {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("can't marshal strategy content, %+v", err)
}
r := &pbdatamanager.CreateStrategyReq{
Seq: act.req.Seq,
Bid: act.req.Bid,
Appid: act.req.Appid,
Strategyid: act.newStrategyid,
Name: act.req.Name,
Content: string(content),
Memo: act.req.Memo,
Creator: act.req.Creator,
}
ctx, cancel := context.WithTimeout(context.Background(), act.viper.GetDuration("datamanager.calltimeout"))
defer cancel()
logger.V(2).Infof("CreateStrategy[%d]| request to datamanager CreateStrategy, %+v", act.req.Seq, r)
resp, err := act.dataMgrCli.CreateStrategy(ctx, r)
if err != nil {
return pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, fmt.Sprintf("request to datamanager CreateStrategy, %+v", err)
}
act.resp.Strategyid = resp.Strategyid
if resp.ErrCode != pbcommon.ErrCode_E_OK {
return resp.ErrCode, resp.ErrMsg
}
// audit here on new strategy created.
audit.Audit(int32(pbcommon.SourceType_ST_STRATEGY), int32(pbcommon.SourceOpType_SOT_CREATE),
act.req.Bid, act.resp.Strategyid, act.req.Creator, act.req.Memo)
return pbcommon.ErrCode_E_OK, ""
}
// Do makes the workflows of this action base on input messages.
func (act *CreateAction) Do() error {
if err := act.genStrategyID(); err != nil {
return act.Err(pbcommon.ErrCode_E_BS_SYSTEM_UNKONW, err.Error())
}
if errCode, errMsg := act.queryStrategy(); errCode != pbcommon.ErrCode_E_OK {
return act.Err(errCode, errMsg)
}
if errCode, errMsg := act.query(); errCode != pbcommon.ErrCode_E_OK {
return act.Err(errCode, errMsg)
}
if errCode, errMsg := act.create(); errCode != pbcommon.ErrCode_E_OK {
return act.Err(errCode, errMsg)
}
return nil
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the blocksdir option.
"""
import os
import shutil
from test_framework.test_framework import BitcoinTestFramework, initialize_datadir
class BlocksdirTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.stop_node(0)
shutil.rmtree(self.nodes[0].datadir)
initialize_datadir(self.options.tmpdir, 0)
self.log.info("Starting with non exiting blocksdir ...")
blocksdir_path = os.path.join(self.options.tmpdir, 'blocksdir')
self.nodes[0].assert_start_raises_init_error(["-blocksdir=" + blocksdir_path], 'Error: Specified blocks directory "{}" does not exist.'.format(blocksdir_path))
os.mkdir(blocksdir_path)
self.log.info("Starting with exiting blocksdir ...")
self.start_node(0, ["-blocksdir=" + blocksdir_path])
self.log.info("mining blocks..")
self.nodes[0].generate(10)
assert os.path.isfile(os.path.join(blocksdir_path, "regtest", "blocks", "blk00000.dat"))
assert os.path.isdir(os.path.join(self.nodes[0].datadir, "regtest", "blocks", "index"))
if __name__ == '__main__':
BlocksdirTest().main()
| {
"pile_set_name": "Github"
} |
desc = "another test of append"
filedata = \
"""{$
def bar () {
locals { v : [ -1 ] }
for (i, range (300)) {
append (v, i);
}
print (v[231], "\n");
}
for (i, range (3)) {
bar ();
}
$}"""
outcome = '230 ' * 3
| {
"pile_set_name": "Github"
} |
# name backbone
gbif_name_backbone <- function(name, rank = NULL, kingdom = NULL, phylum = NULL,
class = NULL, order = NULL, family = NULL, genus = NULL, strict = FALSE,
start = NULL, limit = 500, ...) {
url = 'https://api.gbif.org/v1/species/match'
args <- tc(list(name = name, rank = rank, kingdom = kingdom,
phylum = phylum, class = class, order = order, family = family,
genus = genus, strict = strict, verbose = TRUE, offset = start,
limit = limit))
cli <- crul::HttpClient$new(url = url, headers = tx_ual)
temp <- cli$get(query = args, ...)
temp$raise_for_status()
tt <- jsonlite::fromJSON(temp$parse("UTF-8"), FALSE)
if (all(names(tt) %in% c('confidence', 'synonym', 'matchType'))) {
data.frame(NULL)
} else {
dd <- data.table::setDF(
data.table::rbindlist(
lapply(tt$alternatives,
function(x) lapply(x, function(x)
if (length(x) == 0) NA else x)), use.names = TRUE, fill= TRUE))
dat <- data.frame(tt[!names(tt) %in% c("alternatives",
"note")], stringsAsFactors = FALSE)
if (!all(names(dat) %in% c('confidence', 'synonym', 'matchType'))) {
dd <- dt2df(list(dat, dd), idcol = FALSE)
}
if (limit > 0) {
dd <- cols_move(dd, back_cols_use)
}
if (!is.null(dd)) dd$rank <- tolower(dd$rank)
names(dd) <- tolower(names(dd))
return(dd)
}
}
# name lookup
gbif_name_lookup <- function(query = NULL, rank = NULL,
higherTaxonKey = NULL, status = NULL, nameType = NULL,
datasetKey = 'd7dddbf4-2cf0-4f39-9b2a-bb099caae36c', limit = 500,
start = NULL, ...) {
url = 'https://api.gbif.org/v1/species/search'
args <- tc(list(q = query, rank = rank, higherTaxonKey = higherTaxonKey,
status = status, nameType = nameType, datasetKey = datasetKey,
limit = limit, offset = start))
cli <- crul::HttpClient$new(url = url, headers = tx_ual)
temp <- cli$get(query = args, ...)
temp$raise_for_status()
tt <- jsonlite::fromJSON(temp$parse("UTF-8"), FALSE)
dd <- data.table::setDF(
data.table::rbindlist(lapply(
tt$results,
nlkupcleaner), use.names = TRUE, fill = TRUE)
)
if (limit > 0) {
dd <- cols_move(dd, gbif_cols_use)
}
if (!is.null(dd)) dd$rank <- tolower(dd$rank)
names(dd) <- tolower(names(dd))
return(dd)
}
cols_move <- function (x, cols) {
other <- names(x)[!names(x) %in% cols]
x[, c(cols, other)]
}
nlkupcleaner <- function (x) {
tmp <- x[!names(x) %in% c("descriptions", "vernacularNames",
"higherClassificationMap")]
lapply(tmp, function(x) {
if (length(x) == 0) {
NA
}
else if (length(x) > 1 || is(x, "list")) {
paste0(x, collapse = ", ")
}
else {
x
}
})
}
gbif_cols_use <- c("key", "canonicalName", "authorship", "rank",
"taxonomicStatus", "synonym")
back_cols_use <- c("usageKey", "scientificName", "rank", "status", "matchType")
gbif_cols_show_backbone <- tolower(c("gbifid", "scientificName", "rank",
"status", "matchtype"))
gbif_cols_show_lookup <- tolower(c("gbifid", "canonicalName", "authorship",
"rank", "taxonomicStatus", "synonym"))
| {
"pile_set_name": "Github"
} |
//
// Copyright (c) 2016, Scientific Toolworks, Inc.
//
// This software is licensed under the MIT License. The LICENSE.md file
// describes the conditions under which this software may be distributed.
//
// Author: Jason Haslam
//
#include "Repository.h"
#include "TagRef.h"
#include "Tag.h"
namespace git {
TagRef::TagRef(git_reference *ref)
: Reference(ref)
{
if (isValid() && !isTag())
d.clear();
}
TagRef::TagRef(const Reference &rhs)
: Reference(rhs)
{
if (isValid() && !isTag())
d.clear();
}
Tag TagRef::tag() const
{
git_tag *tag = nullptr;
git_repository *repo = git_reference_owner(d.data());
git_tag_lookup(&tag, repo, git_reference_target(d.data()));
return Tag(tag);
}
bool TagRef::remove()
{
// Remember name.
QString name = this->name();
Repository repo(git_reference_owner(d.data()));
emit repo.notifier()->referenceAboutToBeRemoved(*this);
int error = git_reference_delete(d.data());
if (!error)
d.clear(); // Invalidate this branch.
// We have to notify even if removal failed and the tag is still valid.
// Clients can check this tag to see if the tag was really removed.
emit repo.notifier()->referenceRemoved(name);
return !error;
}
} // namespace git
| {
"pile_set_name": "Github"
} |
#ifndef DETECTOR_DESCRIPTION_CORE_DD_COMPACT_VIEW_H
#define DETECTOR_DESCRIPTION_CORE_DD_COMPACT_VIEW_H
#include <cstddef>
#include <memory>
#include <string>
#include <utility>
#include <vector>
#include <unordered_map>
#include "DetectorDescription/Core/interface/DDRotationMatrix.h"
#include "DetectorDescription/Core/interface/DDTranslation.h"
#include "DetectorDescription/Core/interface/Store.h"
#include "DetectorDescription/Core/interface/DDLogicalPart.h"
#include "DetectorDescription/Core/interface/DDPosData.h"
#include "DetectorDescription/Core/interface/DDTransform.h"
#include "DataFormats/Math/interface/Graph.h"
#include "DataFormats/Math/interface/GraphWalker.h"
class DDCompactViewImpl;
class DDDivision;
class DDName;
struct DDPosData;
namespace DDI {
class LogicalPart;
class Material;
class Solid;
class Specific;
} // namespace DDI
/**
Navigation through the compact view of the detector ...
*/
//MEC: these comments are kept from original... Will we ever do this? don't think so.
//FIXME: DDCompactView: check for proper acyclic directed graph structure!!
//FIXME:
//FIXME: X [A-X] ... LogicalPart
//FIXME:       / \             | ... PosPart (directed parent to child)
//FIXME: A A
//FIXME: | |
//FIXME: B C
//FIXME:
//FIXME: THIS IS NOT ALLOWED, but currently can be specified using DDL ....
//FIXME:
//! Compact representation of the geometrical detector hierarchy
/** A DDCompactView represents the detector as an acyclic directed multigraph.
The nodes are instances of DDLogicalPart while the edges are pointers to
DDPosData. Edges are directed from parent-node to child-node.
Each edge's DDPosData holds the relative translation and rotation of the
child-node with respect to the parent-node, together with a copy-number.
One node is explicitly marked as the root node. It is the DDLogicalPart which
defines the base coordinate system for the detector. All subdetectors are
directly or indirectly positioned inside the root-node.
Example:
The figure shows a compact-view graph consisting of 16 DDLogicalParts
interconnected by 20 edges represented by pointers to DDPosData.
\image html compact-view.gif
\image latex compact-view.eps
The compact-view also serves as base for calculating nodes in the expanded
view. Paths through the compact-view can be viewed as nodes in the expanded-view
(expansion of an acyclic directed multigraph into a tree). In the figure there are
5 different paths from CMS to Module2 (e.g. CMS-Pos1->Ecal-Pos4->EEndcap-Pos21->Module2)
thus there will be 5 nodes of Module2 in the expanded view.
MEC:
There has been a re-purposing of the DDCompactView to not only hold the
representation described above (in detail this is the DDCompactViewImpl)
but also own the memory of the stores refered to by the graph.
DDCompactView now owns the DDMaterial, DDSpecific, DDLogicalPart,
DDRotation, DDSolid and etc. Removal of the global one-and-only
stores, methods and details such as DDRoot will mean that all of
these will be accessed via the DDCompactView.
*/
class DDCompactView {
public:
using Graph = math::Graph<DDLogicalPart, DDPosData*>;
using GraphWalker = math::GraphWalker<DDLogicalPart, DDPosData*>;
using Vectors = std::unordered_map<std::string, std::vector<double>>;
//! Creates a compact-view
explicit DDCompactView();
//! Creates a compact-view using a different root of the geometry hierarchy
explicit DDCompactView(const DDName&);
~DDCompactView();
//! Creates a compact-view using a different root of the geometry hierarchy.
// NOTE: It cannot be used to modify the stores if they are locked.
explicit DDCompactView(const DDLogicalPart& rootnodedata);
//! Provides read-only access to the data structure of the compact-view.
const Graph& graph() const;
GraphWalker walker() const;
//! returns the DDLogicalPart representing the root of the geometrical hierarchy
const DDLogicalPart& root() const;
//! The absolute position of the world
const DDPosData* worldPosition() const;
//! returns an empty container if not found
std::vector<double> const& vector(std::string_view iKey) const;
void position(const DDLogicalPart& self,
const DDLogicalPart& parent,
const std::string& copyno,
const DDTranslation& trans,
const DDRotation& rot,
const DDDivision* div = nullptr);
void position(const DDLogicalPart& self,
const DDLogicalPart& parent,
int copyno,
const DDTranslation& trans,
const DDRotation& rot,
const DDDivision* div = nullptr);
void setRoot(const DDLogicalPart& root);
void lockdown();
private:
void swap(DDCompactView&);
std::unique_ptr<DDCompactViewImpl> rep_;
std::unique_ptr<DDPosData> worldpos_;
DDI::Store<DDName, std::unique_ptr<DDI::Material>> matStore_;
DDI::Store<DDName, std::unique_ptr<DDI::Solid>> solidStore_;
DDI::Store<DDName, std::unique_ptr<DDI::LogicalPart>> lpStore_;
DDI::Store<DDName, std::unique_ptr<DDI::Specific>> specStore_;
DDI::Store<DDName, std::unique_ptr<DDRotationMatrix>> rotStore_;
Vectors vectors_;
};
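// Illustrative sketch of the positioning API above (the parts, rotation and
// translation here are hypothetical placeholders; in practice they come from
// the DDL parser or the geometry builders):
//
//   DDCompactView cpv(worldPart);                  // worldPart: a DDLogicalPart
//   cpv.position(barrelPart, worldPart,
//                1,                                // copy number of the child
//                DDTranslation(0., 0., 0.),        // child w.r.t. parent
//                DDRotation());                    // unit (anonymous) rotation
//   cpv.lockdown();                                // freeze the stores
//   DDCompactView::GraphWalker w = cpv.walker();   // traverse the multigraph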
#endif
| {
"pile_set_name": "Github"
} |
// Created by cgo -godefs - DO NOT EDIT
// cgo -godefs defs_linux.go
package socket
const (
sysAF_UNSPEC = 0x0
sysAF_INET = 0x2
sysAF_INET6 = 0xa
sysSOCK_RAW = 0x3
)
type iovec struct {
Base *byte
Len uint64
}
type msghdr struct {
Name *byte
Namelen uint32
Pad_cgo_0 [4]byte
Iov *iovec
Iovlen uint64
Control *byte
Controllen uint64
Flags int32
Pad_cgo_1 [4]byte
}
type mmsghdr struct {
Hdr msghdr
Len uint32
Pad_cgo_0 [4]byte
}
type cmsghdr struct {
Len uint64
Level int32
Type int32
}
type sockaddrInet struct {
Family uint16
Port uint16
Addr [4]byte /* in_addr */
X__pad [8]uint8
}
type sockaddrInet6 struct {
Family uint16
Port uint16
Flowinfo uint32
Addr [16]byte /* in6_addr */
Scope_id uint32
}
const (
sizeofIovec = 0x10
sizeofMsghdr = 0x38
sizeofMmsghdr = 0x40
sizeofCmsghdr = 0x10
sizeofSockaddrInet = 0x10
sizeofSockaddrInet6 = 0x1c
)
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"
| {
"pile_set_name": "Github"
} |
-----BEGIN CERTIFICATE-----
MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
fyWl8kgAwKQB2j8=
-----END CERTIFICATE-----
-----BEGIN CERTIFICATE-----
MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
+2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
-----END CERTIFICATE-----
| {
"pile_set_name": "Github"
} |
var $builtinmodule = window.memoise('anvil.facebook.auth', function() {
var mod = {};
var loginCallbackResolve = null;
var displayLogInModal = function(additionalScopes) {
var anvil = PyDefUtils.getModule("anvil");
var appPath = Sk.ffi.remapToJs(anvil.tp$getattr(new Sk.builtin.str("app_path")));
var scopesToRequest = (additionalScopes || []).join(',');
var doLogin = function() {
var authParams = {
scopes: scopesToRequest,
s: window.anvilSessionId,
};
var authUrl = appPath + "/_/facebook_auth_redirect?" + $.param(authParams);
var windowFeatures = {
width: 450,
height: 500,
scrollbars: "yes",
}
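            // Serialise the feature map into the comma-separated "key=value"
            // string expected by window.open(); booleans are mapped to 1/0 below.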
            var strWindowFeatures = "";
for(var k in windowFeatures) {
if (strWindowFeatures.length > 0)
strWindowFeatures += ",";
strWindowFeatures += k;
var v = windowFeatures[k];
if (v === true)
v = 1;
if (v === false)
v = 0;
strWindowFeatures+= "=" + v;
}
var popup = window.open(authUrl, null, strWindowFeatures);
};
if (PyDefUtils.isPopupOK()) {
doLogin();
} else {
$("#facebookLogInButton").off("click"); // Just in case they didn't click it last time.
$("#facebookLogInButton").one("click", doLogin);
$("#facebookCancelButton").off("click");
$("#facebookCancelButton").one("click", () => {
$("#facebook-login-modal").one("hidden.bs.modal.alertclear", function() {
loginCallbackResolve.reject("MODAL_CANCEL")
});
});
$('#facebook-login-modal').modal({backdrop: 'static', keyboard: false});
}
}
var registerCallbackHandlers = function(messageFns) {
messageFns.facebookAuthErrorCallback = function(params) {
console.error("Client auth ERROR", params);
if (loginCallbackResolve) {
if (params.message == "SESSION_EXPIRED") {
var server = PyDefUtils.getModule("anvil.server");
loginCallbackResolve.reject(Sk.misceval.callsim(server.tp$getattr(new Sk.builtin.str("SessionExpiredError"))));
} else {
loginCallbackResolve.reject(new Sk.builtin.Exception(Sk.ffi.remapToPy(params.message)));
}
}
}
messageFns.facebookAuthSuccessCallback = function(params) {
PyDefUtils.callAsync(mod["get_user_email"]).then(function(c) {
loginCallbackResolve.resolve(Sk.ffi.remapToJs(c));
}).catch(function(e) {
loginCallbackResolve.reject(e);
});
}
}
/*!defFunction(anvil.facebook.auth,!_)!2*/ "Prompt the user to log in with their Facebook account"
mod["login"] = new Sk.builtin.func(function(pyAdditionalScopes) {
// TODO: Try immediate auth before we do anything else. If that fails, then...
loginCallbackResolve = RSVP.defer();
displayLogInModal(Sk.ffi.remapToJs(pyAdditionalScopes || []));
// TODO: Should probably have a timeout on this promise.
return PyDefUtils.suspensionPromise(function(resolve, reject) {
loginCallbackResolve.promise.then(function(email) {
resolve(email);
}).catch(function(e) {
if (e == "MODAL_CANCEL") {
resolve(Sk.builtin.none.none$);
} else {
reject(e);
}
});
});
});
registerCallbackHandlers(window.messages);
/*!defFunction(anvil.facebook.auth,_,)!2*/ "Get the email address of the currently-logged-in Facebook user.\n\nTo log in with Facebook, call facebook.auth.login() from form code.";
mod["get_user_email"] = new Sk.builtin.func(function() {
var server = PyDefUtils.getModule("anvil.server");
var call = server.tp$getattr(new Sk.builtin.str("call_$rn$"));
return Sk.misceval.callOrSuspend(call, undefined, undefined, undefined, Sk.ffi.remapToPy("anvil.private.facebook.auth.get_user_email"));
})
/*!defFunction(anvil.facebook.auth,_,)!2*/ "Get the Facebook user ID of the currently-logged-in Facebook user.\n\nTo log in with Facebook, call facebook.auth.login() from form code.";
mod["get_user_id"] = new Sk.builtin.func(function() {
var server = PyDefUtils.getModule("anvil.server");
var call = server.tp$getattr(new Sk.builtin.str("call_$rn$"));
return Sk.misceval.callOrSuspend(call, undefined, undefined, undefined, Sk.ffi.remapToPy("anvil.private.facebook.auth.get_user_id"));
})
/*!defFunction(anvil.facebook.auth,_,)!2*/ "Get the Facebook access token of the currently-logged-in Facebook user.\n\nTo log in with Facebook, call facebook.auth.login() from form code.";
mod["get_user_access_token"] = new Sk.builtin.func(function() {
var server = PyDefUtils.getModule("anvil.server");
var call = server.tp$getattr(new Sk.builtin.str("call_$rn$"));
return Sk.misceval.callOrSuspend(call, undefined, undefined, undefined, Sk.ffi.remapToPy("anvil.private.facebook.auth.get_user_access_token"));
})
return mod;
});
| {
"pile_set_name": "Github"
} |
#define IDD_BROWSE 95
#define IDL_BROWSE 100
#define IDT_BROWSE_FOLDER 101
#define IDE_BROWSE_PATH 102
#define IDC_BROWSE_FILTER 103
#define IDB_BROWSE_PARENT 110
#define IDB_BROWSE_CREATE_DIR 112
| {
"pile_set_name": "Github"
} |
function foo() {
}
if (typeof foo === 'function') {
console.log ("hello world");
}
| {
"pile_set_name": "Github"
} |
// +build !go1.7
package aws
import "time"
// An emptyCtx is a copy of the Go 1.7 context.emptyCtx type. This is copied to
// provide a 1.6 and 1.5 safe version of context that is compatible with Go
// 1.7's Context.
//
// An emptyCtx is never canceled, has no values, and has no deadline. It is not
// struct{}, since vars of this type must have distinct addresses.
type emptyCtx int
func (*emptyCtx) Deadline() (deadline time.Time, ok bool) {
return
}
func (*emptyCtx) Done() <-chan struct{} {
return nil
}
func (*emptyCtx) Err() error {
return nil
}
func (*emptyCtx) Value(key interface{}) interface{} {
return nil
}
func (e *emptyCtx) String() string {
switch e {
case backgroundCtx:
return "aws.BackgroundContext"
}
return "unknown empty Context"
}
var (
backgroundCtx = new(emptyCtx)
)
| {
"pile_set_name": "Github"
} |
---
layout: pid
title: Usb2Most(USB to MOST adapter)
owner: smartgauges
license: GPLv3
site: https://github.com/smartgauges/usb2most
source: https://github.com/smartgauges/usb2most
---
Usb2Most is a board that translates audio from USB (UAC1) to the MOST (Media Oriented Systems Transport) bus.
| {
"pile_set_name": "Github"
} |
//-----------------------------------------------------------------------------
// Copyright (c) 2013 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _EXTRAFIELD_H_
#define _EXTRAFIELD_H_
class Stream;
namespace Zip
{
/// @addtogroup zipint_group
/// @ingroup zip_group
// @{
// Forward Refs
class ExtraField;
// Creation Helpers
typedef ExtraField *(*ExtraFieldCreateFn)();
template<class T> ExtraField * createExtraField()
{
return new T;
}
// ExtraField base class
class ExtraField
{
ExtraField *mNext;
protected:
U16 mID;
ExtraFieldCreateFn mCreateFn;
public:
ExtraField()
{
mID = 0;
mCreateFn = NULL;
}
virtual ~ExtraField() {}
ExtraField(U16 id, ExtraFieldCreateFn fnCreate);
inline U16 getID() { return mID; }
virtual bool read(Stream *stream) = 0;
// Run time creation methods
static ExtraField *create(U16 id);
};
#define DeclareExtraField(name) \
name(U16 id, ExtraFieldCreateFn fnCreate) : Parent(id, fnCreate) {}
#define ImplementExtraField(name, id) \
name gExtraField##name##instance(id, &createExtraField<name>);
// @}
} // end namespace Zip
#endif // _EXTRAFIELD_H_
| {
"pile_set_name": "Github"
} |
function model = linregRobustHuberFit(X, y, delta, includeOffset)
% Minimize Huber loss function for linear regression
% We assume X is an N*D matrix; we will add a column of 1s internally
% w = [w0 w1 ... wD] is a column vector, where w0 is the bias
% This file is from pmtk3.googlecode.com
%PMTKauthor Mark Schmidt
%PMTKurl http://people.cs.ubc.ca/~schmidtm/Software/minFunc/minFunc.html#2
%%
if nargin < 3, delta = 1; end
if nargin < 4, includeOffset = true; end
[N,D] = size(X);
if includeOffset
X = [ones(N,1) X];
end
wLS = X \ y; % initialize with least squares
options.Display = 'none';
w = minFunc(@HuberLoss,wLS,options,X,y,delta);
model.w = w(2:end);
model.w0 = w(1);
model.includeOffset = includeOffset;
model.sigma2 = var((X*w - y).^2); % MLE of noise variance
end
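% Illustrative usage (variable names here are hypothetical):
%   model = linregRobustHuberFit(Xtrain, ytrain, 1);
%   yhat  = Xtest * model.w + model.w0;    % predictions for new inputs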
| {
"pile_set_name": "Github"
} |
'use strict';
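// Template-generated keyword compiler (ajv-style dotjs output): the function
// below assembles the JavaScript source of a properties/patternProperties/
// additionalProperties validator as a string in `out`; the surrounding library
// later compiles that string into the actual validate function.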
module.exports = function generate_properties(it, $keyword) {
var out = ' ';
var $lvl = it.level;
var $dataLvl = it.dataLevel;
var $schema = it.schema[$keyword];
var $schemaPath = it.schemaPath + it.util.getProperty($keyword);
var $errSchemaPath = it.errSchemaPath + '/' + $keyword;
var $breakOnError = !it.opts.allErrors;
var $data = 'data' + ($dataLvl || '');
var $valid = 'valid' + $lvl;
var $errs = 'errs__' + $lvl;
var $it = it.util.copy(it);
var $closingBraces = '';
$it.level++;
var $nextValid = 'valid' + $it.level;
var $key = 'key' + $lvl,
$dataNxt = $it.dataLevel = it.dataLevel + 1,
$nextData = 'data' + $dataNxt;
var $schemaKeys = Object.keys($schema || {}),
$pProperties = it.schema.patternProperties || {},
$pPropertyKeys = Object.keys($pProperties),
$aProperties = it.schema.additionalProperties,
$someProperties = $schemaKeys.length || $pPropertyKeys.length,
$noAdditional = $aProperties === false,
$additionalIsSchema = typeof $aProperties == 'object' && Object.keys($aProperties).length,
$removeAdditional = it.opts.removeAdditional,
$checkAdditional = $noAdditional || $additionalIsSchema || $removeAdditional,
$ownProperties = it.opts.ownProperties,
$currentBaseId = it.baseId;
var $required = it.schema.required;
if ($required && !(it.opts.v5 && $required.$data) && $required.length < it.opts.loopRequired) var $requiredHash = it.util.toHash($required);
if (it.opts.v5) {
var $pgProperties = it.schema.patternGroups || {},
$pgPropertyKeys = Object.keys($pgProperties);
}
out += 'var ' + ($errs) + ' = errors;var ' + ($nextValid) + ' = true;';
if ($checkAdditional) {
out += ' for (var ' + ($key) + ' in ' + ($data) + ') { ';
if ($ownProperties) {
out += ' if (!Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($key) + ')) continue; ';
}
if ($someProperties) {
out += ' var isAdditional' + ($lvl) + ' = !(false ';
if ($schemaKeys.length) {
if ($schemaKeys.length > 5) {
out += ' || validate.schema' + ($schemaPath) + '[' + ($key) + '] ';
} else {
var arr1 = $schemaKeys;
if (arr1) {
var $propertyKey, i1 = -1,
l1 = arr1.length - 1;
while (i1 < l1) {
$propertyKey = arr1[i1 += 1];
out += ' || ' + ($key) + ' == ' + (it.util.toQuotedString($propertyKey)) + ' ';
}
}
}
}
if ($pPropertyKeys.length) {
var arr2 = $pPropertyKeys;
if (arr2) {
var $pProperty, $i = -1,
l2 = arr2.length - 1;
while ($i < l2) {
$pProperty = arr2[$i += 1];
out += ' || ' + (it.usePattern($pProperty)) + '.test(' + ($key) + ') ';
}
}
}
if (it.opts.v5 && $pgPropertyKeys && $pgPropertyKeys.length) {
var arr3 = $pgPropertyKeys;
if (arr3) {
var $pgProperty, $i = -1,
l3 = arr3.length - 1;
while ($i < l3) {
$pgProperty = arr3[$i += 1];
out += ' || ' + (it.usePattern($pgProperty)) + '.test(' + ($key) + ') ';
}
}
}
out += ' ); if (isAdditional' + ($lvl) + ') { ';
}
if ($removeAdditional == 'all') {
out += ' delete ' + ($data) + '[' + ($key) + ']; ';
} else {
var $currentErrorPath = it.errorPath;
var $additionalProperty = '\' + ' + $key + ' + \'';
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
}
if ($noAdditional) {
if ($removeAdditional) {
out += ' delete ' + ($data) + '[' + ($key) + ']; ';
} else {
out += ' ' + ($nextValid) + ' = false; ';
var $currErrSchemaPath = $errSchemaPath;
$errSchemaPath = it.errSchemaPath + '/additionalProperties';
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ('additionalProperties') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { additionalProperty: \'' + ($additionalProperty) + '\' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should NOT have additional properties\' ';
}
if (it.opts.verbose) {
out += ' , schema: false , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
$errSchemaPath = $currErrSchemaPath;
if ($breakOnError) {
out += ' break; ';
}
}
} else if ($additionalIsSchema) {
if ($removeAdditional == 'failing') {
out += ' var ' + ($errs) + ' = errors; ';
var $wasComposite = it.compositeRule;
it.compositeRule = $it.compositeRule = true;
$it.schema = $aProperties;
$it.schemaPath = it.schemaPath + '.additionalProperties';
$it.errSchemaPath = it.errSchemaPath + '/additionalProperties';
$it.errorPath = it.opts._errorDataPathProperty ? it.errorPath : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
var $code = it.validate($it);
$it.baseId = $currentBaseId;
if (it.util.varOccurences($code, $nextData) < 2) {
out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
} else {
out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
}
out += ' if (!' + ($nextValid) + ') { errors = ' + ($errs) + '; if (validate.errors !== null) { if (errors) validate.errors.length = errors; else validate.errors = null; } delete ' + ($data) + '[' + ($key) + ']; } ';
it.compositeRule = $it.compositeRule = $wasComposite;
} else {
$it.schema = $aProperties;
$it.schemaPath = it.schemaPath + '.additionalProperties';
$it.errSchemaPath = it.errSchemaPath + '/additionalProperties';
$it.errorPath = it.opts._errorDataPathProperty ? it.errorPath : it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
var $code = it.validate($it);
$it.baseId = $currentBaseId;
if (it.util.varOccurences($code, $nextData) < 2) {
out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
} else {
out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
}
if ($breakOnError) {
out += ' if (!' + ($nextValid) + ') break; ';
}
}
}
it.errorPath = $currentErrorPath;
}
if ($someProperties) {
out += ' } ';
}
out += ' } ';
if ($breakOnError) {
out += ' if (' + ($nextValid) + ') { ';
$closingBraces += '}';
}
}
var $useDefaults = it.opts.useDefaults && !it.compositeRule;
if ($schemaKeys.length) {
var arr4 = $schemaKeys;
if (arr4) {
var $propertyKey, i4 = -1,
l4 = arr4.length - 1;
while (i4 < l4) {
$propertyKey = arr4[i4 += 1];
var $sch = $schema[$propertyKey];
if (it.util.schemaHasRules($sch, it.RULES.all)) {
var $prop = it.util.getProperty($propertyKey),
$passData = $data + $prop,
$hasDefault = $useDefaults && $sch.default !== undefined;
$it.schema = $sch;
$it.schemaPath = $schemaPath + $prop;
$it.errSchemaPath = $errSchemaPath + '/' + it.util.escapeFragment($propertyKey);
$it.errorPath = it.util.getPath(it.errorPath, $propertyKey, it.opts.jsonPointers);
$it.dataPathArr[$dataNxt] = it.util.toQuotedString($propertyKey);
var $code = it.validate($it);
$it.baseId = $currentBaseId;
if (it.util.varOccurences($code, $nextData) < 2) {
$code = it.util.varReplace($code, $nextData, $passData);
var $useData = $passData;
} else {
var $useData = $nextData;
out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ';
}
if ($hasDefault) {
out += ' ' + ($code) + ' ';
} else {
if ($requiredHash && $requiredHash[$propertyKey]) {
out += ' if (' + ($useData) + ' === undefined) { ' + ($nextValid) + ' = false; ';
var $currentErrorPath = it.errorPath,
$currErrSchemaPath = $errSchemaPath,
$missingProperty = it.util.escapeQuotes($propertyKey);
if (it.opts._errorDataPathProperty) {
it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers);
}
$errSchemaPath = it.errSchemaPath + '/required';
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ('required') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { missingProperty: \'' + ($missingProperty) + '\' } ';
if (it.opts.messages !== false) {
out += ' , message: \'';
if (it.opts._errorDataPathProperty) {
out += 'is a required property';
} else {
out += 'should have required property \\\'' + ($missingProperty) + '\\\'';
}
out += '\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
$errSchemaPath = $currErrSchemaPath;
it.errorPath = $currentErrorPath;
out += ' } else { ';
} else {
if ($breakOnError) {
out += ' if (' + ($useData) + ' === undefined) { ' + ($nextValid) + ' = true; } else { ';
} else {
out += ' if (' + ($useData) + ' !== undefined) { ';
}
}
out += ' ' + ($code) + ' } ';
}
}
if ($breakOnError) {
out += ' if (' + ($nextValid) + ') { ';
$closingBraces += '}';
}
}
}
}
var arr5 = $pPropertyKeys;
if (arr5) {
var $pProperty, i5 = -1,
l5 = arr5.length - 1;
while (i5 < l5) {
$pProperty = arr5[i5 += 1];
var $sch = $pProperties[$pProperty];
if (it.util.schemaHasRules($sch, it.RULES.all)) {
$it.schema = $sch;
$it.schemaPath = it.schemaPath + '.patternProperties' + it.util.getProperty($pProperty);
$it.errSchemaPath = it.errSchemaPath + '/patternProperties/' + it.util.escapeFragment($pProperty);
out += ' for (var ' + ($key) + ' in ' + ($data) + ') { ';
if ($ownProperties) {
out += ' if (!Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($key) + ')) continue; ';
}
out += ' if (' + (it.usePattern($pProperty)) + '.test(' + ($key) + ')) { ';
$it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
var $code = it.validate($it);
$it.baseId = $currentBaseId;
if (it.util.varOccurences($code, $nextData) < 2) {
out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
} else {
out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
}
if ($breakOnError) {
out += ' if (!' + ($nextValid) + ') break; ';
}
out += ' } ';
if ($breakOnError) {
out += ' else ' + ($nextValid) + ' = true; ';
}
out += ' } ';
if ($breakOnError) {
out += ' if (' + ($nextValid) + ') { ';
$closingBraces += '}';
}
}
}
}
if (it.opts.v5) {
var arr6 = $pgPropertyKeys;
if (arr6) {
var $pgProperty, i6 = -1,
l6 = arr6.length - 1;
while (i6 < l6) {
$pgProperty = arr6[i6 += 1];
var $pgSchema = $pgProperties[$pgProperty],
$sch = $pgSchema.schema;
if (it.util.schemaHasRules($sch, it.RULES.all)) {
$it.schema = $sch;
$it.schemaPath = it.schemaPath + '.patternGroups' + it.util.getProperty($pgProperty) + '.schema';
$it.errSchemaPath = it.errSchemaPath + '/patternGroups/' + it.util.escapeFragment($pgProperty) + '/schema';
out += ' var pgPropCount' + ($lvl) + ' = 0; for (var ' + ($key) + ' in ' + ($data) + ') { ';
if ($ownProperties) {
out += ' if (!Object.prototype.hasOwnProperty.call(' + ($data) + ', ' + ($key) + ')) continue; ';
}
out += ' if (' + (it.usePattern($pgProperty)) + '.test(' + ($key) + ')) { pgPropCount' + ($lvl) + '++; ';
$it.errorPath = it.util.getPathExpr(it.errorPath, $key, it.opts.jsonPointers);
var $passData = $data + '[' + $key + ']';
$it.dataPathArr[$dataNxt] = $key;
var $code = it.validate($it);
$it.baseId = $currentBaseId;
if (it.util.varOccurences($code, $nextData) < 2) {
out += ' ' + (it.util.varReplace($code, $nextData, $passData)) + ' ';
} else {
out += ' var ' + ($nextData) + ' = ' + ($passData) + '; ' + ($code) + ' ';
}
if ($breakOnError) {
out += ' if (!' + ($nextValid) + ') break; ';
}
out += ' } ';
if ($breakOnError) {
out += ' else ' + ($nextValid) + ' = true; ';
}
out += ' } ';
if ($breakOnError) {
out += ' if (' + ($nextValid) + ') { ';
$closingBraces += '}';
}
var $pgMin = $pgSchema.minimum,
$pgMax = $pgSchema.maximum;
if ($pgMin !== undefined || $pgMax !== undefined) {
out += ' var ' + ($valid) + ' = true; ';
var $currErrSchemaPath = $errSchemaPath;
if ($pgMin !== undefined) {
var $limit = $pgMin,
$reason = 'minimum',
$moreOrLess = 'less';
out += ' ' + ($valid) + ' = pgPropCount' + ($lvl) + ' >= ' + ($pgMin) + '; ';
$errSchemaPath = it.errSchemaPath + '/patternGroups/minimum';
out += ' if (!' + ($valid) + ') { ';
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ('patternGroups') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { reason: \'' + ($reason) + '\', limit: ' + ($limit) + ', pattern: \'' + (it.util.escapeQuotes($pgProperty)) + '\' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should NOT have ' + ($moreOrLess) + ' than ' + ($limit) + ' properties matching pattern "' + (it.util.escapeQuotes($pgProperty)) + '"\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
out += ' } ';
if ($pgMax !== undefined) {
out += ' else ';
}
}
if ($pgMax !== undefined) {
var $limit = $pgMax,
$reason = 'maximum',
$moreOrLess = 'more';
out += ' ' + ($valid) + ' = pgPropCount' + ($lvl) + ' <= ' + ($pgMax) + '; ';
$errSchemaPath = it.errSchemaPath + '/patternGroups/maximum';
out += ' if (!' + ($valid) + ') { ';
var $$outStack = $$outStack || [];
$$outStack.push(out);
out = ''; /* istanbul ignore else */
if (it.createErrors !== false) {
out += ' { keyword: \'' + ('patternGroups') + '\' , dataPath: (dataPath || \'\') + ' + (it.errorPath) + ' , schemaPath: ' + (it.util.toQuotedString($errSchemaPath)) + ' , params: { reason: \'' + ($reason) + '\', limit: ' + ($limit) + ', pattern: \'' + (it.util.escapeQuotes($pgProperty)) + '\' } ';
if (it.opts.messages !== false) {
out += ' , message: \'should NOT have ' + ($moreOrLess) + ' than ' + ($limit) + ' properties matching pattern "' + (it.util.escapeQuotes($pgProperty)) + '"\' ';
}
if (it.opts.verbose) {
out += ' , schema: validate.schema' + ($schemaPath) + ' , parentSchema: validate.schema' + (it.schemaPath) + ' , data: ' + ($data) + ' ';
}
out += ' } ';
} else {
out += ' {} ';
}
var __err = out;
out = $$outStack.pop();
if (!it.compositeRule && $breakOnError) { /* istanbul ignore if */
if (it.async) {
out += ' throw new ValidationError([' + (__err) + ']); ';
} else {
out += ' validate.errors = [' + (__err) + ']; return false; ';
}
} else {
out += ' var err = ' + (__err) + '; if (vErrors === null) vErrors = [err]; else vErrors.push(err); errors++; ';
}
out += ' } ';
}
$errSchemaPath = $currErrSchemaPath;
if ($breakOnError) {
out += ' if (' + ($valid) + ') { ';
$closingBraces += '}';
}
}
}
}
}
}
if ($breakOnError) {
out += ' ' + ($closingBraces) + ' if (' + ($errs) + ' == errors) {';
}
out = it.util.cleanUpCode(out);
return out;
}
| {
"pile_set_name": "Github"
} |
package io.github.privacystreams.commons.string;
import io.github.privacystreams.utils.Assertions;
/**
* Check whether the string specified by a field contains a certain substring.
*/
final class StringContainOperator extends StringProcessor<Boolean> {
private final String searchString;
StringContainOperator(String stringField, String searchString) {
super(stringField);
this.searchString = Assertions.notNull("searchString", searchString);
this.addParameters(searchString);
}
@Override
protected Boolean processString(String stringValue) {
return stringValue != null && stringValue.contains(this.searchString);
}
}
| {
"pile_set_name": "Github"
} |
# This file is part of MLDB. Copyright 2015 mldb.ai inc. All rights reserved.
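# Creating a beh.mutable dataset must be rejected when dataFileUrl has no
# protocol prefix; both a relative and an absolute protocol-less path are tried.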
try:
mldb.create_dataset({
"type": "beh.mutable", "id": "x",
"params":{
"dataFileUrl": "relative/path/without/protocol.beh"
}}).commit() #should complain about missing protocol!
request.set_return("failure")
exit()
except:
pass
try:
mldb.create_dataset({
"type": "beh.mutable", "id": "y",
"params":{
"dataFileUrl": "/asbolute/path/without/protocol.beh"
}}).commit() #should complain about missing protocol!
request.set_return("failure")
exit()
except:
pass
request.set_return("success")
| {
"pile_set_name": "Github"
} |
<!-- We need jQuery at the top of the page -->
<script src="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/npm-asset/jquery/dist/jquery.min.js"></script>
<!-- Le styles -->
<link
href="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/twbs/bootstrap/dist/css/bootstrap.min.css"
rel="stylesheet"/>
<link
    href="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/twbs/bootstrap/dist/css/bootstrap-theme.min.css" rel="stylesheet"/>
<script
src="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/twbs/bootstrap/dist/js/bootstrap.min.js"></script>
<!-- Accessibility -->
<link rel="stylesheet"
href="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/npm-asset/bootstrap-accessibility-plugin/plugins/css/bootstrap-accessibility.css">
<script
src="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/npm-asset/bootstrap-accessibility-plugin/plugins/js/bootstrap-accessibility.min.js"></script>
<!-- Fonts -->
<link rel="stylesheet"
href="<?php echo \Idno\Core\Idno::site()->config()->getStaticURL() ?>vendor/forkawesome/fork-awesome/css/fork-awesome.min.css">
<style>
body {
        padding-top: 100px; /* keep the content clear of the fixed topbar */
}
</style>
| {
"pile_set_name": "Github"
} |
//
// blocking_udp_client.cpp
// ~~~~~~~~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2017 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#include "asio/deadline_timer.hpp"
#include "asio/io_context.hpp"
#include "asio/ip/udp.hpp"
#include <cstdlib>
#include <boost/bind.hpp>
#include <boost/date_time/posix_time/posix_time_types.hpp>
#include <iostream>
using asio::deadline_timer;
using asio::ip::udp;
//----------------------------------------------------------------------
//
// This class manages socket timeouts by applying the concept of a deadline.
// Each asynchronous operation is given a deadline by which it must complete.
// Deadlines are enforced by an "actor" that persists for the lifetime of the
// client object:
//
// +----------------+
// | |
// | check_deadline |<---+
// | | |
// +----------------+ | async_wait()
// | |
// +---------+
//
// If the actor determines that the deadline has expired, any outstanding
// socket operations are cancelled. The socket operations themselves are
// implemented as transient actors:
//
// +---------------+
// | |
// | receive |
// | |
// +---------------+
// |
// async_- | +----------------+
// receive() | | |
// +--->| handle_receive |
// | |
// +----------------+
//
// The client object runs the io_context to block thread execution until the
// actor completes.
//
class client
{
public:
client(const udp::endpoint& listen_endpoint)
: socket_(io_context_, listen_endpoint),
deadline_(io_context_)
{
// No deadline is required until the first socket operation is started. We
// set the deadline to positive infinity so that the actor takes no action
// until a specific deadline is set.
deadline_.expires_at(boost::posix_time::pos_infin);
// Start the persistent actor that checks for deadline expiry.
check_deadline();
}
std::size_t receive(const asio::mutable_buffer& buffer,
boost::posix_time::time_duration timeout, asio::error_code& ec)
{
// Set a deadline for the asynchronous operation.
deadline_.expires_from_now(timeout);
// Set up the variables that receive the result of the asynchronous
// operation. The error code is set to would_block to signal that the
// operation is incomplete. Asio guarantees that its asynchronous
// operations will never fail with would_block, so any other value in
// ec indicates completion.
ec = asio::error::would_block;
std::size_t length = 0;
// Start the asynchronous operation itself. The handle_receive function
// used as a callback will update the ec and length variables.
socket_.async_receive(asio::buffer(buffer),
boost::bind(&client::handle_receive, _1, _2, &ec, &length));
// Block until the asynchronous operation has completed.
do io_context_.run_one(); while (ec == asio::error::would_block);
return length;
}
private:
void check_deadline()
{
// Check whether the deadline has passed. We compare the deadline against
// the current time since a new asynchronous operation may have moved the
// deadline before this actor had a chance to run.
if (deadline_.expires_at() <= deadline_timer::traits_type::now())
{
// The deadline has passed. The outstanding asynchronous operation needs
// to be cancelled so that the blocked receive() function will return.
//
// Please note that cancel() has portability issues on some versions of
// Microsoft Windows, and it may be necessary to use close() instead.
// Consult the documentation for cancel() for further information.
socket_.cancel();
// There is no longer an active deadline. The expiry is set to positive
// infinity so that the actor takes no action until a new deadline is set.
deadline_.expires_at(boost::posix_time::pos_infin);
}
// Put the actor back to sleep.
deadline_.async_wait(boost::bind(&client::check_deadline, this));
}
static void handle_receive(
const asio::error_code& ec, std::size_t length,
asio::error_code* out_ec, std::size_t* out_length)
{
*out_ec = ec;
*out_length = length;
}
private:
asio::io_context io_context_;
udp::socket socket_;
deadline_timer deadline_;
};
//----------------------------------------------------------------------
int main(int argc, char* argv[])
{
try
{
using namespace std; // For atoi.
if (argc != 3)
{
std::cerr << "Usage: blocking_udp_timeout <listen_addr> <listen_port>\n";
return 1;
}
udp::endpoint listen_endpoint(
asio::ip::make_address(argv[1]),
std::atoi(argv[2]));
client c(listen_endpoint);
for (;;)
{
char data[1024];
asio::error_code ec;
std::size_t n = c.receive(asio::buffer(data),
boost::posix_time::seconds(10), ec);
if (ec)
{
std::cout << "Receive error: " << ec.message() << "\n";
}
else
{
std::cout << "Received: ";
std::cout.write(data, n);
std::cout << "\n";
}
}
}
catch (std::exception& e)
{
std::cerr << "Exception: " << e.what() << "\n";
}
return 0;
}
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2014 <a href="http://www.gutgames.com">James Craig</a>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.*/
using System.Configuration;
using Utilities.ORM.Interfaces;
namespace Utilities.ORM.Manager.SourceProvider
{
/// <summary>
/// Default database object
/// </summary>
public class DefaultDatabase : IDatabase
{
/// <summary>
        /// Should the database be audited
/// </summary>
public bool Audit
{
get { return false; }
}
/// <summary>
/// The name of the connection string
/// </summary>
public string Name
{
get { return ConfigurationManager.ConnectionStrings[0].Name; }
}
/// <summary>
/// Order of the database (used when running commands to save/select objects)
/// </summary>
public int Order
{
get { return 0; }
}
/// <summary>
/// Is this readable?
/// </summary>
public bool Readable
{
get { return true; }
}
/// <summary>
/// Should we update the database
/// </summary>
public bool Update
{
get { return false; }
}
/// <summary>
/// Is this writable?
/// </summary>
public bool Writable
{
get { return true; }
}
}
} | {
"pile_set_name": "Github"
} |
// This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2014 Benoit Steiner <[email protected]>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#ifndef EIGEN_CXX11_TENSOR_TENSOR_INITIALIZER_H
#define EIGEN_CXX11_TENSOR_TENSOR_INITIALIZER_H
#if EIGEN_HAS_VARIADIC_TEMPLATES
#include <initializer_list>
namespace Eigen {
/** \class TensorInitializer
* \ingroup CXX11_Tensor_Module
*
* \brief Helper template to initialize Tensors from std::initializer_lists.
*/
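// Rough usage sketch (assuming a fixed-rank Tensor; its setValues() call is
// what ends up in initialize_tensor() below):
//
//   Eigen::Tensor<int, 2> t(2, 3);
//   t.setValues({{1, 2, 3},
//                {4, 5, 6}});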
namespace internal {
template <typename Derived, int N>
struct Initializer {
typedef std::initializer_list<
typename Initializer<Derived, N - 1>::InitList> InitList;
static void run(TensorEvaluator<Derived, DefaultDevice>& tensor,
Eigen::array<typename traits<Derived>::Index, traits<Derived>::NumDimensions>* indices,
const InitList& vals) {
int i = 0;
for (auto v : vals) {
(*indices)[traits<Derived>::NumDimensions - N] = i++;
Initializer<Derived, N - 1>::run(tensor, indices, v);
}
}
};
template <typename Derived>
struct Initializer<Derived, 1> {
typedef std::initializer_list<typename traits<Derived>::Scalar> InitList;
static void run(TensorEvaluator<Derived, DefaultDevice>& tensor,
Eigen::array<typename traits<Derived>::Index, traits<Derived>::NumDimensions>* indices,
const InitList& vals) {
int i = 0;
// There is likely a faster way to do that than iterating.
for (auto v : vals) {
(*indices)[traits<Derived>::NumDimensions - 1] = i++;
tensor.coeffRef(*indices) = v;
}
}
};
template <typename Derived>
struct Initializer<Derived, 0> {
typedef typename traits<Derived>::Scalar InitList;
static void run(TensorEvaluator<Derived, DefaultDevice>& tensor,
Eigen::array<typename traits<Derived>::Index, traits<Derived>::NumDimensions>*,
const InitList& v) {
tensor.coeffRef(0) = v;
}
};
template <typename Derived, int N>
void initialize_tensor(TensorEvaluator<Derived, DefaultDevice>& tensor,
const typename Initializer<Derived, traits<Derived>::NumDimensions>::InitList& vals) {
Eigen::array<typename traits<Derived>::Index, traits<Derived>::NumDimensions> indices;
Initializer<Derived, traits<Derived>::NumDimensions>::run(tensor, &indices, vals);
}
} // namespace internal
} // namespace Eigen
#endif // EIGEN_HAS_VARIADIC_TEMPLATES
#endif // EIGEN_CXX11_TENSOR_TENSOR_INITIALIZER_H
| {
"pile_set_name": "Github"
} |
/*
* mish_priv_cmd.h
*
* Copyright (C) 2020 Michel Pollet <[email protected]>
*
* SPDX-License-Identifier: Apache-2.0
*/
#ifndef LIBMISH_SRC_MISH_PRIV_CMD_H_
#define LIBMISH_SRC_MISH_PRIV_CMD_H_
/*
 * I decided that the command list wouldn't be attached to the mish_t.
 *
 * In any other API I would, for consistency's sake, but here I'm more
 * interested in having a convenient way to add commands, regardless of the
 * state of mish_t, and in having macros that register them before main() is
 * called and that sort of thing.
 *
 * In the same vein, I also don't provide a way to remove a command; I don't
 * think it's terribly necessary at the minute.
*/
#include "bsd_queue.h"
typedef void (*mish_cmd_handler_p)(
void * param,
int argc,
const char *argv[]);
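/*
 * A handler matching mish_cmd_handler_p would look like this (hypothetical
 * example, assuming <stdio.h>):
 *
 *   static void hello_cmd(void *param, int argc, const char *argv[]) {
 *       printf("hello %s\n", argc > 1 ? argv[1] : "world");
 *   }
 */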
#define _LIBMISH_HAS_CMD_HANDLER_
int
mish_cmd_call(
const char * cmd_line,
void * c);
#endif /* LIBMISH_SRC_MISH_PRIV_CMD_H_ */
| {
"pile_set_name": "Github"
} |
version https://git-lfs.github.com/spec/v1
oid sha256:a743f0d9d443367c206c530202194fd773ac922701516efe4a23f736bcbf2f02
size 682
| {
"pile_set_name": "Github"
} |
(ns reagent.dom
(:require [cljsjs.react.dom]
[reagent.impl.util :as util]
[reagent.impl.template :as tmpl]
[reagent.impl.batching :as batch]
[reagent.ratom :as ratom]
[reagent.debug :refer-macros [dbg]]
[reagent.interop :refer-macros [$ $!]]))
(defonce ^:private imported nil)
(defn module []
(cond
(some? imported) imported
(exists? js/ReactDOM) (set! imported js/ReactDOM)
(exists? js/require) (or (set! imported (js/require "react-dom"))
(throw (js/Error. "require('react-dom') failed")))
:else
(throw (js/Error. "js/ReactDOM is missing"))))
(defonce ^:private roots (atom {}))
(defn- unmount-comp [container]
(swap! roots dissoc container)
($ (module) unmountComponentAtNode container))
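;; Render `comp` into `container`, forcing every component to update on this
;; initial pass (util/*always-update* is bound to true), then flush queued
;; after-render work and remember the root so force-update-all can re-render it.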
(defn- render-comp [comp container callback]
(binding [util/*always-update* true]
(->> ($ (module) render (comp) container
(fn []
(binding [util/*always-update* false]
(swap! roots assoc container [comp container])
(batch/flush-after-render)
(if (some? callback)
(callback))))))))
(defn- re-render-component [comp container]
(render-comp comp container nil))
(defn render
"Render a Reagent component into the DOM. The first argument may be
either a vector (using Reagent's Hiccup syntax), or a React element. The second argument should be a DOM node.
Optionally takes a callback that is called when the component is in place.
Returns the mounted component instance."
([comp container]
(render comp container nil))
([comp container callback]
(ratom/flush!)
(let [f (fn []
(tmpl/as-element (if (fn? comp) (comp) comp)))]
(render-comp f container callback))))
(defn unmount-component-at-node [container]
(unmount-comp container))
(defn dom-node
"Returns the root DOM node of a mounted component."
[this]
($ (module) findDOMNode this))
(set! tmpl/find-dom-node dom-node)
(defn force-update-all
"Force re-rendering of all mounted Reagent components. This is
probably only useful in a development environment, when you want to
update components in response to some dynamic changes to code.
Note that force-update-all may not update root components. This
happens if a component 'foo' is mounted with `(render [foo])` (since
functions are passed by value, and not by reference, in
ClojureScript). To get around this you'll have to introduce a layer
of indirection, for example by using `(render [#'foo])` instead."
[]
(ratom/flush!)
(doseq [v (vals @roots)]
(apply re-render-component v))
"Updated")
| {
"pile_set_name": "Github"
} |
#!/usr/bin/perl
# (C) Maxim Dounin
# Tests for gunzip filter module.
###############################################################################
use warnings;
use strict;
use Test::More;
BEGIN { use FindBin; chdir($FindBin::Bin); }
use lib 'lib';
use Test::Nginx qw/ :DEFAULT :gzip /;
###############################################################################
select STDERR; $| = 1;
select STDOUT; $| = 1;
eval { require IO::Compress::Gzip; };
Test::More::plan(skip_all => "IO::Compress::Gzip not found") if $@;
my $t = Test::Nginx->new()->has(qw/http gunzip proxy gzip_static/)->plan(13);
$t->write_file_expand('nginx.conf', <<'EOF');
%%TEST_GLOBALS%%
daemon off;
events {
}
http {
%%TEST_GLOBALS_HTTP%%
server {
listen 127.0.0.1:8080;
server_name localhost;
location / {
gunzip on;
gzip_vary on;
proxy_pass http://127.0.0.1:8081/;
proxy_set_header Accept-Encoding gzip;
}
location /error {
error_page 500 /t1;
return 500;
}
}
server {
listen 127.0.0.1:8081;
server_name localhost;
location / {
default_type text/plain;
gzip_static on;
gzip_http_version 1.0;
gzip_types text/plain;
}
}
}
EOF
my $in = join('', map { sprintf "X%03dXXXXXX", $_ } (0 .. 99));
my $out;
IO::Compress::Gzip::gzip(\$in => \$out);
$t->write_file('t1.gz', $out);
$t->write_file('t2.gz', $out . $out);
$t->write_file('t3', 'not compressed');
my $emptyin = '';
my $emptyout;
IO::Compress::Gzip::gzip(\$emptyin => \$emptyout);
$t->write_file('empty.gz', $emptyout);
$t->run();
###############################################################################
pass('runs');
my $r = http_get('/t1');
unlike($r, qr/Content-Encoding/, 'no content encoding');
like($r, qr/^(X\d\d\dXXXXXX){100}$/m, 'correct gunzipped response');
$r = http_gzip_request('/t1');
like($r, qr/Content-Encoding: gzip/, 'gzip still works - encoding');
like($r, qr/\Q$out\E/, 'gzip still works - content');
like(http_get('/t2'), qr/^(X\d\d\dXXXXXX){200}$/m, 'multiple gzip members');
like(http_get('/error'), qr/^(X\d\d\dXXXXXX){100}$/m, 'errors gunzipped');
unlike(http_head('/t1'), qr/Content-Encoding/, 'head - no content encoding');
like(http_get('/t1'), qr/Vary/, 'get vary');
like(http_head('/t1'), qr/Vary/, 'head vary');
unlike(http_get('/t3'), qr/Vary/, 'no vary on non-gzipped get');
unlike(http_head('/t3'), qr/Vary/, 'no vary on non-gzipped head');
like(http_get('/empty'), qr/ 200 /, 'gunzip empty');
###############################################################################
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2003-2011, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
.cke_skin_kama
{
display: block;
}
/* Main editor only settings. */
span.cke_skin_kama
{
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
border-radius: 5px;
border: 1px solid #D3D3D3;
padding: 5px;
}
.cke_skin_kama span.cke_browser_webkit,
.cke_skin_kama span.cke_browser_gecko18
{
display: block;
}
.cke_skin_kama .cke_wrapper
{
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
border-radius: 5px;
background-color: #d3d3d3;
background-image: url(images/sprites.png);
background-repeat: repeat-x;
background-position: 0 -1950px;
display: block;
	/* IE Quirks: the editor chrome overflows horizontally without an explicit width. */
_display: inline-block;
padding: 5px;
/*background-color: Red;*/
}
.cke_shared .cke_skin_kama .cke_wrapper
{
padding-bottom: 0;
}
.cke_skin_kama .cke_browser_ie6 .cke_wrapper,
.cke_skin_kama .cke_browser_iequirks .cke_wrapper
{
background-image: none;
}
.cke_skin_kama .cke_editor
{
display: inline-table;
width: 100%;
}
.cke_skin_kama .cke_browser_webkit .cke_editor
{
display: table; /* #6684 */
}
.cke_skin_kama .ltr .cke_browser_ie iframe
{
margin-right: -10px;
}
.cke_skin_kama .rtl .cke_browser_ie iframe
{
margin-left: -10px;
}
.cke_skin_kama .cke_browser_opera .cke_editor.cke_skin_kama .cke_resizer
{
display: table;
}
.cke_skin_kama .cke_contents
{
margin: 5px;
}
.cke_skin_kama .cke_hc .cke_contents
{
border: 1px solid black;
}
.cke_skin_kama .cke_contents iframe
{
background-color: #fff;
}
.cke_skin_kama .cke_focus
{
outline: auto 5px -webkit-focus-ring-color;
}
.cke_skin_kama textarea.cke_source
{
font-family: 'Courier New' , Monospace;
font-size: small;
background-color: #fff;
white-space: pre;
}
.cke_skin_kama .cke_browser_iequirks textarea.cke_source
{
/* For IE6+Quirks only */
_white-space: normal;
}
.cke_skin_kama .cke_resizer
{
width: 12px;
height: 12px;
margin-top: 9px;
display: block;
float: right;
/* resizer.gif*/
background-image: url(images/sprites.png);
_background-image: url(images/sprites_ie6.png);
background-position: 0 -1428px;
background-repeat: no-repeat;
cursor: se-resize;
}
/* Adobe AIR doesn't support *-resize cursor shape. */
.cke_skin_kama .cke_browser_air .cke_resizer,
.cke_skin_kama .cke_browser_air .cke_rtl .cke_resizer
{
cursor: move;
}
.cke_skin_kama .cke_resizer_rtl
{
cursor: sw-resize;
/* resizer_rtl.gif*/
background-position: 0 -1455px;
float: left;
}
.cke_skin_kama .cke_resizer_horizontal,
.cke_skin_kama .cke_rtl .cke_resizer_horizontal
{
cursor: e-resize;
}
.cke_skin_kama .cke_resizer_vertical,
.cke_skin_kama .cke_rtl .cke_resizer_vertical
{
cursor: n-resize;
}
.cke_skin_kama .cke_maximized .cke_resizer
{
display: none;
}
.cke_skin_kama .cke_browser_ie6 .cke_contents textarea,
.cke_skin_kama .cke_browser_ie7 .cke_contents textarea
{
position: absolute;
}
.cke_skin_kama .cke_browser_ie.cke_browser_quirks .cke_contents iframe
{
position: absolute;
top: 0;
}
.cke_skin_kama .cke_browser_ie6 .cke_editor,
.cke_skin_kama .cke_browser_ie7 .cke_editor
{
display: inline-block;
}
.cke_skin_kama .cke_browser_ie6 .cke_editor,
.cke_shared .cke_skin_kama .cke_browser_ie7 .cke_wrapper
{
padding-bottom: 5px;
}
/* All voice labels are not displayed. */
.cke_skin_kama .cke_voice_label
{
display: none;
}
.cke_skin_kama legend.cke_voice_label
{
display: none;
}
.cke_skin_kama .cke_browser_ie legend.cke_voice_label
{
position: absolute;
display: block;
width: 0;
height: 0;
overflow: hidden;
}
| {
"pile_set_name": "Github"
} |
import rostest
import rospy
import numpy
import unittest
import sys
from tf import Transformer
import tf_conversions.posemath as pm
from geometry_msgs.msg import TransformStamped
from PyKDL import Frame
class TestPoseMath(unittest.TestCase):
def setUp(self):
pass
def test_fromTf(self):
transformer = Transformer(True, rospy.Duration(10.0))
m = TransformStamped()
m.header.frame_id = 'wim'
m.child_frame_id = 'james'
m.transform.translation.x = 2.71828183
m.transform.rotation.w = 1.0
transformer.setTransform(m)
b = pm.fromTf(transformer.lookupTransform('wim', 'james', rospy.Time(0)))
def test_roundtrip(self):
c = Frame()
d = pm.fromMsg(pm.toMsg(c))
self.assertEqual(repr(c), repr(d))
d = pm.fromMatrix(pm.toMatrix(c))
self.assertEqual(repr(c), repr(d))
d = pm.fromTf(pm.toTf(c))
self.assertEqual(repr(c), repr(d))
if __name__ == '__main__':
if len(sys.argv) == 1 or sys.argv[1].startswith('--gtest_output'):
rostest.unitrun('tf', 'directed', TestPoseMath)
else:
suite = unittest.TestSuite()
suite.addTest(TestPoseMath(sys.argv[1]))
unittest.TextTestRunner(verbosity=2).run(suite)
| {
"pile_set_name": "Github"
} |
<?php
/* vim: set expandtab tabstop=4 shiftwidth=4: */
/**
* Contains the Calendar_Factory class
*
* PHP versions 4 and 5
*
* LICENSE: Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE FREEBSD PROJECT OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @category Date and Time
* @package Calendar
* @author Harry Fuecks <[email protected]>
* @author Lorenzo Alberton <[email protected]>
* @copyright 2003-2007 Harry Fuecks, Lorenzo Alberton
* @license http://www.debian.org/misc/bsd.license BSD License (3 Clause)
* @version CVS: $Id: Factory.php 246404 2007-11-18 21:46:43Z quipo $
* @link http://pear.php.net/package/Calendar
*/
/**
* Allows Calendar include path to be redefined
* @ignore
*/
if (!defined('CALENDAR_ROOT')) {
define('CALENDAR_ROOT', 'Calendar'.DIRECTORY_SEPARATOR);
}
/**
* Load Calendar base class
*/
require_once CALENDAR_ROOT.'Calendar.php';
/**
* Contains a factory method to return a Singleton instance of a class
* implementing the Calendar_Engine_Interface.<br>
* For Month objects, to control type of month returned, use CALENDAR_MONTH_STATE
 * constant, e.g.:
* <code>
* require_once 'Calendar/Factory.php';
* define ('CALENDAR_MONTH_STATE',CALENDAR_USE_MONTH_WEEKDAYS); // Use Calendar_Month_Weekdays
* // define ('CALENDAR_MONTH_STATE',CALENDAR_USE_MONTH_WEEKS); // Use Calendar_Month_Weeks
* // define ('CALENDAR_MONTH_STATE',CALENDAR_USE_MONTH); // Use Calendar_Month
* </code>
* It defaults to building Calendar_Month objects.<br>
 * Use the constant CALENDAR_FIRST_DAY_OF_WEEK to control the first day of the week
* for Month or Week objects (e.g. 0 = Sunday, 6 = Saturday)
*
* @category Date and Time
* @package Calendar
* @author Harry Fuecks <[email protected]>
* @author Lorenzo Alberton <[email protected]>
* @copyright 2003-2007 Harry Fuecks, Lorenzo Alberton
* @license http://www.debian.org/misc/bsd.license BSD License (3 Clause)
* @link http://pear.php.net/package/Calendar
* @access protected
*/
class Calendar_Factory
{
/**
* Creates a calendar object given the type and units
*
* @param string $type class of calendar object to create
* @param int $y year
* @param int $m month
* @param int $d day
* @param int $h hour
* @param int $i minute
* @param int $s second
*
* @return object subclass of Calendar
* @access public
* @static
*/
function create($type, $y = 2000, $m = 1, $d = 1, $h = 0, $i = 0, $s = 0)
{
$firstDay = defined('CALENDAR_FIRST_DAY_OF_WEEK') ? CALENDAR_FIRST_DAY_OF_WEEK : 1;
switch ($type) {
case 'Day':
include_once CALENDAR_ROOT.'Day.php';
return new Calendar_Day($y, $m, $d);
case 'Month':
// Set default state for which month type to build
if (!defined('CALENDAR_MONTH_STATE')) {
define('CALENDAR_MONTH_STATE', CALENDAR_USE_MONTH);
}
switch (CALENDAR_MONTH_STATE) {
case CALENDAR_USE_MONTH_WEEKDAYS:
include_once CALENDAR_ROOT.'Month/Weekdays.php';
$class = 'Calendar_Month_Weekdays';
break;
case CALENDAR_USE_MONTH_WEEKS:
include_once CALENDAR_ROOT.'Month/Weeks.php';
$class = 'Calendar_Month_Weeks';
break;
case CALENDAR_USE_MONTH:
default:
include_once CALENDAR_ROOT.'Month.php';
$class = 'Calendar_Month';
break;
}
return new $class($y, $m, $firstDay);
case 'Week':
include_once CALENDAR_ROOT.'Week.php';
return new Calendar_Week($y, $m, $d, $firstDay);
case 'Hour':
include_once CALENDAR_ROOT.'Hour.php';
return new Calendar_Hour($y, $m, $d, $h);
case 'Minute':
include_once CALENDAR_ROOT.'Minute.php';
return new Calendar_Minute($y, $m, $d, $h, $i);
case 'Second':
include_once CALENDAR_ROOT.'Second.php';
return new Calendar_Second($y, $m, $d, $h, $i, $s);
case 'Year':
include_once CALENDAR_ROOT.'Year.php';
return new Calendar_Year($y);
default:
include_once 'PEAR.php';
PEAR::raiseError('Calendar_Factory::create() unrecognised type: '.$type,
null, PEAR_ERROR_TRIGGER, E_USER_NOTICE, 'Calendar_Factory::create()');
return false;
}
}
/**
* Creates an instance of a calendar object, given a type and timestamp
*
* @param string $type type of object to create
* @param mixed $stamp timestamp (depending on Calendar engine being used)
*
* @return object subclass of Calendar
* @access public
* @static
*/
function & createByTimestamp($type, $stamp)
{
$cE = & Calendar_Engine_Factory::getEngine();
$y = $cE->stampToYear($stamp);
$m = $cE->stampToMonth($stamp);
$d = $cE->stampToDay($stamp);
$h = $cE->stampToHour($stamp);
$i = $cE->stampToMinute($stamp);
$s = $cE->stampToSecond($stamp);
$cal = Calendar_Factory::create($type, $y, $m, $d, $h, $i, $s);
return $cal;
}
}
?> | {
"pile_set_name": "Github"
} |
/* ******************************************************************************
* Copyright (c) 2006-2012 XMind Ltd. and others.
*
* This file is a part of XMind 3. XMind releases 3 and
* above are dual-licensed under the Eclipse Public License (EPL),
* which is available at http://www.eclipse.org/legal/epl-v10.html
* and the GNU Lesser General Public License (LGPL),
* which is available at http://www.gnu.org/licenses/lgpl.html
* See https://www.xmind.net/license.html for details.
*
* Contributors:
* XMind Ltd. - initial API and implementation
*******************************************************************************/
package org.xmind.core.internal.dom;
import java.util.ArrayList;
import java.util.List;
import org.xmind.core.ISheet;
import org.xmind.core.ITopic;
import org.xmind.core.IWorkbook;
import org.xmind.core.internal.TopicPath;
/**
* @author briansun
*
*/
public class TopicPathImpl extends TopicPath {
private ITopic topic;
/**
*
*/
public TopicPathImpl(ITopic topic) {
this.topic = topic;
toList();
}
protected List<Object> createPathEntries() {
List<Object> entries = new ArrayList<Object>();
ITopic t = topic;
ITopic parent = t.getParent();
while (parent != null) {
entries.add(0, t);
t = parent;
parent = t.getParent();
}
entries.add(0, t);
if (t != null && t.isRoot()) {
ISheet sheet = t.getOwnedSheet();
if (sheet != null) {
entries.add(0, sheet);
IWorkbook workbook = sheet.getParent();
if (workbook != null) {
entries.add(0, workbook);
}
}
}
return entries;
}
} | {
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2012, Ben Fortuna
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* o Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* o Neither the name of Ben Fortuna nor the names of any other contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.fortuna.ical4j.model.property;
import junit.framework.TestCase;
import net.fortuna.ical4j.data.CalendarBuilder;
import net.fortuna.ical4j.model.*;
import net.fortuna.ical4j.model.component.VEvent;
import net.fortuna.ical4j.model.component.VTimeZone;
import net.fortuna.ical4j.util.CompatibilityHints;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.text.ParseException;
import java.util.List;
/**
* $Id$
*
* Created on 10/12/2005
*
* Unit tests for the ExDate property.
* @author Ben Fortuna
*/
public class ExDateTest extends TestCase {
private static Logger LOG = LoggerFactory.getLogger(ExDateTest.class);
/* (non-Javadoc)
* @see junit.framework.TestCase#setUp()
*/
protected void setUp() throws Exception {
CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_UNFOLDING, true);
CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, false);
}
/* (non-Javadoc)
* @see junit.framework.TestCase#tearDown()
*/
protected void tearDown() throws Exception {
CompatibilityHints.clearHintEnabled(CompatibilityHints.KEY_RELAXED_UNFOLDING);
CompatibilityHints.clearHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING);
}
/**
* Ensure timezones are correctly parsed for this property.
* @throws Exception
*/
public void testTimeZones() throws Exception {
CalendarBuilder builder = new CalendarBuilder();
Calendar calendar = builder.build(getClass().getResourceAsStream("/samples/valid/EXDATE.ics"));
Component event = calendar.getComponent(Component.VEVENT);
List<ExDate> exdates = event.getProperties(Property.EXDATE);
for (ExDate exDate : exdates) {
assertNotNull("This EXDATE should have a timezone", exDate.getDates().getTimeZone());
}
}
public void testDstOnlyVTimeZones() throws Exception {
CalendarBuilder builder = new CalendarBuilder();
Calendar ical = builder.build(getClass().getResourceAsStream("/samples/valid/dst-only-vtimezone.ics"));
VTimeZone vTZ = (VTimeZone) ical.getComponent(VTimeZone.VTIMEZONE);
String id = vTZ.getTimeZoneId().getValue();
assertEquals("Europe/Berlin", id);
assertEquals(vTZ.getObservances().get(0), vTZ.getApplicableObservance(new Date("20180403")));
VEvent vEvent = (VEvent) ical.getComponent(VEvent.VEVENT);
DtStart start = vEvent.getStartDate();
assertEquals(vTZ, start.getTimeZone().getVTimeZone());
assertEquals(1522738800000L, start.getDate().getTime());
}
public void testShouldPreserveUtcTimezoneForExDate() throws Exception {
CalendarBuilder builder = new CalendarBuilder();
Calendar calendar = builder.build(getClass().getResourceAsStream("/samples/valid/EXDATE-IN-UTC.ics"));
Component event = calendar.getComponent(Component.VEVENT);
List<ExDate> exdates = event.getProperties(Property.EXDATE);
for (ExDate exDate : exdates) {
for (Date dateEx : exDate.getDates()) {
DateTime dateTimeEx = (DateTime) dateEx;
assertNotNull(dateTimeEx);
assertTrue("This exception date should be in UTC", dateTimeEx.isUtc());
}
}
}
/**
* Allow date values by default if relaxed parsing enabled.
*/
public void testRelaxedParsing() throws ParseException {
try {
new ExDate(new ParameterList(), "20080315");
fail("Should throw ParseException");
} catch (ParseException pe) {
LOG.trace("Caught exception: " + pe.getMessage());
}
CompatibilityHints.setHintEnabled(CompatibilityHints.KEY_RELAXED_PARSING, true);
new ExDate(new ParameterList(), "20080315");
}
}
| {
"pile_set_name": "Github"
} |
const { existsSync } = require('fs')
module.exports = function() {
if (!existsSync('_config.yml')) {
return false
}
return {
framework: 'jekyll',
frameworkPort: 4000,
command: 'bundle',
possibleArgsArrs: [['exec', 'jekyll', 'serve', '-w']],
dist: '_site',
}
}
| {
"pile_set_name": "Github"
} |
//==- DIAEnumSourceFiles.cpp - DIA Source File Enumerator impl ---*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "llvm/DebugInfo/PDB/DIA/DIAEnumInjectedSources.h"
#include "llvm/DebugInfo/PDB/DIA/DIAInjectedSource.h"
#include "llvm/DebugInfo/PDB/PDBSymbol.h"
using namespace llvm;
using namespace llvm::pdb;
DIAEnumInjectedSources::DIAEnumInjectedSources(
CComPtr<IDiaEnumInjectedSources> DiaEnumerator)
: Enumerator(DiaEnumerator) {}
uint32_t DIAEnumInjectedSources::getChildCount() const {
LONG Count = 0;
return (S_OK == Enumerator->get_Count(&Count)) ? Count : 0;
}
std::unique_ptr<IPDBInjectedSource>
DIAEnumInjectedSources::getChildAtIndex(uint32_t Index) const {
CComPtr<IDiaInjectedSource> Item;
if (S_OK != Enumerator->Item(Index, &Item))
return nullptr;
return std::unique_ptr<IPDBInjectedSource>(new DIAInjectedSource(Item));
}
std::unique_ptr<IPDBInjectedSource> DIAEnumInjectedSources::getNext() {
CComPtr<IDiaInjectedSource> Item;
ULONG NumFetched = 0;
if (S_OK != Enumerator->Next(1, &Item, &NumFetched))
return nullptr;
return std::unique_ptr<IPDBInjectedSource>(new DIAInjectedSource(Item));
}
void DIAEnumInjectedSources::reset() { Enumerator->Reset(); }
| {
"pile_set_name": "Github"
} |
# KBD6x
A WKL Hot Swap Double USB C 60%
Keyboard Maintainer: [MechMerlin](https://github.com/mechmerlin)
Hardware Supported: KBD6x PCB
Hardware Availability: [KBDFans](https://kbdfans.cn/products/kbd6x-wkl-hot-swap-60-double-type-c-pcb)
Make example for this keyboard (after setting up your build environment):
make kbdfans/kbd6x:default
See the [build environment setup](https://docs.qmk.fm/#/getting_started_build_tools) and the [make instructions](https://docs.qmk.fm/#/getting_started_make_guide) for more information. Brand new to QMK? Start with our [Complete Newbs Guide](https://docs.qmk.fm/#/newbs).
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard.
//
#pragma mark -
//
// File: /Applications/Xcode-7GM.app/Contents/Developer/Platforms/WatchSimulator.platform/Developer/SDKs/WatchSimulator.sdk/System/Library/PrivateFrameworks/CoreCDP.framework/CoreCDP
// UUID: 132A51B4-8C05-3DDA-90A7-94975D7E3851
//
// Arch: i386
// Current version: 1.0.0
// Compatibility version: 1.0.0
// Source version: 19.0.0.0.0
//
//
// This file does not contain any Objective-C runtime information.
//
| {
"pile_set_name": "Github"
} |
<html xmlns:f="http://typo3.org/ns/TYPO3/CMS/Fluid/ViewHelpers" xmlns:formvh="http://typo3.org/ns/TYPO3/CMS/Form/ViewHelpers" data-namespace-typo3-fluid="true">
<formvh:renderRenderable renderable="{element}">
<f:render partial="Field/Field" arguments="{element: element}" contentAs="elementContent">
<div id="{element.uniqueIdentifier}" class="form-check-list">
<f:for each="{element.properties.options}" as="label" key="value" iteration="idIterator">
<div class="custom-control custom-radio">
<f:form.radio
property="{element.identifier}"
id="{element.uniqueIdentifier}-{idIterator.index}"
class="{element.properties.elementClassAttribute} form-check-input"
value="{value}"
errorClass="{element.properties.elementErrorClassAttribute}"
additionalAttributes="{formvh:translateElementProperty(element: element, property: 'fluidAdditionalAttributes')}"
/>
<label class="custom-control-label" for="{element.uniqueIdentifier}-{idIterator.index}">
<span>{formvh:translateElementProperty(element: element, property: '{0: \'options\', 1: value}')}</span>
</label>
</div>
</f:for>
</div>
</f:render>
</formvh:renderRenderable>
</html>
| {
"pile_set_name": "Github"
} |
# -*- mode: makefile -*-
#
# Copyright (c) 2012, Joyent, Inc. All rights reserved.
#
# Makefile.deps: Makefile for including common tools as dependencies
#
# NOTE: This makefile comes from the "eng" repo. It's designed to be dropped
# into other repos as-is without requiring any modifications. If you find
# yourself changing this file, you should instead update the original copy in
# eng.git and then update your repo to use the new version.
#
# This file is separate from Makefile.targ so that teams can choose
# independently whether to use the common targets in Makefile.targ and the
# common tools here.
#
#
# javascriptlint
#
JSL_EXEC ?= deps/javascriptlint/build/install/jsl
JSL ?= python2.6 $(JSL_EXEC)
$(JSL_EXEC): | deps/javascriptlint/.git
cd deps/javascriptlint && make install
#
# jsstyle
#
JSSTYLE_EXEC ?= deps/jsstyle/jsstyle
JSSTYLE ?= $(JSSTYLE_EXEC)
$(JSSTYLE_EXEC): | deps/jsstyle/.git
#
# restdown
#
RESTDOWN_EXEC ?= deps/restdown/bin/restdown
RESTDOWN ?= python2.6 $(RESTDOWN_EXEC)
$(RESTDOWN_EXEC): | deps/restdown/.git
| {
"pile_set_name": "Github"
} |
package com.deliveredtechnologies.rulebook.lang;
import com.deliveredtechnologies.rulebook.model.Auditor;
import com.deliveredtechnologies.rulebook.model.Rule;
import com.deliveredtechnologies.rulebook.model.RuleBook;
import com.deliveredtechnologies.rulebook.model.RuleBookAuditor;
import com.deliveredtechnologies.rulebook.model.rulechain.cor.CoRRuleBook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.function.Consumer;
/**
* The initial builder used to build a RuleBook.
*/
public class RuleBookBuilder<T> implements TerminatingRuleBookBuilder<T> {
private static Logger LOGGER = LoggerFactory.getLogger(RuleBookBuilder.class);
private RuleBook<T> _ruleBook;
private Class<? extends RuleBook> _ruleBookClass;
private Class<?> _resultType = Object.class;
/**
* Returns a new RuleBookBuilder for the default RuleBook type.
* @return a new RuleBookBuilder
*/
public static RuleBookBuilder<Object> create() {
return new RuleBookBuilder<Object>(CoRRuleBook.class);
}
/**
* Returns a new RuleBookBuilder using the supplied RuleBook class.
* @param ruleBookClass a RuleBook class to be used in the builder
* @return a new RuleBookBuilder
*/
public static RuleBookBuilder<Object> create(Class<? extends RuleBook> ruleBookClass) {
return new RuleBookBuilder<Object>(ruleBookClass);
}
private RuleBookBuilder(Class<? extends RuleBook> ruleBookClass) {
_ruleBookClass = ruleBookClass;
}
@SuppressWarnings("unchecked")
private RuleBookBuilder(RuleBookBuilder ruleBookBuilder) {
_resultType = ruleBookBuilder._resultType;
_ruleBookClass = ruleBookBuilder._ruleBookClass;
newRuleBook();
}
/**
* Decorates the RuleBook with {@link Auditor} functionality for rules auditing.
* @return a builder that can add rules
*/
public RuleBookAddRuleBuilder<T> asAuditor() {
return new RuleBookAddRuleBuilder<>(new RuleBookAuditor<>(newRuleBook()));
}
/**
* Specifies the Result type for the RuleBook.
* @param resultType result class
* @param <U> type of the result class
* @return a builder with the new Result type
*/
public <U> RuleBookWithResultTypeBuilder<U> withResultType(Class<U> resultType) {
_resultType = resultType;
return new RuleBookWithResultTypeBuilder<U>((new RuleBookBuilder<U>(this)).newRuleBook());
}
/**
* Adds a rule to the RuleBook.
* @param consumer functional interface that supplies a RuleBookRuleBuilder for building a Rule
* @return a builder with the added Rule
*/
public RuleBookAddRuleBuilder<T> addRule(Consumer<RuleBookRuleBuilder<T>> consumer) {
return new RuleBookAddRuleBuilder<>(newRuleBook(), consumer);
}
/**
* Adds a rule to the RuleBook.
* @param rule Rule to be added into the RuleBook
* @param <U> the fact type of the Rule
* @return RuleBookBuilder with the added Rule
*/
public <U> RuleBookAddRuleBuilder<T> addRule(Rule<U, T> rule) {
return new RuleBookAddRuleBuilder<>(newRuleBook(), rule);
}
/**
* Builds the RuleBook.
* @return a RuleBook
*/
@Override
public RuleBook<T> build() {
return (new RuleBookBuilder<T>(_ruleBookClass)).newRuleBook();
}
private RuleBook<T> newRuleBook() {
if (_ruleBook == null) {
try {
_ruleBook = _ruleBookClass.newInstance();
} catch (IllegalAccessException | InstantiationException e) {
try {
Constructor<?> constructor = _ruleBookClass.getConstructor(Class.class);
_ruleBook = (RuleBook<T>) constructor.newInstance(_resultType);
} catch (InvocationTargetException
| NoSuchMethodException
| InstantiationException
| IllegalAccessException ex) {
throw new IllegalStateException("RuleBook of class " + _ruleBookClass + " can not be instantiated", ex);
}
}
}
return _ruleBook;
}
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<HTML
><HEAD
><TITLE
>PyGopherd Manual</TITLE
><META
NAME="GENERATOR"
CONTENT="Modular DocBook HTML Stylesheet Version 1.7"><STYLE
TYPE="text/css"
>.synopsis, .classsynopsis {
background: #eeeeee;
border: solid 1px #aaaaaa;
padding: 0.5em;
}
.programlisting {
background: #eeeeff;
border: solid 1px #aaaaff;
padding: 0.5em;
}
.variablelist {
padding: 4px;
margin-left: 3em;
}
.navigation {
background: #ffeeee;
border: solid 1px #ffaaaa;
margin-top: 0.5em;
margin-bottom: 0.5em;
}
.navigation a {
color: #770000;
}
.navigation a:visited {
color: #550000;
}
.navigation .title {
font-size: 200%;
}</STYLE
></HEAD
><BODY
CLASS="REFERENCE"
BGCOLOR="#FFFFFF"
TEXT="#000000"
LINK="#0000FF"
VLINK="#840084"
ALINK="#0000FF"
><DIV
CLASS="REFERENCE"
><A
NAME="PYGOPHERD"
></A
><DIV
CLASS="TITLEPAGE"
><H1
CLASS="TITLE"
>I. PyGopherd Manual</H1
><DIV
CLASS="TOC"
><DL
><DT
><B
>Table of Contents</B
></DT
><DT
><A
HREF="pygopherd.html#AEN3"
>pygopherd</A
> -- Multiprotocol Information Server</DT
></DL
></DIV
></DIV
><H1
><A
NAME="AEN3"
></A
>pygopherd</H1
><DIV
CLASS="REFNAMEDIV"
><A
NAME="AEN15"
></A
><H2
>Name</H2
>PyGopherd -- Multiprotocol Information Server</DIV
><DIV
CLASS="REFSYNOPSISDIV"
><A
NAME="AEN18"
></A
><H2
>Synopsis</H2
><P
><B
CLASS="COMMAND"
>pygopherd</B
> [<VAR
CLASS="REPLACEABLE"
>configfile</VAR
>]</P
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="DESCRIPTION"
></A
><H2
>Description</H2
><P
> Welcome to <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>. In a nutshell, <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>
is a modern dynamic
multi-protocol hierarchical information server with a pluggable
modularized extension system,
full flexible caching, virtual files and
folders, and autodetection of file types -- all with support for
standardized yet extensible per-document metadata. Whew! Read on for
information on what all these buzzwords mean.
</P
><DIV
CLASS="REFSECT2"
><A
NAME="DESCRIPTION.FEATURES"
></A
><H3
>Features</H3
><P
> Here are some of <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>'s features:
</P
><P
></P
><UL
><LI
><P
>Provides built-in support for multiple protocols:
HTTP (Web), Gopher+, Gopher (RFC1436), Enhanced Gopher0,
and WAP (mobile phones). Protocols can be enabled or
disabled as desired.
</P
></LI
><LI
><P
>Provides protocol autodetection. That is,
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> can listen for all the above protocols
<SPAN
CLASS="emphasis"
><I
CLASS="EMPHASIS"
>on a single port</I
></SPAN
> and will
automatically respond using the protocol it detects the
client is using. Practical effects of this are that you
can, for instance, give out a single URL and have it
viewable normally on desktop Web browsers and in WAP
mode on mobile phones -- and appropriately in various
Gopher browsers.
</P
></LI
><LI
><P
>Metadata and site links can be entered in a
variety of formats, including full UMN dotfile metadata
formats as well as Bucktooth gophermap files. Moreover,
gophermap files are not limited to Gopher protocols, and
can be used for all protocols.
</P
></LI
><LI
><P
>Support for inter-protocol linking (linking
from Gopher sites to web sites)</P
></LI
><LI
><P
>Virtual folder system lets you serve up
anything as if it were regular files and directories.
PyGopherd comes with the following virtual folder systems
built in:</P
><P
></P
><UL
><LI
><P
>Can present any Unix MBOX, MMDF box, MH
directory, Maildir directory, or Babyl mailbox as a
virtual folder, the contents of which are the
messages in the mailbox.
</P
></LI
><LI
><P
>Can use a configurable separator to
split a file into multiple parts, the first line of each
becoming the name for the virtual folder.</P
></LI
><LI
><P
>Can peek inside a ZIP file and serve it
up as first-class site citizens -- metadata can even be
stored in the ZIP files.
</P
></LI
><LI
><P
>Can serve up the contents of a dictd
server as a filesystem.
</P
></LI
></UL
></LI
><LI
><P
> Modular, extensible design: you can use PyGopherd's own
PYG extension format, or UMN- or Bucktooth-style
executables.
</P
></LI
><LI
><P
> Runs on any platform supported by Python 2.2 or 2.3.
This includes virtually every past and current flavor of
Unix (Linux, *BSD, Solaris, SunOS), Windows, MacOS 9.x
and X, and more. Some features may not be available on
non-Unix platforms.
</P
></LI
><LI
><P
>Runs on any platform supported by Java 1.1
via the Jython Python implementation.</P
></LI
><LI
><P
>Tunable server types via configuration
directive -- forking or threading.</P
></LI
><LI
><P
>Secure design with support for chrooted execution.</P
></LI
><LI
><P
>Feature-complete, full implementations of:
Gopher0 (RFC1436), Gopher+, HTTP, and WAP.</P
></LI
><LI
><P
>Support for automatically finding the titles
of HTML documents for presentation in a directory.</P
></LI
><LI
><P
>Versatile configuration file format is both
extensible and nicely complementary to the module system.</P
></LI
><LI
><P
>Protocol-independent, handler-dependent
caching. This increases performance by letting handlers
cache dynamically-generated information -- currently used by
the directory handlers. This can improve performance of
directories by several orders of magnitude. Because this is
a handler cache only, all protocols share the single
cache. Since the processing time for the protocols is
negligible, this works out very well.</P
></LI
><LI
><P
>Autosensing of MIME types and gopher0 item
types. Both are completely configurable. MIME type
detection is done using a standard mime.types file, and
gopher0 types are calculated by using a configurable
regexp-based MIME-to-gophertype map.</P
></LI
><LI
><P
>Heavy support of regular expressions in configuration.</P
></LI
><LI
><P
>ProtocolMultiplexer and HandlerMultiplexer
let you choose only those protocols and handlers that you
wish your server to support and the order in which they are
tried when a request comes in.
</P
></LI
><LI
><P
>Full logging via syslog.</P
></LI
></UL
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="DESCRIPTION.ABOUTGOPHER"
></A
><H3
>About Gopher</H3
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> started life as a server for the Gopher Internet
protocol. With Gopher, you can mount a filesystem (viewing files and
folders as if they were local),
browse Gopherspace with a web browser,
download files, and be interactive with searching.
</P
><P
> But this is only part of the story. The world of Gopher is more
expansive than this. There are two major gopher protocols: Gopher0
(also known as RFC1436) and Gopher+. Gopher0 is a small, simple,
lightweight protocol that is very functional yet also extremely easy
to implement. Gopher0 clients can be easily placed in small embedded
devices or in massive environments like a modern web browser.
</P
><P
> Gopher+ is based on Gopher0 but extends it by providing document
metadata such as file size and MIME type. Gopher+ allows all sorts of
neat features, such as configurable metadata (serving up a bunch of
photos? Add a Subject field to your metadata to let
a customized photo
browser display who is pictured) and multiple
views of a file (let the
user select to view your photos as PNG or JPEG).
</P
></DIV
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="QUICKSTART"
></A
><H2
>Quick Start</H2
><P
> If you have already installed <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> system-wide, or your
administrator has done that for you, your task for setting up
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> for the first time is quite simple. You just need
to set up your configuration file, make your folder directory,
and run it!
</P
><P
> You can quickly set up your configuration file. The
distribution includes two files of interest:
<TT
CLASS="FILENAME"
>conf/pygopherd.conf</TT
> and
<TT
CLASS="FILENAME"
>conf/mime.types</TT
>. Debian users will find
the configuration file pre-installed in
<TT
CLASS="FILENAME"
>/etc/pygopherd/pygopherd.conf</TT
> and the
<TT
CLASS="FILENAME"
>mime.types</TT
> file provided by the system
already.
</P
><P
> Open up <TT
CLASS="FILENAME"
>pygopherd.conf</TT
> in your editor and
adjust to suit. The file is heavily commented and you can
refer to it for detailed information. Some settings to take a
look at include: <SPAN
CLASS="PROPERTY"
>detach</SPAN
>,
<SPAN
CLASS="PROPERTY"
>pidfile</SPAN
>, <SPAN
CLASS="PROPERTY"
>port</SPAN
>,
<SPAN
CLASS="PROPERTY"
>usechroot</SPAN
>, <SPAN
CLASS="PROPERTY"
>setuid</SPAN
>,
<SPAN
CLASS="PROPERTY"
>setgid</SPAN
>, and <SPAN
CLASS="PROPERTY"
>root</SPAN
>.
These may or may not work at their defaults for you. The
remaining ones should be fine for a basic setup.
</P
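><P
> The following is a rough, hypothetical sketch only; the commented
<TT
CLASS="FILENAME"
>conf/pygopherd.conf</TT
> shipped with the distribution remains the authoritative reference.
A minimal configuration might look something like this:
</P
><PRE
CLASS="PROGRAMLISTING"
>[pygopherd]
detach = yes
pidfile = /var/run/pygopherd.pid
port = 70
usechroot = yes
setuid = gopher
setgid = gopher
root = /var/gopher
</PRE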
><P
> Invoke <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> with <B
CLASS="COMMAND"
>pygopherd
path/to/configfile</B
> (or
<B
CLASS="COMMAND"
>/etc/init.d/pygopherd start</B
> on Debian).
Place some files in the location specified by the
<SPAN
CLASS="PROPERTY"
>root</SPAN
> directive in the config file and
you're ready to run!
</P
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="INSTALLATION"
></A
><H2
>Installation</H2
><P
> If you are reading this document via the "man" command, it is likely
that you have no installation tasks to perform; your system
administrator has already installed <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>. If you need to install it yourself, you
have three options: a system-wide installation with Debian, system-wide
installation with other systems, and a single-user installation. You
can download the latest version of PyGopherd
from
<A
HREF="http://quux.org/devel/gopher/pygopherd/"
TARGET="_top"
>http://quux.org/devel/gopher/pygopherd/</A
>
</P
><DIV
CLASS="REFSECT2"
><A
NAME="INSTALLATION.DEBIAN"
></A
><H3
>Debian System-Wide Installation</H3
><P
> If you are tracking Debian unstable, you may install
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> by simply running this command as root:
</P
><P
> <B
CLASS="COMMAND"
>apt-get install pygopherd</B
>
</P
><P
> If you are not tracking Debian unstable, download the .deb
package from the <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> website and then run
<B
CLASS="COMMAND"
>dpkg -i</B
> to install the downloaded
package. Then, skip to the configuration section below.
You will use <B
CLASS="COMMAND"
>/etc/init.d/pygopherd start</B
>
to start the program.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="INSTALLATION.OTHER"
></A
><H3
>Other System-Wide Installation</H3
><P
> Download the tar.gz version of the package from the website. Make
sure you have Python 2.2 or above installed; if not, download and
install it from <A
HREF="http://www.python.org/"
TARGET="_top"
>http://www.python.org/</A
>. Then run these
commands:
</P
><PRE
CLASS="PROGRAMLISTING"
> <B
CLASS="COMMAND"
>tar -zxvf pygopherd-x.y.z.tar.gz</B
>
<B
CLASS="COMMAND"
>cd pygopherd-x.y.z</B
>
<B
CLASS="COMMAND"
>python2.2 setup.py install</B
>
</PRE
><P
> Some systems will use <B
CLASS="COMMAND"
>python</B
> or
<B
CLASS="COMMAND"
>python2.3</B
> in place of
<B
CLASS="COMMAND"
>python2.2</B
>.
</P
><P
> Next, proceed to configuration. Make sure that the
<TT
CLASS="FILENAME"
>/etc/pygopherd/pygopherd.conf</TT
> file
names valid users (<SPAN
CLASS="PROPERTY"
>setuid</SPAN
> and
<SPAN
CLASS="PROPERTY"
>setgid</SPAN
> options) and a valid document
root (<SPAN
CLASS="PROPERTY"
>root</SPAN
> option).
</P
><P
> You will type <TT
CLASS="FILENAME"
>pygopherd</TT
> to invoke the
program.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="INSTALLATION.SINGLE"
></A
><H3
>Single-Account Installation</H3
><P
> Download the tar.gz version of the package from the website. Make
sure you have Python 2.2 or above installed; if not, download and
install it from <A
HREF="http://www.python.org/"
TARGET="_top"
>http://www.python.org/</A
>. Then run these
commands:
</P
><PRE
CLASS="PROGRAMLISTING"
> <B
CLASS="COMMAND"
>tar -zxvf pygopherd-x.y.z.tar.gz</B
>
<B
CLASS="COMMAND"
>cd pygopherd-x.y.z</B
>
</PRE
><P
> Modify <TT
CLASS="FILENAME"
>conf/pygopherd.conf</TT
> as follows:
</P
><P
></P
><UL
><LI
><P
>Set <SPAN
CLASS="PROPERTY"
>usechroot = no</SPAN
></P
></LI
><LI
><P
>Comment out (add a # sign to the start of
the line) the <SPAN
CLASS="PROPERTY"
>pidfile</SPAN
>,
<SPAN
CLASS="PROPERTY"
>setuid</SPAN
>, and
<SPAN
CLASS="PROPERTY"
>setgid</SPAN
> lines.</P
></LI
><LI
><P
>Set <SPAN
CLASS="PROPERTY"
>root</SPAN
> to something appropriate.</P
></LI
><LI
><P
>Set <SPAN
CLASS="PROPERTY"
>port</SPAN
> to a number
greater than 1024.</P
></LI
></UL
><P
> When you want to run <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>, you will issue the
<B
CLASS="COMMAND"
>cd</B
> command as above and then type
<B
CLASS="COMMAND"
>PYTHONPATH=. bin/pygopherd</B
>. There is no
installation step necessary.
</P
></DIV
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="CONFIGURATION"
></A
><H2
>Configuration</H2
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> is regulated by a configuration file normally
stored in <TT
CLASS="FILENAME"
>/etc/pygopherd/pygopherd.conf</TT
>.
You can specify an alternate configuration file on the command
line. The <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> distribution ships
with a sample <TT
CLASS="FILENAME"
>pygopherd.conf</TT
> file that
thoroughly documents the configuration file options and
settings.
</P
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="OPTIONS"
></A
><H2
>Options</H2
><P
> All <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> configuration is done via the configuration
file. Therefore, the program has only one command-line
option:
</P
><P
></P
><TABLE
CLASS="variablelist"
BORDER="0"
CELLSPACING="0"
CELLPADDING="4"
><TBODY
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN184"><SPAN
STYLE="white-space: nowrap"
><VAR
CLASS="REPLACEABLE"
>configfile</VAR
></SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>This option argument specifies the location
of the configuration file that <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> is to use.</P
></TD
></TR
></TBODY
></TABLE
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="HANDLERS"
></A
><H2
>Handlers</H2
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> defines several handlers which are responsible for
finding data on your server and presenting it to the user. The
handlers are used to generate things like links to other documents and
directory listings. They are also responsible for serving up regular
files and even virtual folders.
</P
><P
> Handlers are specified with the <SPAN
CLASS="PROPERTY"
>handlers</SPAN
>
option in <TT
CLASS="FILENAME"
>pygopherd.conf</TT
>. This option is
a list of handlers to use. For each request that arrives,
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> will ask each handler in
turn whether or not it can handle the request, and will handle the
request according to the first handler that is capable of doing so.
If no handlers can handle the request, a file not found error is
generated. See the example configuration file for an example.
</P
><P
> The remaining parts of this section describe the different
handlers that ship with <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>. Please note that some
versions of this manual may show the handlers in all caps;
however, their names are not all caps and are case-sensitive.
</P
><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.DIRHANDLER"
></A
><H3
>dir.DirHandler</H3
><P
> This handler is a basic one that generates menus based
on the contents of a directory. It is used for
directories that contain neither a
<TT
CLASS="FILENAME"
>gophermap</TT
> file nor UMN-style links
files, or situations where you have no need for either
of those.
</P
><P
> This handler simply reads the contents of your on-disk
directory, determines the appropriate types of each file,
and sends the result to the client. The descriptions of
each item are usually set to the filename, but the
<SPAN
CLASS="PROPERTY"
>html.HTMLFileTitleHandler</SPAN
> may override
that.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.BUCKGOPHERMAP"
></A
><H3
>gophermap.BuckGophermapHandler</H3
><P
> This handler is used to generate directory listings
based on <TT
CLASS="FILENAME"
>gophermap</TT
> files. It will
not read the directory on-disk, instead serving content
from the <TT
CLASS="FILENAME"
>gophermap</TT
> file only.
Gophermaps are useful if you want to present a directory
in which the files do not frequently change and there is
general information to present. Overall, if you only
wish to present information particular to certain files,
you should consider using the abstract feature of
UMN.UMNDirHandler.
</P
><P
> The <TT
CLASS="FILENAME"
>gophermap</TT
> files contain two
types of lines, which are described here using the same
convention normally used for command line arguments. In
this section, the symbol \t will be used to indicate a
tab character, Control-I.
</P
><P
> <VAR
CLASS="REPLACEABLE"
>full line of informational
text</VAR
> </P
><P
> <VAR
CLASS="REPLACEABLE"
>gophertypeDESCRIPTION</VAR
> [\t<VAR
CLASS="REPLACEABLE"
>selector</VAR
> [\t<VAR
CLASS="REPLACEABLE"
>host</VAR
> [\t<VAR
CLASS="REPLACEABLE"
>port</VAR
>]]]</P
><P
> Note: spaces shown above are for clarity only and should
not actually be present in your file.
</P
><P
> The informational text must not contain any tab
characters, but may contain spaces. Informational text
will be rendered with gopher type
<SPAN
CLASS="PROPERTY"
>i</SPAN
>, which will cause it to be
displayed on a client's screen at its particular
position in the file.
</P
><P
> The second type of line represents a link to a file or
directory. It begins with a single-character Gopher
type (see Gopher Item Types below) followed immediately
by a description and a tab character. There is no space
or other separator between the gopher type and the
description. The description may contain spaces but not
tabs.
</P
><P
> The remaining arguments are optional, but only to the
extent that arguments may be omitted only if all
arguments after them are also omitted. These arguments
are:
</P
><P
></P
><TABLE
CLASS="variablelist"
BORDER="0"
CELLSPACING="0"
CELLPADDING="4"
><TBODY
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN231"><SPAN
STYLE="white-space: nowrap"
><VAR
CLASS="REPLACEABLE"
>selector</VAR
></SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The <SPAN
CLASS="PROPERTY"
>selector</SPAN
> is
the name of the file on the server. If it begins
with a slash, it is an absolute path; otherwise,
it is interpreted relative to the directory that
the gophermap file is in. If no selector is
specified, the description is also used as the
selector.
</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN237"><SPAN
STYLE="white-space: nowrap"
><VAR
CLASS="REPLACEABLE"
>host</VAR
></SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The <SPAN
CLASS="PROPERTY"
>host</SPAN
>
specifies the host on which this resource is
located. If not specified, defaults to the
current server.
</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN243"><SPAN
STYLE="white-space: nowrap"
><VAR
CLASS="REPLACEABLE"
>port</VAR
></SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The <SPAN
CLASS="PROPERTY"
>port</SPAN
>
specifies the port on which the resource is
located. If not specified, defaults to the port
the current server is listening on.
</P
></TD
></TR
></TBODY
></TABLE
><P
> An example of a gophermap to help illustrate the concept
is included with the <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> distribution in the
file <TT
CLASS="FILENAME"
>examples/gophermap</TT
>.
</P
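><P
> For illustration only, here is a small hypothetical gophermap (it is
not the file shipped in <TT
CLASS="FILENAME"
>examples/gophermap</TT
>); each field on a link line is separated by a single tab
character:
</P
><PRE
CLASS="PROGRAMLISTING"
>Welcome to the example server!

0About this server	about.txt
1Photo archive	/photos
hOur home page	/URL:http://www.example.org/
</PRE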
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.COMPRESSEDFILEHANDLER"
></A
><H3
>file.CompressedFileHandler</H3
><P
> In order to save space, you might want to store
documents on-disk in a compressed format. But then
clients would ordinarily have to decompress the files
themselves. It would be nice to have the server
automatically decompress the files on the fly, sending
that result to the client. That's where
<SPAN
CLASS="PROPERTY"
>file.CompressedFileHandler</SPAN
> comes
in.
</P
><P
> This handler will take compressed files, pipe them
through your chosen decompression program, and send the
result directly to clients -- completely transparently.
</P
><P
> To use this handler, set the
<SPAN
CLASS="PROPERTY"
>decompressors</SPAN
> option in the
configuration file. This option defines a mapping from
MIME encodings (as defined with the
<SPAN
CLASS="PROPERTY"
>encoding</SPAN
> option) to decompression
programs. Files that are not encoded, or which have an
encoding that does not occur in the
<SPAN
CLASS="PROPERTY"
>decompressors</SPAN
> map, will not be
decompressed by this handler.
</P
><P
> Please see the sample configuration file for more
examples and details about the configuration of this
handler.
</P
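><P
> Purely as a sketch (the exact value syntax shown here is an
assumption; treat the shipped configuration file, not this fragment,
as authoritative), such a mapping could look roughly like this:
</P
><PRE
CLASS="PROGRAMLISTING"
>decompressors = {'gzip' : '/bin/zcat', 'bzip2' : '/usr/bin/bzcat'}
</PRE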
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.FILEHANDLER"
></A
><H3
>file.FileHandler</H3
><P
> The <SPAN
CLASS="PROPERTY"
>file.FileHandler</SPAN
> is just that
-- its duty is to serve up regular files to clients.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.HTMLFILETITLEHANDLER"
></A
><H3
>html.HTMLFileTitleHandler</H3
><P
> This handler is used when generating directories and
will set the description of HTML files to the HTML title
defined in them rather than let it be the default
filename. Other than that, it has no effect. UMN
gopherd implements a similar policy.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.MAILBOXES"
></A
><H3
>mbox handlers</H3
><P
> There are four mailbox handlers:
</P
><P
></P
><UL
><LI
><P
>mbox.MaildirFolderHandler</P
></LI
><LI
><P
>mbox.MaildirMessageHandler</P
></LI
><LI
><P
>mbox.MBoxMessageHandler</P
></LI
><LI
><P
>mbox.MBoxFolderHandler</P
></LI
></UL
><P
> These four handlers provide a unique "virtual folder"
service. They allow you to present mailboxes as if they
were folders, the items of the folders being the
messages in the mailbox, organized by subject. This is
useful for presenting mail archives or just making
e-mail accessible in a nice and easy fashion.
</P
><P
> To use these handlers, all you have to do is enable them
in your <SPAN
CLASS="PROPERTY"
>handlers</SPAN
> section. They
will automatically detect requests for mailboxes and
handle them appropriately.
</P
><P
> The different handlers are for traditional Unix mbox
mailboxes (all messages in a single file) and new
qmail-style Maildir mailboxes. You can enable only the
two handlers for the specific mailbox type that you use,
if desired.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.PYGHANDLER"
></A
><H3
>pyg.PYGHandler</H3
><P
> PYG (short for PYGopherd) is a mechanism that provides a
tremendous amount of flexibility. Rather than just
letting you execute a script like other Gopher or HTTP
servers, PYGs are actually loaded up into PyGopherd and
become fully-capable first-class virtual handlers. Yet
they need not be known ahead of time, and are loaded
dynamically.
</P
><P
> With a PYG handler, you can generate gopher directories,
handle searches, generate files, and more on the fly.
You can create entire virtual directory trees (for
instance, to interface with NNTP servers or with DICT
servers), and access them all using the standard Gopher
protocol. All of this without having to modify even one
line of <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> code.
</P
><P
> If enabled, the <SPAN
CLASS="PROPERTY"
>pyg.PYGHandler</SPAN
> will
look for files with the extension .pyg that are marked
executable. If found, they will be loaded and run as
PYGs.
</P
><P
> Please note: this module provides the capability to
execute arbitrary code. Please consider the security
ramifications of that before enabling it.
</P
><P
> See the <SPAN
CLASS="PROPERTY"
>virtual.Virtual</SPAN
> handler for
more information about passing data to your scripts at
runtime.
</P
><P
> At present, documentation on writing PYGs is not
provided, but you may find examples in the
<SPAN
CLASS="PROPERTY"
>pygfarm</SPAN
> directory included with the
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> distribution.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.EXECHANDLER"
></A
><H3
>scriptexec.ExecHandler</H3
><P
> This handler implements "old-style" script execution;
that is, executing arbitrary programs and piping the
result to the client. It is, for the most part,
compatible with both scripts written for UMN gopherd and
the Bucktooth gopher server. If enabled, it will
execute any file that is marked executable in the
filesystem. It will normally list scripts as returning
plain text, but you may create a custom link to the
script that defines it as returning whatever kind of
file you desire. Unlike PYGs, this type must be known
in advance.
</P
><P
> The <SPAN
CLASS="PROPERTY"
>scriptexec.ExecHandler</SPAN
> will set
environment variables for your scripts to use. They are
as follows:
</P
><P
></P
><TABLE
CLASS="variablelist"
BORDER="0"
CELLSPACING="0"
CELLPADDING="4"
><TBODY
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN304"><SPAN
STYLE="white-space: nowrap"
>SERVER_NAME</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The name of this server as defined in
the configuration file or detected from the
operating system.</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN308"><SPAN
STYLE="white-space: nowrap"
>SERVER_PORT</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The port this server is listening on.</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN312"><SPAN
STYLE="white-space: nowrap"
>REMOTE_ADDR</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The IP address of the client.</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN316"><SPAN
STYLE="white-space: nowrap"
>REMOTE_PORT</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The port number of the client.</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN320"><SPAN
STYLE="white-space: nowrap"
>REMOTE_HOST</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The same value as <SPAN
CLASS="PROPERTY"
>REMOTE_ADDR</SPAN
></P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN325"><SPAN
STYLE="white-space: nowrap"
>SELECTOR</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The file that was requested; that is,
the relative path to this script. If the selector
included additional parameters after a |, they
will be included in this string as well.
</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN329"><SPAN
STYLE="white-space: nowrap"
>REQUEST</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>The "base" part of the selector; that
is, the part leading up to the |.</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN333"><SPAN
STYLE="white-space: nowrap"
>SEARCHREQUEST</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Included only if the client specified
search data, this is used if the client is
searching for something.
</P
></TD
></TR
></TBODY
></TABLE
><P
> See the <SPAN
CLASS="PROPERTY"
>virtual.Virtual</SPAN
> handler for
more information about passing data to your scripts at
runtime.
</P
><P
> Please note: this module provides the capability to
execute arbitrary code. Please consider the security
ramifications of that before enabling it.
</P
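><P
> To make that environment concrete, here is a tiny illustrative
script (hypothetical; it is not part of the distribution) that
simply echoes a few of these variables back to the client as plain
text:
</P
><PRE
CLASS="PROGRAMLISTING"
>#!/usr/bin/env python
# Illustrative exec script: report the request environment.
import os

env = os.environ.get
print "Hello from %s, port %s" % (env('SERVER_NAME', '?'), env('SERVER_PORT', '?'))
print "You are %s and you asked for %s" % (env('REMOTE_ADDR', '?'), env('SELECTOR', '?'))
if env('SEARCHREQUEST'):
    print "You searched for: " + env('SEARCHREQUEST')
</PRE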
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.UMN"
></A
><H3
>UMN.UMNDirHandler</H3
><P
> This is one of the most powerful workhorse handlers in
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>. It is designed to emulate most of the ways
in which the UMN gopherd distribution generates
directories, even going so far as to be bug-compatible
in some cases. Generating directories with this handler
is often the best general-purpose way to make nice
directories in gopherspace.
</P
><P
> The remainder of the description of the
<SPAN
CLASS="PROPERTY"
>UMN.UMNDirHandler</SPAN
>, except for the
Abstracts and Info section, is lifted directly from the
original UMN gopherd documentation, with light editing,
because this handler implements it so exactly that there
was no point in rewriting all that documentation :-)
</P
><DIV
CLASS="REFSECT3"
><A
NAME="HANDLERS.UMN.LINKS"
></A
><H4
>Links</H4
><P
> You can override the default view of a directory as
generated by <SPAN
CLASS="PROPERTY"
>dir.DirHandler</SPAN
> by
creating what are known as <SPAN
CLASS="emphasis"
><I
CLASS="EMPHASIS"
>Links</I
></SPAN
> in
the data tree.
</P
><P
> The ability to make links to other hosts is how gopher
distributes itself among multiple hosts. There are two
different ways to make a link. The first and simplest is
to create a link file that contains the data needed by the
server. By default all files in the gopher data directory
starting with a period are taken to be link files. A link
file can contain multiple links. To define a link you
need to put five lines in a link file that define the
needed characteristics for the document. Here is an
example of a link.
</P
><PRE
CLASS="PROGRAMLISTING"
>Name=Cheese Ball Recipes
Numb=1
Type=1
Port=150
Path=1/Moo/Cheesy
Host=zippy.micro.umn.edu
</PRE
><P
> The Name= line is what the user will see when cruising
through the database. In this case the name is "Cheese
Ball Recipes". The "Type=" defines what kind of document
this object is. For a list of all defined types, see
Gopher Item Types below. For Gopher+ and HTTP, a MIME
type is also used, which is determined automatically based
on the type you specify.
</P
><P
> The "Path=" line contains the selector string that the
client will use to retrieve the actual document. The
Numb= specifies that this entry should be presented first
in the directory list (instead of being alphabetized).
The "Numb=" line is optional. If it is present it cannot
be the last line of the link. The "Port=" and "Host="
lines specify a fully qualified domain name (FQDN) and a
port respectively. You may substitute a plus '+' for
these two parameters if you wish. The server will insert
the current hostname and the current port when it sees a
plus in either of these two fields.
</P
><P
>An easy way to retrieve links is to use the Curses
Gopher Client. By pressing '=' you can get information
suitable for inclusion in a link file.
</P
></DIV
><DIV
CLASS="REFSECT3"
><A
NAME="HANDLERS.UMN.OVERRIDING"
></A
><H4
>Overriding Defaults</H4
><P
> The server looks for a directory called
<TT
CLASS="FILENAME"
>.cap</TT
> when parsing a directory. The
server then checks to see if the <TT
CLASS="FILENAME"
>.cap</TT
>
directory contains a file with the same name as the file
it's parsing. If this file exists then the server will
open it for reading. The server parses this file just
like a link file. However, instead of making a new
object, the parameters inside the
<TT
CLASS="FILENAME"
>.cap/</TT
> file are used to override any
of the server supplied default values.
</P
><P
> For instance, say you wanted to change the Title of a text
file for gopher, but don't want to change the filename.
You also don't want it alphabetized, instead you want it
second in the directory listing. You could make a
set-aside file in the <TT
CLASS="FILENAME"
>.cap</TT
> directory with the same
filename that contained the following lines:
</P
><PRE
CLASS="PROGRAMLISTING"
>Name=New Long Cool Name
Numb=2
</PRE
><P
> An alternative to <TT
CLASS="FILENAME"
>.cap</TT
> files are
extended link files. They work just the same as the files
described in Links above, but have a somewhat abbreviated
format. As an example, if the name of the file was
<TT
CLASS="FILENAME"
>file-to-change</TT
>, then you could create
a file called <TT
CLASS="FILENAME"
>.names</TT
> with the
following contents:
</P
><PRE
CLASS="PROGRAMLISTING"
>Path=./file-to-change
Name=New Long Cool Name
Numb=2
</PRE
></DIV
><DIV
CLASS="REFSECT3"
><A
NAME="HANDLERS.UMN.COOL"
></A
><H4
>Adding Cool Links</H4
><P
> One cool thing you can do with .Links is to add neato
services to your gopher server. Adding a link like this:
</P
><PRE
CLASS="PROGRAMLISTING"
>Name=Cool ftp directory
Type=h
Path=/URL:ftp://hostname/path/
Host=+
Port=+
Name=Cool web site
Type=h
Path=/URL:http://hostname/
Host=+
Port=+
</PRE
><P
> Will allow you to link in any FTP or Web site to your
gopher. (See url.URLHandler for more details.)
</P
><P
> You can easily add a finger site to your gopher server thusly:
</P
><PRE
CLASS="PROGRAMLISTING"
>Name=Finger information
Type=0
Path=lindner
Host=mudhoney.micro.umn.edu
Port=79
</PRE
></DIV
><DIV
CLASS="REFSECT3"
><A
NAME="HANDLERS.UMN.HIDING"
></A
><H4
>Hiding an Entry</H4
><P
> Sometimes it is necessary to hide an entry from clients
entirely. For an
object named "fred", the overriding .names file entry would be:
</P
><PRE
CLASS="PROGRAMLISTING"
> Type=X
Path=./fred
</PRE
><P
>	  This works by overriding the default type to be "X".  Hiding
	  entries this way can be useful when, for some reason, there are
	  symlinks (or other files) in the directory that
	  <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> serves, and those entries should not be shown at all.
</P
></DIV
><DIV
CLASS="REFSECT3"
><A
NAME="HANDLERS.UMN.ABSTRACTS"
></A
><H4
>Abstracts and Info</H4
><P
> Many modern gopher server maintainers like to intersperse
gopher directory listings with other information -- often,
additional information about the contents of files in the
directory. The gophermap system provides one way to do
that, and abstracts used with UMN gopher directories
	  provide another.
</P
><P
> Subject to the <SPAN
CLASS="PROPERTY"
>abstract_headers</SPAN
> and
<SPAN
CLASS="PROPERTY"
>abstract_entries</SPAN
> configuration file
options, this feature allows you to define that extra
information. You can do that by simply creating a file
named <TT
CLASS="FILENAME"
>filename.abstract</TT
> right
alongside the regular file in your directory. The file
will be interpreted as the abstract. For a directory,
create a file named <TT
CLASS="FILENAME"
>.abstract</TT
> in the
directory. Simple as that!
</P
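><P
>	  As a purely illustrative example (these file names are
	  hypothetical, not taken from the manual), a directory using this
	  feature might contain the following files, where
	  recipes.txt.abstract holds the abstract for recipes.txt and
	  .abstract holds the abstract for the directory itself:
	</P
><PRE
CLASS="PROGRAMLISTING"
>recipes.txt
recipes.txt.abstract
.abstract
</PRE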
></DIV
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.HTMLURLHANDLER"
></A
><H3
>url.HTMLURLHandler</H3
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> provides ways for you to link to pages outside
Gopherspace -- that is, web pages, FTP sites, and the like.
This is accomplished according to the <A
HREF="http://lists.complete.org/[email protected]/2002/02/msg00033.html.gz"
TARGET="_top"
>Links
to URL</A
> specification (see Conforming To below for
details). In order to link to a URL (EXCEPT gopher URLs)
from a menu, you create a link of type h (regardless of the
actual type of the resource that you are linking to) in your
<TT
CLASS="FILENAME"
>gophermap</TT
> or
<TT
CLASS="FILENAME"
>.Links</TT
>
file that looks like this:
</P
><PRE
CLASS="PROGRAMLISTING"
>/URL:http://www.complete.org/
</PRE
><P
> Modern Gopher clients that follow the Links to URL
specification will automatically follow that link when you
select it. The rest need some help, and that's where this
handler comes in.
</P
><P
> For Gopher clients that do not follow the Links to URL
specification, the <SPAN
CLASS="PROPERTY"
>url.HTMLURLHandler</SPAN
>
will automatically generate an HTML document for them on the
fly. This document includes a refresh code that will send
them to the proper page. You should not disable this
handler.
</P
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.URLTYPEREWRITER"
></A
><H3
>url.URLTypeRewriter</H3
><P
> Some people wish to serve HTML documents from their Gopher
server. One problem with that is that links in Gopherspace
include an extra type character at the beginning, whereas
links in HTTP do not. This handler will remove the extra
type character from HTTP requests that come in, allowing a
single relative-to-root link to work for both.
</P
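><P
>	  As a hypothetical illustration (the path below is made up, not
	  part of the manual), an HTML page served from gopherspace might
	  use a single relative-to-root link that includes the Gopher type
	  character; with this handler enabled, the same link also resolves
	  when the page is fetched over HTTP:
	</P
><PRE
CLASS="PROGRAMLISTING"
>/0/docs/readme.txt
</PRE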
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.VIRTUAL"
></A
><H3
>virtual.Virtual</H3
><P
> This handler is not intended to ever be used directly, but
is used by many other handlers such as the mbox support, PYG
handlers, and others. It is used to generate virtual
entries in the directory hierarchy -- that is, entries that
look normal to a client, but do not actually correspond to a
file on disk.
</P
><P
> One special feature of the
<SPAN
CLASS="PROPERTY"
>virtual.Virtual</SPAN
> handler is that you can
send information to it at runtime in a manner similar to a
CGI script on the web. You do this by adding a question
mark after the regular selector, followed by any arbitrary
data that you wish to have sent to the virtual request
handler.
</P
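><P
>	  For example (a purely hypothetical selector, not something
	  shipped with the server), a client could append data to a virtual
	  entry's selector after a question mark:
	</P
><PRE
CLASS="PROGRAMLISTING"
>/demo.pyg?searchterm=gopher
</PRE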
></DIV
><HR><DIV
CLASS="REFSECT2"
><A
NAME="HANDLERS.ZIP"
></A
><H3
>ZIP.ZIPHandler</H3
><P
> Using zip.ZIPHandler, you can save space on your server by
converting part or all of your site into a ZIP file.
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> can use the contents of that ZIP file as the
contents of your site -- completely transparently.
</P
><P
> The ZIP file handler must be enabled in the configuration
file for this to work.
</P
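><P
>	  As an illustrative sketch (the paths are hypothetical), part of a
	  site could be packed into a ZIP file with a standard zip tool
	  before enabling the handler:
	</P
><PRE
CLASS="PROGRAMLISTING"
>cd /var/gopher
zip -r recipes.zip recipes/
</PRE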
></DIV
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="TYPES"
></A
><H2
>Gopher Item Types</H2
><P
> When you construct links to files via
<TT
CLASS="FILENAME"
>.Links</TT
> or <TT
CLASS="FILENAME"
>gophermap</TT
>
files, or modify the <SPAN
CLASS="PROPERTY"
>mapping</SPAN
> in the
configuration file, you will need to know these. Items
bearing the "not implemented" text are not served up by
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> as it ships, generally due to requirements of
customized per-site software, but may be served up via PYG
extension modules or other gopher servers.
</P
><P
> This list was prepared based on RFC1436, the UMN gopherd(1) manpage,
and best current practices.
</P
><P
></P
><TABLE
CLASS="variablelist"
BORDER="0"
CELLSPACING="0"
CELLPADDING="4"
><TBODY
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN424"><SPAN
STYLE="white-space: nowrap"
>0</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Plain text file</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN428"><SPAN
STYLE="white-space: nowrap"
>1</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Directory</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN432"><SPAN
STYLE="white-space: nowrap"
>2</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>CSO phone book server (not implemented by <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>)</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN437"><SPAN
STYLE="white-space: nowrap"
>3</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Error condition; text that follows is plain text</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN441"><SPAN
STYLE="white-space: nowrap"
>4</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Macintosh file, BinHex format</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN445"><SPAN
STYLE="white-space: nowrap"
>5</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>DOS binary archive (not implemented by
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>; use type 9 instead)</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN450"><SPAN
STYLE="white-space: nowrap"
>6</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>uuencoded file; not generated by
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> automatically, but can be linked to
manually. Most gopher clients will handle this better
as type 1.
</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN455"><SPAN
STYLE="white-space: nowrap"
>7</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Search</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN459"><SPAN
STYLE="white-space: nowrap"
>8</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Telnet link</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN463"><SPAN
STYLE="white-space: nowrap"
>9</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Binary file</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN467"><SPAN
STYLE="white-space: nowrap"
>+</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Redundant server (not implemented by <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>)</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN472"><SPAN
STYLE="white-space: nowrap"
>c</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Calendar (not implemented by <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>)</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN477"><SPAN
STYLE="white-space: nowrap"
>e</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Event (not implemented by <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>)</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN482"><SPAN
STYLE="white-space: nowrap"
>g</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>GIF-format graphic</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN486"><SPAN
STYLE="white-space: nowrap"
>h</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>HTML file</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN490"><SPAN
STYLE="white-space: nowrap"
>I</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Any kind of graphic file other than GIF</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN494"><SPAN
STYLE="white-space: nowrap"
>i</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Informational
text included in a directory that is displayed but does not
link to any actual file.</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN498"><SPAN
STYLE="white-space: nowrap"
>M</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>MIME multipart/mixed file</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN502"><SPAN
STYLE="white-space: nowrap"
>s</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>Any kind of sound file</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN506"><SPAN
STYLE="white-space: nowrap"
>T</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
>tn3270 link</P
></TD
></TR
><TR
><TD
ALIGN="LEFT"
VALIGN="TOP"
><A
NAME="AEN510"><SPAN
STYLE="white-space: nowrap"
>X</SPAN
>, <SPAN
STYLE="white-space: nowrap"
>-</SPAN
></TD
><TD
ALIGN="LEFT"
VALIGN="TOP"
><P
> UMN-specific -- signifies that this entry should not be
	  displayed in a directory listing, but may be accessed via a
direct link. This value is never transmitted in any Gopher
protocol.
</P
></TD
></TR
></TBODY
></TABLE
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="CONFORMING"
></A
><H2
>Conforming To</H2
><P
></P
><UL
><LI
><P
> The Internet Gopher Protocol as specified in RFC1436
</P
></LI
><LI
><P
> The Gopher+ upward-compatible enhancements to the Internet Gopher
Protocol from the University of Minnesota as laid out at
<A
HREF="gopher://gopher.quux.org/0/Archives/mirrors/boombox.micro.umn.edu/pub/gopher/gopher_protocol/Gopher+/Gopher+.txt"
TARGET="_top"
>gopher://gopher.quux.org/0/Archives/mirrors/boombox.micro.umn.edu/pub/gopher/gopher_protocol/Gopher+/Gopher+.txt</A
>.
</P
></LI
><LI
><P
> The gophermap file format as originally implemented in the
Bucktooth gopher server and described at
<A
HREF="gopher://gopher.floodgap.com/0/buck/dbrowse%3Ffaquse%201"
TARGET="_top"
>gopher://gopher.floodgap.com/0/buck/dbrowse%3Ffaquse%201</A
>.
</P
></LI
><LI
><P
> The Links to URL specification as laid out by John Goerzen
at
<A
HREF="gopher://gopher.quux.org/0/Archives/Mailing%20Lists/gopher/gopher.2002-02%3f/MBOX-MESSAGE/34"
TARGET="_top"
>gopher://gopher.quux.org/0/Archives/Mailing%20Lists/gopher/gopher.2002-02%3f/MBOX-MESSAGE/34</A
>.
</P
></LI
><LI
><P
> The UMN format for specifying object attributes and links
with .cap, .Links, .abstract, and similar files as specified elsewhere
in this document and implemented by UMN gopherd.
</P
></LI
><LI
><P
> The PYG format for extensible Python gopher objects as created for
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>.</P
></LI
><LI
><P
> Hypertext Transfer Protocol HTTP/1.0 as specified in
RFC1945
</P
></LI
><LI
><P
> Hypertext Markup Language (HTML) 3.2 and 4.0
Transitional as specified in RFC1866 and RFC2854.
</P
></LI
><LI
><P
> Maildir as specified in
<A
HREF="http://www.qmail.org/qmail-manual-html/man5/maildir.html"
TARGET="_top"
>http://www.qmail.org/qmail-manual-html/man5/maildir.html</A
> and
<A
HREF="http://cr.yp.to/proto/maildir.html"
TARGET="_top"
>http://cr.yp.to/proto/maildir.html</A
>.
</P
></LI
><LI
><P
> The mbox mail storage format as specified in
<A
HREF="http://www.qmail.org/qmail-manual-html/man5/mbox.html"
TARGET="_top"
>http://www.qmail.org/qmail-manual-html/man5/mbox.html</A
>.
</P
></LI
><LI
><P
> Registered MIME media types as specified in RFC2048.
</P
></LI
><LI
><P
> Script execution conforming to both UMN standards as laid out in UMN
gopherd(1) and Bucktooth standards as specified at
<A
HREF="gopher://gopher.floodgap.com:70/0/buck/dbrowse%3ffaquse%202"
TARGET="_top"
>gopher://gopher.floodgap.com:70/0/buck/dbrowse%3ffaquse%202</A
>,
so far as each can be implemented consistent with secure
design principles.
</P
></LI
><LI
><P
> Standard Python 2.2.1 or above as implemented on
POSIX-compliant systems.
</P
></LI
><LI
><P
> WAP/WML as defined by the WAP Forum.</P
></LI
></UL
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="BUGS"
></A
><H2
>Bugs</H2
><P
> Reports of bugs should be sent via e-mail to the <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>
bug-tracking system (BTS) at
<CODE
CLASS="EMAIL"
><<A
HREF="mailto:[email protected]"
>[email protected]</A
>></CODE
> or submitted online
using the Web interface at <A
HREF="http://bugs.complete.org/"
TARGET="_top"
>http://bugs.complete.org/</A
>.
</P
><P
> The Web site also lists all current bugs, where you can check their
status or contribute to fixing them.
</P
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="COPYRIGHT"
></A
><H2
>Copyright</H2
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> is Copyright (C) 2002, 2003 John Goerzen.
</P
><P
> This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; version 2 of the
License.
</P
><P
> This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
</P
><P
> You should have received a copy of the GNU General Public License
along with this program; if not, write to:
</P
><PRE
CLASS="PROGRAMLISTING"
>Free Software Foundation, Inc.
59 Temple Place
Suite 330
Boston, MA 02111-1307
USA
</PRE
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="AUTHOR"
></A
><H2
>Author</H2
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
>, its libraries, documentation, and all included
files (except where noted) were written by John Goerzen
<CODE
CLASS="EMAIL"
><<A
HREF="mailto:[email protected]"
>[email protected]</A
>></CODE
>
and copyright is held as stated in the
Copyright section.
</P
><P
> Portions of this manual (specifically relating to certain UMN gopherd
features and characteristics that PyGopherd emulates) are modified
versions of the original
gopherd(1) manpage accompanying the UMN gopher distribution. That
document is distributed under the same terms as this, and
bears the following copyright notices:
</P
><PRE
CLASS="PROGRAMLISTING"
>Copyright (C) 1991-2000 University of Minnesota
Copyright (C) 2000-2002 John Goerzen and other developers
</PRE
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> may be downloaded, and information found, from its
homepage via either Gopher or HTTP:
</P
><P
> <A
HREF="gopher://quux.org/1/devel/gopher/pygopherd"
TARGET="_top"
>gopher://quux.org/1/devel/gopher/pygopherd</A
></P
><P
> <A
HREF="http://quux.org/devel/gopher/pygopherd"
TARGET="_top"
>http://quux.org/devel/gopher/pygopherd</A
></P
><P
> <SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> may also be downloaded using Subversion. Additionally,
the distributed tar.gz may be updated with a simple "svn update"
command; it is ready to go. For information on getting
<SPAN
CLASS="APPLICATION"
>PyGopherd</SPAN
> with Subversion, please visit <A
HREF="http://svn.complete.org/"
TARGET="_top"
>http://svn.complete.org/</A
>.
</P
></DIV
><DIV
CLASS="REFSECT1"
><A
NAME="AEN586"
></A
><H2
>See Also</H2
><P
> python (1).
</P
></DIV
></DIV
></BODY
></HTML
> | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<hazelcast xmlns="http://www.hazelcast.com/schema/config"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.hazelcast.com/schema/config
http://www.hazelcast.com/schema/config/hazelcast-config-4.1.xsd">
<cluster-name>test-group1</cluster-name>
<cache name="cacheWithPartitionLostListener">
<partition-lost-listeners>
<partition-lost-listener>DummyCachePartitionLostListenerImpl</partition-lost-listener>
</partition-lost-listeners>
</cache>
</hazelcast>
| {
"pile_set_name": "Github"
} |
<?php
declare(strict_types=1);
namespace Neos\Neos\View;
/*
* This file is part of the Neos.Neos package.
*
* (c) Contributors of the Neos Project - www.neos.io
*
* This package is Open Source Software. For the full copyright and license
* information, please view the LICENSE file which was distributed with this
* source code.
*/
use Neos\Flow\Annotations as Flow;
use Neos\Flow\Core\Bootstrap;
use Neos\Flow\Mvc\ActionResponse;
use Neos\Flow\Mvc\View\AbstractView;
use Neos\Fusion\Exception\RuntimeException;
use Neos\Neos\Domain\Service\FusionService;
use Neos\Fusion\Core\Runtime as FusionRuntime;
use Neos\Neos\Domain\Repository\SiteRepository;
use Neos\Neos\Domain\Repository\DomainRepository;
use Neos\Neos\Domain\Service\ContentContextFactory;
use Neos\ContentRepository\Domain\Model\NodeInterface;
use Neos\Flow\Security\Context;
use Neos\Flow\ObjectManagement\ObjectManagerInterface;
use Neos\Flow\Mvc\ActionRequest;
use Neos\Flow\Mvc\Routing\UriBuilder;
use Neos\Flow\Mvc\Controller\ControllerContext;
use Neos\Flow\Mvc\Controller\Arguments;
class FusionExceptionView extends AbstractView
{
use FusionViewI18nTrait;
/**
* This contains the supported options, their default values, descriptions and types.
* @var array
*/
protected $supportedOptions = [
'enableContentCache' => ['defaultValue', true, 'boolean'],
];
/**
* @Flow\Inject
* @var Bootstrap
*/
protected $bootstrap;
/**
* @var ObjectManagerInterface
* @Flow\Inject
*/
protected $objectManager;
/**
* @var FusionService
* @Flow\Inject
*/
protected $fusionService;
/**
* @var FusionRuntime
*/
protected $fusionRuntime;
/**
* @var SiteRepository
* @Flow\Inject
*/
protected $siteRepository;
/**
* @var DomainRepository
* @Flow\Inject
*/
protected $domainRepository;
/**
* @var ContentContextFactory
* @Flow\Inject
*/
protected $contentContextFactory;
/**
* @return string
* @throws \Neos\Flow\I18n\Exception\InvalidLocaleIdentifierException
* @throws \Neos\Fusion\Exception
* @throws \Neos\Neos\Domain\Exception
* @throws \Neos\Flow\Security\Exception
*/
public function render()
{
$domain = $this->domainRepository->findOneByActiveRequest();
if ($domain) {
$site = $domain->getSite();
} else {
$site = $this->siteRepository->findDefault();
}
$httpRequest = $this->bootstrap->getActiveRequestHandler()->getHttpRequest();
$request = ActionRequest::fromHttpRequest($httpRequest);
$request->setControllerPackageKey('Neos.Neos');
$request->setFormat('html');
$uriBuilder = new UriBuilder();
$uriBuilder->setRequest($request);
$controllerContext = new ControllerContext(
$request,
new ActionResponse(),
new Arguments([]),
$uriBuilder
);
$securityContext = $this->objectManager->get(Context::class);
$securityContext->setRequest($request);
$contentContext = $this->contentContextFactory->create(['currentSite' => $site]);
$currentSiteNode = $contentContext->getCurrentSiteNode();
$fusionRuntime = $this->getFusionRuntime($currentSiteNode, $controllerContext);
$this->setFallbackRuleFromDimension($currentSiteNode);
$fusionRuntime->pushContextArray(array_merge(
$this->variables,
[
'node' => $currentSiteNode,
'documentNode' => $currentSiteNode,
'site' => $currentSiteNode,
'editPreviewMode' => null
]
));
try {
$output = $fusionRuntime->render('error');
$output = $this->extractBodyFromOutput($output);
} catch (RuntimeException $exception) {
throw $exception->getPrevious();
}
$fusionRuntime->popContext();
return $output;
}
/**
* @param string $output
* @return string The message body without the message head
*/
protected function extractBodyFromOutput(string $output): string
{
if (substr($output, 0, 5) === 'HTTP/') {
$endOfHeader = strpos($output, "\r\n\r\n");
if ($endOfHeader !== false) {
$output = substr($output, $endOfHeader + 4);
}
}
return $output;
}
/**
* @param NodeInterface $currentSiteNode
* @param ControllerContext $controllerContext
* @return FusionRuntime
* @throws \Neos\Fusion\Exception
* @throws \Neos\Neos\Domain\Exception
*/
protected function getFusionRuntime(NodeInterface $currentSiteNode, ControllerContext $controllerContext): \Neos\Fusion\Core\Runtime
{
if ($this->fusionRuntime === null) {
$this->fusionRuntime = $this->fusionService->createRuntime($currentSiteNode, $controllerContext);
if (isset($this->options['enableContentCache']) && $this->options['enableContentCache'] !== null) {
$this->fusionRuntime->setEnableContentCache($this->options['enableContentCache']);
}
}
return $this->fusionRuntime;
}
}
| {
"pile_set_name": "Github"
} |
# coding=utf-8
from __future__ import absolute_import
import torch
from torch import nn
from torch.autograd import Variable
import numpy as np
class NCALoss(nn.Module):
def __init__(self, alpha=16, k=32, **kwargs):
super(NCALoss, self).__init__()
self.alpha = alpha
self.K = k
def forward(self, inputs, targets):
n = inputs.size(0)
sim_mat = torch.matmul(inputs, inputs.t())
targets = targets
base = 0.5
loss = list()
c = 0
for i in range(n):
pos_pair_ = torch.masked_select(sim_mat[i], targets==targets[i])
# move itself
pos_pair_ = torch.masked_select(pos_pair_, pos_pair_ < 1)
neg_pair_ = torch.masked_select(sim_mat[i], targets!=targets[i])
pos_pair = torch.sort(pos_pair_)[0]
neg_pair = torch.sort(neg_pair_)[0]
            # distance value of the (K+1)-th nearest neighbour from the anchor
pair = torch.cat([pos_pair, neg_pair])
threshold = torch.sort(pair)[0][self.K]
            # take the positive and negative pairs among the K nearest neighbours
pos_neig = torch.masked_select(pos_pair, pos_pair < threshold)
neg_neig = torch.masked_select(neg_pair, neg_pair < threshold)
            # if there is no positive sample among the top-K neighbours, take only the closest positive sample
if len(pos_neig) == 0:
pos_neig = pos_pair[0]
base = torch.mean(sim_mat[i]).item()
            # compute the logits; base keeps the exponentials from overflowing floating point
pos_logit = torch.sum(torch.exp(self.alpha*(base - pos_neig)))
neg_logit = torch.sum(torch.exp(self.alpha*(base - neg_neig)))
loss_ = -torch.log(pos_logit/(pos_logit + neg_logit))
            if loss_.item() < 0.6:
                c += 1
loss.append(loss_)
loss = sum(loss)/n
prec = float(c)/n
mean_neg_sim = torch.mean(neg_pair_).item()
mean_pos_sim = torch.mean(pos_pair_).item()
return loss, prec, mean_pos_sim, mean_neg_sim
def main():
data_size = 32
input_dim = 3
output_dim = 2
num_class = 4
# margin = 0.5
x = Variable(torch.rand(data_size, input_dim), requires_grad=False)
w = Variable(torch.rand(input_dim, output_dim), requires_grad=True)
inputs = x.mm(w)
y_ = 8*list(range(num_class))
targets = Variable(torch.IntTensor(y_))
print(NCALoss(alpha=30)(inputs, targets))
if __name__ == '__main__':
main()
print('Congratulations to you!')
| {
"pile_set_name": "Github"
} |
name: Publish and commit bottles on schedule
on:
workflow_dispatch:
schedule:
# Once every hour
- cron: '*/60 * * * *'
env:
HOMEBREW_FORCE_HOMEBREW_ON_LINUX: 1
jobs:
autopublish:
if: github.repository == 'Homebrew/homebrew-core'
runs-on: ubuntu-latest
steps:
- name: Set up Homebrew
id: set-up-homebrew
uses: Homebrew/actions/setup-homebrew@master
- name: Run automerge
env:
HOMEBREW_GITHUB_API_TOKEN: ${{secrets.HOMEBREW_GITHUB_API_TOKEN}}
run: brew pr-automerge --verbose --publish
| {
"pile_set_name": "Github"
} |
/**
* External dependencies
*/
import { __, sprintf } from '@wordpress/i18n';
import { Button, Icon } from '@wordpress/components';
import { Component, Fragment } from '@wordpress/element';
import { findIndex } from 'lodash';
import PropTypes from 'prop-types';
/**
* Internal dependencies
*/
import Tag from '../tag';
/**
* A list of tags to display selected items.
*/
class Tags extends Component {
constructor( props ) {
super( props );
this.removeAll = this.removeAll.bind( this );
this.removeResult = this.removeResult.bind( this );
}
removeAll() {
const { onChange } = this.props;
onChange( [] );
}
removeResult( key ) {
return () => {
const { selected, onChange } = this.props;
const i = findIndex( selected, { key } );
onChange( [
...selected.slice( 0, i ),
...selected.slice( i + 1 ),
] );
};
}
render() {
const { selected, showClearButton } = this.props;
if ( ! selected.length ) {
return null;
}
return (
<Fragment>
<div className="woocommerce-select-control__tags">
{ selected.map( ( item, i ) => {
if ( ! item.label ) {
return null;
}
const screenReaderLabel = sprintf(
__( '%1$s (%2$s of %3$s)', 'woocommerce-admin' ),
item.label,
i + 1,
selected.length
);
return (
<Tag
key={ item.key }
id={ item.key }
label={ item.label }
remove={ this.removeResult }
screenReaderLabel={ screenReaderLabel }
/>
);
} ) }
</div>
{ showClearButton && (
<Button
className="woocommerce-select-control__clear"
isLink
onClick={ this.removeAll }
>
<Icon icon="dismiss" />
<span className="screen-reader-text">
{ __( 'Clear all', 'woocommerce-admin' ) }
</span>
</Button>
) }
</Fragment>
);
}
}
Tags.propTypes = {
/**
* Function called when selected results change, passed result list.
*/
onChange: PropTypes.func,
/**
* Function to execute when an option is selected.
*/
onSelect: PropTypes.func,
/**
* An array of objects describing selected values. If the label of the selected
* value is omitted, the Tag of that value will not be rendered inside the
* search box.
*/
selected: PropTypes.arrayOf(
PropTypes.shape( {
key: PropTypes.oneOfType( [ PropTypes.number, PropTypes.string ] )
.isRequired,
label: PropTypes.string,
} )
),
/**
* Render a 'Clear' button next to the input box to remove its contents.
*/
showClearButton: PropTypes.bool,
};
export default Tags;
| {
"pile_set_name": "Github"
} |
@CHARSET "ISO-8859-1";
table {
border-width: 1px;
border-style: solid;
border-color: #000000;
border-collapse: collapse;
width: 94%;
margin: 10px 3%;
}
DIV.title {
font-size: 30px;
font-weight: bold;
text-align: center
}
DIV.subtitle {
font-size: large;
text-align: center
}
DIV.contact {
margin:30px 3%;
}
@media print {
DIV.contact {
margin-top: 300px;
}
DIV.title {
margin-top: 400px;
}
}
label {
font-weight: bold;
width: 100px;
float: left;
}
label:after {
content: ":";
}
TH {
background-color: #000000;
color: #ffffff;
padding-left: 5px;
padding-right: 5px;
}
TR {
}
TD {
border-width: 1px;
border-style: solid;
border-color: #a0a0a0;
padding-left: 5px;
padding-right: 5px;
}
BODY {
font-family: verdana;
font-size: small;
}
H1 {
page-break-before: always;
}
H1, H2, H3, H4 {
margin-top: 30px;
margin-right: 3%;
padding: 3px 3%;
color: #404040;
cursor: pointer;
}
H1, H2 {
background-color: #D3D3D3;
}
H3, H4 {
padding-top: 5px;
padding-bottom: 5px;
}
H1:hover, H2:hover, H3:hover, H4:hover {
background-color: #EBEBEB;
}
CODE.evidence {
font-size:larger
}
CODE.block {
color: #000000;
background-color: #DDDC75;
margin: 10px 0;
padding: 5px;
border-width: 1px;
border-style: dotted;
border-color: #000000;
white-space: pre;
display: block;
	font-size: 2em;
}
ul {
margin: 10px 3%;
}
li {
margin: 0 -15px;
}
ul.multicolumn {
list-style: none;
float: left;
padding-right: 0px;
margin-right: 0px;
}
li.multicolumn {
float: left;
width: 200px;
margin-right: 0px;
}
@media screen {
p {
margin: 10px 3%;
line-height: 130%;
}
}
span.fade {
color: gray;
}
span.header {
color: green;
}
span.header-greyed {
color: #4CBE4B;
}
span.data {
color: blue;
}
span.data-greyed {
color: #5D99C4;
}
span.descr {
color: red;
}
div.box {
margin: 15px 3%;
border-style: dotted;
border-width: 1px;
}
div.box-solid {
margin: 15px 3%;
border-style: solid;
border-width: 1px;
}
p.box-title {
font-style: italic;
font-size: 110%;
cursor: pointer;
}
p.box-title:hover {
background-color: #EBEBEB;
}
p.code {
font-family: "Courier New", Courier, monospace;
}
a {
color: #a000a0;
text-decoration: none;
}
a:hover {
color: #a000a0;
text-decoration: underline;
}
td.skip {
color: #808080;
padding-top: 10px;
padding-bottom: 10px;
text-align: center;
}
| {
"pile_set_name": "Github"
} |
/* -*- mode: C++; c-basic-offset: 4; indent-tabs-mode: nil; -*- */
/*
* Copyright (c) 2016 Philip Chimento
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to
* deal in the Software without restriction, including without limitation the
* rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
* sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include <locale.h> // for setlocale, LC_ALL
#include <stdlib.h> // for exit
#include <gio/gio.h>
#include <girepository.h>
#include <glib-object.h>
#include <glib.h>
#include <gjs/gjs.h>
[[noreturn]] static void bail_out(GjsContext* gjs_context, const char* msg) {
g_object_unref(gjs_context);
g_print("Bail out! %s\n", msg);
exit(1);
}
int
main(int argc, char **argv)
{
if (argc < 2)
g_error("Need a test file");
/* The fact that this isn't the default is kind of lame... */
g_setenv("GJS_DEBUG_OUTPUT", "stderr", false);
setlocale(LC_ALL, "");
if (g_getenv("GJS_USE_UNINSTALLED_FILES") != NULL) {
g_irepository_prepend_search_path(g_getenv("TOP_BUILDDIR"));
} else {
g_irepository_prepend_search_path(INSTTESTDIR);
g_irepository_prepend_library_path(INSTTESTDIR);
}
const char *coverage_prefix = g_getenv("GJS_UNIT_COVERAGE_PREFIX");
const char *coverage_output_path = g_getenv("GJS_UNIT_COVERAGE_OUTPUT");
const char *search_path[] = { "resource:///org/gjs/jsunit", NULL };
if (coverage_prefix)
gjs_coverage_enable();
GjsContext *cx = gjs_context_new_with_search_path((char **)search_path);
GjsCoverage *coverage = NULL;
if (coverage_prefix) {
const char *coverage_prefixes[2] = { coverage_prefix, NULL };
if (!coverage_output_path) {
bail_out(cx, "GJS_UNIT_COVERAGE_OUTPUT is required when using GJS_UNIT_COVERAGE_PREFIX");
}
GFile *output = g_file_new_for_commandline_arg(coverage_output_path);
coverage = gjs_coverage_new(coverage_prefixes, cx, output);
g_object_unref(output);
}
GError *error = NULL;
bool success;
int code;
success = gjs_context_eval(cx, "imports.minijasmine;", -1,
"<jasmine>", &code, &error);
if (!success)
bail_out(cx, error->message);
success = gjs_context_eval_file(cx, argv[1], &code, &error);
if (!success)
bail_out(cx, error->message);
/* jasmineEnv.execute() queues up all the tests and runs them
* asynchronously. This should start after the main loop starts, otherwise
* we will hit the main loop only after several tests have already run. For
* consistency we should guarantee that there is a main loop running during
* all tests. */
const char *start_suite_script =
"const GLib = imports.gi.GLib;\n"
"GLib.idle_add(GLib.PRIORITY_DEFAULT, function () {\n"
" try {\n"
" window._jasmineEnv.execute();\n"
" } catch (e) {\n"
" print('Bail out! Exception occurred inside Jasmine:', e);\n"
" window._jasmineRetval = 1;\n"
" window._jasmineMain.quit();\n"
" }\n"
" return GLib.SOURCE_REMOVE;\n"
"});\n"
"window._jasmineMain.run();\n"
"window._jasmineRetval;";
success = gjs_context_eval(cx, start_suite_script, -1, "<jasmine-start>",
&code, &error);
if (!success)
bail_out(cx, error->message);
if (code != 0)
g_print("# Test script failed; see test log for assertions\n");
if (coverage) {
gjs_coverage_write_statistics(coverage);
g_clear_object(&coverage);
}
gjs_memory_report("before destroying context", false);
g_object_unref(cx);
gjs_memory_report("after destroying context", true);
/* For TAP, should actually be return 0; as a nonzero return code would
* indicate an error in the test harness. But that would be quite silly
* when running the tests outside of the TAP driver. */
return code;
}
| {
"pile_set_name": "Github"
} |
// Mantid Repository : https://github.com/mantidproject/mantid
//
// Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
// NScD Oak Ridge National Laboratory, European Spallation Source,
// Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
// SPDX - License - Identifier: GPL - 3.0 +
#include "ALCBaselineModellingView.h"
#include "MantidQtWidgets/Common/HelpWindow.h"
#include <QMenu>
#include <QMessageBox>
#include <QSignalMapper>
using namespace Mantid::API;
using namespace MantidQt::MantidWidgets;
namespace MantidQt {
namespace CustomInterfaces {
ALCBaselineModellingView::ALCBaselineModellingView(QWidget *widget)
: m_widget(widget), m_ui(), m_rangeSelectors(),
m_selectorModifiedMapper(new QSignalMapper(this)) {}
ALCBaselineModellingView::~ALCBaselineModellingView() {}
void ALCBaselineModellingView::initialize() {
m_ui.setupUi(m_widget);
connect(m_ui.fit, SIGNAL(clicked()), SIGNAL(fitRequested()));
m_ui.dataPlot->setCanvasColour(Qt::white);
m_ui.correctedPlot->setCanvasColour(Qt::white);
// Error bars on the plot
const QStringList dataPlotErrors{"Data"};
m_ui.dataPlot->setLinesWithErrors(dataPlotErrors);
const QStringList correctedPlotErrors{"Corrected"};
m_ui.correctedPlot->setLinesWithErrors(correctedPlotErrors);
// Context menu for sections table
m_ui.sections->setContextMenuPolicy(Qt::CustomContextMenu);
connect(m_ui.sections, SIGNAL(customContextMenuRequested(const QPoint &)),
SLOT(sectionsContextMenu(const QPoint &)));
// Make columns non-resizeable and to fill all the available space
#if QT_VERSION < QT_VERSION_CHECK(5, 0, 0)
m_ui.sections->horizontalHeader()->setResizeMode(QHeaderView::Stretch);
#elif QT_VERSION >= QT_VERSION_CHECK(5, 0, 0)
m_ui.sections->horizontalHeader()->setSectionResizeMode(QHeaderView::Stretch);
#endif
connect(m_ui.sections, SIGNAL(cellChanged(int, int)),
SIGNAL(sectionRowModified(int)));
connect(m_selectorModifiedMapper, SIGNAL(mapped(int)),
SIGNAL(sectionSelectorModified(int)));
connect(m_ui.help, SIGNAL(clicked()), this, SLOT(help()));
}
QString ALCBaselineModellingView::function() const {
return m_ui.function->getFunctionString();
}
IALCBaselineModellingView::SectionRow
ALCBaselineModellingView::sectionRow(int row) const {
QString first = m_ui.sections->item(row, 0)->text();
QString second = m_ui.sections->item(row, 1)->text();
return SectionRow(first, second);
}
IALCBaselineModellingView::SectionSelector
ALCBaselineModellingView::sectionSelector(int index) const {
auto rangeSelector = m_rangeSelectors.find(index)->second;
return std::make_pair(rangeSelector->getMinimum(),
rangeSelector->getMaximum());
}
int ALCBaselineModellingView::noOfSectionRows() const {
return m_ui.sections->rowCount();
}
void ALCBaselineModellingView::setDataCurve(MatrixWorkspace_sptr workspace,
std::size_t const &workspaceIndex) {
const auto kwargs = getPlotKwargs(m_ui.dataPlot, "Data");
m_ui.dataPlot->clear();
m_ui.dataPlot->addSpectrum("Data", workspace, workspaceIndex, Qt::black,
kwargs);
}
void ALCBaselineModellingView::setCorrectedCurve(
MatrixWorkspace_sptr workspace, std::size_t const &workspaceIndex) {
const auto kwargs = getPlotKwargs(m_ui.correctedPlot, "Corrected");
m_ui.correctedPlot->clear();
m_ui.correctedPlot->addSpectrum("Corrected", workspace, workspaceIndex,
Qt::blue, kwargs);
}
QHash<QString, QVariant>
ALCBaselineModellingView::getPlotKwargs(PreviewPlot *plot,
const QString &curveName) {
// Ensures the plot is plotted only with data points and no lines
QHash<QString, QVariant> kwargs;
#if QT_VERSION < QT_VERSION_CHECK(5, 0, 0)
plot->setCurveStyle(curveName, -1);
plot->setCurveSymbol(curveName, 0);
#else
UNUSED_ARG(plot);
UNUSED_ARG(curveName);
kwargs.insert("linestyle", QString("None").toLatin1().constData());
kwargs.insert("marker", QString(".").toLatin1().constData());
#endif
return kwargs;
}
void ALCBaselineModellingView::setBaselineCurve(
MatrixWorkspace_sptr workspace, std::size_t const &workspaceIndex) {
m_ui.dataPlot->addSpectrum("Baseline", workspace, workspaceIndex, Qt::red);
m_ui.dataPlot->replot();
}
void ALCBaselineModellingView::removePlot(QString const &plotName) {
m_ui.dataPlot->removeSpectrum(plotName);
m_ui.dataPlot->replot();
}
void ALCBaselineModellingView::setFunction(IFunction_const_sptr func) {
if (!func) {
m_ui.function->clear();
} else {
size_t nParams = func->nParams();
for (size_t i = 0; i < nParams; i++) {
QString name = QString::fromStdString(func->parameterName(i));
double value = func->getParameter(i);
double error = func->getError(i);
m_ui.function->setParameter(name, value);
m_ui.function->setParameterError(name, error);
}
}
}
void ALCBaselineModellingView::setNoOfSectionRows(int rows) {
m_ui.sections->setRowCount(rows);
}
void ALCBaselineModellingView::setSectionRow(
int row, IALCBaselineModellingView::SectionRow values) {
m_ui.sections->blockSignals(
true); // Setting values, no need for 'modified' signals
m_ui.sections->setFocus();
m_ui.sections->selectRow(row);
m_ui.sections->setItem(row, 0, new QTableWidgetItem(values.first));
m_ui.sections->setItem(row, 1, new QTableWidgetItem(values.second));
m_ui.sections->blockSignals(false);
}
void ALCBaselineModellingView::addSectionSelector(
int index, IALCBaselineModellingView::SectionSelector values) {
auto *newSelector = new RangeSelector(m_ui.dataPlot);
if (index % 3 == 0) {
newSelector->setColour(Qt::blue);
} else if ((index - 1) % 3 == 0) {
newSelector->setColour(Qt::red);
} else {
newSelector->setColour(Qt::green);
}
m_selectorModifiedMapper->setMapping(newSelector, index);
connect(newSelector, SIGNAL(selectionChanged(double, double)),
m_selectorModifiedMapper, SLOT(map()));
m_rangeSelectors[index] = newSelector;
// Set initial values
newSelector->setRange(values.first, values.second);
setSelectorValues(newSelector, values);
m_ui.dataPlot->replot();
}
void ALCBaselineModellingView::deleteSectionSelector(int index) {
auto rangeSelector = m_rangeSelectors[index];
m_rangeSelectors.erase(index);
rangeSelector->detach(); // This is not done when it's deleted
m_selectorModifiedMapper->removeMappings(rangeSelector);
delete rangeSelector;
}
void ALCBaselineModellingView::updateSectionSelector(
int index, IALCBaselineModellingView::SectionSelector values) {
setSelectorValues(m_rangeSelectors[index], values);
}
void ALCBaselineModellingView::displayError(const QString &message) {
QMessageBox::critical(m_widget, "Error", message);
}
void ALCBaselineModellingView::sectionsContextMenu(const QPoint &widgetPoint) {
QMenu menu(m_widget);
menu.addAction("Add section", this, SIGNAL(addSectionRequested()));
// Helper mapper to map removal action to row id
QSignalMapper removalActionMapper;
connect(&removalActionMapper, SIGNAL(mapped(int)),
SIGNAL(removeSectionRequested(int)));
int row = m_ui.sections->rowAt(widgetPoint.y());
if (row != -1) {
// Add removal action
QAction *removeAction =
menu.addAction("Remove section", &removalActionMapper, SLOT(map()));
removalActionMapper.setMapping(removeAction, row);
}
menu.exec(QCursor::pos());
}
void ALCBaselineModellingView::setSelectorValues(
RangeSelector *selector,
IALCBaselineModellingView::SectionSelector values) {
// if the values are not increasing then reverse them
if (values.first > values.second) {
const double tempSwapValue = values.first;
values.first = values.second;
values.second = tempSwapValue;
}
selector->setMinimum(values.first);
selector->setMaximum(values.second);
}
void ALCBaselineModellingView::help() {
MantidQt::API::HelpWindow::showCustomInterface(nullptr, QString("Muon ALC"));
}
void ALCBaselineModellingView::emitFitRequested() { emit fitRequested(); }
} // namespace CustomInterfaces
} // namespace MantidQt
| {
"pile_set_name": "Github"
} |
import { MultiArgument, ArgumentStore, Argument } from 'klasa';
export default class CoreMultiArgument extends MultiArgument {
public constructor(store: ArgumentStore, directory: string, file: readonly string[]) {
super(store, directory, file, { aliases: ['...role'] });
}
public get base(): Argument {
return this.store.get('role') as Argument;
}
}
| {
"pile_set_name": "Github"
} |
<?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller as BaseController;
use App\Models\Developer;
use App\Models\Payment;
use App\Models\StatisticalReportsAdmin;
use App\Models\User;
use App\Models\Withdraw;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
class DashboardController extends BaseController
{
public function dashboard(Request $request)
{
$user_24hours_count = User::where('created_at', '>', date('Y-m-d H:i:s', time() - 86400))->count('id');
$developer_24hours_count = Developer::where('created_at', '>', date('Y-m-d H:i:s', time() - 86400))->count('id');
$payment_money_total = Payment::where([
['done_at', '>', date('Y-m-d H:i:s', time() - 86400)],
['money', '>', 0],
])->sum('money');
$payment_actual_money_total = Payment::where([
['done_at', '>', date('Y-m-d H:i:s', time() - 86400)],
['actual_money', '>', 0],
])->sum('actual_money');
$withdraw_count = Withdraw::where([
['status', '=', Withdraw::STATUS_CREATED],
])->count('id');
return view('admin.dashboard', [
'data' => [
'user_count' => $user_24hours_count,
'developer_count' => $developer_24hours_count,
'money_total' => $payment_money_total,
'money_actual_total' => $payment_actual_money_total,
'withdraw_count' => $withdraw_count,
],
]);
}
public function statistics()
{
$statisticalReports = \Cache::store('file')->remember('admin_dashboard_statistics', 5, function (){
$reports = StatisticalReportsAdmin::where('created_at', '>' , (time() - 86400))->get();
$data = [];
foreach ($reports as $report) {
$date_key = strtotime(date('Y-m-d H:00:00', $report->created_at));
if(!array_key_exists($date_key, $data)) {
$data[$date_key] = [
'numbers' => 0,
'points' => 0
];
}
$data[$date_key]['numbers'] += $report->number;
$data[$date_key]['points'] += $report->points;
}
return $data;
});
return $this->responseSuccess('success',$statisticalReports);
}
}
| {
"pile_set_name": "Github"
} |
// RUN: %clang_cc1 -fsyntax-only -fshort-wchar -verify %s
void f() {
(void)L"\U00010000"; // unicode escape produces UTF-16 sequence, so no warning
(void)L'\U00010000'; // expected-error {{character too large for enclosing character literal type}}
(void)L'ab'; // expected-warning {{extraneous characters in character constant ignored}}
(void)L'a\u1000'; // expected-warning {{extraneous characters in character constant ignored}}
}
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.ServiceModel.Syndication;
using Firehose.Web.Infrastructure;
namespace Firehose.Web.Authors
{
public class AndrewHoefling : IAmAMicrosoftMVP
{
public string FirstName => "Andrew";
public string LastName => "Hoefling";
public string StateOrRegion => "New York, United States";
public string EmailAddress => "[email protected]";
public string ShortBioOrTagLine => "Microsoft MVP (Developer Technologies) Open Source developer who loves integrating Xamarin with other platforms";
public Uri WebSite => new Uri("https://www.andrewhoefling.com/");
public string TwitterHandle => "andrew_hoefling";
public string GitHubHandle => "ahoefling";
public string GravatarHash => "beab68478a5128e634590af5e4f01941";
public IEnumerable<Uri> FeedUris
{
get { yield return new Uri("https://www.andrewhoefling.com/feed.xml?category=xamarin&uno-platform"); }
}
public GeoPosition Position => new GeoPosition(43.156578, -77.608849);
public string FeedLanguageCode => "en";
}
}
| {
"pile_set_name": "Github"
} |
{
"extends": "tslint:recommended",
"rulesDirectory": [
"codelyzer"
],
"rules": {
"align": {
"options": [
"parameters",
"statements"
]
},
"array-type": false,
"arrow-return-shorthand": true,
"curly": true,
"deprecation": {
"severity": "warning"
},
"eofline": true,
"import-blacklist": [
true,
"rxjs/Rx"
],
"import-spacing": true,
"indent": {
"options": [
"spaces"
]
},
"max-classes-per-file": false,
"max-line-length": [
true,
140
],
"member-ordering": [
true,
{
"order": [
"static-field",
"instance-field",
"static-method",
"instance-method"
]
}
],
"no-console": [
true,
"debug",
"info",
"time",
"timeEnd",
"trace"
],
"no-empty": false,
"no-inferrable-types": [
true,
"ignore-params"
],
"no-non-null-assertion": true,
"no-redundant-jsdoc": true,
"no-switch-case-fall-through": true,
"no-var-requires": false,
"object-literal-key-quotes": [
true,
"as-needed"
],
"quotemark": [
true,
"single"
],
"semicolon": {
"options": [
"always"
]
},
"space-before-function-paren": {
"options": {
"anonymous": "never",
"asyncArrow": "always",
"constructor": "never",
"method": "never",
"named": "never"
}
},
"typedef-whitespace": {
"options": [
{
"call-signature": "nospace",
"index-signature": "nospace",
"parameter": "nospace",
"property-declaration": "nospace",
"variable-declaration": "nospace"
},
{
"call-signature": "onespace",
"index-signature": "onespace",
"parameter": "onespace",
"property-declaration": "onespace",
"variable-declaration": "onespace"
}
]
},
"variable-name": {
"options": [
"ban-keywords",
"check-format",
"allow-pascal-case"
]
},
"whitespace": {
"options": [
"check-branch",
"check-decl",
"check-operator",
"check-separator",
"check-type",
"check-typecast"
]
},
"component-class-suffix": true,
"contextual-lifecycle": true,
"directive-class-suffix": true,
"no-conflicting-lifecycle": true,
"no-host-metadata-property": true,
"no-input-rename": true,
"no-inputs-metadata-property": true,
"no-output-native": true,
"no-output-on-prefix": true,
"no-output-rename": true,
"no-outputs-metadata-property": true,
"template-banana-in-box": true,
"template-no-negated-async": true,
"use-lifecycle-interface": true,
"use-pipe-transform-interface": true
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2009 Mike Cumings
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.kenai.jbosh;
/**
 * Data type representing the value of the {@code sid} attribute of the
* {@code bosh} element.
*/
final class AttrSessionID extends AbstractAttr<String> {
/**
* Creates a new attribute object.
*
     * @param val attribute value
*/
private AttrSessionID(final String val) {
super(val);
}
/**
* Creates a new attribute instance from the provided String.
*
* @param str string representation of the attribute
* @return attribute instance
*/
static AttrSessionID createFromString(final String str) {
return new AttrSessionID(str);
}
}
| {
"pile_set_name": "Github"
} |
"use strict";
const HTMLElementImpl = require("./HTMLElement-impl").implementation;
const { cloningSteps, domSymbolTree } = require("../helpers/internal-constants");
const { clone } = require("../node");
class HTMLTemplateElementImpl extends HTMLElementImpl {
constructor(args, privateData) {
super(args, privateData);
this._templateContents = this._ownerDocument.createDocumentFragment();
}
get content() {
return this._templateContents;
}
[cloningSteps](copy, node, document, cloneChildren) {
if (!cloneChildren) {
return;
}
for (const child of domSymbolTree.childrenIterator(node._templateContents)) {
const childCopy = clone(child, copy._templateContents._ownerDocument, true);
copy._templateContents.appendChild(childCopy);
}
}
}
module.exports = {
implementation: HTMLTemplateElementImpl
};
| {
"pile_set_name": "Github"
} |
// gameswf_video_impl.h -- Vitaly Alexeev <[email protected]> 2007
// This source code has been donated to the Public Domain. Do
// whatever you want with it.
#ifndef GAMESWF_VIDEO_H
#define GAMESWF_VIDEO_H
#include "gameswf_impl.h"
#include "as_netstream.h"
namespace gameswf
{
struct video_stream_definition : public character_def
{
// Unique id of a gameswf resource
enum { m_class_id = AS_VIDEO_DEF };
virtual bool is(int class_id) const
{
if (m_class_id == class_id) return true;
else return character_def::is(class_id);
}
video_stream_definition(player* player) :
character_def(player)
{
}
// virtual ~video_stream_definition();
character* create_character_instance(character* parent, int id);
void read(stream* in, int tag, movie_definition* m);
virtual void get_bound(rect* bound);
Uint16 m_width;
Uint16 m_height;
private:
// uint8_t reserved_flags;
Uint8 m_deblocking_flags;
bool m_smoothing_flags;
// 0: extern file
// 2: H.263
// 3: screen video (Flash 7+ only)
// 4: VP6
Uint8 m_codec_id;
array<void*> m_frames;
};
struct video_stream_instance : public character
{
// Unique id of a gameswf resource
enum { m_class_id = AS_VIDEO_INST };
virtual bool is(int class_id) const
{
if (m_class_id == class_id) return true;
else return character::is(class_id);
}
video_stream_instance(player* player, video_stream_definition* def, character* parent, int id);
~video_stream_instance();
void display();
virtual character_def* get_character_def() { return m_def.get_ptr(); }
//
// ActionScript overrides
//
// To drop the connection to the Video object, pass null for source.
void attach_netstream(as_netstream* ns)
{
m_ns = ns;
}
gc_ptr<video_handler> m_video_handler;
private:
gc_ptr<video_stream_definition> m_def;
// A Camera object that is capturing video data or a NetStream object.
gc_ptr<as_netstream> m_ns;
};
} // end namespace gameswf
#endif // GAMESWF_VIDEO_H
| {
"pile_set_name": "Github"
} |
With the dark cloud of high raw material costs now clearing, large U.S. newspaper companies are expected to report good third-quarter earnings despite weak growth in advertising.
"I keep hearing good news out of these guys, and they're all smiling about newsprint," said Prudential Securities newspaper industry analyst James Marsh.
Newsprint for newspapers now costs an average $550 a ton, much less than the average forecast of $600 a ton at the start of 1996, he said. Per ton prices peaked at $743 last January.
"Trends in advertising have been a little sluggish, but with lower newsprint costs, most newspapers should have a strong quarter," said newspaper industry analyst Edward Atorino at Oppenheimer & Co.
Retail advertising was tepid through late summer, after rising only about one percent during the first half of the year, compared with the first six months of 1995, analysts said.
Classified advertising rose 10 percent in the first half but likely cooled in the third quarter, Atorino said.
Times Mirror Co will enjoy an especially strong lift from newsprint savings because of its inventory accounting methods and a sharp decline in its consumption of newsprint this year after shutting two big city papers, Marsh said.
"The fall in newsprint prices coupled with ongoing cost reduction efforts will result in strong second-half earnings for newspaper publishing companies this year and provide further benefit into 1997," Atorino said in a written report.
| {
"pile_set_name": "Github"
} |
///////////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2019 F4EXB //
// written by Edouard Griffiths //
// //
// This program is free software; you can redistribute it and/or modify //
// it under the terms of the GNU General Public License as published by //
// the Free Software Foundation as version 3 of the License, or //
// (at your option) any later version. //
// //
// This program is distributed in the hope that it will be useful, //
// but WITHOUT ANY WARRANTY; without even the implied warranty of //
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the //
// GNU General Public License V3 for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with this program. If not, see <http://www.gnu.org/licenses/>. //
///////////////////////////////////////////////////////////////////////////////////
#include <algorithm>
#include <chrono>
#include <thread>
#include "audio/audiofifo.h"
#include "ambeworker.h"
MESSAGE_CLASS_DEFINITION(AMBEWorker::MsgMbeDecode, Message)
MESSAGE_CLASS_DEFINITION(AMBEWorker::MsgTest, Message)
AMBEWorker::AMBEWorker() :
m_running(false),
m_currentGainIn(0),
m_currentGainOut(0),
m_upsamplerLastValue(0.0f),
m_phase(0),
m_upsampling(1),
m_volume(1.0f)
{
m_audioBuffer.resize(48000);
m_audioBufferFill = 0;
m_audioFifo = 0;
std::fill(m_dvAudioSamples, m_dvAudioSamples+SerialDV::MBE_AUDIO_BLOCK_SIZE, 0);
setVolumeFactors();
}
AMBEWorker::~AMBEWorker()
{}
bool AMBEWorker::open(const std::string& deviceRef)
{
return m_dvController.open(deviceRef);
}
void AMBEWorker::close()
{
m_dvController.close();
}
void AMBEWorker::process()
{
m_running = true;
qDebug("AMBEWorker::process: started");
while (m_running)
{
std::this_thread::sleep_for(std::chrono::seconds(1));
}
qDebug("AMBEWorker::process: stopped");
emit finished();
}
void AMBEWorker::stop()
{
m_running = false;
}
void AMBEWorker::handleInputMessages()
{
Message* message;
m_audioBufferFill = 0;
AudioFifo *audioFifo = 0;
while ((message = m_inputMessageQueue.pop()) != 0)
{
if (MsgMbeDecode::match(*message))
{
MsgMbeDecode *decodeMsg = (MsgMbeDecode *) message;
int dBVolume = (decodeMsg->getVolumeIndex() - 30) / 4;
float volume = pow(10.0, dBVolume / 10.0f);
int upsampling = decodeMsg->getUpsampling();
upsampling = upsampling > 6 ? 6 : upsampling < 1 ? 1 : upsampling;
if ((volume != m_volume) || (upsampling != m_upsampling))
{
m_volume = volume;
m_upsampling = upsampling;
setVolumeFactors();
}
m_upsampleFilter.useHP(decodeMsg->getUseHP());
if (m_dvController.decode(m_dvAudioSamples, decodeMsg->getMbeFrame(), decodeMsg->getMbeRate()))
{
if (upsampling > 1) {
upsample(upsampling, m_dvAudioSamples, SerialDV::MBE_AUDIO_BLOCK_SIZE, decodeMsg->getChannels());
} else {
noUpsample(m_dvAudioSamples, SerialDV::MBE_AUDIO_BLOCK_SIZE, decodeMsg->getChannels());
}
audioFifo = decodeMsg->getAudioFifo();
if (audioFifo && (m_audioBufferFill >= m_audioBuffer.size() - 960))
{
uint res = audioFifo->write((const quint8*)&m_audioBuffer[0], m_audioBufferFill);
if (res != m_audioBufferFill) {
qDebug("AMBEWorker::handleInputMessages: %u/%u audio samples written", res, m_audioBufferFill);
}
m_audioBufferFill = 0;
}
}
else
{
qDebug("AMBEWorker::handleInputMessages: MsgMbeDecode: decode failed");
}
}
delete message;
if (m_inputMessageQueue.size() > 100)
{
qDebug("AMBEWorker::handleInputMessages: MsgMbeDecode: too many messages in queue. Flushing...");
m_inputMessageQueue.clear();
break;
}
}
if (audioFifo)
{
uint res = audioFifo->write((const quint8*)&m_audioBuffer[0], m_audioBufferFill);
if (res != m_audioBufferFill) {
qDebug("AMBEWorker::handleInputMessages: %u/%u audio samples written", res, m_audioBufferFill);
}
m_audioBufferFill = 0;
}
m_timestamp = QDateTime::currentDateTime();
}
void AMBEWorker::pushMbeFrame(const unsigned char *mbeFrame,
int mbeRateIndex,
int mbeVolumeIndex,
unsigned char channels,
bool useHP,
int upsampling,
AudioFifo *audioFifo)
{
m_audioFifo = audioFifo;
m_inputMessageQueue.push(MsgMbeDecode::create(mbeFrame, mbeRateIndex, mbeVolumeIndex, channels, useHP, upsampling, audioFifo));
}
bool AMBEWorker::isAvailable()
{
if (m_audioFifo == 0) {
return true;
}
return m_timestamp.time().msecsTo(QDateTime::currentDateTime().time()) > 1000; // 1 second inactivity timeout
}
bool AMBEWorker::hasFifo(AudioFifo *audioFifo)
{
return m_audioFifo == audioFifo;
}
void AMBEWorker::upsample(int upsampling, short *in, int nbSamplesIn, unsigned char channels)
{
for (int i = 0; i < nbSamplesIn; i++)
{
//float cur = m_upsampleFilter.usesHP() ? m_upsampleFilter.runHP((float) m_compressor.compress(in[i])) : (float) m_compressor.compress(in[i]);
float cur = m_upsampleFilter.usesHP() ? m_upsampleFilter.runHP((float) in[i]) : (float) in[i];
float prev = m_upsamplerLastValue;
qint16 upsample;
for (int j = 1; j <= upsampling; j++)
{
upsample = (qint16) m_upsampleFilter.runLP(cur*m_upsamplingFactors[j] + prev*m_upsamplingFactors[upsampling-j]);
m_audioBuffer[m_audioBufferFill].l = channels & 1 ? m_compressor.compress(upsample) : 0;
m_audioBuffer[m_audioBufferFill].r = (channels>>1) & 1 ? m_compressor.compress(upsample) : 0;
if (m_audioBufferFill < m_audioBuffer.size() - 1) {
++m_audioBufferFill;
}
}
m_upsamplerLastValue = cur;
}
if (m_audioBufferFill >= m_audioBuffer.size() - 1) {
qDebug("AMBEWorker::upsample(%d): audio buffer is full check its size", upsampling);
}
}
void AMBEWorker::noUpsample(short *in, int nbSamplesIn, unsigned char channels)
{
for (int i = 0; i < nbSamplesIn; i++)
{
float cur = m_upsampleFilter.usesHP() ? m_upsampleFilter.runHP((float) in[i]) : (float) in[i];
m_audioBuffer[m_audioBufferFill].l = channels & 1 ? cur*m_upsamplingFactors[0] : 0;
m_audioBuffer[m_audioBufferFill].r = (channels>>1) & 1 ? cur*m_upsamplingFactors[0] : 0;
if (m_audioBufferFill < m_audioBuffer.size() - 1) {
++m_audioBufferFill;
}
}
if (m_audioBufferFill >= m_audioBuffer.size() - 1) {
qDebug("AMBEWorker::noUpsample: audio buffer is full check its size");
}
}
void AMBEWorker::setVolumeFactors()
{
m_upsamplingFactors[0] = m_volume;
for (int i = 1; i <= m_upsampling; i++) {
m_upsamplingFactors[i] = (i*m_volume) / (float) m_upsampling;
}
}
| {
"pile_set_name": "Github"
} |
package Slim::Plugin::YALP::Plugin;
# WMA metadata parser for YALP radio
# /tilive1.alice.cdn.interbusiness.it/
use strict;
use Slim::Formats::RemoteMetadata;
use Slim::Utils::Log;
use Data::Dump;
my $log = logger('formats.metadata');
use constant IMAGE_PREFIX => 'http://images.rossoalice.alice.it/musicbox/';
sub initPlugin {
Slim::Formats::RemoteMetadata->registerParser(
match => qr/tilive1.alice.cdn.interbusiness.it/,
func => \&parser,
);
}
sub parser {
my ( $client, $url, $metadata ) = @_;
# Sequence number|Asset ID|Song Title|Artist Name|Comment|Sellable|Small Image|Large Image
# There are 4 songs in the metadata, separated by semicolons,
# the current song has sequence number 0
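# A hypothetical example of the raw string, for illustration only (not captured from the feed):
# ...;0|12345|Some Title|Some Artist|Some Comment|1|cover_s.jpg|cover_l.jpg;1|12346|Next Title|...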
my ($title, $artist, $comment, $simage, $limage)
= $metadata =~ m{;0\|\d+\|([^|]+)?\|([^|]+)?\|([^|]+)?\|\d?\|([^|]+)?\|([^|]+)?;};
my $cover
= $limage =~ /\.(?:jpg|png|gif)/i ? IMAGE_PREFIX . $limage
: $simage =~ /\.(?:jpg|png|gif)/i ? IMAGE_PREFIX . $simage
: undef;
my $meta = {
title => $title,
artist => $artist,
cover => $cover,
};
# This metadata is read by HTTP's getMetadataFor
$client->playingSong->pluginData( wmaMeta => $meta );
main::DEBUGLOG && $log->is_debug && $log->debug( "YALP metadata: " . Data::Dump::dump($meta) );
return 1;
}
1; | {
"pile_set_name": "Github"
} |
### Are you submitting a **bug report** or a **feature request**?
### What is the current behavior?
<!-- If this is a bug, please include steps to reproduce. -->
### What is the expected behavior?
### What's your environment?
<!-- Include Squidwarc version and OS affected, etc. -->
### Other information
<!-- Include here any detailed explanation, stacktraces, related issues, links for Stack Overflow, Twitter, etc. --> | {
"pile_set_name": "Github"
} |
/**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
package lucee.runtime.type;
import java.util.Iterator;
import lucee.runtime.PageContext;
import lucee.runtime.dump.DumpData;
import lucee.runtime.dump.DumpProperties;
import lucee.runtime.exp.PageException;
import lucee.runtime.op.Duplicator;
import lucee.runtime.type.dt.DateTime;
import lucee.runtime.type.util.StructSupport;
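/**
 * Struct implementation that wraps an arbitrary Collection and exposes it through the
 * Struct interface, delegating all operations to the wrapped instance.
 */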
public final class CollectionStruct extends StructSupport implements ObjectWrap, Struct {
private final Collection coll;
public CollectionStruct(Collection coll) {
this.coll = coll;
}
@Override
public void clear() {
coll.clear();
}
@Override
public final boolean containsKey(Key key) {
return coll.containsKey(key);
}
@Override
public final boolean containsKey(PageContext pc, Key key) {
return coll.containsKey(key);
}
@Override
public Collection duplicate(boolean deepCopy) {
return (Collection) Duplicator.duplicate(coll, deepCopy);
}
@Override
public final Object get(Key key) throws PageException {
return coll.get(key);
}
@Override
public final Object get(PageContext pc, Key key) throws PageException {
return coll.get(key);
}
@Override
public final Object get(Key key, Object defaultValue) {
return coll.get(key, defaultValue);
}
@Override
public final Object get(PageContext pc, Key key, Object defaultValue) {
return coll.get(key, defaultValue);
}
@Override
public Key[] keys() {
return coll.keys();
}
@Override
public Object remove(Key key) throws PageException {
return coll.remove(key);
}
@Override
public Object removeEL(Key key) {
return coll.removeEL(key);
}
@Override
public Object set(Key key, Object value) throws PageException {
return coll.set(key, value);
}
@Override
public Object setEL(Key key, Object value) {
return coll.setEL(key, value);
}
@Override
public int size() {
return coll.size();
}
@Override
public Iterator<Collection.Key> keyIterator() {
return coll.keyIterator();
}
@Override
public Iterator<String> keysAsStringIterator() {
return coll.keysAsStringIterator();
}
@Override
public Iterator<Entry<Key, Object>> entryIterator() {
return coll.entryIterator();
}
@Override
public Iterator<Object> valueIterator() {
return coll.valueIterator();
}
@Override
public DumpData toDumpData(PageContext pageContext, int maxlevel, DumpProperties properties) {
return coll.toDumpData(pageContext, maxlevel, properties);
}
@Override
public boolean castToBooleanValue() throws PageException {
return coll.castToBooleanValue();
}
@Override
public double castToDoubleValue() throws PageException {
return coll.castToDoubleValue();
}
@Override
public DateTime castToDateTime() throws PageException {
return coll.castToDateTime();
}
@Override
public String castToString() throws PageException {
return coll.castToString();
}
@Override
public int compareTo(boolean b) throws PageException {
return coll.compareTo(b);
}
@Override
public int compareTo(DateTime dt) throws PageException {
return coll.compareTo(dt);
}
@Override
public int compareTo(double d) throws PageException {
return coll.compareTo(d);
}
@Override
public int compareTo(String str) throws PageException {
return coll.compareTo(str);
}
@Override
public Object getEmbededObject(Object defaultValue) {
return coll;
}
@Override
public Object getEmbededObject() throws PageException {
return coll;
}
/**
 * @return the wrapped collection
 */
public Collection getCollection() {
return coll;
}
@Override
public int getType() {
if (coll instanceof StructSupport) return ((StructSupport) coll).getType();
return Struct.TYPE_REGULAR;
}
} | {
"pile_set_name": "Github"
} |
/***************************************************************************//**
* @file ad9172/src/parameters.h
* @brief Parameters Definitions.
* @author Cristian Pop ([email protected])
********************************************************************************
* Copyright 2019(c) Analog Devices, Inc.
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided with the
* distribution.
* - Neither the name of Analog Devices, Inc. nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
* - The use of this software may or may not infringe the patent rights
* of one or more patent holders. This license does not release you
* from the requirement that you obtain separate licenses from these
* patent holders to use this software.
* - Use of the software either in source or binary form, must be run
* on or directly connected to an Analog Devices Inc. component.
*
* THIS SOFTWARE IS PROVIDED BY ANALOG DEVICES "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, NON-INFRINGEMENT,
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL ANALOG DEVICES BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, INTELLECTUAL PROPERTY RIGHTS, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*******************************************************************************/
#ifndef __PARAMETERS_H__
#define __PARAMETERS_H__
/******************************************************************************/
/***************************** Include Files **********************************/
/******************************************************************************/
#include <xparameters.h>
#define SPI_DEVICE_ID XPAR_PS7_SPI_0_DEVICE_ID
#ifdef PLATFORM_ZYNQMP
#define GPIO_DEVICE_ID XPAR_PSU_GPIO_0_DEVICE_ID
#else
#define GPIO_DEVICE_ID XPAR_PS7_GPIO_0_DEVICE_ID
#endif
#define SPI_HMC7044_CS 0x00
#define SPI_AD9172_CS 0x01
#define UART_DEVICE_ID XPAR_XUARTPS_0_DEVICE_ID
#define UART_IRQ_ID XPAR_XUARTPS_1_INTR
#define INTC_DEVICE_ID XPAR_SCUGIC_SINGLE_DEVICE_ID
#define TX_JESD_BASEADDR XPAR_DAC_JESD204_LINK_TX_AXI_BASEADDR
#define TX_XCVR_BASEADDR XPAR_DAC_JESD204_XCVR_BASEADDR
#define TX_CORE_BASEADDR XPAR_DAC_JESD204_TRANSPORT_TPL_CORE_BASEADDR
#define TX_DMA_BASEADDR XPAR_DAC_DMA_BASEADDR
#define DDR_MEM_BASEADDR (XPAR_PS7_DDR_0_S_AXI_BASEADDR + 0xA000000)
#endif /* __PARAMETERS_H__ */
| {
"pile_set_name": "Github"
} |
; RUN: llc < %s -march=x86 -mcpu=corei7 | FileCheck %s
define <2 x i64> @shl1(<4 x i32> %r, <4 x i32> %a) nounwind readnone ssp {
entry:
; CHECK-NOT: shll
; CHECK: pslld
; CHECK: paddd
; CHECK: cvttps2dq
; CHECK: pmulld
%shl = shl <4 x i32> %r, %a ; <<4 x i32>> [#uses=1]
%tmp2 = bitcast <4 x i32> %shl to <2 x i64> ; <<2 x i64>> [#uses=1]
ret <2 x i64> %tmp2
}
define <2 x i64> @shl2(<16 x i8> %r, <16 x i8> %a) nounwind readnone ssp {
entry:
; CHECK-NOT: shlb
; CHECK: pblendvb
; CHECK: pblendvb
; CHECK: pblendvb
%shl = shl <16 x i8> %r, %a ; <<16 x i8>> [#uses=1]
%tmp2 = bitcast <16 x i8> %shl to <2 x i64> ; <<2 x i64>> [#uses=1]
ret <2 x i64> %tmp2
}
| {
"pile_set_name": "Github"
} |
/**
* @file /magma/core/encodings/qp.c
*
* @brief Functions for encoding/decoding quoted printable data, as described by RFC 2045, section 6.7.
* @note This function operates on standard 8-bit characters, transforming non-printable characters into printable ones.
* It is used as a MIME content encoding, and wraps lines at 76 characters.
*/
#include "magma.h"
/**
* @brief Perform QP (quoted-printable) encoding of a string.
* @param s a pointer to a managed string containing data to be encoded.
* @return a pointer to a managed string containing the QP encoded data, or NULL on failure.
*/
stringer_t * qp_encode(stringer_t *s) {
chr_t hex[4];
uchr_t *p;//, *o;
stringer_t *output, *r;
size_t len, expected = 0, line = 0;
if (st_empty_out(s, &p, &len)) {
log_pedantic("An empty string was passed in for encoding.");
return NULL;
}
// Increment through the stringer and count the characters that need to be encoded.
for (size_t i = 0; i < len; i++) {
if (*p < '!' || *p > '~' || *p == '=' || *p == ' ' || *p == '\r' || *p == '\n' || *p == '\t') {
expected += 3;
}
else {
expected++;
}
p++;
}
// Include room for the soft line break sequence every seventy six characters.
expected += ((expected + QP_LINE_WRAP_LENGTH) / QP_LINE_WRAP_LENGTH) * 3;
// Allocate one byte for printable characters and three bytes for non-printable characters.
if (!(output = st_alloc_opts(MANAGED_T | JOINTED | HEAP, expected))) {
log_pedantic("Could not allocate a buffer large enough to hold encoded result. {requested = %zu}", expected);
return NULL;
}
// Get setup.
p = st_data_get(s);
//o = st_data_get(output);
// Increment through the stringer and copy the data into the new stringer.
for (size_t i = 0; i < len; i++) {
// Escape the characters matching this boolean while simply copying any other characters we encounter.
if (*p < '!' || *p > '~' || *p == '=' || *p == ' ' || *p == '\r' || *p == '\n' || *p == '\t') {
// If we're within three characters of the limit, append a soft line break to the buffer.
if (line > (QP_LINE_WRAP_LENGTH - 3) && snprintf(hex, 4, "=\r\n") == 3 && (r = st_append(output, PLACER(&hex[0], 3)))) {
output = r;
line = 0;
}
if (snprintf(hex, 4, "=%02X", *p) == 3 && (r = st_append(output, PLACER(&hex[0], 3)))) {
output = r;
line += 3;
}
}
else {
// If we're near the line length limit, this will append a soft line break before appending the next character.
if (line > (QP_LINE_WRAP_LENGTH - 1) && snprintf(hex, 4, "=\r\n") == 3 && (r = st_append(output, PLACER(&hex[0], 3)))) {
output = r;
line = 0;
}
if ((r = st_append(output, PLACER(p, 1)))) {
output = r;
line++;
}
}
// We always advance the input pointer.
p++;
}
return output;
}
/**
* @brief Perform QP (quoted-printable) decoding of a string.
* @param s the managed string containing data to be decoded.
* @return a pointer to a managed string containing the 8-bit decoded output, or NULL on failure.
*/
stringer_t * qp_decode(stringer_t *s) {
uchr_t *p, *o;
stringer_t *output;
size_t len, written = 0;
if (st_empty_out(s, &p, &len)) {
log_pedantic("An empty string was passed in for decoding.");
return NULL;
}
// Allocate one byte for printable characters and three bytes for non-printable characters.
if (!(output = st_alloc(len))) {
log_pedantic("Could not allocate a buffer large enough to hold decoded result. {requested = %zu}", len);
return NULL;
}
// Get setup.
o = st_data_get(output);
#ifdef MAGMA_PEDANTIC
// In pedantic mode we perform an extra check to make sure the loop doesn't loop past zero.
while (len && len <= st_length_get(s)) {
#else
while (len) {
#endif
// Advance past the trigger.
if (*p == '=') {
len--;
p++;
// Valid hex pair.
if (len >= 2 && hex_valid_chr(*p) && hex_valid_chr(*(p + 1))) {
*o++ = hex_decode_chr(*p, *(p + 1));
written++;
len -= 2;
p += 2;
}
// Soft line breaks are signaled by a line break following an equal sign.
else if (len >= 2 && *p == '\r' && *(p + 1) == '\n') {
len -= 2;
p += 2;
}
else if (len >= 1 && *p == '\n') {
len--;
p++;
}
// Equal signs which aren't followed by a valid hex pair or a line break are illegal, but if the character is printable
// we can let through the original sequence.
else if (len >= 1 && ((*p >= '!' && *p <= '<') || (*p >= '>' && *p <= '~'))) {
*o++ = '=';
*o++ = *p++;
written += 2;
len--;
}
// Characters outside the printable range are simply skipped.
else if (len >= 1) {
len--;
p++;
}
}
// Let through any characters found inside this range.
else if ((*p >= '!' && *p <= '<') || (*p >= '>' && *p <= '~')) {
*o++ = *p++;
written++;
len--;
}
// Characters outside the range above should have been encoded. Any that weren't should be skipped.
else {
len--;
p++;
}
}
// We allocated a default string buffer, which means the length is tracked so we need to set the data length.
st_length_set(output, written);
return output;
}
| {
"pile_set_name": "Github"
} |
'use strict';
/*
* An AngularJS Localization Service
*
* Written by Jim Lavin
* http://codingsmackdown.tv
*
*/
angular.module('localization', [])
// localization service responsible for retrieving resource files from the server and
// managing the translation dictionary
.provider('localize', function localizeProvider() {
this.languages = ['en-US'];
this.defaultLanguage = 'en-US';
this.ext = 'js';
this.baseUrl = 'i18n/';
var provider = this;
this.$get = ['$http', '$rootScope', '$window', '$filter', function ($http, $rootScope, $window, $filter) {
var localize = {
// use the $window service to get the language of the user's browser
language:'',
// array to hold the localized resource string entries
dictionary:[],
// location of the resource file
url: undefined,
// flag to indicate if the service has loaded the resource file
resourceFileLoaded:false,
// success handler for all server communication
successCallback:function (data) {
// store the returned array in the dictionary
localize.dictionary = data;
// set the flag that the resource are loaded
localize.resourceFileLoaded = true;
// broadcast that the file has been loaded
$rootScope.$broadcast('localizeResourcesUpdated');
},
// allows setting of language on the fly
setLanguage: function(value) {
localize.language = this.fallbackLanguage(value);
localize.initLocalizedResources();
},
fallbackLanguage: function(value) {
value = String(value);
if (provider.languages.indexOf(value) > -1) {
return value;
}
value = value.split('-')[0];
if (provider.languages.indexOf(value) > -1) {
return value;
}
return provider.defaultLanguage;
},
// allows setting of resource url on the fly
setUrl: function(value) {
localize.url = value;
localize.initLocalizedResources();
},
// builds the url for locating an image
buildImgUrl: function(imageUrl) {
return $http({ method: "GET", url: imageUrl, cache: false });
},
// builds the url for locating the resource file
buildUrl: function(baseUrl) {
if(!localize.language){
var lang, androidLang;
// works for earlier version of Android (2.3.x)
if ($window.navigator && $window.navigator.userAgent && (androidLang = $window.navigator.userAgent.match(/android.*\W(\w\w)-(\w\w)\W/i))) {
lang = androidLang[1];
} else {
// works for iOS, Android 4.x and other devices
lang = $window.navigator.userLanguage || $window.navigator.language;
}
// set language
localize.language = this.fallbackLanguage(lang);
}
return baseUrl + 'resources-locale_' + localize.language + '.' + provider.ext;
},
// loads the language resource file from the server
initLocalizedResources:function () {
// build the url to retrieve the localized resource file
var url = localize.url || localize.buildUrl(provider.baseUrl);
// request the resource file
$http({ method:"GET", url:url, cache:false }).success(localize.successCallback).error(function () {
// the request failed set the url to the default resource file
var url = provider.baseUrl + 'resources-locale_default' + '.' + provider.ext;
// request the default resource file
$http({ method:"GET", url:url, cache:false }).success(localize.successCallback);
});
},
// checks the dictionary for a localized resource string
getLocalizedString: function(KEY) {
// default the result to an empty string
var result = '';
// make sure the dictionary has valid data
if (localize.resourceFileLoaded) {
// use the filter service to only return those entries which match the KEY
// and only take the first result
var entry = $filter('filter')(localize.dictionary, function(element) {
return element.key === KEY;
}
)[0];
// set the result
result = entry.value ? entry.value : KEY;
}
// return the value to the call
return result;
}
};
// force the load of the resource file
localize.initLocalizedResources();
// return the local instance when called
return localize;
} ];
})
// simple translation filter
// usage {{ TOKEN | i18n }}
.filter('i18n', ['localize', function (localize) {
return function (input) {
return localize.getLocalizedString(input);
};
}])
// translation directive that can handle dynamic strings
// updates the text value of the attached element
// usage <span data-i18n="TOKEN" ></span>
// or
// <span data-i18n="TOKEN|VALUE1|VALUE2" ></span>
.directive('i18n', ['localize', function(localize){
var i18nDirective = {
restrict:"EAC",
updateText:function(elm, token, html){
var values = token.split('|');
if (values.length >= 1) {
// construct the tag to insert into the element
var tag = localize.getLocalizedString(values[0]);
// update the element only if data was returned
if ((tag !== null) && (tag !== undefined) && (tag !== '')) {
if (values.length > 1) {
for (var index = 1; index < values.length; index++) {
var target = '{' + (index - 1) + '}';
tag = tag.replace(target, values[index]);
}
}
// insert the text into the element
elm[html ? 'html':'text'](tag);
};
}
},
link:function (scope, elm, attrs) {
scope.$on('localizeResourcesUpdated', function() {
i18nDirective.updateText(elm, attrs.i18n, angular.isDefined(attrs.i18nHtml));
});
attrs.$observe('i18n', function (value) {
i18nDirective.updateText(elm, attrs.i18n, angular.isDefined(attrs.i18nHtml));
});
}
};
return i18nDirective;
}])
// translation directive that can handle dynamic strings
// updates the attribute value of the attached element
// usage <span data-i18n-attr="TOKEN|ATTRIBUTE" ></span>
// or
// <span data-i18n-attr="TOKEN|ATTRIBUTE|VALUE1|VALUE2" ></span>
.directive('i18nAttr', ['localize', function (localize) {
var i18NAttrDirective = {
restrict: "EAC",
updateText:function(elm, token){
var values = token.split('|');
// construct the tag to insert into the element
var tag = localize.getLocalizedString(values[0]);
// update the element only if data was returned
if ((tag !== null) && (tag !== undefined) && (tag !== '')) {
if (values.length > 2) {
for (var index = 2; index < values.length; index++) {
var target = '{' + (index - 2) + '}';
tag = tag.replace(target, values[index]);
}
}
// insert the text into the element
elm.attr(values[1], tag);
}
},
link: function (scope, elm, attrs) {
scope.$on('localizeResourcesUpdated', function() {
i18NAttrDirective.updateText(elm, attrs.i18nAttr);
});
attrs.$observe('i18nAttr', function (value) {
i18NAttrDirective.updateText(elm, value);
});
}
};
return i18NAttrDirective;
}])
// translation directive that handles the localization of images.
// usage <img data-i18n-img-src="IMAGE" />
.directive('i18nImgSrc', [
'localize', function(localize) {
var i18NImageDirective = {
restrict: 'A',
link: function(scope, element, attrs) {
var i18Nsrc = attrs.i18nImgSrc;
var imagePath = provider.baseUrl + '/images/' + localize.language + '/';
var imageUrl = imagePath + i18Nsrc;
localize.buildImgUrl(imageUrl).success(function() {
element[0].src = imageUrl;
}).error(function() { element[0].src = provider.baseUrl + '/images/default/' + i18Nsrc; });
}
};
return i18NImageDirective;
}
]);
| {
"pile_set_name": "Github"
} |
# crypto-browserify
A (partial) port of `crypto` to the browser.
[](https://travis-ci.org/dominictarr/crypto-browserify)
[](http://ci.testling.com/dominictarr/crypto-browserify)
Basically, I found some crypto implemented in JS lying around on the internet somewhere
and wrapped it in the part of the `crypto` api that I am currently using,
in a way that is compatible with [browserify](https://github.com/substack/node-browserify/).
I will extend this if I need more features. If anyone else wants to extend this,
I will add you as a maintainer,
provided that you agree that it should replicate the [node.js/crypto](http://nodejs.org/api/crypto.html) api exactly, of course.
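## Example
A minimal usage sketch, assuming the `createHash` part of the node api is among the
parts wrapped here (check the source for exactly what is implemented):
```js
var crypto = require('crypto-browserify')

// hash some text and print the hex digest
var digest = crypto.createHash('sha1').update('hello world').digest('hex')
console.log(digest)
```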
| {
"pile_set_name": "Github"
} |
// ***************************************************************************
// *
// * Copyright (C) 2010 International Business Machines
// * Corporation and others. All Rights Reserved.
// * Tool: com.ibm.icu.dev.tool.cldr.LDML2ICUConverter.java
// * Source File:<path>/common/main/sr_ME.xml
// *
// ***************************************************************************
sr_Latn_ME{
/**
* empty target resource
*/
___{""}
}
| {
"pile_set_name": "Github"
} |
#region Copyright (C) 2005-2011 Team MediaPortal
// Copyright (C) 2005-2011 Team MediaPortal
// http://www.team-mediaportal.com
//
// MediaPortal is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 2 of the License, or
// (at your option) any later version.
//
// MediaPortal is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with MediaPortal. If not, see <http://www.gnu.org/licenses/>.
#endregion
using System;
using System.Runtime.InteropServices;
using DirectShowLib;
using TvLibrary.Channels;
namespace TvLibrary.Implementations.DVB
{
internal class ViXSATSC : IDisposable
{
#region constants
private readonly Guid guidViXSTunerExtention = new Guid(0x02779308, 0x77d8, 0x4914, 0x9f, 0x15, 0x7f, 0xa6, 0xe1,
0x55, 0x84, 0xc7);
#endregion
#region variables
private readonly bool _isViXSATSC;
private readonly IntPtr _tempValue = IntPtr.Zero; //Marshal.AllocCoTaskMem(1024);
private readonly IKsPropertySet _propertySet;
#endregion
/// <summary>
/// Gets a value indicating whether this instance is generic qam.
/// </summary>
/// <value>
/// <c>true</c> if this instance is generic qam; otherwise, <c>false</c>.
/// </value>
public bool IsCamPresent()
{
return false;
}
public bool IsViXSATSC
{
get { return _isViXSATSC; }
}
/// <summary>
/// Initializes a new instance of the <see cref="ViXSATSC"/> class.
/// </summary>
/// <param name="tunerFilter">The tuner filter.</param>
public ViXSATSC(IBaseFilter tunerFilter)
{
IPin pin = DsFindPin.ByName(tunerFilter, "MPEG2 Transport");
if (pin != null)
{
_propertySet = tunerFilter as IKsPropertySet;
if (_propertySet != null)
{
KSPropertySupport supported;
_propertySet.QuerySupported(guidViXSTunerExtention, (int)BdaDigitalModulator.MODULATION_TYPE, out supported);
if ((supported & KSPropertySupport.Set) != 0)
{
Log.Log.Debug("ViXS ATSC: DVB-S card found!");
_tempValue = Marshal.AllocCoTaskMem(1024);
_isViXSATSC = true;
}
else
{
Log.Log.Debug("ViXS ATSC: card NOT found!");
_isViXSATSC = false;
}
}
}
else
Log.Log.Info("ViXS ATSC: could not find MPEG2 Transport pin!");
}
/// <summary>
/// sets the QAM modulation for ViXS ATSC cards
/// </summary>
public void SetViXSQam(ATSCChannel channel)
{
KSPropertySupport supported;
_propertySet.QuerySupported(guidViXSTunerExtention, (int)BdaDigitalModulator.MODULATION_TYPE, out supported);
if ((supported & KSPropertySupport.Set) == KSPropertySupport.Set)
{
Log.Log.Debug("ViXS ATSC: Set ModulationType value: {0}", (Int32)channel.ModulationType);
Marshal.WriteInt32(_tempValue, (Int32)channel.ModulationType);
int hr = _propertySet.Set(guidViXSTunerExtention, (int)BdaDigitalModulator.MODULATION_TYPE, _tempValue, 4,
_tempValue, 4);
if (hr != 0)
{
Log.Log.Info("ViXS ATSC: Set returned: 0x{0:X} - {1}", hr, DsError.GetErrorText(hr));
}
}
}
/// <summary>
/// gets the QAM modulation for ViXS ATSC cards
/// </summary>
public void GetViXSQam(ATSCChannel channel)
{
KSPropertySupport supported;
_propertySet.QuerySupported(guidViXSTunerExtention, (int)BdaDigitalModulator.MODULATION_TYPE, out supported);
if ((supported & KSPropertySupport.Get) == KSPropertySupport.Get)
{
int length;
Marshal.WriteInt32(_tempValue, 0);
int hr = _propertySet.Get(guidViXSTunerExtention, (int)BdaDigitalModulator.MODULATION_TYPE, _tempValue, 4,
_tempValue, 4, out length);
if (hr != 0)
{
Log.Log.Info("ViXS ATSC: Get returned:{0:X}", hr);
}
Log.Log.Info("ViXS ATSC: Get ModulationType returned value: {0}", Marshal.ReadInt32(_tempValue));
}
}
/// <summary>
/// Disposes COM task memory resources
/// </summary>
public void Dispose()
{
Marshal.FreeCoTaskMem(_tempValue);
}
}
} | {
"pile_set_name": "Github"
} |
// Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build openbsd
// +build 386 amd64 arm
package unix
import (
"errors"
"fmt"
"strconv"
"syscall"
"unsafe"
)
// Pledge implements the pledge syscall.
//
// The pledge syscall does not accept execpromises on OpenBSD releases
// before 6.3.
//
// execpromises must be empty when Pledge is called on OpenBSD
// releases predating 6.3, otherwise an error will be returned.
//
// For more information see pledge(2).
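//
// A typical call, promising only stdio and read-only filesystem access,
// might look like:
//
//	err := unix.Pledge("stdio rpath", "")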
func Pledge(promises, execpromises string) error {
maj, min, err := majmin()
if err != nil {
return err
}
err = pledgeAvailable(maj, min, execpromises)
if err != nil {
return err
}
pptr, err := syscall.BytePtrFromString(promises)
if err != nil {
return err
}
// This variable will hold either a nil unsafe.Pointer or
// an unsafe.Pointer to a string (execpromises).
var expr unsafe.Pointer
// If we're running on OpenBSD > 6.2, pass execpromises to the syscall.
if maj > 6 || (maj == 6 && min > 2) {
exptr, err := syscall.BytePtrFromString(execpromises)
if err != nil {
return err
}
expr = unsafe.Pointer(exptr)
}
_, _, e := syscall.Syscall(SYS_PLEDGE, uintptr(unsafe.Pointer(pptr)), uintptr(expr), 0)
if e != 0 {
return e
}
return nil
}
// PledgePromises implements the pledge syscall.
//
// This changes the promises and leaves the execpromises untouched.
//
// For more information see pledge(2).
func PledgePromises(promises string) error {
maj, min, err := majmin()
if err != nil {
return err
}
err = pledgeAvailable(maj, min, "")
if err != nil {
return err
}
// This variable holds the execpromises and is always nil.
var expr unsafe.Pointer
pptr, err := syscall.BytePtrFromString(promises)
if err != nil {
return err
}
_, _, e := syscall.Syscall(SYS_PLEDGE, uintptr(unsafe.Pointer(pptr)), uintptr(expr), 0)
if e != 0 {
return e
}
return nil
}
// PledgeExecpromises implements the pledge syscall.
//
// This changes the execpromises and leaves the promises untouched.
//
// For more information see pledge(2).
func PledgeExecpromises(execpromises string) error {
maj, min, err := majmin()
if err != nil {
return err
}
err = pledgeAvailable(maj, min, execpromises)
if err != nil {
return err
}
// This variable holds the promises and is always nil.
var pptr unsafe.Pointer
exptr, err := syscall.BytePtrFromString(execpromises)
if err != nil {
return err
}
_, _, e := syscall.Syscall(SYS_PLEDGE, uintptr(pptr), uintptr(unsafe.Pointer(exptr)), 0)
if e != 0 {
return e
}
return nil
}
// majmin returns major and minor version number for an OpenBSD system.
func majmin() (major int, minor int, err error) {
var v Utsname
err = Uname(&v)
if err != nil {
return
}
major, err = strconv.Atoi(string(v.Release[0]))
if err != nil {
err = errors.New("cannot parse major version number returned by uname")
return
}
minor, err = strconv.Atoi(string(v.Release[2]))
if err != nil {
err = errors.New("cannot parse minor version number returned by uname")
return
}
return
}
// pledgeAvailable checks for availability of the pledge(2) syscall
// based on the running OpenBSD version.
func pledgeAvailable(maj, min int, execpromises string) error {
// If OpenBSD <= 5.9, pledge is not available.
if (maj == 5 && min != 9) || maj < 5 {
return fmt.Errorf("pledge syscall is not available on OpenBSD %d.%d", maj, min)
}
// If OpenBSD <= 6.2 and execpromises is not empty,
// return an error - execpromises is not available before 6.3
if (maj < 6 || (maj == 6 && min <= 2)) && execpromises != "" {
return fmt.Errorf("cannot use execpromises on OpenBSD %d.%d", maj, min)
}
return nil
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" ?><!DOCTYPE TS><TS language="ja_JP" version="2.1">
<context>
<name>KWalletPlugin</name>
<message>
<location filename="../kwalletpasswordbackend.cpp" line="53"/>
<source>KWallet</source>
<translation>KWallet</translation>
</message>
</context>
</TS> | {
"pile_set_name": "Github"
} |
/******************************************************************************************************
* (C) 2014 [email protected]. This file is part of Engauge Digitizer, which is released *
* under GNU General Public License version 2 (GPLv2) or (at your option) any later version. See file *
* LICENSE or go to gnu.org/licenses for details. Distribution requires prior written permission. *
******************************************************************************************************/
#include "CallbackDocumentHash.h"
#include "Document.h"
#include "DocumentHashGenerator.h"
#include "Logger.h"
DocumentHashGenerator::DocumentHashGenerator()
{
}
DocumentHash DocumentHashGenerator::generate (const Document &document) const
{
// LOG4CPP_INFO_S is below
// Get hash by letting functor iterate through Document
CallbackDocumentHash ftor (document.documentAxesPointsRequired());
Functor2wRet<const QString &, const Point &, CallbackSearchReturn> ftorWithCallback = functor_ret (ftor,
&CallbackDocumentHash::callback);
document.iterateThroughCurvePointsAxes (ftorWithCallback);
document.iterateThroughCurvesPointsGraphs (ftorWithCallback);
LOG4CPP_INFO_S ((*mainCat)) << "DocumentHashGenerator::generator result=" << ftor.hash().data ();
return ftor.hash ();
}
| {
"pile_set_name": "Github"
} |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test general health of the fonts."""
import json
from nototools import render
def _run_harfbuzz(text, font, language, extra_parameters=None):
"""Run harfbuzz on some text and return the shaped list."""
try:
# if extra_parameters is a string, split it into a list
extra_parameters = extra_parameters.split(" ")
except AttributeError:
pass
hb_output = render.run_harfbuzz_on_text(text, font, language, extra_parameters)
return json.loads(hb_output)
_advance_cache = {}
def get_advances(text, font, extra_parameters=None):
"""Get a list of horizontal advances for text rendered in a font."""
try:
return _advance_cache[(text, font, extra_parameters)]
except KeyError:
pass
hb_output = _run_harfbuzz(text, font, None, extra_parameters)
advances = [glyph["ax"] for glyph in hb_output]
_advance_cache[(text, font, extra_parameters)] = advances
return advances
_shape_cache = {}
def get_glyphs(text, font, extra_parameters=None):
"""Get a list of shaped glyphs for text rendered in a font."""
try:
return _shape_cache[(text, font, extra_parameters)]
except KeyError:
pass
hb_output = _run_harfbuzz(text, font, None, extra_parameters)
shapes = [glyph["g"] for glyph in hb_output]
_shape_cache[(text, font, extra_parameters)] = shapes
return shapes
| {
"pile_set_name": "Github"
} |
<?php
namespace Guzzle\Http\Message\Header;
/**
* Interface for creating headers
*/
interface HeaderFactoryInterface
{
/**
* Create a header from a header name and a single value
*
* @param string $header Name of the header to create
* @param string $value Value to set on the header
*
* @return HeaderInterface
*/
public function createHeader($header, $value = null);
}
| {
"pile_set_name": "Github"
} |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: >
MemberExpression calls ToObject(MemberExpression) and
ToString(Expression). CallExpression calls ToObject(CallExpression) and
ToString(Expression)
es5id: 11.2.1_A3_T2
description: Checking Number case
---*/
//CHECK#1
if (1..toString() !== "1") {
$ERROR('#1: 1..toString() === "1". Actual: ' + (1..toString()));
}
//CHECK#2
if (1.1.toFixed(5) !== "1.10000") {
$ERROR('#2: 1.1.toFixed(5) === "1.10000". Actual: ' + (1.1.toFixed(5)));
}
//CHECK#3
if (1["toString"]() !== "1") {
$ERROR('#3: 1["toString"]() === "1". Actual: ' + (1["toString"]()));
}
//CHECK#4
if (1.["toFixed"](5) !== "1.00000") {
$ERROR('#4: 1.["toFixed"](5) === "1.00000". Actual: ' + (1.["toFixed"](5)));
}
//CHECK#5
if (new Number(1).toString() !== "1") {
$ERROR('#5: new Number(1).toString() === "1". Actual: ' + (new Number(1).toString()));
}
//CHECK#6
if (new Number(1)["toFixed"](5) !== "1.00000") {
$ERROR('#6: new Number(1)["toFixed"](5) === "1.00000". Actual: ' + (new Number(1)["toFixed"](5)));
}
| {
"pile_set_name": "Github"
} |